BERATools 0.2.0 (py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- beratools/__init__.py +9 -0
- beratools/core/__init__.py +0 -0
- beratools/core/algo_centerline.py +351 -0
- beratools/core/constants.py +86 -0
- beratools/core/dijkstra_algorithm.py +460 -0
- beratools/core/logger.py +85 -0
- beratools/core/tool_base.py +133 -0
- beratools/gui/__init__.py +15 -0
- beratools/gui/batch_processing_dlg.py +463 -0
- beratools/gui/beratools.json +2300 -0
- beratools/gui/bt_data.py +487 -0
- beratools/gui/bt_gui_main.py +691 -0
- beratools/gui/cli.py +18 -0
- beratools/gui/gui.json +8 -0
- beratools/gui/img/BERALogo.png +0 -0
- beratools/gui/img/closed.gif +0 -0
- beratools/gui/img/closed.png +0 -0
- beratools/gui/img/open.gif +0 -0
- beratools/gui/img/open.png +0 -0
- beratools/gui/img/tool.gif +0 -0
- beratools/gui/img/tool.png +0 -0
- beratools/gui/map_window.py +146 -0
- beratools/gui/tool_widgets.py +493 -0
- beratools/gui_tk/ASCII Banners.txt +248 -0
- beratools/gui_tk/__init__.py +20 -0
- beratools/gui_tk/beratools_main.py +515 -0
- beratools/gui_tk/bt_widgets.py +442 -0
- beratools/gui_tk/cli.py +18 -0
- beratools/gui_tk/gui.json +8 -0
- beratools/gui_tk/img/BERALogo.png +0 -0
- beratools/gui_tk/img/closed.gif +0 -0
- beratools/gui_tk/img/closed.png +0 -0
- beratools/gui_tk/img/open.gif +0 -0
- beratools/gui_tk/img/open.png +0 -0
- beratools/gui_tk/img/tool.gif +0 -0
- beratools/gui_tk/img/tool.png +0 -0
- beratools/gui_tk/main.py +14 -0
- beratools/gui_tk/map_window.py +144 -0
- beratools/gui_tk/runner.py +1481 -0
- beratools/gui_tk/tooltip.py +55 -0
- beratools/third_party/pyqtlet2/__init__.py +9 -0
- beratools/third_party/pyqtlet2/leaflet/__init__.py +26 -0
- beratools/third_party/pyqtlet2/leaflet/control/__init__.py +6 -0
- beratools/third_party/pyqtlet2/leaflet/control/control.py +59 -0
- beratools/third_party/pyqtlet2/leaflet/control/draw.py +52 -0
- beratools/third_party/pyqtlet2/leaflet/control/layers.py +20 -0
- beratools/third_party/pyqtlet2/leaflet/core/Parser.py +24 -0
- beratools/third_party/pyqtlet2/leaflet/core/__init__.py +2 -0
- beratools/third_party/pyqtlet2/leaflet/core/evented.py +180 -0
- beratools/third_party/pyqtlet2/leaflet/layer/__init__.py +5 -0
- beratools/third_party/pyqtlet2/leaflet/layer/featuregroup.py +34 -0
- beratools/third_party/pyqtlet2/leaflet/layer/icon/__init__.py +1 -0
- beratools/third_party/pyqtlet2/leaflet/layer/icon/icon.py +30 -0
- beratools/third_party/pyqtlet2/leaflet/layer/imageoverlay.py +18 -0
- beratools/third_party/pyqtlet2/leaflet/layer/layer.py +105 -0
- beratools/third_party/pyqtlet2/leaflet/layer/layergroup.py +45 -0
- beratools/third_party/pyqtlet2/leaflet/layer/marker/__init__.py +1 -0
- beratools/third_party/pyqtlet2/leaflet/layer/marker/marker.py +91 -0
- beratools/third_party/pyqtlet2/leaflet/layer/tile/__init__.py +2 -0
- beratools/third_party/pyqtlet2/leaflet/layer/tile/gridlayer.py +4 -0
- beratools/third_party/pyqtlet2/leaflet/layer/tile/tilelayer.py +16 -0
- beratools/third_party/pyqtlet2/leaflet/layer/vector/__init__.py +5 -0
- beratools/third_party/pyqtlet2/leaflet/layer/vector/circle.py +15 -0
- beratools/third_party/pyqtlet2/leaflet/layer/vector/circlemarker.py +18 -0
- beratools/third_party/pyqtlet2/leaflet/layer/vector/path.py +5 -0
- beratools/third_party/pyqtlet2/leaflet/layer/vector/polygon.py +14 -0
- beratools/third_party/pyqtlet2/leaflet/layer/vector/polyline.py +18 -0
- beratools/third_party/pyqtlet2/leaflet/layer/vector/rectangle.py +14 -0
- beratools/third_party/pyqtlet2/leaflet/map/__init__.py +1 -0
- beratools/third_party/pyqtlet2/leaflet/map/map.py +220 -0
- beratools/third_party/pyqtlet2/mapwidget.py +45 -0
- beratools/third_party/pyqtlet2/web/custom.js +43 -0
- beratools/third_party/pyqtlet2/web/map.html +23 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/layers-2x.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/layers.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/marker-icon-2x.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/marker-icon.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/marker-shadow.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/leaflet.css +656 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/leaflet.js +6 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.codeclimate.yml +14 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.editorconfig +4 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.gitattributes +22 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.travis.yml +43 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/LICENSE +20 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/layers-2x.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/layers.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/marker-icon-2x.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/marker-icon.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/marker-shadow.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/spritesheet-2x.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/spritesheet.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/spritesheet.svg +156 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/leaflet.draw.css +10 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/leaflet.draw.js +10 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_rotatedMarker_020/LICENSE +22 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_rotatedMarker_020/leaflet.rotatedMarker.js +57 -0
- beratools/tools/Beratools_r_script.r +1120 -0
- beratools/tools/Ht_metrics.py +116 -0
- beratools/tools/__init__.py +7 -0
- beratools/tools/batch_processing.py +132 -0
- beratools/tools/canopy_threshold_relative.py +670 -0
- beratools/tools/canopycostraster.py +222 -0
- beratools/tools/centerline.py +176 -0
- beratools/tools/common.py +885 -0
- beratools/tools/fl_regen_csf.py +428 -0
- beratools/tools/forest_line_attributes.py +408 -0
- beratools/tools/forest_line_ecosite.py +216 -0
- beratools/tools/lapis_all.py +103 -0
- beratools/tools/least_cost_path_from_chm.py +152 -0
- beratools/tools/line_footprint_absolute.py +363 -0
- beratools/tools/line_footprint_fixed.py +282 -0
- beratools/tools/line_footprint_functions.py +720 -0
- beratools/tools/line_footprint_relative.py +64 -0
- beratools/tools/ln_relative_metrics.py +615 -0
- beratools/tools/r_cal_lpi_elai.r +25 -0
- beratools/tools/r_generate_pd_focalraster.r +101 -0
- beratools/tools/r_interface.py +80 -0
- beratools/tools/r_point_density.r +9 -0
- beratools/tools/rpy_chm2trees.py +86 -0
- beratools/tools/rpy_dsm_chm_by.py +81 -0
- beratools/tools/rpy_dtm_by.py +63 -0
- beratools/tools/rpy_find_cellsize.py +43 -0
- beratools/tools/rpy_gnd_csf.py +74 -0
- beratools/tools/rpy_hummock_hollow.py +85 -0
- beratools/tools/rpy_hummock_hollow_raster.py +71 -0
- beratools/tools/rpy_las_info.py +51 -0
- beratools/tools/rpy_laz2las.py +40 -0
- beratools/tools/rpy_lpi_elai_lascat.py +466 -0
- beratools/tools/rpy_normalized_lidar_by.py +56 -0
- beratools/tools/rpy_percent_above_dbh.py +80 -0
- beratools/tools/rpy_points2trees.py +88 -0
- beratools/tools/rpy_vegcoverage.py +94 -0
- beratools/tools/tiler.py +206 -0
- beratools/tools/tool_template.py +54 -0
- beratools/tools/vertex_optimization.py +620 -0
- beratools/tools/zonal_threshold.py +144 -0
- beratools-0.2.0.dist-info/METADATA +63 -0
- beratools-0.2.0.dist-info/RECORD +142 -0
- beratools-0.2.0.dist-info/WHEEL +4 -0
- beratools-0.2.0.dist-info/entry_points.txt +2 -0
- beratools-0.2.0.dist-info/licenses/LICENSE +22 -0
beratools/core/dijkstra_algorithm.py
ADDED

@@ -0,0 +1,460 @@
# -*- coding: utf-8 -*-

"""
/***************************************************************************
    LeastCostPath Algorithm
    This algorithm is adapted from a QGIS plugin:
    Find the least cost path with given cost raster and points
    Original author: FlowMap Group@SESS.PKU
    Source code repository: https://github.com/Gooong/LeastCostPath
 ***************************************************************************/

/***************************************************************************
 *                                                                         *
 *   This program is free software; you can redistribute it and/or modify  *
 *   it under the terms of the GNU General Public License as published by  *
 *   the Free Software Foundation; either version 2 of the License, or     *
 *   (at your option) any later version.                                   *
 *                                                                         *
 ***************************************************************************/
"""

__author__ = 'Richard Zeng'
__date__ = '2023-03-01'
__copyright__ = '(C) 2023 by AppliedGRG'

# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'

from math import sqrt
import queue
from collections import defaultdict
from skimage.graph import route_through_array
from beratools.tools.common import *

sqrt2 = sqrt(2)
USE_NUMPY_FOR_DIJKSTRA = True


class MinCostPathHelper:
    @staticmethod
    def _point_to_row_col(pointxy, ras_transform):
        col, row = ras_transform.rowcol(pointxy.x(), pointxy.y())

        return row, col

    @staticmethod
    def _row_col_to_point(row_col, ras_transform):
        x, y = ras_transform.xy(row_col[0], row_col[1])
        return x, y

    @staticmethod
    def create_points_from_path(ras_transform, min_cost_path, start_point, end_point):
        path_points = list(map(lambda row_col: MinCostPathHelper._row_col_to_point(row_col, ras_transform),
                               min_cost_path))
        path_points[0] = (start_point.x, start_point.y)
        path_points[-1] = (end_point.x, end_point.y)
        return path_points

    @staticmethod
    def create_path_feature_from_points(path_points, attr_vals):
        path_points_raw = [[pt.x, pt.y] for pt in path_points]

        return LineString(path_points_raw), attr_vals

    @staticmethod
    def block2matrix_numpy(block, nodata):
        contains_negative = False
        with np.nditer(block, flags=["refs_ok"], op_flags=['readwrite']) as it:
            for x in it:
                # TODO: this speeds up a lot, but needs further inspection
                # if np.isclose(x, nodata) or np.isnan(x):
                if x <= nodata or np.isnan(x):
                    x[...] = 9999.0
                elif x < 0:
                    contains_negative = True

        return block, contains_negative

    @staticmethod
    def block2matrix(block, nodata):
        contains_negative = False
        width, height = block.shape
        # TODO: deal with nodata
        matrix = [[None if np.isclose(block[i][j], nodata) or np.isclose(block[i][j], BT_NODATA)
                   else block[i][j] for j in range(height)] for i in range(width)]

        for l in matrix:
            for v in l:
                if v is not None:
                    if v < 0 and not np.isclose(v, BT_NODATA):
                        contains_negative = True

        return matrix, contains_negative


def dijkstra(start_tuple, end_tuples, block, find_nearest, feedback=None):
    class Grid:
        def __init__(self, matrix):
            self.map = matrix
            self.h = len(matrix)
            self.w = len(matrix[0])
            self.manhattan_boundry = None
            self.curr_boundry = None

        def _in_bounds(self, id):
            x, y = id
            return 0 <= x < self.h and 0 <= y < self.w

        def _passable(self, id):
            x, y = id
            return self.map[x][y] is not None

        def is_valid(self, id):
            return self._in_bounds(id) and self._passable(id)

        def neighbors(self, id):
            x, y = id
            results = [(x + 1, y), (x, y - 1), (x - 1, y), (x, y + 1),
                       (x + 1, y - 1), (x + 1, y + 1), (x - 1, y - 1), (x - 1, y + 1)]
            results = list(filter(self.is_valid, results))
            return results

        @staticmethod
        def manhattan_distance(id1, id2):
            x1, y1 = id1
            x2, y2 = id2
            return abs(x1 - x2) + abs(y1 - y2)

        @staticmethod
        def min_manhattan(curr_node, end_nodes):
            return min(map(lambda node: Grid.manhattan_distance(curr_node, node), end_nodes))

        @staticmethod
        def max_manhattan(curr_node, end_nodes):
            return max(map(lambda node: Grid.manhattan_distance(curr_node, node), end_nodes))

        @staticmethod
        def all_manhattan(curr_node, end_nodes):
            return {end_node: Grid.manhattan_distance(curr_node, end_node) for end_node in end_nodes}

        def simple_cost(self, cur, nex):
            cx, cy = cur
            nx, ny = nex
            currV = self.map[cx][cy]
            offsetV = self.map[nx][ny]
            if cx == nx or cy == ny:
                return (currV + offsetV) / 2
            else:
                return sqrt2 * (currV + offsetV) / 2

    result = []
    grid = Grid(block)

    end_dict = defaultdict(list)
    for end_tuple in end_tuples:
        end_dict[end_tuple[0]].append(end_tuple)
    end_row_cols = set(end_dict.keys())
    end_row_col_list = list(end_row_cols)
    start_row_col = start_tuple[0]

    frontier = queue.PriorityQueue()
    frontier.put((0, start_row_col))
    came_from = {}
    cost_so_far = {}
    decided = set()

    if not grid.is_valid(start_row_col):
        return result

    # init progress
    index = 0
    distance_dic = grid.all_manhattan(start_row_col, end_row_cols)
    if find_nearest:
        total_manhattan = min(distance_dic.values())
    else:
        total_manhattan = sum(distance_dic.values())

    total_manhattan = total_manhattan + 1
    bound = total_manhattan
    if feedback:
        feedback.setProgress(1 + 100 * (1 - bound / total_manhattan))

    came_from[start_row_col] = None
    cost_so_far[start_row_col] = 0

    while not frontier.empty():
        _, current_node = frontier.get()
        if current_node in decided:
            continue
        decided.add(current_node)

        # update the progress bar
        if feedback:
            if feedback.isCanceled():
                return None

        index = (index + 1) % len(end_row_col_list)
        target_node = end_row_col_list[index]
        new_manhattan = grid.manhattan_distance(current_node, target_node)
        if new_manhattan < distance_dic[target_node]:
            if find_nearest:
                curr_bound = new_manhattan
            else:
                curr_bound = bound - (distance_dic[target_node] - new_manhattan)

            distance_dic[target_node] = new_manhattan

            if curr_bound < bound:
                bound = curr_bound
                if feedback:
                    feedback.setProgress(1 + 100 * (1 - bound / total_manhattan) * (1 - bound / total_manhattan))

        # reached a destination
        if current_node in end_row_cols:
            path = []
            costs = []
            traverse_node = current_node
            while traverse_node is not None:
                path.append(traverse_node)
                costs.append(cost_so_far[traverse_node])
                traverse_node = came_from[traverse_node]

            # start point and end point overlap
            if len(path) == 1:
                path.append(start_row_col)
                costs.append(0.0)
            path.reverse()
            costs.reverse()
            result.append((path, costs, end_dict[current_node]))

            end_row_cols.remove(current_node)
            end_row_col_list.remove(current_node)
            if len(end_row_cols) == 0 or find_nearest:
                break

        # relax distances
        for nex in grid.neighbors(current_node):
            new_cost = cost_so_far[current_node] + grid.simple_cost(current_node, nex)
            if nex not in cost_so_far or new_cost < cost_so_far[nex]:
                cost_so_far[nex] = new_cost
                frontier.put((new_cost, nex))
                came_from[nex] = current_node

    return result


def valid_node(node, size_of_grid):
    """Checks if the node is within the grid boundaries."""
    if node[0] < 0 or node[0] >= size_of_grid:
        return False
    if node[1] < 0 or node[1] >= size_of_grid:
        return False
    return True


def up(node):
    return node[0] - 1, node[1]


def down(node):
    return node[0] + 1, node[1]


def left(node):
    return node[0], node[1] - 1


def right(node):
    return node[0], node[1] + 1


def backtrack(initial_node, desired_node, distances):
    # idea: start at the last node, then repeatedly step to the neighbour with the least distance
    # last node
    path = [desired_node]

    size_of_grid = distances.shape[0]

    while True:
        # check up, down, left, right - choose the direction that has the least distance
        potential_distances = []
        potential_nodes = []

        directions = [up, down, left, right]

        for direction in directions:
            node = direction(path[-1])
            if valid_node(node, size_of_grid):
                potential_nodes.append(node)
                potential_distances.append(distances[node[0], node[1]])

        # least_distance_index = np.argmin(potential_distances)
        print(potential_nodes)

        least_distance_index = np.argsort(potential_distances)

        pt_added = False
        for index in least_distance_index:
            p_point = potential_nodes[index]
            if p_point == (1, 6):
                pass
            if p_point not in path:
                path.append(p_point)
                pt_added = True
                break

        if index >= len(potential_distances) - 1 and not pt_added:
            print("No best path found.")
            return

        if path[-1][0] == initial_node[0] and path[-1][1] == initial_node[1]:
            break

    return list(reversed(path))


def dijkstra_np(start_tuple, end_tuple, matrix):
    """Dijkstra's algorithm for finding the shortest path between two nodes in a graph.

    Args:
        start_node (list): [row, col] coordinates of the initial node
        end_node (list): [row, col] coordinates of the desired node
        matrix (array 2d): 2d numpy array of cell costs

    Returns:
        list[list]: list of nodes that form the shortest path
    """

    # source and destination are free
    start_node = start_tuple[0]
    end_node = end_tuple[0]
    path = None
    costs = None

    try:
        matrix[start_node[0], start_node[1]] = 0
        matrix[end_node[0], end_node[1]] = 0

        path, cost = route_through_array(matrix, start_node, end_node)
        costs = [0.0 for i in range(len(path))]
    except Exception as e:
        print(e)
        return None

    return [(path, costs, end_tuple)]


def find_least_cost_path(out_image, in_meta, line, find_nearest=True, output_linear_reference=False):
    default_return = None
    ras_nodata = in_meta['nodata']

    pt_start = line.coords[0]
    pt_end = line.coords[-1]

    out_image = np.where(out_image < 0, np.nan, out_image)  # set negative values to nan
    if len(out_image.shape) > 2:
        out_image = np.squeeze(out_image, axis=0)

    if USE_NUMPY_FOR_DIJKSTRA:
        matrix, contains_negative = MinCostPathHelper.block2matrix_numpy(out_image, ras_nodata)
    else:
        matrix, contains_negative = MinCostPathHelper.block2matrix(out_image, ras_nodata)

    if contains_negative:
        print('ERROR: Raster has negative values.')
        return default_return

    transformer = rasterio.transform.AffineTransformer(in_meta['transform'])

    if (type(pt_start[0]) is tuple or
            type(pt_start[1]) is tuple or
            type(pt_end[0]) is tuple or
            type(pt_end[1]) is tuple):
        print("Point initialization error. Input is tuple.")
        return default_return

    start_tuples = []
    end_tuples = []
    start_tuple = []
    try:
        start_tuples = [(transformer.rowcol(pt_start[0], pt_start[1]), Point(pt_start[0], pt_start[1]), 0)]
        end_tuples = [(transformer.rowcol(pt_end[0], pt_end[1]), Point(pt_end[0], pt_end[1]), 1)]
        start_tuple = start_tuples[0]
        end_tuple = end_tuples[0]

        # clamp start/end point coords in case they fall outside the matrix index range
        mat_size = matrix.shape
        mat_size = (mat_size[0] - 1, mat_size[1] - 1)
        start_tuple = (min(start_tuple[0], mat_size), start_tuple[1], start_tuple[2])
        end_tuple = (min(end_tuple[0], mat_size), end_tuple[1], end_tuple[2])

    except Exception as e:
        print(e)

    if USE_NUMPY_FOR_DIJKSTRA:
        result = dijkstra_np(start_tuple, end_tuple, matrix)
    else:
        # TODO: change end_tuples to end_tuple
        result = dijkstra(start_tuple, end_tuples, matrix, find_nearest)

    if result is None:
        # raise Exception
        return default_return

    if len(result) == 0:
        # raise Exception
        print('No result returned.')
        return default_return

    path_points = None
    for path, costs, end_tuple in result:
        path_points = MinCostPathHelper.create_points_from_path(transformer, path,
                                                                start_tuple[1], end_tuple[1])
        if output_linear_reference:
            # TODO: code not reached
            # add linear reference
            for point, cost in zip(path_points, costs):
                point.addMValue(cost)

        total_cost = costs[-1]

    feat_attr = (start_tuple[2], end_tuple[2], total_cost)
    lc_path = None
    if len(path_points) >= 2:
        lc_path = LineString(path_points)

    return lc_path


def find_least_cost_path_skimage(cost_clip, in_meta, seed_line):
    lc_path_new = []
    if len(cost_clip.shape) > 2:
        cost_clip = np.squeeze(cost_clip, axis=0)

    out_transform = in_meta['transform']
    transformer = rasterio.transform.AffineTransformer(out_transform)

    x1, y1 = list(seed_line.coords)[0][:2]
    x2, y2 = list(seed_line.coords)[-1][:2]
    row1, col1 = transformer.rowcol(x1, y1)
    row2, col2 = transformer.rowcol(x2, y2)

    try:
        path_new = route_through_array(cost_clip[0], [row1, col1], [row2, col2])
    except Exception as e:
        print(e)
        return None

    if path_new[0]:
        for row, col in path_new[0]:
            x, y = transformer.xy(row, col)
            lc_path_new.append((x, y))

    if len(lc_path_new) < 2:
        print('No least cost path detected, pass.')
        return None
    else:
        lc_path_new = LineString(lc_path_new)

    return lc_path_new
beratools/core/logger.py
ADDED
@@ -0,0 +1,85 @@
import logging
import logging.handlers
import sys
from pathlib import Path
from beratools.gui.bt_data import BTData

bt = BTData()


class NoParsingFilter(logging.Filter):
    def filter(self, record):
        return not record.getMessage().startswith("parsing")


class Logger(object):
    def __init__(self, name, console_level=logging.INFO, file_level=logging.INFO):
        self.logger = logging.getLogger(name)
        self.name = name
        self.console_level = console_level
        self.file_level = file_level

        self.setup_logger()

    def get_logger(self):
        return self.logger

    def print(self, msg, flush=True):
        """
        Route print-style messages through the logger.

        Parameters
        ----------
        msg : message to log at INFO level
        flush : if True, flush all attached handlers after logging

        Returns
        -------
        None
        """
        self.logger.info(msg)
        if flush:
            for handler in self.logger.handlers:
                handler.flush()

    def setup_logger(self):
        """
        Configure the root logger with a console handler and a rotating file handler,
        using the name, console_level and file_level set in the constructor.

        # log = setup_logger('', r'PATH_TO_LOG_FILE')
        # log.debug("Debug message, should only appear in the file.")

        # for i in range(0, 10000):
        #     print("From print(): Info message, should appear in file and stdout.")
        #     log.info("Info message, should appear in file and stdout.")
        #     log.warning("Warning message, should appear in file and stdout.")
        #     log.error("Error message, should appear in file and stdout.")
        #     log.error("parsing, should appear in file and stdout.")
        """
        # Change root logger level from WARNING (default) to NOTSET so all messages are delegated.
        logging.getLogger().setLevel(logging.NOTSET)
        log_file = bt.get_logger_file_name(self.name)

        # Add stdout handler, with level INFO
        console_handler = logging.StreamHandler(sys.stdout)
        console_handler.setLevel(self.console_level)
        formatter = logging.Formatter("%(message)s")
        console_handler.setFormatter(formatter)
        logging.getLogger().addHandler(console_handler)

        # Add rotating file handler, 5 MB size limit, 5 backups
        rotating_handler = logging.handlers.RotatingFileHandler(
            filename=log_file, maxBytes=5 * 1000 * 1000, backupCount=5
        )

        rotating_handler.setLevel(self.file_level)
        formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
        rotating_handler.setFormatter(formatter)
        logging.getLogger().addHandler(rotating_handler)
        logging.getLogger().addFilter(NoParsingFilter())
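
A brief usage sketch of the Logger wrapper above (not part of the package). It assumes BTData.get_logger_file_name can resolve a writable log file location, as the class itself relies on; the logger name 'centerline' is an arbitrary example.

from beratools.core.logger import Logger

log_handler = Logger('centerline')        # arbitrary example name used for the log file
log = log_handler.get_logger()

log_handler.print('Processing started')   # INFO to stdout and the rotating file, then flushed
log.warning('CHM resolution is coarser than expected')  # also reaches both root handlers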
beratools/core/tool_base.py
ADDED

@@ -0,0 +1,133 @@
from multiprocessing.pool import Pool
import multiprocessing
import concurrent.futures
import warnings

import pandas as pd
import geopandas as gpd

from beratools.core.constants import *

from dask.distributed import Client, as_completed
from dask import config as cfg
import dask.distributed
# import ray

# settings for dask
cfg.set({'distributed.scheduler.worker-ttl': None})
warnings.simplefilter("ignore", dask.distributed.comm.core.CommClosedError)


class OperationCancelledException(Exception):
    pass


class ToolBase(object):
    def __init__(self):
        pass

    def execute_multiprocessing(self):
        pass


def result_is_valid(result):
    if type(result) is list or type(result) is tuple:
        if len(result) > 0:
            return True
    elif type(result) is pd.DataFrame or type(result) is gpd.GeoDataFrame:
        if not result.empty:
            return True
    elif result:
        return True

    return False


def print_msg(app_name, step, total_steps):
    print(f' "PROGRESS_LABEL {app_name} {step} of {total_steps}" ', flush=True)
    print(f' %{step / total_steps * 100} ', flush=True)


def execute_multiprocessing(in_func, in_data, app_name, processes, workers,
                            mode=PARALLEL_MODE, verbose=False):
    out_result = []
    step = 0
    print("Using {} CPU cores".format(processes))
    total_steps = len(in_data)

    try:
        if mode == ParallelMode.MULTIPROCESSING:
            multiprocessing.set_start_method('spawn')
            print("Multiprocessing started...")

            with Pool(processes) as pool:
                print(multiprocessing.active_children())
                for result in pool.imap_unordered(in_func, in_data):
                    if result_is_valid(result):
                        out_result.append(result)

                    step += 1
                    print_msg(app_name, step, total_steps)

                pool.close()
                pool.join()
        elif mode == ParallelMode.SEQUENTIAL:
            for line in in_data:
                result_item = in_func(line)
                if result_is_valid(result_item):
                    out_result.append(result_item)

                step += 1
                print_msg(app_name, step, total_steps)
        elif mode == ParallelMode.CONCURRENT:
            with concurrent.futures.ProcessPoolExecutor(max_workers=processes) as executor:
                futures = [executor.submit(in_func, line) for line in in_data]
                for future in concurrent.futures.as_completed(futures):
                    result_item = future.result()
                    if result_is_valid(result_item):
                        out_result.append(result_item)

                    step += 1
                    print_msg(app_name, step, total_steps)
        elif mode == ParallelMode.DASK:
            dask_client = Client(threads_per_worker=1, n_workers=processes)
            print(dask_client)
            try:
                print('start processing')
                result = dask_client.map(in_func, in_data)
                seq = as_completed(result)

                for i in seq:
                    result_item = i.result()
                    if result_is_valid(result_item):
                        out_result.append(result_item)

                    step += 1
                    print_msg(app_name, step, total_steps)
            except Exception as e:
                dask_client.close()

            dask_client.close()

        # ! important !
        # commented out temporarily, may enable later if ray is needed
        # elif mode == ParallelMode.RAY:
        #     ray.init(log_to_driver=False)
        #     process_single_line_ray = ray.remote(in_func)
        #     result_ids = [process_single_line_ray.remote(item) for item in in_data]
        #
        #     while len(result_ids):
        #         done_id, result_ids = ray.wait(result_ids)
        #         result_item = ray.get(done_id[0])
        #
        #         if result_is_valid(result_item):
        #             out_result.append(result_item)
        #
        #         step += 1
        #         print_msg(app_name, step, total_steps)

        # ray.shutdown()
    except OperationCancelledException:
        print("Operation cancelled")
        return None

    return out_result