BERATools 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (142) hide show
  1. beratools/__init__.py +9 -0
  2. beratools/core/__init__.py +0 -0
  3. beratools/core/algo_centerline.py +351 -0
  4. beratools/core/constants.py +86 -0
  5. beratools/core/dijkstra_algorithm.py +460 -0
  6. beratools/core/logger.py +85 -0
  7. beratools/core/tool_base.py +133 -0
  8. beratools/gui/__init__.py +15 -0
  9. beratools/gui/batch_processing_dlg.py +463 -0
  10. beratools/gui/beratools.json +2300 -0
  11. beratools/gui/bt_data.py +487 -0
  12. beratools/gui/bt_gui_main.py +691 -0
  13. beratools/gui/cli.py +18 -0
  14. beratools/gui/gui.json +8 -0
  15. beratools/gui/img/BERALogo.png +0 -0
  16. beratools/gui/img/closed.gif +0 -0
  17. beratools/gui/img/closed.png +0 -0
  18. beratools/gui/img/open.gif +0 -0
  19. beratools/gui/img/open.png +0 -0
  20. beratools/gui/img/tool.gif +0 -0
  21. beratools/gui/img/tool.png +0 -0
  22. beratools/gui/map_window.py +146 -0
  23. beratools/gui/tool_widgets.py +493 -0
  24. beratools/gui_tk/ASCII Banners.txt +248 -0
  25. beratools/gui_tk/__init__.py +20 -0
  26. beratools/gui_tk/beratools_main.py +515 -0
  27. beratools/gui_tk/bt_widgets.py +442 -0
  28. beratools/gui_tk/cli.py +18 -0
  29. beratools/gui_tk/gui.json +8 -0
  30. beratools/gui_tk/img/BERALogo.png +0 -0
  31. beratools/gui_tk/img/closed.gif +0 -0
  32. beratools/gui_tk/img/closed.png +0 -0
  33. beratools/gui_tk/img/open.gif +0 -0
  34. beratools/gui_tk/img/open.png +0 -0
  35. beratools/gui_tk/img/tool.gif +0 -0
  36. beratools/gui_tk/img/tool.png +0 -0
  37. beratools/gui_tk/main.py +14 -0
  38. beratools/gui_tk/map_window.py +144 -0
  39. beratools/gui_tk/runner.py +1481 -0
  40. beratools/gui_tk/tooltip.py +55 -0
  41. beratools/third_party/pyqtlet2/__init__.py +9 -0
  42. beratools/third_party/pyqtlet2/leaflet/__init__.py +26 -0
  43. beratools/third_party/pyqtlet2/leaflet/control/__init__.py +6 -0
  44. beratools/third_party/pyqtlet2/leaflet/control/control.py +59 -0
  45. beratools/third_party/pyqtlet2/leaflet/control/draw.py +52 -0
  46. beratools/third_party/pyqtlet2/leaflet/control/layers.py +20 -0
  47. beratools/third_party/pyqtlet2/leaflet/core/Parser.py +24 -0
  48. beratools/third_party/pyqtlet2/leaflet/core/__init__.py +2 -0
  49. beratools/third_party/pyqtlet2/leaflet/core/evented.py +180 -0
  50. beratools/third_party/pyqtlet2/leaflet/layer/__init__.py +5 -0
  51. beratools/third_party/pyqtlet2/leaflet/layer/featuregroup.py +34 -0
  52. beratools/third_party/pyqtlet2/leaflet/layer/icon/__init__.py +1 -0
  53. beratools/third_party/pyqtlet2/leaflet/layer/icon/icon.py +30 -0
  54. beratools/third_party/pyqtlet2/leaflet/layer/imageoverlay.py +18 -0
  55. beratools/third_party/pyqtlet2/leaflet/layer/layer.py +105 -0
  56. beratools/third_party/pyqtlet2/leaflet/layer/layergroup.py +45 -0
  57. beratools/third_party/pyqtlet2/leaflet/layer/marker/__init__.py +1 -0
  58. beratools/third_party/pyqtlet2/leaflet/layer/marker/marker.py +91 -0
  59. beratools/third_party/pyqtlet2/leaflet/layer/tile/__init__.py +2 -0
  60. beratools/third_party/pyqtlet2/leaflet/layer/tile/gridlayer.py +4 -0
  61. beratools/third_party/pyqtlet2/leaflet/layer/tile/tilelayer.py +16 -0
  62. beratools/third_party/pyqtlet2/leaflet/layer/vector/__init__.py +5 -0
  63. beratools/third_party/pyqtlet2/leaflet/layer/vector/circle.py +15 -0
  64. beratools/third_party/pyqtlet2/leaflet/layer/vector/circlemarker.py +18 -0
  65. beratools/third_party/pyqtlet2/leaflet/layer/vector/path.py +5 -0
  66. beratools/third_party/pyqtlet2/leaflet/layer/vector/polygon.py +14 -0
  67. beratools/third_party/pyqtlet2/leaflet/layer/vector/polyline.py +18 -0
  68. beratools/third_party/pyqtlet2/leaflet/layer/vector/rectangle.py +14 -0
  69. beratools/third_party/pyqtlet2/leaflet/map/__init__.py +1 -0
  70. beratools/third_party/pyqtlet2/leaflet/map/map.py +220 -0
  71. beratools/third_party/pyqtlet2/mapwidget.py +45 -0
  72. beratools/third_party/pyqtlet2/web/custom.js +43 -0
  73. beratools/third_party/pyqtlet2/web/map.html +23 -0
  74. beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/layers-2x.png +0 -0
  75. beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/layers.png +0 -0
  76. beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/marker-icon-2x.png +0 -0
  77. beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/marker-icon.png +0 -0
  78. beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/marker-shadow.png +0 -0
  79. beratools/third_party/pyqtlet2/web/modules/leaflet_193/leaflet.css +656 -0
  80. beratools/third_party/pyqtlet2/web/modules/leaflet_193/leaflet.js +6 -0
  81. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.codeclimate.yml +14 -0
  82. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.editorconfig +4 -0
  83. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.gitattributes +22 -0
  84. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.travis.yml +43 -0
  85. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/LICENSE +20 -0
  86. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/layers-2x.png +0 -0
  87. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/layers.png +0 -0
  88. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/marker-icon-2x.png +0 -0
  89. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/marker-icon.png +0 -0
  90. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/marker-shadow.png +0 -0
  91. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/spritesheet-2x.png +0 -0
  92. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/spritesheet.png +0 -0
  93. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/spritesheet.svg +156 -0
  94. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/leaflet.draw.css +10 -0
  95. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/leaflet.draw.js +10 -0
  96. beratools/third_party/pyqtlet2/web/modules/leaflet_rotatedMarker_020/LICENSE +22 -0
  97. beratools/third_party/pyqtlet2/web/modules/leaflet_rotatedMarker_020/leaflet.rotatedMarker.js +57 -0
  98. beratools/tools/Beratools_r_script.r +1120 -0
  99. beratools/tools/Ht_metrics.py +116 -0
  100. beratools/tools/__init__.py +7 -0
  101. beratools/tools/batch_processing.py +132 -0
  102. beratools/tools/canopy_threshold_relative.py +670 -0
  103. beratools/tools/canopycostraster.py +222 -0
  104. beratools/tools/centerline.py +176 -0
  105. beratools/tools/common.py +885 -0
  106. beratools/tools/fl_regen_csf.py +428 -0
  107. beratools/tools/forest_line_attributes.py +408 -0
  108. beratools/tools/forest_line_ecosite.py +216 -0
  109. beratools/tools/lapis_all.py +103 -0
  110. beratools/tools/least_cost_path_from_chm.py +152 -0
  111. beratools/tools/line_footprint_absolute.py +363 -0
  112. beratools/tools/line_footprint_fixed.py +282 -0
  113. beratools/tools/line_footprint_functions.py +720 -0
  114. beratools/tools/line_footprint_relative.py +64 -0
  115. beratools/tools/ln_relative_metrics.py +615 -0
  116. beratools/tools/r_cal_lpi_elai.r +25 -0
  117. beratools/tools/r_generate_pd_focalraster.r +101 -0
  118. beratools/tools/r_interface.py +80 -0
  119. beratools/tools/r_point_density.r +9 -0
  120. beratools/tools/rpy_chm2trees.py +86 -0
  121. beratools/tools/rpy_dsm_chm_by.py +81 -0
  122. beratools/tools/rpy_dtm_by.py +63 -0
  123. beratools/tools/rpy_find_cellsize.py +43 -0
  124. beratools/tools/rpy_gnd_csf.py +74 -0
  125. beratools/tools/rpy_hummock_hollow.py +85 -0
  126. beratools/tools/rpy_hummock_hollow_raster.py +71 -0
  127. beratools/tools/rpy_las_info.py +51 -0
  128. beratools/tools/rpy_laz2las.py +40 -0
  129. beratools/tools/rpy_lpi_elai_lascat.py +466 -0
  130. beratools/tools/rpy_normalized_lidar_by.py +56 -0
  131. beratools/tools/rpy_percent_above_dbh.py +80 -0
  132. beratools/tools/rpy_points2trees.py +88 -0
  133. beratools/tools/rpy_vegcoverage.py +94 -0
  134. beratools/tools/tiler.py +206 -0
  135. beratools/tools/tool_template.py +54 -0
  136. beratools/tools/vertex_optimization.py +620 -0
  137. beratools/tools/zonal_threshold.py +144 -0
  138. beratools-0.2.0.dist-info/METADATA +63 -0
  139. beratools-0.2.0.dist-info/RECORD +142 -0
  140. beratools-0.2.0.dist-info/WHEEL +4 -0
  141. beratools-0.2.0.dist-info/entry_points.txt +2 -0
  142. beratools-0.2.0.dist-info/licenses/LICENSE +22 -0
@@ -0,0 +1,670 @@
1
+ import os.path
2
+ from multiprocessing.pool import Pool
3
+ import geopandas as gpd
4
+ import json
5
+ import argparse
6
+ import time
7
+ import pandas as pd
8
+ import numpy as np
9
+ import shapely
10
+ from common import *
11
+ import sys
12
+ import math
13
+
14
+
15
class OperationCancelledException(Exception):
    """Raised to abort a long-running tool run when the user cancels it."""
17
+
18
+
19
def main_canopy_threshold_relative(callback, in_line, in_chm, off_ln_dist, canopy_percentile,
                                   canopy_thresh_percentage, tree_radius, max_line_dist, canopy_avoidance,
                                   exponent, full_step, processes, verbose):
    """Compute a dynamic canopy threshold (DynCanTh) for every input line.

    Builds one-sided multi-ring buffers on the left and right of each line,
    samples CHM height percentiles in each ring (multiprocessing_Percentile),
    derives cut distance/height per side via rate-of-change analysis
    (multiprocessing_RofC), and writes the result next to the input as
    'DynCanTh_<input name>'.

    Parameters
    ----------
    callback : callable
        Progress callback (not used in this function body — TODO confirm
        whether callers rely on it being invoked).
    in_line : str
        Path to the input line vector file.
    in_chm : str
        Path to the canopy height model raster.
    canopy_percentile : int-like
        Height percentile to sample (forced to 50 when outside 1..100).
    canopy_thresh_percentage : float-like
        Percentage of the percentile used as the dynamic threshold.
    full_step : bool
        When True, return the output file path (for chained tools).
    processes : int
        Worker process count for the multiprocessing pools.

    Notes
    -----
    off_ln_dist, tree_radius, max_line_dist, canopy_avoidance, exponent and
    verbose are accepted for interface compatibility but are not referenced
    in this function body.
    """
    file_path, in_file_name = os.path.split(in_line)
    out_file = os.path.join(file_path, 'DynCanTh_' + in_file_name)
    line_seg = gpd.GeoDataFrame.from_file(in_line)

    # check coordinate systems between line and raster features
    # with rasterio.open(in_chm) as in_raster:
    if compare_crs(vector_crs(in_line), raster_crs(in_chm)):
        pass
    else:
        print("Line and raster spatial references are not same, please check.")
        exit()

    # Check the canopy threshold percent in 0-100 range. If it is not, 50% will be applied
    if not 100 >= int(canopy_percentile) > 0:
        canopy_percentile = 50

    # Check the Dynamic Canopy threshold column in data. If it is not, new column will be created
    if 'DynCanTh' not in line_seg.columns.array:
        if BT_DEBUGGING:
            print("{} column not found in input line".format('DynCanTh'))

        print("New column created: {}".format('DynCanTh'))
        line_seg['DynCanTh'] = np.nan

    # Check the OLnFID column in data. If it is not, column will be created
    if 'OLnFID' not in line_seg.columns.array:
        if BT_DEBUGGING:
            print("{} column not found in input line".format('OLnFID'))

        print("New column created: {}".format('OLnFID'))
        line_seg['OLnFID'] = line_seg.index

    # Check the OLnSEG column in data. If it is not, column will be created
    if 'OLnSEG' not in line_seg.columns.array:
        if BT_DEBUGGING:
            print("{} column not found in input line".format('OLnSEG'))

        print("New column created: {}".format('OLnSEG'))
        line_seg['OLnSEG'] = 0

    # Normalize multipart geometries to plain LineStrings.
    line_seg = chk_df_multipart(line_seg, 'LineString')[0]

    # Segment splitting is currently disabled (hard-coded off).
    proc_segments = False
    if proc_segments:
        line_seg = split_into_segments(line_seg)
    else:
        pass

    # copy original line input to another GeoDataframe
    workln_dfC = gpd.GeoDataFrame.copy((line_seg))
    workln_dfC.geometry = workln_dfC.geometry.simplify(tolerance=0.5, preserve_topology=True)

    print('%{}'.format(5))

    worklnbuffer_dfLRing = gpd.GeoDataFrame.copy((workln_dfC))
    worklnbuffer_dfRRing = gpd.GeoDataFrame.copy((workln_dfC))

    print('Create ring buffer for input line to find the forest edge....')

    def multiringbuffer(df, nrings, ringdist):
        """
        Buffers an input dataframes geometry nring (number of rings) times, with a distance between
        rings of ringdist and returns a list of non overlapping buffers

        The sign of nrings/ringdist selects the side: positive buffers left,
        negative buffers right (single-sided flat-capped buffers).
        """

        rings = []  # A list to hold the individual buffers
        for ring in np.arange(0, ringdist, nrings):  # For each ring (1, 2, 3, ..., nrings)
            big_ring = df["geometry"].buffer(nrings + ring, single_sided=True,
                                             cap_style='flat')  # Create one big buffer
            small_ring = df["geometry"].buffer(ring, single_sided=True, cap_style='flat')  # Create one smaller one
            the_ring = big_ring.difference(small_ring)  # Difference the big with the small to create a ring
            # NOTE(review): this condition is always truthy — `not None` is
            # always True in Python and `~` on booleans does not negate them,
            # so the emptiness guards have no effect; the isinstance checks
            # below do the real filtering. TODO confirm intended guard.
            if (~shapely.is_empty(the_ring) or ~shapely.is_missing(the_ring) or not None or ~the_ring.area == 0):
                if isinstance(the_ring, shapely.MultiPolygon) or isinstance(the_ring, shapely.Polygon):
                    rings.append(the_ring)  # Append the ring to the rings list
                else:
                    if isinstance(the_ring, shapely.GeometryCollection):
                        for i in range(0, len(the_ring.geoms)):
                            if not isinstance(the_ring.geoms[i], shapely.LineString):
                                rings.append(the_ring.geoms[i])
            print(' %{} '.format((ring / ringdist) * 100))

        return rings  # return the list

    # Create a column with the rings as a list

    worklnbuffer_dfLRing['mgeometry'] = worklnbuffer_dfLRing.apply(lambda x: multiringbuffer(df=x, nrings=1,
                                                                                             ringdist=15), axis=1)

    worklnbuffer_dfLRing = worklnbuffer_dfLRing.explode("mgeometry")  # Explode to create a row for each ring
    worklnbuffer_dfLRing = worklnbuffer_dfLRing.set_geometry("mgeometry")
    worklnbuffer_dfLRing = worklnbuffer_dfLRing.drop(columns=["geometry"]).rename_geometry("geometry").set_crs(
        workln_dfC.crs)
    # iRing numbers the rings outward from the line within each (OLnFID, OLnSEG).
    worklnbuffer_dfLRing['iRing'] = worklnbuffer_dfLRing.groupby(['OLnFID', 'OLnSEG']).cumcount()
    worklnbuffer_dfLRing = worklnbuffer_dfLRing.sort_values(by=['OLnFID', 'OLnSEG', 'iRing'])
    worklnbuffer_dfLRing = worklnbuffer_dfLRing.reset_index(drop=True)

    # Same ring construction for the right side (negative offsets).
    worklnbuffer_dfRRing['mgeometry'] = worklnbuffer_dfRRing.apply(
        lambda x: multiringbuffer(df=x, nrings=-1, ringdist=-15), axis=1)

    worklnbuffer_dfRRing = worklnbuffer_dfRRing.explode("mgeometry")  # Explode to create a row for each ring
    worklnbuffer_dfRRing = worklnbuffer_dfRRing.set_geometry("mgeometry")
    worklnbuffer_dfRRing = worklnbuffer_dfRRing.drop(columns=["geometry"]).rename_geometry("geometry").set_crs(
        workln_dfC.crs)
    worklnbuffer_dfRRing['iRing'] = worklnbuffer_dfRRing.groupby(['OLnFID', 'OLnSEG']).cumcount()
    worklnbuffer_dfRRing = worklnbuffer_dfRRing.sort_values(by=['OLnFID', 'OLnSEG', 'iRing'])
    worklnbuffer_dfRRing = worklnbuffer_dfRRing.reset_index(drop=True)

    print("Task done.")
    print('%{}'.format(20))

    # Pre-create the output columns filled later by the workers.
    worklnbuffer_dfRRing['Percentile_RRing'] = np.nan
    worklnbuffer_dfLRing['Percentile_LRing'] = np.nan
    line_seg['CL_CutHt'] = np.nan
    line_seg['CR_CutHt'] = np.nan
    line_seg['RDist_Cut'] = np.nan
    line_seg['LDist_Cut'] = np.nan
    print('%{}'.format(80))

    # calculate the Height percentile for each parallel area using CHM
    worklnbuffer_dfLRing = multiprocessing_Percentile(worklnbuffer_dfLRing, int(canopy_percentile),
                                                      float(canopy_thresh_percentage), in_chm,
                                                      processes, side='LRing')

    worklnbuffer_dfLRing = worklnbuffer_dfLRing.sort_values(by=['OLnFID', 'OLnSEG', 'iRing'])
    worklnbuffer_dfLRing = worklnbuffer_dfLRing.reset_index(drop=True)

    worklnbuffer_dfRRing = multiprocessing_Percentile(worklnbuffer_dfRRing, int(canopy_percentile),
                                                      float(canopy_thresh_percentage), in_chm,
                                                      processes, side='RRing')

    worklnbuffer_dfRRing = worklnbuffer_dfRRing.sort_values(by=['OLnFID', 'OLnSEG', 'iRing'])
    worklnbuffer_dfRRing = worklnbuffer_dfRRing.reset_index(drop=True)

    # Combine left/right percentile series into per-line cut distance/height.
    result = multiprocessing_RofC(line_seg, worklnbuffer_dfLRing, worklnbuffer_dfRRing, processes)
    print('%{}'.format(40))
    print("Task done.")

    print("Saving percentile information to input line ...")
    gpd.GeoDataFrame.to_file(result, out_file)
    print("Task done.")

    if full_step:
        return out_file

    print('%{}'.format(100))
166
+
167
+
168
def rate_of_change(in_arg):  # ,max_chmht):
    """Find where canopy height 'jumps' moving away from a line (forest edge).

    Scans the ordered ring-percentile series for the first ring whose
    ring-to-ring height increase exceeds a relative-change threshold,
    starting at 1.5 and relaxing down to 1.1 until a hit is found. If no
    jump is found, falls back to defaults keyed off the series median.

    Parameters (packed in *in_arg* for Pool.imap_unordered)
    ------------------------------------------------------
    in_arg[0] : sequence of per-ring height percentiles (1-unit spacing)
    in_arg[1] : OLnFID  (line feature id, used in log messages)
    in_arg[2] : OLnSEG  (segment id, used in log messages)
    in_arg[3] : 'Left' or 'Right' — selects which columns to write
    in_arg[4] : the line's row (pandas Series) to annotate
    in_arg[5] : row index (unused in this body — TODO confirm)

    Returns
    -------
    The row from in_arg[4] with {L,R}Dist_Cut and C{L,R}_CutHt set.
    """
    x = in_arg[0]
    Olnfid = in_arg[1]
    Olnseg = in_arg[2]
    side = in_arg[3]
    df = in_arg[4]
    index = in_arg[5]

    # Since the x interval is 1 unit, the array 'diff' is the rate of change (slope)
    diff = np.ediff1d(x)
    # Default cut distance: one fifth of the ring count.
    cut_dist = len(x) / 5

    median_percentile = np.nanmedian(x)
    if not np.isnan(median_percentile):
        cut_percentile = math.floor(median_percentile)
    else:
        cut_percentile = 0.5
    found = False
    changes = 1.50
    # Prepend 0 so Change[i] is the rise from ring i-1 to ring i.
    Change = np.insert(diff, 0, 0)
    # scale_down is currently 1, so the `* scale_down ** n` factors below
    # are no-ops kept as tuning hooks.
    scale_down = 1

    # test the rate of change is > than 150% (1.5), if it is
    # no result found then lower to 140% (1.4) until 110% (1.1)
    try:
        while not found and changes >= 1.1:
            for ii in range(0, len(Change) - 1):
                if x[ii] >= 0.5:
                    if (Change[ii]) >= changes:
                        cut_dist = (ii + 1) * scale_down
                        cut_percentile = math.floor(x[ii])
                        # median_diff=(cut_percentile-median_percentile)
                        # Clamp implausibly large cut distances per height band.
                        if 0.5 >= cut_percentile:
                            if cut_dist > 5:
                                cut_percentile = 2
                                cut_dist = cut_dist * scale_down ** 3
                                print("{}: OLnFID:{}, OLnSEG: {} @<0.5 found and modified".format(side,
                                                                                                  Olnfid,
                                                                                                  Olnseg), flush=True)
                        elif 0.5 < cut_percentile <= 5.0:
                            if cut_dist > 6:
                                cut_dist = cut_dist * scale_down ** 3  # 4.0
                                print("{}: OLnFID:{}, OLnSEG: {} @0.5-5.0 found and modified".format(side,
                                                                                                     Olnfid,
                                                                                                     Olnseg),
                                      flush=True)
                        elif 5.0 < cut_percentile <= 10.0:
                            if cut_dist > 8:  # 5
                                cut_dist = cut_dist * scale_down ** 3
                                print("{}: OLnFID:{}, OLnSEG: {} @5-10 found and modified".format(side,
                                                                                                  Olnfid,
                                                                                                  Olnseg), flush=True)
                        elif 10.0 < cut_percentile <= 15:
                            if cut_dist > 5:
                                cut_dist = cut_dist * scale_down ** 3  # 5.5
                                print("{}: OLnFID:{}, OLnSEG: {} @10-15 found and modified".format(side,
                                                                                                   Olnfid,
                                                                                                   Olnseg), flush=True)
                        elif 15 < cut_percentile:
                            if cut_dist > 4:
                                cut_dist = cut_dist * scale_down ** 2
                                cut_percentile = 15.5
                                print("{}: OLnFID:{}, OLnSEG: {} @>15 found and modified".format(side,
                                                                                                 Olnfid,
                                                                                                 Olnseg), flush=True)
                        found = True
                        print("{}: OLnFID:{}, OLnSEG: {} rate of change found".format(side, Olnfid, Olnseg), flush=True)
                        break
            # Relax the threshold by 10 percentage points and rescan.
            changes = changes - 0.1

    except IndexError:
        # x shorter than Change scan range — keep whatever was found so far.
        pass

    # if still is no result found, lower to 10% (1.1), if no result found then default is used
    if not found:
        # NOTE: when median_percentile is NaN none of these branches match
        # (NaN comparisons are False) and the initial defaults are kept.
        if 0.5 >= median_percentile:
            cut_dist = 4 * scale_down  # 3
            cut_percentile = 0.5
        elif 0.5 < median_percentile <= 5.0:
            cut_dist = 4.5 * scale_down  # 4.0
            cut_percentile = math.floor(median_percentile)
        elif 5.0 < median_percentile <= 10.0:
            cut_dist = 5.5 * scale_down  # 5
            cut_percentile = math.floor(median_percentile)
        elif 10.0 < median_percentile <= 15:
            cut_dist = 6 * scale_down  # 5.5
            cut_percentile = math.floor(median_percentile)
        elif 15 < median_percentile:
            cut_dist = 5 * scale_down  # 5
            cut_percentile = 15.5
        print("{}: OLnFID:{}, OLnSEG: {} Estimated".format(side, Olnfid, Olnseg), flush=True)
    # Write side-specific result columns onto the row.
    if side == 'Right':
        df['RDist_Cut'] = cut_dist
        df['CR_CutHt'] = cut_percentile
    elif side == 'Left':
        df['LDist_Cut'] = cut_dist
        df['CL_CutHt'] = cut_percentile

    return df
268
+
269
+
270
def multiprocessing_RofC(line_seg, worklnbuffer_dfLRing, worklnbuffer_dfRRing, processes):
    """Derive per-line cut distance/height from ring-buffer percentiles.

    For every row of *line_seg*, gathers the ordered left/right ring
    percentile series, runs `rate_of_change` on each side (in a process
    pool when PARALLEL_MODE is MULTIPROCESSING, otherwise sequentially),
    then writes RDist_Cut/LDist_Cut, CR_CutHt/CL_CutHt and their average
    (DynCanTh) back onto *line_seg*.

    Args:
        line_seg: GeoDataFrame of input lines, one row per (OLnFID, OLnSEG).
        worklnbuffer_dfLRing: left-side ring buffers with 'Percentile_LRing'.
        worklnbuffer_dfRRing: right-side ring buffers with 'Percentile_RRing'.
        processes: number of worker processes for the pools.

    Returns:
        line_seg with the cut columns populated.
    """
    in_argsL = []
    in_argsR = []

    for index in line_seg.index:
        Olnfid = int(line_seg.OLnFID.iloc[index])
        Olnseg = int(line_seg.OLnSEG.iloc[index])
        sql_dfL = worklnbuffer_dfLRing.loc[
            (worklnbuffer_dfLRing['OLnFID'] == Olnfid)
            & (worklnbuffer_dfLRing['OLnSEG'] == Olnseg)].sort_values(by=['iRing'])
        PLRing = list(sql_dfL['Percentile_LRing'])
        sql_dfR = worklnbuffer_dfRRing.loc[
            (worklnbuffer_dfRRing['OLnFID'] == Olnfid)
            & (worklnbuffer_dfRRing['OLnSEG'] == Olnseg)].sort_values(by=['iRing'])
        PRRing = list(sql_dfR['Percentile_RRing'])
        in_argsL.append([PLRing, Olnfid, Olnseg, 'Left', line_seg.loc[index], index])
        in_argsR.append([PRRing, Olnfid, Olnseg, 'Right', line_seg.loc[index], index])
        print(' "PROGRESS_LABEL Preparing grouped buffer areas...." ', flush=True)
        # FIX: was (index + 1 / len(line_seg)) * 100 — precedence bug made the
        # reported progress meaningless.
        print(' %{} '.format((index + 1) / len(line_seg) * 100))

    total_steps = len(in_argsL) + len(in_argsR)
    featuresL = []
    featuresR = []

    if PARALLEL_MODE == ParallelMode.MULTIPROCESSING:
        step = 0
        with Pool(processes=int(processes)) as pool:
            # execute tasks in order, process results out of order
            try:
                for resultL in pool.imap_unordered(rate_of_change, in_argsL):
                    if BT_DEBUGGING:
                        print('Got result: {}'.format(resultL), flush=True)
                    featuresL.append(resultL)
                    step += 1
                    print(
                        ' "PROGRESS_LABEL Calculate Rate of Change In Buffer Area {} of {}" '.format(step, total_steps),
                        flush=True)
                    print('%{}'.format(step / total_steps * 100), flush=True)
            except Exception as exc:
                # FIX: was print(Exception), which printed the class object
                # instead of the caught error.
                print(exc)
                raise

        # Each worker returns a Series (one row); concat on axis=1 then
        # transpose to rebuild a frame of rows.
        gpdL = gpd.GeoDataFrame(pd.concat(featuresL, axis=1).T)
        with Pool(processes=int(processes)) as pool:
            try:
                for resultR in pool.imap_unordered(rate_of_change, in_argsR):
                    if BT_DEBUGGING:
                        print('Got result: {}'.format(resultR), flush=True)
                    featuresR.append(resultR)
                    step += 1
                    # FIX: 'step' continues from the left-side loop, so the
                    # original 'step + len(in_argsL)' double-counted progress.
                    print(
                        ' "PROGRESS_LABEL Calculate Rate of Change Area {} of {}" '.format(step, total_steps),
                        flush=True)
                    print('%{}'.format(step / total_steps * 100), flush=True)
            except Exception as exc:
                print(exc)
                raise
        gpdR = gpd.GeoDataFrame(pd.concat(featuresR, axis=1).T)
    else:
        # Sequential fallback (e.g. debugging or PARALLEL_MODE disabled).
        for rowL in in_argsL:
            featuresL.append(rate_of_change(rowL))

        for rowR in in_argsR:
            featuresR.append(rate_of_change(rowR))

        gpdL = gpd.GeoDataFrame(pd.concat(featuresL, axis=1).T)
        gpdR = gpd.GeoDataFrame(pd.concat(featuresR, axis=1).T)

    for index in line_seg.index:
        lnfid = line_seg.OLnFID.iloc[index]
        Olnseg = line_seg.OLnSEG.iloc[index]
        selL = gpdL.loc[(gpdL.OLnFID == lnfid) & (gpdL.OLnSEG == Olnseg)]
        selR = gpdR.loc[(gpdR.OLnFID == lnfid) & (gpdR.OLnSEG == Olnseg)]
        # FIX: float(Series) is removed in modern pandas; extract the single
        # matching value explicitly with .iloc[0].
        line_seg.loc[index, 'RDist_Cut'] = float(selR['RDist_Cut'].iloc[0])
        line_seg.loc[index, 'LDist_Cut'] = float(selL['LDist_Cut'].iloc[0])
        line_seg.loc[index, 'CL_CutHt'] = float(selL['CL_CutHt'].iloc[0])
        line_seg.loc[index, 'CR_CutHt'] = float(selR['CR_CutHt'].iloc[0])
        # DynCanTh is the mean of the two side cut heights.
        line_seg.loc[index, 'DynCanTh'] = (line_seg.loc[index, 'CL_CutHt'] + line_seg.loc[index, 'CR_CutHt']) / 2
        print(' "PROGRESS_LABEL Recording ... {} of {}" '.format(index + 1, len(line_seg)), flush=True)
        # FIX: same precedence bug as above.
        print(' %{} '.format((index + 1) / len(line_seg) * 100), flush=True)

    return line_seg
356
+
357
+
358
def split_line_fc(line):
    """Split a LineString into its consecutive two-point segments.

    Returns a list of shapely.LineString objects, one per adjacent vertex
    pair, or None when *line* is falsy (missing/empty geometry).
    """
    if not line:
        return None
    pts = line.coords
    return [shapely.LineString(pair) for pair in zip(pts[:-1], pts[1:])]
363
+
364
+
365
def split_into_segments(df):
    """Explode each line of *df* into per-vertex-pair segments.

    Each input line (OLnFID) becomes multiple rows, one per two-point
    segment, numbered 0..n-1 in a new/overwritten 'OLnSEG' column.
    Returns a GeoDataFrame sorted by (OLnFID, OLnSEG) with a fresh index.
    """
    # NOTE(review): odf is an alias of df (no copy), so the OLnSEG
    # assignment below also mutates the caller's frame — TODO confirm
    # callers expect this side effect.
    odf = df
    crs = odf.crs
    if 'OLnSEG' not in odf.columns.array:
        df['OLnSEG'] = np.nan
    else:
        pass
    # Replace each geometry with the list of its segments, then explode
    # the lists into one row per segment.
    df = odf.assign(geometry=odf.apply(lambda x: split_line_fc(x.geometry), axis=1))
    df = df.explode()

    # Number segments sequentially within each original line.
    df['OLnSEG'] = df.groupby('OLnFID').cumcount()
    gdf = gpd.GeoDataFrame(df, geometry=df.geometry, crs=crs)
    gdf = gdf.sort_values(by=['OLnFID', 'OLnSEG'])
    gdf = gdf.reset_index(drop=True)
    return gdf
380
+
381
+
382
def multiprocessing_copyparallel_lineLRC(dfL, dfR, dfc, processes, left_dis, right_dist, center_dist):
    """Offset every line left and right by the given distances, in parallel.

    Dispatches copyparallel_lineLRC per row, either through a process pool
    (PARALLEL_MODE == MULTIPROCESSING) or sequentially
    (PARALLEL_MODE == SEQUENTIAL).

    Args:
        dfL, dfR: GeoDataFrames whose geometries are offset left/right.
        dfc: center frame, passed through to the worker (worker currently
            ignores it — see copyparallel_lineLRC).
        processes: worker process count.
        left_dis, right_dist, center_dist: offset distances per side.
            (NOTE: 'left_dis' spelling is part of the public signature.)

    Returns:
        (left GeoDataFrame, right GeoDataFrame) of offset lines, or None
        implicitly when PARALLEL_MODE matches neither mode or the
        operation is cancelled.
    """
    try:
        line_arg = []
        total_steps = len(dfL)

        # One packed argument list per row; index travels as the last item.
        for item in dfL.index:
            item_list = [dfL, dfR, dfc, left_dis, right_dist, center_dist, item]
            line_arg.append(item_list)

        featuresL = []
        featuresR = []
        result = None
        step = 0

        if PARALLEL_MODE == ParallelMode.MULTIPROCESSING:
            with Pool(processes=int(processes)) as pool:
                # execute tasks in order, process results out of order
                for result in pool.imap_unordered(copyparallel_lineLRC, line_arg):
                    if BT_DEBUGGING:
                        print(f'Got result: {result}', flush=True)
                    # Worker returns None for rows with missing geometry.
                    if result:
                        featuresL.append(result[0])  # resultL
                        featuresR.append(result[1])  # resultR
                    step += 1
                    print(f' %{step / total_steps * 100} ')

            return gpd.GeoDataFrame(pd.concat(featuresL)), \
                gpd.GeoDataFrame(pd.concat(featuresR))  # , gpd.GeoDataFrame(pd.concat(featuresC))
        elif PARALLEL_MODE == ParallelMode.SEQUENTIAL:
            for line in line_arg:
                result = copyparallel_lineLRC(line)
                if BT_DEBUGGING:
                    print(f'Got result: {result}', flush=True)
                if result:
                    featuresL.append(result[0])  # resultL
                    featuresR.append(result[1])  # resultR
                step += 1
                print(f' %{step / total_steps * 100} ')

            return gpd.GeoDataFrame(pd.concat(featuresL)), \
                gpd.GeoDataFrame(pd.concat(featuresR))  # , gpd.GeoDataFrame(pd.concat(featuresC))

    except OperationCancelledException:
        print("Operation cancelled")
426
+
427
+
428
def multiprocessing_Percentile(df, CanPercentile, CanThrPercentage, in_CHM, processes, side):
    """Compute CHM height percentiles for every buffer polygon in *df*.

    Selects the worker (cal_percentileLR for plain side buffers,
    cal_percentileRing for ring buffers) and the output column from *side*,
    then runs it per row, in a process pool when PARALLEL_MODE is
    MULTIPROCESSING or sequentially otherwise.

    Args:
        df: GeoDataFrame of buffer polygons.
        CanPercentile: percentile (1..100) passed to the worker.
        CanThrPercentage: percentage of the percentile used as threshold.
        in_CHM: path to the canopy height model raster.
        processes: worker process count.
        side: one of 'left', 'right', 'LRing', 'RRing'.

    Returns:
        GeoDataFrame of the worker results, or None implicitly when the
        operation is cancelled.

    Raises:
        ValueError: if *side* is not one of the four supported values.
    """
    try:
        line_arg = []
        total_steps = len(df)
        cal_percentile = cal_percentileLR
        if side == 'left':
            PerCol = 'Percentile_L'
            which_side = 'left'
            cal_percentile = cal_percentileLR
        elif side == 'right':
            PerCol = 'Percentile_R'
            which_side = 'right'
            cal_percentile = cal_percentileLR
        elif side == 'LRing':
            PerCol = 'Percentile_LRing'
            cal_percentile = cal_percentileRing
            which_side = 'left'
        elif side == 'RRing':
            PerCol = 'Percentile_RRing'
            which_side = 'right'
            cal_percentile = cal_percentileRing
        else:
            # FIX: an unknown side previously fell through silently and later
            # crashed with UnboundLocalError on PerCol/which_side.
            raise ValueError(
                "side must be one of 'left', 'right', 'LRing', 'RRing'; got {!r}".format(side))

        print("Calculating surrounding ({}) forest population for buffer area ...".format(which_side))

        for item in df.index:
            item_list = [df.iloc[[item]], CanPercentile, CanThrPercentage, in_CHM, item, PerCol]
            line_arg.append(item_list)
            print(' "PROGRESS_LABEL Preparing... {} of {}" '.format(item + 1, len(df)), flush=True)
            print(' %{} '.format(item / len(df) * 100), flush=True)

        features = []
        if PARALLEL_MODE == ParallelMode.MULTIPROCESSING:
            with Pool(processes=int(processes)) as pool:
                step = 0
                # execute tasks in order, process results out of order
                try:
                    for result in pool.imap_unordered(cal_percentile, line_arg):
                        if BT_DEBUGGING:
                            print('Got result: {}'.format(result), flush=True)
                        features.append(result)
                        step += 1
                        print(
                            ' "PROGRESS_LABEL Calculate Percentile In Buffer Area {} of {}" '.format(step, total_steps),
                            flush=True)
                        print('%{}'.format(step / total_steps * 100), flush=True)
                except Exception as exc:
                    # FIX: was print(Exception) — printed the class object,
                    # not the caught error.
                    print(exc)
                    raise
            del line_arg

            return gpd.GeoDataFrame(pd.concat(features))
        else:
            # Sequential fallback; per-row progress printing is disabled.
            verbose = False
            total_steps = len(line_arg)
            step = 0
            for row in line_arg:
                features.append(cal_percentile(row))
                step += 1
                if verbose:
                    print(' "PROGRESS_LABEL Calculate Percentile on line {} of {}" '.format(step, total_steps),
                          flush=True)
                    print(' %{} '.format(step / total_steps * 100), flush=True)
            return gpd.GeoDataFrame(pd.concat(features))

    except OperationCancelledException:
        print("Operation cancelled")
497
+
498
+
499
def cal_percentileLR(line_arg):
    """Compute one buffer's CHM height percentile and dynamic canopy threshold.

    line_arg is a packed list:
        [0] df            — single-row GeoDataFrame holding the buffer
        [1] CanPercentile — percentile (1..100) to sample
        [2] CanThrPercentage — percent of the percentile used as threshold
        [3] in_CHM        — path to the CHM raster
        [4] row_index     — the row's index in df
        [5] PerCol        — output column name for the percentile

    Returns df with PerCol and 'DynCanTh' set for row_index, or None when
    the buffer geometry is empty/missing. On raster errors the values fall
    back to 0 (see TODO below).
    """
    from shapely import ops
    try:
        df = line_arg[0]
        CanPercentile = line_arg[1]
        CanThrPercentage = line_arg[2]
        in_CHM = line_arg[3]
        row_index = line_arg[4]
        PerCol = line_arg[5]
        line_buffer = df.loc[row_index, 'geometry']

        if line_buffer.is_empty or shapely.is_missing(line_buffer):
            return None
        if line_buffer.has_z:
            # Drop the Z dimension so rasterio masking sees 2D geometry.
            line_buffer = ops.transform(lambda x, y, z=None: (x, y), line_buffer)
    except Exception as e:
        print(e)
        # FIX: the original concatenated a str with the sys.exc_info() tuple,
        # which raised TypeError and masked the real error.
        print("Assigning variable on index:{} Error: {}".format(line_arg, sys.exc_info()))
        exit()

    # TODO: temporary workaround for exception causing not percentile defined
    percentile = 0
    Dyn_Canopy_Threshold = 0.05
    try:
        with rasterio.open(in_CHM) as raster:
            clipped_raster, out_transform = rasterio.mask.mask(raster, [line_buffer], crop=True,
                                                               nodata=BT_NODATA, filled=True)
            clipped_raster = np.squeeze(clipped_raster, axis=0)

            # mask all -9999 (nodata) value cells
            masked_raster = np.ma.masked_where(clipped_raster == BT_NODATA, clipped_raster)
            filled_raster = np.ma.filled(masked_raster, np.nan)

            # Calculate the percentile over valid cells only.
            percentile = np.nanpercentile(filled_raster, CanPercentile)  # ,method='hazen')
            if percentile > 0.05:  # (percentile+median)>0.0:
                Dyn_Canopy_Threshold = percentile * (CanThrPercentage / 100.0)
            else:
                # Floor the threshold for near-ground percentiles.
                Dyn_Canopy_Threshold = 0.05

            del clipped_raster, out_transform
        del raster
        # return the generated value
    except Exception as e:
        print(e)
        # Raster read/mask failed: fall back to zeroed outputs.
        percentile = 0
        Dyn_Canopy_Threshold = 0

    try:
        df.loc[row_index, PerCol] = percentile
        df.loc[row_index, 'DynCanTh'] = Dyn_Canopy_Threshold
        return df
    except Exception:
        # FIX: same str + tuple concatenation bug as above.
        print("Error writing Percentile and Dynamic Canopy into table: {}".format(sys.exc_info()))
557
+
558
+
559
def cal_percentileRing(line_arg):
    """Compute one ring buffer's CHM median height and canopy threshold.

    Same packed argument layout as cal_percentileLR. Unlike the LR variant
    this always samples the 50th percentile (CanPercentile/CanThrPercentage
    are accepted for interface compatibility but the hard-coded 50 and 0.3
    are used — see the inline comments), and it always writes the (possibly
    default) values in the finally block.

    Returns df with PerCol and 'DynCanTh' set for row_index, or None when
    the ring geometry is empty/missing.
    """
    from shapely import ops
    try:
        df = line_arg[0]
        CanPercentile = line_arg[1]
        CanThrPercentage = line_arg[2]
        in_CHM = line_arg[3]
        row_index = line_arg[4]
        PerCol = line_arg[5]

        line_buffer = df.loc[row_index, 'geometry']
        if line_buffer.is_empty or shapely.is_missing(line_buffer):
            return None
        if line_buffer.has_z:
            # Drop the Z dimension so rasterio masking sees 2D geometry.
            line_buffer = ops.transform(lambda x, y, z=None: (x, y), line_buffer)

    except Exception as e:
        print(e)
        # FIX: the original concatenated a str with the sys.exc_info() tuple,
        # which raised TypeError and masked the real error.
        print("Assigning variable on index:{} Error: {}".format(line_arg, sys.exc_info()))
        exit()

    # TODO: temporary workaround for exception causing not percentile defined
    percentile = 0.5
    Dyn_Canopy_Threshold = 0.05
    try:

        with rasterio.open(in_CHM) as raster:
            clipped_raster, out_transform = rasterio.mask.mask(raster, [line_buffer], crop=True,
                                                               nodata=BT_NODATA, filled=True)
            clipped_raster = np.squeeze(clipped_raster, axis=0)

            # mask all -9999 (nodata) value cells
            masked_raster = np.ma.masked_where(clipped_raster == BT_NODATA, clipped_raster)
            filled_raster = np.ma.filled(masked_raster, np.nan)

            # Calculate the percentile (median; CanPercentile intentionally
            # not used here).
            percentile = np.nanpercentile(filled_raster, 50)  # CanPercentile)#,method='hazen')

            if percentile > 1:  # (percentile+median)>0.0:
                Dyn_Canopy_Threshold = percentile * (0.3)
            else:
                Dyn_Canopy_Threshold = 1

            del clipped_raster, out_transform
        del raster
        # return the generated value
    except Exception as e:
        print(e)
        # print('Something wrong in ID:{}'.format(row_index))
        print("Default values are used.")

    finally:
        # Always record a value (defaults on failure) so downstream sorting
        # and rate-of-change analysis see every ring.
        df.loc[row_index, PerCol] = percentile
        df.loc[row_index, 'DynCanTh'] = Dyn_Canopy_Threshold
        return df
617
+
618
+
619
def copyparallel_lineLRC(line_arg):
    """Offset one line left and right by the configured distances.

    line_arg is a packed list:
        [0] dfL, [1] dfR — left/right GeoDataFrames (mutated in place)
        [2] dfc (unused in this body), [3] left offset, [4] right offset,
        [5] center offset (unused in this body), [6] row index

    Returns (single-row left frame, single-row right frame) with offset
    geometries, or None when the source geometry is missing.
    """
    dfL = line_arg[0]
    dfR = line_arg[1]

    # Simplify input center lines
    geom = dfL.loc[line_arg[6], 'geometry']
    if not geom:
        return None

    lineL = dfL.loc[line_arg[6], 'geometry'].simplify(tolerance=0.05, preserve_topology=True)
    lineR = dfR.loc[line_arg[6], 'geometry'].simplify(tolerance=0.05, preserve_topology=True)
    # lineC = dfC.loc[line_arg[6], 'geometry'].simplify(tolerance=0.05, preserve_topology=True)
    offset_distL = float(line_arg[3])
    offset_distR = float(line_arg[4])

    # Older alternative method to the offset_curve() method,
    # but uses resolution instead of quad_segs and a side keyword ('left' or 'right') instead
    # of sign of the distance. This method is kept for backwards compatibility for now,
    # but it is recommended to use offset_curve() instead.
    # (ref: https://shapely.readthedocs.io/en/stable/manual.html#object.offset_curve)
    parallel_lineL = lineL.parallel_offset(distance=offset_distL, side='left',
                                           join_style=shapely.BufferJoinStyle.mitre)

    parallel_lineR = lineR.parallel_offset(distance=-offset_distR, side='right',
                                           join_style=shapely.BufferJoinStyle.mitre)

    # Keep the original geometry when the offset collapses to empty.
    if not parallel_lineL.is_empty:
        dfL.loc[line_arg[6], 'geometry'] = parallel_lineL
    if not parallel_lineR.is_empty:
        dfR.loc[line_arg[6], 'geometry'] = parallel_lineR

    return dfL.iloc[[line_arg[6]]], dfR.iloc[[line_arg[6]]]  # ,dfC.iloc[[line_arg[6]]]
651
+
652
+
653
if __name__ == '__main__':
    # CLI entry point: parameters arrive as a JSON object via -i/--input,
    # plus -p/--processes (worker count) and -v/--verbose ('True'/'False').
    start_time = time.time()
    print('Starting Dynamic Canopy Threshold calculation processing\n @ {}'.format(
        time.strftime("%d %b %Y %H:%M:%S", time.localtime())))

    parser = argparse.ArgumentParser()
    parser.add_argument('-i', '--input', type=json.loads)
    parser.add_argument('-p', '--processes')
    parser.add_argument('-v', '--verbose')
    args = parser.parse_args()
    # Standalone runs never chain into the next tool step.
    args.input['full_step'] = False

    verbose = True if args.verbose == 'True' else False
    # 'print' doubles as the progress callback for the GUI runner protocol.
    main_canopy_threshold_relative(print, **args.input, processes=int(args.processes), verbose=verbose)

    print('%{}'.format(100))
    print('Finishing Dynamic Canopy Threshold calculation @ {}\n(or in {} second)'.format(
        time.strftime("%d %b %Y %H:%M:%S", time.localtime()), round(time.time() - start_time, 5)))