BERATools-0.2.0-py3-none-any.whl → BERATools-0.2.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (153)
  1. beratools/__init__.py +1 -7
  2. beratools/core/algo_centerline.py +491 -351
  3. beratools/core/algo_common.py +497 -0
  4. beratools/core/algo_cost.py +192 -0
  5. beratools/core/{dijkstra_algorithm.py → algo_dijkstra.py} +503 -460
  6. beratools/core/algo_footprint_rel.py +577 -0
  7. beratools/core/algo_line_grouping.py +944 -0
  8. beratools/core/algo_merge_lines.py +214 -0
  9. beratools/core/algo_split_with_lines.py +304 -0
  10. beratools/core/algo_tiler.py +428 -0
  11. beratools/core/algo_vertex_optimization.py +469 -0
  12. beratools/core/constants.py +52 -86
  13. beratools/core/logger.py +76 -85
  14. beratools/core/tool_base.py +196 -133
  15. beratools/gui/__init__.py +11 -15
  16. beratools/gui/{beratools.json → assets/beratools.json} +2185 -2300
  17. beratools/gui/batch_processing_dlg.py +513 -463
  18. beratools/gui/bt_data.py +481 -487
  19. beratools/gui/bt_gui_main.py +710 -691
  20. beratools/gui/main.py +26 -0
  21. beratools/gui/map_window.py +162 -146
  22. beratools/gui/tool_widgets.py +725 -493
  23. beratools/tools/Beratools_r_script.r +1120 -1120
  24. beratools/tools/Ht_metrics.py +116 -116
  25. beratools/tools/__init__.py +7 -7
  26. beratools/tools/batch_processing.py +136 -132
  27. beratools/tools/canopy_threshold_relative.py +672 -670
  28. beratools/tools/canopycostraster.py +222 -222
  29. beratools/tools/centerline.py +136 -176
  30. beratools/tools/common.py +857 -885
  31. beratools/tools/fl_regen_csf.py +428 -428
  32. beratools/tools/forest_line_attributes.py +408 -408
  33. beratools/tools/line_footprint_absolute.py +213 -363
  34. beratools/tools/line_footprint_fixed.py +436 -282
  35. beratools/tools/line_footprint_functions.py +733 -720
  36. beratools/tools/line_footprint_relative.py +73 -64
  37. beratools/tools/line_grouping.py +45 -0
  38. beratools/tools/ln_relative_metrics.py +615 -615
  39. beratools/tools/r_cal_lpi_elai.r +24 -24
  40. beratools/tools/r_generate_pd_focalraster.r +100 -100
  41. beratools/tools/r_interface.py +79 -79
  42. beratools/tools/r_point_density.r +8 -8
  43. beratools/tools/rpy_chm2trees.py +86 -86
  44. beratools/tools/rpy_dsm_chm_by.py +81 -81
  45. beratools/tools/rpy_dtm_by.py +63 -63
  46. beratools/tools/rpy_find_cellsize.py +43 -43
  47. beratools/tools/rpy_gnd_csf.py +74 -74
  48. beratools/tools/rpy_hummock_hollow.py +85 -85
  49. beratools/tools/rpy_hummock_hollow_raster.py +71 -71
  50. beratools/tools/rpy_las_info.py +51 -51
  51. beratools/tools/rpy_laz2las.py +40 -40
  52. beratools/tools/rpy_lpi_elai_lascat.py +466 -466
  53. beratools/tools/rpy_normalized_lidar_by.py +56 -56
  54. beratools/tools/rpy_percent_above_dbh.py +80 -80
  55. beratools/tools/rpy_points2trees.py +88 -88
  56. beratools/tools/rpy_vegcoverage.py +94 -94
  57. beratools/tools/tiler.py +48 -206
  58. beratools/tools/tool_template.py +69 -54
  59. beratools/tools/vertex_optimization.py +61 -620
  60. beratools/tools/zonal_threshold.py +144 -144
  61. beratools-0.2.2.dist-info/METADATA +108 -0
  62. beratools-0.2.2.dist-info/RECORD +74 -0
  63. {beratools-0.2.0.dist-info → beratools-0.2.2.dist-info}/WHEEL +1 -1
  64. {beratools-0.2.0.dist-info → beratools-0.2.2.dist-info}/licenses/LICENSE +22 -22
  65. beratools/gui/cli.py +0 -18
  66. beratools/gui/gui.json +0 -8
  67. beratools/gui_tk/ASCII Banners.txt +0 -248
  68. beratools/gui_tk/__init__.py +0 -20
  69. beratools/gui_tk/beratools_main.py +0 -515
  70. beratools/gui_tk/bt_widgets.py +0 -442
  71. beratools/gui_tk/cli.py +0 -18
  72. beratools/gui_tk/img/BERALogo.png +0 -0
  73. beratools/gui_tk/img/closed.gif +0 -0
  74. beratools/gui_tk/img/closed.png +0 -0
  75. beratools/gui_tk/img/open.gif +0 -0
  76. beratools/gui_tk/img/open.png +0 -0
  77. beratools/gui_tk/img/tool.gif +0 -0
  78. beratools/gui_tk/img/tool.png +0 -0
  79. beratools/gui_tk/main.py +0 -14
  80. beratools/gui_tk/map_window.py +0 -144
  81. beratools/gui_tk/runner.py +0 -1481
  82. beratools/gui_tk/tooltip.py +0 -55
  83. beratools/third_party/pyqtlet2/__init__.py +0 -9
  84. beratools/third_party/pyqtlet2/leaflet/__init__.py +0 -26
  85. beratools/third_party/pyqtlet2/leaflet/control/__init__.py +0 -6
  86. beratools/third_party/pyqtlet2/leaflet/control/control.py +0 -59
  87. beratools/third_party/pyqtlet2/leaflet/control/draw.py +0 -52
  88. beratools/third_party/pyqtlet2/leaflet/control/layers.py +0 -20
  89. beratools/third_party/pyqtlet2/leaflet/core/Parser.py +0 -24
  90. beratools/third_party/pyqtlet2/leaflet/core/__init__.py +0 -2
  91. beratools/third_party/pyqtlet2/leaflet/core/evented.py +0 -180
  92. beratools/third_party/pyqtlet2/leaflet/layer/__init__.py +0 -5
  93. beratools/third_party/pyqtlet2/leaflet/layer/featuregroup.py +0 -34
  94. beratools/third_party/pyqtlet2/leaflet/layer/icon/__init__.py +0 -1
  95. beratools/third_party/pyqtlet2/leaflet/layer/icon/icon.py +0 -30
  96. beratools/third_party/pyqtlet2/leaflet/layer/imageoverlay.py +0 -18
  97. beratools/third_party/pyqtlet2/leaflet/layer/layer.py +0 -105
  98. beratools/third_party/pyqtlet2/leaflet/layer/layergroup.py +0 -45
  99. beratools/third_party/pyqtlet2/leaflet/layer/marker/__init__.py +0 -1
  100. beratools/third_party/pyqtlet2/leaflet/layer/marker/marker.py +0 -91
  101. beratools/third_party/pyqtlet2/leaflet/layer/tile/__init__.py +0 -2
  102. beratools/third_party/pyqtlet2/leaflet/layer/tile/gridlayer.py +0 -4
  103. beratools/third_party/pyqtlet2/leaflet/layer/tile/tilelayer.py +0 -16
  104. beratools/third_party/pyqtlet2/leaflet/layer/vector/__init__.py +0 -5
  105. beratools/third_party/pyqtlet2/leaflet/layer/vector/circle.py +0 -15
  106. beratools/third_party/pyqtlet2/leaflet/layer/vector/circlemarker.py +0 -18
  107. beratools/third_party/pyqtlet2/leaflet/layer/vector/path.py +0 -5
  108. beratools/third_party/pyqtlet2/leaflet/layer/vector/polygon.py +0 -14
  109. beratools/third_party/pyqtlet2/leaflet/layer/vector/polyline.py +0 -18
  110. beratools/third_party/pyqtlet2/leaflet/layer/vector/rectangle.py +0 -14
  111. beratools/third_party/pyqtlet2/leaflet/map/__init__.py +0 -1
  112. beratools/third_party/pyqtlet2/leaflet/map/map.py +0 -220
  113. beratools/third_party/pyqtlet2/mapwidget.py +0 -45
  114. beratools/third_party/pyqtlet2/web/custom.js +0 -43
  115. beratools/third_party/pyqtlet2/web/map.html +0 -23
  116. beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/layers-2x.png +0 -0
  117. beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/layers.png +0 -0
  118. beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/marker-icon-2x.png +0 -0
  119. beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/marker-icon.png +0 -0
  120. beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/marker-shadow.png +0 -0
  121. beratools/third_party/pyqtlet2/web/modules/leaflet_193/leaflet.css +0 -656
  122. beratools/third_party/pyqtlet2/web/modules/leaflet_193/leaflet.js +0 -6
  123. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.codeclimate.yml +0 -14
  124. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.editorconfig +0 -4
  125. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.gitattributes +0 -22
  126. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.travis.yml +0 -43
  127. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/LICENSE +0 -20
  128. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/layers-2x.png +0 -0
  129. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/layers.png +0 -0
  130. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/marker-icon-2x.png +0 -0
  131. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/marker-icon.png +0 -0
  132. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/marker-shadow.png +0 -0
  133. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/spritesheet-2x.png +0 -0
  134. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/spritesheet.png +0 -0
  135. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/spritesheet.svg +0 -156
  136. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/leaflet.draw.css +0 -10
  137. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/leaflet.draw.js +0 -10
  138. beratools/third_party/pyqtlet2/web/modules/leaflet_rotatedMarker_020/LICENSE +0 -22
  139. beratools/third_party/pyqtlet2/web/modules/leaflet_rotatedMarker_020/leaflet.rotatedMarker.js +0 -57
  140. beratools/tools/forest_line_ecosite.py +0 -216
  141. beratools/tools/lapis_all.py +0 -103
  142. beratools/tools/least_cost_path_from_chm.py +0 -152
  143. beratools-0.2.0.dist-info/METADATA +0 -63
  144. beratools-0.2.0.dist-info/RECORD +0 -142
  145. /beratools/gui/{img → assets}/BERALogo.png +0 -0
  146. /beratools/gui/{img → assets}/closed.gif +0 -0
  147. /beratools/gui/{img → assets}/closed.png +0 -0
  148. /beratools/{gui_tk → gui/assets}/gui.json +0 -0
  149. /beratools/gui/{img → assets}/open.gif +0 -0
  150. /beratools/gui/{img → assets}/open.png +0 -0
  151. /beratools/gui/{img → assets}/tool.gif +0 -0
  152. /beratools/gui/{img → assets}/tool.png +0 -0
  153. {beratools-0.2.0.dist-info → beratools-0.2.2.dist-info}/entry_points.txt +0 -0
beratools/tools/ln_relative_metrics.py
@@ -1,615 +1,615 @@ (whole-file rewrite; the removed and added lines are textually identical)
import sys
import os.path
import math
import time
import warnings

import geopandas
import numpy
import pandas
import rasterio
import rasterio.mask  # rasterio.mask is not pulled in by `import rasterio` alone
import shapely

from multiprocessing.pool import Pool
from numpy.lib.stride_tricks import as_strided

from beratools.tools.common import *

# Suppress the pandas UserWarning (SettingWithCopyWarning):
# "A value is trying to be set on a copy of a slice from a DataFrame.
#  Try using .loc[row_indexer, col_indexer] = value instead"
warnings.simplefilter(action='ignore', category=UserWarning)

USE_MULTI_PROCESSING = True


class OperationCancelledException(Exception):
    pass


# Sliding-window helpers by Dan Patterson

def _check(a, r_c, subok=False):
    """Perform the array checks necessary for stride and block.
    : a - array or list.
    : r_c - tuple/list/array of rows x cols.
    : subok - added from numpy 1.12, keep for now.
    :Returns:
    :------
    : Attempts will be made to produce a shape of at least (1*c). For a
    : scalar, the minimum shape will be (1*r) for a 1D array or (1*c)
    : for a 2D array if r < c. Be aware.
    """
    if isinstance(r_c, (int, float)):
        r_c = (1, int(r_c))
    r, c = r_c
    if a.ndim == 1:
        a = numpy.atleast_2d(a)
    r, c = r_c = (min(r, a.shape[0]), min(c, a.shape[1]))
    a = numpy.array(a, copy=False, subok=subok)
    return a, r, c, tuple(r_c)


def _pad(in_array, kernel):
    """Pad a sliding array to allow for stats at the edges."""
    # Pad by half the kernel size on each side, rows and columns
    pad_r = kernel.shape[0] // 2
    pad_c = kernel.shape[1] // 2
    result = numpy.pad(in_array, pad_width=((pad_r, pad_r), (pad_c, pad_c)),
                       mode="constant", constant_values=(numpy.nan, numpy.nan))
    return result


def stride(a, r_c):
    """Provide a 2D sliding/moving view of an array.
    : There is no edge correction for outputs.
    :
    :Requires:
    :--------
    : _check(a, r_c) ... runs the checks on the inputs.
    : a - array or list, usually a 2D array. Assumes rows >= 1;
    :     it is corrected, as is the number of columns.
    : r_c - tuple/list/array of rows x cols. Attempts to produce a
    :     shape of at least (1*c). For a scalar, the minimum shape will
    :     be (1*r) for a 1D array, or a 2D array if r < c. Be aware.
    """
    a, r, c, r_c = _check(a, r_c)
    shape = (a.shape[0] - r + 1, a.shape[1] - c + 1) + r_c
    strides = a.strides * 2
    a_s = (as_strided(a, shape=shape, strides=strides)).squeeze()
    return a_s
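
# Illustrative sketch, not part of the original module: a quick check of
# what stride() returns. For a 4x4 input and a 3x3 window, the result is a
# 2x2 grid of overlapping 3x3 views that share the input's memory (no copy).
def _demo_stride():
    a = numpy.arange(16, dtype='float64').reshape(4, 4)
    windows = stride(a, (3, 3))
    assert windows.shape == (2, 2, 3, 3)
    # windows[0, 0] is the top-left 3x3 block of `a`
    assert numpy.array_equal(windows[0, 0], a[0:3, 0:3])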

def cal_sar(a_s, cell_x, cell_y, diag):
    # Jenness, J. S. 2004. Calculating landscape surface area from digital
    # elevation models. Wildlife Society Bulletin 32(3):829-839.
    # For SAR:
    # kernel index       cell values (example)
    # 0,0  0,1  0,2      4.326477  9.00671   10.430054
    # 1,0  1,1  1,2      7.472778  7.408875  4.323486
    # 2,0  2,1  2,2      8.534485  8.106201  7.350098
    #
    # Direction
    # 8  1       2
    # 7  Center  3
    # 6  5       4
    #
    center = a_s[1, 1]

    # Pythagorean theorem
    if not numpy.isnan(center):
        # Centre to the 8 directions
        dir1 = math.sqrt(abs(center - a_s[0, 1]) ** 2 + cell_y ** 2)
        dir2 = math.sqrt(abs(center - a_s[0, 2]) ** 2 + diag ** 2)
        dir3 = math.sqrt(abs(center - a_s[1, 2]) ** 2 + cell_x ** 2)
        dir4 = math.sqrt(abs(center - a_s[2, 2]) ** 2 + diag ** 2)
        dir5 = math.sqrt(abs(center - a_s[2, 1]) ** 2 + cell_y ** 2)
        dir6 = math.sqrt(abs(center - a_s[2, 0]) ** 2 + diag ** 2)
        dir7 = math.sqrt(abs(center - a_s[1, 0]) ** 2 + cell_x ** 2)
        dir8 = math.sqrt(abs(center - a_s[0, 0]) ** 2 + diag ** 2)
        # The 8 outer sides
        dir1_2 = math.sqrt(abs(a_s[0, 1] - a_s[0, 2]) ** 2 + cell_x ** 2)
        dir2_3 = math.sqrt(abs(a_s[0, 2] - a_s[1, 2]) ** 2 + cell_y ** 2)
        dir3_4 = math.sqrt(abs(a_s[1, 2] - a_s[2, 2]) ** 2 + cell_y ** 2)
        dir4_5 = math.sqrt(abs(a_s[2, 2] - a_s[2, 1]) ** 2 + cell_x ** 2)
        dir5_6 = math.sqrt(abs(a_s[2, 1] - a_s[2, 0]) ** 2 + cell_x ** 2)
        dir6_7 = math.sqrt(abs(a_s[2, 0] - a_s[1, 0]) ** 2 + cell_y ** 2)
        dir7_8 = math.sqrt(abs(a_s[1, 0] - a_s[0, 0]) ** 2 + cell_y ** 2)
        dir8_1 = math.sqrt(abs(a_s[0, 0] - a_s[0, 1]) ** 2 + cell_x ** 2)

        # Heron's formula (Heron of Alexandria and Archimedes; see also
        # Abramowitz and Stegun [1972, p. 79]):
        p1 = (dir1 + dir2 + dir1_2) / 2
        area1 = math.sqrt(p1 * (p1 - dir1) * (p1 - dir2) * (p1 - dir1_2))
        p2 = (dir2 + dir3 + dir2_3) / 2
        area2 = math.sqrt(p2 * (p2 - dir2) * (p2 - dir3) * (p2 - dir2_3))
        p3 = (dir3 + dir4 + dir3_4) / 2
        area3 = math.sqrt(p3 * (p3 - dir3) * (p3 - dir4) * (p3 - dir3_4))
        p4 = (dir4 + dir5 + dir4_5) / 2
        area4 = math.sqrt(p4 * (p4 - dir4) * (p4 - dir5) * (p4 - dir4_5))
        p5 = (dir5 + dir6 + dir5_6) / 2
        area5 = math.sqrt(p5 * (p5 - dir5) * (p5 - dir6) * (p5 - dir5_6))
        p6 = (dir6 + dir7 + dir6_7) / 2
        area6 = math.sqrt(p6 * (p6 - dir6) * (p6 - dir7) * (p6 - dir6_7))
        p7 = (dir7 + dir8 + dir7_8) / 2
        area7 = math.sqrt(p7 * (p7 - dir7) * (p7 - dir8) * (p7 - dir7_8))
        p8 = (dir8 + dir1 + dir8_1) / 2
        area8 = math.sqrt(p8 * (p8 - dir8) * (p8 - dir1) * (p8 - dir8_1))
        areas = [area1, area2, area3, area4, area5, area6, area7, area8]
        surface_area = 0
        for area in areas:
            if not math.isnan(area):
                surface_area = surface_area + area
        return surface_area
    else:
        surface_area = math.nan
        return surface_area
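
# Illustrative sketch, not part of the original module: cal_sar() on the
# example window from the comment above, plus a sanity check on a flat
# window. For a flat 3x3 window with 1 m cells the eight triangles tile a
# 2 m x 2 m square, so the triangulated surface area comes out as 4.0.
def _demo_cal_sar():
    cell_x = cell_y = 1.0
    diag = math.sqrt(cell_x ** 2 + cell_y ** 2)
    window = numpy.array([[4.326477, 9.00671, 10.430054],
                          [7.472778, 7.408875, 4.323486],
                          [8.534485, 8.106201, 7.350098]])
    assert cal_sar(window, cell_x, cell_y, diag) > 4.0  # rugged > flat
    flat = numpy.full((3, 3), 100.0)
    assert math.isclose(cal_sar(flat, cell_x, cell_y, diag), 4.0)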

def cal_tri(a_s):
    # Terrain Ruggedness Index (TRI) for one 3x3 window.
    # Refer to
    # https://livingatlas-dcdev.opendata.arcgis.com/content/28360713391948af9303c0aeabb45afd/about
    # For example, TRI computed from elevation data may be interpreted,
    # from low to high, as: a level terrain surface, a nearly level
    # surface, a slightly rugged surface, an intermediately rugged
    # surface, a moderately rugged surface, a highly rugged surface, and
    # an extremely rugged surface.
    if not numpy.isnan(a_s[1, 1]):
        result = math.sqrt(abs((numpy.nanmax(a_s)) ** 2 - (numpy.nanmin(a_s)) ** 2))
    else:
        result = math.nan
    return result
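
# Illustrative sketch, not part of the original module: TRI of a single
# 3x3 window. A flat window gives 0; a larger elevation spread gives a
# larger value.
def _demo_cal_tri():
    flat = numpy.full((3, 3), 100.0)
    rough = numpy.array([[90.0, 95.0, 100.0],
                         [105.0, 100.0, 110.0],
                         [120.0, 98.0, 93.0]])
    assert cal_tri(flat) == 0.0
    assert cal_tri(rough) > cal_tri(flat)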

def cal_index(in_ndarray, cell_x, cell_y, metric_type):
    kernel = numpy.ones((3, 3), dtype=int)
    padded_array = _pad(in_ndarray, kernel)
    a_s = stride(padded_array, kernel.shape)
    rows, cols = a_s.shape[0], a_s.shape[1]
    result = numpy.full((rows, cols), numpy.nan, dtype='float64')
    diag = math.sqrt(cell_x ** 2 + cell_y ** 2)
    planar_area = (cell_y * cell_x) / 2  # planar reference area used for the SAR ratio (half of one cell)

    if metric_type == 'SAR':
        for y in range(rows):
            for x in range(cols):
                result[y, x] = cal_sar(a_s[y, x], cell_x, cell_y, diag)
        total_surface_area = numpy.nansum(result)
        with numpy.errstate(divide='ignore', invalid='ignore'):
            result_ratio = numpy.true_divide(result, planar_area)

        return result_ratio, total_surface_area
    elif metric_type == 'TRI':
        for y in range(rows):
            for x in range(cols):
                result[y, x] = cal_tri(a_s[y, x])
        return result
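
# Illustrative sketch, not part of the original module: running cal_index()
# over a small synthetic DEM. The 'SAR' branch returns a per-cell ratio grid
# plus the summed surface area; both output grids match the input shape.
def _demo_cal_index():
    dem = numpy.array([[1.0, 1.5, 2.0, 2.5],
                       [1.2, 1.8, 2.2, 2.8],
                       [1.1, 1.6, 2.1, 2.6],
                       [1.3, 1.7, 2.3, 2.9]])
    sar_ratio, total_area = cal_index(dem, 1.0, 1.0, 'SAR')
    assert sar_ratio.shape == dem.shape and total_area > 0
    tri = cal_index(dem, 1.0, 1.0, 'TRI')
    assert tri.shape == dem.shape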

def forest_metrics(callback, in_line, out_line, raster_type, in_raster, proc_segments, buffer_ln_dist,
                   cl_metrics_gap, forest_buffer_dist, processes, verbose, worklnbuffer_dfLR=None):
    output_path, output_file = os.path.split(out_line)
    output_filename, file_ext = os.path.splitext(output_file)
    if not os.path.exists(output_path):
        print("No output file path found, please check.")
        sys.exit()
    elif file_ext.lower() != ".shp":
        print("Output file type should be a shapefile, please check.")
        sys.exit()

    try:
        line_seg = geopandas.read_file(in_line)
    except Exception:
        print(sys.exc_info())
        sys.exit()

    # Check that the line and raster features use the same coordinate system
    with rasterio.open(in_raster) as in_image:
        if in_image.crs.to_epsg() != line_seg.crs.to_epsg():
            print("Line and raster spatial references are not the same, please check.")
            sys.exit()

    # Check for the OLnFID column; if it is missing, it will be created
    if 'OLnFID' not in line_seg.columns.array:
        print("Cannot find the OLnFID column in the input line data. An 'OLnFID' column will be created.")
        line_seg['OLnFID'] = line_seg.index

    if proc_segments:
        line_seg = split_into_Equal_Nth_segments(line_seg)

    # Copy the original line input into separate GeoDataFrames
    workln_dfC = line_seg.copy()
    workln_dfL = line_seg.copy()
    workln_dfR = line_seg.copy()

    # Create the centre-line (CL) area and parallel lines on both sides of the input lines
    print("Creating area for CL....")
    workln_dfC['geometry'] = workln_dfC['geometry'].simplify(tolerance=0.05, preserve_topology=True)
    worklnbuffer_dfC = workln_dfC.copy()
    worklnbuffer_dfC['geometry'] = shapely.buffer(workln_dfC['geometry'], distance=float(buffer_ln_dist),
                                                  cap_style=2, join_style=2, single_sided=False)

    print("Creating offset area for surrounding forest....")
    workln_dfL, workln_dfR = multiprocessing_copyparallel_lineLR(
        workln_dfL, workln_dfR, processes,
        left_dis=float(buffer_ln_dist + cl_metrics_gap),
        right_dist=-float(buffer_ln_dist + cl_metrics_gap))
    workln_dfR = workln_dfR.sort_values(by=['OLnFID'])
    workln_dfL = workln_dfL.sort_values(by=['OLnFID'])
    workln_dfL = workln_dfL.reset_index(drop=True)
    workln_dfR = workln_dfR.reset_index(drop=True)

    print('%{}'.format(30))

    worklnbuffer_dfL = workln_dfL.copy()
    worklnbuffer_dfR = workln_dfR.copy()

    # Buffer each parallel line on one side only (extend the area into the forest)
    worklnbuffer_dfL['geometry'] = shapely.buffer(workln_dfL['geometry'], distance=float(forest_buffer_dist),
                                                  cap_style=2, join_style=2, single_sided=True)
    worklnbuffer_dfR['geometry'] = shapely.buffer(workln_dfR['geometry'], distance=-float(forest_buffer_dist),
                                                  cap_style=2, join_style=2, single_sided=True)
    print("Creating offset area for surrounding forest....Done")
    print('%{}'.format(50))
    # (Placeholder: additional per-side statistics columns, e.g. height
    # percentiles, may be added in the future.)
    print('%{}'.format(80))

    print("Calculating CL metrics....")
    worklnbuffer_dfC, new_col_c = multiprocessing_metrics(worklnbuffer_dfC, in_raster, raster_type, processes,
                                                          side='center')
    worklnbuffer_dfC = worklnbuffer_dfC.sort_values(by=['OLnFID'])
    worklnbuffer_dfC = worklnbuffer_dfC.reset_index(drop=True)

    print("Calculating surrounding forest metrics....")
    worklnbuffer_dfL, new_col_l = multiprocessing_metrics(worklnbuffer_dfL, in_raster, raster_type, processes,
                                                          side='left')
    worklnbuffer_dfL = worklnbuffer_dfL.sort_values(by=['OLnFID'])
    worklnbuffer_dfL = worklnbuffer_dfL.reset_index(drop=True)

    worklnbuffer_dfR, new_col_r = multiprocessing_metrics(worklnbuffer_dfR, in_raster, raster_type, processes,
                                                          side='right')
    worklnbuffer_dfR = worklnbuffer_dfR.sort_values(by=['OLnFID'])
    worklnbuffer_dfR = worklnbuffer_dfR.reset_index(drop=True)

    print('%{}'.format(90))

    all_new_col = numpy.append(numpy.array(new_col_c), numpy.array(new_col_l))
    all_new_col = numpy.append(all_new_col, numpy.array(new_col_r))

    # Copy the centre/left/right statistics back onto the input lines
    for index in line_seg.index:
        if raster_type == 'DEM':
            for col in all_new_col:
                if "C_" in col:
                    line_seg.loc[index, col] = worklnbuffer_dfC.loc[index, col]
                elif "L_" in col:
                    line_seg.loc[index, col] = worklnbuffer_dfL.loc[index, col]
                elif "R_" in col:
                    line_seg.loc[index, col] = worklnbuffer_dfR.loc[index, col]
    print("Calculating forest metrics....Done")

    print("Saving forest metrics output.....")
    line_seg.to_file(out_line)
    print("Saving forest metrics output.....Done")
    del line_seg, worklnbuffer_dfL, worklnbuffer_dfR, workln_dfL, workln_dfR

    print('%{}'.format(100))
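
# Illustrative sketch, not part of the original module: the offset-and-buffer
# pattern used above, reduced to a single line. A parallel offset is taken on
# each side of the centre line, then buffered on one side only so the sampled
# area extends away from the line into the surrounding forest.
def _demo_offset_and_buffer():
    line = shapely.LineString([(0, 0), (100, 0)])
    left_offset = line.offset_curve(10)    # positive distance: 10 m to the left
    right_offset = line.offset_curve(-10)  # negative distance: 10 m to the right
    left_area = shapely.buffer(left_offset, 30, cap_style=2, join_style=2, single_sided=True)
    right_area = shapely.buffer(right_offset, -30, cap_style=2, join_style=2, single_sided=True)
    assert not left_area.is_empty and not right_area.is_empty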

# TODO: inspect duplicates; see split_line_npart in line_footprint_functions.py
def split_line_nPart(line):
    from shapely.ops import split
    # Work out the number of parts for each line (one part per 10 m)
    n = math.ceil(line.length / 10)
    if n > 1:
        # Divide the line into n-1 equal parts
        distances = numpy.linspace(0, line.length, n)
        points = [line.interpolate(distance) for distance in distances]
        split_points = shapely.multipoints(points)
        mline = split(line, split_points)
    else:
        mline = line
    return mline


def split_into_Equal_Nth_segments(df):
    odf = df
    crs = odf.crs
    if 'OLnSEG' not in odf.columns.array:
        df['OLnSEG'] = numpy.nan
    df = odf.assign(geometry=odf.apply(lambda x: split_line_nPart(x.geometry), axis=1))
    df = df.explode()

    df['OLnSEG'] = df.groupby('OLnFID').cumcount()
    gdf = geopandas.GeoDataFrame(df, geometry=df.geometry, crs=crs)
    gdf = gdf.sort_values(by=['OLnFID', 'OLnSEG'])
    gdf = gdf.reset_index(drop=True)
    return gdf
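
# Illustrative sketch, not part of the original module: split_line_nPart()
# on a 35 m line. math.ceil(35 / 10) = 4, so the line is cut at 4 evenly
# spaced points (including both ends), giving 3 equal segments.
def _demo_split_line():
    line = shapely.LineString([(0, 0), (35, 0)])
    parts = split_line_nPart(line)
    segments = list(parts.geoms)
    assert len(segments) == 3
    assert all(math.isclose(seg.length, 35 / 3) for seg in segments)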

def split_line_fc(line):
    return list(map(shapely.LineString, zip(line.coords[:-1], line.coords[1:])))


def split_into_every_segments(df):
    odf = df
    crs = odf.crs
    if 'OLnSEG' not in odf.columns.array:
        df['OLnSEG'] = numpy.nan
    df = odf.assign(geometry=odf.apply(lambda x: split_line_fc(x.geometry), axis=1))
    df = df.explode()

    df['OLnSEG'] = df.groupby('OLnFID').cumcount()
    gdf = geopandas.GeoDataFrame(df, geometry=df.geometry, crs=crs)
    gdf = gdf.sort_values(by=['OLnFID', 'OLnSEG'])
    gdf = gdf.reset_index(drop=True)
    return gdf


def multiprocessing_copyparallel_lineLR(dfL, dfR, processes, left_dis, right_dist):
    try:
        line_arg = []
        total_steps = len(dfL)

        for item in dfL.index:
            item_list = [dfL, dfR, left_dis, right_dist, item]
            line_arg.append(item_list)

        featuresL = []
        featuresR = []
        chunksize = math.ceil(total_steps / processes)
        with Pool(processes=int(processes)) as pool:
            step = 0
            # Submit tasks in order; results arrive out of order
            for resultL, resultR in pool.imap_unordered(copyparallel_lineLR, line_arg, chunksize=chunksize):
                if BT_DEBUGGING:
                    print('Got result: {} {}'.format(resultL, resultR), flush=True)
                featuresL.append(resultL)
                featuresR.append(resultR)
                step += 1
                print('%{}'.format(step / total_steps * 100))
        return geopandas.GeoDataFrame(pandas.concat(featuresL)), geopandas.GeoDataFrame(pandas.concat(featuresR))
    except OperationCancelledException:
        print("Operation cancelled")


def multiprocessing_metrics(df, in_raster, raster_type, processes, side):
    try:
        line_arg = []
        total_steps = len(df)
        if side == 'left':
            PerCol = 'L'
        elif side == 'right':
            PerCol = 'R'
        else:
            PerCol = 'C'

        for item in df.index:
            item_list = [df.iloc[[item]], in_raster, item, PerCol, raster_type]
            line_arg.append(item_list)
        features = []
        chunksize = math.ceil(total_steps / processes)

        if USE_MULTI_PROCESSING:
            with Pool(processes=int(processes)) as pool:
                step = 0
                # Submit tasks in order; results arrive out of order
                try:
                    for result in pool.imap_unordered(cal_metrics, line_arg, chunksize=chunksize):
                        if BT_DEBUGGING:
                            print('Got result: {}'.format(result), flush=True)
                        features.append(result)
                        step += 1
                        print('%{}'.format(step / total_steps * 100))
                except Exception as e:
                    print(e)
                    raise
                del line_arg
        else:
            for row in line_arg:
                features.append(cal_metrics(row))

        df = geopandas.GeoDataFrame(pandas.concat(features))
        # Collect the centre/left/right statistic columns that were added
        new_col = [col for col in df.columns.array
                   if "C_" in col or "L_" in col or "R_" in col]
        return df, new_col
    except OperationCancelledException:
        print("Operation cancelled")

def cal_metrics(line_arg):
    try:
        df = line_arg[0]
        raster = line_arg[1]
        row_index = line_arg[2]
        PerCol = line_arg[3]
        raster_type = line_arg[4]
        line_buffer = df.loc[row_index, 'geometry']
    except Exception:
        print("Assigning variables for index {} raised: {}".format(line_arg, sys.exc_info()))
        sys.exit()
    try:
        with rasterio.open(raster) as image:
            clipped_raster, out_transform = rasterio.mask.mask(image, [line_buffer], crop=True,
                                                               nodata=BT_NODATA, filled=True)
            clipped_raster = numpy.squeeze(clipped_raster, axis=0)
            cell_x, cell_y = image.res
            cell_area = cell_x * cell_y
            # Mask all -9999 (nodata) cells
            masked_raster = numpy.ma.masked_where(clipped_raster == BT_NODATA, clipped_raster)
            filled_raster = numpy.ma.filled(masked_raster, numpy.nan)

            # Calculate the metrics (only computed when raster_type is "DEM")
            if raster_type == "DEM":
                # Surface area ratio (SAR)
                SAR, total_surface_area = cal_index(filled_raster, cell_x, cell_y, 'SAR')
                SAR_mean = numpy.nanmean(SAR)
                SAR_percentile90 = numpy.nanpercentile(SAR, 90, method='hazen')
                SAR_median = numpy.nanmedian(SAR)
                SAR_std = numpy.nanstd(SAR)
                SAR_max = numpy.nanmax(SAR)
                SAR_min = numpy.nanmin(SAR)

                # General statistics
                total_planar_area = numpy.ma.count(masked_raster) * cell_area
                cell_volume = masked_raster * cell_area
                total_volume = numpy.ma.sum(cell_volume)
                mean = numpy.nanmean(filled_raster)
                percentile90 = numpy.nanpercentile(filled_raster, 90, method='hazen')
                median = numpy.nanmedian(filled_raster)
                std = numpy.nanstd(filled_raster)
                val_max = numpy.nanmax(filled_raster)
                val_min = numpy.nanmin(filled_raster)

                # Terrain Ruggedness Index (TRI)
                # TRI = cal_index(filled_raster, cell_x, cell_y, 'TRI')
                # TRI_mean = numpy.nanmean(TRI)
                # TRI_percentile90 = numpy.nanpercentile(TRI, 90, method='hazen')
                # TRI_median = numpy.nanmedian(TRI)
                # TRI_std = numpy.nanstd(TRI)
                # TRI_max = numpy.nanmax(TRI)
                # TRI_min = numpy.nanmin(TRI)

            del clipped_raster, out_transform
    except Exception:
        print(sys.exc_info())
    try:
        # Write the general statistics
        df.at[row_index, PerCol + '_PlArea'] = total_planar_area
        df.at[row_index, PerCol + '_mean'] = mean
        # df.at[row_index, PerCol + '_P90'] = percentile90
        # df.at[row_index, PerCol + '_median'] = median
        df.at[row_index, PerCol + '_StdDev'] = std
        df.at[row_index, PerCol + '_max'] = val_max
        df.at[row_index, PerCol + '_min'] = val_min
        # df.at[row_index, PerCol + '_Vol'] = total_volume

        # Write the SAR statistics
        df.at[row_index, PerCol + '_SurArea'] = total_surface_area
        df.at[row_index, PerCol + '_SARmean'] = SAR_mean
        # df.at[row_index, PerCol + '_SARP90'] = SAR_percentile90
        # df.at[row_index, PerCol + '_SARmed'] = SAR_median
        df.at[row_index, PerCol + '_SARStd'] = SAR_std
        df.at[row_index, PerCol + '_SARmax'] = SAR_max
        df.at[row_index, PerCol + '_SARmin'] = SAR_min

        # Write the TRI statistics
        # df.at[row_index, PerCol + '_TRImean'] = TRI_mean
        # df.at[row_index, PerCol + '_TRIP90'] = TRI_percentile90
        # df.at[row_index, PerCol + '_TRImed'] = TRI_median
        # df.at[row_index, PerCol + '_TRIStd'] = TRI_std
        # df.at[row_index, PerCol + '_TRImax'] = TRI_max
        # df.at[row_index, PerCol + '_TRImin'] = TRI_min

        return df
    except Exception:
        print("Error writing forest metrics into table: {}".format(sys.exc_info()))


def copyparallel_lineLR(line_arg):
    dfL = line_arg[0]
    dfR = line_arg[1]
    # Simplify the input centre lines
    lineL = dfL.loc[line_arg[4], 'geometry'].simplify(tolerance=0.05, preserve_topology=True)
    lineR = dfR.loc[line_arg[4], 'geometry'].simplify(tolerance=0.05, preserve_topology=True)
    offset_distL = float(line_arg[2])
    offset_distR = float(line_arg[3])

    # parallel_offset() is the older alternative to offset_curve(): it uses
    # resolution instead of quad_segs and a side keyword ('left'/'right')
    # instead of the sign of the distance. It is kept for backwards
    # compatibility for now, but offset_curve() is recommended instead.
    # (ref: https://shapely.readthedocs.io/en/stable/manual.html#object.offset_curve)
    parallel_lineL = lineL.parallel_offset(distance=offset_distL, side='left',
                                           join_style=shapely.BufferJoinStyle.mitre)
    parallel_lineR = lineR.parallel_offset(distance=-offset_distR, side='right',
                                           join_style=shapely.BufferJoinStyle.mitre)

    if not parallel_lineL.is_empty:
        dfL.loc[line_arg[4], 'geometry'] = parallel_lineL
    if not parallel_lineR.is_empty:
        dfR.loc[line_arg[4], 'geometry'] = parallel_lineR
    return dfL.iloc[[line_arg[4]]], dfR.iloc[[line_arg[4]]]
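
# Illustrative sketch, not part of the original module: the offset_curve()
# equivalent of the parallel_offset() calls above. With offset_curve() the
# side is encoded in the sign of the distance: positive offsets to the
# left, negative to the right.
def _demo_offset_curve():
    line = shapely.LineString([(0, 0), (50, 0), (100, 10)])
    left = shapely.offset_curve(line, 10, join_style=shapely.BufferJoinStyle.mitre)
    right = shapely.offset_curve(line, -10, join_style=shapely.BufferJoinStyle.mitre)
    assert not left.is_empty and not right.is_empty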

if __name__ == '__main__':
    start_time = time.time()
    print('Starting forest metrics calculation processing\n @ {}'.format(
        time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime())))

    print("Checking input parameters....")
    in_args, in_verbose = check_arguments()

    verbose = in_args.verbose == 'True'
    forest_metrics(print, **in_args.input, processes=int(in_args.processes), verbose=verbose)

    print('%{}'.format(100))
    print('Forest metrics calculation processing is done @ {}\n(in {} seconds)'.format(
        time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime()), round(time.time() - start_time, 5)))