BERATools 0.2.0__py3-none-any.whl → 0.2.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (153)
  1. beratools/__init__.py +1 -7
  2. beratools/core/algo_centerline.py +491 -351
  3. beratools/core/algo_common.py +497 -0
  4. beratools/core/algo_cost.py +192 -0
  5. beratools/core/{dijkstra_algorithm.py → algo_dijkstra.py} +503 -460
  6. beratools/core/algo_footprint_rel.py +577 -0
  7. beratools/core/algo_line_grouping.py +944 -0
  8. beratools/core/algo_merge_lines.py +214 -0
  9. beratools/core/algo_split_with_lines.py +304 -0
  10. beratools/core/algo_tiler.py +428 -0
  11. beratools/core/algo_vertex_optimization.py +469 -0
  12. beratools/core/constants.py +52 -86
  13. beratools/core/logger.py +76 -85
  14. beratools/core/tool_base.py +196 -133
  15. beratools/gui/__init__.py +11 -15
  16. beratools/gui/{beratools.json → assets/beratools.json} +2185 -2300
  17. beratools/gui/batch_processing_dlg.py +513 -463
  18. beratools/gui/bt_data.py +481 -487
  19. beratools/gui/bt_gui_main.py +710 -691
  20. beratools/gui/main.py +26 -0
  21. beratools/gui/map_window.py +162 -146
  22. beratools/gui/tool_widgets.py +725 -493
  23. beratools/tools/Beratools_r_script.r +1120 -1120
  24. beratools/tools/Ht_metrics.py +116 -116
  25. beratools/tools/__init__.py +7 -7
  26. beratools/tools/batch_processing.py +136 -132
  27. beratools/tools/canopy_threshold_relative.py +672 -670
  28. beratools/tools/canopycostraster.py +222 -222
  29. beratools/tools/centerline.py +136 -176
  30. beratools/tools/common.py +857 -885
  31. beratools/tools/fl_regen_csf.py +428 -428
  32. beratools/tools/forest_line_attributes.py +408 -408
  33. beratools/tools/line_footprint_absolute.py +213 -363
  34. beratools/tools/line_footprint_fixed.py +436 -282
  35. beratools/tools/line_footprint_functions.py +733 -720
  36. beratools/tools/line_footprint_relative.py +73 -64
  37. beratools/tools/line_grouping.py +45 -0
  38. beratools/tools/ln_relative_metrics.py +615 -615
  39. beratools/tools/r_cal_lpi_elai.r +24 -24
  40. beratools/tools/r_generate_pd_focalraster.r +100 -100
  41. beratools/tools/r_interface.py +79 -79
  42. beratools/tools/r_point_density.r +8 -8
  43. beratools/tools/rpy_chm2trees.py +86 -86
  44. beratools/tools/rpy_dsm_chm_by.py +81 -81
  45. beratools/tools/rpy_dtm_by.py +63 -63
  46. beratools/tools/rpy_find_cellsize.py +43 -43
  47. beratools/tools/rpy_gnd_csf.py +74 -74
  48. beratools/tools/rpy_hummock_hollow.py +85 -85
  49. beratools/tools/rpy_hummock_hollow_raster.py +71 -71
  50. beratools/tools/rpy_las_info.py +51 -51
  51. beratools/tools/rpy_laz2las.py +40 -40
  52. beratools/tools/rpy_lpi_elai_lascat.py +466 -466
  53. beratools/tools/rpy_normalized_lidar_by.py +56 -56
  54. beratools/tools/rpy_percent_above_dbh.py +80 -80
  55. beratools/tools/rpy_points2trees.py +88 -88
  56. beratools/tools/rpy_vegcoverage.py +94 -94
  57. beratools/tools/tiler.py +48 -206
  58. beratools/tools/tool_template.py +69 -54
  59. beratools/tools/vertex_optimization.py +61 -620
  60. beratools/tools/zonal_threshold.py +144 -144
  61. beratools-0.2.1.dist-info/METADATA +109 -0
  62. beratools-0.2.1.dist-info/RECORD +74 -0
  63. {beratools-0.2.0.dist-info → beratools-0.2.1.dist-info}/WHEEL +1 -1
  64. {beratools-0.2.0.dist-info → beratools-0.2.1.dist-info}/licenses/LICENSE +22 -22
  65. beratools/gui/cli.py +0 -18
  66. beratools/gui/gui.json +0 -8
  67. beratools/gui_tk/ASCII Banners.txt +0 -248
  68. beratools/gui_tk/__init__.py +0 -20
  69. beratools/gui_tk/beratools_main.py +0 -515
  70. beratools/gui_tk/bt_widgets.py +0 -442
  71. beratools/gui_tk/cli.py +0 -18
  72. beratools/gui_tk/img/BERALogo.png +0 -0
  73. beratools/gui_tk/img/closed.gif +0 -0
  74. beratools/gui_tk/img/closed.png +0 -0
  75. beratools/gui_tk/img/open.gif +0 -0
  76. beratools/gui_tk/img/open.png +0 -0
  77. beratools/gui_tk/img/tool.gif +0 -0
  78. beratools/gui_tk/img/tool.png +0 -0
  79. beratools/gui_tk/main.py +0 -14
  80. beratools/gui_tk/map_window.py +0 -144
  81. beratools/gui_tk/runner.py +0 -1481
  82. beratools/gui_tk/tooltip.py +0 -55
  83. beratools/third_party/pyqtlet2/__init__.py +0 -9
  84. beratools/third_party/pyqtlet2/leaflet/__init__.py +0 -26
  85. beratools/third_party/pyqtlet2/leaflet/control/__init__.py +0 -6
  86. beratools/third_party/pyqtlet2/leaflet/control/control.py +0 -59
  87. beratools/third_party/pyqtlet2/leaflet/control/draw.py +0 -52
  88. beratools/third_party/pyqtlet2/leaflet/control/layers.py +0 -20
  89. beratools/third_party/pyqtlet2/leaflet/core/Parser.py +0 -24
  90. beratools/third_party/pyqtlet2/leaflet/core/__init__.py +0 -2
  91. beratools/third_party/pyqtlet2/leaflet/core/evented.py +0 -180
  92. beratools/third_party/pyqtlet2/leaflet/layer/__init__.py +0 -5
  93. beratools/third_party/pyqtlet2/leaflet/layer/featuregroup.py +0 -34
  94. beratools/third_party/pyqtlet2/leaflet/layer/icon/__init__.py +0 -1
  95. beratools/third_party/pyqtlet2/leaflet/layer/icon/icon.py +0 -30
  96. beratools/third_party/pyqtlet2/leaflet/layer/imageoverlay.py +0 -18
  97. beratools/third_party/pyqtlet2/leaflet/layer/layer.py +0 -105
  98. beratools/third_party/pyqtlet2/leaflet/layer/layergroup.py +0 -45
  99. beratools/third_party/pyqtlet2/leaflet/layer/marker/__init__.py +0 -1
  100. beratools/third_party/pyqtlet2/leaflet/layer/marker/marker.py +0 -91
  101. beratools/third_party/pyqtlet2/leaflet/layer/tile/__init__.py +0 -2
  102. beratools/third_party/pyqtlet2/leaflet/layer/tile/gridlayer.py +0 -4
  103. beratools/third_party/pyqtlet2/leaflet/layer/tile/tilelayer.py +0 -16
  104. beratools/third_party/pyqtlet2/leaflet/layer/vector/__init__.py +0 -5
  105. beratools/third_party/pyqtlet2/leaflet/layer/vector/circle.py +0 -15
  106. beratools/third_party/pyqtlet2/leaflet/layer/vector/circlemarker.py +0 -18
  107. beratools/third_party/pyqtlet2/leaflet/layer/vector/path.py +0 -5
  108. beratools/third_party/pyqtlet2/leaflet/layer/vector/polygon.py +0 -14
  109. beratools/third_party/pyqtlet2/leaflet/layer/vector/polyline.py +0 -18
  110. beratools/third_party/pyqtlet2/leaflet/layer/vector/rectangle.py +0 -14
  111. beratools/third_party/pyqtlet2/leaflet/map/__init__.py +0 -1
  112. beratools/third_party/pyqtlet2/leaflet/map/map.py +0 -220
  113. beratools/third_party/pyqtlet2/mapwidget.py +0 -45
  114. beratools/third_party/pyqtlet2/web/custom.js +0 -43
  115. beratools/third_party/pyqtlet2/web/map.html +0 -23
  116. beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/layers-2x.png +0 -0
  117. beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/layers.png +0 -0
  118. beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/marker-icon-2x.png +0 -0
  119. beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/marker-icon.png +0 -0
  120. beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/marker-shadow.png +0 -0
  121. beratools/third_party/pyqtlet2/web/modules/leaflet_193/leaflet.css +0 -656
  122. beratools/third_party/pyqtlet2/web/modules/leaflet_193/leaflet.js +0 -6
  123. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.codeclimate.yml +0 -14
  124. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.editorconfig +0 -4
  125. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.gitattributes +0 -22
  126. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.travis.yml +0 -43
  127. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/LICENSE +0 -20
  128. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/layers-2x.png +0 -0
  129. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/layers.png +0 -0
  130. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/marker-icon-2x.png +0 -0
  131. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/marker-icon.png +0 -0
  132. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/marker-shadow.png +0 -0
  133. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/spritesheet-2x.png +0 -0
  134. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/spritesheet.png +0 -0
  135. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/spritesheet.svg +0 -156
  136. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/leaflet.draw.css +0 -10
  137. beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/leaflet.draw.js +0 -10
  138. beratools/third_party/pyqtlet2/web/modules/leaflet_rotatedMarker_020/LICENSE +0 -22
  139. beratools/third_party/pyqtlet2/web/modules/leaflet_rotatedMarker_020/leaflet.rotatedMarker.js +0 -57
  140. beratools/tools/forest_line_ecosite.py +0 -216
  141. beratools/tools/lapis_all.py +0 -103
  142. beratools/tools/least_cost_path_from_chm.py +0 -152
  143. beratools-0.2.0.dist-info/METADATA +0 -63
  144. beratools-0.2.0.dist-info/RECORD +0 -142
  145. /beratools/gui/{img → assets}/BERALogo.png +0 -0
  146. /beratools/gui/{img → assets}/closed.gif +0 -0
  147. /beratools/gui/{img → assets}/closed.png +0 -0
  148. /beratools/{gui_tk → gui/assets}/gui.json +0 -0
  149. /beratools/gui/{img → assets}/open.gif +0 -0
  150. /beratools/gui/{img → assets}/open.png +0 -0
  151. /beratools/gui/{img → assets}/tool.gif +0 -0
  152. /beratools/gui/{img → assets}/tool.png +0 -0
  153. {beratools-0.2.0.dist-info → beratools-0.2.1.dist-info}/entry_points.txt +0 -0
beratools/tools/common.py CHANGED
@@ -1,885 +1,857 @@
1
- #!/usr/bin/env python3
2
- """ This file is intended to be hosting common functions for BERA Tools.
3
- """
4
-
5
- # This script is part of the BERA Tools geospatial library.
6
- # Author: Richard Zeng
7
- # Created: 12/04/2023
8
- # License: MIT
9
-
10
- # imports
11
- import sys
12
- import math
13
- import tempfile
14
- from pathlib import Path
15
- from collections import OrderedDict
16
- from itertools import zip_longest, compress
17
-
18
- import json
19
- import shlex
20
- import argparse
21
- import warnings
22
- import numpy as np
23
-
24
- import rasterio
25
- from rasterio import mask
26
-
27
- import fiona
28
- import shapely
29
- from shapely.affinity import rotate
30
- from shapely.ops import split, transform
31
- from shapely.geometry import shape, mapping, Point, LineString, box
32
-
33
- import pandas as pd
34
- import geopandas as gpd
35
- from osgeo import ogr, gdal
36
- from pyproj import CRS, Transformer
37
- from pyogrio import set_gdal_config_options
38
-
39
- from skimage.graph import MCP_Geometric, MCP_Connect
40
-
41
- from scipy import ndimage
42
- import xarray as xr
43
- from xrspatial import focal, convolution
44
-
45
- from beratools.core.tool_base import *
46
-
47
- # to suppress pandas UserWarning: Geometry column does not contain geometry when splitting lines
48
- warnings.simplefilter(action='ignore', category=UserWarning)
49
-
50
- # restore .shx for shapefile for using GDAL or pyogrio
51
- gdal.SetConfigOption('SHAPE_RESTORE_SHX', 'YES')
52
- set_gdal_config_options({'SHAPE_RESTORE_SHX': 'YES'})
53
-
54
- # suppress all kinds of warnings
55
- if not BT_DEBUGGING:
56
- gdal.SetConfigOption('CPL_LOG', 'NUL') # gdal warning
57
- warnings.filterwarnings("ignore") # suppress warnings
58
- warnings.simplefilter(action='ignore', category=UserWarning) # suppress Pandas UserWarning
59
-
60
-
61
- def clip_raster(in_raster_file, clip_geom, buffer=0.0, out_raster_file=None, ras_nodata=BT_NODATA):
62
- out_meta = None
63
- with (rasterio.open(in_raster_file)) as raster_file:
64
- out_meta = raster_file.meta
65
- if out_meta['nodata']:
66
- ras_nodata = out_meta['nodata']
67
- else:
68
- out_meta['nodata'] = ras_nodata
69
-
70
- clip_geo_buffer = [clip_geom.buffer(buffer)]
71
- out_image: np.ndarray
72
- out_image, out_transform = mask.mask(raster_file, clip_geo_buffer,
73
- crop=True, nodata=ras_nodata, filled=True)
74
-
75
- if out_meta['nodata']:
76
- out_image[out_image == out_meta['nodata']] = BT_NODATA
77
- ras_nodata = BT_NODATA
78
-
79
- height, width = out_image.shape[1:]
80
- out_meta.update({"driver": "GTiff",
81
- "height": height,
82
- "width": width,
83
- "transform": out_transform,
84
- "nodata": ras_nodata})
85
-
86
- if out_raster_file:
87
- with rasterio.open(out_raster_file, "w", **out_meta) as dest:
88
- dest.write(out_image)
89
- print('[Clip raster]: data saved to {}.'.format(out_raster_file))
90
-
91
- return out_image, out_meta
92
-
93
-
94
- def save_raster_to_file(in_raster_mem, in_meta, out_raster_file):
95
- """
96
-
97
- Parameters
98
- ----------
99
- in_raster_mem: npmpy raster
100
- in_meta: input meta
101
- out_raster_file: output raster file
102
-
103
- Returns
104
- -------
105
-
106
- """
107
- with rasterio.open(out_raster_file, "w", **in_meta) as dest:
108
- dest.write(in_raster_mem, indexes=1)
109
-
110
-
111
- def clip_lines(clip_geom, buffer, in_line_file, out_line_file):
112
- in_line = gpd.read_file(in_line_file)
113
- out_line = in_line.clip(clip_geom.buffer(buffer * BT_BUFFER_RATIO))
114
-
115
- if out_line_file and len(out_line) > 0:
116
- out_line.to_file(out_line_file)
117
- print('[Clip lines]: data saved to {}.'.format(out_line_file))
118
-
119
- return out_line
120
-
121
-
122
- def read_geoms_from_shapefile(in_file):
123
- geoms = []
124
- with fiona.open(in_file) as open_file:
125
- layer_crs = open_file.crs
126
- for geom in open_file:
127
- geoms.append(geom['geometry'])
128
-
129
- return geoms
130
-
131
-
132
- # Read feature from shapefile
133
- def read_feature_from_shapefile(in_file):
134
- shapes = []
135
- with fiona.open(in_file) as open_file:
136
- for feat in open_file:
137
- shapes.append([shape(feat.geometry), feat.properties])
138
-
139
- return shapes
140
-
141
-
142
- def generate_raster_footprint(in_raster, latlon=True):
143
- inter_img = 'image_overview.tif'
144
-
145
- # get raster datasource
146
- src_ds = gdal.Open(in_raster)
147
- width, height = src_ds.RasterXSize, src_ds.RasterYSize
148
- src_crs = src_ds.GetSpatialRef().ExportToWkt()
149
-
150
- geom = None
151
- with tempfile.TemporaryDirectory() as tmp_folder:
152
- if BT_DEBUGGING:
153
- print('Temporary folder: {}'.format(tmp_folder))
154
-
155
- if max(width, height) <= 1024:
156
- inter_img = in_raster
157
- else:
158
- if width >= height:
159
- options = gdal.TranslateOptions(width=1024, height=0)
160
- else:
161
- options = gdal.TranslateOptions(width=0, height=1024)
162
-
163
- inter_img = Path(tmp_folder).joinpath(inter_img).as_posix()
164
- gdal.Translate(inter_img, src_ds, options=options)
165
-
166
- shapes = gdal.Footprint(None, inter_img, dstSRS=src_crs, format='GeoJSON')
167
- target_feat = shapes['features'][0]
168
- geom = shape(target_feat['geometry'])
169
-
170
- # coords = None
171
- # with rasterio.open(inter_img) as src:
172
- # if np.isnan(src.nodata):
173
- # geom = box(*src.bounds)
174
- # coords_geo = list(geom.exterior.coords)
175
- # else:
176
- # msk = src.read_masks(1)
177
- # shapes = features.shapes(msk, mask=msk)
178
- # shapes = list(shapes)
179
- # coords = shapes[0][0]['coordinates'][0]
180
- #
181
- # for pt in coords:
182
- # pt = rasterio.transform.xy(src.transform, pt[1], pt[0])
183
- # coords_geo.append(pt)
184
- #
185
- # coords_geo.pop(-1)
186
-
187
- if latlon:
188
- out_crs = CRS('EPSG:4326')
189
- transformer = Transformer.from_crs(CRS(src_crs), out_crs)
190
-
191
- geom = transform(transformer.transform, geom)
192
- # coords_geo = list(transformer.itransform(coords_geo))
193
- # coords_geo = [list(pt) for pt in coords_geo]
194
-
195
- return geom
196
-
197
-
198
- def remove_nan_from_array(matrix):
199
- with np.nditer(matrix, op_flags=['readwrite']) as it:
200
- for x in it:
201
- if np.isnan(x[...]):
202
- x[...] = BT_NODATA_COST
203
-
204
-
205
- def replace_Nodata2NaN(matrix, nodata):
206
- with np.nditer(matrix, op_flags=['readwrite']) as it:
207
- for x in it:
208
- if (x[...] == nodata):
209
- x[...] = np.NaN
210
-
211
-
212
- def replace_Nodata2Inf(matrix, nodata):
213
- with np.nditer(matrix, op_flags=['readwrite']) as it:
214
- for x in it:
215
- if (x[...] == nodata):
216
- x[...] = np.Inf
217
-
218
-
219
- # Split LineString to segments at vertices
220
- def segments(line_coords):
221
- if len(line_coords) < 2:
222
- return None
223
- elif len(line_coords) == 2:
224
- return [fiona.Geometry.from_dict({'type': 'LineString', 'coordinates': line_coords})]
225
- else:
226
- seg_list = zip(line_coords[:-1], line_coords[1:])
227
- line_list = [{'type': 'LineString', 'coordinates': coords} for coords in seg_list]
228
- return [fiona.Geometry.from_dict(line) for line in line_list]
229
-
230
-
231
- def extract_string_from_printout(str_print, str_extract):
232
- str_array = shlex.split(str_print) # keep string in double quotes
233
- str_array_enum = enumerate(str_array)
234
- index = 0
235
- for item in str_array_enum:
236
- if str_extract in item[1]:
237
- index = item[0]
238
- break
239
- str_out = str_array[index]
240
- return str_out.strip()
241
-
242
-
243
- def check_arguments():
244
- # Get tool arguments
245
- parser = argparse.ArgumentParser()
246
- parser.add_argument('-i', '--input', type=json.loads)
247
- parser.add_argument('-p', '--processes')
248
- parser.add_argument('-v', '--verbose')
249
- args = parser.parse_args()
250
-
251
- verbose = True if args.verbose == 'True' else False
252
- for item in args.input:
253
- if args.input[item] == 'false':
254
- args.input[item] = False
255
- elif args.input[item] == 'true':
256
- args.input[item] = True
257
-
258
- return args, verbose
259
-
260
-
261
- def save_features_to_shapefile(out_file, crs, geoms, properties=None, schema=None):
262
- """
263
-
264
- Parameters
265
- ----------
266
- out_file :
267
- crs :
268
- geoms : shapely geometry objects
269
- schema :
270
- properties :
271
-
272
- Returns
273
- -------
274
-
275
- """
276
- # remove all None items
277
- # TODO: check geom type consistency
278
- # geoms = [item for item in geoms if item is not None]
279
-
280
- if len(geoms) < 1:
281
- return
282
-
283
- try:
284
- geom_type = mapping(geoms[0])['type']
285
- except Exception as e:
286
- print(e)
287
-
288
- if not schema:
289
- props_tuple = zip([], []) # if lengths are not the same, ValueError raises
290
- props_schema = [(item, type(value).__name__) for item, value in props_tuple]
291
-
292
- schema = {
293
- 'geometry': geom_type,
294
- 'properties': OrderedDict([])
295
- }
296
-
297
- properties = None
298
-
299
- driver = 'ESRI Shapefile'
300
- print('Writing to shapefile {}'.format(out_file))
301
-
302
- try:
303
- out_line_file = fiona.open(out_file, 'w', driver, schema, crs)
304
- except Exception as e:
305
- print(e)
306
- out_line_file.close()
307
- return
308
-
309
- if properties:
310
- feat_tuple = zip_longest(geoms, properties)
311
- else: # properties are None
312
- feat_tuple = [(item, None) for item in geoms]
313
-
314
- try:
315
- for geom, prop in feat_tuple:
316
- if geom:
317
- feature = {
318
- 'geometry': mapping(geom),
319
- 'properties': prop
320
- }
321
-
322
- out_line_file.write(feature)
323
- except Exception as e:
324
- print(e)
325
-
326
- out_line_file.close()
327
-
328
-
329
- def vector_crs(in_vector):
330
- vec_crs = None
331
- with ogr.Open(in_vector) as vector_file:
332
- if vector_file:
333
- vec_crs = vector_file.GetLayer().GetSpatialRef()
334
-
335
- return vec_crs
336
-
337
-
338
- def raster_crs(in_raster):
339
- ras_crs = None
340
- with gdal.Open(in_raster) as raster_file:
341
- if raster_file:
342
- ras_crs = raster_file.GetSpatialRef()
343
-
344
- return ras_crs
345
-
346
-
347
- def compare_crs(crs_org, crs_dst):
348
- if crs_org and crs_dst:
349
- if crs_org.IsSameGeogCS(crs_dst):
350
- print('Check: Input file Spatial Reference are the same, continue.')
351
- return True
352
- else:
353
- crs_org_norm = CRS(crs_org.ExportToWkt())
354
- crs_dst_norm = CRS(crs_dst.ExportToWkt())
355
- if crs_org_norm.is_compound:
356
- crs_org_proj = crs_org_norm.sub_crs_list[0].coordinate_operation.name
357
- elif crs_org_norm.name == 'unnamed':
358
- return False
359
- else:
360
- crs_org_proj = crs_org_norm.coordinate_operation.name
361
-
362
- if crs_dst_norm.is_compound:
363
- crs_dst_proj = crs_dst_norm.sub_crs_list[0].coordinate_operation.name
364
- elif crs_org_norm.name == 'unnamed':
365
- return False
366
- else:
367
- crs_dst_proj = crs_dst_norm.coordinate_operation.name
368
-
369
- if crs_org_proj == crs_dst_proj:
370
- print('Checked: Input files Spatial Reference are the same, continue.')
371
- return True
372
-
373
- return False
374
-
375
-
376
- def identity_polygon(line_args):
377
- """
378
- Return polygon of line segment
379
-
380
- Parameters
381
- ----------
382
- line_args : list of geodataframe
383
- 0 : geodataframe line segment, one item
384
- 1 : geodataframe line buffer, one item
385
- 2 : geodataframe polygons returned by spatial search
386
-
387
- Returns
388
- -------
389
- line, identity : tuple of line and associated footprint
390
-
391
- """
392
- line = line_args[0]
393
- in_cl_buffer = line_args[1][['geometry', 'OLnFID']]
394
- in_fp_polygon = line_args[2]
395
-
396
- identity = None
397
- try:
398
- # drop polygons not intersecting with line segment
399
- line_geom = line.iloc[0].geometry
400
- drop_list = []
401
- for i in in_fp_polygon.index:
402
- if not in_fp_polygon.loc[i].geometry.intersects(line_geom):
403
- drop_list.append(i)
404
- elif line_geom.intersection(in_fp_polygon.loc[i].geometry).length / line_geom.length < 0.30:
405
- drop_list.append(i) # if less the 1/5 of line is inside of polygon, ignore
406
-
407
- # drop all polygons not used
408
- in_fp_polygon = in_fp_polygon.drop(index=drop_list)
409
-
410
- if not in_fp_polygon.empty:
411
- identity = in_fp_polygon.overlay(in_cl_buffer, how='intersection')
412
- except Exception as e:
413
- print(e)
414
-
415
- return line, identity
416
-
417
-
418
- def line_split2(in_ln_shp, seg_length):
419
- # Check the OLnFID column in data. If it is not, column will be created
420
- if 'OLnFID' not in in_ln_shp.columns.array:
421
- if BT_DEBUGGING:
422
- print("Cannot find {} column in input line data")
423
-
424
- print("New column created: {}".format('OLnFID', 'OLnFID'))
425
- in_ln_shp['OLnFID'] = in_ln_shp.index
426
- line_seg = split_into_Equal_Nth_segments(in_ln_shp, seg_length)
427
-
428
- return line_seg
429
-
430
-
431
- def split_into_Equal_Nth_segments(df, seg_length):
432
- odf = df
433
- crs = odf.crs
434
- if 'OLnSEG' not in odf.columns.array:
435
- df['OLnSEG'] = np.nan
436
- df = odf.assign(geometry=odf.apply(lambda x: cut_line(x.geometry, seg_length), axis=1))
437
- # df = odf.assign(geometry=odf.apply(lambda x: cut_line(x.geometry, x.geometry.length), axis=1))
438
- df = df.explode()
439
-
440
- df['OLnSEG'] = df.groupby('OLnFID').cumcount()
441
- gdf = gpd.GeoDataFrame(df, geometry=df.geometry, crs=crs)
442
- gdf = gdf.sort_values(by=['OLnFID', 'OLnSEG'])
443
- gdf = gdf.reset_index(drop=True)
444
-
445
- if "shape_leng" in gdf.columns.array:
446
- gdf["shape_leng"] = gdf.geometry.length
447
- elif "LENGTH" in gdf.columns.array:
448
- gdf["LENGTH"] = gdf.geometry.length
449
- else:
450
- gdf["shape_leng"] = gdf.geometry.length
451
- return gdf
452
-
453
-
454
- def split_line_nPart(line, seg_length):
455
- seg_line = shapely.segmentize(line, seg_length)
456
- distances = np.arange(seg_length, line.length, seg_length)
457
-
458
- if len(distances) > 0:
459
- points = [shapely.line_interpolate_point(seg_line, distance) for distance in distances]
460
-
461
- split_points = shapely.multipoints(points)
462
- mline = split(seg_line, split_points)
463
- else:
464
- mline = seg_line
465
-
466
- return mline
467
-
468
-
469
- def cut_line(line, distance):
470
- """
471
-
472
- Parameters
473
- ----------
474
- line : LineString line to be split by distance along line
475
- distance : float length of segment to cut
476
-
477
- Returns
478
- -------
479
- List of LineString
480
- """
481
- lines = list()
482
- lines = cut(line, distance, lines)
483
- return lines
484
-
485
-
486
- def cut(line, distance, lines):
487
- # Cuts a line in several segments at a distance from its starting point
488
- if line.has_z:
489
- line = transform(lambda x, y, z=None: (x, y), line)
490
-
491
- if shapely.is_empty(line) or shapely.is_missing(line):
492
- return None
493
- # else:
494
- if math.fmod(line.length, distance) < 1:
495
- return [line]
496
- elif distance >= line.length:
497
- return [line]
498
- # else:
499
- end_pt = None
500
- line = shapely.segmentize(line, distance)
501
-
502
- while line.length > distance:
503
- coords = list(line.coords)
504
- for i, p in enumerate(coords):
505
- pd = line.project(Point(p))
506
-
507
- if abs(pd - distance) < BT_EPSILON:
508
- lines.append(LineString(coords[:i + 1]))
509
- line = LineString(coords[i:])
510
- end_pt = None
511
- break
512
- elif pd > distance:
513
- end_pt = line.interpolate(distance)
514
- lines.append(LineString(coords[:i] + list(end_pt.coords)))
515
- line = LineString(list(end_pt.coords) + coords[i:])
516
- break
517
-
518
- if end_pt:
519
- lines.append(line)
520
- return lines
521
-
522
-
523
- def line_angle(point_1, point_2):
524
- """
525
- Calculates the angle of the line
526
-
527
- Parameters
528
- ----------
529
- point_1, point_2: start and end points of shapely line
530
- """
531
- delta_y = point_2.y - point_1.y
532
- delta_x = point_2.x - point_1.x
533
-
534
- angle = math.atan2(delta_y, delta_x)
535
- return angle
536
-
537
-
538
- def generate_perpendicular_line_precise(points, offset=20):
539
- """
540
- Generate a perpendicular line to the input line at the given point.
541
-
542
- Parameters
543
- ----------
544
- points : shapely.geometry.Point list
545
- The points on the line where the perpendicular should be generated.
546
- offset : float, optional
547
- The length of the perpendicular line.
548
-
549
- Returns
550
- -------
551
- shapely.geometry.LineString
552
- The generated perpendicular line.
553
- """
554
- # Compute the angle of the line
555
- center = points[1]
556
- perp_line = None
557
-
558
- if len(points) == 2:
559
- head = points[0]
560
- tail = points[1]
561
-
562
- delta_x = head.x - tail.x
563
- delta_y = head.y - tail.y
564
- angle = 0.0
565
-
566
- if math.isclose(delta_x, 0.0):
567
- angle = math.pi / 2
568
- else:
569
- angle = math.atan(delta_y / delta_x)
570
-
571
- start = [center.x + offset / 2.0, center.y]
572
- end = [center.x - offset / 2.0, center.y]
573
- line = LineString([start, end])
574
- perp_line = rotate(line, angle + math.pi / 2.0, origin=center, use_radians=True)
575
- elif len(points) == 3:
576
- head = points[0]
577
- tail = points[2]
578
-
579
- angle_1 = line_angle(center, head)
580
- angle_2 = line_angle(center, tail)
581
- angle_diff = (angle_2 - angle_1) / 2.0
582
- head_new = Point(center.x + offset / 2.0 * math.cos(angle_1), center.y + offset / 2.0 * math.sin(angle_1))
583
- if head.has_z:
584
- head_new = shapely.force_3d(head_new)
585
- try:
586
- perp_seg_1 = LineString([center, head_new])
587
- perp_seg_1 = rotate(perp_seg_1, angle_diff, origin=center, use_radians=True)
588
- perp_seg_2 = rotate(perp_seg_1, math.pi, origin=center, use_radians=True)
589
- perp_line = LineString([list(perp_seg_1.coords)[1], list(perp_seg_2.coords)[1]])
590
- except Exception as e:
591
- print(e)
592
-
593
- return perp_line
594
-
595
-
596
- def corridor_raster(raster_clip, out_meta, source, destination, cell_size, corridor_threshold):
597
- """
598
- Calculate corridor raster
599
- Parameters
600
- ----------
601
- raster_clip : raster
602
- out_meta : raster file meta
603
- source : list of point tuple(s)
604
- start point in row/col
605
- destination : list of point tuple(s)
606
- end point in row/col
607
- cell_size: tuple
608
- (cell_size_x, cell_size_y)
609
- corridor_threshold : double
610
-
611
- Returns
612
- -------
613
- corridor raster
614
- """
615
-
616
- try:
617
- # change all nan to BT_NODATA_COST for workaround
618
- if len(raster_clip.shape) > 2:
619
- raster_clip = np.squeeze(raster_clip, axis=0)
620
- remove_nan_from_array(raster_clip)
621
-
622
- # generate the cost raster to source point
623
- mcp_source = MCP_Geometric(raster_clip, sampling=cell_size)
624
- source_cost_acc = mcp_source.find_costs(source)[0]
625
- del mcp_source
626
-
627
- # # # generate the cost raster to destination point
628
- mcp_dest = MCP_Geometric(raster_clip, sampling=cell_size)
629
- dest_cost_acc = mcp_dest.find_costs(destination)[0]
630
-
631
- # Generate corridor
632
- corridor = source_cost_acc + dest_cost_acc
633
- corridor = np.ma.masked_invalid(corridor)
634
-
635
- # Calculate minimum value of corridor raster
636
- if not np.ma.min(corridor) is None:
637
- corr_min = float(np.ma.min(corridor))
638
- else:
639
- corr_min = 0.5
640
-
641
- # normalize corridor raster by deducting corr_min
642
- corridor_norm = corridor - corr_min
643
- corridor_thresh_cl = np.ma.where(corridor_norm >= corridor_threshold, 1.0, 0.0)
644
-
645
- except Exception as e:
646
- print(e)
647
- print('corridor_raster: Exception occurred.')
648
- return None
649
-
650
- return corridor_thresh_cl
651
-
652
-
653
- def LCP_skimage_mcp_connect(cost_clip, in_meta, seed_line):
654
- lc_path_new = []
655
- if len(cost_clip.shape) > 2:
656
- cost_clip = np.squeeze(cost_clip, axis=0)
657
-
658
- out_transform = in_meta['transform']
659
- transformer = rasterio.transform.AffineTransformer(out_transform)
660
-
661
- x1, y1 = list(seed_line.coords)[0][:2]
662
- x2, y2 = list(seed_line.coords)[-1][:2]
663
- source = [transformer.rowcol(x1, y1)]
664
- destination = [transformer.rowcol(x2, y2)]
665
-
666
- try:
667
-
668
- init_obj1 = MCP_Connect(cost_clip)
669
- results = init_obj1.find_costs(source, destination)
670
- # init_obj2 = MCP_Geometric(cost_clip)
671
- path = []
672
- for end in destination:
673
- path.append(init_obj1.traceback(end))
674
- for row, col in path[0]:
675
- x, y = transformer.xy(row, col)
676
- lc_path_new.append((x, y))
677
- except Exception as e:
678
- print(e)
679
- return None
680
-
681
- if len(lc_path_new) < 2:
682
- print('No least cost path detected, pass.')
683
- return None
684
- else:
685
- lc_path_new = LineString(lc_path_new)
686
-
687
- return lc_path_new
688
-
689
-
690
- def chk_df_multipart(df, chk_shp_in_string):
691
- try:
692
- found = False
693
- if str.upper(chk_shp_in_string) in [x.upper() for x in df.geom_type.values]:
694
- found = True
695
- df = df.explode()
696
- if type(df) is gpd.geodataframe.GeoDataFrame:
697
- df['OLnSEG'] = df.groupby('OLnFID').cumcount()
698
- df = df.sort_values(by=['OLnFID', 'OLnSEG'])
699
- df = df.reset_index(drop=True)
700
- else:
701
- found = False
702
- return df, found
703
- except Exception as e:
704
- print(e)
705
- return df, False
706
-
707
-
708
- def dyn_fs_raster_stdmean(in_ndarray, kernel, nodata):
709
- # This function uses xrspatial which can handle large data but slow
710
- # print("Calculating Canopy Closure's Focal Statistic-Stand Deviation Raster ...")
711
- in_ndarray[in_ndarray == nodata] = np.nan
712
- result_ndarray = focal.focal_stats(xr.DataArray(in_ndarray), kernel, stats_funcs=['std', 'mean'])
713
-
714
- # Assign std and mean ndarray
715
- reshape_std_ndarray = result_ndarray[0].data # .reshape(-1)
716
- reshape_mean_ndarray = result_ndarray[1].data # .reshape(-1)
717
-
718
- return reshape_std_ndarray, reshape_mean_ndarray
719
-
720
-
721
- def dyn_smooth_cost(in_raster, max_line_dist, sampling):
722
- # print('Generating Cost Raster ...')
723
-
724
- # scipy way to do Euclidean distance transform
725
- euc_dist_array = ndimage.distance_transform_edt(np.logical_not(in_raster), sampling=sampling)
726
-
727
- smooth1 = float(max_line_dist) - euc_dist_array
728
- smooth1[smooth1 <= 0.0] = 0.0
729
- smooth_cost_array = smooth1 / float(max_line_dist)
730
-
731
- return smooth_cost_array
732
-
733
-
734
- def dyn_np_cost_raster(canopy_ndarray, cc_mean, cc_std, cc_smooth, avoidance, cost_raster_exponent):
735
- aM1a = (cc_mean - cc_std)
736
- aM1b = (cc_mean + cc_std)
737
- aM1 = np.divide(aM1a, aM1b, where=aM1b != 0, out=np.zeros(aM1a.shape, dtype=float))
738
- aM = (1 + aM1) / 2
739
- aaM = (cc_mean + cc_std)
740
- bM = np.where(aaM <= 0, 0, aM)
741
- cM = bM * (1 - avoidance) + (cc_smooth * avoidance)
742
- dM = np.where(canopy_ndarray == 1, 1, cM)
743
- eM = np.exp(dM)
744
- result = np.power(eM, float(cost_raster_exponent))
745
-
746
- return result
747
-
748
-
749
- def dyn_np_cc_map(in_array, canopy_ht_threshold, nodata):
750
- canopy_ht_threshold = 0.8
751
- canopy_ndarray = np.ma.where(in_array >= canopy_ht_threshold, 1., 0.).astype(float)
752
- canopy_ndarray = np.ma.filled(canopy_ndarray, nodata)
753
- # canopy_ndarray[canopy_ndarray==nodata]=np.NaN # TODO check the code, extra step?
754
-
755
- return canopy_ndarray
756
-
757
-
758
- def cost_raster(in_raster, meta):
759
- if len(in_raster.shape) > 2:
760
- in_raster = np.squeeze(in_raster, axis=0)
761
-
762
- # raster_clip, out_meta = clip_raster(self.in_raster, seed_line, self.line_radius)
763
- # in_raster = np.squeeze(in_raster, axis=0)
764
- cell_x, cell_y = meta['transform'][0], -meta['transform'][4]
765
-
766
- kernel = convolution.circle_kernel(cell_x, cell_y, 2.5)
767
- dyn_canopy_ndarray = dyn_np_cc_map(in_raster, FP_CORRIDOR_THRESHOLD, BT_NODATA)
768
- cc_std, cc_mean = dyn_fs_raster_stdmean(dyn_canopy_ndarray, kernel, BT_NODATA)
769
- cc_smooth = dyn_smooth_cost(dyn_canopy_ndarray, 2.5, [cell_x, cell_y])
770
-
771
- # TODO avoidance, re-use this code
772
- avoidance = max(min(float(0.4), 1), 0)
773
- cost_clip = dyn_np_cost_raster(dyn_canopy_ndarray, cc_mean, cc_std,
774
- cc_smooth, 0.4, 1.5)
775
-
776
- # TODO use nan or BT_DATA?
777
- cost_clip[in_raster == BT_NODATA] = np.nan
778
- dyn_canopy_ndarray[in_raster == BT_NODATA] = np.nan
779
-
780
- return cost_clip, dyn_canopy_ndarray
781
-
782
-
783
- def generate_line_args_NoClipraster(line_seg, work_in_buffer, in_chm_obj, in_chm, tree_radius, max_line_dist,
784
- canopy_avoidance, exponent, canopy_thresh_percentage):
785
- line_argsC = []
786
-
787
- for record in range(0, len(work_in_buffer)):
788
- try:
789
- line_bufferC = work_in_buffer.loc[record, 'geometry']
790
-
791
- nodata = BT_NODATA
792
- line_argsC.append([in_chm, float(work_in_buffer.loc[record, 'DynCanTh']), float(tree_radius),
793
- float(max_line_dist), float(canopy_avoidance), float(exponent), in_chm_obj.res, nodata,
794
- line_seg.iloc[[record]], in_chm_obj.meta.copy(), record, 10, 'Center',
795
- canopy_thresh_percentage, line_bufferC])
796
- except Exception as e:
797
-
798
- print(e)
799
-
800
- step = record + 1
801
- total = len(work_in_buffer)
802
-
803
- print(f' "PROGRESS_LABEL Preparing lines {step} of {total}" ', flush=True)
804
- print(f' %{step / total * 100} ', flush=True)
805
-
806
- return line_argsC
807
-
808
-
809
- def generate_line_args_DFP_NoClip(line_seg, work_in_bufferL, work_in_bufferC, in_chm_obj,
810
- in_chm, tree_radius, max_line_dist, canopy_avoidance,
811
- exponent, work_in_bufferR, canopy_thresh_percentage):
812
- line_argsL = []
813
- line_argsR = []
814
- line_argsC = []
815
- line_id = 0
816
- for record in range(0, len(work_in_bufferL)):
817
- line_bufferL = work_in_bufferL.loc[record, 'geometry']
818
- line_bufferC = work_in_bufferC.loc[record, 'geometry']
819
- LCut = work_in_bufferL.loc[record, 'LDist_Cut']
820
-
821
- nodata = BT_NODATA
822
- line_argsL.append([in_chm, float(work_in_bufferL.loc[record, 'DynCanTh']), float(tree_radius),
823
- float(max_line_dist), float(canopy_avoidance), float(exponent), in_chm_obj.res, nodata,
824
- line_seg.iloc[[record]], in_chm_obj.meta.copy(), line_id, LCut, 'Left',
825
- canopy_thresh_percentage, line_bufferL])
826
-
827
- line_argsC.append([in_chm, float(work_in_bufferC.loc[record, 'DynCanTh']), float(tree_radius),
828
- float(max_line_dist), float(canopy_avoidance), float(exponent), in_chm_obj.res, nodata,
829
- line_seg.iloc[[record]], in_chm_obj.meta.copy(), line_id, 10, 'Center',
830
- canopy_thresh_percentage, line_bufferC])
831
-
832
- line_id += 1
833
-
834
- line_id = 0
835
- for record in range(0, len(work_in_bufferR)):
836
- line_bufferR = work_in_bufferR.loc[record, 'geometry']
837
- RCut = work_in_bufferR.loc[record, 'RDist_Cut']
838
- # clipped_rasterR, out_transformR = rasterio.mask.mask(in_chm, [line_bufferR], crop=True,
839
- # nodata=BT_NODATA, filled=True)
840
- # clipped_rasterR = np.squeeze(clipped_rasterR, axis=0)
841
- #
842
- # # make rasterio meta for saving raster later
843
- # out_metaR = in_chm.meta.copy()
844
- # out_metaR.update({"driver": "GTiff",
845
- # "height": clipped_rasterR.shape[0],
846
- # "width": clipped_rasterR.shape[1],
847
- # "nodata": BT_NODATA,
848
- # "transform": out_transformR})
849
- line_bufferC = work_in_bufferC.loc[record, 'geometry']
850
- # clipped_rasterC, out_transformC = rasterio.mask.mask(in_chm, [line_bufferC], crop=True,
851
- # nodata=BT_NODATA, filled=True)
852
- #
853
- # clipped_rasterC = np.squeeze(clipped_rasterC, axis=0)
854
- # out_metaC = in_chm.meta.copy()
855
- # out_metaC.update({"driver": "GTiff",
856
- # "height": clipped_rasterC.shape[0],
857
- # "width": clipped_rasterC.shape[1],
858
- # "nodata": BT_NODATA,
859
- # "transform": out_transformC})
860
-
861
- nodata = BT_NODATA
862
- # TODO deal with inherited nodata and BT_NODATA_COST
863
- # TODO convert nodata to BT_NODATA_COST
864
- line_argsR.append([in_chm, float(work_in_bufferR.loc[record, 'DynCanTh']), float(tree_radius),
865
- float(max_line_dist), float(canopy_avoidance), float(exponent), in_chm_obj.res, nodata,
866
- line_seg.iloc[[record]], in_chm_obj.meta.copy(), line_id, RCut, 'Right',
867
- canopy_thresh_percentage, line_bufferR])
868
-
869
- step = line_id + 1 + len(work_in_bufferL)
870
- total = len(work_in_bufferL) + len(work_in_bufferR)
871
- print(f' "PROGRESS_LABEL Preparing... {step} of {total}" ', flush=True)
872
- print(f' %{step / total * 100} ', flush=True)
873
-
874
- line_id += 1
875
-
876
- return line_argsL, line_argsR, line_argsC
877
-
878
-
879
- def chk_null_geometry(in_data):
880
- find = False
881
- if isinstance(in_data, gpd.GeoDataFrame):
882
- if len(in_data[(in_data.is_empty | in_data.isna())]) > 0:
883
- find = True
884
-
885
- return find
1
+ """
2
+ Copyright (C) 2025 Applied Geospatial Research Group.
3
+
4
+ This script is licensed under the GNU General Public License v3.0.
5
+ See <https://gnu.org/licenses/gpl-3.0> for full license details.
6
+
7
+ Author: Richard Zeng, Maverick Fong
8
+
9
+ Description:
10
+ This script is part of the BERA Tools.
11
+ Webpage: https://github.com/appliedgrg/beratools
12
+
13
+ This file is intended to be hosting common classes/functions for BERA Tools
14
+ """
15
+ import argparse
16
+ import json
17
+ import shlex
18
+ import warnings
19
+
20
+ import geopandas as gpd
21
+ import numpy as np
22
+ import osgeo
23
+ import pyogrio
24
+ import pyproj
25
+ import rasterio
26
+ import shapely
27
+ import shapely.geometry as sh_geom
28
+ import shapely.ops as sh_ops
29
+ import xarray as xr
30
+ import xrspatial
31
+ from osgeo import gdal
32
+ from rasterio import mask
33
+ from scipy import ndimage
34
+
35
+ import beratools.core.constants as bt_const
36
+
37
+ # suppress pandas UserWarning: Geometry column contains no geometry when splitting lines
38
+ warnings.simplefilter(action="ignore", category=UserWarning)
39
+
40
+ # restore .shx for shapefile for using GDAL or pyogrio
41
+ gdal.SetConfigOption("SHAPE_RESTORE_SHX", "YES")
42
+ pyogrio.set_gdal_config_options({"SHAPE_RESTORE_SHX": "YES"})
43
+
44
+ # suppress all kinds of warnings
45
+ if not bt_const.BT_DEBUGGING:
46
+ gdal.SetConfigOption("CPL_LOG", "NUL") # GDAL warning
47
+ warnings.filterwarnings("ignore") # suppress warnings
48
+ warnings.simplefilter(
49
+ action="ignore", category=UserWarning
50
+ ) # suppress Pandas UserWarning
51
+
52
+
53
def clip_raster(
    in_raster_file,
    clip_geom,
    buffer=0.0,
    out_raster_file=None,
    default_nodata=bt_const.BT_NODATA,
):
    """
    Clip a raster to a (optionally buffered) geometry and normalize nodata.

    Args:
        in_raster_file: Path to the source raster (opened with rasterio).
        clip_geom: Shapely geometry used as the clipping mask.
        buffer: Distance by which clip_geom is buffered before masking.
        out_raster_file: Optional path; when given, the clipped raster is
            also written to disk as GTiff.
        default_nodata: Nodata value used when the source declares none,
            and to which NaN/inf/source-nodata cells are normalized.

    Returns:
        (out_image, out_meta): masked ndarray of the clipped data and the
        updated rasterio metadata dict.
    """
    out_meta = None
    with rasterio.open(in_raster_file) as raster_file:
        out_meta = raster_file.meta
        ras_nodata = out_meta["nodata"]
        # Source raster may not declare a nodata value; fall back to default.
        if ras_nodata is None:
            ras_nodata = default_nodata

        clip_geo_buffer = [clip_geom.buffer(buffer)]
        out_image: np.ndarray
        out_image, out_transform = mask.mask(
            raster_file, clip_geo_buffer, crop=True, nodata=ras_nodata, filled=True
        )
        # Normalize whichever flavor of nodata the source uses (NaN, +/-inf,
        # or a literal value) to default_nodata.
        # NOTE(review): np.isnan/np.isinf on ras_nodata assume a float nodata
        # value — confirm integer-nodata rasters are not passed here.
        if np.isnan(ras_nodata):
            out_image[np.isnan(out_image)] = default_nodata

        elif np.isinf(ras_nodata):
            out_image[np.isinf(out_image)] = default_nodata
        else:
            out_image[out_image == ras_nodata] = default_nodata

        # Mask the nodata cells so downstream numpy.ma operations skip them.
        out_image = np.ma.masked_where(out_image == default_nodata, out_image)
        out_image.fill_value = default_nodata
        ras_nodata = default_nodata

        height, width = out_image.shape[1:]

        out_meta.update(
            {
                "driver": "GTiff",
                "height": height,
                "width": width,
                "transform": out_transform,
                "nodata": ras_nodata,
            }
        )

    if out_raster_file:
        with rasterio.open(out_raster_file, "w", **out_meta) as dest:
            dest.write(out_image)
            print("[Clip raster]: data saved to {}.".format(out_raster_file))

    return out_image, out_meta
102
+
103
+
104
+ # def clip_lines(clip_geom, buffer, in_line_file, out_line_file):
105
+ # in_line = gpd.read_file(in_line_file)
106
+ # out_line = in_line.clip(clip_geom.buffer(buffer * bt_const.BT_BUFFER_RATIO))
107
+
108
+ # if out_line_file and len(out_line) > 0:
109
+ # out_line.to_file(out_line_file)
110
+ # print("[Clip lines]: data saved to {}.".format(out_line_file))
111
+
112
+ # return out_line
113
+
114
+
115
+ # def read_geoms_from_shapefile(in_file):
116
+ # geoms = []
117
+ # with fiona.open(in_file) as open_file:
118
+ # for geom in open_file:
119
+ # geoms.append(geom['geometry'])
120
+
121
+ # return geoms
122
+
123
+
124
+ # def read_feature_from_shapefile(in_file):
125
+ # """ Read feature from shapefile
126
+
127
+ # Args:
128
+ # in_file (str): file name
129
+
130
+ # Returns:
131
+ # list: list of features
132
+ # """
133
+ # shapes = []
134
+ # with fiona.open(in_file) as open_file:
135
+ # for feat in open_file:
136
+ # shapes.append([shape(feat.geometry), feat.properties])
137
+
138
+ # return shapes
139
+
140
+
141
def remove_nan_from_array(matrix):
    """
    Replace every NaN cell in *matrix* with bt_const.BT_NODATA_COST.

    Args:
        matrix: numpy float ndarray; modified in place, nothing returned.
    """
    # Vectorized in-place replacement: one C-level pass instead of the
    # original Python-level np.nditer loop over every element.
    matrix[np.isnan(matrix)] = bt_const.BT_NODATA_COST
146
+
147
+
148
+ # def replace_Nodata2NaN(matrix, nodata):
149
+ # with np.nditer(matrix, op_flags=["readwrite"]) as it:
150
+ # for x in it:
151
+ # if x[...] == nodata:
152
+ # x[...] = np.NaN
153
+
154
+
155
+ # def replace_Nodata2Inf(matrix, nodata):
156
+ # with np.nditer(matrix, op_flags=["readwrite"]) as it:
157
+ # for x in it:
158
+ # if x[...] == nodata:
159
+ # x[...] = np.Inf
160
+
161
+
162
+ # Split LineString to segments at vertices
163
+ # def segments(line_coords):
164
+ # if len(line_coords) < 2:
165
+ # return None
166
+ # elif len(line_coords) == 2:
167
+ # return [fiona.Geometry.from_dict({'type': 'LineString', 'coordinates': line_coords})]
168
+ # else:
169
+ # seg_list = zip(line_coords[:-1], line_coords[1:])
170
+ # line_list = [{'type': 'LineString', 'coordinates': coords} for coords in seg_list]
171
+ # return [fiona.Geometry.from_dict(line) for line in line_list]
172
+
173
+
174
def extract_string_from_printout(str_print, str_extract):
    """
    Return the first shell-style token of *str_print* containing *str_extract*.

    Tokens are produced with shlex.split, so double-quoted substrings stay
    together as one token. When no token matches, the first token is
    returned. The result is stripped of surrounding whitespace.
    """
    tokens = shlex.split(str_print)  # keep string in double quotes
    match = next((tok for tok in tokens if str_extract in tok), None)
    if match is None:
        match = tokens[0]
    return match.strip()
184
+
185
+
186
def check_arguments():
    """
    Parse the standard BERA tool command-line arguments.

    Flags:
        -i/--input: JSON object holding the tool parameters.
        -p/--processes: number of worker processes (kept as string).
        -v/--verbose: "True"/"False" flag.

    Returns:
        (args, verbose): the argparse namespace — with "true"/"false"
        string values in args.input converted to Python booleans — and the
        verbose flag as a bool.
    """
    # Get tool arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("-i", "--input", type=json.loads)
    parser.add_argument("-p", "--processes")
    parser.add_argument("-v", "--verbose")
    args = parser.parse_args()

    verbose = args.verbose == "True"
    for item in args.input:
        value = args.input[item]
        # BUG FIX: json.loads may already yield non-string values (bool,
        # int, ...); calling .lower() on those raised AttributeError.
        if isinstance(value, str):
            if value.lower() == "false":
                args.input[item] = False
            elif value.lower() == "true":
                args.input[item] = True

    return args, verbose
202
+
203
+
204
+ # def save_features_to_file(out_file, crs, geoms, properties=None, schema=None,
205
+ # driver='ESRI Shapefile', layer=None):
206
+ # """
207
+
208
+ # Args:
209
+ # out_file :
210
+ # crs :
211
+ # geoms : shapely geometry objects
212
+ # schema :
213
+ # properties :
214
+ # driver:
215
+ # layer:
216
+ # """
217
+ # # remove all None items
218
+ # # TODO: check geom type consistency
219
+ # if len(geoms) < 1:
220
+ # return
221
+
222
+ # try:
223
+ # geom_type = mapping(geoms[0])['type']
224
+ # except Exception as e:
225
+ # print(e)
226
+
227
+ # if not schema:
228
+ # props_tuple = zip([], []) # if lengths are not the same, ValueError raises
229
+ # props_schema = [(item, type(value).__name__) for item, value in props_tuple]
230
+
231
+ # schema = {
232
+ # 'geometry': geom_type,
233
+ # 'properties': OrderedDict([])
234
+ # }
235
+
236
+ # properties = None
237
+
238
+ # print('Writing to file {}'.format(out_file), flush=True)
239
+
240
+ # try:
241
+ # out_line_file = fiona.open(out_file, 'w', driver, schema, crs, layer=layer)
242
+ # except Exception as e:
243
+ # print(e)
244
+ # out_line_file.close()
245
+ # return
246
+
247
+ # if properties:
248
+ # feat_tuple = zip_longest(geoms, properties)
249
+ # else: # properties are None
250
+ # feat_tuple = [(item, None) for item in geoms]
251
+
252
+ # try:
253
+ # for geom, prop in feat_tuple:
254
+ # if geom:
255
+ # feature = {
256
+ # 'geometry': mapping(geom),
257
+ # 'properties': prop
258
+ # }
259
+
260
+ # out_line_file.write(feature)
261
+ # except Exception as e:
262
+ # print(e)
263
+
264
+ # out_line_file.close()
265
+
266
+
267
def vector_crs(in_vector):
    """
    Return the CRS of a vector file as an osgeo.osr.SpatialReference.

    Args:
        in_vector: Path to any vector dataset readable by GeoPandas.

    Returns:
        osgeo.osr.SpatialReference built from the layer CRS.

    Note:
        Exits the process when the file has no CRS or reading fails.
    """
    osr_crs = osgeo.osr.SpatialReference()
    from pyproj.enums import WktVersion

    vec_crs = None
    # open input vector data as GeoDataFrame
    gpd_vector = gpd.GeoDataFrame.from_file(in_vector)
    try:
        if gpd_vector.crs is not None:
            vec_crs = gpd_vector.crs
            # GDAL < 3 only understands WKT1; newer GDAL can take the EPSG code.
            # NOTE(review): to_epsg() can return None for exotic CRS, which
            # would raise inside ImportFromEPSG and hit the exit() below.
            if osgeo.version_info.major < 3:
                osr_crs.ImportFromWkt(vec_crs.to_wkt(WktVersion.WKT1_GDAL))
            else:
                osr_crs.ImportFromEPSG(vec_crs.to_epsg())
            return osr_crs
        else:
            print(
                "No CRS found in the input feature, please check!"
            )
            exit()
    except Exception as e:
        print(e)
        exit()
290
+
291
+
292
+ # def df_crs(in_df):
293
+ # vec_crs = None
294
+ # osr_crs = osgeo.osr.SpatialReference()
295
+ # from pyproj.enums import WktVersion
296
+
297
+ # try:
298
+ # if in_df.crs is not None:
299
+ # vec_crs = in_df.crs
300
+ # if osgeo.version_info.major < 3:
301
+ # osr_crs.ImportFromWkt(vec_crs.to_wkt(WktVersion.WKT1_GDAL))
302
+ # else:
303
+ # osr_crs.ImportFromEPSG(vec_crs.to_epsg())
304
+ # return osr_crs
305
+ # else:
306
+ # print(
307
+ # "No Coordinate Reference System (CRS) find in the input feature, please check!"
308
+ # )
309
+ # exit()
310
+ # except Exception as e:
311
+ # print(e)
312
+ # exit()
313
+
314
+
315
def raster_crs(in_raster):
    """
    Return the CRS of a raster file as an osgeo.osr.SpatialReference.

    Args:
        in_raster: Path to any raster readable by rasterio.

    Returns:
        osgeo.osr.SpatialReference built from the raster CRS.

    Note:
        Exits the process when the raster has no CRS or reading fails.
    """
    osr_crs = osgeo.osr.SpatialReference()
    with rasterio.open(in_raster) as raster_file:
        from pyproj.enums import WktVersion

        try:
            if raster_file.crs is not None:
                vec_crs = raster_file.crs
                # GDAL < 3 only understands WKT1; newer GDAL accepts EPSG.
                # NOTE(review): to_epsg() may return None for exotic CRS,
                # which would raise inside ImportFromEPSG and exit() below.
                if osgeo.version_info.major < 3:
                    osr_crs.ImportFromWkt(vec_crs.to_wkt(WktVersion.WKT1_GDAL))
                else:
                    osr_crs.ImportFromEPSG(vec_crs.to_epsg())
                return osr_crs
            else:
                print(
                    "No Coordinate Reference System (CRS) find in the input feature, please check!"
                )
                exit()
        except Exception as e:
            print(e)
            exit()
336
+
337
+
338
def compare_crs(crs_org, crs_dst):
    """
    Check whether two OSR spatial references are compatible.

    Args:
        crs_org, crs_dst: osgeo.osr.SpatialReference objects.

    Returns:
        True when the two references share the same geographic CS, or the
        same projection and name; False otherwise (including unnamed or
        unprojected CRS).

    Note:
        Exits the process when both datasets are on the same projection
        zone but use different spatial reference names.
    """
    if crs_org and crs_dst:
        if crs_org.IsSameGeogCS(crs_dst):
            print("Check: Input file Spatial Reference are the same, continue.")
            return True

        crs_org_norm = pyproj.CRS(crs_org.ExportToWkt())
        crs_dst_norm = pyproj.CRS(crs_dst.ExportToWkt())

        def _projection_name(crs_norm):
            # Compound CRS keeps its projection on the first sub-CRS;
            # unnamed CRS cannot be compared by name.
            if crs_norm.is_compound:
                operation = crs_norm.sub_crs_list[0].coordinate_operation
            elif crs_norm.name == "unnamed":
                return None
            else:
                operation = crs_norm.coordinate_operation
            # Geographic (unprojected) CRS has no coordinate operation;
            # returning None avoids an AttributeError on .name.
            return operation.name if operation is not None else None

        crs_org_proj = _projection_name(crs_org_norm)
        # BUG FIX: the original re-tested crs_org_norm.name == "unnamed"
        # here; the destination CRS must be checked instead.
        crs_dst_proj = _projection_name(crs_dst_norm)
        if crs_org_proj is None or crs_dst_proj is None:
            return False

        if crs_org_proj == crs_dst_proj:
            if crs_org_norm.name == crs_dst_norm.name:
                print("Input files Spatial Reference are the same, continue.")
                return True
            else:
                print(
                    """Checked: Data are on the same projected Zone but using
                    different Spatial Reference. \n Consider to re-project
                    all data onto same spatial reference system.\n Process Stop."""
                )
                exit()
        else:
            return False

    return False
375
+
376
+
377
def identity_polygon(line_args):
    """
    Return polygon of line segment.

    Args:
        line_args : list[GeoDataFrame]
            0 : GeoDataFrame line segment, one item
            1 : GeoDataFrame line buffer, one item
            2 : GeoDataFrame polygons returned by spatial search

    Returns:
        line, identity : tuple of line and associated footprint
        (identity is None when no polygon qualifies or an error occurs)

    """
    line = line_args[0]
    in_cl_buffer = line_args[1][["geometry", "OLnFID"]]
    in_fp_polygon = line_args[2]

    identity = None
    try:
        # drop polygons not intersecting with line segment
        line_geom = line.iloc[0].geometry
        drop_list = []
        for i in in_fp_polygon.index:
            if not in_fp_polygon.loc[i].geometry.intersects(line_geom):
                drop_list.append(i)
            elif (
                line_geom.intersection(in_fp_polygon.loc[i].geometry).length
                / line_geom.length
                < 0.30
            ):
                drop_list.append(
                    i
                )  # if less than 30% of the line is inside the polygon, ignore

        # drop all polygons not used
        in_fp_polygon = in_fp_polygon.drop(index=drop_list)

        # Intersect the remaining footprint polygons with the line buffer.
        if not in_fp_polygon.empty:
            identity = in_fp_polygon.overlay(in_cl_buffer, how="intersection")
    except Exception as e:
        # NOTE(review): any failure is swallowed and identity stays None.
        print(e)

    return line, identity
421
+
422
+
423
def line_split2(in_ln_shp, seg_length):
    """
    Ensure an OLnFID column exists, then split lines into equal segments.

    Args:
        in_ln_shp: GeoDataFrame of input lines; an OLnFID column is added
            from the index when missing (mutates the caller's frame).
        seg_length: Target length of each output segment.

    Returns:
        GeoDataFrame of split segments (see split_into_Equal_Nth_segments).
    """
    # Check the OLnFID column in data. If it is not, column will be created
    if "OLnFID" not in in_ln_shp.columns.array:
        if bt_const.BT_DEBUGGING:
            # BUG FIX: the original printed a literal, unfilled "{}"
            # placeholder here.
            print("Cannot find OLnFID column in input line data")

        # BUG FIX: the original printed the column name twice via a
        # malformed f-string.
        print("New column created: OLnFID")
        in_ln_shp["OLnFID"] = in_ln_shp.index
    line_seg = split_into_Equal_Nth_segments(in_ln_shp, seg_length)

    return line_seg
434
+
435
+
436
def split_into_Equal_Nth_segments(df, seg_length):
    """
    Split every line in *df* into segments of roughly seg_length.

    Args:
        df: GeoDataFrame of lines carrying an OLnFID column.
        seg_length: Target segment length in map units.

    Returns:
        GeoDataFrame with one row per segment, OLnSEG numbering the
        segments within each original line, sorted by (OLnFID, OLnSEG),
        with the length column refreshed.
    """
    odf = df
    crs = odf.crs
    # NOTE(review): this adds the OLnSEG column directly on the caller's
    # frame (odf is df) — confirm that side effect is intended.
    if "OLnSEG" not in odf.columns.array:
        df["OLnSEG"] = np.nan
    # Replace each geometry with the list of its cut segments, then explode
    # so every segment becomes its own row.
    df = odf.assign(
        geometry=odf.apply(lambda x: cut_line_by_length(x.geometry, seg_length), axis=1)
    )
    df = df.explode()

    df["OLnSEG"] = df.groupby("OLnFID").cumcount()
    gdf = gpd.GeoDataFrame(df, geometry=df.geometry, crs=crs)
    gdf = gdf.sort_values(by=["OLnFID", "OLnSEG"])
    gdf = gdf.reset_index(drop=True)

    # Refresh whichever length column the dataset carries; default to
    # creating "shape_leng" when neither exists.
    if "shape_leng" in gdf.columns.array:
        gdf["shape_leng"] = gdf.geometry.length
    elif "LENGTH" in gdf.columns.array:
        gdf["LENGTH"] = gdf.geometry.length
    else:
        gdf["shape_leng"] = gdf.geometry.length
    return gdf
458
+
459
+
460
def split_line_nPart(line, seg_length):
    """
    Split a line at every multiple of seg_length along its length.

    The line is first densified so cut points coincide with vertices.
    Returns the split geometry collection, or the densified line itself
    when it is shorter than seg_length.
    """
    densified = shapely.segmentize(line, seg_length)
    cut_distances = np.arange(seg_length, line.length, seg_length)

    if len(cut_distances) == 0:
        return densified

    cut_points = shapely.multipoints(
        [shapely.line_interpolate_point(densified, d) for d in cut_distances]
    )
    return sh_ops.split(densified, cut_points)
475
+
476
+
477
def cut_line_by_length(line, length, merge_threshold=0.5):
    """
    Split line into segments of equal length.

    Merge the last segment with the second-to-last if its length
    is smaller than the given threshold.

    Args:
        line : LineString
            Line to be split by distance along the line.
        length : float
            Length of each segment to cut.
        merge_threshold : float, optional
            Threshold below which the last segment is merged with the previous one. Default is 0.5.

    Returns:
        List of LineString objects
        A list containing the resulting line segments.

    Example:
        >>> from shapely.geometry import LineString
        >>> line = LineString([(0, 0), (10, 0)])
        >>> segments = cut_line_by_length(line, 3, merge_threshold=1)
        >>> for segment in segments:
        >>>     print(f"Segment: {segment}, Length: {segment.length}")

    Output:
        Segment: LINESTRING (0 0, 3 0), Length: 3.0
        Segment: LINESTRING (3 0, 6 0), Length: 3.0
        Segment: LINESTRING (6 0, 9 0), Length: 3.0
        Segment: LINESTRING (9 0, 10 0), Length: 1.0

    After merging the last segment with the second-to-last segment:

    Output:
        Segment: LINESTRING (0 0, 3 0), Length: 3.0
        Segment: LINESTRING (3 0, 6 0), Length: 3.0
        Segment: LINESTRING (6 0, 10 0), Length: 4.0

    """
    if line.has_z:
        # Remove the Z component of the line if it exists
        line = sh_ops.transform(lambda x, y, z=None: (x, y), line)

    if shapely.is_empty(line):
        return []

    # Segment the line based on the specified distance
    line = shapely.segmentize(line, length)
    lines = []
    end_pt = None

    # Repeatedly slice one `length`-long piece off the front of `line`
    # until the remainder is no longer than `length`.
    while line.length > length:
        coords = list(line.coords)

        for i, p in enumerate(coords):
            # Distance of this vertex along the current remainder.
            p_dist = line.project(sh_geom.Point(p))

            # Check if the distance matches closely and split the line
            if abs(p_dist - length) < 1e-9:  # Use a small epsilon value
                lines.append(sh_geom.LineString(coords[:i + 1]))
                line = sh_geom.LineString(coords[i:])
                end_pt = None
                break
            elif p_dist > length:
                # Cut point falls between vertices: interpolate it and use
                # it as both the end of this piece and the start of the rest.
                end_pt = line.interpolate(length)
                lines.append(sh_geom.LineString(coords[:i] + list(end_pt.coords)))
                line = sh_geom.LineString(list(end_pt.coords) + coords[i:])
                break

    # NOTE(review): the remainder is only kept when the last cut was an
    # interpolated one (end_pt set); an exact-vertex final cut leaves
    # end_pt None and the remainder is dropped — confirm intended.
    if end_pt:
        lines.append(line)

    # Handle the threshold condition: merge the last segment if its length is below the threshold
    if len(lines) > 1:
        if lines[-1].length < merge_threshold:
            # Merge the last segment with the second-to-last one
            lines[-2] = sh_geom.LineString(list(lines[-2].coords) + list(lines[-1].coords))
            lines.pop()  # Remove the last segment after merging

    return lines
558
+
559
+
560
+ # def LCP_skimage_mcp_connect(cost_clip, in_meta, seed_line):
561
+ # lc_path_new = []
562
+ # if len(cost_clip.shape) > 2:
563
+ # cost_clip = np.squeeze(cost_clip, axis=0)
564
+
565
+ # out_transform = in_meta["transform"]
566
+ # transformer = rasterio.transform.AffineTransformer(out_transform)
567
+
568
+ # x1, y1 = list(seed_line.coords)[0][:2]
569
+ # x2, y2 = list(seed_line.coords)[-1][:2]
570
+ # source = [transformer.rowcol(x1, y1)]
571
+ # destination = [transformer.rowcol(x2, y2)]
572
+
573
+ # try:
574
+ # init_obj1 = sk_graph.MCP_Connect(cost_clip)
575
+ # path = []
576
+ # for end in destination:
577
+ # path.append(init_obj1.traceback(end))
578
+ # for row, col in path[0]:
579
+ # x, y = transformer.xy(row, col)
580
+ # lc_path_new.append((x, y))
581
+ # except Exception as e:
582
+ # print(e)
583
+ # return None
584
+
585
+ # if len(lc_path_new) < 2:
586
+ # print("No least cost path detected, pass.")
587
+ # return None
588
+ # else:
589
+ # lc_path_new = sh_geom.LineString(lc_path_new)
590
+
591
+ # return lc_path_new
592
+
593
+
594
def chk_df_multipart(df, chk_shp_in_string):
    """
    Check for (and explode) multi-part geometries in a GeoDataFrame.

    Args:
        df: GeoDataFrame to inspect.
        chk_shp_in_string: Geometry type name to look for, e.g.
            "MultiLineString"; compared case-insensitively.

    Returns:
        (df, found): the possibly exploded frame and whether the geometry
        type was found.
    """
    try:
        found = False
        if str.upper(chk_shp_in_string) in [x.upper() for x in df.geom_type.values]:
            found = True
            # Explode multi-part geometries into single-part rows.
            df = df.explode()
            if type(df) is gpd.geodataframe.GeoDataFrame:
                # Renumber the resulting segments within each original line.
                df["OLnSEG"] = df.groupby("OLnFID").cumcount()
                df = df.sort_values(by=["OLnFID", "OLnSEG"])
                df = df.reset_index(drop=True)
            else:
                found = False
        return df, found
    except Exception as e:
        print(e)
        # NOTE(review): returns found=True on failure (the pre-0.2.1 code
        # returned False here) — confirm callers expect this.
        return df, True
610
+
611
+
612
def dyn_fs_raster_stdmean(canopy_ndarray, kernel, nodata):
    """
    Focal standard deviation and mean of a canopy raster.

    Args:
        canopy_ndarray: masked ndarray of the canopy raster.
        kernel: xrspatial convolution kernel defining the focal window.
        nodata: Unused; kept for interface compatibility.

    Returns:
        (std, mean): ndarrays of focal statistics; cells that were masked
        in the input come back as NaN.
    """
    # This function uses xrspatial which can handle large data but slow
    mask = canopy_ndarray.mask
    # BUG FIX: np.NaN was removed in NumPy 2.0 — use np.nan. Masked cells
    # become NaN so focal_stats ignores them.
    in_ndarray = np.ma.where(mask, np.nan, canopy_ndarray)
    result_ndarray = xrspatial.focal.focal_stats(
        xr.DataArray(in_ndarray.data), kernel, stats_funcs=["std", "mean"]
    )

    # Assign std and mean ndarray (return array contain NaN value)
    reshape_std_ndarray = result_ndarray[0].data
    reshape_mean_ndarray = result_ndarray[1].data

    return reshape_std_ndarray, reshape_mean_ndarray
625
+
626
+
627
def dyn_smooth_cost(canopy_ndarray, max_line_dist, sampling):
    """
    Distance-decay cost surface from the masked cells of a canopy raster.

    Computes the Euclidean distance from each valid cell to the nearest
    masked/NaN cell and converts it to a cost that is 1.0 adjacent to
    those cells and falls linearly to 0.0 at max_line_dist.

    Args:
        canopy_ndarray: masked ndarray of the canopy raster.
        max_line_dist: Distance at which the smoothed cost reaches zero.
        sampling: Per-axis cell size passed to the distance transform.

    Returns:
        ndarray of smoothed cost in [0, 1]; masked cells are NaN.
    """
    mask = canopy_ndarray.mask
    # BUG FIX: np.NaN was removed in NumPy 2.0 — use np.nan.
    in_ndarray = np.ma.where(mask, np.nan, canopy_ndarray)
    # scipy way to do Euclidean distance transform
    euc_dist_array = ndimage.distance_transform_edt(
        np.logical_not(np.isnan(in_ndarray.data)), sampling=sampling
    )
    euc_dist_array[mask] = np.nan
    smooth1 = float(max_line_dist) - euc_dist_array
    smooth1[smooth1 <= 0.0] = 0.0
    smooth_cost_array = smooth1 / float(max_line_dist)

    return smooth_cost_array
640
+
641
+
642
def dyn_np_cost_raster(
    canopy_ndarray, cc_mean, cc_std, cc_smooth, avoidance, cost_raster_exponent
):
    """
    Combine focal statistics and smoothed distance into a cost raster.

    Canopy cells (value 1) get a base cost of 1; elsewhere a normalized
    focal ratio is blended with the smoothed distance cost (weighted by
    *avoidance*), and the whole surface is exponentiated.
    """
    lower = cc_mean - cc_std
    upper = cc_mean + cc_std
    # Safe division: cells where mean + std == 0 get a ratio of 0.
    ratio = np.divide(
        lower, upper, where=upper != 0, out=np.zeros(lower.shape, dtype=float)
    )
    base = (1 + ratio) / 2
    base = np.where(upper <= 0, 0, base)
    blended = base * (1 - avoidance) + cc_smooth * avoidance
    with_canopy = np.where(canopy_ndarray.data == 1, 1, blended)
    return np.power(np.exp(with_canopy), float(cost_raster_exponent))
657
+
658
+
659
def dyn_np_cc_map(in_chm, canopy_ht_threshold, nodata):
    """
    Binary canopy-closure map: 1.0 where the CHM meets the height
    threshold, 0.0 elsewhere.

    Returns a masked float ndarray whose fill value is *nodata*.
    """
    is_canopy = in_chm >= canopy_ht_threshold
    cc_map = np.ma.where(is_canopy, 1.0, 0.0).astype(float)
    cc_map.fill_value = nodata
    return cc_map
664
+
665
+
666
+ # def morph_raster(corridor_thresh, canopy_raster, exp_shk_cell, cell_size_x):
667
+ # # Process: Stamp CC and Max Line Width
668
+ # ras_sum = corridor_thresh + canopy_raster
669
+ # raster_class = np.ma.where(ras_sum == 0, 1, 0).data
670
+
671
+ # if exp_shk_cell > 0 and cell_size_x < 1:
672
+ # # Process: Expand
673
+ # # FLM original Expand equivalent
674
+ # cell_size = int(exp_shk_cell * 2 + 1)
675
+ # expanded = ndimage.grey_dilation(raster_class, size=(cell_size, cell_size))
676
+
677
+ # # Process: Shrink
678
+ # # FLM original Shrink equivalent
679
+ # file_shrink = ndimage.grey_erosion(expanded, size=(cell_size, cell_size))
680
+
681
+ # else:
682
+ # if bt_const.BT_DEBUGGING:
683
+ # print("No Expand And Shrink cell performed.")
684
+ # file_shrink = raster_class
685
+
686
+ # # Process: Boundary Clean
687
+ # clean_raster = ndimage.gaussian_filter(file_shrink, sigma=0, mode="nearest")
688
+
689
+ # return clean_raster
690
+
691
+
692
+ # def generate_line_args_NoClipraster(
693
+ # line_seg,
694
+ # work_in_buffer,
695
+ # in_chm_obj,
696
+ # in_chm,
697
+ # tree_radius,
698
+ # max_line_dist,
699
+ # canopy_avoidance,
700
+ # exponent,
701
+ # canopy_thresh_percentage,
702
+ # ):
703
+ # line_argsC = []
704
+
705
+ # for record in range(0, len(work_in_buffer)):
706
+ # try:
707
+ # line_bufferC = work_in_buffer.loc[record, "geometry"]
708
+
709
+ # nodata = bt_const.BT_NODATA
710
+ # line_argsC.append(
711
+ # [
712
+ # in_chm,
713
+ # float(work_in_buffer.loc[record, "DynCanTh"]),
714
+ # float(tree_radius),
715
+ # float(max_line_dist),
716
+ # float(canopy_avoidance),
717
+ # float(exponent),
718
+ # in_chm_obj.res,
719
+ # nodata,
720
+ # line_seg.iloc[[record]],
721
+ # in_chm_obj.meta.copy(),
722
+ # record,
723
+ # 10,
724
+ # "Center",
725
+ # canopy_thresh_percentage,
726
+ # line_bufferC,
727
+ # ]
728
+ # )
729
+ # except Exception as e:
730
+ # print(e)
731
+
732
+ # step = record + 1
733
+ # total = len(work_in_buffer)
734
+
735
+ # print(f' "PROGRESS_LABEL Preparing lines {step} of {total}" ', flush=True)
736
+ # print(f" %{step / total * 100} ", flush=True)
737
+
738
+ # return line_argsC
739
+
740
+
741
def generate_line_args_DFP_NoClip(
    line_seg,
    work_in_bufferL,
    work_in_bufferC,
    in_chm_obj,
    in_chm,
    tree_radius,
    max_line_dist,
    canopy_avoidance,
    exponent,
    work_in_bufferR,
    canopy_thresh_percentage,
):
    """
    Build per-line worker-argument lists for dynamic-footprint processing.

    One argument list is produced per record for the left, center and
    right buffers. The 15-element list order must match the worker
    signature consumed by the dynamic footprint tools.

    Returns:
        (line_argsL, line_argsR, line_argsC): lists of argument lists.
    """

    def _worker_args(dyn_can_th, line_id, cut_dist, side, line_buffer, record):
        # One worker-argument list; element order is part of the contract.
        # TODO deal with inherited nodata and BT_NODATA_COST
        # TODO convert nodata to BT_NODATA_COST
        return [
            in_chm,
            float(dyn_can_th),
            float(tree_radius),
            float(max_line_dist),
            float(canopy_avoidance),
            float(exponent),
            in_chm_obj.res,
            bt_const.BT_NODATA,
            line_seg.iloc[[record]],
            in_chm_obj.meta.copy(),
            line_id,
            cut_dist,
            side,
            canopy_thresh_percentage,
            line_buffer,
        ]

    line_argsL = []
    line_argsR = []
    line_argsC = []

    # Left and center buffers share the record index as the line id.
    for record in range(len(work_in_bufferL)):
        line_argsL.append(
            _worker_args(
                work_in_bufferL.loc[record, "DynCanTh"],
                record,
                work_in_bufferL.loc[record, "LDist_Cut"],
                "Left",
                work_in_bufferL.loc[record, "geometry"],
                record,
            )
        )
        line_argsC.append(
            _worker_args(
                work_in_bufferC.loc[record, "DynCanTh"],
                record,
                10,
                "Center",
                work_in_bufferC.loc[record, "geometry"],
                record,
            )
        )

    total = len(work_in_bufferL) + len(work_in_bufferR)
    for record in range(len(work_in_bufferR)):
        line_argsR.append(
            _worker_args(
                work_in_bufferR.loc[record, "DynCanTh"],
                record,
                work_in_bufferR.loc[record, "RDist_Cut"],
                "Right",
                work_in_bufferR.loc[record, "geometry"],
                record,
            )
        )

        # Progress reporting counts right-buffer lines after the left ones.
        step = record + 1 + len(work_in_bufferL)
        print(f' "PROGRESS_LABEL Preparing... {step} of {total}" ', flush=True)
        print(f" %{step / total * 100} ", flush=True)

    return line_argsL, line_argsR, line_argsC
843
+
844
+
845
+ # def chk_null_geometry(in_data):
846
+ # find = False
847
+ # if isinstance(in_data, gpd.GeoDataFrame):
848
+ # if len(in_data[(in_data.is_empty | in_data.isna())]) > 0:
849
+ # find = True
850
+ #
851
+ # return find
852
+
853
+
854
+ # def read_data2gpd(in_data):
855
+ # print("Reading data.......")
856
+ # out_gpd_obj = gpd.GeoDataFrame.from_file(in_data)
857
+ # return out_gpd_obj