beratools-0.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. beratools/__init__.py +3 -0
  2. beratools/core/__init__.py +0 -0
  3. beratools/core/algo_centerline.py +476 -0
  4. beratools/core/algo_common.py +489 -0
  5. beratools/core/algo_cost.py +185 -0
  6. beratools/core/algo_dijkstra.py +492 -0
  7. beratools/core/algo_footprint_rel.py +693 -0
  8. beratools/core/algo_line_grouping.py +941 -0
  9. beratools/core/algo_merge_lines.py +255 -0
  10. beratools/core/algo_split_with_lines.py +296 -0
  11. beratools/core/algo_vertex_optimization.py +451 -0
  12. beratools/core/constants.py +56 -0
  13. beratools/core/logger.py +92 -0
  14. beratools/core/tool_base.py +126 -0
  15. beratools/gui/__init__.py +11 -0
  16. beratools/gui/assets/BERALogo.png +0 -0
  17. beratools/gui/assets/beratools.json +471 -0
  18. beratools/gui/assets/closed.gif +0 -0
  19. beratools/gui/assets/closed.png +0 -0
  20. beratools/gui/assets/gui.json +8 -0
  21. beratools/gui/assets/open.gif +0 -0
  22. beratools/gui/assets/open.png +0 -0
  23. beratools/gui/assets/tool.gif +0 -0
  24. beratools/gui/assets/tool.png +0 -0
  25. beratools/gui/bt_data.py +485 -0
  26. beratools/gui/bt_gui_main.py +700 -0
  27. beratools/gui/main.py +27 -0
  28. beratools/gui/tool_widgets.py +730 -0
  29. beratools/tools/__init__.py +7 -0
  30. beratools/tools/canopy_threshold_relative.py +769 -0
  31. beratools/tools/centerline.py +127 -0
  32. beratools/tools/check_seed_line.py +48 -0
  33. beratools/tools/common.py +622 -0
  34. beratools/tools/line_footprint_absolute.py +203 -0
  35. beratools/tools/line_footprint_fixed.py +480 -0
  36. beratools/tools/line_footprint_functions.py +884 -0
  37. beratools/tools/line_footprint_relative.py +75 -0
  38. beratools/tools/tool_template.py +72 -0
  39. beratools/tools/vertex_optimization.py +57 -0
  40. beratools-0.1.0.dist-info/METADATA +134 -0
  41. beratools-0.1.0.dist-info/RECORD +44 -0
  42. beratools-0.1.0.dist-info/WHEEL +4 -0
  43. beratools-0.1.0.dist-info/entry_points.txt +2 -0
  44. beratools-0.1.0.dist-info/licenses/LICENSE +22 -0
beratools/tools/common.py
@@ -0,0 +1,622 @@
+ """
+ Copyright (C) 2025 Applied Geospatial Research Group.
+
+ This script is licensed under the GNU General Public License v3.0.
+ See <https://gnu.org/licenses/gpl-3.0> for full license details.
+
+ Author: Richard Zeng, Maverick Fong
+
+ Description:
+     This script is part of the BERA Tools.
+     Webpage: https://github.com/appliedgrg/beratools
+
+     This file hosts common classes and functions for BERA Tools.
+ """
+
+ import argparse
+ import json
+ import shlex
+ import warnings
+
+ import geopandas as gpd
+ import numpy as np
+ import osgeo
+ import pyogrio
+ import pyproj
+ import rasterio
+ import shapely
+ import shapely.geometry as sh_geom
+ import shapely.ops as sh_ops
+ import xarray as xr
+ import xrspatial
+ from osgeo import gdal
+ from rasterio import mask
+ from scipy import ndimage
+
+ import beratools.core.constants as bt_const
+ from beratools.core.algo_merge_lines import custom_line_merge
+ from beratools.core.algo_split_with_lines import LineSplitter
+
+ # suppress the pandas UserWarning "Geometry column contains no geometry" raised when splitting lines
+ warnings.simplefilter(action="ignore", category=UserWarning)
+
+ # restore .shx for shapefiles when using GDAL or pyogrio
+ gdal.SetConfigOption("SHAPE_RESTORE_SHX", "YES")
+ pyogrio.set_gdal_config_options({"SHAPE_RESTORE_SHX": "YES"})
+
+ # suppress all warnings unless debugging
+ if not bt_const.BT_DEBUGGING:
+     gdal.SetConfigOption("CPL_LOG", "NUL")  # GDAL warnings
+     warnings.filterwarnings("ignore")  # all Python warnings
+     warnings.simplefilter(action="ignore", category=UserWarning)  # pandas UserWarning
+
+ def qc_merge_multilinestring(gdf):
+     """
+     QC step: Merge MultiLineStrings if possible, else split into LineStrings.
+
+     Args:
+         gdf (GeoDataFrame): Input GeoDataFrame.
+
+     Returns:
+         GeoDataFrame: Cleaned GeoDataFrame with only LineStrings.
+     """
+     records = []
+     for idx, row in gdf.iterrows():
+         geom = row.geometry
+         # Try to merge MultiLineString
+         if geom.geom_type == "MultiLineString":
+             merged = custom_line_merge(geom)
+             if merged.geom_type == "MultiLineString":
+                 # Could not merge, split into LineStrings
+                 for part in merged.geoms:
+                     new_row = row.copy()
+                     new_row.geometry = part
+                     records.append(new_row)
+             elif merged.geom_type == "LineString":
+                 new_row = row.copy()
+                 new_row.geometry = merged
+                 records.append(new_row)
+             else:
+                 # Unexpected geometry, keep as is
+                 new_row = row.copy()
+                 new_row.geometry = merged
+                 records.append(new_row)
+         elif geom.geom_type == "LineString":
+             records.append(row)
+         else:
+             # Keep other geometry types unchanged
+             records.append(row)
+     # Build new GeoDataFrame
+     out_gdf = gpd.GeoDataFrame(records, columns=gdf.columns, crs=gdf.crs)
+     out_gdf = out_gdf[out_gdf.geometry.type == "LineString"].reset_index(drop=True)
+     return out_gdf
+
+ def qc_split_lines_at_intersections(gdf):
+     """
+     QC step: Split lines at intersections so each segment becomes a separate line object.
+
+     Args:
+         gdf (GeoDataFrame): Input GeoDataFrame of LineStrings.
+
+     Returns:
+         GeoDataFrame: New GeoDataFrame with lines split at all intersection points.
+     """
+     splitter = LineSplitter(gdf)
+     splitter.process()
+     if splitter.split_lines_gdf is not None:
+         return splitter.split_lines_gdf.reset_index(drop=True)
+     else:
+         return gdf.reset_index(drop=True)
+
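The two QC helpers above are typically chained before any per-segment processing. A minimal usage sketch, assuming a hypothetical seed_lines.shp input (not part of the package):

    import geopandas as gpd
    from beratools.tools.common import (
        qc_merge_multilinestring,
        qc_split_lines_at_intersections,
    )

    lines = gpd.read_file("seed_lines.shp")         # hypothetical input path
    lines = qc_merge_multilinestring(lines)         # only LineStrings remain
    lines = qc_split_lines_at_intersections(lines)  # one feature per segment between intersections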
+ def clip_raster(
+     in_raster_file,
+     clip_geom,
+     buffer=0.0,
+     out_raster_file=None,
+     default_nodata=bt_const.BT_NODATA,
+ ):
+     """Clip a raster to a (buffered) geometry and normalize its nodata value."""
+     out_meta = None
+     with rasterio.open(in_raster_file) as raster_file:
+         out_meta = raster_file.meta
+         ras_nodata = out_meta["nodata"]
+         if ras_nodata is None:
+             ras_nodata = default_nodata
+
+         clip_geo_buffer = [clip_geom.buffer(buffer)]
+         out_image: np.ndarray
+         out_image, out_transform = mask.mask(
+             raster_file, clip_geo_buffer, crop=True, nodata=ras_nodata, filled=True
+         )
+         if np.isnan(ras_nodata):
+             out_image[np.isnan(out_image)] = default_nodata
+         elif np.isinf(ras_nodata):
+             out_image[np.isinf(out_image)] = default_nodata
+         else:
+             out_image[out_image == ras_nodata] = default_nodata
+
+         out_image = np.ma.masked_where(out_image == default_nodata, out_image)
+         out_image.fill_value = default_nodata
+         ras_nodata = default_nodata
+
+     height, width = out_image.shape[1:]
+
+     out_meta.update(
+         {
+             "driver": "GTiff",
+             "height": height,
+             "width": width,
+             "transform": out_transform,
+             "nodata": ras_nodata,
+         }
+     )
+
+     if out_raster_file:
+         with rasterio.open(out_raster_file, "w", **out_meta) as dest:
+             dest.write(out_image)
+             print("[Clip raster]: data saved to {}.".format(out_raster_file))
+
+     return out_image, out_meta
+
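A minimal sketch of clipping a canopy height model around a single line; the file paths and the 10-unit buffer are illustrative placeholders, not values from the package:

    import geopandas as gpd
    from beratools.tools.common import clip_raster

    line = gpd.read_file("seed_lines.shp").geometry.iloc[0]  # hypothetical input
    clipped, meta = clip_raster(
        "chm.tif",             # hypothetical raster path
        line,                  # clip geometry (a shapely geometry)
        buffer=10.0,           # pad the geometry by 10 map units
        out_raster_file="chm_clip.tif",
    )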
+ def remove_nan_from_array(matrix):
+     # replace NaN cells with the cost nodata value, in place
+     with np.nditer(matrix, op_flags=["readwrite"]) as it:
+         for x in it:
+             if np.isnan(x[...]):
+                 x[...] = bt_const.BT_NODATA_COST
+
+ def extract_string_from_printout(str_print, str_extract):
+     str_array = shlex.split(str_print)  # shlex keeps quoted substrings together
+     str_array_enum = enumerate(str_array)
+     index = 0
+     for item in str_array_enum:
+         if str_extract in item[1]:
+             index = item[0]
+             break
+     str_out = str_array[index]
+     return str_out.strip()
+
+ def check_arguments():
+     # Get tool arguments
+     parser = argparse.ArgumentParser()
+     parser.add_argument("-i", "--input", type=json.loads)
+     parser.add_argument("-p", "--processes")
+     parser.add_argument("-v", "--verbose")
+     args = parser.parse_args()
+
+     verbose = args.verbose == "True"
+     for item in args.input:
+         # JSON values may already be numbers or booleans; only convert strings
+         if isinstance(args.input[item], str):
+             if args.input[item].lower() == "false":
+                 args.input[item] = False
+             elif args.input[item].lower() == "true":
+                 args.input[item] = True
+
+     return args, verbose
+
+
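Each BERA tool script parses the same three flags, with tool parameters passed as a single JSON string. A hedged sketch of how check_arguments interprets such an invocation (the parameter names and values are illustrative):

    import sys
    from beratools.tools.common import check_arguments

    # simulate a typical tool invocation; parameter names are illustrative only
    sys.argv = [
        "tool.py",
        "-i", '{"in_line": "seed_lines.shp", "proc_segments": "true"}',
        "-p", "4",
        "-v", "True",
    ]
    args, verbose = check_arguments()
    print(args.input)   # {'in_line': 'seed_lines.shp', 'proc_segments': True}
    print(verbose)      # True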
+ def vector_crs(in_vector):
+     from pyproj.enums import WktVersion
+
+     osr_crs = osgeo.osr.SpatialReference()
+     vec_crs = None
+     # open input vector data as GeoDataFrame
+     gpd_vector = gpd.GeoDataFrame.from_file(in_vector)
+     try:
+         if gpd_vector.crs is not None:
+             vec_crs = gpd_vector.crs
+             if osgeo.version_info.major < 3:
+                 osr_crs.ImportFromWkt(vec_crs.to_wkt(WktVersion.WKT1_GDAL))
+             else:
+                 osr_crs.ImportFromEPSG(vec_crs.to_epsg())
+             return osr_crs
+         else:
+             print("No CRS found in the input feature, please check!")
+             exit()
+     except Exception as e:
+         print(e)
+         exit()
+
+
+ def raster_crs(in_raster):
+     from pyproj.enums import WktVersion
+
+     osr_crs = osgeo.osr.SpatialReference()
+     with rasterio.open(in_raster) as raster_file:
+         try:
+             if raster_file.crs is not None:
+                 ras_crs = raster_file.crs
+                 if osgeo.version_info.major < 3:
+                     osr_crs.ImportFromWkt(ras_crs.to_wkt(WktVersion.WKT1_GDAL))
+                 else:
+                     osr_crs.ImportFromEPSG(ras_crs.to_epsg())
+                 return osr_crs
+             else:
+                 print("No Coordinate Reference System (CRS) found in the input raster, please check!")
+                 exit()
+         except Exception as e:
+             print(e)
+             exit()
+
+
+ def compare_crs(crs_org, crs_dst):
+     if crs_org and crs_dst:
+         if crs_org.IsSameGeogCS(crs_dst):
+             print("Check: input spatial references are the same, continue.")
+             return True
+         else:
+             crs_org_norm = pyproj.CRS(crs_org.ExportToWkt())
+             crs_dst_norm = pyproj.CRS(crs_dst.ExportToWkt())
+             if crs_org_norm.is_compound:
+                 crs_org_proj = crs_org_norm.sub_crs_list[0].coordinate_operation.name
+             elif crs_org_norm.name == "unnamed":
+                 return False
+             else:
+                 crs_org_proj = crs_org_norm.coordinate_operation.name
+
+             if crs_dst_norm.is_compound:
+                 crs_dst_proj = crs_dst_norm.sub_crs_list[0].coordinate_operation.name
+             elif crs_dst_norm.name == "unnamed":
+                 return False
+             else:
+                 crs_dst_proj = crs_dst_norm.coordinate_operation.name
+
+             if crs_org_proj == crs_dst_proj:
+                 if crs_org_norm.name == crs_dst_norm.name:
+                     print("Input files' spatial references are the same, continue.")
+                     return True
+                 else:
+                     print(
+                         "Checked: data are in the same projection zone but use "
+                         "different spatial references.\n"
+                         "Consider re-projecting all data to the same spatial "
+                         "reference system.\nProcess stopped."
+                     )
+                     exit()
+             else:
+                 return False
+
+     return False
+
+
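These three helpers are meant to be used together before processing mixed vector/raster inputs. A minimal sketch (file names are placeholders):

    from beratools.tools.common import vector_crs, raster_crs, compare_crs

    line_crs = vector_crs("seed_lines.shp")  # hypothetical vector input
    chm_crs = raster_crs("chm.tif")          # hypothetical raster input
    if not compare_crs(line_crs, chm_crs):
        raise SystemExit("Input layers are not in the same spatial reference.")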
+ def identity_polygon(line_args):
+     """
+     Return the footprint polygon associated with a line segment.
+
+     Args:
+         line_args : list[GeoDataFrame]
+             0 : GeoDataFrame line segment, one item
+             1 : GeoDataFrame line buffer, one item
+             2 : GeoDataFrame polygons returned by spatial search
+
+     Returns:
+         line, identity : tuple of line and associated footprint
+
+     """
+     line = line_args[0]
+     in_cl_buffer = line_args[1][["geometry", "OLnFID"]]
+     in_fp_polygon = line_args[2]
+
+     identity = None
+     try:
+         # drop polygons not intersecting with the line segment
+         line_geom = line.iloc[0].geometry
+         drop_list = []
+         for i in in_fp_polygon.index:
+             if not in_fp_polygon.loc[i].geometry.intersects(line_geom):
+                 drop_list.append(i)
+             elif line_geom.intersection(in_fp_polygon.loc[i].geometry).length / line_geom.length < 0.30:
+                 drop_list.append(i)  # if less than 30% of the line is inside the polygon, ignore it
+
+         # drop all polygons not used
+         in_fp_polygon = in_fp_polygon.drop(index=drop_list)
+
+         if not in_fp_polygon.empty:
+             identity = in_fp_polygon.overlay(in_cl_buffer, how="intersection")
+     except Exception as e:
+         print(e)
+
+     return line, identity
+
+
+ def line_split2(in_ln_shp, seg_length):
+     # Check for the OLnFID column; create it if missing
+     if "OLnFID" not in in_ln_shp.columns.array:
+         if bt_const.BT_DEBUGGING:
+             print("Cannot find OLnFID column in input line data")
+
+         print("New column created: OLnFID")
+         in_ln_shp["OLnFID"] = in_ln_shp.index
+     line_seg = split_into_equal_Nth_segments(in_ln_shp, seg_length)
+
+     return line_seg
+
+
+ def split_into_equal_Nth_segments(df, seg_length):
+     odf = df
+     crs = odf.crs
+     if "OLnSEG" not in odf.columns.array:
+         df["OLnSEG"] = np.nan
+     df = odf.assign(geometry=odf.apply(lambda x: cut_line_by_length(x.geometry, seg_length), axis=1))
+     df = df.explode()
+
+     df["OLnSEG"] = df.groupby("OLnFID").cumcount()
+     gdf = gpd.GeoDataFrame(df, geometry=df.geometry, crs=crs)
+     gdf = gdf.sort_values(by=["OLnFID", "OLnSEG"])
+     gdf = gdf.reset_index(drop=True)
+
+     if "shape_leng" in gdf.columns.array:
+         gdf["shape_leng"] = gdf.geometry.length
+     elif "LENGTH" in gdf.columns.array:
+         gdf["LENGTH"] = gdf.geometry.length
+     else:
+         gdf["shape_leng"] = gdf.geometry.length
+     return gdf
+
+
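A small sketch of splitting seed lines into roughly equal segments; the 30 m segment length and the input path are illustrative:

    import geopandas as gpd
    from beratools.tools.common import line_split2

    lines = gpd.read_file("seed_lines.shp")       # hypothetical input
    segments = line_split2(lines, seg_length=30)  # adds OLnFID/OLnSEG and splits each line
    print(len(lines), "lines ->", len(segments), "segments")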
+ def split_line_nPart(line, seg_length):
+     seg_line = shapely.segmentize(line, seg_length)
+     distances = np.arange(seg_length, line.length, seg_length)
+
+     if len(distances) > 0:
+         points = [shapely.line_interpolate_point(seg_line, distance) for distance in distances]
+
+         split_points = shapely.multipoints(points)
+         mline = sh_ops.split(seg_line, split_points)
+     else:
+         mline = seg_line
+
+     return mline
+
+
+ def cut_line_by_length(line, length, merge_threshold=0.5):
+     """
+     Split a line into segments of equal length.
+
+     Merge the last segment with the second-to-last if its length
+     is smaller than the given threshold.
+
+     Args:
+         line : LineString
+             Line to be split by distance along the line.
+         length : float
+             Length of each segment to cut.
+         merge_threshold : float, optional
+             Threshold below which the last segment is merged with the previous one. Default is 0.5.
+
+     Returns:
+         List of LineString objects
+             A list containing the resulting line segments.
+
+     Example:
+         >>> from shapely.geometry import LineString
+         >>> line = LineString([(0, 0), (10, 0)])
+         >>> segments = cut_line_by_length(line, 3, merge_threshold=1.5)
+         >>> for segment in segments:
+         ...     print(f"Segment: {segment}, Length: {segment.length}")
+
+     Output before merging (the raw cuts):
+         Segment: LINESTRING (0 0, 3 0), Length: 3.0
+         Segment: LINESTRING (3 0, 6 0), Length: 3.0
+         Segment: LINESTRING (6 0, 9 0), Length: 3.0
+         Segment: LINESTRING (9 0, 10 0), Length: 1.0
+
+     Output after merging the last segment with the second-to-last segment:
+         Segment: LINESTRING (0 0, 3 0), Length: 3.0
+         Segment: LINESTRING (3 0, 6 0), Length: 3.0
+         Segment: LINESTRING (6 0, 10 0), Length: 4.0
+
+     """
+     if line.has_z:
+         # Remove the Z component of the line if it exists
+         line = sh_ops.transform(lambda x, y, z=None: (x, y), line)
+
+     if shapely.is_empty(line):
+         return []
+
+     # Densify the line so consecutive vertices are at most `length` apart
+     line = shapely.segmentize(line, length)
+     lines = []
+     end_pt = None
+
+     while line.length > length:
+         coords = list(line.coords)
+
+         for i, p in enumerate(coords):
+             p_dist = line.project(sh_geom.Point(p))
+
+             # Check if the distance matches closely and split the line
+             if abs(p_dist - length) < 1e-9:  # Use a small epsilon value
+                 lines.append(sh_geom.LineString(coords[: i + 1]))
+                 line = sh_geom.LineString(coords[i:])
+                 end_pt = None
+                 break
+             elif p_dist > length:
+                 end_pt = line.interpolate(length)
+                 lines.append(sh_geom.LineString(coords[:i] + list(end_pt.coords)))
+                 line = sh_geom.LineString(list(end_pt.coords) + coords[i:])
+                 break
+
+     # Append the remaining portion, including lines shorter than `length`
+     if not shapely.is_empty(line):
+         lines.append(line)
+
+     # Handle the threshold condition: merge the last segment if its length is below the threshold
+     if len(lines) > 1:
+         if lines[-1].length < merge_threshold:
+             # Merge the last segment with the second-to-last one
+             lines[-2] = sh_geom.LineString(list(lines[-2].coords) + list(lines[-1].coords))
+             lines.pop()  # Remove the last segment after merging
+
+     return lines
+
+ def chk_df_multipart(df, chk_shp_in_string):
+     try:
+         found = False
+         if str.upper(chk_shp_in_string) in [x.upper() for x in df.geom_type.values]:
+             found = True
+             df = df.explode()
+             if isinstance(df, gpd.GeoDataFrame):
+                 df["OLnSEG"] = df.groupby("OLnFID").cumcount()
+                 df = df.sort_values(by=["OLnFID", "OLnSEG"])
+                 df = df.reset_index(drop=True)
+         else:
+             found = False
+         return df, found
+     except Exception as e:
+         print(e)
+         return df, True
+
+
+ def dyn_fs_raster_stdmean(canopy_ndarray, kernel, nodata):
+     # xrspatial focal statistics can handle large rasters but are slow
+     mask = canopy_ndarray.mask
+     in_ndarray = np.ma.where(mask, np.nan, canopy_ndarray)
+     result_ndarray = xrspatial.focal.focal_stats(
+         xr.DataArray(in_ndarray.data), kernel, stats_funcs=["std", "mean"]
+     )
+
+     # Extract the std and mean ndarrays (returned arrays contain NaN values)
+     reshape_std_ndarray = result_ndarray[0].data
+     reshape_mean_ndarray = result_ndarray[1].data
+
+     return reshape_std_ndarray, reshape_mean_ndarray
+
+
+ def dyn_smooth_cost(canopy_ndarray, max_line_dist, sampling):
+     mask = canopy_ndarray.mask
+     in_ndarray = np.ma.where(mask, np.nan, canopy_ndarray)
+     # scipy Euclidean distance transform
+     euc_dist_array = ndimage.distance_transform_edt(
+         np.logical_not(np.isnan(in_ndarray.data)), sampling=sampling
+     )
+     euc_dist_array[mask] = np.nan
+     smooth1 = float(max_line_dist) - euc_dist_array
+     smooth1[smooth1 <= 0.0] = 0.0
+     smooth_cost_array = smooth1 / float(max_line_dist)
+
+     return smooth_cost_array
+
+
+ def dyn_np_cost_raster(canopy_ndarray, cc_mean, cc_std, cc_smooth, avoidance, cost_raster_exponent):
+     # normalized (mean - std) / (mean + std), guarded against division by zero
+     aM1a = cc_mean - cc_std
+     aM1b = cc_mean + cc_std
+     aM1 = np.divide(aM1a, aM1b, where=aM1b != 0, out=np.zeros(aM1a.shape, dtype=float))
+     aM = (1 + aM1) / 2
+     aaM = cc_mean + cc_std
+     bM = np.where(aaM <= 0, 0, aM)
+     # blend the canopy cost with the smoothed distance cost by the avoidance factor
+     cM = bM * (1 - avoidance) + (cc_smooth * avoidance)
+     # canopy cells keep the maximum cost of 1
+     dM = np.where(canopy_ndarray.data == 1, 1, cM)
+     eM = np.exp(dM)
+     result = np.power(eM, float(cost_raster_exponent))
+
+     return result
+
+
+ def dyn_np_cc_map(in_chm, canopy_ht_threshold, nodata):
+     canopy_ndarray = np.ma.where(in_chm >= canopy_ht_threshold, 1.0, 0.0).astype(float)
+     canopy_ndarray.fill_value = nodata
+
+     return canopy_ndarray
+
+
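The four dyn_* helpers compose into the dynamic cost-raster workflow. A hedged sketch of how they fit together; the stand-in CHM array, kernel size, and parameter values below are illustrative placeholders, not package defaults:

    import numpy as np
    import xrspatial.convolution as xr_conv
    from beratools.tools.common import (
        dyn_np_cc_map, dyn_fs_raster_stdmean, dyn_smooth_cost, dyn_np_cost_raster,
    )

    chm = np.random.rand(200, 200) * 10
    chm[:5, :5] = np.nan
    chm = np.ma.masked_invalid(chm)                  # stand-in CHM, 1 m cells
    kernel = xr_conv.circle_kernel(1, 1, 2.5)        # ~2.5 m focal radius

    canopy = dyn_np_cc_map(chm, canopy_ht_threshold=1.0, nodata=-9999)
    cc_std, cc_mean = dyn_fs_raster_stdmean(canopy, kernel, nodata=-9999)
    cc_smooth = dyn_smooth_cost(canopy, max_line_dist=10, sampling=(1, 1))
    cost = dyn_np_cost_raster(canopy, cc_mean, cc_std, cc_smooth,
                              avoidance=0.4, cost_raster_exponent=1.5)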
+ def generate_line_args_DFP_NoClip(
+     line_seg,
+     work_in_bufferL,
+     work_in_bufferC,
+     in_chm_obj,
+     in_chm,
+     tree_radius,
+     max_line_dist,
+     canopy_avoidance,
+     exponent,
+     work_in_bufferR,
+     canopy_thresh_percentage,
+ ):
+     line_argsL = []
+     line_argsR = []
+     line_argsC = []
+     line_id = 0
+     for record in range(0, len(work_in_bufferL)):
+         line_bufferL = work_in_bufferL.loc[record, "geometry"]
+         line_bufferC = work_in_bufferC.loc[record, "geometry"]
+         LCut = work_in_bufferL.loc[record, "LDist_Cut"]
+
+         nodata = bt_const.BT_NODATA
+         line_argsL.append(
+             [
+                 in_chm,
+                 float(work_in_bufferL.loc[record, "DynCanTh"]),
+                 float(tree_radius),
+                 float(max_line_dist),
+                 float(canopy_avoidance),
+                 float(exponent),
+                 in_chm_obj.res,
+                 nodata,
+                 line_seg.iloc[[record]],
+                 in_chm_obj.meta.copy(),
+                 line_id,
+                 LCut,
+                 "Left",
+                 canopy_thresh_percentage,
+                 line_bufferL,
+             ]
+         )
+
+         line_argsC.append(
+             [
+                 in_chm,
+                 float(work_in_bufferC.loc[record, "DynCanTh"]),
+                 float(tree_radius),
+                 float(max_line_dist),
+                 float(canopy_avoidance),
+                 float(exponent),
+                 in_chm_obj.res,
+                 nodata,
+                 line_seg.iloc[[record]],
+                 in_chm_obj.meta.copy(),
+                 line_id,
+                 10,
+                 "Center",
+                 canopy_thresh_percentage,
+                 line_bufferC,
+             ]
+         )
+
+         line_id += 1
+
+     line_id = 0
+     for record in range(0, len(work_in_bufferR)):
+         line_bufferR = work_in_bufferR.loc[record, "geometry"]
+         RCut = work_in_bufferR.loc[record, "RDist_Cut"]
+         line_bufferC = work_in_bufferC.loc[record, "geometry"]
+
+         nodata = bt_const.BT_NODATA
+         # TODO deal with inherited nodata and BT_NODATA_COST
+         # TODO convert nodata to BT_NODATA_COST
+         line_argsR.append(
+             [
+                 in_chm,
+                 float(work_in_bufferR.loc[record, "DynCanTh"]),
+                 float(tree_radius),
+                 float(max_line_dist),
+                 float(canopy_avoidance),
+                 float(exponent),
+                 in_chm_obj.res,
+                 nodata,
+                 line_seg.iloc[[record]],
+                 in_chm_obj.meta.copy(),
+                 line_id,
+                 RCut,
+                 "Right",
+                 canopy_thresh_percentage,
+                 line_bufferR,
+             ]
+         )
+
+         step = line_id + 1 + len(work_in_bufferL)
+         total = len(work_in_bufferL) + len(work_in_bufferR)
+         print(f' "PROGRESS_LABEL Preparing... {step} of {total}" ', flush=True)
+         print(f" %{step / total * 100} ", flush=True)
+
+         line_id += 1
+
+     return line_argsL, line_argsR, line_argsC