BERATools 0.2.3-py3-none-any.whl → 0.2.4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (78)
  1. beratools/__init__.py +8 -3
  2. beratools/core/{algo_footprint_rel.py → algo_canopy_footprint_exp.py} +176 -139
  3. beratools/core/algo_centerline.py +61 -77
  4. beratools/core/algo_common.py +48 -57
  5. beratools/core/algo_cost.py +18 -25
  6. beratools/core/algo_dijkstra.py +37 -45
  7. beratools/core/algo_line_grouping.py +100 -100
  8. beratools/core/algo_merge_lines.py +40 -8
  9. beratools/core/algo_split_with_lines.py +289 -304
  10. beratools/core/algo_vertex_optimization.py +25 -46
  11. beratools/core/canopy_threshold_relative.py +755 -0
  12. beratools/core/constants.py +8 -9
  13. beratools/{tools → core}/line_footprint_functions.py +411 -258
  14. beratools/core/logger.py +18 -2
  15. beratools/core/tool_base.py +17 -75
  16. beratools/gui/assets/BERALogo.ico +0 -0
  17. beratools/gui/assets/BERA_Splash.gif +0 -0
  18. beratools/gui/assets/BERA_WizardImage.png +0 -0
  19. beratools/gui/assets/beratools.json +475 -2171
  20. beratools/gui/bt_data.py +585 -234
  21. beratools/gui/bt_gui_main.py +129 -91
  22. beratools/gui/main.py +4 -7
  23. beratools/gui/tool_widgets.py +530 -354
  24. beratools/tools/__init__.py +0 -7
  25. beratools/tools/{line_footprint_absolute.py → canopy_footprint_absolute.py} +81 -56
  26. beratools/tools/canopy_footprint_exp.py +113 -0
  27. beratools/tools/centerline.py +30 -37
  28. beratools/tools/check_seed_line.py +127 -0
  29. beratools/tools/common.py +65 -586
  30. beratools/tools/{line_footprint_fixed.py → ground_footprint.py} +140 -117
  31. beratools/tools/line_footprint_relative.py +64 -35
  32. beratools/tools/tool_template.py +48 -40
  33. beratools/tools/vertex_optimization.py +20 -34
  34. beratools/utility/env_checks.py +53 -0
  35. beratools/utility/spatial_common.py +210 -0
  36. beratools/utility/tool_args.py +138 -0
  37. beratools-0.2.4.dist-info/METADATA +134 -0
  38. beratools-0.2.4.dist-info/RECORD +50 -0
  39. {beratools-0.2.3.dist-info → beratools-0.2.4.dist-info}/WHEEL +1 -1
  40. beratools-0.2.4.dist-info/entry_points.txt +3 -0
  41. beratools-0.2.4.dist-info/licenses/LICENSE +674 -0
  42. beratools/core/algo_tiler.py +0 -428
  43. beratools/gui/__init__.py +0 -11
  44. beratools/gui/batch_processing_dlg.py +0 -513
  45. beratools/gui/map_window.py +0 -162
  46. beratools/tools/Beratools_r_script.r +0 -1120
  47. beratools/tools/Ht_metrics.py +0 -116
  48. beratools/tools/batch_processing.py +0 -136
  49. beratools/tools/canopy_threshold_relative.py +0 -672
  50. beratools/tools/canopycostraster.py +0 -222
  51. beratools/tools/fl_regen_csf.py +0 -428
  52. beratools/tools/forest_line_attributes.py +0 -408
  53. beratools/tools/line_grouping.py +0 -45
  54. beratools/tools/ln_relative_metrics.py +0 -615
  55. beratools/tools/r_cal_lpi_elai.r +0 -25
  56. beratools/tools/r_generate_pd_focalraster.r +0 -101
  57. beratools/tools/r_interface.py +0 -80
  58. beratools/tools/r_point_density.r +0 -9
  59. beratools/tools/rpy_chm2trees.py +0 -86
  60. beratools/tools/rpy_dsm_chm_by.py +0 -81
  61. beratools/tools/rpy_dtm_by.py +0 -63
  62. beratools/tools/rpy_find_cellsize.py +0 -43
  63. beratools/tools/rpy_gnd_csf.py +0 -74
  64. beratools/tools/rpy_hummock_hollow.py +0 -85
  65. beratools/tools/rpy_hummock_hollow_raster.py +0 -71
  66. beratools/tools/rpy_las_info.py +0 -51
  67. beratools/tools/rpy_laz2las.py +0 -40
  68. beratools/tools/rpy_lpi_elai_lascat.py +0 -466
  69. beratools/tools/rpy_normalized_lidar_by.py +0 -56
  70. beratools/tools/rpy_percent_above_dbh.py +0 -80
  71. beratools/tools/rpy_points2trees.py +0 -88
  72. beratools/tools/rpy_vegcoverage.py +0 -94
  73. beratools/tools/tiler.py +0 -48
  74. beratools/tools/zonal_threshold.py +0 -144
  75. beratools-0.2.3.dist-info/METADATA +0 -108
  76. beratools-0.2.3.dist-info/RECORD +0 -74
  77. beratools-0.2.3.dist-info/entry_points.txt +0 -2
  78. beratools-0.2.3.dist-info/licenses/LICENSE +0 -22

beratools/tools/__init__.py
@@ -1,7 +0,0 @@
- # -*- coding: utf-8 -*-
-
- # import os
- # import sys
- # sys.path.append(os.path.dirname(os.path.realpath(__file__)))
-
- name = 'tools'

beratools/tools/{line_footprint_absolute.py → canopy_footprint_absolute.py}
@@ -13,20 +13,28 @@ Description:
  The purpose of this script is to provide main interface for canopy footprint tool.
  The tool is used to generate the footprint of a line based on absolute threshold.
  """
+
+ import logging
  import time

  import geopandas as gpd
  import numpy as np
  import pandas as pd
- import rasterio
- import shapely
+ from rasterio import features
+ from rasterio.transform import rowcol
+ from shapely.geometry import MultiPolygon, Polygon, shape

  import beratools.core.algo_centerline as algo_cl
  import beratools.core.algo_common as algo_common
  import beratools.core.algo_cost as algo_cost
- import beratools.core.constants as bt_const
  import beratools.core.tool_base as bt_base
- import beratools.tools.common as bt_common
+ import beratools.utility.spatial_common as sp_common
+ from beratools.core.logger import Logger
+ from beratools.utility.tool_args import CallMode
+
+ log = Logger("canopy_footprint_abs", file_level=logging.INFO)
+ logger = log.get_logger()
+ print = log.print


  class FootprintAbsolute:
@@ -63,8 +71,10 @@ class FootprintAbsolute:
  if corridor_thresh < 0.0:
  corridor_thresh = 3.0
  except ValueError as e:
- print(f"process_single_line_segment: {e}")
+ print(f"FootprintAbsolute.compute: ValueError {e}")
  corridor_thresh = 3.0
+ except Exception as e:
+ print(f"FootprintAbsolute.compute: exception {e}")

  segment_list = []
  feat = self.line_seg.geometry[0]
@@ -77,7 +87,7 @@

  # Buffer around line and clip cost raster and canopy raster
  # TODO: deal with NODATA
- clip_cost, out_meta = bt_common.clip_raster(in_chm, feat, max_ln_width)
+ clip_cost, out_meta = sp_common.clip_raster(in_chm, feat, max_ln_width)
  out_transform = out_meta["transform"]
  cell_size_x = out_transform[0]
  cell_size_y = -out_transform[4]
@@ -88,9 +98,8 @@
  if len(clip_canopy.shape) > 2:
  clip_canopy = np.squeeze(clip_canopy, axis=0)

- transformer = rasterio.transform.AffineTransformer(out_transform)
- source = [transformer.rowcol(x1, y1)]
- destination = [transformer.rowcol(x2, y2)]
+ source = [rowcol(out_transform, x1, y1)]
+ destination = [rowcol(out_transform, x2, y2)]

  corridor_thresh = algo_common.corridor_raster(
  clip_cost,
@@ -101,9 +110,7 @@
  corridor_thresh,
  )

- clean_raster = algo_common.morph_raster(
- corridor_thresh, clip_canopy, exp_shk_cell, cell_size_x
- )
+ clean_raster = algo_common.morph_raster(corridor_thresh, clip_canopy, exp_shk_cell, cell_size_x)

  # create mask for non-polygon area
  msk = np.where(clean_raster == 1, True, False)
@@ -111,33 +118,56 @@
  clean_raster = clean_raster.astype(np.int32)

  # Process: ndarray to shapely Polygon
- out_polygon = rasterio.features.shapes(
- clean_raster, mask=msk, transform=out_transform
- )
+ out_polygon = features.shapes(clean_raster, mask=msk, transform=out_transform)

  # create a shapely multipolygon
  multi_polygon = []
  for shp, value in out_polygon:
- multi_polygon.append(shapely.geometry.shape(shp))
- poly = shapely.geometry.MultiPolygon(multi_polygon)
+ multi_polygon.append(shape(shp))
+ poly = MultiPolygon(multi_polygon)

  # create a pandas dataframe for the footprint
- footprint = gpd.GeoDataFrame(geometry=[poly], crs=self.line_seg.crs)
+ # Ensure CRS is a string
+ crs_str = None
+ if hasattr(self.line_seg, "crs") and self.line_seg.crs:
+ if hasattr(self.line_seg.crs, "to_string"):
+ crs_str = self.line_seg.crs.to_string()
+ else:
+ crs_str = str(self.line_seg.crs)
+ else:
+ crs_str = "EPSG:4326"
+ # Ensure poly is a valid shapely geometry
+ if not isinstance(poly, (Polygon, MultiPolygon)):
+ poly = MultiPolygon([poly]) if poly else None
+
+ # Fallback CRS if invalid
+ if not crs_str or not isinstance(crs_str, str) or not crs_str.startswith("EPSG"):
+ crs_str = "EPSG:4326"
+
+ # Only create GeoDataFrame if poly is not None
+ if poly is not None and isinstance(poly, (Polygon, MultiPolygon)):
+ geometry_list = [poly]
+ else:
+ geometry_list = []
+
+ import pandas as pd
+
+ self.footprint = gpd.GeoDataFrame({"geometry": geometry_list})
+ self.footprint.set_crs(crs_str, inplace=True)

  # find contiguous corridor polygon for centerline
- corridor_poly_gpd = algo_cl.find_corridor_polygon(
- corridor_thresh, out_transform, line_gpd
- )
- centerline, status = algo_cl.find_centerline(
- corridor_poly_gpd.geometry.iloc[0], feat
- )
+ corridor_poly_gpd = algo_cl.find_corridor_polygon(corridor_thresh, out_transform, line_gpd)
+ centerline, status = algo_cl.find_centerline(corridor_poly_gpd.geometry.iloc[0], feat)

- self.footprint = footprint
  self.corridor_poly_gpd = corridor_poly_gpd
  self.centerline = centerline

+
  def process_single_line(line_footprint):
- line_footprint.compute()
+ try:
+ line_footprint.compute()
+ except Exception as e:
+ print(f"process_single_line: exception {e}")
  return line_footprint


@@ -153,26 +183,17 @@ def generate_line_class_list(
  line_list = algo_common.prepare_lines_gdf(in_line, in_layer, proc_segments=False)

  for line in line_list:
- line_classes.append(
- FootprintAbsolute(line, in_chm, corridor_thresh, max_ln_width, exp_shk_cell)
- )
+ line_classes.append(FootprintAbsolute(line, in_chm, corridor_thresh, max_ln_width, exp_shk_cell))

  return line_classes


- def line_footprint_abs(
- in_line,
- in_chm,
- corridor_thresh,
- max_ln_width,
- exp_shk_cell,
- out_footprint,
- processes,
- verbose,
- in_layer=None,
- out_layer=None,
- parallel_mode=bt_const.ParallelMode.MULTIPROCESSING
- ):
+ def canopy_footprint_abs(
+ in_line, in_chm, corridor_thresh, max_ln_width, exp_shk_cell, out_footprint,
+ processes=0, call_mode=CallMode.CLI, log_level="INFO"):
+ in_file, in_layer = sp_common.decode_file_layer(in_line)
+ out_file, out_layer = sp_common.decode_file_layer(out_footprint)
+
  max_ln_width = float(max_ln_width)
  exp_shk_cell = int(exp_shk_cell)

@@ -180,7 +201,7 @@ def line_footprint_abs(
  poly_list = []

  line_class_list = generate_line_class_list(
- in_line, in_chm, corridor_thresh, max_ln_width, exp_shk_cell, in_layer
+ in_file, in_chm, corridor_thresh, max_ln_width, exp_shk_cell, in_layer
  )

  feat_list = bt_base.execute_multiprocessing(
@@ -188,26 +209,30 @@
  line_class_list,
  "Line footprint",
  processes,
- 1,
- verbose=verbose,
+ call_mode
  )

  if feat_list:
  for i in feat_list:
- footprint_list.append(i.footprint)
- poly_list.append(i.corridor_poly_gpd)
+ if i.footprint is not None:
+ footprint_list.append(i.footprint)
+ if i.corridor_poly_gpd is not None:
+ poly_list.append(i.corridor_poly_gpd)

- results = gpd.GeoDataFrame(pd.concat(footprint_list))
- results = results.reset_index(drop=True)
- results.to_file(out_footprint, layer=out_layer)
+ if footprint_list:
+ results = gpd.GeoDataFrame(pd.concat(footprint_list))
+ results = results.reset_index(drop=True)
+ layer_name = out_layer if out_layer else "canopy_footprint"
+ results.to_file(out_file, layer=layer_name)
+ print(f"Saved footprint to {out_file}, layer: {layer_name}")
+ else:
+ print("Warning: No footprints generated. Output file not written.")


  if __name__ == "__main__":
+ from beratools.utility.tool_args import compose_tool_kwargs
  start_time = time.time()
  print("Footprint processing started")
-
- in_args, in_verbose = bt_common.check_arguments()
- line_footprint_abs(
- **in_args.input, processes=int(in_args.processes), verbose=in_verbose
- )
- print('Elapsed time: {}'.format(time.time() - start_time))
+ kwargs = compose_tool_kwargs("canopy_footprint_absolute")
+ canopy_footprint_abs(**kwargs)
+ print("Elapsed time: {}".format(time.time() - start_time))

beratools/tools/canopy_footprint_exp.py
@@ -0,0 +1,113 @@
+ """Canopy footprint tool with exception handling."""
+
+ from pathlib import Path
+ from tabnanny import verbose
+
+ import beratools.utility.spatial_common as sp_common
+ from beratools.core.algo_canopy_footprint_exp import FootprintCanopy
+ from beratools.utility.tool_args import CallMode
+
+
+ def line_footprint_exp(
+ in_line,
+ in_chm,
+ out_footprint,
+ max_ln_width=32,
+ tree_radius=1.5,
+ max_line_dist=1.5,
+ canopy_avoidance=0.0,
+ exponent=1.0,
+ canopy_thresh_percentage=50,
+ processes=0,
+ call_mode=CallMode.CLI,
+ log_level="INFO",
+ ):
+ """Safe version of relative canopy footprint tool."""
+ try:
+ footprint = FootprintCanopy(in_line, in_chm)
+ except Exception as e:
+ print(f"Failed to initialize FootprintCanopy: {e}")
+ return
+
+ try:
+ footprint.compute(processes)
+ except Exception as e:
+ print(f"Error in compute(): {e}")
+ import traceback
+
+ traceback.print_exc()
+ return
+
+ # Save only if footprints were actually generated
+ out_file, out_layer = sp_common.decode_file_layer(out_footprint)
+ if (
+ hasattr(footprint, "footprints")
+ and footprint.footprints is not None
+ and hasattr(footprint.footprints, "empty")
+ and not footprint.footprints.empty
+ ):
+ try:
+ footprint.save_footprint(out_file, out_layer)
+ print(f"Footprint saved to {out_footprint}")
+ except Exception as e:
+ print(f"Failed to save footprint: {e}")
+ else:
+ print("No valid footprints to save.")
+
+ # Optionally save percentile lines (if needed)
+ if (
+ hasattr(footprint, "lines_percentile")
+ and footprint.lines_percentile is not None
+ and hasattr(footprint.lines_percentile, "empty")
+ and not footprint.lines_percentile.empty
+ ):
+ out_file_path = Path(out_file)
+ out_file_aux = out_file_path.with_stem(out_file_path.stem + "_aux")
+ try:
+ footprint.save_line_percentile(out_file_aux.as_posix())
+ if verbose:
+ print(f"Line percentile saved to {out_file_aux}")
+ except Exception as e:
+ print(f"Failed to save line percentile: {e}")
+
+ def parse_cli_args():
+ import argparse
+
+ parser = argparse.ArgumentParser(
+ description="Canopy footprint tool with exception handling.",
+ usage="%(prog)s in_line in_chm out_footprint [options]"
+ )
+ parser.add_argument("in_line", help="Input line file")
+ parser.add_argument("in_chm", help="Input CHM file")
+ parser.add_argument("out_footprint", help="Output footprint file")
+ parser.add_argument("--max-ln-width", type=float, default=32, help="Maximum line width (default: 32)")
+ parser.add_argument("--tree-radius", type=float, default=1.5, help="Tree radius (default: 1.5)")
+ parser.add_argument("--max-line-dist", type=float, default=1.5, help="Maximum line distance (default: 1.5)")
+ parser.add_argument("--canopy-avoidance", type=float, default=0.0, help="Canopy avoidance (default: 0.0)")
+ parser.add_argument("--exponent", type=float, default=1.0, help="Exponent (default: 1.0)")
+ parser.add_argument("--canopy-thresh-percentage", type=float, default=50, help="Canopy threshold percentage (default: 50)")
+ parser.add_argument("--processes", type=int, default=0, help="Number of processes (default: 0)")
+ parser.add_argument("--log-level", type=str, default="INFO", help="Log level (default: INFO)")
+
+ args = parser.parse_args()
+ return {
+ "in_line": args.in_line,
+ "in_chm": args.in_chm,
+ "out_footprint": args.out_footprint,
+ "max_ln_width": args.max_ln_width,
+ "tree_radius": args.tree_radius,
+ "max_line_dist": args.max_line_dist,
+ "canopy_avoidance": args.canopy_avoidance,
+ "exponent": args.exponent,
+ "canopy_thresh_percentage": args.canopy_thresh_percentage,
+ "processes": args.processes,
+ "log_level": args.log_level,
+ }
+
+ if __name__ == "__main__":
+ import time
+
+ start_time = time.time()
+ kwargs = parse_cli_args()
+ line_footprint_exp(**kwargs)
+ print("Elapsed time: {}".format(time.time() - start_time))

beratools/tools/centerline.py
@@ -14,32 +14,28 @@ Description:
  """

  import logging
- import time
  from pathlib import Path

  import pandas as pd

  import beratools.core.algo_centerline as algo_centerline
  import beratools.core.algo_common as algo_common
- import beratools.core.constants as bt_const
- import beratools.tools.common as bt_common
+ import beratools.utility.spatial_common as sp_common
  from beratools.core.logger import Logger
  from beratools.core.tool_base import execute_multiprocessing
+ from beratools.utility.tool_args import CallMode

  log = Logger("centerline", file_level=logging.INFO)
  logger = log.get_logger()
  print = log.print

- def generate_line_class_list(
- in_vector, in_raster, line_radius, layer=None, proc_segments=True
- ) -> list:
+
+ def generate_line_class_list(in_vector, in_raster, line_radius, layer=None, proc_segments=True) -> list:
  line_classes = []
  line_list = algo_common.prepare_lines_gdf(in_vector, layer, proc_segments)

  for item in line_list:
- line_classes.append(
- algo_centerline.SeedLine(item, in_raster, proc_segments, line_radius)
- )
+ line_classes.append(algo_centerline.SeedLine(item, in_raster, proc_segments, line_radius))

  return line_classes

@@ -55,20 +51,20 @@ def centerline(
  line_radius,
  proc_segments,
  out_line,
- processes,
- verbose,
- in_layer=None,
- out_layer=None,
- parallel_mode=bt_const.ParallelMode.MULTIPROCESSING
+ use_angle_grouping=True,
+ processes=0,
+ call_mode=CallMode.CLI,
+ log_level="INFO",
  ):
- if not bt_common.compare_crs(
- bt_common.vector_crs(in_line), bt_common.raster_crs(in_raster)
- ):
+ in_file, in_layer = sp_common.decode_file_layer(in_line)
+ out_file, out_layer = sp_common.decode_file_layer(out_line)
+
+ if not sp_common.compare_crs(sp_common.vector_crs(in_file), sp_common.raster_crs(in_raster)):
  print("Line and CHM have different spatial references, please check.")
  return

  line_class_list = generate_line_class_list(
- in_line,
+ in_file,
  in_raster,
  line_radius=float(line_radius),
  layer=in_layer,
@@ -85,12 +81,11 @@ def centerline(
  line_class_list,
  "Centerline",
  processes,
- verbose=verbose,
- mode=parallel_mode,
+ call_mode,
  )
  if not result:
  print("No centerlines found.")
- return
+ return 1

  for item in result:
  lc_path_list.append(item.lc_path)
@@ -98,11 +93,7 @@ def centerline(
  corridor_poly_list.append(item.corridor_poly_gpd)

  # Concatenate the lists of GeoDataFrames into single GeoDataFrames
- if (
- len(lc_path_list) == 0
- or len(centerline_list) == 0
- or len(corridor_poly_list) == 0
- ):
+ if len(lc_path_list) == 0 or len(centerline_list) == 0 or len(corridor_poly_list) == 0:
  print("No centerline generated.")
  return 1

@@ -111,26 +102,28 @@ def centerline(
  corridor_polys = pd.concat(corridor_poly_list, ignore_index=True)

  # Save the concatenated GeoDataFrames to the shapefile/gpkg
- centerline_list.to_file(out_line, layer=out_layer)
+ centerline_list.to_file(out_file, layer=out_layer)
+ print(f"Saved centerlines to: {out_file}")

  # Check if the output file is a shapefile
- out_line_path = Path(out_line)
+ out_line_path = Path(out_file)

- if out_line_path.suffix == ".shp":
- # Generate the new file name for the GeoPackage with '_aux' appended
- aux_file = out_line_path.with_name(out_line_path.stem + "_aux.gpkg")
- print(f"Saved auxiliary data to: {aux_file}")
- else:
- aux_file = out_line # continue using out_line (gpkg)
+ # Generate the new file name for the GeoPackage with '_aux' appended
+ aux_file = out_line_path.with_name(out_line_path.stem + "_aux.gpkg")
+ print(f"Saved auxiliary data to: {aux_file}")

  # Save lc_path_list and corridor_polys to the new GeoPackage with '_aux' suffix
  lc_path_list.to_file(aux_file, layer="least_cost_path")
  corridor_polys.to_file(aux_file, layer="corridor_polygon")

+ return 0
+

- # TODO: fix geometries when job done
  if __name__ == "__main__":
- in_args, in_verbose = bt_common.check_arguments()
+ import time
+
+ from beratools.utility.tool_args import compose_tool_kwargs
  start_time = time.time()
- centerline(**in_args.input, processes=int(in_args.processes), verbose=in_verbose)
+ kwargs = compose_tool_kwargs("centerline")
+ centerline(**kwargs)
  print("Elapsed time: {}".format(time.time() - start_time))

beratools/tools/check_seed_line.py
@@ -0,0 +1,127 @@
+ """
+ Copyright (C) 2025 Applied Geospatial Research Group.
+
+ This script is licensed under the GNU General Public License v3.0.
+ See <https://gnu.org/licenses/gpl-3.0> for full license details.
+
+ Author: Richard Zeng
+
+ Description:
+ This script is part of the BERA Tools.
+ Webpage: https://github.com/appliedgrg/beratools
+
+ The purpose of this script is to provide main interface for line grouping tool.
+ """
+
+ import logging
+
+ import geopandas as gpd
+
+ import beratools.utility.spatial_common as sp_common
+ from beratools.core.logger import Logger
+ from beratools.utility.tool_args import CallMode
+
+ log = Logger("check_seed_line", file_level=logging.INFO)
+ logger = log.get_logger()
+ print = log.print
+
+
+ def qc_merge_multilinestring(gdf):
+ """
+ QC step: Merge MultiLineStrings if possible, else split into LineStrings.
+
+ Args:
+ gdf (GeoDataFrame): Input GeoDataFrame.
+
+ Returns:
+ GeoDataFrame: Cleaned GeoDataFrame with only LineStrings.
+ """
+ from shapely.geometry.base import BaseGeometry
+
+ from beratools.core.algo_merge_lines import custom_line_merge
+
+ records = []
+ for idx, row in gdf.iterrows():
+ geom = row.geometry
+ if geom is None:
+ continue
+ row_dict = row.to_dict()
+ # Try to merge MultiLineString
+ if geom.geom_type == "MultiLineString":
+ merged = custom_line_merge(geom)
+ if merged.geom_type == "MultiLineString":
+ # Could not merge, split into LineStrings
+ for part in merged.geoms:
+ new_row = row_dict.copy()
+ new_row["geometry"] = part
+ if part.geom_type == "LineString":
+ records.append(new_row)
+ elif merged.geom_type == "LineString":
+ new_row = row_dict.copy()
+ new_row["geometry"] = merged
+ records.append(new_row)
+ else:
+ # Unexpected geometry, keep as is
+ new_row = row_dict.copy()
+ new_row["geometry"] = merged
+ if hasattr(merged, "geom_type") and merged.geom_type == "LineString":
+ records.append(new_row)
+ elif geom.geom_type == "LineString":
+ records.append(row_dict)
+ # else: skip non-LineString geometries
+
+ # Build new GeoDataFrame
+ valid_records = [rec for rec in records if isinstance(rec.get("geometry", None), BaseGeometry)]
+ out_gdf = gpd.GeoDataFrame.from_records(valid_records, columns=gdf.columns)
+ out_gdf.set_crs(gdf.crs, inplace=True)
+ out_gdf = out_gdf.reset_index(drop=True)
+ return out_gdf
+
+
+ def qc_split_lines_at_intersections(gdf):
+ """
+ QC step: Split lines at intersections so each segment becomes a separate line object.
+
+ Args:
+ gdf (GeoDataFrame): Input GeoDataFrame of LineStrings.
+
+ Returns:
+ GeoDataFrame: New GeoDataFrame with lines split at all intersection points.
+ """
+ from beratools.core.algo_split_with_lines import LineSplitter
+
+ splitter = LineSplitter(gdf)
+ splitter.process()
+ if splitter.split_lines_gdf is not None:
+ if isinstance(splitter.split_lines_gdf, gpd.GeoDataFrame):
+ return splitter.split_lines_gdf.reset_index(drop=True)
+ else:
+ return splitter.split_lines_gdf
+ else:
+ return gdf.reset_index(drop=True)
+
+
+ def check_seed_line(in_line, out_line, use_angle_grouping=True,
+ processes=0, call_mode=CallMode.CLI, log_level="INFO"):
+ from beratools.core.algo_line_grouping import LineGrouping
+
+ in_file, in_layer = sp_common.decode_file_layer(in_line)
+ out_file, out_layer = sp_common.decode_file_layer(out_line)
+
+ in_line_gdf = gpd.read_file(in_file, layer=in_layer)
+ in_line_gdf = qc_merge_multilinestring(in_line_gdf)
+ in_line_gdf = qc_split_lines_at_intersections(in_line_gdf)
+ lg = LineGrouping(in_line_gdf, use_angle_grouping=use_angle_grouping)
+ lg.run_grouping()
+ lg.lines.to_file(out_file, layer=out_layer)
+ print(f"Output saved to file: {out_file}, layer: {out_layer}")
+
+
+ if __name__ == "__main__":
+ import time
+
+ from beratools.utility.tool_args import compose_tool_kwargs
+ start_time = time.time()
+ kwargs = compose_tool_kwargs("check_seed_line")
+ check_seed_line(**kwargs)
+ print("Elapsed time: {}".format(time.time() - start_time))