BERATools 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. beratools/__init__.py +3 -0
  2. beratools/core/__init__.py +0 -0
  3. beratools/core/algo_centerline.py +476 -0
  4. beratools/core/algo_common.py +489 -0
  5. beratools/core/algo_cost.py +185 -0
  6. beratools/core/algo_dijkstra.py +492 -0
  7. beratools/core/algo_footprint_rel.py +693 -0
  8. beratools/core/algo_line_grouping.py +941 -0
  9. beratools/core/algo_merge_lines.py +255 -0
  10. beratools/core/algo_split_with_lines.py +296 -0
  11. beratools/core/algo_vertex_optimization.py +451 -0
  12. beratools/core/constants.py +56 -0
  13. beratools/core/logger.py +92 -0
  14. beratools/core/tool_base.py +126 -0
  15. beratools/gui/__init__.py +11 -0
  16. beratools/gui/assets/BERALogo.png +0 -0
  17. beratools/gui/assets/beratools.json +471 -0
  18. beratools/gui/assets/closed.gif +0 -0
  19. beratools/gui/assets/closed.png +0 -0
  20. beratools/gui/assets/gui.json +8 -0
  21. beratools/gui/assets/open.gif +0 -0
  22. beratools/gui/assets/open.png +0 -0
  23. beratools/gui/assets/tool.gif +0 -0
  24. beratools/gui/assets/tool.png +0 -0
  25. beratools/gui/bt_data.py +485 -0
  26. beratools/gui/bt_gui_main.py +700 -0
  27. beratools/gui/main.py +27 -0
  28. beratools/gui/tool_widgets.py +730 -0
  29. beratools/tools/__init__.py +7 -0
  30. beratools/tools/canopy_threshold_relative.py +769 -0
  31. beratools/tools/centerline.py +127 -0
  32. beratools/tools/check_seed_line.py +48 -0
  33. beratools/tools/common.py +622 -0
  34. beratools/tools/line_footprint_absolute.py +203 -0
  35. beratools/tools/line_footprint_fixed.py +480 -0
  36. beratools/tools/line_footprint_functions.py +884 -0
  37. beratools/tools/line_footprint_relative.py +75 -0
  38. beratools/tools/tool_template.py +72 -0
  39. beratools/tools/vertex_optimization.py +57 -0
  40. beratools-0.1.0.dist-info/METADATA +134 -0
  41. beratools-0.1.0.dist-info/RECORD +44 -0
  42. beratools-0.1.0.dist-info/WHEEL +4 -0
  43. beratools-0.1.0.dist-info/entry_points.txt +2 -0
  44. beratools-0.1.0.dist-info/licenses/LICENSE +22 -0
@@ -0,0 +1,203 @@
1
+ """
2
+ Copyright (C) 2025 Applied Geospatial Research Group.
3
+
4
+ This script is licensed under the GNU General Public License v3.0.
5
+ See <https://gnu.org/licenses/gpl-3.0> for full license details.
6
+
7
+ Author: Richard Zeng
8
+
9
+ Description:
10
+ This script is part of the BERA Tools.
11
+ Webpage: https://github.com/appliedgrg/beratools
12
+
13
+ The purpose of this script is to provide main interface for canopy footprint tool.
14
+ The tool is used to generate the footprint of a line based on absolute threshold.
15
+ """
16
+
17
+ import time
18
+
19
+ import geopandas as gpd
20
+ import numpy as np
21
+ import pandas as pd
22
+ import rasterio
23
+ import shapely
24
+
25
+ import beratools.core.algo_centerline as algo_cl
26
+ import beratools.core.algo_common as algo_common
27
+ import beratools.core.algo_cost as algo_cost
28
+ import beratools.core.constants as bt_const
29
+ import beratools.core.tool_base as bt_base
30
+ import beratools.tools.common as bt_common
31
+
32
+
33
class FootprintAbsolute:
    """
    Compute the footprint of a single line based on an absolute threshold.

    The workflow implemented in compute(): clip the CHM raster around the
    line, derive cost/canopy rasters, rasterize a least-cost corridor
    between the line's two endpoints, threshold and morphologically clean
    it, then polygonize the result. Outputs are stored on the instance
    (footprint, corridor_poly_gpd, centerline) instead of being returned.
    """

    def __init__(
        self,
        line_seg,  # GeoDataFrame containing a single line feature
        in_chm,  # canopy height model raster (path or dataset accepted by clip_raster)
        corridor_thresh,  # absolute corridor threshold; coerced to float in compute()
        max_ln_width,  # buffer distance used when clipping the raster
        exp_shk_cell,  # expand/shrink cell count for morphological cleanup
    ):
        self.line_seg = line_seg
        self.in_chm = in_chm
        self.corridor_thresh = corridor_thresh
        self.max_ln_width = max_ln_width
        self.exp_shk_cell = exp_shk_cell

        # Results populated by compute()
        self.footprint = None
        self.corridor_poly_gpd = None
        self.centerline = None

    def compute(self):
        """Generate line footprint, corridor polygon and centerline."""
        in_chm = self.in_chm
        corridor_thresh = self.corridor_thresh
        line_gpd = self.line_seg
        max_ln_width = self.max_ln_width
        exp_shk_cell = self.exp_shk_cell

        # Coerce the threshold to float; fall back to 3.0 on bad input or
        # negative values.
        try:
            corridor_thresh = float(corridor_thresh)
            if corridor_thresh < 0.0:
                corridor_thresh = 3.0
        except ValueError as e:
            print(f"process_single_line_segment: {e}")
            corridor_thresh = 3.0

        # Collect the line's vertices; only the first geometry is used, so
        # the input GeoDataFrame is assumed to hold a single feature.
        segment_list = []
        feat = self.line_seg.geometry[0]
        for coord in feat.coords:
            segment_list.append(coord)

        # Find origin and destination coordinates
        x1, y1 = segment_list[0][0], segment_list[0][1]
        x2, y2 = segment_list[-1][0], segment_list[-1][1]

        # Buffer around line and clip cost raster and canopy raster
        # TODO: deal with NODATA
        clip_cost, out_meta = bt_common.clip_raster(in_chm, feat, max_ln_width)
        out_transform = out_meta["transform"]
        cell_size_x = out_transform[0]
        # Affine "e" term is negative for north-up rasters; negate for size.
        cell_size_y = -out_transform[4]

        clip_cost, clip_canopy = algo_cost.cost_raster(clip_cost, out_meta)

        # Work out the corridor from both end of the centerline
        # Drop a leading band axis if the raster came back as (1, rows, cols).
        if len(clip_canopy.shape) > 2:
            clip_canopy = np.squeeze(clip_canopy, axis=0)

        # Convert endpoint world coordinates to raster (row, col) indices.
        transformer = rasterio.transform.AffineTransformer(out_transform)
        source = [transformer.rowcol(x1, y1)]
        destination = [transformer.rowcol(x2, y2)]

        # NOTE: corridor_thresh is re-bound here from a float threshold to
        # the resulting corridor raster (a pre-existing naming quirk).
        corridor_thresh = algo_common.corridor_raster(
            clip_cost,
            out_meta,
            source,
            destination,
            (cell_size_x, cell_size_y),
            corridor_thresh,
        )

        clean_raster = algo_common.morph_raster(corridor_thresh, clip_canopy, exp_shk_cell, cell_size_x)

        # create mask for non-polygon area
        msk = np.where(clean_raster == 1, True, False)
        # rasterio.features.shapes does not accept int64 arrays.
        if clean_raster.dtype == np.int64:
            clean_raster = clean_raster.astype(np.int32)

        # Process: ndarray to shapely Polygon
        # NOTE(review): only `import rasterio` is visible at module top;
        # confirm rasterio.features is importable without an explicit
        # `import rasterio.features`.
        out_polygon = rasterio.features.shapes(clean_raster, mask=msk, transform=out_transform)

        # create a shapely multipolygon
        multi_polygon = []
        for shp, value in out_polygon:
            multi_polygon.append(shapely.geometry.shape(shp))
        poly = shapely.geometry.MultiPolygon(multi_polygon)

        # create a pandas dataframe for the footprint
        footprint = gpd.GeoDataFrame(geometry=[poly], crs=self.line_seg.crs)

        # find contiguous corridor polygon for centerline
        corridor_poly_gpd = algo_cl.find_corridor_polygon(corridor_thresh, out_transform, line_gpd)
        centerline, status = algo_cl.find_centerline(corridor_poly_gpd.geometry.iloc[0], feat)

        self.footprint = footprint
        self.corridor_poly_gpd = corridor_poly_gpd
        self.centerline = centerline
131
+
132
+
133
def process_single_line(line_footprint):
    """
    Worker entry point: run the footprint computation on one object.

    Args:
        line_footprint: A FootprintAbsolute instance to process.

    Returns:
        The same instance, with its result attributes populated.

    """
    line_footprint.compute()
    return line_footprint
136
+
137
+
138
def generate_line_class_list(
    in_line,
    in_chm,
    corridor_thresh,
    max_ln_width,
    exp_shk_cell,
    in_layer=None,
):
    """
    Build one FootprintAbsolute worker object per input line.

    Args:
        in_line: Input line vector file.
        in_chm: Canopy height model raster.
        corridor_thresh: Absolute corridor threshold.
        max_ln_width: Maximum line width (buffer distance).
        exp_shk_cell: Expand/shrink cell count.
        in_layer: Optional layer name inside in_line.

    Returns:
        list: FootprintAbsolute instances, one per prepared line.

    """
    line_list = algo_common.prepare_lines_gdf(in_line, in_layer, proc_segments=False)
    return [
        FootprintAbsolute(line, in_chm, corridor_thresh, max_ln_width, exp_shk_cell)
        for line in line_list
    ]
153
+
154
+
155
def line_footprint_abs(
    in_line,
    in_chm,
    corridor_thresh,
    max_ln_width,
    exp_shk_cell,
    out_footprint,
    processes,
    verbose,
    in_layer=None,
    out_layer=None,
    parallel_mode=bt_const.ParallelMode.MULTIPROCESSING,
):
    """
    Generate absolute-threshold footprints for all input lines and save them.

    Args:
        in_line: Input line vector file.
        in_chm: Canopy height model raster.
        corridor_thresh: Absolute corridor threshold.
        max_ln_width: Maximum line width; coerced to float.
        exp_shk_cell: Expand/shrink cell count; coerced to int.
        out_footprint: Output vector file for the footprint polygons.
        processes: Number of worker processes.
        verbose: Verbosity flag forwarded to the multiprocessing helper.
        in_layer: Optional input layer name.
        out_layer: Optional output layer name.
        parallel_mode: Parallel execution mode (default: multiprocessing).

    """
    max_ln_width = float(max_ln_width)
    exp_shk_cell = int(exp_shk_cell)

    line_class_list = generate_line_class_list(
        in_line, in_chm, corridor_thresh, max_ln_width, exp_shk_cell, in_layer
    )

    feat_list = bt_base.execute_multiprocessing(
        process_single_line,
        line_class_list,
        "Line footprint",
        processes,
        parallel_mode,
        verbose=verbose,
    )

    # Fix: the original unconditionally ran pd.concat(footprint_list), which
    # raises an opaque "No objects to concatenate" ValueError when no
    # features were produced. Report and bail out instead.
    if not feat_list:
        print("line_footprint_abs: no footprints were generated; nothing saved.")
        return

    footprint_list = [item.footprint for item in feat_list]

    results = gpd.GeoDataFrame(pd.concat(footprint_list))
    results = results.reset_index(drop=True)
    results.to_file(out_footprint, layer=out_layer)
195
+
196
+
197
+ if __name__ == "__main__":
198
+ start_time = time.time()
199
+ print("Footprint processing started")
200
+
201
+ in_args, in_verbose = bt_common.check_arguments()
202
+ line_footprint_abs(**in_args.input, processes=int(in_args.processes), verbose=in_verbose)
203
+ print("Elapsed time: {}".format(time.time() - start_time))
@@ -0,0 +1,480 @@
1
+ """
2
+ Copyright (C) 2025 Applied Geospatial Research Group.
3
+
4
+ This script is licensed under the GNU General Public License v3.0.
5
+ See <https://gnu.org/licenses/gpl-3.0> for full license details.
6
+
7
+ Author: Richard Zeng, Maverick Fong
8
+
9
+ Description:
10
+ This script is part of the BERA Tools.
11
+ Webpage: https://github.com/appliedgrg/beratools
12
+
13
+ This file hosts the line_footprint_fixed tool.
14
+ """
15
+
16
+ import math
17
+ import time
18
+ from itertools import chain
19
+ from pathlib import Path
20
+
21
+ import geopandas as gpd
22
+ import pyogrio.errors
23
+ import numpy as np
24
+ import pandas as pd
25
+ import shapely.geometry as sh_geom
26
+ import shapely.ops as sh_ops
27
+ from shapely.ops import linemerge
28
+
29
+ import beratools.core.algo_common as algo_common
30
+ import beratools.core.constants as bt_const
31
+ import beratools.tools.common as bt_common
32
+ from beratools.core.algo_line_grouping import LineGrouping
33
+ from beratools.core.algo_merge_lines import MergeLines, custom_line_merge
34
+ from beratools.core.algo_split_with_lines import LineSplitter
35
+ from beratools.core.tool_base import execute_multiprocessing
36
+
37
+ FP_FIXED_WIDTH_DEFAULT = 5.0
38
+
39
+
40
def prepare_line_args(line_gdf, poly_gdf, n_samples, offset, width_percentile):
    """
    Generate per-line argument lists for the width-calculation workers.

    Args:
        line_gdf: GeoDataFrame of (merged) centerlines.
        poly_gdf: GeoDataFrame of footprint polygons to measure widths against.
        n_samples: Number of sample points generated along each line.
        offset: Half-length of the perpendicular sampling lines.
        width_percentile: Percentile later used for the representative width.

    Returns:
        list: One [row, inter_poly, n_samples, offset, width_percentile]
        entry per valid line, where inter_poly contains the polygons whose
        bounding boxes intersect the line (restricted to the line's group
        when a group column is present).

    """
    spatial_index = poly_gdf.sindex
    line_args = []

    for idx in line_gdf.index:
        row = line_gdf.loc[[idx]]
        line = row.geometry.iloc[0]

        # Skip rows where geometry is None (print the row for diagnostics)
        if line is None:
            print(row)
            continue

        # Candidate polygons by bounding-box query, then narrow to the
        # line's own group when grouping information is available.
        inter_poly = poly_gdf.loc[spatial_index.query(line)]
        if bt_const.BT_GROUP in inter_poly.columns:
            inter_poly = inter_poly[inter_poly[bt_const.BT_GROUP] == row[bt_const.BT_GROUP].values[0]]

        # Fix: the original wrapped this append in try/except, but
        # list.append cannot raise here, so the handler was dead code.
        line_args.append([row, inter_poly, n_samples, offset, width_percentile])

    return line_args
81
+
82
+
83
# Calculating Line Widths
def generate_sample_points(line, n_samples=10):
    """
    Return the vertices of a line as a list of shapely Points.

    Note: despite the parameter name, points are taken from the line's own
    coordinates; n_samples is kept for interface compatibility.

    Args:
        line (LineString): The line whose coordinates are extracted.
        n_samples (int): Unused placeholder (default is 10).

    Returns:
        list: List of shapely Point objects.

    """
    try:
        coords = line.coords
    except Exception as err:  # multi-part input: merge, then flatten parts
        print(err)
        line = sh_ops.linemerge(line)
        nested = sh_geom.mapping(line)["coordinates"]
        coords = list(chain(*nested))

    return [sh_geom.Point(c) for c in coords]
106
+
107
+
108
def process_single_line(line_arg):
    """
    Compute representative widths for one line against its footprint polygons.

    Args:
        line_arg: [row, inter_poly, n_samples, offset, width_percentile] as
            produced by prepare_line_args().

    Returns:
        The input row with 'avg_width', 'max_width', 'geometry',
        'perp_lines' and 'perp_lines_original' filled in, or None when the
        width calculation raises.

    """
    row = line_arg[0]
    inter_poly = line_arg[1]
    n_samples = line_arg[2]
    offset = line_arg[3]
    width_percentile = line_arg[4]

    # TODO: deal with case when inter_poly is empty
    try:
        widths, line, perp_lines, perp_lines_original = calculate_average_width(
            row.iloc[0].geometry, inter_poly, offset, n_samples
        )
    except Exception as e:
        print(e)
        return None

    # Drop zero widths before computing percentiles. Vectorized fix: the
    # original looped math.isclose(i, 0.0), which with default tolerances
    # (abs_tol=0.0) is exactly an equality test against 0.0.
    widths = widths[widths != 0.0]

    # Fall back to the fixed default when no valid widths remain.
    q3_width = FP_FIXED_WIDTH_DEFAULT
    q4_width = FP_FIXED_WIDTH_DEFAULT
    try:
        if len(widths) > 0:
            q3_width = np.percentile(widths, width_percentile)  # configurable, e.g. 75th
            q4_width = np.percentile(widths, 90)  # fixed 90th for max_width
    except Exception as e:
        print(e)

    # Store the percentile widths as new attributes
    row["avg_width"] = q3_width
    row["max_width"] = q4_width

    # Replace the geometry with the simplified line used for sampling.
    row["geometry"] = line
    try:
        row["perp_lines"] = perp_lines
        row["perp_lines_original"] = perp_lines_original
    except Exception as e:
        print(e)

    return row
151
+
152
+
153
def generate_fixed_width_footprint(line_gdf, max_width=False):
    """
    Buffer each line into a footprint polygon using its width attributes.

    Missing (NaN) or zero 'avg_width'/'max_width' values are replaced
    in-place on line_gdf by the respective column mean. The returned frame
    is a copy taken before that fill, with its geometry replaced by the
    buffered polygons.

    Args:
        line_gdf: GeoDataFrame of lines carrying 'avg_width'/'max_width'.
        max_width: When True buffer by 120% of 'max_width', otherwise by
            'avg_width'.

    """
    # Copy first so the output keeps the original attribute values.
    buffer_gdf = line_gdf.copy(deep=True)

    # Backfill missing or zero widths with the column mean, column by
    # column (.loc avoids chained-assignment warnings).
    for col in ("avg_width", "max_width"):
        mean_val = line_gdf[col].mean()
        line_gdf.loc[line_gdf[col].isna(), col] = mean_val
        line_gdf.loc[line_gdf[col] == 0.0, col] = mean_val

    if max_width:
        print("Using quantile 90% + 20% width")

        def _buffered(row):
            if row.geometry is None:
                return None
            return row.geometry.buffer(row.max_width * 1.2 / 2)
    else:
        print("Using quantile 75% width")

        def _buffered(row):
            if row.geometry is None:
                return None
            return row.geometry.buffer(row.avg_width / 2)

    buffer_gdf["geometry"] = line_gdf.apply(_buffered, axis=1)
    return buffer_gdf
192
+
193
+
194
def calculate_average_width(line, in_poly, offset, n_samples):
    """
    Calculate the average width of a polygon perpendicular to the given line.

    For each interior vertex of the simplified line, a perpendicular segment
    of half-length `offset` is generated and intersected with the candidate
    polygons; the longest intersection per vertex is recorded as the width
    at that vertex.

    Returns a 4-tuple: (widths array, simplified line, MultiLineString of
    clipped perpendiculars, MultiLineString of original perpendiculars).
    """
    # Smooth the line
    # NOTE(review): if this block raises, the names used below
    # (sample_points_pairs, widths, perp_lines, ...) are never bound and
    # the function fails with a NameError — TODO confirm intended.
    try:
        line = line.simplify(0.1)

        valid_widths = 0
        sample_points = generate_sample_points(line, n_samples=n_samples)
        # Sliding triplets of consecutive vertices (prev, current, next).
        sample_points_pairs = list(zip(sample_points[:-2], sample_points[1:-1], sample_points[2:]))
        widths = np.zeros(len(sample_points_pairs))
        perp_lines = []
        perp_lines_original = []
    except Exception as e:
        print(e)

    try:
        for i, points in enumerate(sample_points_pairs):
            try:
                perp_line = algo_common.generate_perpendicular_line_precise(points, offset=offset)
                perp_lines_original.append(perp_line)
            except Exception as e:
                print(f"Failed to generate perpendicular at index {i}: {e}")
                # NOTE(review): a None placeholder is appended here and later
                # passed to MultiLineString — verify that constructor accepts it.
                perp_lines_original.append(None)
                continue

            try:
                # Bounding-box pre-filter before the exact intersection.
                polygon_intersect = in_poly.iloc[in_poly.sindex.query(perp_line)]
                intersections = polygon_intersect.intersection(perp_line)
            except Exception as e:
                print(f"Failed intersection at index {i}: {e}")
                intersections = []

            # Flatten each intersection result into plain line segments.
            line_list = []
            for inter in intersections:
                if inter.is_empty:
                    continue

                if isinstance(inter, sh_geom.GeometryCollection):
                    for item in inter.geoms:
                        if isinstance(item, sh_geom.LineString):
                            line_list.append(item)
                elif isinstance(inter, sh_geom.MultiLineString):
                    line_list += list(inter.geoms)
                else:
                    line_list.append(inter)

            perp_lines += line_list

            # NOTE(review): line_list is always a Python list, so this
            # isinstance check can never be true — likely leftover debug code.
            if isinstance(line_list, sh_geom.GeometryCollection):
                print("Found 2: GeometryCollection")

            # Width at this vertex = longest intersection segment.
            for item in line_list:
                widths[i] = max(widths[i], item.length)
                valid_widths += 1

            # Todo: check missing perpendicular lines
            # if len(perp_lines_original) < len(sample_points_pairs):
            #     print(f"Missing perpendicular at index {i}")

    except Exception as e:
        print(f"loop: {e}")

    return (
        widths,
        line,
        sh_geom.MultiLineString(perp_lines),
        sh_geom.MultiLineString(perp_lines_original),
    )
+ )
262
+
263
+
264
def line_footprint_fixed(
    in_line,
    in_footprint,
    n_samples,
    offset,
    max_width,
    out_footprint,
    processes,
    verbose,
    in_layer=None,
    in_layer_lc_path="least_cost_path",
    in_layer_fp=None,
    out_layer=None,
    merge_group=True,
    width_percentile=75,
    parallel_mode=bt_const.ParallelMode.MULTIPROCESSING,
    trim_output=True,
):
    """
    Produce fixed-width line footprints from centerlines and raw footprints.

    Pipeline: read lines (and optionally a least-cost-path layer), group /
    merge / split them, measure per-line widths against the input footprint
    polygons in parallel, buffer the lines by percentile widths, optionally
    trim the result, and write all layers to out_footprint plus an "_aux"
    GeoPackage.

    Args:
        in_line: Input vector file with centerlines.
        in_footprint: Input vector file with footprint polygons.
        n_samples: Sample points per line (coerced to int).
        offset: Perpendicular half-length for width sampling (coerced to float).
        max_width: When truthy, buffer by 120% of the 90th-percentile width.
        out_footprint: Output GeoPackage path (multiple layers are written).
        processes: Number of worker processes.
        verbose: Verbosity flag (currently unused in the body).
        in_layer: Layer name for the centerlines in in_line.
        in_layer_lc_path: Layer name of the least-cost-path lines.
        in_layer_fp: Layer name of the footprints in in_footprint.
        out_layer: Unused here; kept for interface compatibility.
        merge_group: Merge lines by group (True) or split at intersections.
        width_percentile: Percentile used for 'avg_width'.
        parallel_mode: Parallel execution mode.
        trim_output: Whether to trim lines/footprints before saving.

    """
    n_samples = int(n_samples)
    offset = float(offset)
    width_percentile = int(width_percentile)

    # NOTE(review): redundant re-import; `time` is already imported at
    # module level.
    import time
    print(f"[{time.time()}] Starting line_footprint_fixed")

    # TODO: refactor this code for better line quality check
    print("Step: Reading input files")
    line_gdf = gpd.read_file(in_line, layer=in_layer)
    # Assign a unique group id per line when none is provided.
    if bt_const.BT_GROUP not in line_gdf.columns:
        line_gdf[bt_const.BT_GROUP] = range(1, len(line_gdf) + 1)

    # The least-cost-path layer is optional; fall back gracefully.
    use_least_cost_path = True
    try:
        print("Step: Reading least cost path layer")
        lc_path_gdf = gpd.read_file(in_line, layer=in_layer_lc_path)
    except (ValueError, OSError, pyogrio.errors.DataLayerError):
        print(f"Layer '{in_layer_lc_path}' not found in {in_line}, skipping least cost path logic.")
        use_least_cost_path = False

    print(f"[{time.time()}] Finished reading input files")

    if not merge_group:
        print("Step: Merging lines")
        line_gdf["geometry"] = line_gdf.geometry.apply(custom_line_merge)
        if use_least_cost_path:
            lc_path_gdf["geometry"] = lc_path_gdf.geometry.apply(custom_line_merge)

    print("Step: Cleaning line geometries")
    line_gdf = algo_common.clean_line_geometries(line_gdf)
    print(f"[{time.time()}] Finished cleaning line geometries")

    # read footprints and remove holes
    print("Step: Reading footprint polygons")
    poly_gdf = gpd.read_file(in_footprint, layer=in_layer_fp)
    poly_gdf["geometry"] = poly_gdf["geometry"].apply(algo_common.remove_holes)
    print(f"[{time.time()}] Finished reading footprint polygons")

    # merge group and/or split lines at intersections
    merged_line_gdf = line_gdf.copy(deep=True)
    if merge_group:
        print("Step: Running line grouping and merging")
        lg = LineGrouping(line_gdf, merge_group)
        lg.run_grouping()
        merged_line_gdf = lg.run_line_merge()
    else:
        print("Step: Running line grouping, merging, and splitting")
        try:
            lg = LineGrouping(line_gdf, not merge_group)
            lg.run_grouping()
            merged_line_gdf = lg.run_line_merge()
            splitter = LineSplitter(merged_line_gdf)
            splitter.process()
            splitter.save_to_geopackage(
                out_footprint,
                line_layer="split_centerline",
                intersection_layer="inter_points",
                invalid_layer="invalid_splits",
            )

            # least cost path merge and split
            if use_least_cost_path:
                print("Step: Running least cost path grouping, merging, and splitting")
                lg_leastcost = LineGrouping(lc_path_gdf, not merge_group)
                lg_leastcost.run_grouping()
                merged_lc_path_gdf = lg_leastcost.run_line_merge()
                splitter_leastcost = LineSplitter(merged_lc_path_gdf)
                # Reuse the centerline intersections so both layers split
                # at the same points.
                splitter_leastcost.process(splitter.intersection_gdf)

                splitter_leastcost.save_to_geopackage(
                    out_footprint,
                    line_layer="split_leastcost",
                )

            # Re-group the split lines; this lg is also used for trimming.
            lg = LineGrouping(splitter.split_lines_gdf, merge_group)
            lg.run_grouping()
            merged_line_gdf = lg.run_line_merge()
        except ValueError as e:
            print(f"Exception: line_footprint_fixed: {e}")

    print(f"[{time.time()}] Finished merging and splitting lines")

    # save original merged lines
    print("Step: Saving merged lines")
    merged_line_gdf.to_file(out_footprint, layer="merged_lines_original")

    # prepare line arguments
    print("Step: Preparing line arguments for multiprocessing")
    line_args = prepare_line_args(merged_line_gdf, poly_gdf, n_samples, offset, width_percentile)
    print(f"[{time.time()}] Finished preparing line arguments")

    print("Step: Running multiprocessing for fixed footprint calculation")
    out_lines = execute_multiprocessing(
        process_single_line, line_args, "Fixed footprint", processes, mode=parallel_mode
    )
    line_attr = pd.concat(out_lines)
    print(f"[{time.time()}] Finished multiprocessing")

    # Ensure BT_GROUP is present in line_attr
    if bt_const.BT_GROUP not in line_attr.columns:
        raise ValueError("BT_GROUP column is required in line_attr but is missing.")

    # update avg_width and max_width by max value of group
    if not merge_group:
        group_max = (
            line_attr.groupby(bt_const.BT_GROUP).agg({"avg_width": "max", "max_width": "max"}).reset_index()
        )

        # Merge the result back to the original dataframe based on 'group'
        line_attr = line_attr.merge(group_max, on=bt_const.BT_GROUP, suffixes=("", "_max"))

        # Overwrite the original columns directly with the max values
        line_attr["avg_width"] = line_attr["avg_width_max"]
        line_attr["max_width"] = line_attr["max_width_max"]

        # Drop the temporary max columns
        line_attr.drop(columns=["avg_width_max", "max_width_max"], inplace=True)

    print(f"[{time.time()}] Finished updating widths")

    # create fixed width footprint (always assign buffer_gdf)
    print("Step: Generating fixed width footprints")
    buffer_gdf = generate_fixed_width_footprint(line_attr, max_width=max_width)
    print(f"[{time.time()}] Finished generating footprints")

    # reserve all layers for output
    perp_lines_gdf = buffer_gdf.copy(deep=True)
    perp_lines_original_gdf = buffer_gdf.copy(deep=True)

    # Save untrimmed fixed width footprint
    buffer_gdf = buffer_gdf.drop(columns=["perp_lines"])
    buffer_gdf = buffer_gdf.drop(columns=["perp_lines_original"])
    buffer_gdf = buffer_gdf.set_crs(perp_lines_gdf.crs, allow_override=True)
    buffer_gdf.reset_index(inplace=True, drop=True)

    print("Step: Saving untrimmed fixed width footprint")
    untrimmed_footprint = "untrimmed_footprint"
    buffer_gdf.to_file(out_footprint, layer=untrimmed_footprint)
    print(f"Untrimmed fixed width footprint saved as '{untrimmed_footprint}'")
    print(f"[{time.time()}] Finished saving untrimmed footprint")

    # trim lines and footprints
    if trim_output:
        print("Step: Trimming lines and footprints")
        lg.run_cleanup(buffer_gdf)
        # Ensure only polygons are saved in clean_footprint
        def ensure_polygons(gdf, buffer_width=0.01):
            # Convert any stray line geometries to thin polygons, then keep
            # only polygonal geometry types.
            gdf['geometry'] = gdf['geometry'].apply(
                lambda geom: geom.buffer(buffer_width) if geom.geom_type in ['LineString', 'MultiLineString'] else geom
            )
            gdf = gdf[gdf.geometry.type.isin(['Polygon', 'MultiPolygon'])]
            return gdf
        # Patch: after trimming, ensure polygons in clean_footprint layer
        if hasattr(lg, "merged_lines_trimmed") and lg.merged_lines_trimmed is not None:
            lg.merged_lines_trimmed = ensure_polygons(lg.merged_lines_trimmed)
        print("Step: Saving trimmed outputs")
        lg.save_file(out_footprint)
        print(f"[{time.time()}] Finished trimming")
    else:
        print("Skipping line and footprint trimming per user option.")

    # perpendicular lines
    layer = "perp_lines"
    out_footprint = Path(out_footprint)
    # Auxiliary layers go to a sibling "<name>_aux.gpkg" file.
    out_aux_gpkg = out_footprint.with_stem(out_footprint.stem + "_aux").with_suffix(".gpkg")
    print("Step: Saving auxiliary outputs")
    perp_lines_gdf = perp_lines_gdf.set_geometry("perp_lines")
    perp_lines_gdf = perp_lines_gdf.drop(columns=["perp_lines_original"])
    perp_lines_gdf = perp_lines_gdf.drop(columns=["geometry"])
    perp_lines_gdf = perp_lines_gdf.set_crs(buffer_gdf.crs, allow_override=True)
    perp_lines_gdf.to_file(out_aux_gpkg.as_posix(), layer=layer)

    layer = "perp_lines_original"
    perp_lines_original_gdf = perp_lines_original_gdf.set_geometry("perp_lines_original")
    perp_lines_original_gdf = perp_lines_original_gdf.drop(columns=["perp_lines"])
    perp_lines_original_gdf = perp_lines_original_gdf.drop(columns=["geometry"])
    perp_lines_original_gdf = perp_lines_original_gdf.set_crs(buffer_gdf.crs, allow_override=True)
    perp_lines_original_gdf.to_file(out_aux_gpkg.as_posix(), layer=layer)

    layer = "centerline_simplified"
    # Drop perp_lines_original column if present to avoid export warnings
    if "perp_lines_original" in line_attr.columns:
        line_attr = line_attr.drop(columns=["perp_lines_original"])
    line_attr = line_attr.drop(columns="perp_lines")
    line_attr.to_file(out_aux_gpkg.as_posix(), layer=layer)

    # save footprints without holes
    poly_gdf.to_file(out_aux_gpkg.as_posix(), layer="footprint_no_holes")

    print(f"[{time.time()}] Finished saving auxiliary outputs")
    print("Step: Finished fixed width footprint tool")
474
+
475
+
476
+ if __name__ == "__main__":
477
+ in_args, in_verbose = bt_common.check_arguments()
478
+ start_time = time.time()
479
+ line_footprint_fixed(**in_args.input, processes=int(in_args.processes), verbose=in_verbose)
480
+ print("Elapsed time: {}".format(time.time() - start_time))