BERATools 0.2.0-py3-none-any.whl → 0.2.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- beratools/__init__.py +1 -7
- beratools/core/algo_centerline.py +491 -351
- beratools/core/algo_common.py +497 -0
- beratools/core/algo_cost.py +192 -0
- beratools/core/{dijkstra_algorithm.py → algo_dijkstra.py} +503 -460
- beratools/core/algo_footprint_rel.py +577 -0
- beratools/core/algo_line_grouping.py +944 -0
- beratools/core/algo_merge_lines.py +214 -0
- beratools/core/algo_split_with_lines.py +304 -0
- beratools/core/algo_tiler.py +428 -0
- beratools/core/algo_vertex_optimization.py +469 -0
- beratools/core/constants.py +52 -86
- beratools/core/logger.py +76 -85
- beratools/core/tool_base.py +196 -133
- beratools/gui/__init__.py +11 -15
- beratools/gui/{beratools.json → assets/beratools.json} +2185 -2300
- beratools/gui/batch_processing_dlg.py +513 -463
- beratools/gui/bt_data.py +481 -487
- beratools/gui/bt_gui_main.py +710 -691
- beratools/gui/main.py +26 -0
- beratools/gui/map_window.py +162 -146
- beratools/gui/tool_widgets.py +725 -493
- beratools/tools/Beratools_r_script.r +1120 -1120
- beratools/tools/Ht_metrics.py +116 -116
- beratools/tools/__init__.py +7 -7
- beratools/tools/batch_processing.py +136 -132
- beratools/tools/canopy_threshold_relative.py +672 -670
- beratools/tools/canopycostraster.py +222 -222
- beratools/tools/centerline.py +136 -176
- beratools/tools/common.py +857 -885
- beratools/tools/fl_regen_csf.py +428 -428
- beratools/tools/forest_line_attributes.py +408 -408
- beratools/tools/line_footprint_absolute.py +213 -363
- beratools/tools/line_footprint_fixed.py +436 -282
- beratools/tools/line_footprint_functions.py +733 -720
- beratools/tools/line_footprint_relative.py +73 -64
- beratools/tools/line_grouping.py +45 -0
- beratools/tools/ln_relative_metrics.py +615 -615
- beratools/tools/r_cal_lpi_elai.r +24 -24
- beratools/tools/r_generate_pd_focalraster.r +100 -100
- beratools/tools/r_interface.py +79 -79
- beratools/tools/r_point_density.r +8 -8
- beratools/tools/rpy_chm2trees.py +86 -86
- beratools/tools/rpy_dsm_chm_by.py +81 -81
- beratools/tools/rpy_dtm_by.py +63 -63
- beratools/tools/rpy_find_cellsize.py +43 -43
- beratools/tools/rpy_gnd_csf.py +74 -74
- beratools/tools/rpy_hummock_hollow.py +85 -85
- beratools/tools/rpy_hummock_hollow_raster.py +71 -71
- beratools/tools/rpy_las_info.py +51 -51
- beratools/tools/rpy_laz2las.py +40 -40
- beratools/tools/rpy_lpi_elai_lascat.py +466 -466
- beratools/tools/rpy_normalized_lidar_by.py +56 -56
- beratools/tools/rpy_percent_above_dbh.py +80 -80
- beratools/tools/rpy_points2trees.py +88 -88
- beratools/tools/rpy_vegcoverage.py +94 -94
- beratools/tools/tiler.py +48 -206
- beratools/tools/tool_template.py +69 -54
- beratools/tools/vertex_optimization.py +61 -620
- beratools/tools/zonal_threshold.py +144 -144
- beratools-0.2.2.dist-info/METADATA +108 -0
- beratools-0.2.2.dist-info/RECORD +74 -0
- {beratools-0.2.0.dist-info → beratools-0.2.2.dist-info}/WHEEL +1 -1
- {beratools-0.2.0.dist-info → beratools-0.2.2.dist-info}/licenses/LICENSE +22 -22
- beratools/gui/cli.py +0 -18
- beratools/gui/gui.json +0 -8
- beratools/gui_tk/ASCII Banners.txt +0 -248
- beratools/gui_tk/__init__.py +0 -20
- beratools/gui_tk/beratools_main.py +0 -515
- beratools/gui_tk/bt_widgets.py +0 -442
- beratools/gui_tk/cli.py +0 -18
- beratools/gui_tk/img/BERALogo.png +0 -0
- beratools/gui_tk/img/closed.gif +0 -0
- beratools/gui_tk/img/closed.png +0 -0
- beratools/gui_tk/img/open.gif +0 -0
- beratools/gui_tk/img/open.png +0 -0
- beratools/gui_tk/img/tool.gif +0 -0
- beratools/gui_tk/img/tool.png +0 -0
- beratools/gui_tk/main.py +0 -14
- beratools/gui_tk/map_window.py +0 -144
- beratools/gui_tk/runner.py +0 -1481
- beratools/gui_tk/tooltip.py +0 -55
- beratools/third_party/pyqtlet2/__init__.py +0 -9
- beratools/third_party/pyqtlet2/leaflet/__init__.py +0 -26
- beratools/third_party/pyqtlet2/leaflet/control/__init__.py +0 -6
- beratools/third_party/pyqtlet2/leaflet/control/control.py +0 -59
- beratools/third_party/pyqtlet2/leaflet/control/draw.py +0 -52
- beratools/third_party/pyqtlet2/leaflet/control/layers.py +0 -20
- beratools/third_party/pyqtlet2/leaflet/core/Parser.py +0 -24
- beratools/third_party/pyqtlet2/leaflet/core/__init__.py +0 -2
- beratools/third_party/pyqtlet2/leaflet/core/evented.py +0 -180
- beratools/third_party/pyqtlet2/leaflet/layer/__init__.py +0 -5
- beratools/third_party/pyqtlet2/leaflet/layer/featuregroup.py +0 -34
- beratools/third_party/pyqtlet2/leaflet/layer/icon/__init__.py +0 -1
- beratools/third_party/pyqtlet2/leaflet/layer/icon/icon.py +0 -30
- beratools/third_party/pyqtlet2/leaflet/layer/imageoverlay.py +0 -18
- beratools/third_party/pyqtlet2/leaflet/layer/layer.py +0 -105
- beratools/third_party/pyqtlet2/leaflet/layer/layergroup.py +0 -45
- beratools/third_party/pyqtlet2/leaflet/layer/marker/__init__.py +0 -1
- beratools/third_party/pyqtlet2/leaflet/layer/marker/marker.py +0 -91
- beratools/third_party/pyqtlet2/leaflet/layer/tile/__init__.py +0 -2
- beratools/third_party/pyqtlet2/leaflet/layer/tile/gridlayer.py +0 -4
- beratools/third_party/pyqtlet2/leaflet/layer/tile/tilelayer.py +0 -16
- beratools/third_party/pyqtlet2/leaflet/layer/vector/__init__.py +0 -5
- beratools/third_party/pyqtlet2/leaflet/layer/vector/circle.py +0 -15
- beratools/third_party/pyqtlet2/leaflet/layer/vector/circlemarker.py +0 -18
- beratools/third_party/pyqtlet2/leaflet/layer/vector/path.py +0 -5
- beratools/third_party/pyqtlet2/leaflet/layer/vector/polygon.py +0 -14
- beratools/third_party/pyqtlet2/leaflet/layer/vector/polyline.py +0 -18
- beratools/third_party/pyqtlet2/leaflet/layer/vector/rectangle.py +0 -14
- beratools/third_party/pyqtlet2/leaflet/map/__init__.py +0 -1
- beratools/third_party/pyqtlet2/leaflet/map/map.py +0 -220
- beratools/third_party/pyqtlet2/mapwidget.py +0 -45
- beratools/third_party/pyqtlet2/web/custom.js +0 -43
- beratools/third_party/pyqtlet2/web/map.html +0 -23
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/layers-2x.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/layers.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/marker-icon-2x.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/marker-icon.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/marker-shadow.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/leaflet.css +0 -656
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/leaflet.js +0 -6
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.codeclimate.yml +0 -14
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.editorconfig +0 -4
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.gitattributes +0 -22
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.travis.yml +0 -43
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/LICENSE +0 -20
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/layers-2x.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/layers.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/marker-icon-2x.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/marker-icon.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/marker-shadow.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/spritesheet-2x.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/spritesheet.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/spritesheet.svg +0 -156
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/leaflet.draw.css +0 -10
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/leaflet.draw.js +0 -10
- beratools/third_party/pyqtlet2/web/modules/leaflet_rotatedMarker_020/LICENSE +0 -22
- beratools/third_party/pyqtlet2/web/modules/leaflet_rotatedMarker_020/leaflet.rotatedMarker.js +0 -57
- beratools/tools/forest_line_ecosite.py +0 -216
- beratools/tools/lapis_all.py +0 -103
- beratools/tools/least_cost_path_from_chm.py +0 -152
- beratools-0.2.0.dist-info/METADATA +0 -63
- beratools-0.2.0.dist-info/RECORD +0 -142
- /beratools/gui/{img → assets}/BERALogo.png +0 -0
- /beratools/gui/{img → assets}/closed.gif +0 -0
- /beratools/gui/{img → assets}/closed.png +0 -0
- /beratools/{gui_tk → gui/assets}/gui.json +0 -0
- /beratools/gui/{img → assets}/open.gif +0 -0
- /beratools/gui/{img → assets}/open.png +0 -0
- /beratools/gui/{img → assets}/tool.gif +0 -0
- /beratools/gui/{img → assets}/tool.png +0 -0
- {beratools-0.2.0.dist-info → beratools-0.2.2.dist-info}/entry_points.txt +0 -0
beratools/core/algo_common.py (new file)
@@ -0,0 +1,497 @@
+"""
+Copyright (C) 2025 Applied Geospatial Research Group.
+
+This script is licensed under the GNU General Public License v3.0.
+See <https://gnu.org/licenses/gpl-3.0> for full license details.
+
+Author: Richard Zeng
+
+Description:
+    This script is part of the BERA Tools.
+    Webpage: https://github.com/appliedgrg/beratools
+
+    The purpose of this script is to provide common algorithms
+    and utility functions/classes.
+"""
+import math
+import tempfile
+from pathlib import Path
+
+import geopandas as gpd
+import numpy as np
+import pyproj
+import rasterio
+import shapely
+import shapely.affinity as sh_aff
+import shapely.geometry as sh_geom
+import shapely.ops as sh_ops
+import skimage.graph as sk_graph
+from osgeo import gdal
+from scipy import ndimage
+
+import beratools.core.algo_cost as algo_cost
+import beratools.core.constants as bt_const
+
+DISTANCE_THRESHOLD = 2  # 1 meter for intersection neighborhood
+
+def process_single_item(cls_obj):
+    """
+    Process a class object for universal multiprocessing.
+
+    Args:
+        cls_obj: Class object to be processed
+
+    Returns:
+        cls_obj: Class object after processing
+
+    """
+    cls_obj.compute()
+    return cls_obj
+
+def read_geospatial_file(file_path, layer=None):
+    """
+    Read a geospatial file, clean the geometries and return a GeoDataFrame.
+
+    Args:
+        file_path (str): The path to the geospatial file (e.g., .shp, .gpkg).
+        layer (str, optional): The specific layer to read if the file is
+            multi-layered (e.g., GeoPackage).
+
+    Returns:
+        GeoDataFrame: The cleaned GeoDataFrame containing the data from the file
+            with valid geometries only.
+        None: If there is an error reading the file or layer.
+
+    """
+    try:
+        if layer is None:
+            # Read the file without specifying a layer
+            gdf = gpd.read_file(file_path)
+        else:
+            # Read the file with the specified layer
+            gdf = gpd.read_file(file_path, layer=layer)
+
+        # Clean the geometries in the GeoDataFrame
+        gdf = clean_geometries(gdf)
+        gdf["BT_UID"] = range(len(gdf))  # assign temporary UID
+        return gdf
+
+    except Exception as e:
+        print(f"Error reading file {file_path}: {e}")
+        return None
+
+def has_multilinestring(gdf):
+    """Check if any geometry is a MultiLineString."""
+    # Filter out None values (invalid geometries) from the GeoDataFrame
+    valid_geometries = gdf.geometry
+    return any(isinstance(geom, sh_geom.MultiLineString) for geom in valid_geometries)
+
+def clean_geometries(gdf):
+    """
+    Remove rows with invalid, None, or empty geometries from the GeoDataFrame.
+
+    Args:
+        gdf (GeoDataFrame): The GeoDataFrame to clean.
+
+    Returns:
+        GeoDataFrame: The cleaned GeoDataFrame with valid, non-null,
+            and non-empty geometries.
+
+    """
+    # Remove rows where the geometry is invalid, None, or empty
+    gdf = gdf[gdf.geometry.is_valid]  # Only keep valid geometries
+    gdf = gdf[~gdf.geometry.isna()]  # Remove rows with None geometries
+    gdf = gdf[
+        gdf.geometry.apply(lambda geom: not geom.is_empty)
+    ]  # Remove empty geometries
+    return gdf
+
+def clean_line_geometries(line_gdf):
+    """Clean line geometries in the GeoDataFrame."""
+    if line_gdf is None:
+        return line_gdf
+
+    if line_gdf.empty:
+        return line_gdf
+
+    line_gdf = line_gdf[
+        ~line_gdf.geometry.isna()
+        & ~line_gdf.geometry.is_empty
+    ]
+    line_gdf = line_gdf[line_gdf.geometry.length > bt_const.SMALL_BUFFER]
+    return line_gdf
+
+def prepare_lines_gdf(file_path, layer=None, proc_segments=True):
+    """
+    Split lines at vertices or return original rows.
+
+    It handles for MultiLineString.
+
+    """
+    # Check if there are any MultiLineString geometries
+    gdf = read_geospatial_file(file_path, layer=layer)
+
+    # Explode MultiLineStrings into individual LineStrings
+    if has_multilinestring(gdf):
+        gdf = gdf.explode(index_parts=False)
+
+    split_gdf_list = []
+
+    for row in gdf.itertuples(index=False):  # Use itertuples to iterate
+        line = row.geometry  # Access geometry directly via the named tuple
+
+        # If proc_segment is True, split the line at vertices
+        if proc_segments:
+            coords = list(line.coords)  # Extract the list of coordinates (vertices)
+
+            # For each LineString, split the line into segments by the vertices
+            for i in range(len(coords) - 1):
+                segment = sh_geom.LineString([coords[i], coords[i + 1]])
+
+                # Copy over all non-geometry columns (excluding 'geometry')
+                attributes = {
+                    col: getattr(row, col) for col in gdf.columns if col != "geometry"
+                }
+                single_row_gdf = gpd.GeoDataFrame(
+                    [attributes], geometry=[segment], crs=gdf.crs
+                )
+                split_gdf_list.append(single_row_gdf)
+
+        else:
+            # If not proc_segment, add the original row as a single-row GeoDataFrame
+            attributes = {
+                col: getattr(row, col) for col in gdf.columns if col != "geometry"
+            }
+            single_row_gdf = gpd.GeoDataFrame(
+                [attributes], geometry=[line], crs=gdf.crs
+            )
+            split_gdf_list.append(single_row_gdf)
+
+    return split_gdf_list
+
+
+# TODO use function from common
+def morph_raster(corridor_thresh, canopy_raster, exp_shk_cell, cell_size_x):
+    # Process: Stamp CC and Max Line Width
+    temp1 = corridor_thresh + canopy_raster
+    raster_class = np.ma.where(temp1 == 0, 1, 0).data
+
+    if exp_shk_cell > 0 and cell_size_x < 1:
+        # Process: Expand
+        # FLM original Expand equivalent
+        cell_size = int(exp_shk_cell * 2 + 1)
+        expanded = ndimage.grey_dilation(
+            raster_class, size=(cell_size, cell_size)
+        )
+
+        # Process: Shrink
+        # FLM original Shrink equivalent
+        file_shrink = ndimage.grey_erosion(
+            expanded, size=(cell_size, cell_size)
+        )
+
+    else:
+        if bt_const.BT_DEBUGGING:
+            print("No Expand And Shrink cell performed.")
+        file_shrink = raster_class
+
+    # Process: Boundary Clean
+    clean_raster = ndimage.gaussian_filter(file_shrink, sigma=0, mode="nearest")
+
+    return clean_raster
+
+
+def closest_point_to_line(point, line):
+    if not line:
+        return None
+
+    pt = line.interpolate(line.project(sh_geom.Point(point)))
+    return pt
+
+
+def line_coord_list(line):
+    point_list = []
+    try:
+        for point in list(line.coords):  # loops through every point in a line
+            # loops through every vertex of every segment
+            if point:  # adds all the vertices to segment_list, which creates an array
+                point_list.append(sh_geom.Point(point[0], point[1]))
+    except Exception as e:
+        print(e)
+
+    return point_list
+
+
+def intersection_of_lines(line_1, line_2):
+    """
+    Only LINESTRING is dealt with for now.
+
+    Args:
+        line_1 :
+        line_2 :
+
+    Returns:
+        sh_geom.Point: intersection point
+
+    """
+    # intersection collection, may contain points and lines
+    inter = None
+    if line_1 and line_2:
+        inter = line_1.intersection(line_2)
+
+    # TODO: intersection may return GeometryCollection, LineString or MultiLineString
+    if inter:
+        if (
+            type(inter) is sh_geom.GeometryCollection
+            or type(inter) is sh_geom.LineString
+            or type(inter) is sh_geom.MultiLineString
+        ):
+            return inter.centroid
+
+    return inter
+
+def get_angle(line, vertex_index):
+    """
+    Calculate the angle of the first or last segment.
+
+    # TODO: use np.arctan2 instead of np.arctan
+
+    Args:
+        line: LineString
+        end_index: 0 or -1 of the line vertices. Consider the multipart.
+
+    """
+    pts = line_coord_list(line)
+
+    if vertex_index == 0:
+        pt_1 = pts[0]
+        pt_2 = pts[1]
+    elif vertex_index == -1:
+        pt_1 = pts[-1]
+        pt_2 = pts[-2]
+
+    delta_x = pt_2.x - pt_1.x
+    delta_y = pt_2.y - pt_1.y
+    if np.isclose(pt_1.x, pt_2.x):
+        angle = np.pi / 2
+        if delta_y > 0:
+            angle = np.pi / 2
+        elif delta_y < 0:
+            angle = -np.pi / 2
+    else:
+        angle = np.arctan(delta_y / delta_x)
+
+    # arctan is in range [-pi/2, pi/2], regulate all angles to [[-pi/2, 3*pi/2]]
+    if delta_x < 0:
+        angle += np.pi  # the second or fourth quadrant
+
+    return angle
+
+def points_are_close(pt1, pt2):
+    if (
+        abs(pt1.x - pt2.x) < DISTANCE_THRESHOLD
+        and abs(pt1.y - pt2.y) < DISTANCE_THRESHOLD
+    ):
+        return True
+    else:
+        return False
+
+def generate_raster_footprint(in_raster, latlon=True):
+    inter_img = "image_overview.tif"
+
+    src_ds = gdal.Open(in_raster)
+    width, height = src_ds.RasterXSize, src_ds.RasterYSize
+    src_crs = src_ds.GetSpatialRef().ExportToWkt()
+
+    geom = None
+    with tempfile.TemporaryDirectory() as tmp_folder:
+        if bt_const.BT_DEBUGGING:
+            print("Temporary folder: {}".format(tmp_folder))
+
+        if max(width, height) <= 1024:
+            inter_img = in_raster
+        else:
+            if width >= height:
+                options = gdal.TranslateOptions(width=1024, height=0)
+            else:
+                options = gdal.TranslateOptions(width=0, height=1024)
+
+            inter_img = Path(tmp_folder).joinpath(inter_img).as_posix()
+            gdal.Translate(inter_img, src_ds, options=options)
+
+        shapes = gdal.Footprint(None, inter_img, dstSRS=src_crs, format="GeoJSON")
+        target_feat = shapes["features"][0]
+        geom = sh_geom.shape(target_feat["geometry"])
+
+    if latlon:
+        out_crs = pyproj.CRS("EPSG:4326")
+        transformer = pyproj.Transformer.from_crs(pyproj.CRS(src_crs), out_crs)
+
+        geom = sh_ops.transform(transformer.transform, geom)
+
+    return geom
+
+def save_raster_to_file(in_raster_mem, in_meta, out_raster_file):
+    """
+    Save raster matrix in memory to file.
+
+    Args:
+        in_raster_mem: numpy raster
+        in_meta: input meta
+        out_raster_file: output raster file
+
+    """
+    with rasterio.open(out_raster_file, "w", **in_meta) as dest:
+        dest.write(in_raster_mem, indexes=1)
+
+def generate_perpendicular_line_precise(points, offset=20):
+    """
+    Generate a perpendicular line to the input line at the given point.
+
+    Args:
+        points (list[Point]): The points where to generate the perpendicular lines.
+        offset (float): The length of the perpendicular line.
+
+    Returns:
+        shapely.geometry.LineString: The generated perpendicular line.
+
+    """
+    # Compute the angle of the line
+    if len(points) not in [2, 3]:
+        return None
+
+    center = points[1]
+    perp_line = None
+
+    if len(points) == 2:
+        head = points[0]
+        tail = points[1]
+
+        delta_x = head.x - tail.x
+        delta_y = head.y - tail.y
+        angle = 0.0
+
+        if math.isclose(delta_x, 0.0):
+            angle = math.pi / 2
+        else:
+            angle = math.atan(delta_y / delta_x)
+
+        start = [center.x + offset / 2.0, center.y]
+        end = [center.x - offset / 2.0, center.y]
+        line = sh_geom.LineString([start, end])
+        perp_line = sh_aff.rotate(
+            line, angle + math.pi / 2.0, origin=center, use_radians=True
+        )
+    elif len(points) == 3:
+        head = points[0]
+        tail = points[2]
+
+        angle_1 = _line_angle(center, head)
+        angle_2 = _line_angle(center, tail)
+        angle_diff = (angle_2 - angle_1) / 2.0
+        head_new = sh_geom.Point(
+            center.x + offset / 2.0 * math.cos(angle_1),
+            center.y + offset / 2.0 * math.sin(angle_1),
+        )
+        if head.has_z:
+            head_new = shapely.force_3d(head_new)
+        try:
+            perp_seg_1 = sh_geom.LineString([center, head_new])
+            perp_seg_1 = sh_aff.rotate(
+                perp_seg_1, angle_diff, origin=center, use_radians=True
+            )
+            perp_seg_2 = sh_aff.rotate(
+                perp_seg_1, math.pi, origin=center, use_radians=True
+            )
+            perp_line = sh_geom.LineString(
+                [list(perp_seg_1.coords)[1], list(perp_seg_2.coords)[1]]
+            )
+        except Exception as e:
+            print(e)
+
+    return perp_line
+
+
+def _line_angle(point_1, point_2):
+    """
+    Calculate the angle of the line.
+
+    Args:
+        point_1, point_2: start and end points of shapely line
+
+    """
+    delta_y = point_2.y - point_1.y
+    delta_x = point_2.x - point_1.x
+
+    angle = math.atan2(delta_y, delta_x)
+    return angle
+
+def corridor_raster(
+    raster_clip, out_meta, source, destination, cell_size, corridor_threshold
+):
+    """
+    Calculate corridor raster.
+
+    Args:
+        raster_clip (raster):
+        out_meta : raster file meta
+        source (list of point tuple(s)): start point in row/col
+        destination (list of point tuple(s)): end point in row/col
+        cell_size (tuple): (cell_size_x, cell_size_y)
+        corridor_threshold (double)
+
+    Returns:
+        corridor raster
+
+    """
+    try:
+        # change all nan to BT_NODATA_COST for workaround
+        if len(raster_clip.shape) > 2:
+            raster_clip = np.squeeze(raster_clip, axis=0)
+
+        algo_cost.remove_nan_from_array_refactor(raster_clip)
+
+        # generate the cost raster to source point
+        mcp_source = sk_graph.MCP_Geometric(raster_clip, sampling=cell_size)
+        source_cost_acc = mcp_source.find_costs(source)[0]
+        del mcp_source
+
+        # # # generate the cost raster to destination point
+        mcp_dest = sk_graph.MCP_Geometric(raster_clip, sampling=cell_size)
+        dest_cost_acc = mcp_dest.find_costs(destination)[0]
+
+        # Generate corridor
+        corridor = source_cost_acc + dest_cost_acc
+        corridor = np.ma.masked_invalid(corridor)
+
+        # Calculate minimum value of corridor raster
+        if np.ma.min(corridor) is not None:
+            corr_min = float(np.ma.min(corridor))
+        else:
+            corr_min = 0.5
+
+        # normalize corridor raster by deducting corr_min
+        corridor_norm = corridor - corr_min
+        corridor_thresh_cl = np.ma.where(corridor_norm >= corridor_threshold, 1.0, 0.0)
+
+    except Exception as e:
+        print(e)
+        print("corridor_raster: Exception occurred.")
+        return None
+
+    return corridor_thresh_cl
+
+def remove_holes(geom):
+    if geom.geom_type == "Polygon":
+        if geom.interiors:
+            return sh_geom.Polygon(geom.exterior)
+        return geom
+    elif geom.geom_type == "MultiPolygon":
+        new_polygons = []
+        for polygon in geom.geoms:  # Iterate through MultiPolygon
+            if polygon.interiors:
+                new_polygons.append(sh_geom.Polygon(polygon.exterior))
+            else:
+                new_polygons.append(polygon)
+        return sh_geom.MultiPolygon(new_polygons)
+    return geom  # Return other geometry types as is
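
The following is an editorial usage sketch, not part of the diff: it shows one way the new algo_common helpers could be combined, reading and segmenting a line layer and then computing a corridor threshold raster from an existing cost raster. The file paths, the start/end cells, and the threshold value are hypothetical.

# Illustrative sketch only; paths, cells and threshold below are assumptions.
import rasterio

import beratools.core.algo_common as algo_common

# Read a line layer, drop invalid/empty geometries, split lines at vertices.
segments = algo_common.prepare_lines_gdf("seismic_lines.shp", proc_segments=True)

# Compute a corridor threshold raster between two cells of a pre-computed cost raster.
with rasterio.open("cost_raster.tif") as src:
    cost = src.read(1)
    meta = src.meta
    cell_size = (src.res[0], src.res[1])

corridor = algo_common.corridor_raster(
    cost, meta, [(10, 10)], [(200, 180)], cell_size, corridor_threshold=3.0
)
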
beratools/core/algo_cost.py (new file)
@@ -0,0 +1,192 @@
+"""
+Copyright (C) 2025 Applied Geospatial Research Group.
+
+This script is licensed under the GNU General Public License v3.0.
+See <https://gnu.org/licenses/gpl-3.0> for full license details.
+
+Author: Richard Zeng
+
+Description:
+    This script is part of the BERA Tools.
+    Webpage: https://github.com/appliedgrg/beratools
+
+    This file hosts cost raster related functions.
+"""
+import numpy as np
+import scipy
+
+import beratools.core.constants as bt_const
+
+
+def cost_raster(
+    in_raster,
+    meta,
+    tree_radius=2.5,
+    canopy_ht_threshold=2.5,
+    max_line_dist=2.5,
+    canopy_avoid=0.4,
+    cost_raster_exponent=1.5,
+):
+    """
+    General version of cost_raster.
+
+    To be merged later: variables and consistent nodata solution
+
+    """
+    if len(in_raster.shape) > 2:
+        in_raster = np.squeeze(in_raster, axis=0)
+
+    # regulate canopy_avoid between 0 and 1
+    avoidance = max(0, min(1, canopy_avoid))
+    cell_x, cell_y = meta["transform"][0], -meta["transform"][4]
+
+    kernel_radius = int(tree_radius / cell_x)
+    kernel = circle_kernel_refactor(2 * kernel_radius + 1, kernel_radius)
+    dyn_canopy_ndarray = dyn_np_cc_map(in_raster, canopy_ht_threshold)
+
+    cc_std, cc_mean = cost_focal_stats(dyn_canopy_ndarray, kernel)
+    cc_smooth = cost_norm_dist_transform(
+        dyn_canopy_ndarray, max_line_dist, [cell_x, cell_y]
+    )
+
+    cost_clip = dyn_np_cost_raster_refactor(
+        dyn_canopy_ndarray, cc_mean, cc_std, cc_smooth, avoidance, cost_raster_exponent
+    )
+
+    # TODO use nan or BT_DATA?
+    cost_clip[in_raster == bt_const.BT_NODATA] = np.nan
+    dyn_canopy_ndarray[in_raster == bt_const.BT_NODATA] = np.nan
+
+    return cost_clip, dyn_canopy_ndarray
+
+def remove_nan_from_array_refactor(matrix, replacement_value=bt_const.BT_NODATA_COST):
+    # Use boolean indexing to replace nan values
+    matrix[np.isnan(matrix)] = replacement_value
+
+def dyn_np_cc_map(in_chm, canopy_ht_threshold):
+    """
+    Create a new canopy raster.
+
+    MaskedArray based on the threshold comparison of in_chm (canopy height model)
+    with canopy_ht_threshold. It assigns 1.0 where the condition is True (canopy)
+    and 0.0 where the condition is False (non-canopy).
+
+    """
+    canopy_ndarray = np.ma.where(in_chm >= canopy_ht_threshold, 1.0, 0.0).astype(float)
+    return canopy_ndarray
+
+def cost_focal_stats(canopy_ndarray, kernel):
+    mask = canopy_ndarray.mask
+    in_ndarray = np.ma.where(mask, np.nan, canopy_ndarray)
+
+    # Function to compute mean and standard deviation
+    def calc_mean(arr):
+        # Check if the array is empty or full of NaNs
+        if arr.size == 0 or np.all(np.isnan(arr)):
+            return np.nan  # Or any other value you'd prefer for empty arrays
+        return np.nanmean(arr)
+
+    def calc_std(arr):
+        # Check if the array is empty or full of NaNs
+        if arr.size == 0 or np.all(np.isnan(arr)):
+            return np.nan  # Or any other placeholder you prefer
+        return np.nanstd(arr)
+
+    # Apply the generic_filter function to compute mean and std
+    mean_array = scipy.ndimage.generic_filter(
+        in_ndarray, calc_mean, footprint=kernel, mode="nearest"
+    )
+    std_array = scipy.ndimage.generic_filter(
+        in_ndarray, calc_std, footprint=kernel, mode="nearest"
+    )
+
+    return std_array, mean_array
+
+def cost_norm_dist_transform(canopy_ndarray, max_line_dist, sampling):
+    """Compute a distance-based cost map based on the proximity of valid data points."""
+    # Convert masked array to a regular array and fill the masked areas with np.nan
+    in_ndarray = canopy_ndarray.filled(np.nan)
+
+    # Compute the Euclidean distance transform (edt) where the valid values are
+    euc_dist_array = scipy.ndimage.distance_transform_edt(
+        np.logical_not(np.isnan(in_ndarray)), sampling=sampling
+    )
+
+    # Apply the mask back to set the distances to np.nan
+    euc_dist_array[canopy_ndarray.mask] = np.nan
+
+    # Calculate the smoothness (cost) array
+    normalized_cost = float(max_line_dist) - euc_dist_array
+    normalized_cost[normalized_cost <= 0.0] = 0.0
+    smooth_cost_array = normalized_cost / float(max_line_dist)
+
+    return smooth_cost_array
+
+def dyn_np_cost_raster_refactor(
+    canopy_ndarray, cc_mean, cc_std, cc_smooth, avoidance, cost_raster_exponent
+):
+    # Calculate the lower and upper bounds for canopy cover (mean ± std deviation)
+    lower_bound = cc_mean - cc_std
+    upper_bound = cc_mean + cc_std
+
+    # Calculate the ratio between the lower and upper bounds
+    ratio_lower_upper = np.divide(
+        lower_bound,
+        upper_bound,
+        where=upper_bound != 0,
+        out=np.zeros(lower_bound.shape, dtype=float),
+    )
+
+    # Normalize the ratio to a scale between 0 and 1
+    normalized_ratio = (1 + ratio_lower_upper) / 2
+
+    # Adjust where the sum of mean and std deviation is less than or equal to zero
+    adjusted_cover = cc_mean + cc_std
+    adjusted_ratio = np.where(adjusted_cover <= 0, 0, normalized_ratio)
+
+    # Combine canopy cover ratio with smoothing, weighted by avoidance factor
+    weighted_cover = adjusted_ratio * (1 - avoidance) + (cc_smooth * avoidance)
+
+    # Final cost modification based on canopy presence (masked by canopy_ndarray)
+    final_cost = np.where(canopy_ndarray.data == 1, 1, weighted_cover)
+
+    # Apply the exponential transformation to the cost values
+    exponent_cost = np.exp(final_cost)
+
+    # Raise the cost to the specified exponent
+    result_cost_raster = np.power(exponent_cost, float(cost_raster_exponent))
+
+    return result_cost_raster
+
+def circle_kernel_refactor(size, radius):
+    """
+    Create a circular kernel using Scipy.
+
+    Args:
+        size : kernel size
+        radius : radius of the circle
+
+    Returns:
+        kernel (ndarray): A circular kernel.
+
+    Examples:
+        kernel_scipy = create_circle_kernel_scipy(17, 8)
+        will replicate xarray-spatial kernel
+        cell_x = 0.3
+        cell_y = 0.3
+        tree_radius = 2.5
+        convolution.circle_kernel(cell_x, cell_y, tree_radius)
+
+    """
+    # Create grid points (mesh)
+    y, x = np.ogrid[:size, :size]
+
+    # Center of the kernel
+    center_x, center_y = (size - 1) / 2, (size - 1) / 2
+
+    # Calculate the distance from the center
+    distance = np.sqrt((x - center_x) ** 2 + (y - center_y) ** 2)
+
+    # Create a circular kernel
+    kernel = distance <= radius
+    return kernel.astype(float)
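
Again as an editorial sketch rather than part of the diff, the new algo_cost.cost_raster function could be driven from a canopy height model opened with rasterio; the CHM path and the parameter values below are assumptions.

# Illustrative sketch only; the CHM path and parameters are hypothetical.
import rasterio

import beratools.core.algo_cost as algo_cost

with rasterio.open("chm.tif") as src:
    chm = src.read(1)  # 2D canopy height array
    meta = src.meta    # carries the affine transform used for the cell size

cost, canopy = algo_cost.cost_raster(
    chm,
    meta,
    tree_radius=2.5,
    canopy_ht_threshold=2.0,
    max_line_dist=2.5,
    canopy_avoid=0.4,
    cost_raster_exponent=1.5,
)
# cost: smoothed, exponentiated cost surface; canopy: 1/0 canopy map.
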