BERATools 0.2.2-py3-none-any.whl → 0.2.4-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- beratools/__init__.py +8 -3
- beratools/core/{algo_footprint_rel.py → algo_canopy_footprint_exp.py} +176 -139
- beratools/core/algo_centerline.py +61 -77
- beratools/core/algo_common.py +48 -57
- beratools/core/algo_cost.py +18 -25
- beratools/core/algo_dijkstra.py +37 -45
- beratools/core/algo_line_grouping.py +100 -100
- beratools/core/algo_merge_lines.py +40 -8
- beratools/core/algo_split_with_lines.py +289 -304
- beratools/core/algo_vertex_optimization.py +25 -46
- beratools/core/canopy_threshold_relative.py +755 -0
- beratools/core/constants.py +8 -9
- beratools/{tools → core}/line_footprint_functions.py +411 -258
- beratools/core/logger.py +18 -2
- beratools/core/tool_base.py +17 -75
- beratools/gui/assets/BERALogo.ico +0 -0
- beratools/gui/assets/BERA_Splash.gif +0 -0
- beratools/gui/assets/BERA_WizardImage.png +0 -0
- beratools/gui/assets/beratools.json +475 -2171
- beratools/gui/bt_data.py +585 -234
- beratools/gui/bt_gui_main.py +129 -91
- beratools/gui/main.py +4 -7
- beratools/gui/tool_widgets.py +530 -354
- beratools/tools/__init__.py +0 -7
- beratools/tools/{line_footprint_absolute.py → canopy_footprint_absolute.py} +81 -56
- beratools/tools/canopy_footprint_exp.py +113 -0
- beratools/tools/centerline.py +30 -37
- beratools/tools/check_seed_line.py +127 -0
- beratools/tools/common.py +65 -586
- beratools/tools/{line_footprint_fixed.py → ground_footprint.py} +140 -117
- beratools/tools/line_footprint_relative.py +64 -35
- beratools/tools/tool_template.py +48 -40
- beratools/tools/vertex_optimization.py +20 -34
- beratools/utility/env_checks.py +53 -0
- beratools/utility/spatial_common.py +210 -0
- beratools/utility/tool_args.py +138 -0
- beratools-0.2.4.dist-info/METADATA +134 -0
- beratools-0.2.4.dist-info/RECORD +50 -0
- {beratools-0.2.2.dist-info → beratools-0.2.4.dist-info}/WHEEL +1 -1
- beratools-0.2.4.dist-info/entry_points.txt +3 -0
- beratools-0.2.4.dist-info/licenses/LICENSE +674 -0
- beratools/core/algo_tiler.py +0 -428
- beratools/gui/__init__.py +0 -11
- beratools/gui/batch_processing_dlg.py +0 -513
- beratools/gui/map_window.py +0 -162
- beratools/tools/Beratools_r_script.r +0 -1120
- beratools/tools/Ht_metrics.py +0 -116
- beratools/tools/batch_processing.py +0 -136
- beratools/tools/canopy_threshold_relative.py +0 -672
- beratools/tools/canopycostraster.py +0 -222
- beratools/tools/fl_regen_csf.py +0 -428
- beratools/tools/forest_line_attributes.py +0 -408
- beratools/tools/line_grouping.py +0 -45
- beratools/tools/ln_relative_metrics.py +0 -615
- beratools/tools/r_cal_lpi_elai.r +0 -25
- beratools/tools/r_generate_pd_focalraster.r +0 -101
- beratools/tools/r_interface.py +0 -80
- beratools/tools/r_point_density.r +0 -9
- beratools/tools/rpy_chm2trees.py +0 -86
- beratools/tools/rpy_dsm_chm_by.py +0 -81
- beratools/tools/rpy_dtm_by.py +0 -63
- beratools/tools/rpy_find_cellsize.py +0 -43
- beratools/tools/rpy_gnd_csf.py +0 -74
- beratools/tools/rpy_hummock_hollow.py +0 -85
- beratools/tools/rpy_hummock_hollow_raster.py +0 -71
- beratools/tools/rpy_las_info.py +0 -51
- beratools/tools/rpy_laz2las.py +0 -40
- beratools/tools/rpy_lpi_elai_lascat.py +0 -466
- beratools/tools/rpy_normalized_lidar_by.py +0 -56
- beratools/tools/rpy_percent_above_dbh.py +0 -80
- beratools/tools/rpy_points2trees.py +0 -88
- beratools/tools/rpy_vegcoverage.py +0 -94
- beratools/tools/tiler.py +0 -48
- beratools/tools/zonal_threshold.py +0 -144
- beratools-0.2.2.dist-info/METADATA +0 -108
- beratools-0.2.2.dist-info/RECORD +0 -74
- beratools-0.2.2.dist-info/entry_points.txt +0 -2
- beratools-0.2.2.dist-info/licenses/LICENSE +0 -22
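Several modules were moved or renamed in 0.2.4, so downstream imports have to follow. Below is a minimal sketch of the path changes implied by the rename entries above; the module locations come from the file list, while the bare-module import style is an illustrative assumption rather than confirmed public API.

```python
# Import paths implied by the renames in the file list above.
# Module locations are taken from the diff; importing them as bare modules
# is an illustrative assumption about how downstream code consumes them.

# BERATools 0.2.2
# from beratools.tools import line_footprint_functions
# from beratools.tools import line_footprint_absolute, line_footprint_fixed

# BERATools 0.2.4
from beratools.core import line_footprint_functions    # moved from beratools.tools
from beratools.tools import canopy_footprint_absolute  # was line_footprint_absolute
from beratools.tools import ground_footprint           # was line_footprint_fixed
```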
beratools/tools/common.py
CHANGED
@@ -10,437 +10,33 @@ Description:
     This script is part of the BERA Tools.
     Webpage: https://github.com/appliedgrg/beratools
 
-    This file is intended to be hosting common classes/functions for BERA Tools
+    This file is intended to be hosting common spatial classes/functions for BERA Tools
 """
-import argparse
-import json
-import shlex
-import warnings
-
 import geopandas as gpd
 import numpy as np
-import osgeo
-import pyogrio
-import pyproj
-import rasterio
 import shapely
 import shapely.geometry as sh_geom
 import shapely.ops as sh_ops
 import xarray as xr
 import xrspatial
-from osgeo import gdal
-from rasterio import mask
 from scipy import ndimage
 
 import beratools.core.constants as bt_const
 
-
-warnings.simplefilter(action="ignore", category=UserWarning)
-
-# restore .shx for shapefile for using GDAL or pyogrio
-gdal.SetConfigOption("SHAPE_RESTORE_SHX", "YES")
-pyogrio.set_gdal_config_options({"SHAPE_RESTORE_SHX": "YES"})
-
-# suppress all kinds of warnings
-if not bt_const.BT_DEBUGGING:
-    gdal.SetConfigOption("CPL_LOG", "NUL")  # GDAL warning
-    warnings.filterwarnings("ignore")  # suppress warnings
-    warnings.simplefilter(
-        action="ignore", category=UserWarning
-    )  # suppress Pandas UserWarning
-
-
-def clip_raster(
-    in_raster_file,
-    clip_geom,
-    buffer=0.0,
-    out_raster_file=None,
-    default_nodata=bt_const.BT_NODATA,
-):
-    out_meta = None
-    with rasterio.open(in_raster_file) as raster_file:
-        out_meta = raster_file.meta
-        ras_nodata = out_meta["nodata"]
-        if ras_nodata is None:
-            ras_nodata = default_nodata
-
-        clip_geo_buffer = [clip_geom.buffer(buffer)]
-        out_image: np.ndarray
-        out_image, out_transform = mask.mask(
-            raster_file, clip_geo_buffer, crop=True, nodata=ras_nodata, filled=True
-        )
-        if np.isnan(ras_nodata):
-            out_image[np.isnan(out_image)] = default_nodata
-
-        elif np.isinf(ras_nodata):
-            out_image[np.isinf(out_image)] = default_nodata
-        else:
-            out_image[out_image == ras_nodata] = default_nodata
-
-        out_image = np.ma.masked_where(out_image == default_nodata, out_image)
-        out_image.fill_value = default_nodata
-        ras_nodata = default_nodata
-
-        height, width = out_image.shape[1:]
-
-        out_meta.update(
-            {
-                "driver": "GTiff",
-                "height": height,
-                "width": width,
-                "transform": out_transform,
-                "nodata": ras_nodata,
-            }
-        )
-
-    if out_raster_file:
-        with rasterio.open(out_raster_file, "w", **out_meta) as dest:
-            dest.write(out_image)
-            print("[Clip raster]: data saved to {}.".format(out_raster_file))
-
-    return out_image, out_meta
-
-
-# def clip_lines(clip_geom, buffer, in_line_file, out_line_file):
-#     in_line = gpd.read_file(in_line_file)
-#     out_line = in_line.clip(clip_geom.buffer(buffer * bt_const.BT_BUFFER_RATIO))
-
-#     if out_line_file and len(out_line) > 0:
-#         out_line.to_file(out_line_file)
-#         print("[Clip lines]: data saved to {}.".format(out_line_file))
-
-#     return out_line
-
-
-# def read_geoms_from_shapefile(in_file):
-#     geoms = []
-#     with fiona.open(in_file) as open_file:
-#         for geom in open_file:
-#             geoms.append(geom['geometry'])
-
-#     return geoms
-
-
-# def read_feature_from_shapefile(in_file):
-#     """ Read feature from shapefile
-
-#     Args:
-#         in_file (str): file name
-
-#     Returns:
-#         list: list of features
-#     """
-#     shapes = []
-#     with fiona.open(in_file) as open_file:
-#         for feat in open_file:
-#             shapes.append([shape(feat.geometry), feat.properties])
-
-#     return shapes
-
+PARALLEL_MODE = bt_const.ParallelMode.MULTIPROCESSING
 
 def remove_nan_from_array(matrix):
     with np.nditer(matrix, op_flags=["readwrite"]) as it:
         for x in it:
             if np.isnan(x[...]):
                 x[...] = bt_const.BT_NODATA_COST
-
-
-# def replace_Nodata2NaN(matrix, nodata):
-#     with np.nditer(matrix, op_flags=["readwrite"]) as it:
-#         for x in it:
-#             if x[...] == nodata:
-#                 x[...] = np.NaN
-
-
-# def replace_Nodata2Inf(matrix, nodata):
-#     with np.nditer(matrix, op_flags=["readwrite"]) as it:
-#         for x in it:
-#             if x[...] == nodata:
-#                 x[...] = np.Inf
-
-
-# Split LineString to segments at vertices
-# def segments(line_coords):
-#     if len(line_coords) < 2:
-#         return None
-#     elif len(line_coords) == 2:
-#         return [fiona.Geometry.from_dict({'type': 'LineString', 'coordinates': line_coords})]
-#     else:
-#         seg_list = zip(line_coords[:-1], line_coords[1:])
-#         line_list = [{'type': 'LineString', 'coordinates': coords} for coords in seg_list]
-#         return [fiona.Geometry.from_dict(line) for line in line_list]
-
-
-def extract_string_from_printout(str_print, str_extract):
-    str_array = shlex.split(str_print)  # keep string in double quotes
-    str_array_enum = enumerate(str_array)
-    index = 0
-    for item in str_array_enum:
-        if str_extract in item[1]:
-            index = item[0]
-            break
-    str_out = str_array[index]
-    return str_out.strip()
-
-
-def check_arguments():
-    # Get tool arguments
-    parser = argparse.ArgumentParser()
-    parser.add_argument("-i", "--input", type=json.loads)
-    parser.add_argument("-p", "--processes")
-    parser.add_argument("-v", "--verbose")
-    args = parser.parse_args()
-
-    verbose = True if args.verbose == "True" else False
-    for item in args.input:
-        if args.input[item].lower() == "false":
-            args.input[item] = False
-        elif args.input[item].lower() == "true":
-            args.input[item] = True
-
-    return args, verbose
-
-
-# def save_features_to_file(out_file, crs, geoms, properties=None, schema=None,
-#                           driver='ESRI Shapefile', layer=None):
-#     """
-
-#     Args:
-#         out_file :
-#         crs :
-#         geoms : shapely geometry objects
-#         schema :
-#         properties :
-#         driver:
-#         layer:
-#     """
-#     # remove all None items
-#     # TODO: check geom type consistency
-#     if len(geoms) < 1:
-#         return
-
-#     try:
-#         geom_type = mapping(geoms[0])['type']
-#     except Exception as e:
-#         print(e)
-
-#     if not schema:
-#         props_tuple = zip([], [])  # if lengths are not the same, ValueError raises
-#         props_schema = [(item, type(value).__name__) for item, value in props_tuple]
-
-#         schema = {
-#             'geometry': geom_type,
-#             'properties': OrderedDict([])
-#         }
-
-#         properties = None
-
-#     print('Writing to file {}'.format(out_file), flush=True)
-
-#     try:
-#         out_line_file = fiona.open(out_file, 'w', driver, schema, crs, layer=layer)
-#     except Exception as e:
-#         print(e)
-#         out_line_file.close()
-#         return
-
-#     if properties:
-#         feat_tuple = zip_longest(geoms, properties)
-#     else:  # properties are None
-#         feat_tuple = [(item, None) for item in geoms]
-
-#     try:
-#         for geom, prop in feat_tuple:
-#             if geom:
-#                 feature = {
-#                     'geometry': mapping(geom),
-#                     'properties': prop
-#                 }
-
-#                 out_line_file.write(feature)
-#     except Exception as e:
-#         print(e)
-
-#     out_line_file.close()
-
-
-def vector_crs(in_vector):
-    osr_crs = osgeo.osr.SpatialReference()
-    from pyproj.enums import WktVersion
-
-    vec_crs = None
-    # open input vector data as GeoDataFrame
-    gpd_vector = gpd.GeoDataFrame.from_file(in_vector)
-    try:
-        if gpd_vector.crs is not None:
-            vec_crs = gpd_vector.crs
-            if osgeo.version_info.major < 3:
-                osr_crs.ImportFromWkt(vec_crs.to_wkt(WktVersion.WKT1_GDAL))
-            else:
-                osr_crs.ImportFromEPSG(vec_crs.to_epsg())
-            return osr_crs
-        else:
-            print(
-                "No CRS found in the input feature, please check!"
-            )
-            exit()
-    except Exception as e:
-        print(e)
-        exit()
-
-
-# def df_crs(in_df):
-#     vec_crs = None
-#     osr_crs = osgeo.osr.SpatialReference()
-#     from pyproj.enums import WktVersion
-
-#     try:
-#         if in_df.crs is not None:
-#             vec_crs = in_df.crs
-#             if osgeo.version_info.major < 3:
-#                 osr_crs.ImportFromWkt(vec_crs.to_wkt(WktVersion.WKT1_GDAL))
-#             else:
-#                 osr_crs.ImportFromEPSG(vec_crs.to_epsg())
-#             return osr_crs
-#         else:
-#             print(
-#                 "No Coordinate Reference System (CRS) find in the input feature, please check!"
-#             )
-#             exit()
-#     except Exception as e:
-#         print(e)
-#         exit()
-
-
-def raster_crs(in_raster):
-    osr_crs = osgeo.osr.SpatialReference()
-    with rasterio.open(in_raster) as raster_file:
-        from pyproj.enums import WktVersion
-
-        try:
-            if raster_file.crs is not None:
-                vec_crs = raster_file.crs
-                if osgeo.version_info.major < 3:
-                    osr_crs.ImportFromWkt(vec_crs.to_wkt(WktVersion.WKT1_GDAL))
-                else:
-                    osr_crs.ImportFromEPSG(vec_crs.to_epsg())
-                return osr_crs
-            else:
-                print(
-                    "No Coordinate Reference System (CRS) find in the input feature, please check!"
-                )
-                exit()
-        except Exception as e:
-            print(e)
-            exit()
-
-
-def compare_crs(crs_org, crs_dst):
-    if crs_org and crs_dst:
-        if crs_org.IsSameGeogCS(crs_dst):
-            print("Check: Input file Spatial Reference are the same, continue.")
-            return True
-        else:
-            crs_org_norm = pyproj.CRS(crs_org.ExportToWkt())
-            crs_dst_norm = pyproj.CRS(crs_dst.ExportToWkt())
-            if crs_org_norm.is_compound:
-                crs_org_proj = crs_org_norm.sub_crs_list[0].coordinate_operation.name
-            elif crs_org_norm.name == "unnamed":
-                return False
-            else:
-                crs_org_proj = crs_org_norm.coordinate_operation.name
-
-            if crs_dst_norm.is_compound:
-                crs_dst_proj = crs_dst_norm.sub_crs_list[0].coordinate_operation.name
-            elif crs_org_norm.name == "unnamed":
-                return False
-            else:
-                crs_dst_proj = crs_dst_norm.coordinate_operation.name
-
-            if crs_org_proj == crs_dst_proj:
-                if crs_org_norm.name == crs_dst_norm.name:
-                    print("Input files Spatial Reference are the same, continue.")
-                    return True
-                else:
-                    print(
-                        """Checked: Data are on the same projected Zone but using
-                        different Spatial Reference. \n Consider to re-project
-                        all data onto same spatial reference system.\n Process Stop."""
-                    )
-                    exit()
-            else:
-                return False
-
-    return False
-
-
-def identity_polygon(line_args):
-    """
-    Return polygon of line segment.
-
-    Args:
-        line_args : list[GeoDataFrame]
-            0 : GeoDataFrame line segment, one item
-            1 : GeoDataFrame line buffer, one item
-            2 : GeoDataFrame polygons returned by spatial search
-
-    Returns:
-        line, identity : tuple of line and associated footprint
-
-    """
-    line = line_args[0]
-    in_cl_buffer = line_args[1][["geometry", "OLnFID"]]
-    in_fp_polygon = line_args[2]
-
-    identity = None
-    try:
-        # drop polygons not intersecting with line segment
-        line_geom = line.iloc[0].geometry
-        drop_list = []
-        for i in in_fp_polygon.index:
-            if not in_fp_polygon.loc[i].geometry.intersects(line_geom):
-                drop_list.append(i)
-            elif (
-                line_geom.intersection(in_fp_polygon.loc[i].geometry).length
-                / line_geom.length
-                < 0.30
-            ):
-                drop_list.append(
-                    i
-                )  # if less the 1/5 of line is inside of polygon, ignore
-
-        # drop all polygons not used
-        in_fp_polygon = in_fp_polygon.drop(index=drop_list)
-
-        if not in_fp_polygon.empty:
-            identity = in_fp_polygon.overlay(in_cl_buffer, how="intersection")
-    except Exception as e:
-        print(e)
-
-    return line, identity
-
-
-def line_split2(in_ln_shp, seg_length):
-    # Check the OLnFID column in data. If it is not, column will be created
-    if "OLnFID" not in in_ln_shp.columns.array:
-        if bt_const.BT_DEBUGGING:
-            print("Cannot find {} column in input line data")
-
-        print(f"New column created: {'OLnFID'}, {'OLnFID'}")
-        in_ln_shp["OLnFID"] = in_ln_shp.index
-    line_seg = split_into_Equal_Nth_segments(in_ln_shp, seg_length)
-
-    return line_seg
-
-
-def split_into_Equal_Nth_segments(df, seg_length):
+
+def split_into_equal_Nth_segments(df, seg_length):
     odf = df
     crs = odf.crs
     if "OLnSEG" not in odf.columns.array:
         df["OLnSEG"] = np.nan
-    df = odf.assign(
-        geometry=odf.apply(lambda x: cut_line_by_length(x.geometry, seg_length), axis=1)
-    )
+    df = odf.assign(geometry=odf.apply(lambda x: cut_line_by_length(x.geometry, seg_length), axis=1))
     df = df.explode()
 
     df["OLnSEG"] = df.groupby("OLnFID").cumcount()
@@ -457,28 +53,11 @@ def split_into_Equal_Nth_segments(df, seg_length):
     return gdf
 
 
-def split_line_nPart(line, seg_length):
-    seg_line = shapely.segmentize(line, seg_length)
-    distances = np.arange(seg_length, line.length, seg_length)
-
-    if len(distances) > 0:
-        points = [
-            shapely.line_interpolate_point(seg_line, distance) for distance in distances
-        ]
-
-        split_points = shapely.multipoints(points)
-        mline = sh_ops.split(seg_line, split_points)
-    else:
-        mline = seg_line
-
-    return mline
-
-
 def cut_line_by_length(line, length, merge_threshold=0.5):
     """
-    Split line into segments of equal length.
-
-    Merge the last segment with the second-to-last if its length
+    Split line into segments of equal length.
+
+    Merge the last segment with the second-to-last if its length
     is smaller than the given threshold.
 
     Args:
@@ -494,12 +73,12 @@ def cut_line_by_length(line, length, merge_threshold=0.5):
         A list containing the resulting line segments.
 
     Example:
-        >>> from shapely.geometry import LineString
-        >>> line = LineString([(0, 0), (10, 0)])
-        >>> segments = cut_line_by_length(line, 3, merge_threshold=1)
-        >>> for segment in segments:
-        >>>     print(f"Segment: {segment}, Length: {segment.length}")
-
+        ">>> from shapely.geometry import LineString
+        ">>> line = LineString([(0, 0), (10, 0)])
+        ">>> segments = cut_line_by_length(line, 3, merge_threshold=1)
+        ">>> for segment in segments:
+        ">>>     print(f"Segment: {segment}, Length: {segment.length}")
+
     Output:
         Segment: LINESTRING (0 0, 3 0), Length: 3.0
         Segment: LINESTRING (3 0, 6 0), Length: 3.0
@@ -507,7 +86,7 @@ def cut_line_by_length(line, length, merge_threshold=0.5):
        Segment: LINESTRING (9 0, 10 0), Length: 1.0
 
    After merging the last segment with the second-to-last segment:
-
+
    Output:
        Segment: LINESTRING (0 0, 3 0), Length: 3.0
        Segment: LINESTRING (3 0, 6 0), Length: 3.0
@@ -534,7 +113,7 @@ def cut_line_by_length(line, length, merge_threshold=0.5):
 
        # Check if the distance matches closely and split the line
        if abs(p_dist - length) < 1e-9:  # Use a small epsilon value
-            lines.append(sh_geom.LineString(coords[:i + 1]))
+            lines.append(sh_geom.LineString(coords[: i + 1]))
            line = sh_geom.LineString(coords[i:])
            end_pt = None
            break
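For reference, the docstring example above runs as follows; this is a minimal usage sketch assuming beratools 0.2.4 is installed and that cut_line_by_length remains importable from beratools.tools.common.

```python
# Mirrors the cut_line_by_length docstring example shown in the hunks above;
# the import location is an assumption based on this file's path.
from shapely.geometry import LineString

from beratools.tools.common import cut_line_by_length

line = LineString([(0, 0), (10, 0)])

# With merge_threshold=1 the trailing 1-unit remainder (9 0, 10 0) is merged
# into the previous piece, giving lengths 3.0, 3.0 and 4.0 per the docstring.
for segment in cut_line_by_length(line, 3, merge_threshold=1):
    print(f"Segment: {segment}, Length: {segment.length}")
```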
@@ -556,42 +135,19 @@ def cut_line_by_length(line, length, merge_threshold=0.5):
 
     return lines
 
+def chk_df_multipart(df:gpd.GeoDataFrame,
+                     chk_shp_in_string:str)-> tuple[gpd.GeoDataFrame , bool]:
+    """
+    This function is check the input geopandas.GeoDataFrame object contains multipart geometry.
+    If multipart geometry is found, function will try to explode and return single geometry and
+    a boolean of multipart is found or not.
+    Args:
+        df: Any geopandas.GeoDataFrame like
+        chk_shp_in_string: String that the input GeoDataFrame geometry type, i.e. 'Point', 'Polygon', 'LineString'
 
-
-    # lc_path_new = []
-    # if len(cost_clip.shape) > 2:
-    #     cost_clip = np.squeeze(cost_clip, axis=0)
-
-    # out_transform = in_meta["transform"]
-    # transformer = rasterio.transform.AffineTransformer(out_transform)
-
-    # x1, y1 = list(seed_line.coords)[0][:2]
-    # x2, y2 = list(seed_line.coords)[-1][:2]
-    # source = [transformer.rowcol(x1, y1)]
-    # destination = [transformer.rowcol(x2, y2)]
-
-    # try:
-    #     init_obj1 = sk_graph.MCP_Connect(cost_clip)
-    #     path = []
-    #     for end in destination:
-    #         path.append(init_obj1.traceback(end))
-    #     for row, col in path[0]:
-    #         x, y = transformer.xy(row, col)
-    #         lc_path_new.append((x, y))
-    # except Exception as e:
-    #     print(e)
-    #     return None
-
-    # if len(lc_path_new) < 2:
-    #     print("No least cost path detected, pass.")
-    #     return None
-    # else:
-    #     lc_path_new = sh_geom.LineString(lc_path_new)
-
-    # return lc_path_new
-
+    Returns:
 
-
+    """
     try:
         found = False
         if str.upper(chk_shp_in_string) in [x.upper() for x in df.geom_type.values]:
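The new chk_df_multipart helper explodes multipart geometry and reports whether any was found. A hedged usage sketch based on the signature and docstring added above; the return contract is taken from the type annotation and is not verified against the shipped code.

```python
# Usage sketch for the new chk_df_multipart helper, inferred from its
# annotation: returns the (possibly exploded) GeoDataFrame and a flag.
import geopandas as gpd
from shapely.geometry import MultiLineString

from beratools.tools.common import chk_df_multipart

gdf = gpd.GeoDataFrame(
    geometry=[MultiLineString([[(0, 0), (1, 1)], [(2, 2), (3, 3)]])],
    crs="EPSG:3857",
)

# Per the docstring, multipart geometry is exploded to single parts and a
# boolean reports whether any multipart geometry was found.
single_part_gdf, found_multipart = chk_df_multipart(gdf, "LineString")
print(found_multipart, len(single_part_gdf))
```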
@@ -612,12 +168,12 @@ def chk_df_multipart(df, chk_shp_in_string):
 def dyn_fs_raster_stdmean(canopy_ndarray, kernel, nodata):
     # This function uses xrspatial which can handle large data but slow
     mask = canopy_ndarray.mask
-    in_ndarray = np.ma.where(mask == True, np.NaN, canopy_ndarray)
+    in_ndarray = np.ma.where(mask == True, np.nan, canopy_ndarray)
     result_ndarray = xrspatial.focal.focal_stats(
         xr.DataArray(in_ndarray.data), kernel, stats_funcs=["std", "mean"]
     )
 
-    # Assign std and mean ndarray (return array contain NaN value)
+    # Assign std and mean ndarray (return array contain nan value)
     reshape_std_ndarray = result_ndarray[0].data
     reshape_mean_ndarray = result_ndarray[1].data
 
@@ -626,12 +182,12 @@ def dyn_fs_raster_stdmean(canopy_ndarray, kernel, nodata):
 
 def dyn_smooth_cost(canopy_ndarray, max_line_dist, sampling):
     mask = canopy_ndarray.mask
-    in_ndarray = np.ma.where(mask == True, np.NaN, canopy_ndarray)
+    in_ndarray = np.ma.where(mask == True, np.nan, canopy_ndarray)
     # scipy way to do Euclidean distance transform
     euc_dist_array = ndimage.distance_transform_edt(
         np.logical_not(np.isnan(in_ndarray.data)), sampling=sampling
     )
-    euc_dist_array[mask == True] = np.NaN
+    euc_dist_array[mask == True] = np.nan
     smooth1 = float(max_line_dist) - euc_dist_array
     smooth1[smooth1 <= 0.0] = 0.0
     smooth_cost_array = smooth1 / float(max_line_dist)
@@ -639,9 +195,7 @@ def dyn_smooth_cost(canopy_ndarray, max_line_dist, sampling):
     return smooth_cost_array
 
 
-def dyn_np_cost_raster(
-    canopy_ndarray, cc_mean, cc_std, cc_smooth, avoidance, cost_raster_exponent
-):
+def dyn_np_cost_raster(canopy_ndarray, cc_mean, cc_std, cc_smooth, avoidance, cost_raster_exponent):
     aM1a = cc_mean - cc_std
     aM1b = cc_mean + cc_std
     aM1 = np.divide(aM1a, aM1b, where=aM1b != 0, out=np.zeros(aM1a.shape, dtype=float))
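The np.NaN to np.nan edits in these hunks track NumPy 2.0, which removed the np.NaN alias; this rationale is an inference from the diff, not something the page states.

```python
import numpy as np

x = np.nan      # canonical spelling, valid on NumPy 1.x and 2.x
# x = np.NaN    # alias removed in NumPy 2.0 -> AttributeError
print(np.isnan(x))
```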
@@ -663,81 +217,6 @@ def dyn_np_cc_map(in_chm, canopy_ht_threshold, nodata):
     return canopy_ndarray
 
 
-# def morph_raster(corridor_thresh, canopy_raster, exp_shk_cell, cell_size_x):
-#     # Process: Stamp CC and Max Line Width
-#     ras_sum = corridor_thresh + canopy_raster
-#     raster_class = np.ma.where(ras_sum == 0, 1, 0).data
-
-#     if exp_shk_cell > 0 and cell_size_x < 1:
-#         # Process: Expand
-#         # FLM original Expand equivalent
-#         cell_size = int(exp_shk_cell * 2 + 1)
-#         expanded = ndimage.grey_dilation(raster_class, size=(cell_size, cell_size))
-
-#         # Process: Shrink
-#         # FLM original Shrink equivalent
-#         file_shrink = ndimage.grey_erosion(expanded, size=(cell_size, cell_size))
-
-#     else:
-#         if bt_const.BT_DEBUGGING:
-#             print("No Expand And Shrink cell performed.")
-#         file_shrink = raster_class
-
-#     # Process: Boundary Clean
-#     clean_raster = ndimage.gaussian_filter(file_shrink, sigma=0, mode="nearest")
-
-#     return clean_raster
-
-
-# def generate_line_args_NoClipraster(
-#     line_seg,
-#     work_in_buffer,
-#     in_chm_obj,
-#     in_chm,
-#     tree_radius,
-#     max_line_dist,
-#     canopy_avoidance,
-#     exponent,
-#     canopy_thresh_percentage,
-# ):
-#     line_argsC = []
-
-#     for record in range(0, len(work_in_buffer)):
-#         try:
-#             line_bufferC = work_in_buffer.loc[record, "geometry"]
-
-#             nodata = bt_const.BT_NODATA
-#             line_argsC.append(
-#                 [
-#                     in_chm,
-#                     float(work_in_buffer.loc[record, "DynCanTh"]),
-#                     float(tree_radius),
-#                     float(max_line_dist),
-#                     float(canopy_avoidance),
-#                     float(exponent),
-#                     in_chm_obj.res,
-#                     nodata,
-#                     line_seg.iloc[[record]],
-#                     in_chm_obj.meta.copy(),
-#                     record,
-#                     10,
-#                     "Center",
-#                     canopy_thresh_percentage,
-#                     line_bufferC,
-#                 ]
-#             )
-#         except Exception as e:
-#             print(e)
-
-#         step = record + 1
-#         total = len(work_in_buffer)
-
-#         print(f' "PROGRESS_LABEL Preparing lines {step} of {total}" ', flush=True)
-#         print(f" %{step / total * 100} ", flush=True)
-
-#     return line_argsC
-
-
 def generate_line_args_DFP_NoClip(
     line_seg,
     work_in_bufferL,
@@ -750,21 +229,20 @@ def generate_line_args_DFP_NoClip(
     exponent,
     work_in_bufferR,
     canopy_thresh_percentage,
+    exp_shk_cell
 ):
     line_argsL = []
     line_argsR = []
     line_argsC = []
     line_id = 0
-
-
+    total = len(work_in_bufferC)+len(work_in_bufferL)+len(work_in_bufferR)
+    for record in range(0, len(work_in_bufferC)):
         line_bufferC = work_in_bufferC.loc[record, "geometry"]
-        LCut = work_in_bufferL.loc[record, "LDist_Cut"]
-
         nodata = bt_const.BT_NODATA
-
+        line_argsC.append(
             [
                 in_chm,
-                float(
+                float(work_in_bufferC.loc[record, "DynCanTh"]),
                 float(tree_radius),
                 float(max_line_dist),
                 float(canopy_avoidance),
@@ -774,17 +252,30 @@ def generate_line_args_DFP_NoClip(
                 line_seg.iloc[[record]],
                 in_chm_obj.meta.copy(),
                 line_id,
-
-                "
+                10,
+                "Center",
                 canopy_thresh_percentage,
-
+                line_bufferC,
+                exp_shk_cell
             ]
         )
 
-
+        step = line_id + 1
+        print(f' "PROGRESS_LABEL Preparing... {step} of {total}" ', flush=True)
+        print(f" {(step / total) * 100}% ", flush=True)
+        line_id += 1
+
+    line_id = 0
+
+    for record in range(0, len(work_in_bufferL)):
+        line_bufferL = work_in_bufferL.loc[record, "geometry"]
+        LCut = work_in_bufferL.loc[record, "LDist_Cut"]
+
+        nodata = bt_const.BT_NODATA
+        line_argsL.append(
             [
                 in_chm,
-                float(
+                float(work_in_bufferL.loc[record, "DynCanTh"]),
                 float(tree_radius),
                 float(max_line_dist),
                 float(canopy_avoidance),
@@ -794,20 +285,23 @@ def generate_line_args_DFP_NoClip(
                 line_seg.iloc[[record]],
                 in_chm_obj.meta.copy(),
                 line_id,
-
-                "
+                LCut,
+                "Left",
                 canopy_thresh_percentage,
-
+                line_bufferL,
+                exp_shk_cell
             ]
         )
 
+        step = line_id + 1+ len(work_in_bufferC)
+        print(f' "PROGRESS_LABEL Preparing... {step} of {total}" ', flush=True)
+        print(f" {(step / total) * 100}% ", flush=True)
        line_id += 1
 
    line_id = 0
    for record in range(0, len(work_in_bufferR)):
        line_bufferR = work_in_bufferR.loc[record, "geometry"]
        RCut = work_in_bufferR.loc[record, "RDist_Cut"]
-        line_bufferC = work_in_bufferC.loc[record, "geometry"]
 
        nodata = bt_const.BT_NODATA
        # TODO deal with inherited nodata and BT_NODATA_COST
@@ -829,29 +323,14 @@ def generate_line_args_DFP_NoClip(
                 "Right",
                 canopy_thresh_percentage,
                 line_bufferR,
+                exp_shk_cell
             ]
         )
 
-        step = line_id + 1 + len(work_in_bufferL)
-        total = len(work_in_bufferL) + len(work_in_bufferR)
+        step = line_id + 1 + len(work_in_bufferC)+len(work_in_bufferL)
         print(f' "PROGRESS_LABEL Preparing... {step} of {total}" ', flush=True)
-        print(f" %{step / total * 100} ", flush=True)
+        print(f" {(step / total) * 100}% ", flush=True)
 
         line_id += 1
 
     return line_argsL, line_argsR, line_argsC
-
-
-# def chk_null_geometry(in_data):
-#     find = False
-#     if isinstance(in_data, gpd.GeoDataFrame):
-#         if len(in_data[(in_data.is_empty | in_data.isna())]) > 0:
-#             find = True
-#
-#     return find
-
-
-# def read_data2gpd(in_data):
-#     print("Reading data.......")
-#     out_gpd_obj = gpd.GeoDataFrame.from_file(in_data)
-#     return out_gpd_obj
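The restructured generate_line_args_DFP_NoClip now reports progress continuously across the Center, Left, and Right loops: total covers all three buffers, and each loop offsets its step by the lengths of the loops before it. A minimal sketch of that numbering scheme, using illustrative stand-in lists rather than the real buffer GeoDataFrames:

```python
# Continuous progress numbering across three sequential loops, as in the
# updated generate_line_args_DFP_NoClip; the lists here are stand-ins.
work_c, work_l, work_r = [0, 1, 2], [0, 1], [0, 1]
total = len(work_c) + len(work_l) + len(work_r)

offsets = (0, len(work_c), len(work_c) + len(work_l))
for offset, batch in zip(offsets, (work_c, work_l, work_r)):
    for line_id, _ in enumerate(batch):
        step = offset + line_id + 1
        print(f'"PROGRESS_LABEL Preparing... {step} of {total}"')
        print(f"{step / total * 100:.0f}%")
```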