beratools-0.2.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- beratools/__init__.py +9 -0
- beratools/core/__init__.py +0 -0
- beratools/core/algo_centerline.py +351 -0
- beratools/core/constants.py +86 -0
- beratools/core/dijkstra_algorithm.py +460 -0
- beratools/core/logger.py +85 -0
- beratools/core/tool_base.py +133 -0
- beratools/gui/__init__.py +15 -0
- beratools/gui/batch_processing_dlg.py +463 -0
- beratools/gui/beratools.json +2300 -0
- beratools/gui/bt_data.py +487 -0
- beratools/gui/bt_gui_main.py +691 -0
- beratools/gui/cli.py +18 -0
- beratools/gui/gui.json +8 -0
- beratools/gui/img/BERALogo.png +0 -0
- beratools/gui/img/closed.gif +0 -0
- beratools/gui/img/closed.png +0 -0
- beratools/gui/img/open.gif +0 -0
- beratools/gui/img/open.png +0 -0
- beratools/gui/img/tool.gif +0 -0
- beratools/gui/img/tool.png +0 -0
- beratools/gui/map_window.py +146 -0
- beratools/gui/tool_widgets.py +493 -0
- beratools/gui_tk/ASCII Banners.txt +248 -0
- beratools/gui_tk/__init__.py +20 -0
- beratools/gui_tk/beratools_main.py +515 -0
- beratools/gui_tk/bt_widgets.py +442 -0
- beratools/gui_tk/cli.py +18 -0
- beratools/gui_tk/gui.json +8 -0
- beratools/gui_tk/img/BERALogo.png +0 -0
- beratools/gui_tk/img/closed.gif +0 -0
- beratools/gui_tk/img/closed.png +0 -0
- beratools/gui_tk/img/open.gif +0 -0
- beratools/gui_tk/img/open.png +0 -0
- beratools/gui_tk/img/tool.gif +0 -0
- beratools/gui_tk/img/tool.png +0 -0
- beratools/gui_tk/main.py +14 -0
- beratools/gui_tk/map_window.py +144 -0
- beratools/gui_tk/runner.py +1481 -0
- beratools/gui_tk/tooltip.py +55 -0
- beratools/third_party/pyqtlet2/__init__.py +9 -0
- beratools/third_party/pyqtlet2/leaflet/__init__.py +26 -0
- beratools/third_party/pyqtlet2/leaflet/control/__init__.py +6 -0
- beratools/third_party/pyqtlet2/leaflet/control/control.py +59 -0
- beratools/third_party/pyqtlet2/leaflet/control/draw.py +52 -0
- beratools/third_party/pyqtlet2/leaflet/control/layers.py +20 -0
- beratools/third_party/pyqtlet2/leaflet/core/Parser.py +24 -0
- beratools/third_party/pyqtlet2/leaflet/core/__init__.py +2 -0
- beratools/third_party/pyqtlet2/leaflet/core/evented.py +180 -0
- beratools/third_party/pyqtlet2/leaflet/layer/__init__.py +5 -0
- beratools/third_party/pyqtlet2/leaflet/layer/featuregroup.py +34 -0
- beratools/third_party/pyqtlet2/leaflet/layer/icon/__init__.py +1 -0
- beratools/third_party/pyqtlet2/leaflet/layer/icon/icon.py +30 -0
- beratools/third_party/pyqtlet2/leaflet/layer/imageoverlay.py +18 -0
- beratools/third_party/pyqtlet2/leaflet/layer/layer.py +105 -0
- beratools/third_party/pyqtlet2/leaflet/layer/layergroup.py +45 -0
- beratools/third_party/pyqtlet2/leaflet/layer/marker/__init__.py +1 -0
- beratools/third_party/pyqtlet2/leaflet/layer/marker/marker.py +91 -0
- beratools/third_party/pyqtlet2/leaflet/layer/tile/__init__.py +2 -0
- beratools/third_party/pyqtlet2/leaflet/layer/tile/gridlayer.py +4 -0
- beratools/third_party/pyqtlet2/leaflet/layer/tile/tilelayer.py +16 -0
- beratools/third_party/pyqtlet2/leaflet/layer/vector/__init__.py +5 -0
- beratools/third_party/pyqtlet2/leaflet/layer/vector/circle.py +15 -0
- beratools/third_party/pyqtlet2/leaflet/layer/vector/circlemarker.py +18 -0
- beratools/third_party/pyqtlet2/leaflet/layer/vector/path.py +5 -0
- beratools/third_party/pyqtlet2/leaflet/layer/vector/polygon.py +14 -0
- beratools/third_party/pyqtlet2/leaflet/layer/vector/polyline.py +18 -0
- beratools/third_party/pyqtlet2/leaflet/layer/vector/rectangle.py +14 -0
- beratools/third_party/pyqtlet2/leaflet/map/__init__.py +1 -0
- beratools/third_party/pyqtlet2/leaflet/map/map.py +220 -0
- beratools/third_party/pyqtlet2/mapwidget.py +45 -0
- beratools/third_party/pyqtlet2/web/custom.js +43 -0
- beratools/third_party/pyqtlet2/web/map.html +23 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/layers-2x.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/layers.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/marker-icon-2x.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/marker-icon.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/marker-shadow.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/leaflet.css +656 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/leaflet.js +6 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.codeclimate.yml +14 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.editorconfig +4 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.gitattributes +22 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.travis.yml +43 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/LICENSE +20 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/layers-2x.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/layers.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/marker-icon-2x.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/marker-icon.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/marker-shadow.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/spritesheet-2x.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/spritesheet.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/spritesheet.svg +156 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/leaflet.draw.css +10 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/leaflet.draw.js +10 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_rotatedMarker_020/LICENSE +22 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_rotatedMarker_020/leaflet.rotatedMarker.js +57 -0
- beratools/tools/Beratools_r_script.r +1120 -0
- beratools/tools/Ht_metrics.py +116 -0
- beratools/tools/__init__.py +7 -0
- beratools/tools/batch_processing.py +132 -0
- beratools/tools/canopy_threshold_relative.py +670 -0
- beratools/tools/canopycostraster.py +222 -0
- beratools/tools/centerline.py +176 -0
- beratools/tools/common.py +885 -0
- beratools/tools/fl_regen_csf.py +428 -0
- beratools/tools/forest_line_attributes.py +408 -0
- beratools/tools/forest_line_ecosite.py +216 -0
- beratools/tools/lapis_all.py +103 -0
- beratools/tools/least_cost_path_from_chm.py +152 -0
- beratools/tools/line_footprint_absolute.py +363 -0
- beratools/tools/line_footprint_fixed.py +282 -0
- beratools/tools/line_footprint_functions.py +720 -0
- beratools/tools/line_footprint_relative.py +64 -0
- beratools/tools/ln_relative_metrics.py +615 -0
- beratools/tools/r_cal_lpi_elai.r +25 -0
- beratools/tools/r_generate_pd_focalraster.r +101 -0
- beratools/tools/r_interface.py +80 -0
- beratools/tools/r_point_density.r +9 -0
- beratools/tools/rpy_chm2trees.py +86 -0
- beratools/tools/rpy_dsm_chm_by.py +81 -0
- beratools/tools/rpy_dtm_by.py +63 -0
- beratools/tools/rpy_find_cellsize.py +43 -0
- beratools/tools/rpy_gnd_csf.py +74 -0
- beratools/tools/rpy_hummock_hollow.py +85 -0
- beratools/tools/rpy_hummock_hollow_raster.py +71 -0
- beratools/tools/rpy_las_info.py +51 -0
- beratools/tools/rpy_laz2las.py +40 -0
- beratools/tools/rpy_lpi_elai_lascat.py +466 -0
- beratools/tools/rpy_normalized_lidar_by.py +56 -0
- beratools/tools/rpy_percent_above_dbh.py +80 -0
- beratools/tools/rpy_points2trees.py +88 -0
- beratools/tools/rpy_vegcoverage.py +94 -0
- beratools/tools/tiler.py +206 -0
- beratools/tools/tool_template.py +54 -0
- beratools/tools/vertex_optimization.py +620 -0
- beratools/tools/zonal_threshold.py +144 -0
- beratools-0.2.0.dist-info/METADATA +63 -0
- beratools-0.2.0.dist-info/RECORD +142 -0
- beratools-0.2.0.dist-info/WHEEL +4 -0
- beratools-0.2.0.dist-info/entry_points.txt +2 -0
- beratools-0.2.0.dist-info/licenses/LICENSE +22 -0
beratools/tools/common.py
@@ -0,0 +1,885 @@
#!/usr/bin/env python3
"""This file is intended to host common functions for BERA Tools.
"""

# This script is part of the BERA Tools geospatial library.
# Author: Richard Zeng
# Created: 12/04/2023
# License: MIT

# imports
import sys
import math
import tempfile
from pathlib import Path
from collections import OrderedDict
from itertools import zip_longest, compress

import json
import shlex
import argparse
import warnings
import numpy as np

import rasterio
from rasterio import mask

import fiona
import shapely
from shapely.affinity import rotate
from shapely.ops import split, transform
from shapely.geometry import shape, mapping, Point, LineString, box

import pandas as pd
import geopandas as gpd
from osgeo import ogr, gdal
from pyproj import CRS, Transformer
from pyogrio import set_gdal_config_options

from skimage.graph import MCP_Geometric, MCP_Connect

from scipy import ndimage
import xarray as xr
from xrspatial import focal, convolution

from beratools.core.tool_base import *

# suppress the pandas UserWarning "Geometry column does not contain geometry" raised when splitting lines
warnings.simplefilter(action='ignore', category=UserWarning)

# restore .shx for shapefiles when using GDAL or pyogrio
gdal.SetConfigOption('SHAPE_RESTORE_SHX', 'YES')
set_gdal_config_options({'SHAPE_RESTORE_SHX': 'YES'})

# suppress all kinds of warnings
if not BT_DEBUGGING:
    gdal.SetConfigOption('CPL_LOG', 'NUL')  # gdal warning
    warnings.filterwarnings("ignore")  # suppress warnings
    warnings.simplefilter(action='ignore', category=UserWarning)  # suppress pandas UserWarning


def clip_raster(in_raster_file, clip_geom, buffer=0.0, out_raster_file=None, ras_nodata=BT_NODATA):
    out_meta = None
    with rasterio.open(in_raster_file) as raster_file:
        out_meta = raster_file.meta
        if out_meta['nodata']:
            ras_nodata = out_meta['nodata']
        else:
            out_meta['nodata'] = ras_nodata

        clip_geo_buffer = [clip_geom.buffer(buffer)]
        out_image: np.ndarray
        out_image, out_transform = mask.mask(raster_file, clip_geo_buffer,
                                             crop=True, nodata=ras_nodata, filled=True)

    if out_meta['nodata']:
        out_image[out_image == out_meta['nodata']] = BT_NODATA
        ras_nodata = BT_NODATA

    height, width = out_image.shape[1:]
    out_meta.update({"driver": "GTiff",
                     "height": height,
                     "width": width,
                     "transform": out_transform,
                     "nodata": ras_nodata})

    if out_raster_file:
        with rasterio.open(out_raster_file, "w", **out_meta) as dest:
            dest.write(out_image)
            print('[Clip raster]: data saved to {}.'.format(out_raster_file))

    return out_image, out_meta
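
A minimal usage sketch for clip_raster (illustrative only; 'chm.tif', the extent coordinates and 'chm_clip.tif' are hypothetical and not shipped with the package):

    from shapely.geometry import box
    from beratools.tools.common import clip_raster

    clip_geom = box(480000, 5620000, 481000, 5621000)   # hypothetical clipping extent in the raster CRS
    out_image, out_meta = clip_raster('chm.tif', clip_geom, buffer=10.0, out_raster_file='chm_clip.tif')
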


def save_raster_to_file(in_raster_mem, in_meta, out_raster_file):
    """

    Parameters
    ----------
    in_raster_mem : numpy array (in-memory raster)
    in_meta : raster metadata (rasterio profile)
    out_raster_file : output raster file path

    Returns
    -------

    """
    with rasterio.open(out_raster_file, "w", **in_meta) as dest:
        dest.write(in_raster_mem, indexes=1)


def clip_lines(clip_geom, buffer, in_line_file, out_line_file):
    in_line = gpd.read_file(in_line_file)
    out_line = in_line.clip(clip_geom.buffer(buffer * BT_BUFFER_RATIO))

    if out_line_file and len(out_line) > 0:
        out_line.to_file(out_line_file)
        print('[Clip lines]: data saved to {}.'.format(out_line_file))

    return out_line


def read_geoms_from_shapefile(in_file):
    geoms = []
    with fiona.open(in_file) as open_file:
        layer_crs = open_file.crs
        for geom in open_file:
            geoms.append(geom['geometry'])

    return geoms


# Read features from shapefile
def read_feature_from_shapefile(in_file):
    shapes = []
    with fiona.open(in_file) as open_file:
        for feat in open_file:
            shapes.append([shape(feat.geometry), feat.properties])

    return shapes


def generate_raster_footprint(in_raster, latlon=True):
    inter_img = 'image_overview.tif'

    # get raster datasource
    src_ds = gdal.Open(in_raster)
    width, height = src_ds.RasterXSize, src_ds.RasterYSize
    src_crs = src_ds.GetSpatialRef().ExportToWkt()

    geom = None
    with tempfile.TemporaryDirectory() as tmp_folder:
        if BT_DEBUGGING:
            print('Temporary folder: {}'.format(tmp_folder))

        if max(width, height) <= 1024:
            inter_img = in_raster
        else:
            if width >= height:
                options = gdal.TranslateOptions(width=1024, height=0)
            else:
                options = gdal.TranslateOptions(width=0, height=1024)

            inter_img = Path(tmp_folder).joinpath(inter_img).as_posix()
            gdal.Translate(inter_img, src_ds, options=options)

        shapes = gdal.Footprint(None, inter_img, dstSRS=src_crs, format='GeoJSON')
        target_feat = shapes['features'][0]
        geom = shape(target_feat['geometry'])

        # coords = None
        # with rasterio.open(inter_img) as src:
        #     if np.isnan(src.nodata):
        #         geom = box(*src.bounds)
        #         coords_geo = list(geom.exterior.coords)
        #     else:
        #         msk = src.read_masks(1)
        #         shapes = features.shapes(msk, mask=msk)
        #         shapes = list(shapes)
        #         coords = shapes[0][0]['coordinates'][0]
        #
        #         for pt in coords:
        #             pt = rasterio.transform.xy(src.transform, pt[1], pt[0])
        #             coords_geo.append(pt)
        #
        #         coords_geo.pop(-1)

    if latlon:
        out_crs = CRS('EPSG:4326')
        transformer = Transformer.from_crs(CRS(src_crs), out_crs)

        geom = transform(transformer.transform, geom)
        # coords_geo = list(transformer.itransform(coords_geo))
        # coords_geo = [list(pt) for pt in coords_geo]

    return geom


def remove_nan_from_array(matrix):
    with np.nditer(matrix, op_flags=['readwrite']) as it:
        for x in it:
            if np.isnan(x[...]):
                x[...] = BT_NODATA_COST


def replace_Nodata2NaN(matrix, nodata):
    with np.nditer(matrix, op_flags=['readwrite']) as it:
        for x in it:
            if x[...] == nodata:
                x[...] = np.NaN


def replace_Nodata2Inf(matrix, nodata):
    with np.nditer(matrix, op_flags=['readwrite']) as it:
        for x in it:
            if x[...] == nodata:
                x[...] = np.Inf


# Split LineString to segments at vertices
def segments(line_coords):
    if len(line_coords) < 2:
        return None
    elif len(line_coords) == 2:
        return [fiona.Geometry.from_dict({'type': 'LineString', 'coordinates': line_coords})]
    else:
        seg_list = zip(line_coords[:-1], line_coords[1:])
        line_list = [{'type': 'LineString', 'coordinates': coords} for coords in seg_list]
        return [fiona.Geometry.from_dict(line) for line in line_list]


def extract_string_from_printout(str_print, str_extract):
    str_array = shlex.split(str_print)  # keep string in double quotes
    str_array_enum = enumerate(str_array)
    index = 0
    for item in str_array_enum:
        if str_extract in item[1]:
            index = item[0]
            break
    str_out = str_array[index]
    return str_out.strip()


def check_arguments():
    # Get tool arguments
    parser = argparse.ArgumentParser()
    parser.add_argument('-i', '--input', type=json.loads)
    parser.add_argument('-p', '--processes')
    parser.add_argument('-v', '--verbose')
    args = parser.parse_args()

    verbose = True if args.verbose == 'True' else False
    for item in args.input:
        if args.input[item] == 'false':
            args.input[item] = False
        elif args.input[item] == 'true':
            args.input[item] = True

    return args, verbose
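
The BERA tool scripts appear to share this -i/-p/-v argument convention; a sketch of how a tool would consume it (the JSON key names below are illustrative, not a fixed schema):

    import sys
    from beratools.tools.common import check_arguments

    sys.argv = ['tool.py', '-i', '{"in_line": "lines.shp", "write_output": "true"}', '-p', '8', '-v', 'True']
    args, verbose = check_arguments()
    # args.input == {'in_line': 'lines.shp', 'write_output': True}; verbose is True
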


def save_features_to_shapefile(out_file, crs, geoms, properties=None, schema=None):
    """

    Parameters
    ----------
    out_file :
    crs :
    geoms : shapely geometry objects
    schema :
    properties :

    Returns
    -------

    """
    # remove all None items
    # TODO: check geom type consistency
    # geoms = [item for item in geoms if item is not None]

    if len(geoms) < 1:
        return

    try:
        geom_type = mapping(geoms[0])['type']
    except Exception as e:
        print(e)

    if not schema:
        props_tuple = zip([], [])  # if lengths are not the same, a ValueError is raised
        props_schema = [(item, type(value).__name__) for item, value in props_tuple]

        schema = {
            'geometry': geom_type,
            'properties': OrderedDict([])
        }

        properties = None

    driver = 'ESRI Shapefile'
    print('Writing to shapefile {}'.format(out_file))

    try:
        out_line_file = fiona.open(out_file, 'w', driver, schema, crs)
    except Exception as e:
        print(e)
        out_line_file.close()
        return

    if properties:
        feat_tuple = zip_longest(geoms, properties)
    else:  # properties are None
        feat_tuple = [(item, None) for item in geoms]

    try:
        for geom, prop in feat_tuple:
            if geom:
                feature = {
                    'geometry': mapping(geom),
                    'properties': prop
                }

                out_line_file.write(feature)
    except Exception as e:
        print(e)

    out_line_file.close()
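
A usage sketch for save_features_to_shapefile, assuming the caller supplies a matching schema and property list (the output file name and EPSG code are placeholders):

    from collections import OrderedDict
    from shapely.geometry import LineString
    from beratools.tools.common import save_features_to_shapefile

    lines = [LineString([(0, 0), (100, 0)]), LineString([(0, 50), (100, 50)])]
    schema = {'geometry': 'LineString', 'properties': OrderedDict([('OLnFID', 'int')])}
    props = [OrderedDict([('OLnFID', 0)]), OrderedDict([('OLnFID', 1)])]
    save_features_to_shapefile('out_lines.shp', 'EPSG:4326', lines, properties=props, schema=schema)
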


def vector_crs(in_vector):
    vec_crs = None
    with ogr.Open(in_vector) as vector_file:
        if vector_file:
            vec_crs = vector_file.GetLayer().GetSpatialRef()

    return vec_crs


def raster_crs(in_raster):
    ras_crs = None
    with gdal.Open(in_raster) as raster_file:
        if raster_file:
            ras_crs = raster_file.GetSpatialRef()

    return ras_crs


def compare_crs(crs_org, crs_dst):
    if crs_org and crs_dst:
        if crs_org.IsSameGeogCS(crs_dst):
            print('Check: Input file Spatial References are the same, continue.')
            return True
        else:
            crs_org_norm = CRS(crs_org.ExportToWkt())
            crs_dst_norm = CRS(crs_dst.ExportToWkt())
            if crs_org_norm.is_compound:
                crs_org_proj = crs_org_norm.sub_crs_list[0].coordinate_operation.name
            elif crs_org_norm.name == 'unnamed':
                return False
            else:
                crs_org_proj = crs_org_norm.coordinate_operation.name

            if crs_dst_norm.is_compound:
                crs_dst_proj = crs_dst_norm.sub_crs_list[0].coordinate_operation.name
            elif crs_dst_norm.name == 'unnamed':
                return False
            else:
                crs_dst_proj = crs_dst_norm.coordinate_operation.name

            if crs_org_proj == crs_dst_proj:
                print('Checked: Input files Spatial References are the same, continue.')
                return True

    return False
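
The three helpers above lend themselves to being chained; a short sketch (input file names are hypothetical):

    from beratools.tools.common import vector_crs, raster_crs, compare_crs

    if not compare_crs(vector_crs('seed_lines.shp'), raster_crs('chm.tif')):
        print('Spatial references do not match; reproject the inputs first.')
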


def identity_polygon(line_args):
    """
    Return the footprint polygon(s) associated with a line segment

    Parameters
    ----------
    line_args : list of GeoDataFrames
        0 : GeoDataFrame with one line segment
        1 : GeoDataFrame with one line buffer
        2 : GeoDataFrame of polygons returned by a spatial search

    Returns
    -------
    line, identity : tuple of the line and its associated footprint

    """
    line = line_args[0]
    in_cl_buffer = line_args[1][['geometry', 'OLnFID']]
    in_fp_polygon = line_args[2]

    identity = None
    try:
        # drop polygons not intersecting with the line segment
        line_geom = line.iloc[0].geometry
        drop_list = []
        for i in in_fp_polygon.index:
            if not in_fp_polygon.loc[i].geometry.intersects(line_geom):
                drop_list.append(i)
            elif line_geom.intersection(in_fp_polygon.loc[i].geometry).length / line_geom.length < 0.30:
                drop_list.append(i)  # if less than 30% of the line is inside the polygon, ignore it

        # drop all polygons not used
        in_fp_polygon = in_fp_polygon.drop(index=drop_list)

        if not in_fp_polygon.empty:
            identity = in_fp_polygon.overlay(in_cl_buffer, how='intersection')
    except Exception as e:
        print(e)

    return line, identity


def line_split2(in_ln_shp, seg_length):
    # Check for the OLnFID column in the data; if it is missing, create it
    if 'OLnFID' not in in_ln_shp.columns.array:
        if BT_DEBUGGING:
            print("Cannot find 'OLnFID' column in input line data")

        print("New column created: {}".format('OLnFID'))
        in_ln_shp['OLnFID'] = in_ln_shp.index
    line_seg = split_into_Equal_Nth_segments(in_ln_shp, seg_length)

    return line_seg


def split_into_Equal_Nth_segments(df, seg_length):
    odf = df
    crs = odf.crs
    if 'OLnSEG' not in odf.columns.array:
        df['OLnSEG'] = np.nan
    df = odf.assign(geometry=odf.apply(lambda x: cut_line(x.geometry, seg_length), axis=1))
    # df = odf.assign(geometry=odf.apply(lambda x: cut_line(x.geometry, x.geometry.length), axis=1))
    df = df.explode()

    df['OLnSEG'] = df.groupby('OLnFID').cumcount()
    gdf = gpd.GeoDataFrame(df, geometry=df.geometry, crs=crs)
    gdf = gdf.sort_values(by=['OLnFID', 'OLnSEG'])
    gdf = gdf.reset_index(drop=True)

    if "shape_leng" in gdf.columns.array:
        gdf["shape_leng"] = gdf.geometry.length
    elif "LENGTH" in gdf.columns.array:
        gdf["LENGTH"] = gdf.geometry.length
    else:
        gdf["shape_leng"] = gdf.geometry.length
    return gdf


def split_line_nPart(line, seg_length):
    seg_line = shapely.segmentize(line, seg_length)
    distances = np.arange(seg_length, line.length, seg_length)

    if len(distances) > 0:
        points = [shapely.line_interpolate_point(seg_line, distance) for distance in distances]

        split_points = shapely.multipoints(points)
        mline = split(seg_line, split_points)
    else:
        mline = seg_line

    return mline


def cut_line(line, distance):
    """

    Parameters
    ----------
    line : LineString
        line to be split by distance along the line
    distance : float
        length of segment to cut

    Returns
    -------
    list of LineString
    """
    lines = list()
    lines = cut(line, distance, lines)
    return lines
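
For illustration, cutting a 100 m line into roughly 30 m pieces with cut_line (the coordinates and segment length are arbitrary):

    from shapely.geometry import LineString
    from beratools.tools.common import cut_line

    line = LineString([(0, 0), (100, 0)])
    pieces = cut_line(line, 30.0)   # list of LineStrings of at most ~30 m, plus the remainder
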


def cut(line, distance, lines):
    # Cuts a line in several segments at a distance from its starting point
    if line.has_z:
        line = transform(lambda x, y, z=None: (x, y), line)

    if shapely.is_empty(line) or shapely.is_missing(line):
        return None

    if math.fmod(line.length, distance) < 1:
        return [line]
    elif distance >= line.length:
        return [line]

    end_pt = None
    line = shapely.segmentize(line, distance)

    while line.length > distance:
        coords = list(line.coords)
        for i, p in enumerate(coords):
            pd = line.project(Point(p))

            if abs(pd - distance) < BT_EPSILON:
                lines.append(LineString(coords[:i + 1]))
                line = LineString(coords[i:])
                end_pt = None
                break
            elif pd > distance:
                end_pt = line.interpolate(distance)
                lines.append(LineString(coords[:i] + list(end_pt.coords)))
                line = LineString(list(end_pt.coords) + coords[i:])
                break

    if end_pt:
        lines.append(line)
    return lines


def line_angle(point_1, point_2):
    """
    Calculates the angle of the line

    Parameters
    ----------
    point_1, point_2 : start and end points of a shapely line
    """
    delta_y = point_2.y - point_1.y
    delta_x = point_2.x - point_1.x

    angle = math.atan2(delta_y, delta_x)
    return angle
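
A quick worked example of line_angle (the returned angle is in radians):

    from shapely.geometry import Point
    from beratools.tools.common import line_angle

    angle = line_angle(Point(0, 0), Point(1, 1))   # math.atan2(1, 1) == pi / 4
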


def generate_perpendicular_line_precise(points, offset=20):
    """
    Generate a perpendicular line to the input line at the given point.

    Parameters
    ----------
    points : list of shapely.geometry.Point
        The points on the line where the perpendicular should be generated.
    offset : float, optional
        The length of the perpendicular line.

    Returns
    -------
    shapely.geometry.LineString
        The generated perpendicular line.
    """
    # Compute the angle of the line
    center = points[1]
    perp_line = None

    if len(points) == 2:
        head = points[0]
        tail = points[1]

        delta_x = head.x - tail.x
        delta_y = head.y - tail.y
        angle = 0.0

        if math.isclose(delta_x, 0.0):
            angle = math.pi / 2
        else:
            angle = math.atan(delta_y / delta_x)

        start = [center.x + offset / 2.0, center.y]
        end = [center.x - offset / 2.0, center.y]
        line = LineString([start, end])
        perp_line = rotate(line, angle + math.pi / 2.0, origin=center, use_radians=True)
    elif len(points) == 3:
        head = points[0]
        tail = points[2]

        angle_1 = line_angle(center, head)
        angle_2 = line_angle(center, tail)
        angle_diff = (angle_2 - angle_1) / 2.0
        head_new = Point(center.x + offset / 2.0 * math.cos(angle_1), center.y + offset / 2.0 * math.sin(angle_1))
        if head.has_z:
            head_new = shapely.force_3d(head_new)
        try:
            perp_seg_1 = LineString([center, head_new])
            perp_seg_1 = rotate(perp_seg_1, angle_diff, origin=center, use_radians=True)
            perp_seg_2 = rotate(perp_seg_1, math.pi, origin=center, use_radians=True)
            perp_line = LineString([list(perp_seg_1.coords)[1], list(perp_seg_2.coords)[1]])
        except Exception as e:
            print(e)

    return perp_line
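
A sketch of generate_perpendicular_line_precise with three collinear vertices (coordinates are arbitrary):

    from shapely.geometry import Point
    from beratools.tools.common import generate_perpendicular_line_precise

    pts = [Point(0, 0), Point(10, 0), Point(20, 0)]
    perp = generate_perpendicular_line_precise(pts, offset=20)
    # perp is a 20-unit LineString crossing Point(10, 0) at a right angle to the input line
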


def corridor_raster(raster_clip, out_meta, source, destination, cell_size, corridor_threshold):
    """
    Calculate corridor raster

    Parameters
    ----------
    raster_clip : raster
    out_meta : raster file meta
    source : list of point tuple(s)
        start point in row/col
    destination : list of point tuple(s)
        end point in row/col
    cell_size : tuple
        (cell_size_x, cell_size_y)
    corridor_threshold : double

    Returns
    -------
    corridor raster
    """
    try:
        # change all nan to BT_NODATA_COST for workaround
        if len(raster_clip.shape) > 2:
            raster_clip = np.squeeze(raster_clip, axis=0)
        remove_nan_from_array(raster_clip)

        # generate the cost raster to source point
        mcp_source = MCP_Geometric(raster_clip, sampling=cell_size)
        source_cost_acc = mcp_source.find_costs(source)[0]
        del mcp_source

        # generate the cost raster to destination point
        mcp_dest = MCP_Geometric(raster_clip, sampling=cell_size)
        dest_cost_acc = mcp_dest.find_costs(destination)[0]

        # Generate corridor
        corridor = source_cost_acc + dest_cost_acc
        corridor = np.ma.masked_invalid(corridor)

        # Calculate minimum value of corridor raster
        if np.ma.min(corridor) is not None:
            corr_min = float(np.ma.min(corridor))
        else:
            corr_min = 0.5

        # normalize corridor raster by deducting corr_min
        corridor_norm = corridor - corr_min
        corridor_thresh_cl = np.ma.where(corridor_norm >= corridor_threshold, 1.0, 0.0)

    except Exception as e:
        print(e)
        print('corridor_raster: Exception occurred.')
        return None

    return corridor_thresh_cl
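
A sketch of how corridor_raster can be fed from clip_raster and cost_raster (the file name, seed coordinates and the 3.0 threshold are placeholders; row/col indices are derived with rasterio's AffineTransformer, as done elsewhere in this module):

    import rasterio
    from shapely.geometry import LineString
    from beratools.tools.common import clip_raster, cost_raster, corridor_raster

    seed = LineString([(480100, 5620100), (480400, 5620300)])
    clip, meta = clip_raster('chm.tif', seed, buffer=30.0)
    cost, canopy = cost_raster(clip, meta)

    t = rasterio.transform.AffineTransformer(meta['transform'])
    source = [t.rowcol(*seed.coords[0])]
    destination = [t.rowcol(*seed.coords[-1])]
    cell_size = (meta['transform'][0], -meta['transform'][4])
    corridor = corridor_raster(cost, meta, source, destination, cell_size, corridor_threshold=3.0)
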


def LCP_skimage_mcp_connect(cost_clip, in_meta, seed_line):
    lc_path_new = []
    if len(cost_clip.shape) > 2:
        cost_clip = np.squeeze(cost_clip, axis=0)

    out_transform = in_meta['transform']
    transformer = rasterio.transform.AffineTransformer(out_transform)

    x1, y1 = list(seed_line.coords)[0][:2]
    x2, y2 = list(seed_line.coords)[-1][:2]
    source = [transformer.rowcol(x1, y1)]
    destination = [transformer.rowcol(x2, y2)]

    try:
        init_obj1 = MCP_Connect(cost_clip)
        results = init_obj1.find_costs(source, destination)
        # init_obj2 = MCP_Geometric(cost_clip)
        path = []
        for end in destination:
            path.append(init_obj1.traceback(end))
        for row, col in path[0]:
            x, y = transformer.xy(row, col)
            lc_path_new.append((x, y))
    except Exception as e:
        print(e)
        return None

    if len(lc_path_new) < 2:
        print('No least cost path detected, pass.')
        return None
    else:
        lc_path_new = LineString(lc_path_new)

    return lc_path_new


def chk_df_multipart(df, chk_shp_in_string):
    try:
        found = False
        if str.upper(chk_shp_in_string) in [x.upper() for x in df.geom_type.values]:
            found = True
            df = df.explode()
            if type(df) is gpd.geodataframe.GeoDataFrame:
                df['OLnSEG'] = df.groupby('OLnFID').cumcount()
                df = df.sort_values(by=['OLnFID', 'OLnSEG'])
                df = df.reset_index(drop=True)
        else:
            found = False
        return df, found
    except Exception as e:
        print(e)
        return df, False


def dyn_fs_raster_stdmean(in_ndarray, kernel, nodata):
    # This function uses xrspatial, which can handle large data but is slow
    # print("Calculating Canopy Closure's Focal Statistic (Standard Deviation) Raster ...")
    in_ndarray[in_ndarray == nodata] = np.nan
    result_ndarray = focal.focal_stats(xr.DataArray(in_ndarray), kernel, stats_funcs=['std', 'mean'])

    # Assign std and mean ndarray
    reshape_std_ndarray = result_ndarray[0].data  # .reshape(-1)
    reshape_mean_ndarray = result_ndarray[1].data  # .reshape(-1)

    return reshape_std_ndarray, reshape_mean_ndarray


def dyn_smooth_cost(in_raster, max_line_dist, sampling):
    # print('Generating Cost Raster ...')

    # scipy way to do Euclidean distance transform
    euc_dist_array = ndimage.distance_transform_edt(np.logical_not(in_raster), sampling=sampling)

    smooth1 = float(max_line_dist) - euc_dist_array
    smooth1[smooth1 <= 0.0] = 0.0
    smooth_cost_array = smooth1 / float(max_line_dist)

    return smooth_cost_array


def dyn_np_cost_raster(canopy_ndarray, cc_mean, cc_std, cc_smooth, avoidance, cost_raster_exponent):
    aM1a = (cc_mean - cc_std)
    aM1b = (cc_mean + cc_std)
    aM1 = np.divide(aM1a, aM1b, where=aM1b != 0, out=np.zeros(aM1a.shape, dtype=float))
    aM = (1 + aM1) / 2
    aaM = (cc_mean + cc_std)
    bM = np.where(aaM <= 0, 0, aM)
    cM = bM * (1 - avoidance) + (cc_smooth * avoidance)
    dM = np.where(canopy_ndarray == 1, 1, cM)
    eM = np.exp(dM)
    result = np.power(eM, float(cost_raster_exponent))

    return result


def dyn_np_cc_map(in_array, canopy_ht_threshold, nodata):
    canopy_ht_threshold = 0.8
    canopy_ndarray = np.ma.where(in_array >= canopy_ht_threshold, 1., 0.).astype(float)
    canopy_ndarray = np.ma.filled(canopy_ndarray, nodata)
    # canopy_ndarray[canopy_ndarray==nodata]=np.NaN  # TODO check the code, extra step?

    return canopy_ndarray
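
A small sketch of dyn_np_cc_map on a toy CHM array; note that, as released, the function overrides the canopy_ht_threshold argument with a hard-coded 0.8:

    import numpy as np
    from beratools.tools.common import dyn_np_cc_map

    chm = np.array([[0.2, 1.5], [3.0, 0.1]])
    canopy = dyn_np_cc_map(chm, canopy_ht_threshold=1.0, nodata=-9999.0)
    # cells with height >= 0.8 become 1.0, all others 0.0
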


def cost_raster(in_raster, meta):
    if len(in_raster.shape) > 2:
        in_raster = np.squeeze(in_raster, axis=0)

    # raster_clip, out_meta = clip_raster(self.in_raster, seed_line, self.line_radius)
    # in_raster = np.squeeze(in_raster, axis=0)
    cell_x, cell_y = meta['transform'][0], -meta['transform'][4]

    kernel = convolution.circle_kernel(cell_x, cell_y, 2.5)
    dyn_canopy_ndarray = dyn_np_cc_map(in_raster, FP_CORRIDOR_THRESHOLD, BT_NODATA)
    cc_std, cc_mean = dyn_fs_raster_stdmean(dyn_canopy_ndarray, kernel, BT_NODATA)
    cc_smooth = dyn_smooth_cost(dyn_canopy_ndarray, 2.5, [cell_x, cell_y])

    # TODO avoidance, re-use this code
    avoidance = max(min(float(0.4), 1), 0)
    cost_clip = dyn_np_cost_raster(dyn_canopy_ndarray, cc_mean, cc_std,
                                   cc_smooth, 0.4, 1.5)

    # TODO use nan or BT_DATA?
    cost_clip[in_raster == BT_NODATA] = np.nan
    dyn_canopy_ndarray[in_raster == BT_NODATA] = np.nan

    return cost_clip, dyn_canopy_ndarray


def generate_line_args_NoClipraster(line_seg, work_in_buffer, in_chm_obj, in_chm, tree_radius, max_line_dist,
                                    canopy_avoidance, exponent, canopy_thresh_percentage):
    line_argsC = []

    for record in range(0, len(work_in_buffer)):
        try:
            line_bufferC = work_in_buffer.loc[record, 'geometry']

            nodata = BT_NODATA
            line_argsC.append([in_chm, float(work_in_buffer.loc[record, 'DynCanTh']), float(tree_radius),
                               float(max_line_dist), float(canopy_avoidance), float(exponent), in_chm_obj.res, nodata,
                               line_seg.iloc[[record]], in_chm_obj.meta.copy(), record, 10, 'Center',
                               canopy_thresh_percentage, line_bufferC])
        except Exception as e:
            print(e)

        step = record + 1
        total = len(work_in_buffer)

        print(f' "PROGRESS_LABEL Preparing lines {step} of {total}" ', flush=True)
        print(f' %{step / total * 100} ', flush=True)

    return line_argsC


def generate_line_args_DFP_NoClip(line_seg, work_in_bufferL, work_in_bufferC, in_chm_obj,
                                  in_chm, tree_radius, max_line_dist, canopy_avoidance,
                                  exponent, work_in_bufferR, canopy_thresh_percentage):
    line_argsL = []
    line_argsR = []
    line_argsC = []
    line_id = 0
    for record in range(0, len(work_in_bufferL)):
        line_bufferL = work_in_bufferL.loc[record, 'geometry']
        line_bufferC = work_in_bufferC.loc[record, 'geometry']
        LCut = work_in_bufferL.loc[record, 'LDist_Cut']

        nodata = BT_NODATA
        line_argsL.append([in_chm, float(work_in_bufferL.loc[record, 'DynCanTh']), float(tree_radius),
                           float(max_line_dist), float(canopy_avoidance), float(exponent), in_chm_obj.res, nodata,
                           line_seg.iloc[[record]], in_chm_obj.meta.copy(), line_id, LCut, 'Left',
                           canopy_thresh_percentage, line_bufferL])

        line_argsC.append([in_chm, float(work_in_bufferC.loc[record, 'DynCanTh']), float(tree_radius),
                           float(max_line_dist), float(canopy_avoidance), float(exponent), in_chm_obj.res, nodata,
                           line_seg.iloc[[record]], in_chm_obj.meta.copy(), line_id, 10, 'Center',
                           canopy_thresh_percentage, line_bufferC])

        line_id += 1

    line_id = 0
    for record in range(0, len(work_in_bufferR)):
        line_bufferR = work_in_bufferR.loc[record, 'geometry']
        RCut = work_in_bufferR.loc[record, 'RDist_Cut']
        # clipped_rasterR, out_transformR = rasterio.mask.mask(in_chm, [line_bufferR], crop=True,
        #                                                      nodata=BT_NODATA, filled=True)
        # clipped_rasterR = np.squeeze(clipped_rasterR, axis=0)
        #
        # # make rasterio meta for saving raster later
        # out_metaR = in_chm.meta.copy()
        # out_metaR.update({"driver": "GTiff",
        #                   "height": clipped_rasterR.shape[0],
        #                   "width": clipped_rasterR.shape[1],
        #                   "nodata": BT_NODATA,
        #                   "transform": out_transformR})
        line_bufferC = work_in_bufferC.loc[record, 'geometry']
        # clipped_rasterC, out_transformC = rasterio.mask.mask(in_chm, [line_bufferC], crop=True,
        #                                                      nodata=BT_NODATA, filled=True)
        #
        # clipped_rasterC = np.squeeze(clipped_rasterC, axis=0)
        # out_metaC = in_chm.meta.copy()
        # out_metaC.update({"driver": "GTiff",
        #                   "height": clipped_rasterC.shape[0],
        #                   "width": clipped_rasterC.shape[1],
        #                   "nodata": BT_NODATA,
        #                   "transform": out_transformC})

        nodata = BT_NODATA
        # TODO deal with inherited nodata and BT_NODATA_COST
        # TODO convert nodata to BT_NODATA_COST
        line_argsR.append([in_chm, float(work_in_bufferR.loc[record, 'DynCanTh']), float(tree_radius),
                           float(max_line_dist), float(canopy_avoidance), float(exponent), in_chm_obj.res, nodata,
                           line_seg.iloc[[record]], in_chm_obj.meta.copy(), line_id, RCut, 'Right',
                           canopy_thresh_percentage, line_bufferR])

        step = line_id + 1 + len(work_in_bufferL)
        total = len(work_in_bufferL) + len(work_in_bufferR)
        print(f' "PROGRESS_LABEL Preparing... {step} of {total}" ', flush=True)
        print(f' %{step / total * 100} ', flush=True)

        line_id += 1

    return line_argsL, line_argsR, line_argsC


def chk_null_geometry(in_data):
    find = False
    if isinstance(in_data, gpd.GeoDataFrame):
        if len(in_data[(in_data.is_empty | in_data.isna())]) > 0:
            find = True

    return find