ign-pdal-tools 1.7.4__tar.gz → 1.7.6__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ign_pdal_tools-1.7.4/ign_pdal_tools.egg-info → ign_pdal_tools-1.7.6}/PKG-INFO +7 -2
- ign_pdal_tools-1.7.4/PKG-INFO → ign_pdal_tools-1.7.6/README.md +5 -8
- ign_pdal_tools-1.7.4/README.md → ign_pdal_tools-1.7.6/ign_pdal_tools.egg-info/PKG-INFO +13 -0
- {ign_pdal_tools-1.7.4 → ign_pdal_tools-1.7.6}/ign_pdal_tools.egg-info/SOURCES.txt +4 -0
- {ign_pdal_tools-1.7.4 → ign_pdal_tools-1.7.6}/pdaltools/_version.py +1 -1
- ign_pdal_tools-1.7.6/pdaltools/add_points_in_las.py +104 -0
- ign_pdal_tools-1.7.6/pdaltools/add_points_in_pointcloud.py +102 -0
- {ign_pdal_tools-1.7.4 → ign_pdal_tools-1.7.6}/pdaltools/las_info.py +27 -1
- {ign_pdal_tools-1.7.4 → ign_pdal_tools-1.7.6}/pdaltools/las_remove_dimensions.py +0 -1
- ign_pdal_tools-1.7.6/pdaltools/pcd_info.py +76 -0
- {ign_pdal_tools-1.7.4 → ign_pdal_tools-1.7.6}/pdaltools/standardize_format.py +1 -1
- ign_pdal_tools-1.7.6/test/test_add_points_in_las.py +72 -0
- ign_pdal_tools-1.7.6/test/test_add_points_in_pointcloud.py +82 -0
- {ign_pdal_tools-1.7.4 → ign_pdal_tools-1.7.6}/test/test_las_info.py +5 -0
- ign_pdal_tools-1.7.6/test/test_pcd_info.py +87 -0
- ign_pdal_tools-1.7.4/pdaltools/pcd_info.py +0 -46
- ign_pdal_tools-1.7.4/test/test_pcd_info.py +0 -61
- {ign_pdal_tools-1.7.4 → ign_pdal_tools-1.7.6}/LICENSE.md +0 -0
- {ign_pdal_tools-1.7.4 → ign_pdal_tools-1.7.6}/ign_pdal_tools.egg-info/dependency_links.txt +0 -0
- {ign_pdal_tools-1.7.4 → ign_pdal_tools-1.7.6}/ign_pdal_tools.egg-info/top_level.txt +0 -0
- {ign_pdal_tools-1.7.4 → ign_pdal_tools-1.7.6}/pdaltools/color.py +0 -0
- {ign_pdal_tools-1.7.4 → ign_pdal_tools-1.7.6}/pdaltools/las_add_buffer.py +0 -0
- {ign_pdal_tools-1.7.4 → ign_pdal_tools-1.7.6}/pdaltools/las_clip.py +0 -0
- {ign_pdal_tools-1.7.4 → ign_pdal_tools-1.7.6}/pdaltools/las_merge.py +0 -0
- {ign_pdal_tools-1.7.4 → ign_pdal_tools-1.7.6}/pdaltools/replace_attribute_in_las.py +0 -0
- {ign_pdal_tools-1.7.4 → ign_pdal_tools-1.7.6}/pdaltools/unlock_file.py +0 -0
- {ign_pdal_tools-1.7.4 → ign_pdal_tools-1.7.6}/pyproject.toml +0 -0
- {ign_pdal_tools-1.7.4 → ign_pdal_tools-1.7.6}/setup.cfg +0 -0
- {ign_pdal_tools-1.7.4 → ign_pdal_tools-1.7.6}/test/test_color.py +0 -0
- {ign_pdal_tools-1.7.4 → ign_pdal_tools-1.7.6}/test/test_las_add_buffer.py +0 -0
- {ign_pdal_tools-1.7.4 → ign_pdal_tools-1.7.6}/test/test_las_clip.py +0 -0
- {ign_pdal_tools-1.7.4 → ign_pdal_tools-1.7.6}/test/test_las_merge.py +0 -0
- {ign_pdal_tools-1.7.4 → ign_pdal_tools-1.7.6}/test/test_las_remove_dimensions.py +0 -0
- {ign_pdal_tools-1.7.4 → ign_pdal_tools-1.7.6}/test/test_replace_attribute_in_las.py +0 -0
- {ign_pdal_tools-1.7.4 → ign_pdal_tools-1.7.6}/test/test_standardize_format.py +0 -0
- {ign_pdal_tools-1.7.4 → ign_pdal_tools-1.7.6}/test/test_unlock.py +0 -0
{ign_pdal_tools-1.7.4/ign_pdal_tools.egg-info → ign_pdal_tools-1.7.6}/PKG-INFO

```diff
@@ -1,6 +1,6 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.2
 Name: ign-pdal-tools
-Version: 1.7.4
+Version: 1.7.6
 Summary: Library for common LAS files manipulation with PDAL
 Author-email: Guillaume Liegard <guillaume.liegard@ign.fr>
 Description-Content-Type: text/markdown
@@ -87,6 +87,11 @@ By default, `xcoord` and `ycoord` are given in kilometers and the shape of the t
 `readers.las: Global encoding WKT flag not set for point format 6 - 10.` which is due to TerraSolid
 malformed LAS output for LAS1.4 files with point format 6 to 10.
 
+## Add points in Las
+
+[add_points_in_las.py](pdaltools/add_points_in_las.py): add points from some vector files (ex: shp, geojson, ...) inside Las. New points will have X,Y and Z coordinates. Other attributes values given by the initial las file are null (ex: classification at 0). These others attributes could be forced by using the '--dimensions/-d' option in the command line (ex : 'add_points_in_las.py -i myLas.las -g myPoints.json -d classification=64' - points will have their classification set to 64). The dimension should be present in the initial las ; this is not allowed to add new dimension.
+
+
 # Dev / Build
 
 ## Contribute
```
ign_pdal_tools-1.7.4/PKG-INFO → ign_pdal_tools-1.7.6/README.md

```diff
@@ -1,11 +1,3 @@
-Metadata-Version: 2.1
-Name: ign-pdal-tools
-Version: 1.7.4
-Summary: Library for common LAS files manipulation with PDAL
-Author-email: Guillaume Liegard <guillaume.liegard@ign.fr>
-Description-Content-Type: text/markdown
-License-File: LICENSE.md
-
 # ign-pdal-tools
 
 This repo contains various python tools based on [PDAL](https://pdal.io/) that are used to work on
@@ -87,6 +79,11 @@ By default, `xcoord` and `ycoord` are given in kilometers and the shape of the t
 `readers.las: Global encoding WKT flag not set for point format 6 - 10.` which is due to TerraSolid
 malformed LAS output for LAS1.4 files with point format 6 to 10.
 
+## Add points in Las
+
+[add_points_in_las.py](pdaltools/add_points_in_las.py): add points from some vector files (ex: shp, geojson, ...) inside Las. New points will have X,Y and Z coordinates. Other attributes values given by the initial las file are null (ex: classification at 0). These others attributes could be forced by using the '--dimensions/-d' option in the command line (ex : 'add_points_in_las.py -i myLas.las -g myPoints.json -d classification=64' - points will have their classification set to 64). The dimension should be present in the initial las ; this is not allowed to add new dimension.
+
+
 # Dev / Build
 
 ## Contribute
```
ign_pdal_tools-1.7.4/README.md → ign_pdal_tools-1.7.6/ign_pdal_tools.egg-info/PKG-INFO

```diff
@@ -1,3 +1,11 @@
+Metadata-Version: 2.2
+Name: ign-pdal-tools
+Version: 1.7.6
+Summary: Library for common LAS files manipulation with PDAL
+Author-email: Guillaume Liegard <guillaume.liegard@ign.fr>
+Description-Content-Type: text/markdown
+License-File: LICENSE.md
+
 # ign-pdal-tools
 
 This repo contains various python tools based on [PDAL](https://pdal.io/) that are used to work on
@@ -79,6 +87,11 @@ By default, `xcoord` and `ycoord` are given in kilometers and the shape of the t
 `readers.las: Global encoding WKT flag not set for point format 6 - 10.` which is due to TerraSolid
 malformed LAS output for LAS1.4 files with point format 6 to 10.
 
+## Add points in Las
+
+[add_points_in_las.py](pdaltools/add_points_in_las.py): add points from some vector files (ex: shp, geojson, ...) inside Las. New points will have X,Y and Z coordinates. Other attributes values given by the initial las file are null (ex: classification at 0). These others attributes could be forced by using the '--dimensions/-d' option in the command line (ex : 'add_points_in_las.py -i myLas.las -g myPoints.json -d classification=64' - points will have their classification set to 64). The dimension should be present in the initial las ; this is not allowed to add new dimension.
+
+
 # Dev / Build
 
 ## Contribute
```
{ign_pdal_tools-1.7.4 → ign_pdal_tools-1.7.6}/ign_pdal_tools.egg-info/SOURCES.txt

```diff
@@ -6,6 +6,8 @@ ign_pdal_tools.egg-info/SOURCES.txt
 ign_pdal_tools.egg-info/dependency_links.txt
 ign_pdal_tools.egg-info/top_level.txt
 pdaltools/_version.py
+pdaltools/add_points_in_las.py
+pdaltools/add_points_in_pointcloud.py
 pdaltools/color.py
 pdaltools/las_add_buffer.py
 pdaltools/las_clip.py
@@ -16,6 +18,8 @@ pdaltools/pcd_info.py
 pdaltools/replace_attribute_in_las.py
 pdaltools/standardize_format.py
 pdaltools/unlock_file.py
+test/test_add_points_in_las.py
+test/test_add_points_in_pointcloud.py
 test/test_color.py
 test/test_las_add_buffer.py
 test/test_las_clip.py
```
ign_pdal_tools-1.7.6/pdaltools/add_points_in_las.py (new file)

```diff
@@ -0,0 +1,104 @@
+import argparse
+
+import geopandas
+import numpy as np
+import pdal
+
+from pdaltools.las_info import get_writer_parameters_from_reader_metadata, las_info_metadata, get_bounds_from_header_info
+
+
+def extract_points_from_geo(input_geo: str):
+    file = open(input_geo)
+    df = geopandas.read_file(file)
+    return df.get_coordinates(ignore_index=True, include_z=True)
+
+def point_in_bound(bound_minx, bound_maxx, bound_miny, bound_maxy, pt_x, pt_y):
+    return pt_x >= bound_minx and pt_x <= bound_maxx and pt_y >= bound_miny and pt_y <= bound_maxy
+
+def add_points_in_las(input_las: str, input_geo: str, output_las: str, inside_las: bool, values_dimensions: {}):
+    points_geo = extract_points_from_geo(input_geo)
+    pipeline = pdal.Pipeline() | pdal.Reader.las(input_las)
+    pipeline.execute()
+    points_las = pipeline.arrays[0]
+    dimensions = list(points_las.dtype.fields.keys())
+
+    if inside_las:
+        mtd = las_info_metadata(input_las)
+        bound_minx, bound_maxx, bound_miny, bound_maxy = get_bounds_from_header_info(mtd)
+
+    for i in points_geo.index:
+        if inside_las :
+            if not point_in_bound(bound_minx, bound_maxx, bound_miny, bound_maxy, points_geo["x"][i], points_geo["y"][i]):
+                continue
+        pt_las = np.empty(1, dtype=points_las.dtype)
+        pt_las[0][dimensions.index("X")] = points_geo["x"][i]
+        pt_las[0][dimensions.index("Y")] = points_geo["y"][i]
+        pt_las[0][dimensions.index("Z")] = points_geo["z"][i]
+        for val in values_dimensions:
+            pt_las[0][dimensions.index(val)] = values_dimensions[val]
+        points_las = np.append(points_las, pt_las, axis=0)
+
+    params = get_writer_parameters_from_reader_metadata(pipeline.metadata)
+    pipeline_end = pdal.Pipeline(arrays=[points_las])
+    pipeline_end |= pdal.Writer.las(output_las, forward="all", **params)
+    pipeline_end.execute()
+
+
+def parse_args():
+    parser = argparse.ArgumentParser("Add points from geometry file in a las/laz file.")
+    parser.add_argument("--input_file", "-i", type=str, help="Las/Laz input file")
+    parser.add_argument("--output_file", "-o", type=str, help="Las/Laz output file.")
+    parser.add_argument("--input_geo_file", "-g", type=str, help="Geometry input file.")
+    parser.add_argument("--inside_las", "-l", type=str, help="Keep points only inside the las boundary.")
+    parser.add_argument(
+        "--dimensions",
+        "-d",
+        metavar="KEY=VALUE",
+        nargs="+",
+        help="Set a number of key-value pairs corresponding to value "
+        "needed in points added in the output las; key should be included in the input las.",
+    )
+    return parser.parse_args()
+
+
+def is_nature(value, nature):
+    if value is None:
+        return False
+    try:
+        nature(value)
+        return True
+    except:
+        return False
+
+
+def parse_var(s):
+    items = s.split("=")
+    key = items[0].strip()
+    if len(items) > 1:
+        value = "=".join(items[1:])
+        if is_nature(value, int):
+            value = int(value)
+        elif is_nature(value, float):
+            value = float(value)
+    return (key, value)
+
+
+def parse_vars(items):
+    d = {}
+    if items:
+        for item in items:
+            key, value = parse_var(item)
+            d[key] = value
+    return d
+
+
+if __name__ == "__main__":
+    args = parse_args()
+    added_dimensions = parse_vars(args.dimensions)
+    add_points_in_las(
+        input_las=args.input_file,
+        input_geo=args.input_geo_file,
+        output_las=args.input_file if args.output_file is None else args.output_file,
+        inside_las=args.inside_las,
+        values_dimensions=added_dimensions,
+    )
```
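The new module can also be driven from Python rather than through its CLI. A minimal illustrative sketch (the file names are hypothetical placeholders; the function signature and the rule that forced dimensions must already exist in the input LAS come from the module above):

```python
# Hedged usage sketch of the new module; paths are hypothetical placeholders.
from pdaltools.add_points_in_las import add_points_in_las

add_points_in_las(
    input_las="myLas.las",                      # existing LAS/LAZ file
    input_geo="myPoints.json",                  # vector file with 3D points (shp, geojson, ...)
    output_las="myLas_with_points.las",
    inside_las=True,                            # skip points that fall outside the LAS header bounds
    values_dimensions={"Classification": 64},   # forced values; each key must be a dimension of the input LAS
)
```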
ign_pdal_tools-1.7.6/pdaltools/add_points_in_pointcloud.py (new file)

```diff
@@ -0,0 +1,102 @@
+import geopandas as gpd
+import laspy
+import numpy as np
+from shapely.geometry import box
+
+from pdaltools.las_info import get_tile_origin_using_header_info
+
+
+def get_tile_bbox(input_las, tile_width=1000) -> tuple:
+    """
+    Get the theoretical bounding box (xmin, ymin, xmax, ymax) of a LIDAR tile
+    using its origin and the predefined tile width.
+
+    Args:
+        input_las (str): Path to the LIDAR `.las/.laz` file.
+        tile_width (int): Width of the tile in meters (default: 1000).
+
+    Returns:
+        tuple: Bounding box as (xmin, ymin, xmax, ymax).
+    """
+    origin_x, origin_y = get_tile_origin_using_header_info(input_las)
+    bbox = (origin_x, origin_y - tile_width, origin_x + tile_width, origin_y)
+    return bbox
+
+
+def clip_3d_points_to_tile(input_points: str, input_las: str, crs: str) -> gpd.GeoDataFrame:
+    """
+    Add points from a GeoJSON file in the LIDAR's tile.
+
+    Args:
+        input_points (str): Path to the input GeoJSON file with 3D points.
+        input_las (str): Path to the LIDAR `.las/.laz` file.
+        crs (str): CRS of the data, e.g., 'EPSG:2154'.
+
+    Return:
+        gpd.GeoDataFrame: Points 2d with "Z" value
+    """
+    # Compute the bounding box of the LIDAR tile
+    tile_bbox = get_tile_bbox(input_las)
+
+    # Read the input GeoJSON with 3D points
+    points_gdf = gpd.read_file(input_points)
+
+    # Ensure the CRS matches
+    if crs:
+        points_gdf = points_gdf.to_crs(crs)
+
+    # Create a polygon from the bounding box
+    bbox_polygon = box(*tile_bbox)
+
+    # Clip the points to the bounding box
+    clipped_points = points_gdf[points_gdf.intersects(bbox_polygon)].copy()
+
+    return clipped_points
+
+
+def add_points_to_las(
+    input_points_with_z: gpd.GeoDataFrame, input_las: str, output_las: str, virtual_points_classes=66
+):
+    """Add points (3D points in LAZ format) by LIDAR tiles (tiling file)
+
+    Args:
+        input_points_with_z(gpd.GeoDataFrame): geometry columns (2D points) as encoded to WKT.
+        input_las (str): Path to the LIDAR tiles (LAZ).
+        output_las (str): Path to save the updated LIDAR file (LAS/LAZ format).
+        virtual_points_classes (int): The classification value to assign to those virtual points (default: 66).
+    """
+    # Check if input points are empty
+    if input_points_with_z.empty:
+        raise ValueError("No points to add. The input GeoDataFrame is empty.")
+
+    # Extract XYZ coordinates and additional attribute (classification)
+    x_coords = input_points_with_z.geometry.x
+    y_coords = input_points_with_z.geometry.y
+    z_coords = input_points_with_z.RecupZ
+    classes = virtual_points_classes * np.ones(len(input_points_with_z.index))
+
+    # Read the existing LIDAR file
+    with laspy.open(input_las, mode="r") as las:
+        las_data = las.read()
+        header = las.header
+
+    # Create a new header if the original header is missing or invalid
+    if header is None:
+        header = laspy.LasHeader(point_format=6, version="1.4")  # Example format and version
+
+    # Append the clipped points to the existing LIDAR data
+    new_x = np.concatenate([las_data.x, x_coords])
+    new_y = np.concatenate([las_data.y, y_coords])
+    new_z = np.concatenate([las_data.z, z_coords])
+    new_classes = np.concatenate([las_data.classification, classes])
+
+    # Create a new LAS file with updated data
+    updated_las = laspy.LasData(header)
+    updated_las.x = new_x
+    updated_las.y = new_y
+    updated_las.z = new_z
+    updated_las.classification = new_classes
+
+    # Write the updated LAS file
+    with laspy.open(output_las, mode="w", header=header, do_compress=True) as writer:
+        writer.write_points(updated_las.points)
```
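A rough usage sketch of the two helpers chained together (the paths are hypothetical; note that `add_points_to_las` reads the elevation from a `RecupZ` column of the GeoDataFrame, as the code above shows):

```python
# Hedged sketch: clip the 3D points to the tile, then append them as class-66 virtual points.
from pdaltools.add_points_in_pointcloud import add_points_to_las, clip_3d_points_to_tile

# Keep only the points that fall inside the theoretical 1 km tile of the LAZ file (hypothetical paths).
clipped = clip_3d_points_to_tile("virtual_points.geojson", "tile.laz", "EPSG:2154")

# The GeoDataFrame must carry the elevation in a "RecupZ" column for add_points_to_las to use it.
add_points_to_las(clipped, "tile.laz", "tile_with_points.laz", virtual_points_classes=66)
```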
{ign_pdal_tools-1.7.4 → ign_pdal_tools-1.7.6}/pdaltools/las_info.py

```diff
@@ -6,6 +6,8 @@ from typing import Dict, Tuple
 import osgeo.osr as osr
 import pdal
 
+from pdaltools.pcd_info import infer_tile_origin
+
 osr.UseExceptions()
 
 
@@ -17,13 +19,37 @@ def las_info_metadata(filename: str):
     return metadata
 
 
-def get_bounds_from_header_info(metadata):
+def get_bounds_from_header_info(metadata: Dict) -> Tuple[float, float, float, float]:
+    """Get bounds from metadata that has been extracted previously from the header of a las file
+
+    Args:
+        metadata (str): Dictonary containing metadata from a las file (as extracted with pipeline.quickinfo)
+
+    Returns:
+        Tuple[float, float, float, float]: minx, maxx, miny, maxy
+    """
     bounds = metadata["bounds"]
     minx, maxx, miny, maxy = bounds["minx"], bounds["maxx"], bounds["miny"], bounds["maxy"]
 
     return minx, maxx, miny, maxy
 
 
+def get_tile_origin_using_header_info(filename: str, tile_width: int = 1000) -> Tuple[int, int]:
+    """ "Get las file theoretical origin (xmin, ymax) for a data that originates from a square tesselation/tiling
+    using the tesselation tile width only, directly from its path
+    Args:
+        filename (str): path to the las file
+        tile_width (int, optional): Tesselation tile width (in meters). Defaults to 1000.
+
+    Returns:
+        Tuple[int, int]: (origin_x, origin_y) tile origin coordinates = theoretical (xmin, ymax)
+    """
+    metadata = las_info_metadata(filename)
+    minx, maxx, miny, maxy = get_bounds_from_header_info(metadata)
+
+    return infer_tile_origin(minx, maxx, miny, maxy, tile_width)
+
+
 def get_epsg_from_header_info(metadata):
     if "srs" not in metadata.keys():
         raise RuntimeError("EPSG could not be inferred from metadata: No 'srs' key in metadata.")
```
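The new `get_tile_origin_using_header_info` helper simply reads the header bounds and delegates to `pcd_info.infer_tile_origin`; a minimal sketch with a hypothetical path:

```python
# Hedged sketch: theoretical (xmin, ymax) origin of the 1 km tile a LAS/LAZ file belongs to.
from pdaltools.las_info import get_tile_origin_using_header_info

origin_x, origin_y = get_tile_origin_using_header_info("tile.laz", tile_width=1000)  # hypothetical path
```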
ign_pdal_tools-1.7.6/pdaltools/pcd_info.py (new file)

```diff
@@ -0,0 +1,76 @@
+"""Tools to get information from a point cloud (points as a numpy array)"""
+
+from typing import Tuple
+
+import numpy as np
+
+
+def infer_tile_origin(minx: float, maxx: float, miny: float, maxy: float, tile_width: int) -> Tuple[int, int]:
+    """Get point cloud theoretical origin (xmin, ymax) for a data that originates from a square tesselation/tiling
+    using the tesselation tile width only, based on the min/max values
+
+    Edge values are supposed to be included in the tile
+
+    Args:
+        minx (float): point cloud min x value
+        maxx (float): point cloud max x value
+        miny (float): point cloud min y value
+        maxy (float): point cloud max y value
+        tile_width (int): tile width in meters
+
+    Raises:
+        ValueError: In case the min and max values do not belong to the same tile
+
+    Returns:
+        Tuple[int, int]: (origin_x, origin_y) tile origin coordinates = theoretical (xmin, ymax)
+    """
+
+    minx_tile_index = np.floor(minx / tile_width)
+    maxx_tile_index = np.floor(maxx / tile_width) if maxx % tile_width != 0 else np.floor(maxx / tile_width) - 1
+    miny_tile_index = np.ceil(miny / tile_width) if miny % tile_width != 0 else np.floor(miny / tile_width) + 1
+    maxy_tile_index = np.ceil(maxy / tile_width)
+
+    if maxx_tile_index == minx_tile_index and maxy_tile_index == miny_tile_index:
+        origin_x = minx_tile_index * tile_width
+        origin_y = maxy_tile_index * tile_width
+        return origin_x, origin_y
+    else:
+        raise ValueError(
+            f"Min values (x={minx} and y={miny}) do not belong to the same theoretical tile as"
+            f"max values (x={maxx} and y={maxy})."
+        )
+
+
+def get_pointcloud_origin_from_tile_width(
+    points: np.ndarray, tile_width: int = 1000, buffer_size: float = 0
+) -> Tuple[int, int]:
+    """Get point cloud theoretical origin (xmin, ymax) for a data that originates from a square tesselation/tiling
+    using the tesselation tile width only, based on the point cloud as a np.ndarray
+
+    Edge values are supposed to be included in the tile
+
+    In case buffer_size is provided, the origin will be calculated on an "original" tile, supposing that
+    there has been a buffer added to the input tile.
+
+    Args:
+        points (np.ndarray): numpy array with the tile points
+        tile_width (int, optional): Edge size of the square used for tiling. Defaults to 1000.
+        buffer_size (float, optional): Optional buffer around the tile. Defaults to 0.
+
+    Raises:
+        ValueError: Raise an error when the initial tile is smaller than the buffer (in this case, we cannot find the
+        origin (it can be either in the buffer or in the tile))
+
+    Returns:
+        Tuple[int, int]: (origin_x, origin_y) origin coordinates
+    """
+    # Extract coordinates xmin, xmax, ymin and ymax of the original tile without buffer
+    minx, miny = np.min(points[:, :2], axis=0) + buffer_size
+    maxx, maxy = np.max(points[:, :2], axis=0) - buffer_size
+
+    if maxx < minx or maxy < miny:
+        raise ValueError(
+            "Cannot find pointcloud origin as the pointcloud width or height is smaller than buffer width"
+        )
+
+    return infer_tile_origin(minx, maxx, miny, maxy, tile_width)
```
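The origin inference maps each bound to a tile index, counting values that sit exactly on a tile edge as inside the tile (an x max on an edge goes with the tile to its left, a y min on an edge with the tile above it), and only accepts the result if the min and max bounds land in the same tile. A worked example mirroring the `(500, 1000, 500, 500)` case from the new tests:

```python
# Worked example of the index arithmetic in infer_tile_origin for minx, maxx, miny, maxy = 500, 1000, 500, 500.
import numpy as np

tile_width = 1000
minx, maxx, miny, maxy = 500, 1000, 500, 500

minx_tile_index = np.floor(minx / tile_width)      # floor(0.5) -> 0
maxx_tile_index = np.floor(maxx / tile_width) - 1  # maxx % tile_width == 0 here, so 1 - 1 -> 0 (edge stays in the tile)
miny_tile_index = np.ceil(miny / tile_width)       # miny % tile_width != 0 here, so ceil(0.5) -> 1
maxy_tile_index = np.ceil(maxy / tile_width)       # ceil(0.5) -> 1

# The x indices match and the y indices match, so the origin is
# (minx_tile_index * tile_width, maxy_tile_index * tile_width) == (0, 1000), as the test expects.
```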
{ign_pdal_tools-1.7.4 → ign_pdal_tools-1.7.6}/pdaltools/standardize_format.py

```diff
@@ -19,6 +19,7 @@ import pdal
 
 from pdaltools.unlock_file import copy_and_hack_decorator
 
+# Standard parameters to pass to the pdal writer
 STANDARD_PARAMETERS = dict(
     major_version="1",
     minor_version="4",  # Laz format version (pdal always write in 1.x format)
@@ -33,7 +34,6 @@ STANDARD_PARAMETERS = dict(
     offset_z=0,
     dataformat_id=6,  # No color by default
     a_srs="EPSG:2154",
-    class_points_removed=[],  # remove points from class
 )
 
 
```
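For context, the new comment says `STANDARD_PARAMETERS` holds the keyword arguments forwarded to the PDAL LAS writer. A hedged sketch of that pattern, reusing the `**params` style seen in `add_points_in_las.py` (hypothetical paths, illustrative subset of parameters):

```python
# Hedged sketch: forwarding writer parameters to pdal.Writer.las, as the diff's other modules do.
import pdal

params = dict(minor_version="4", dataformat_id=6, a_srs="EPSG:2154")  # illustrative subset of STANDARD_PARAMETERS
pipeline = pdal.Reader.las("input.laz") | pdal.Writer.las("output.laz", **params)  # hypothetical paths
pipeline.execute()
```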
ign_pdal_tools-1.7.6/test/test_add_points_in_las.py (new file)

```diff
@@ -0,0 +1,72 @@
+import pytest
+import os
+import random as rand
+import tempfile
+import math
+
+import pdal
+
+import geopandas as gpd
+from shapely.geometry import Point
+
+from pdaltools import add_points_in_las
+
+numeric_precision = 4
+
+TEST_PATH = os.path.dirname(os.path.abspath(__file__))
+INPUT_DIR = os.path.join(TEST_PATH, "data")
+INPUT_LAS = os.path.join(INPUT_DIR, "test_data_77055_627760_LA93_IGN69.laz")
+
+Xmin = 770575
+Ymin = 6277575
+Zmin = 20
+Size = 20
+
+def distance3D(pt_geo, pt_las):
+    return round(
+        math.sqrt((pt_geo.x - pt_las['X']) ** 2 + (pt_geo.y - pt_las['Y']) ** 2 + (pt_geo.z - pt_las['Z']) ** 2),
+        numeric_precision,
+    )
+
+def add_point_in_las(pt_geo, inside_las):
+    geom = [pt_geo]
+    series = gpd.GeoSeries(geom, crs="2154")
+
+    with tempfile.NamedTemporaryFile(suffix="_geom_tmp.las") as out_las_file:
+        with tempfile.NamedTemporaryFile(suffix="_geom_tmp.geojson") as geom_file:
+            series.to_file(geom_file.name)
+
+            added_dimensions = {"Classification":64, "Intensity":1.}
+            add_points_in_las.add_points_in_las(INPUT_LAS, geom_file.name, out_las_file.name, inside_las, added_dimensions)
+
+            pipeline = pdal.Pipeline() | pdal.Reader.las(out_las_file.name)
+            pipeline.execute()
+            points_las = pipeline.arrays[0]
+            points_las = [e for e in points_las if all(e[val] == added_dimensions[val] for val in added_dimensions)]
+            return points_las
+
+def test_add_point_inside_las():
+    X = Xmin + rand.uniform(0, 1) * Size
+    Y = Ymin + rand.uniform(0, 1) * Size
+    Z = Zmin + rand.uniform(0, 1) * 10
+    pt_geo = Point(X, Y, Z)
+    points_las = add_point_in_las(pt_geo=pt_geo, inside_las=True)
+    assert len(points_las) == 1
+    assert distance3D(pt_geo, points_las[0]) < 1 / numeric_precision
+
+def test_add_point_outside_las_no_control():
+    X = Xmin + rand.uniform(2, 3) * Size
+    Y = Ymin + rand.uniform(0, 1) * Size
+    Z = Zmin + rand.uniform(0, 1) * 10
+    pt_geo = Point(X, Y, Z)
+    points_las = add_point_in_las(pt_geo=pt_geo, inside_las=False)
+    assert len(points_las) == 1
+    assert distance3D(pt_geo, points_las[0]) < 1 / numeric_precision
+
+def test_add_point_outside_las_with_control():
+    X = Xmin + rand.uniform(2, 3) * Size
+    Y = Ymin + rand.uniform(2, 3) * Size
+    Z = Zmin + rand.uniform(0, 1) * 10
+    pt_geo = Point(X, Y, Z)
+    points_las = add_point_in_las(pt_geo=pt_geo, inside_las=True)
+    assert len(points_las) == 0
```
ign_pdal_tools-1.7.6/test/test_add_points_in_pointcloud.py (new file)

```diff
@@ -0,0 +1,82 @@
+import os
+from pathlib import Path
+
+import pdal
+
+from pdaltools import add_points_in_pointcloud
+
+TEST_PATH = os.path.dirname(os.path.abspath(__file__))
+TMP_PATH = os.path.join(TEST_PATH, "data/output")
+DATA_LIDAR_PATH = os.path.join(TEST_PATH, "data/decimated_laz")
+DATA_POINTS_PATH = os.path.join(TEST_PATH, "data/points_3d")
+
+INPUT_FILE = os.path.join(DATA_LIDAR_PATH, "test_semis_2023_0292_6833_LA93_IGN69.laz")
+INPUT_POINTS = os.path.join(DATA_POINTS_PATH, "Points_virtuels_0292_6833.geojson")
+OUTPUT_FILE = os.path.join(TMP_PATH, "test_semis_2023_0292_6833_LA93_IGN69.laz")
+
+INPUT_FILE_SMALL = os.path.join(DATA_LIDAR_PATH, "test_semis_2021_0382_6565_LA93_IGN69.laz")
+INPUT_POINTS_SMALL = os.path.join(DATA_POINTS_PATH, "Points_virtuels_0382_6565.geojson")
+OUTPUT_FILE_SMALL = os.path.join(TMP_PATH, "test_semis_2021_0382_6565_LA93_IGN69.laz")
+
+
+def setup_module(module):
+    os.makedirs("test/data/output", exist_ok=True)
+
+
+def test_get_tile_bbox():
+    bbox = add_points_in_pointcloud.get_tile_bbox(INPUT_FILE, 1000)
+    assert bbox == (292000.0, 6832000.0, 293000.0, 6833000.0)  # check the bbox from LIDAR tile
+
+
+def test_clip_3d_points_to_tile():
+    points_clipped = add_points_in_pointcloud.clip_3d_points_to_tile(INPUT_POINTS, INPUT_FILE, "EPSG:2154")
+    assert len(points_clipped) == 678  # chech the entity's number of points
+
+
+def test_add_line_to_lidar():
+    points_clipped = add_points_in_pointcloud.clip_3d_points_to_tile(INPUT_POINTS, INPUT_FILE, "EPSG:2154")
+
+    add_points_in_pointcloud.add_points_to_las(points_clipped, INPUT_FILE, OUTPUT_FILE, 68)
+    assert Path(OUTPUT_FILE).exists()  # check output exists
+
+    # Filter pointcloud by classes
+    pipeline = (
+        pdal.Reader.las(filename=OUTPUT_FILE, nosrs=True)
+        | pdal.Filter.range(
+            limits="Classification[68:68]",
+        )
+        | pdal.Filter.stats()
+    )
+    pipeline.execute()
+    metadata = pipeline.metadata
+    # Count the pointcloud's number from classe "68"
+    point_count = metadata["metadata"]["filters.stats"]["statistic"][0]["count"]
+    assert point_count == 678
+
+
+def test_get_tile_bbox_small():
+    # Tile is not complete (NOT 1km * 1km)
+    bbox = add_points_in_pointcloud.get_tile_bbox(INPUT_FILE_SMALL, 1000)
+    assert bbox == (382000.0, 6564000.0, 383000.0, 6565000.0)  # return BBOX 1km * 1km
+
+
+def test_add_line_to_lidar_small():
+    # Tile is not complete (NOT 1km * 1km)
+    points_clipped = add_points_in_pointcloud.clip_3d_points_to_tile(INPUT_POINTS_SMALL, INPUT_FILE_SMALL, "EPSG:2154")
+
+    add_points_in_pointcloud.add_points_to_las(points_clipped, INPUT_FILE_SMALL, OUTPUT_FILE_SMALL, 68)
+    assert Path(OUTPUT_FILE).exists()  # check output exists
+
+    # Filter pointcloud by classes
+    pipeline = (
+        pdal.Reader.las(filename=OUTPUT_FILE_SMALL, nosrs=True)
+        | pdal.Filter.range(
+            limits="Classification[68:68]",
+        )
+        | pdal.Filter.stats()
+    )
+    pipeline.execute()
+    metadata = pipeline.metadata
+    # Count the pointcloud's number from classe "68"
+    point_count = metadata["metadata"]["filters.stats"]["statistic"][0]["count"]
+    assert point_count == 186
```
{ign_pdal_tools-1.7.4 → ign_pdal_tools-1.7.6}/test/test_las_info.py

```diff
@@ -40,6 +40,11 @@ def test_get_bounds_from_quickinfo_metadata():
     assert bounds == (INPUT_MINS[0], INPUT_MAXS[0], INPUT_MINS[1], INPUT_MAXS[1])
 
 
+def test_get_tile_origin_using_header_info():
+    origin_x, origin_y = las_info.get_tile_origin_using_header_info(INPUT_FILE, tile_width=TILE_WIDTH)
+    assert (origin_x, origin_y) == (COORD_X * TILE_COORD_SCALE, COORD_Y * TILE_COORD_SCALE)
+
+
 def test_get_epsg_from_quickinfo_metadata_ok():
     metadata = las_info.las_info_metadata(INPUT_FILE)
     assert las_info.get_epsg_from_header_info(metadata) == "2154"
```
ign_pdal_tools-1.7.6/test/test_pcd_info.py (new file)

```diff
@@ -0,0 +1,87 @@
+import os
+
+import laspy
+import numpy as np
+import pytest
+
+from pdaltools import pcd_info
+
+TEST_PATH = os.path.dirname(os.path.abspath(__file__))
+TMP_PATH = os.path.join(TEST_PATH, "tmp")
+DATA_PATH = os.path.join(TEST_PATH, "data")
+
+
+@pytest.mark.parametrize(
+    "minx, maxx, miny, maxy, expected_origin",
+    [
+        (501, 999, 501, 999, (0, 1000)),  # points in the second half
+        (1, 400, 1, 400, (0, 1000)),  # points in the first half
+        (500, 1000, 500, 500, (0, 1000)),  # xmax on edge and xmin in the tile
+        (0, 20, 500, 500, (0, 1000)),  # xmin on edge and xmax in the tile
+        (950, 1000, 500, 500, (0, 1000)),  # xmax on edge and xmin in the tile
+        (500, 500, 980, 1000, (0, 1000)),  # ymax on edge and ymin in the tile
+        (500, 500, 0, 20, (0, 1000)),  # ymin on edge and ymax in the tile
+        (0, 1000, 0, 1000, (0, 1000)),  # points at each corner
+    ],
+)
+def test_infer_tile_origin_edge_cases(minx, maxx, miny, maxy, expected_origin):
+    origin_x, origin_y = pcd_info.infer_tile_origin(minx, maxx, miny, maxy, tile_width=1000)
+    assert (origin_x, origin_y) == expected_origin
+
+
+@pytest.mark.parametrize(
+    "minx, maxx, miny, maxy",
+    [
+        (0, 20, -1, 20),  # ymin slightly outside the tile
+        (-1, 20, 0, 20),  # xmin slightly outside the tile
+        (280, 1000, 980, 1001),  # ymax slightly outside the tile
+        (980, 1001, 980, 1000),  # xmax slightly outside the tile
+        (-1, 1000, 0, 1000),  # xmax on edge but xmin outside the tile
+        (0, 1000, 0, 1001),  # ymin on edge but ymax outside the tile
+        (0, 1001, 0, 1000),  # xmin on edge but xmax outside the tile
+        (0, 1000, -1, 1000),  # ymax on edge but ymin outside the tile
+    ],
+)
+def test_infer_tile_origin_edge_cases_fail(minx, maxx, miny, maxy):
+    with pytest.raises(ValueError):
+        pcd_info.infer_tile_origin(minx, maxx, miny, maxy, tile_width=1000)
+
+
+@pytest.mark.parametrize(
+    "input_points",
+    [
+        (np.array([[0, -1, 0], [20, 20, 0]])),  # ymin slightly outside the tile
+        (np.array([[-1, 0, 0], [20, 20, 0]])),  # xmin slightly outside the tile
+        (np.array([[980, 980, 0], [1000, 1001, 0]])),  # ymax slightly outside the tile
+        (np.array([[980, 980, 0], [1001, 1000, 0]])),  # xmax slightly outside the tile
+        (np.array([[-1, 0, 0], [1000, 1000, 0]])),  # xmax on edge but xmin outside the tile
+        (np.array([[0, 0, 0], [1000, 1001, 0]])),  # ymin on edge but ymax outside the tile
+        (np.array([[0, 0, 0], [1001, 1000, 0]])),  # xmin on edge but xmax outside the tile
+        (np.array([[0, -1, 0], [1000, 1000, 0]])),  # ymax on edge but ymin outside the tile
+    ],
+)
+def test_get_pointcloud_origin_edge_cases_fail(input_points):
+    with pytest.raises(ValueError):
+        pcd_info.get_pointcloud_origin_from_tile_width(points=input_points, tile_width=1000)
+
+
+def test_get_pointcloud_origin_on_file():
+    input_las = os.path.join(DATA_PATH, "test_data_77055_627760_LA93_IGN69.laz")
+    expected_origin = (770550, 6277600)
+    LAS = laspy.read(input_las)
+    INPUT_POINTS = np.vstack((LAS.x, LAS.y, LAS.z)).transpose()
+
+    origin_x, origin_y = pcd_info.get_pointcloud_origin_from_tile_width(points=INPUT_POINTS, tile_width=50)
+    assert (origin_x, origin_y) == expected_origin
+    origin_x_2, origin_y_2 = pcd_info.get_pointcloud_origin_from_tile_width(
+        points=INPUT_POINTS, tile_width=10, buffer_size=20
+    )
+    assert (origin_x_2, origin_y_2) == (expected_origin[0] + 20, expected_origin[1] - 20)
+
+
+def test_get_pointcloud_origin_fail_on_buffersize():
+    with pytest.raises(ValueError):
+        # Case when buffer size is bigger than the tile extremities (case not handled)
+        points = np.array([[0, 0, 0], [20, 20, 0]])
+        buffer_size = 30
+        pcd_info.get_pointcloud_origin_from_tile_width(points=points, tile_width=1000, buffer_size=buffer_size)
```
ign_pdal_tools-1.7.4/pdaltools/pcd_info.py (removed)

```diff
@@ -1,46 +0,0 @@
-"""Tools to get information from a point cloud (points as a numpy array)"""
-
-from typing import Tuple
-
-import numpy as np
-
-
-def get_pointcloud_origin_from_tile_width(
-    points: np.ndarray, tile_width: int = 1000, buffer_size: float = 0
-) -> Tuple[int, int]:
-    """Get point cloud theoretical origin (xmin, ymax) for a data that originates from a square tesselation/tiling
-    using the tesselation tile width only.
-
-    Edge values are supposed to be included in the tile
-
-
-    Args:
-        points (np.ndarray): numpy array with the tile points
-        tile_width (int, optional): Edge size of the square used for tiling. Defaults to 1000.
-        buffer_size (float, optional): Optional buffer around the tile. Defaults to 0.
-
-    Raises:
-        ValueError: Raise an error when the bounding box of the tile is not included in a tile
-
-    Returns:
-        Tuple[int, int]: (origin_x, origin_y) origin coordinates
-    """
-    # Extract coordinates xmin, xmax, ymin and ymax of the original tile without buffer
-    x_min, y_min = np.min(points[:, :2], axis=0) + buffer_size
-    x_max, y_max = np.max(points[:, :2], axis=0) - buffer_size
-
-    # Calculate the tiles to which x, y bounds belong
-    tile_x_min = np.floor(x_min / tile_width)
-    tile_x_max = np.floor(x_max / tile_width) if x_max % tile_width != 0 else np.floor(x_max / tile_width) - 1
-    tile_y_min = np.ceil(y_min / tile_width) if y_min % tile_width != 0 else np.floor(y_min / tile_width) + 1
-    tile_y_max = np.ceil(y_max / tile_width)
-
-    if not (tile_x_max - tile_x_min) and not (tile_y_max - tile_y_min):
-        origin_x = tile_x_min * tile_width
-        origin_y = tile_y_max * tile_width
-        return origin_x, origin_y
-    else:
-        raise ValueError(
-            f"Min values (x={x_min} and y={y_min}) do not belong to the same theoretical tile as"
-            f"max values (x={x_max} and y={y_max})."
-        )
```
ign_pdal_tools-1.7.4/test/test_pcd_info.py (removed)

```diff
@@ -1,61 +0,0 @@
-import os
-
-import laspy
-import numpy as np
-import pytest
-
-from pdaltools import pcd_info
-
-TEST_PATH = os.path.dirname(os.path.abspath(__file__))
-TMP_PATH = os.path.join(TEST_PATH, "tmp")
-DATA_PATH = os.path.join(TEST_PATH, "data")
-
-
-@pytest.mark.parametrize(
-    "input_points, expected_origin",
-    [
-        (np.array([[501, 501, 0], [999, 999, 0]]), (0, 1000)),  # points in the second half
-        (np.array([[1, 1, 0], [400, 400, 0]]), (0, 1000)),  # points in the frist half
-        (np.array([[500, 500, 0], [1000, 500, 0]]), (0, 1000)),  # xmax on edge and xmin in the tile
-        (np.array([[0, 500, 0], [20, 500, 0]]), (0, 1000)),  # xmin on edge and xmax in the tile
-        (np.array([[950, 500, 0], [1000, 500, 0]]), (0, 1000)),  # xmax on edge and xmin in the tile
-        (np.array([[500, 980, 0], [500, 1000, 0]]), (0, 1000)),  # ymax on edge and ymin in the tile
-        (np.array([[500, 0, 0], [500, 20, 0]]), (0, 1000)),  # ymin on edge and ymax in the tile
-        (np.array([[0, 0, 0], [1000, 1000, 0]]), (0, 1000)),  # points at each corner
-    ],
-)
-def test_get_pointcloud_origin_edge_cases(input_points, expected_origin):
-    origin_x, origin_y = pcd_info.get_pointcloud_origin_from_tile_width(points=input_points, tile_width=1000)
-    assert (origin_x, origin_y) == expected_origin
-
-
-@pytest.mark.parametrize(
-    "input_points",
-    [
-        (np.array([[0, -1, 0], [20, 20, 0]])),  # ymin slightly outside the tile
-        (np.array([[-1, 0, 0], [20, 20, 0]])),  # xmin slightly outside the tile
-        (np.array([[980, 980, 0], [1000, 1001, 0]])),  # ymax slightly outside the tile
-        (np.array([[980, 980, 0], [1001, 1000, 0]])),  # xmax slightly outside the tile
-        (np.array([[-1, 0, 0], [1000, 1000, 0]])),  # xmax on edge but xmin outside the tile
-        (np.array([[0, 0, 0], [1000, 1001, 0]])),  # ymin on edge but ymax outside the tile
-        (np.array([[0, 0, 0], [1001, 1000, 0]])),  # xmin on edge but xmax outside the tile
-        (np.array([[0, -1, 0], [1000, 1000, 0]])),  # ymax on edge but ymin outside the tile
-    ],
-)
-def test_get_pointcloud_origin_edge_cases_fail(input_points):
-    with pytest.raises(ValueError):
-        pcd_info.get_pointcloud_origin_from_tile_width(points=input_points, tile_width=1000)
-
-
-def test_get_pointcloud_origin_on_file():
-    input_las = os.path.join(DATA_PATH, "test_data_77055_627760_LA93_IGN69.laz")
-    expected_origin = (770550, 6277600)
-    LAS = laspy.read(input_las)
-    INPUT_POINTS = np.vstack((LAS.x, LAS.y, LAS.z)).transpose()
-
-    origin_x, origin_y = pcd_info.get_pointcloud_origin_from_tile_width(points=INPUT_POINTS, tile_width=50)
-    assert (origin_x, origin_y) == expected_origin
-    origin_x_2, origin_y_2 = pcd_info.get_pointcloud_origin_from_tile_width(
-        points=INPUT_POINTS, tile_width=10, buffer_size=20
-    )
-    assert (origin_x_2, origin_y_2) == (expected_origin[0] + 20, expected_origin[1] - 20)
```