ign-pdal-tools 1.11.1__tar.gz → 1.12.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/PKG-INFO +1 -1
- {ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/ign_pdal_tools.egg-info/PKG-INFO +1 -1
- {ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/ign_pdal_tools.egg-info/SOURCES.txt +4 -0
- {ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/pdaltools/_version.py +1 -1
- {ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/pdaltools/add_points_in_pointcloud.py +5 -5
- ign_pdal_tools-1.12.2/pdaltools/create_random_laz.py +146 -0
- ign_pdal_tools-1.12.2/pdaltools/las_comparison.py +117 -0
- {ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/pdaltools/replace_attribute_in_las.py +2 -16
- {ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/pdaltools/standardize_format.py +2 -31
- {ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/test/test_add_points_in_pointcloud.py +22 -4
- {ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/test/test_color.py +12 -9
- ign_pdal_tools-1.12.2/test/test_create_random_laz.py +180 -0
- ign_pdal_tools-1.12.2/test/test_las_comparison.py +284 -0
- {ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/test/test_las_rename_dimension.py +39 -30
- {ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/test/test_replace_attribute_in_las.py +2 -3
- {ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/test/test_standardize_format.py +84 -46
- {ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/LICENSE.md +0 -0
- {ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/README.md +0 -0
- {ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/ign_pdal_tools.egg-info/dependency_links.txt +0 -0
- {ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/ign_pdal_tools.egg-info/top_level.txt +0 -0
- {ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/pdaltools/color.py +0 -0
- {ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/pdaltools/las_add_buffer.py +0 -0
- {ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/pdaltools/las_clip.py +0 -0
- {ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/pdaltools/las_info.py +0 -0
- {ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/pdaltools/las_merge.py +0 -0
- {ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/pdaltools/las_remove_dimensions.py +0 -0
- {ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/pdaltools/las_rename_dimension.py +0 -0
- {ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/pdaltools/pcd_info.py +0 -0
- {ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/pdaltools/unlock_file.py +0 -0
- {ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/pyproject.toml +0 -0
- {ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/setup.cfg +0 -0
- {ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/test/test_las_add_buffer.py +0 -0
- {ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/test/test_las_clip.py +0 -0
- {ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/test/test_las_info.py +0 -0
- {ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/test/test_las_merge.py +0 -0
- {ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/test/test_las_remove_dimensions.py +0 -0
- {ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/test/test_pcd_info.py +0 -0
- {ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/test/test_pdal_custom.py +0 -0
- {ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/test/test_unlock.py +0 -0

{ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/ign_pdal_tools.egg-info/SOURCES.txt

@@ -8,8 +8,10 @@ ign_pdal_tools.egg-info/top_level.txt
 pdaltools/_version.py
 pdaltools/add_points_in_pointcloud.py
 pdaltools/color.py
+pdaltools/create_random_laz.py
 pdaltools/las_add_buffer.py
 pdaltools/las_clip.py
+pdaltools/las_comparison.py
 pdaltools/las_info.py
 pdaltools/las_merge.py
 pdaltools/las_remove_dimensions.py
@@ -20,8 +22,10 @@ pdaltools/standardize_format.py
 pdaltools/unlock_file.py
 test/test_add_points_in_pointcloud.py
 test/test_color.py
+test/test_create_random_laz.py
 test/test_las_add_buffer.py
 test/test_las_clip.py
+test/test_las_comparison.py
 test/test_las_info.py
 test/test_las_merge.py
 test/test_las_remove_dimensions.py
{ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/pdaltools/add_points_in_pointcloud.py

@@ -1,18 +1,15 @@
 import argparse
-from shutil import copy2
 import tempfile
+from shutil import copy2
 
 import geopandas as gpd
 import laspy
 import numpy as np
-
-from pyproj.exceptions import CRSError
+import pdal
 from shapely.geometry import MultiPoint, Point, box
 
 from pdaltools.las_info import get_epsg_from_las, get_tile_bbox
 
-import pdal
-
 
 def parse_args(argv=None):
     parser = argparse.ArgumentParser("Add points from GeoJSON in LIDAR tile")
@@ -223,6 +220,9 @@ def generate_3d_points_from_lines(
     and Z coordinates are not available in the geometry.
     """
     # Check if altitude_column is provided and exists in the GeoDataFrame
+    if lines_gdf.empty:
+        return lines_gdf
+
     if altitude_column and (altitude_column not in lines_gdf.columns):
         raise ValueError("altitude_column must exist in the GeoDataFrame if provided.")
 
ign_pdal_tools-1.12.2/pdaltools/create_random_laz.py

@@ -0,0 +1,146 @@
+import numpy as np
+import laspy
+from pathlib import Path
+import argparse
+from pyproj import CRS
+from typing import List, Tuple
+
+
+def create_random_laz(
+    output_file: str,
+    point_format: int = 3,
+    num_points: int = 100,
+    crs: int = 2154,
+    center: Tuple[float, float] = (650000, 6810000),
+    extra_dims: List[Tuple[str, str]] = [],
+):
+    """
+    Create a test LAZ file with EPSG code and additional dimensions.
+
+    Args:
+        output_file: Path to save the LAZ file
+        point_format: Point format of the LAZ file (default: 3)
+        num_points: Number of points to generate
+        crs: EPSG code of the CRS (default: 2154)
+        center: Tuple of floats (x, y) of the center of the area to generate points in
+            (default: (650000, 6810000) ; around Paris)
+        extra_dims: List of tuples (dimension_name, dimension_type) where type can be:
+            'float32', 'float64', 'int8', 'int16', 'int32', 'int64', 'uint8', 'uint16', 'uint32', 'uint64'
+    """
+
+    # Create a new point cloud
+    header = laspy.LasHeader(point_format=point_format, version="1.4")
+
+    # Map string types to numpy types
+    type_mapping = {
+        "float32": np.float32,
+        "float64": np.float64,
+        "int8": np.int8,
+        "int16": np.int16,
+        "int32": np.int32,
+        "int64": np.int64,
+        "uint8": np.uint8,
+        "uint16": np.uint16,
+        "uint32": np.uint32,
+        "uint64": np.uint64,
+    }
+
+    for dim_name, dim_type in extra_dims:
+        if dim_type not in type_mapping:
+            raise ValueError(f"Unsupported dimension type: {dim_type}. Supported types: {list(type_mapping.keys())}")
+
+        numpy_type = type_mapping[dim_type]
+        header.add_extra_dim(laspy.ExtraBytesParams(name=dim_name, type=numpy_type))
+
+    # Create point cloud
+    las = laspy.LasData(header)
+    las.header.add_crs(CRS.from_string(f"epsg:{crs}"))
+
+    # Generate random points in a small area
+    las.x = np.random.uniform(center[0] - 1000, center[0] + 1000, num_points)
+    las.y = np.random.uniform(center[1] - 1000, center[1] + 1000, num_points)
+    las.z = np.random.uniform(0, 200, num_points)
+
+    # Generate random intensity values
+    las.intensity = np.random.randint(0, 255, num_points)
+
+    # Generate random classification values
+    # 66 is the max value for classification of IGN LidarHD
+    # cf. https://geoservices.ign.fr/sites/default/files/2022-05/DT_LiDAR_HD_1-0.pdf
+    if point_format > 3:
+        num_classifications = 66
+    else:
+        num_classifications = 10
+    las.classification = np.random.randint(0, num_classifications, num_points)
+
+    # Generate random values for each extra dimension
+    for dim_name, dim_type in extra_dims:
+        numpy_type = type_mapping[dim_type]
+
+        # Generate appropriate random values based on the type
+        if numpy_type in [np.float32, np.float64]:
+            las[dim_name] = np.random.uniform(0, 10, num_points).astype(numpy_type)
+        elif numpy_type in [np.int8, np.int16, np.int32, np.int64]:
+            las[dim_name] = np.random.randint(-100, 100, num_points).astype(numpy_type)
+        elif numpy_type in [np.uint8, np.uint16, np.uint32, np.uint64]:
+            las[dim_name] = np.random.randint(0, 100, num_points).astype(numpy_type)
+
+    # Write to file
+    las.write(output_file)
+    dimensions = list(las.point_format.dimension_names)
+    return {
+        "output_file": output_file,
+        "num_points": num_points,
+        "dimensions": dimensions,
+    }
+
+
+def test_output_file(result: dict, output_file: str):
+
+    # Validate output file path
+    output_path = Path(output_file)
+    if not output_path.exists():
+        raise ValueError(f"Error: Output file {output_file} does not exist")
+
+    # Print results
+    print(f"Successfully created test LAZ file at {result['output_file']}")
+    print(f"Number of points: {result['num_points']}")
+    print(f"Dimensions available: {result['dimensions']}")
+
+
+def parse_args():
+    # Parse arguments (assuming argparse is used)
+    parser = argparse.ArgumentParser(description="Create a random LAZ file.")
+    parser.add_argument("--output_file", type=str, help="Path to save the LAZ file")
+    parser.add_argument("--point_format", type=int, default=3, help="Point format of the LAZ file")
+    parser.add_argument("--num_points", type=int, default=100, help="Number of points to generate")
+    parser.add_argument(
+        "--extra_dims", type=str, nargs="*", default=[], help="Extra dimensions in the format name:type"
+    )
+    parser.add_argument("--crs", type=int, default=2154, help="Projection code")
+    parser.add_argument(
+        "--center", type=str, default="650000,6810000", help="Center of the area to generate points in"
+    )
+    return parser.parse_args()
+
+
+def main():
+
+    # Parse arguments
+    args = parse_args()
+
+    # Parse extra dimensions
+    extra_dims = [tuple(dim.split(":")) for dim in args.extra_dims]
+
+    # Parse center
+    center = tuple(map(float, args.center.split(",")))
+
+    # Call create_random_laz
+    result = create_random_laz(args.output_file, args.point_format, args.num_points, args.crs, center, extra_dims)
+
+    # Test output file
+    test_output_file(result, args.output_file)
+
+
+if __name__ == "__main__":
+    main()
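For orientation, here is a minimal sketch of how the new create_random_laz helper could be called from Python, based only on the signature shown above; the output path and the extra dimension name are hypothetical, not files or fields shipped by the package:

    # Hypothetical usage of pdaltools.create_random_laz (module path taken from SOURCES.txt above).
    from pdaltools.create_random_laz import create_random_laz

    result = create_random_laz(
        output_file="/tmp/random_sample.laz",    # assumed scratch path
        point_format=6,                          # formats > 3 draw classes in [0, 66) per the code above
        num_points=1000,
        crs=2154,                                # default Lambert-93 EPSG code shown above
        extra_dims=[("confidence", "float32")],  # "confidence" is an invented dimension name
    )
    print(result["dimensions"])                  # standard dimensions plus "confidence"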
ign_pdal_tools-1.12.2/pdaltools/las_comparison.py

@@ -0,0 +1,117 @@
+import laspy
+from pathlib import Path
+import numpy as np
+import argparse
+from typing import Tuple
+
+
+def compare_las_dimensions(file1: Path, file2: Path, dimensions: list = None) -> Tuple[bool, int, float]:
+    """
+    Compare specified dimensions between two LAS files.
+    If no dimensions are specified, compares all available dimensions.
+    Sorts points by x,y,z,gps_time coordinates before comparison to ensure point order consistency.
+
+    Args:
+        file1: Path to the first LAS file
+        file2: Path to the second LAS file
+        dimensions: List of dimension names to compare (optional)
+
+    Returns:
+        bool: True if all specified dimensions are identical, False otherwise
+        int: Number of points with different dimensions
+        float: Percentage of points with different dimensions
+    """
+    try:
+        # Read both LAS files
+        las1 = laspy.read(file1)
+        las2 = laspy.read(file2)
+
+        # Check if files have the same number of points
+        if len(las1) != len(las2):
+            print(f"Files have different number of points: {len(las1)} vs {len(las2)}")
+            return False, 0, 0
+        print(f"Files have the same number of points: {len(las1)} vs {len(las2)}")
+
+        # Sort points by x,y,z,gps_time coordinates
+        # Create sorting indices
+        sort_idx1 = np.lexsort((las1.z, las1.y, las1.x, las1.gps_time))
+        sort_idx2 = np.lexsort((las2.z, las2.y, las2.x, las2.gps_time))
+
+        # If no dimensions specified, compare all dimensions
+        dimensions_las1 = sorted(las1.point_format.dimension_names)
+        dimensions_las2 = sorted(las2.point_format.dimension_names)
+
+        if dimensions is None:
+            if dimensions_las1 != dimensions_las2:
+                print("Files have different dimensions")
+                return False, 0, 0
+            dimensions = dimensions_las1
+        else:
+            for dim in dimensions:
+                if dim not in dimensions_las1 or dim not in dimensions_las2:
+                    print(
+                        f"Dimension '{dim}' is not found in one or both files.\n"
+                        f"Available dimensions: {las1.point_format.dimension_names}"
+                    )
+                    return False, 0, 0
+
+        # Compare each dimension
+        for dim in dimensions:
+            try:
+                # Get sorted dimension arrays
+                dim1 = np.array(las1[dim])[sort_idx1]
+                dim2 = np.array(las2[dim])[sort_idx2]
+
+                # Compare dimensions
+                if not np.array_equal(dim1, dim2):
+                    # Find differences
+                    diff_indices = np.where(dim1 != dim2)[0]
+                    print(f"Found {len(diff_indices)} points with different {dim}:")
+                    for idx in diff_indices[:10]:  # Show first 10 differences
+                        print(f"Point {idx}: file1={dim1[idx]}, file2={dim2[idx]}")
+                    if len(diff_indices) > 10:
+                        print(f"... and {len(diff_indices) - 10} more differences")
+                    return False, len(diff_indices), 100 * len(diff_indices) / len(las1)
+
+            except KeyError:
+                print(f"Dimension '{dim}' not found in one or both files")
+                return False, 0, 0
+
+        return True, 0, 0
+
+    except laspy.errors.LaspyException as e:
+        print(f"LAS file error: {str(e)}")
+        return False, 0, 0
+    except FileNotFoundError as e:
+        print(f"File not found: {str(e)}")
+        return False, 0, 0
+    except ValueError as e:
+        print(f"Value error: {str(e)}")
+        return False, 0, 0
+
+
+# Update main function to use the new compare function
+def main():
+    parser = argparse.ArgumentParser(description="Compare dimensions between two LAS files")
+    parser.add_argument("file1", type=str, help="Path to first LAS file")
+    parser.add_argument("file2", type=str, help="Path to second LAS file")
+    parser.add_argument(
+        "--dimensions", nargs="*", help="List of dimensions to compare. If not specified, compares all dimensions."
+    )
+
+    args = parser.parse_args()
+
+    file1 = Path(args.file1)
+    file2 = Path(args.file2)
+
+    if not file1.exists() or not file2.exists():
+        print("Error: One or both files do not exist")
+        exit(1)
+
+    result = compare_las_dimensions(file1, file2, args.dimensions)
+    print(f"Dimensions comparison result: {'identical' if result[0] else 'different'}")
+    return result
+
+
+if __name__ == "__main__":
+    main()
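Likewise, a hedged sketch of calling the new comparison helper directly from Python; the two file paths are placeholders, and the dimension names assume laspy's lowercase naming:

    # Hypothetical usage of pdaltools.las_comparison; paths are placeholders.
    from pathlib import Path
    from pdaltools.las_comparison import compare_las_dimensions

    identical, nb_diff, pct_diff = compare_las_dimensions(
        Path("tile_before.laz"),
        Path("tile_after.laz"),
        dimensions=["classification", "intensity"],  # omit to compare every dimension
    )
    if not identical:
        print(f"{nb_diff} points differ ({pct_diff:.2f} %)")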
{ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/pdaltools/replace_attribute_in_las.py

@@ -4,13 +4,12 @@ import argparse
 import json
 import logging
 import os
-import tempfile
 from collections import Counter
 from typing import Dict, List
 
 import pdal
 
-from pdaltools.standardize_format import exec_las2las, get_writer_parameters
+from pdaltools.standardize_format import get_writer_parameters
 from pdaltools.unlock_file import copy_and_hack_decorator
 
 
@@ -106,26 +105,13 @@ def parse_replacement_map_from_path_or_json_string(replacement_map):
     return parsed_map
 
 
-def replace_values_clean(
-    input_file: str,
-    output_file: str,
-    replacement_map: Dict,
-    attribute: str = "Classification",
-    writer_parameters: Dict = {},
-):
-    filename = os.path.basename(output_file)
-    with tempfile.NamedTemporaryFile(suffix=filename) as tmp:
-        replace_values(input_file, tmp.name, replacement_map, attribute, writer_parameters)
-        exec_las2las(tmp.name, output_file)
-
-
 def main():
     args = parse_args()
     writer_params_from_parser = dict(dataformat_id=args.record_format, a_srs=args.projection)
     writer_parameters = get_writer_parameters(writer_params_from_parser)
     replacement_map = parse_replacement_map_from_path_or_json_string(args.replacement_map)
 
-
+    replace_values(args.input_file, args.output_file, replacement_map, args.attribute, writer_parameters)
 
 
 if __name__ == "__main__":
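Since the replace_values_clean wrapper is gone, callers go through replace_values directly, as main() now does. A sketch under the assumption that replace_values keeps the positional signature visible in the removed wrapper; paths, parameter values, and the replacement map are illustrative only (see parse_replacement_map_from_path_or_json_string for the accepted map format):

    # Hypothetical call; file paths and the replacement map content are invented for illustration.
    from pdaltools.replace_attribute_in_las import replace_values
    from pdaltools.standardize_format import get_writer_parameters

    writer_parameters = get_writer_parameters(dict(dataformat_id=6, a_srs="EPSG:2154"))
    replace_values(
        "input.laz",
        "output.laz",
        {"2": ["0"], "66": ["67"]},  # illustrative mapping; use the structure accepted by the module's parser
        "Classification",
        writer_parameters,
    )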
{ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/pdaltools/standardize_format.py

@@ -9,9 +9,6 @@
 """
 
 import argparse
-import os
-import platform
-import subprocess as sp
 import tempfile
 from typing import Dict, List
 
@@ -79,8 +76,8 @@ def get_writer_parameters(new_parameters: Dict) -> Dict:
     params = STANDARD_PARAMETERS | new_parameters
     return params
 
-
-def rewrite_with_pdal(
+@copy_and_hack_decorator
+def standardize(
     input_file: str, output_file: str, params_from_parser: Dict, classes_to_remove: List = [], rename_dims: List = []
 ) -> None:
     params = get_writer_parameters(params_from_parser)
@@ -109,32 +106,6 @@ def rewrite_with_pdal(
     pipeline.execute()
 
 
-def exec_las2las(input_file: str, output_file: str):
-    if platform.processor() == "arm" and platform.architecture()[0] == "64bit":
-        las2las = "las2las64"
-    else:
-        las2las = "las2las"
-    r = sp.run([las2las, "-i", input_file, "-o", output_file], stderr=sp.PIPE, stdout=sp.PIPE)
-    if r.returncode == 1:
-        msg = r.stderr.decode()
-        print(msg)
-        raise RuntimeError(msg)
-
-    output = r.stdout.decode()
-    for line in output.splitlines():
-        print(line)
-
-
-@copy_and_hack_decorator
-def standardize(
-    input_file: str, output_file: str, params_from_parser: Dict, class_points_removed: [], rename_dims: []
-) -> None:
-    filename = os.path.basename(output_file)
-    with tempfile.NamedTemporaryFile(suffix=filename) as tmp:
-        rewrite_with_pdal(input_file, tmp.name, params_from_parser, class_points_removed, rename_dims)
-        exec_las2las(tmp.name, output_file)
-
-
 def main():
     args = parse_args()
     params_from_parser = dict(
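After this refactor, standardize is the decorated single-pass entry point and the las2las post-processing step is dropped. A hedged sketch of a direct call, with illustrative paths and parameter values, relying on get_writer_parameters to merge the remaining STANDARD_PARAMETERS:

    # Hypothetical call to the refactored standardize; paths and values are invented.
    from pdaltools.standardize_format import standardize

    standardize(
        "raw_tile.laz",
        "standardized_tile.laz",
        dict(dataformat_id=6, a_srs="EPSG:2154"),  # merged over STANDARD_PARAMETERS
        classes_to_remove=[65],                    # example class codes to drop
        rename_dims=[],
    )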
{ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/test/test_add_points_in_pointcloud.py

@@ -220,6 +220,16 @@ def test_line_to_multipoint(line, spacing, z_value, expected_points):
                Point(10, 10, 6.0),
            ],
        ),
+        # Test case for empty lines
+        (
+            gpd.GeoDataFrame(
+                {"geometry": [], "RecupZ": []},
+                crs="EPSG:2154",
+            ),
+            2.5,
+            "RecupZ",
+            [],
+        ),
     ],
 )
 def test_generate_3d_points_from_lines(lines_gdf, spacing, altitude_column, expected_points):
@@ -251,7 +261,7 @@ def test_generate_3d_points_from_lines(lines_gdf, spacing, altitude_column, expe
             678,
             0.25,
             "RecupZ",
-        ),  # should add only
+        ),  # should add only lines (.shp) within tile extend
         (INPUT_PCD, INPUT_LIGNES_SHAPE, None, 678, 0.25, "RecupZ"),  # Should work with or with an input epsg
         (
             INPUT_PCD,
@@ -261,6 +271,14 @@ def test_generate_3d_points_from_lines(lines_gdf, spacing, altitude_column, expe
             0.25,
             None,
         ),  # Should work with or without an input epsg and without altitude_column
+        (
+            INPUT_PCD_CROPPED,
+            INPUT_LIGNES_3D_GEOJSON,
+            None,
+            0,
+            0.25,
+            None,
+        ),  # Should work with lines and add no points if there is no geometry in the tile extent
     ],
 )
 def test_add_points_from_geometry_to_las(input_file, input_points, epsg, expected_nb_points, spacing, altitude_column):
@@ -272,11 +290,11 @@ def test_add_points_from_geometry_to_las(input_file, input_points, epsg, expecte
         input_points, input_file, OUTPUT_FILE, 68, epsg, 1000, spacing, altitude_column
     )
     assert Path(OUTPUT_FILE).exists()  # check output exists
-
+
     # Read input and output files to compare headers
     input_las = laspy.read(input_file)
     output_las = laspy.read(OUTPUT_FILE)
-
+
     # Compare headers
     assert input_las.header.version == output_las.header.version
     assert input_las.header.system_identifier == output_las.header.system_identifier
@@ -287,7 +305,7 @@ def test_add_points_from_geometry_to_las(input_file, input_points, epsg, expecte
     assert np.array_equal(input_las.header.scales, output_las.header.scales)
     assert np.array_equal(input_las.header.offsets, output_las.header.offsets)
     assert input_las.header.vlrs[0].string == output_las.header.vlrs[0].string
-
+
     point_count = compute_count_one_file(OUTPUT_FILE)["68"]
     assert point_count == expected_nb_points  # Add all points from geojson
 
{ign_pdal_tools-1.11.1 → ign_pdal_tools-1.12.2}/test/test_color.py

@@ -315,15 +315,18 @@ def test_is_image_white_false():
     assert not color.is_image_white(input_path), "This image should NOT be detected as white"
 
 
-
-
-
-
-
-
-
-
-
+# the test is not working, the image is not detected as white
+# certainly because of a fix on GPF side
+# TODO: find a new area where the GPF returns a white image
+#@pytest.mark.geopf
+#def test_color_raise_for_white_image():
+# input_path = os.path.join(TEST_PATH, "data/sample_lareunion_epsg2975.laz")
+# output_path = os.path.join(TMPDIR, "sample_lareunion_epsg2975.colorized.white.laz")#
+
+# with pytest.raises(ValueError) as excinfo:
+# color.color(input_path, output_path, check_images=True)
+
+# assert "Downloaded image is white" in str(excinfo.value)
 
 
 @pytest.mark.geopf