ign-pdal-tools 1.8.0__tar.gz → 1.10.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ign_pdal_tools-1.8.0 → ign_pdal_tools-1.10.0}/PKG-INFO +1 -1
- {ign_pdal_tools-1.8.0 → ign_pdal_tools-1.10.0}/ign_pdal_tools.egg-info/PKG-INFO +1 -1
- {ign_pdal_tools-1.8.0 → ign_pdal_tools-1.10.0}/ign_pdal_tools.egg-info/SOURCES.txt +3 -0
- {ign_pdal_tools-1.8.0 → ign_pdal_tools-1.10.0}/pdaltools/_version.py +1 -1
- {ign_pdal_tools-1.8.0 → ign_pdal_tools-1.10.0}/pdaltools/add_points_in_pointcloud.py +29 -22
- {ign_pdal_tools-1.8.0 → ign_pdal_tools-1.10.0}/pdaltools/las_add_buffer.py +0 -1
- {ign_pdal_tools-1.8.0 → ign_pdal_tools-1.10.0}/pdaltools/las_remove_dimensions.py +19 -5
- ign_pdal_tools-1.10.0/pdaltools/las_rename_dimension.py +79 -0
- {ign_pdal_tools-1.8.0 → ign_pdal_tools-1.10.0}/pdaltools/unlock_file.py +1 -2
- {ign_pdal_tools-1.8.0 → ign_pdal_tools-1.10.0}/test/test_add_points_in_pointcloud.py +25 -6
- {ign_pdal_tools-1.8.0 → ign_pdal_tools-1.10.0}/test/test_color.py +42 -14
- ign_pdal_tools-1.10.0/test/test_las_rename_dimension.py +159 -0
- ign_pdal_tools-1.10.0/test/test_pdal_custom.py +24 -0
- {ign_pdal_tools-1.8.0 → ign_pdal_tools-1.10.0}/LICENSE.md +0 -0
- {ign_pdal_tools-1.8.0 → ign_pdal_tools-1.10.0}/README.md +0 -0
- {ign_pdal_tools-1.8.0 → ign_pdal_tools-1.10.0}/ign_pdal_tools.egg-info/dependency_links.txt +0 -0
- {ign_pdal_tools-1.8.0 → ign_pdal_tools-1.10.0}/ign_pdal_tools.egg-info/top_level.txt +0 -0
- {ign_pdal_tools-1.8.0 → ign_pdal_tools-1.10.0}/pdaltools/color.py +0 -0
- {ign_pdal_tools-1.8.0 → ign_pdal_tools-1.10.0}/pdaltools/las_clip.py +0 -0
- {ign_pdal_tools-1.8.0 → ign_pdal_tools-1.10.0}/pdaltools/las_info.py +0 -0
- {ign_pdal_tools-1.8.0 → ign_pdal_tools-1.10.0}/pdaltools/las_merge.py +0 -0
- {ign_pdal_tools-1.8.0 → ign_pdal_tools-1.10.0}/pdaltools/pcd_info.py +0 -0
- {ign_pdal_tools-1.8.0 → ign_pdal_tools-1.10.0}/pdaltools/replace_attribute_in_las.py +0 -0
- {ign_pdal_tools-1.8.0 → ign_pdal_tools-1.10.0}/pdaltools/standardize_format.py +0 -0
- {ign_pdal_tools-1.8.0 → ign_pdal_tools-1.10.0}/pyproject.toml +0 -0
- {ign_pdal_tools-1.8.0 → ign_pdal_tools-1.10.0}/setup.cfg +0 -0
- {ign_pdal_tools-1.8.0 → ign_pdal_tools-1.10.0}/test/test_las_add_buffer.py +0 -0
- {ign_pdal_tools-1.8.0 → ign_pdal_tools-1.10.0}/test/test_las_clip.py +0 -0
- {ign_pdal_tools-1.8.0 → ign_pdal_tools-1.10.0}/test/test_las_info.py +0 -0
- {ign_pdal_tools-1.8.0 → ign_pdal_tools-1.10.0}/test/test_las_merge.py +0 -0
- {ign_pdal_tools-1.8.0 → ign_pdal_tools-1.10.0}/test/test_las_remove_dimensions.py +0 -0
- {ign_pdal_tools-1.8.0 → ign_pdal_tools-1.10.0}/test/test_pcd_info.py +0 -0
- {ign_pdal_tools-1.8.0 → ign_pdal_tools-1.10.0}/test/test_replace_attribute_in_las.py +0 -0
- {ign_pdal_tools-1.8.0 → ign_pdal_tools-1.10.0}/test/test_standardize_format.py +0 -0
- {ign_pdal_tools-1.8.0 → ign_pdal_tools-1.10.0}/test/test_unlock.py +0 -0
ign_pdal_tools.egg-info/SOURCES.txt
@@ -13,6 +13,7 @@ pdaltools/las_clip.py
 pdaltools/las_info.py
 pdaltools/las_merge.py
 pdaltools/las_remove_dimensions.py
+pdaltools/las_rename_dimension.py
 pdaltools/pcd_info.py
 pdaltools/replace_attribute_in_las.py
 pdaltools/standardize_format.py
@@ -24,7 +25,9 @@ test/test_las_clip.py
 test/test_las_info.py
 test/test_las_merge.py
 test/test_las_remove_dimensions.py
+test/test_las_rename_dimension.py
 test/test_pcd_info.py
+test/test_pdal_custom.py
 test/test_replace_attribute_in_las.py
 test/test_standardize_format.py
 test/test_unlock.py
pdaltools/add_points_in_pointcloud.py
@@ -1,5 +1,6 @@
 import argparse
 from shutil import copy2
+import tempfile
 
 import geopandas as gpd
 import laspy
@@ -10,6 +11,8 @@ from shapely.geometry import MultiPoint, Point, box
 
 from pdaltools.las_info import get_epsg_from_las, get_tile_bbox
 
+import pdal
+
 
 def parse_args(argv=None):
     parser = argparse.ArgumentParser("Add points from GeoJSON in LIDAR tile")
@@ -127,10 +130,12 @@ def add_points_to_las(
         crs (str): CRS of the data.
         virtual_points_classes (int): The classification value to assign to those virtual points (default: 66).
     """
+
     if input_points_with_z.empty:
         print(
             "No points to add. All points of the geojson file are outside the tile. Copying the input file to output"
         )
+        copy2(input_las, output_las)
         return
 
     # Extract XYZ coordinates and additional attribute (classification)
@@ -141,32 +146,34 @@ def add_points_to_las(
     classes = virtual_points_classes * np.ones(nb_points)
 
     # Open the input LAS file to check and possibly update the header of the output
-    with laspy.open(input_las) as las:
+    with laspy.open(input_las, "r") as las:
         header = las.header
         if not header:
             header = laspy.LasHeader(point_format=8, version="1.4")
+
+    new_points = laspy.ScaleAwarePointRecord.zeros(nb_points, header=header)  # use header for input_las
+    # then fill in the gaps (X, Y, Z an classification)
+    new_points.x = x_coords.astype(new_points.x.dtype)
+    new_points.y = y_coords.astype(new_points.y.dtype)
+    new_points.z = z_coords.astype(new_points.z.dtype)
+    new_points.classification = classes.astype(new_points.classification.dtype)
+
+    with tempfile.NamedTemporaryFile(suffix="_new_points.las") as tmp:
+        with laspy.open(tmp.name, mode="w", header=header) as las_file:
+            las_file.write_points(new_points)
+
         if crs:
-
-
-
-
-
-
-
-
-
-
-
-    with laspy.open(output_las, mode="a", header=header) as output_las:  # mode `a` for adding points
-        # create nb_points points with "0" everywhere
-        new_points = laspy.ScaleAwarePointRecord.zeros(nb_points, header=header)  # use header for input_las
-        # then fill in the gaps (X, Y, Z an classification)
-        new_points.x = x_coords.astype(new_points.x.dtype)
-        new_points.y = y_coords.astype(new_points.y.dtype)
-        new_points.z = z_coords.astype(new_points.z.dtype)
-        new_points.classification = classes.astype(new_points.classification.dtype)
-
-        output_las.append_points(new_points)
+            a_srs = crs
+        else:
+            a_srs = get_epsg_from_las(input_las)
+
+        # Use pdal to merge the new points with the existing points
+        pipeline = pdal.Pipeline()
+        pipeline |= pdal.Reader.las(filename=input_las)
+        pipeline |= pdal.Reader.las(filename=tmp.name)
+        pipeline |= pdal.Filter.merge()
+        pipeline |= pdal.Writer.las(filename=output_las, forward="all", a_srs=a_srs)
+        pipeline.execute()
 
 
 def line_to_multipoint(line, spacing: float, z_value: float = None):
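
For orientation, a minimal usage sketch of the reworked add_points_to_las above, with the argument order taken from the package's own tests; file names are illustrative:

import geopandas as gpd

from pdaltools import add_points_in_pointcloud

# Points to inject, already clipped to the tile extent (illustrative path).
points = gpd.read_file("virtual_points.geojson")
# Writes the extra points with classification 66 and merges them into the output LAZ via PDAL.
add_points_in_pointcloud.add_points_to_las(points, "tile.laz", "tile_with_points.laz", "EPSG:2154", 66)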
pdaltools/las_add_buffer.py
@@ -158,7 +158,6 @@ def remove_points_from_buffer(input_file: str, output_file: str):
         pipeline |= pdal.Filter.range(limits=f"{ORIGINAL_TILE_TAG}[1:1]")
         pipeline |= pdal.Writer.las(filename=tmp_las.name, forward="all", extra_dims="all")
         pipeline.execute()
-
         remove_dimensions_from_las(tmp_las.name, dimensions=[ORIGINAL_TILE_TAG], output_las=output_file)
 
 
pdaltools/las_remove_dimensions.py
@@ -5,22 +5,36 @@ import pdal
 from pdaltools.las_info import get_writer_parameters_from_reader_metadata
 
 
-def
+def remove_dimensions_from_points(points, metadata, dimensions: [str], output_las: str):
     """
     export new las without some dimensions
     """
-
-
-
+
+    mandatory_dimensions = ["X", "Y", "Z", "x", "y", "z"]
+    output_dimensions_test = [dim for dim in dimensions if dim not in mandatory_dimensions]
+    assert len(output_dimensions_test) == len(
+        dimensions
+    ), "All dimensions to remove must not be mandatory dimensions (X,Y,Z,x,y,z)"
+
     input_dimensions = list(points.dtype.fields.keys())
     output_dimensions = [dim for dim in input_dimensions if dim not in dimensions]
     points_pruned = points[output_dimensions]
-    params = get_writer_parameters_from_reader_metadata(
+    params = get_writer_parameters_from_reader_metadata(metadata)
     pipeline_end = pdal.Pipeline(arrays=[points_pruned])
     pipeline_end |= pdal.Writer.las(output_las, forward="all", **params)
     pipeline_end.execute()
 
 
+def remove_dimensions_from_las(input_las: str, dimensions: [str], output_las: str):
+    """
+    export new las without some dimensions
+    """
+    pipeline = pdal.Pipeline() | pdal.Reader.las(input_las)
+    pipeline.execute()
+    points = pipeline.arrays[0]
+    remove_dimensions_from_points(points, pipeline.metadata, dimensions, output_las)
+
+
 def parse_args():
     parser = argparse.ArgumentParser("Remove dimensions from las")
     parser.add_argument(
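
For reference, a hedged usage sketch of the split helpers above; the paths and the dimension name are illustrative:

import pdal

from pdaltools.las_remove_dimensions import remove_dimensions_from_las, remove_dimensions_from_points

# One-shot helper: read the LAS file, prune the dimension, write the result.
remove_dimensions_from_las("input.las", dimensions=["Deviation"], output_las="no_deviation.las")

# Lower-level helper: reuse an already-read point array together with its reader metadata.
pipeline = pdal.Pipeline() | pdal.Reader.las("input.las")
pipeline.execute()
remove_dimensions_from_points(pipeline.arrays[0], pipeline.metadata, ["Deviation"], "no_deviation.las")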
pdaltools/las_rename_dimension.py (new file)
@@ -0,0 +1,79 @@
+"""
+Rename dimensions in a LAS file using PDAL's Python API.
+
+This script allows renaming dimensions in a LAS file while preserving all other data.
+"""
+
+import argparse
+import pdal
+import sys
+from pathlib import Path
+from pdaltools.las_remove_dimensions import remove_dimensions_from_points
+
+
+def rename_dimension(input_file: str, output_file: str, old_dims: list[str], new_dims: list[str]):
+    """
+    Rename one or multiple dimensions in a LAS file using PDAL.
+
+    Args:
+        input_file: Path to the input LAS file
+        output_file: Path to save the output LAS file
+        old_dims: List of names of dimensions to rename
+        new_dims: List of new names for the dimensions
+    """
+
+    # Validate dimensions
+    if len(old_dims) != len(new_dims):
+        raise ValueError("Number of old dimensions must match number of new dimensions")
+
+    mandatory_dimensions = ['X', 'Y', 'Z', 'x', 'y', 'z']
+    for dim in new_dims:
+        if dim in mandatory_dimensions:
+            raise ValueError(f"New dimension {dim} cannot be a mandatory dimension (X,Y,Z,x,y,z)")
+
+    pipeline = pdal.Pipeline() | pdal.Reader.las(input_file)
+    for old, new in zip(old_dims, new_dims):
+        pipeline |= pdal.Filter.ferry(dimensions=f"{old} => {new}")
+    pipeline |= pdal.Writer.las(output_file)
+    pipeline.execute()
+    points = pipeline.arrays[0]
+
+    # Remove old dimensions
+    remove_dimensions_from_points(points, pipeline.metadata, old_dims, output_file)
+
+
+def main():
+    parser = argparse.ArgumentParser(description="Rename dimensions in a LAS file")
+    parser.add_argument("input_file", help="Input LAS file")
+    parser.add_argument("output_file", help="Output LAS file")
+    parser.add_argument(
+        "--old-dims",
+        nargs="+",
+        required=True,
+        help="Names of dimensions to rename (can specify multiple)",
+    )
+    parser.add_argument(
+        "--new-dims",
+        nargs="+",
+        required=True,
+        help="New names for the dimensions (must match --old-dims count)",
+    )
+
+    args = parser.parse_args()
+
+    # Validate input file
+    input_path = Path(args.input_file)
+    if not input_path.exists():
+        print(f"Error: Input file {args.input_file} does not exist", file=sys.stderr)
+        sys.exit(1)
+
+    # Validate output file
+    output_path = Path(args.output_file)
+    if output_path.exists():
+        print(f"Warning: Output file {args.output_file} already exists. It will be overwritten.")
+
+    rename_dimension(args.input_file, args.output_file, args.old_dims, args.new_dims)
+
+
+if __name__ == "__main__":
+    main()
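
A hedged example of driving the new module, mirroring the function and CLI defined above; paths and dimension names are illustrative:

from pdaltools.las_rename_dimension import rename_dimension

# Python API: rename two extra dimensions in one pass; the old ones are then removed.
rename_dimension("tile.las", "tile_renamed.las", ["test_dim", "test_dim2"], ["new_test_dim", "new_test_dim2"])

# Rough command-line equivalent (the module exposes main() behind `if __name__ == "__main__"`):
#   python -m pdaltools.las_rename_dimension tile.las tile_renamed.las \
#       --old-dims test_dim test_dim2 --new-dims new_test_dim new_test_dim2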
test/test_add_points_in_pointcloud.py
@@ -26,6 +26,7 @@ INPUT_LIGNES_2D_GEOJSON = os.path.join(DATA_LIGNES_PATH, "Lignes_2d_0292_6833.ge
 INPUT_LIGNES_3D_GEOJSON = os.path.join(DATA_LIGNES_PATH, "Lignes_3d_0292_6833.geojson")
 INPUT_LIGNES_SHAPE = os.path.join(DATA_LIGNES_PATH, "Lignes_3d_0292_6833.shp")
 OUTPUT_FILE = os.path.join(TMP_PATH, "test_semis_2023_0292_6833_LA93_IGN69.laz")
+INPUT_EMPTY_POINTS_2D = os.path.join(DATA_POINTS_3D_PATH, "Points_virtuels_2d_empty.geojson")
 
 # Cropped las tile used to test adding points that belong to the theorical tile but not to the
 # effective las file extent
@@ -81,19 +82,21 @@ def test_clip_3d_lines_to_tile(input_file, epsg):
 
 
 @pytest.mark.parametrize(
-    "input_file, epsg, expected_nb_points",
+    "input_file, epsg, input_points_2d, expected_nb_points",
     [
-        (INPUT_PCD, "EPSG:2154", 2423),  # should work when providing an epsg value
-        (INPUT_PCD, None, 2423),  # Should also work with no epsg value (get from las file)
-        (INPUT_PCD_CROPPED, None,
+        (INPUT_PCD, "EPSG:2154", INPUT_POINTS_2D, 2423),  # should work when providing an epsg value
+        (INPUT_PCD, None, INPUT_POINTS_2D, 2423),  # Should also work with no epsg value (get from las file)
+        (INPUT_PCD_CROPPED, None, INPUT_POINTS_2D_FOR_CROPPED_PCD, 451),
+        # Should also work if there is no points (direct copy of the input file)
+        (INPUT_PCD_CROPPED, None, INPUT_EMPTY_POINTS_2D, 0),
     ],
 )
-def test_add_points_to_las(input_file, epsg, expected_nb_points):
+def test_add_points_to_las(input_file, epsg, input_points_2d, expected_nb_points):
     # Ensure the output file doesn't exist before the test
     if Path(OUTPUT_FILE).exists():
         os.remove(OUTPUT_FILE)
 
-    points = gpd.read_file(
+    points = gpd.read_file(input_points_2d)
     add_points_in_pointcloud.add_points_to_las(points, input_file, OUTPUT_FILE, epsg, 68)
     assert Path(OUTPUT_FILE).exists()  # check output exists
 
@@ -269,6 +272,22 @@ def test_add_points_from_geometry_to_las(input_file, input_points, epsg, expecte
         input_points, input_file, OUTPUT_FILE, 68, epsg, 1000, spacing, altitude_column
     )
     assert Path(OUTPUT_FILE).exists()  # check output exists
+
+    # Read input and output files to compare headers
+    input_las = laspy.read(input_file)
+    output_las = laspy.read(OUTPUT_FILE)
+
+    # Compare headers
+    assert input_las.header.version == output_las.header.version
+    assert input_las.header.system_identifier == output_las.header.system_identifier
+    assert input_las.header.extra_header_bytes == output_las.header.extra_header_bytes
+    assert input_las.header.extra_vlr_bytes == output_las.header.extra_vlr_bytes
+    assert input_las.header.number_of_evlrs == output_las.header.number_of_evlrs
+    assert input_las.header.point_format == output_las.header.point_format
+    assert np.array_equal(input_las.header.scales, output_las.header.scales)
+    assert np.array_equal(input_las.header.offsets, output_las.header.offsets)
+    assert input_las.header.vlrs[0].string == output_las.header.vlrs[0].string
+
     point_count = compute_count_one_file(OUTPUT_FILE)["68"]
     assert point_count == expected_nb_points  # Add all points from geojson
 
test/test_color.py
@@ -80,8 +80,12 @@ def test_download_image_ok():
 
     # check there is no noData
     raster = gdal.Open(tif_output)
-
-
+    assert np.any(raster.ReadAsArray())  # Check that the raster array is not empty
+    # TODO: Fix this test: it did not correspond to what was expected:
+    # - GetNoDataValue returns the value of no_data, not the number of occurrences
+    # - it is possible to have occasional no data values if no_data == 255. (white pixels)
+    # for i in range(raster.RasterCount):
+    #     assert raster.GetRasterBand(i + 1).GetNoDataValue() is None
 
 
 @pytest.mark.geopf
@@ -96,8 +100,12 @@ def test_download_image_ok_one_download():
     # check there is no noData
     raster = gdal.Open(tif_output)
     assert raster.ReadAsArray().shape == (3, expected_pixel_size, expected_pixel_size)
-
-
+    assert np.any(raster.ReadAsArray())  # Check that the raster array is not empty
+    # TODO: Fix this test: it did not correspond to what was expected:
+    # - GetNoDataValue returns the value of no_data, not the number of occurrences
+    # - it is possible to have occasional no data values if no_data == 255. (white pixels)
+    # for i in range(raster.RasterCount):
+    #     assert raster.GetRasterBand(i + 1).GetNoDataValue() is None
 
 
 @pytest.mark.geopf
@@ -117,8 +125,12 @@ def test_download_image_ok_one_download_with_extra_pixel(pixel_per_meter, expect
     raster = gdal.Open(tif_output)
 
     assert raster.ReadAsArray().shape == (3, expected_pixel_size, expected_pixel_size)
-
-
+    assert np.any(raster.ReadAsArray())  # Check that the raster array is not empty
+    # TODO: Fix this test: it did not correspond to what was expected:
+    # - GetNoDataValue returns the value of no_data, not the number of occurrences
+    # - it is possible to have occasional no data values if no_data == 255. (white pixels)
+    # for i in range(raster.RasterCount):
+    #     assert raster.GetRasterBand(i + 1).GetNoDataValue() is None
 
 
 @pytest.mark.geopf
@@ -144,8 +156,12 @@ def test_download_image_ok_more_downloads(pixel_per_meter, expected_pixel_size):
     # check there is no noData
     raster = gdal.Open(tif_output)
     assert raster.ReadAsArray().shape == (3, expected_pixel_size, expected_pixel_size)
-
-
+    assert np.any(raster.ReadAsArray())  # Check that the raster array is not empty
+    # TODO: Fix this test: it did not correspond to what was expected:
+    # - GetNoDataValue returns the value of no_data, not the number of occurrences
+    # - it is possible to have occasional no data values if no_data == 255. (white pixels)
+    # for i in range(raster.RasterCount):
+    #     assert raster.GetRasterBand(i + 1).GetNoDataValue() is None
 
 
 @pytest.mark.geopf
@@ -166,8 +182,12 @@ def test_download_image_ok_more_downloads_with_extra_pixel(pixel_per_meter, expe
     # check there is no noData
     raster = gdal.Open(tif_output)
     assert raster.ReadAsArray().shape == (3, expected_pixel_size, expected_pixel_size)
-
-
+    assert np.any(raster.ReadAsArray())  # Check that the raster array is not empty
+    # TODO: Fix this test: it did not correspond to what was expected:
+    # - GetNoDataValue returns the value of no_data, not the number of occurrences
+    # - it is possible to have occasional no data values if no_data == 255. (white pixels)
+    # for i in range(raster.RasterCount):
+    #     assert raster.GetRasterBand(i + 1).GetNoDataValue() is None
 
 
 @pytest.mark.geopf
@@ -177,8 +197,12 @@ def test_download_image_download_size_gpf_bigger():
 
     # check there is no noData
     raster = gdal.Open(tif_output)
-
-
+    assert np.any(raster.ReadAsArray())  # Check that the raster array is not empty
+    # TODO: Fix this test: it did not correspond to what was expected:
+    # - GetNoDataValue returns the value of no_data, not the number of occurrences
+    # - it is possible to have occasional no data values if no_data == 255. (white pixels)
+    # for i in range(raster.RasterCount):
+    #     assert raster.GetRasterBand(i + 1).GetNoDataValue() is None
 
 
 @pytest.mark.geopf
@@ -191,8 +215,12 @@ def test_download_image_download_size_gpf_size_almost_ok():
 
     # check there is no noData
     raster = gdal.Open(tif_output)
-
-
+    assert np.any(raster.ReadAsArray())  # Check that the raster array is not empty
+    # TODO: Fix this test: it did not correspond to what was expected:
+    # - GetNoDataValue returns the value of no_data, not the number of occurrences
+    # - it is possible to have occasional no data values if no_data == 255. (white pixels)
+    # for i in range(raster.RasterCount):
+    #     assert raster.GetRasterBand(i + 1).GetNoDataValue() is None
 
 
 @pytest.mark.geopf
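
The TODO comments above explain why the old assertion was dropped; a hedged sketch of what a corrected no-data check could look like, counting occurrences of the declared no-data value instead of asserting on the value itself (the output path is illustrative):

import numpy as np
from osgeo import gdal

raster = gdal.Open("color_output.tif")
for i in range(raster.RasterCount):
    band = raster.GetRasterBand(i + 1)
    nodata = band.GetNoDataValue()
    if nodata is not None:
        # A few matching pixels can be legitimate when no_data == 255 (white pixels),
        # so report the count rather than requiring zero occurrences.
        nb_nodata = int(np.sum(band.ReadAsArray() == nodata))
        print(f"band {i + 1}: no-data value {nodata}, {nb_nodata} matching pixels")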
test/test_las_rename_dimension.py (new file)
@@ -0,0 +1,159 @@
+import os
+import pytest
+import tempfile
+import numpy as np
+import laspy
+import sys
+from pdaltools.las_rename_dimension import rename_dimension, main
+from pyproj import CRS
+
+def create_test_las_file():
+    """Create a temporary LAS file with test data."""
+    with tempfile.NamedTemporaryFile(suffix='.las', delete=False) as tmp_file:
+        # Create a LAS file with some test points
+        header = laspy.LasHeader(point_format=3, version="1.4")
+        header.add_extra_dim(laspy.ExtraBytesParams(name="test_dim", type=np.float32))
+        header.add_extra_dim(laspy.ExtraBytesParams(name="test_dim2", type=np.int32))
+
+        las = laspy.LasData(header)
+
+        crs_pyproj = CRS.from_string("epsg:4326")
+        las.header.add_crs(crs_pyproj)
+
+        # Add some test points
+        las.x = np.array([1.0, 2.0, 3.0])
+        las.y = np.array([4.0, 5.0, 6.0])
+        las.z = np.array([7.0, 8.0, 9.0])
+        las.test_dim = np.array([10.0, 11.0, 12.0])
+        las.test_dim2 = np.array([12, 13, 14])
+
+        las.write(tmp_file.name)
+        return tmp_file.name
+
+def test_rename_dimension():
+    """Test renaming a dimension in a LAS file."""
+    # Create a temporary input LAS file
+    input_file = create_test_las_file()
+
+    # Create temporary output file
+    with tempfile.NamedTemporaryFile(suffix='.las', delete=False) as tmp_file:
+        output_file = tmp_file.name
+
+    try:
+        # Rename dimension using direct function call
+        rename_dimension(input_file, output_file, ["test_dim", "test_dim2"], ["new_test_dim", "new_test_dim2"])
+
+        # Verify the dimension was renamed
+        with laspy.open(output_file) as las_file:
+            las = las_file.read()
+            assert "new_test_dim" in las.point_format.dimension_names
+            assert "test_dim" not in las.point_format.dimension_names
+            assert "new_test_dim2" in las.point_format.dimension_names
+            assert "test_dim2" not in las.point_format.dimension_names
+
+            # Verify the data is preserved
+            np.testing.assert_array_equal(las.x, [1.0, 2.0, 3.0])
+            np.testing.assert_array_equal(las.y, [4.0, 5.0, 6.0])
+            np.testing.assert_array_equal(las.z, [7.0, 8.0, 9.0])
+            np.testing.assert_array_equal(las["new_test_dim"], [10.0, 11.0, 12.0])
+            np.testing.assert_array_equal(las["new_test_dim2"], [12, 13, 14])
+    finally:
+        # Clean up temporary files
+        try:
+            os.unlink(input_file)
+            os.unlink(output_file)
+        except:
+            pass
+
+def test_rename_nonexistent_dimension():
+    """Test attempting to rename a dimension that doesn't exist."""
+    input_file = create_test_las_file()
+
+    with tempfile.NamedTemporaryFile(suffix='.las', delete=False) as tmp_file:
+        output_file = tmp_file.name
+
+    try:
+        with pytest.raises(RuntimeError):
+            rename_dimension(input_file, output_file, ["nonexistent_dim"], ["new_dim"])
+    finally:
+        os.unlink(input_file)
+        os.unlink(output_file)
+
+def test_rename_to_existing_dimension():
+    """Test attempting to rename to an existing dimension."""
+    input_file = create_test_las_file()
+
+    with tempfile.NamedTemporaryFile(suffix='.las', delete=False) as tmp_file:
+        output_file = tmp_file.name
+
+    try:
+        with pytest.raises(ValueError):
+            rename_dimension(input_file, output_file, ["test_dim"], ["x"])
+    finally:
+        os.unlink(input_file)
+        os.unlink(output_file)
+
+def test_rename_dimension_case_sensitive():
+    """Test that dimension renaming is case-sensitive."""
+    input_file = create_test_las_file()
+
+    with tempfile.NamedTemporaryFile(suffix='.las', delete=False) as tmp_file:
+        output_file = tmp_file.name
+
+    try:
+        with pytest.raises(RuntimeError):
+            rename_dimension(input_file, output_file, ["TEST_DIM"], ["new_dim"])
+    finally:
+        os.unlink(input_file)
+        os.unlink(output_file)
+
+
+def test_rename_dimension_main():
+    """Test renaming dimensions using the main() function."""
+    # Create a temporary input LAS file
+    input_file = create_test_las_file()
+
+    # Create temporary output file
+    with tempfile.NamedTemporaryFile(suffix='.las', delete=False) as tmp_file:
+        output_file = tmp_file.name
+
+    try:
+        # Save original sys.argv
+        original_argv = sys.argv
+
+        # Mock command-line arguments
+        sys.argv = [
+            "las_rename_dimension.py",  # script name
+            input_file,
+            output_file,
+            "--old-dims", "test_dim", "test_dim2",
+            "--new-dims", "new_test_dim", "new_test_dim2"
+        ]
+
+        # Call main() function
+        main()
+
+        # Restore original sys.argv
+        sys.argv = original_argv
+
+        # Verify the dimension was renamed
+        with laspy.open(output_file) as las_file:
+            las = las_file.read()
+            assert "new_test_dim" in las.point_format.dimension_names
+            assert "test_dim" not in las.point_format.dimension_names
+            assert "new_test_dim2" in las.point_format.dimension_names
+            assert "test_dim2" not in las.point_format.dimension_names
+
+            # Verify the data is preserved
+            np.testing.assert_array_equal(las.x, [1.0, 2.0, 3.0])
+            np.testing.assert_array_equal(las.y, [4.0, 5.0, 6.0])
+            np.testing.assert_array_equal(las.z, [7.0, 8.0, 9.0])
+            np.testing.assert_array_equal(las["new_test_dim"], [10.0, 11.0, 12.0])
+            np.testing.assert_array_equal(las["new_test_dim2"], [12, 13, 14])
+    finally:
+        # Clean up temporary files
+        try:
+            os.unlink(input_file)
+            os.unlink(output_file)
+        except:
+            pass
test/test_pdal_custom.py (new file)
@@ -0,0 +1,24 @@
+import os
+
+import pdal
+import pytest
+
+TEST_PATH = os.path.dirname(os.path.abspath(__file__))
+
+
+# this test only works with PDAL compiled on a custom fork and branch, so we mark it to avoid running it.
+@pytest.mark.pdal_custom
+def test_pdal_read_severals_extra_dims():
+    test_file = os.path.join(TEST_PATH, "data/las_with_several_extra_byte_bloc.laz")
+
+    pipeline = pdal.Reader.las(filename=test_file).pipeline()
+    metadata = pipeline.quickinfo["readers.las"]
+
+    # dimensions should contains 'Deviation' and 'confidence'
+    assert "Deviation" in metadata["dimensions"]
+    assert "confidence" in metadata["dimensions"]
+
+    # Test Python PDAL bindings
+    pipeline = pdal.Reader.las(filename=test_file).pipeline()
+    num_points = pipeline.execute()
+    assert num_points > 0