ign-pdal-tools 1.8.1__tar.gz → 1.11.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. {ign_pdal_tools-1.8.1 → ign_pdal_tools-1.11.1}/PKG-INFO +1 -1
  2. {ign_pdal_tools-1.8.1 → ign_pdal_tools-1.11.1}/ign_pdal_tools.egg-info/PKG-INFO +1 -1
  3. {ign_pdal_tools-1.8.1 → ign_pdal_tools-1.11.1}/ign_pdal_tools.egg-info/SOURCES.txt +3 -0
  4. {ign_pdal_tools-1.8.1 → ign_pdal_tools-1.11.1}/pdaltools/_version.py +1 -1
  5. {ign_pdal_tools-1.8.1 → ign_pdal_tools-1.11.1}/pdaltools/add_points_in_pointcloud.py +27 -19
  6. {ign_pdal_tools-1.8.1 → ign_pdal_tools-1.11.1}/pdaltools/las_add_buffer.py +0 -1
  7. {ign_pdal_tools-1.8.1 → ign_pdal_tools-1.11.1}/pdaltools/las_remove_dimensions.py +19 -5
  8. ign_pdal_tools-1.11.1/pdaltools/las_rename_dimension.py +79 -0
  9. {ign_pdal_tools-1.8.1 → ign_pdal_tools-1.11.1}/pdaltools/standardize_format.py +43 -13
  10. {ign_pdal_tools-1.8.1 → ign_pdal_tools-1.11.1}/pdaltools/unlock_file.py +1 -2
  11. {ign_pdal_tools-1.8.1 → ign_pdal_tools-1.11.1}/test/test_add_points_in_pointcloud.py +16 -0
  12. ign_pdal_tools-1.11.1/test/test_las_rename_dimension.py +159 -0
  13. ign_pdal_tools-1.11.1/test/test_pdal_custom.py +24 -0
  14. ign_pdal_tools-1.11.1/test/test_standardize_format.py +339 -0
  15. ign_pdal_tools-1.8.1/test/test_standardize_format.py +0 -159
  16. {ign_pdal_tools-1.8.1 → ign_pdal_tools-1.11.1}/LICENSE.md +0 -0
  17. {ign_pdal_tools-1.8.1 → ign_pdal_tools-1.11.1}/README.md +0 -0
  18. {ign_pdal_tools-1.8.1 → ign_pdal_tools-1.11.1}/ign_pdal_tools.egg-info/dependency_links.txt +0 -0
  19. {ign_pdal_tools-1.8.1 → ign_pdal_tools-1.11.1}/ign_pdal_tools.egg-info/top_level.txt +0 -0
  20. {ign_pdal_tools-1.8.1 → ign_pdal_tools-1.11.1}/pdaltools/color.py +0 -0
  21. {ign_pdal_tools-1.8.1 → ign_pdal_tools-1.11.1}/pdaltools/las_clip.py +0 -0
  22. {ign_pdal_tools-1.8.1 → ign_pdal_tools-1.11.1}/pdaltools/las_info.py +0 -0
  23. {ign_pdal_tools-1.8.1 → ign_pdal_tools-1.11.1}/pdaltools/las_merge.py +0 -0
  24. {ign_pdal_tools-1.8.1 → ign_pdal_tools-1.11.1}/pdaltools/pcd_info.py +0 -0
  25. {ign_pdal_tools-1.8.1 → ign_pdal_tools-1.11.1}/pdaltools/replace_attribute_in_las.py +0 -0
  26. {ign_pdal_tools-1.8.1 → ign_pdal_tools-1.11.1}/pyproject.toml +0 -0
  27. {ign_pdal_tools-1.8.1 → ign_pdal_tools-1.11.1}/setup.cfg +0 -0
  28. {ign_pdal_tools-1.8.1 → ign_pdal_tools-1.11.1}/test/test_color.py +0 -0
  29. {ign_pdal_tools-1.8.1 → ign_pdal_tools-1.11.1}/test/test_las_add_buffer.py +0 -0
  30. {ign_pdal_tools-1.8.1 → ign_pdal_tools-1.11.1}/test/test_las_clip.py +0 -0
  31. {ign_pdal_tools-1.8.1 → ign_pdal_tools-1.11.1}/test/test_las_info.py +0 -0
  32. {ign_pdal_tools-1.8.1 → ign_pdal_tools-1.11.1}/test/test_las_merge.py +0 -0
  33. {ign_pdal_tools-1.8.1 → ign_pdal_tools-1.11.1}/test/test_las_remove_dimensions.py +0 -0
  34. {ign_pdal_tools-1.8.1 → ign_pdal_tools-1.11.1}/test/test_pcd_info.py +0 -0
  35. {ign_pdal_tools-1.8.1 → ign_pdal_tools-1.11.1}/test/test_replace_attribute_in_las.py +0 -0
  36. {ign_pdal_tools-1.8.1 → ign_pdal_tools-1.11.1}/test/test_unlock.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ign-pdal-tools
- Version: 1.8.1
+ Version: 1.11.1
  Summary: Library for common LAS files manipulation with PDAL
  Author-email: Guillaume Liegard <guillaume.liegard@ign.fr>
  Description-Content-Type: text/markdown
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ign-pdal-tools
- Version: 1.8.1
+ Version: 1.11.1
  Summary: Library for common LAS files manipulation with PDAL
  Author-email: Guillaume Liegard <guillaume.liegard@ign.fr>
  Description-Content-Type: text/markdown
@@ -13,6 +13,7 @@ pdaltools/las_clip.py
  pdaltools/las_info.py
  pdaltools/las_merge.py
  pdaltools/las_remove_dimensions.py
+ pdaltools/las_rename_dimension.py
  pdaltools/pcd_info.py
  pdaltools/replace_attribute_in_las.py
  pdaltools/standardize_format.py
@@ -24,7 +25,9 @@ test/test_las_clip.py
  test/test_las_info.py
  test/test_las_merge.py
  test/test_las_remove_dimensions.py
+ test/test_las_rename_dimension.py
  test/test_pcd_info.py
+ test/test_pdal_custom.py
  test/test_replace_attribute_in_las.py
  test/test_standardize_format.py
  test/test_unlock.py
@@ -1,4 +1,4 @@
- __version__ = "1.8.1"
+ __version__ = "1.11.1"


  if __name__ == "__main__":
@@ -1,5 +1,6 @@
  import argparse
  from shutil import copy2
+ import tempfile

  import geopandas as gpd
  import laspy
@@ -10,6 +11,8 @@ from shapely.geometry import MultiPoint, Point, box

  from pdaltools.las_info import get_epsg_from_las, get_tile_bbox

+ import pdal
+

  def parse_args(argv=None):
  parser = argparse.ArgumentParser("Add points from GeoJSON in LIDAR tile")
@@ -127,13 +130,12 @@ def add_points_to_las(
  crs (str): CRS of the data.
  virtual_points_classes (int): The classification value to assign to those virtual points (default: 66).
  """
- # Copy data pointcloud
- copy2(input_las, output_las)

  if input_points_with_z.empty:
  print(
  "No points to add. All points of the geojson file are outside the tile. Copying the input file to output"
  )
+ copy2(input_las, output_las)
  return

  # Extract XYZ coordinates and additional attribute (classification)
@@ -148,24 +150,30 @@ def add_points_to_las(
  header = las.header
  if not header:
  header = laspy.LasHeader(point_format=8, version="1.4")
+
+ new_points = laspy.ScaleAwarePointRecord.zeros(nb_points, header=header) # use header for input_las
+ # then fill in the gaps (X, Y, Z an classification)
+ new_points.x = x_coords.astype(new_points.x.dtype)
+ new_points.y = y_coords.astype(new_points.y.dtype)
+ new_points.z = z_coords.astype(new_points.z.dtype)
+ new_points.classification = classes.astype(new_points.classification.dtype)
+
+ with tempfile.NamedTemporaryFile(suffix="_new_points.las") as tmp:
+ with laspy.open(tmp.name, mode="w", header=header) as las_file:
+ las_file.write_points(new_points)
+
  if crs:
- try:
- crs_obj = CRS.from_user_input(crs) # Convert to a pyproj.CRS object
- except CRSError:
- raise ValueError(f"Invalid CRS: {crs}")
- header.add_crs(crs_obj)
-
- # Add the new points with 3D points
- with laspy.open(output_las, mode="a", header=header) as output_las: # mode `a` for adding points
- # create nb_points points with "0" everywhere
- new_points = laspy.ScaleAwarePointRecord.zeros(nb_points, header=header) # use header for input_las
- # then fill in the gaps (X, Y, Z an classification)
- new_points.x = x_coords.astype(new_points.x.dtype)
- new_points.y = y_coords.astype(new_points.y.dtype)
- new_points.z = z_coords.astype(new_points.z.dtype)
- new_points.classification = classes.astype(new_points.classification.dtype)
-
- output_las.append_points(new_points)
+ a_srs = crs
+ else:
+ a_srs = get_epsg_from_las(input_las)
+
+ # Use pdal to merge the new points with the existing points
+ pipeline = pdal.Pipeline()
+ pipeline |= pdal.Reader.las(filename=input_las)
+ pipeline |= pdal.Reader.las(filename=tmp.name)
+ pipeline |= pdal.Filter.merge()
+ pipeline |= pdal.Writer.las(filename=output_las, forward="all", a_srs=a_srs)
+ pipeline.execute()


  def line_to_multipoint(line, spacing: float, z_value: float = None):
@@ -158,7 +158,6 @@ def remove_points_from_buffer(input_file: str, output_file: str):
  pipeline |= pdal.Filter.range(limits=f"{ORIGINAL_TILE_TAG}[1:1]")
  pipeline |= pdal.Writer.las(filename=tmp_las.name, forward="all", extra_dims="all")
  pipeline.execute()
-
  remove_dimensions_from_las(tmp_las.name, dimensions=[ORIGINAL_TILE_TAG], output_las=output_file)


@@ -5,22 +5,36 @@ import pdal
  from pdaltools.las_info import get_writer_parameters_from_reader_metadata


- def remove_dimensions_from_las(input_las: str, dimensions: [str], output_las: str):
+ def remove_dimensions_from_points(points, metadata, dimensions: [str], output_las: str):
  """
  export new las without some dimensions
  """
- pipeline = pdal.Pipeline() | pdal.Reader.las(input_las)
- pipeline.execute()
- points = pipeline.arrays[0]
+
+ mandatory_dimensions = ["X", "Y", "Z", "x", "y", "z"]
+ output_dimensions_test = [dim for dim in dimensions if dim not in mandatory_dimensions]
+ assert len(output_dimensions_test) == len(
+ dimensions
+ ), "All dimensions to remove must not be mandatory dimensions (X,Y,Z,x,y,z)"
+
  input_dimensions = list(points.dtype.fields.keys())
  output_dimensions = [dim for dim in input_dimensions if dim not in dimensions]
  points_pruned = points[output_dimensions]
- params = get_writer_parameters_from_reader_metadata(pipeline.metadata)
+ params = get_writer_parameters_from_reader_metadata(metadata)
  pipeline_end = pdal.Pipeline(arrays=[points_pruned])
  pipeline_end |= pdal.Writer.las(output_las, forward="all", **params)
  pipeline_end.execute()


+ def remove_dimensions_from_las(input_las: str, dimensions: [str], output_las: str):
+ """
+ export new las without some dimensions
+ """
+ pipeline = pdal.Pipeline() | pdal.Reader.las(input_las)
+ pipeline.execute()
+ points = pipeline.arrays[0]
+ remove_dimensions_from_points(points, pipeline.metadata, dimensions, output_las)
+
+

  def parse_args():
  parser = argparse.ArgumentParser("Remove dimensions from las")
  parser.add_argument(
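
For context, remove_dimensions_from_points is the new array-level entry point and remove_dimensions_from_las is now a thin file-level wrapper around it. A minimal usage sketch of the wrapper, assuming a hypothetical tile with two extra dimensions named dtm_marker and dsm_marker (file paths are placeholders):

    from pdaltools.las_remove_dimensions import remove_dimensions_from_las

    # Drop two extra dimensions; X/Y/Z (and x/y/z) are rejected by the new assert.
    remove_dimensions_from_las(
        input_las="tile.laz",            # hypothetical input path
        dimensions=["dtm_marker", "dsm_marker"],
        output_las="tile_pruned.laz",    # hypothetical output path
    )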
@@ -0,0 +1,79 @@
+ """
+ Rename dimensions in a LAS file using PDAL's Python API.
+
+ This script allows renaming dimensions in a LAS file while preserving all other data.
+ """
+
+ import argparse
+ import pdal
+ import sys
+ from pathlib import Path
+ from pdaltools.las_remove_dimensions import remove_dimensions_from_points
+
+
+ def rename_dimension(input_file: str, output_file: str, old_dims: list[str], new_dims: list[str]):
+ """
+ Rename one or multiple dimensions in a LAS file using PDAL.
+
+ Args:
+ input_file: Path to the input LAS file
+ output_file: Path to save the output LAS file
+ old_dims: List of names of dimensions to rename
+ new_dims: List of new names for the dimensions
+ """
+
+ # Validate dimensions
+ if len(old_dims) != len(new_dims):
+ raise ValueError("Number of old dimensions must match number of new dimensions")
+
+ mandatory_dimensions = ["X", "Y", "Z", "x", "y", "z"]
+ for dim in new_dims:
+ if dim in mandatory_dimensions:
+ raise ValueError(f"New dimension {dim} cannot be a mandatory dimension (X,Y,Z,x,y,z)")
+
+ pipeline = pdal.Pipeline() | pdal.Reader.las(input_file)
+ for old, new in zip(old_dims, new_dims):
+ pipeline |= pdal.Filter.ferry(dimensions=f"{old} => {new}")
+ pipeline |= pdal.Writer.las(output_file)
+ pipeline.execute()
+ points = pipeline.arrays[0]
+
+ # Remove old dimensions
+ remove_dimensions_from_points(points, pipeline.metadata, old_dims, output_file)
+
+
+ def main():
+ parser = argparse.ArgumentParser(description="Rename dimensions in a LAS file")
+ parser.add_argument("input_file", help="Input LAS file")
+ parser.add_argument("output_file", help="Output LAS file")
+ parser.add_argument(
+ "--old-dims",
+ nargs="+",
+ required=True,
+ help="Names of dimensions to rename (can specify multiple)",
+ )
+ parser.add_argument(
+ "--new-dims",
+ nargs="+",
+ required=True,
+ help="New names for the dimensions (must match --old-dims count)",
+ )
+
+ args = parser.parse_args()
+
+ # Validate input file
+ input_path = Path(args.input_file)
+ if not input_path.exists():
+ print(f"Error: Input file {args.input_file} does not exist", file=sys.stderr)
+ sys.exit(1)
+
+ # Validate output file
+ output_path = Path(args.output_file)
+ if output_path.exists():
+ print(f"Warning: Output file {args.output_file} already exists. It will be overwritten.")
+
+ rename_dimension(args.input_file, args.output_file, args.old_dims, args.new_dims)
+
+
+ if __name__ == "__main__":
+ main()
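
The new module can be used either from Python or via its argparse CLI (positional input/output plus --old-dims/--new-dims). A minimal sketch of the Python call, with placeholder paths and dimension names borrowed from the tests:

    from pdaltools.las_rename_dimension import rename_dimension

    # Rename two extra dimensions in one pass; old_dims and new_dims are matched by position.
    rename_dimension(
        "tile.laz",                # hypothetical input path
        "tile_renamed.laz",        # hypothetical output path
        old_dims=["dtm_marker", "dsm_marker"],
        new_dims=["new_dtm_marker", "new_dsm_marker"],
    )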
@@ -1,11 +1,11 @@
  """Re-write las file with expected format:
- - laz version
- - [TODO] nomenclature ???
- - record format
- - global encoding
- - projection
- - precision
- - no extra-dims
+ - laz version
+ - [TODO] nomenclature ???
+ - record format
+ - global encoding
+ - projection
+ - precision
+ - no extra-dims
  """

  import argparse
@@ -18,6 +18,7 @@ from typing import Dict, List
  import pdal

  from pdaltools.unlock_file import copy_and_hack_decorator
+ from pdaltools.las_rename_dimension import rename_dimension

  # Standard parameters to pass to the pdal writer
  STANDARD_PARAMETERS = dict(
@@ -60,6 +61,13 @@ def parse_args():
  help="List of extra dims to keep in the output (default=[], use 'all' to keep all extra dims), "
  "extra_dims must be specified with their type (see pdal.writers.las documentation, eg 'dim1=double')",
  )
+ parser.add_argument(
+ "--rename_dims",
+ default=[],
+ nargs="*",
+ type=str,
+ help="Rename dimensions in pairs: --rename_dims old_name1 new_name1 old_name2 new_name2 ...",
+ )
  return parser.parse_args()


@@ -73,11 +81,27 @@ def get_writer_parameters(new_parameters: Dict) -> Dict:


  def rewrite_with_pdal(
- input_file: str, output_file: str, params_from_parser: Dict, classes_to_remove: List = []
+ input_file: str, output_file: str, params_from_parser: Dict, classes_to_remove: List = [], rename_dims: List = []
  ) -> None:
  params = get_writer_parameters(params_from_parser)
+
+ # Create temporary file for dimension renaming if needed
+ if rename_dims:
+ with tempfile.NamedTemporaryFile(suffix=".laz", delete=False) as tmp_file:
+ tmp_file_name = tmp_file.name
+
+ # Rename dimensions
+ old_dims = rename_dims[::2]
+ new_dims = rename_dims[1::2]
+ rename_dimension(input_file, tmp_file_name, old_dims, new_dims)
+
+ # Use renamed file as input
+ input_file = tmp_file_name
+ else:
+ tmp_file_name = input_file
+
  pipeline = pdal.Pipeline()
- pipeline |= pdal.Reader.las(input_file)
+ pipeline |= pdal.Reader.las(tmp_file_name)
  if classes_to_remove:
  expression = "&&".join([f"Classification != {c}" for c in classes_to_remove])
  pipeline |= pdal.Filter.expression(expression=expression)
@@ -102,18 +126,24 @@ def exec_las2las(input_file: str, output_file: str):


  @copy_and_hack_decorator
- def standardize(input_file: str, output_file: str, params_from_parser: Dict, class_points_removed: []) -> None:
+ def standardize(
+ input_file: str, output_file: str, params_from_parser: Dict, class_points_removed: [], rename_dims: []
+ ) -> None:
  filename = os.path.basename(output_file)
  with tempfile.NamedTemporaryFile(suffix=filename) as tmp:
- rewrite_with_pdal(input_file, tmp.name, params_from_parser, class_points_removed)
+ rewrite_with_pdal(input_file, tmp.name, params_from_parser, class_points_removed, rename_dims)
  exec_las2las(tmp.name, output_file)


- if __name__ == "__main__":
+ def main():
  args = parse_args()
  params_from_parser = dict(
  dataformat_id=args.record_format,
  a_srs=args.projection,
  extra_dims=args.extra_dims,
  )
- standardize(args.input_file, args.output_file, params_from_parser, args.class_points_removed)
+ standardize(args.input_file, args.output_file, params_from_parser, args.class_points_removed, args.rename_dims)
+
+
+ if __name__ == "__main__":
+ main()
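
With the new rename_dims parameter, standardize chains the renaming step before the usual rewrite and las2las pass. A sketch of a call mirroring test_standardize_with_all_options (paths are placeholders; the rename list is flat old/new pairs):

    from pdaltools.standardize_format import standardize

    params = {"dataformat_id": 6, "a_srs": "EPSG:2154", "extra_dims": "all"}
    standardize(
        "input.laz",          # hypothetical input path
        "standardized.laz",   # hypothetical output path
        params,
        ["64"],                                                             # class_points_removed
        ["dtm_marker", "new_dtm_marker", "dsm_marker", "new_dsm_marker"],   # rename_dims
    )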
@@ -1,5 +1,4 @@
- """Tools to handle malformed las/laz files
- """
+ """Tools to handle malformed las/laz files"""

  # https://gis.stackexchange.com/questions/413191/python-pdal-error-reading-format-1-4-las-file-readers-las-error-global-enco

@@ -272,6 +272,22 @@ def test_add_points_from_geometry_to_las(input_file, input_points, epsg, expecte
  input_points, input_file, OUTPUT_FILE, 68, epsg, 1000, spacing, altitude_column
  )
  assert Path(OUTPUT_FILE).exists() # check output exists
+
+ # Read input and output files to compare headers
+ input_las = laspy.read(input_file)
+ output_las = laspy.read(OUTPUT_FILE)
+
+ # Compare headers
+ assert input_las.header.version == output_las.header.version
+ assert input_las.header.system_identifier == output_las.header.system_identifier
+ assert input_las.header.extra_header_bytes == output_las.header.extra_header_bytes
+ assert input_las.header.extra_vlr_bytes == output_las.header.extra_vlr_bytes
+ assert input_las.header.number_of_evlrs == output_las.header.number_of_evlrs
+ assert input_las.header.point_format == output_las.header.point_format
+ assert np.array_equal(input_las.header.scales, output_las.header.scales)
+ assert np.array_equal(input_las.header.offsets, output_las.header.offsets)
+ assert input_las.header.vlrs[0].string == output_las.header.vlrs[0].string
+
  point_count = compute_count_one_file(OUTPUT_FILE)["68"]
  assert point_count == expected_nb_points # Add all points from geojson

@@ -0,0 +1,159 @@
+ import os
+ import pytest
+ import tempfile
+ import numpy as np
+ import laspy
+ import sys
+ from pdaltools.las_rename_dimension import rename_dimension, main
+ from pyproj import CRS
+
+ def create_test_las_file():
+ """Create a temporary LAS file with test data."""
+ with tempfile.NamedTemporaryFile(suffix='.las', delete=False) as tmp_file:
+ # Create a LAS file with some test points
+ header = laspy.LasHeader(point_format=3, version="1.4")
+ header.add_extra_dim(laspy.ExtraBytesParams(name="test_dim", type=np.float32))
+ header.add_extra_dim(laspy.ExtraBytesParams(name="test_dim2", type=np.int32))
+
+ las = laspy.LasData(header)
+
+ crs_pyproj = CRS.from_string("epsg:4326")
+ las.header.add_crs(crs_pyproj)
+
+ # Add some test points
+ las.x = np.array([1.0, 2.0, 3.0])
+ las.y = np.array([4.0, 5.0, 6.0])
+ las.z = np.array([7.0, 8.0, 9.0])
+ las.test_dim = np.array([10.0, 11.0, 12.0])
+ las.test_dim2 = np.array([12, 13, 14])
+
+ las.write(tmp_file.name)
+ return tmp_file.name
+
+ def test_rename_dimension():
+ """Test renaming a dimension in a LAS file."""
+ # Create a temporary input LAS file
+ input_file = create_test_las_file()
+
+ # Create temporary output file
+ with tempfile.NamedTemporaryFile(suffix='.las', delete=False) as tmp_file:
+ output_file = tmp_file.name
+
+ try:
+ # Rename dimension using direct function call
+ rename_dimension(input_file, output_file, ["test_dim", "test_dim2"], ["new_test_dim", "new_test_dim2"])
+
+ # Verify the dimension was renamed
+ with laspy.open(output_file) as las_file:
+ las = las_file.read()
+ assert "new_test_dim" in las.point_format.dimension_names
+ assert "test_dim" not in las.point_format.dimension_names
+ assert "new_test_dim2" in las.point_format.dimension_names
+ assert "test_dim2" not in las.point_format.dimension_names
+
+ # Verify the data is preserved
+ np.testing.assert_array_equal(las.x, [1.0, 2.0, 3.0])
+ np.testing.assert_array_equal(las.y, [4.0, 5.0, 6.0])
+ np.testing.assert_array_equal(las.z, [7.0, 8.0, 9.0])
+ np.testing.assert_array_equal(las["new_test_dim"], [10.0, 11.0, 12.0])
+ np.testing.assert_array_equal(las["new_test_dim2"], [12, 13, 14])
+ finally:
+ # Clean up temporary files
+ try:
+ os.unlink(input_file)
+ os.unlink(output_file)
+ except:
+ pass
+
+ def test_rename_nonexistent_dimension():
+ """Test attempting to rename a dimension that doesn't exist."""
+ input_file = create_test_las_file()
+
+ with tempfile.NamedTemporaryFile(suffix='.las', delete=False) as tmp_file:
+ output_file = tmp_file.name
+
+ try:
+ with pytest.raises(RuntimeError):
+ rename_dimension(input_file, output_file, ["nonexistent_dim"], ["new_dim"])
+ finally:
+ os.unlink(input_file)
+ os.unlink(output_file)
+
+ def test_rename_to_existing_dimension():
+ """Test attempting to rename to an existing dimension."""
+ input_file = create_test_las_file()
+
+ with tempfile.NamedTemporaryFile(suffix='.las', delete=False) as tmp_file:
+ output_file = tmp_file.name
+
+ try:
+ with pytest.raises(ValueError):
+ rename_dimension(input_file, output_file, ["test_dim"], ["x"])
+ finally:
+ os.unlink(input_file)
+ os.unlink(output_file)
+
+ def test_rename_dimension_case_sensitive():
+ """Test that dimension renaming is case-sensitive."""
+ input_file = create_test_las_file()
+
+ with tempfile.NamedTemporaryFile(suffix='.las', delete=False) as tmp_file:
+ output_file = tmp_file.name
+
+ try:
+ with pytest.raises(RuntimeError):
+ rename_dimension(input_file, output_file, ["TEST_DIM"], ["new_dim"])
+ finally:
+ os.unlink(input_file)
+ os.unlink(output_file)
+
+
+ def test_rename_dimension_main():
+ """Test renaming dimensions using the main() function."""
+ # Create a temporary input LAS file
+ input_file = create_test_las_file()
+
+ # Create temporary output file
+ with tempfile.NamedTemporaryFile(suffix='.las', delete=False) as tmp_file:
+ output_file = tmp_file.name
+
+ try:
+ # Save original sys.argv
+ original_argv = sys.argv
+
+ # Mock command-line arguments
+ sys.argv = [
+ "las_rename_dimension.py", # script name
+ input_file,
+ output_file,
+ "--old-dims", "test_dim", "test_dim2",
+ "--new-dims", "new_test_dim", "new_test_dim2"
+ ]
+
+ # Call main() function
+ main()
+
+ # Restore original sys.argv
+ sys.argv = original_argv
+
+ # Verify the dimension was renamed
+ with laspy.open(output_file) as las_file:
+ las = las_file.read()
+ assert "new_test_dim" in las.point_format.dimension_names
+ assert "test_dim" not in las.point_format.dimension_names
+ assert "new_test_dim2" in las.point_format.dimension_names
+ assert "test_dim2" not in las.point_format.dimension_names
+
+ # Verify the data is preserved
+ np.testing.assert_array_equal(las.x, [1.0, 2.0, 3.0])
+ np.testing.assert_array_equal(las.y, [4.0, 5.0, 6.0])
+ np.testing.assert_array_equal(las.z, [7.0, 8.0, 9.0])
+ np.testing.assert_array_equal(las["new_test_dim"], [10.0, 11.0, 12.0])
+ np.testing.assert_array_equal(las["new_test_dim2"], [12, 13, 14])
+ finally:
+ # Clean up temporary files
+ try:
+ os.unlink(input_file)
+ os.unlink(output_file)
+ except:
+ pass
@@ -0,0 +1,24 @@
+ import os
+
+ import pdal
+ import pytest
+
+ TEST_PATH = os.path.dirname(os.path.abspath(__file__))
+
+
+ # this test only works with PDAL compiled on a custom fork and branch, so we mark it to avoid running it.
+ @pytest.mark.pdal_custom
+ def test_pdal_read_severals_extra_dims():
+ test_file = os.path.join(TEST_PATH, "data/las_with_several_extra_byte_bloc.laz")
+
+ pipeline = pdal.Reader.las(filename=test_file).pipeline()
+ metadata = pipeline.quickinfo["readers.las"]
+
+ # dimensions should contains 'Deviation' and 'confidence'
+ assert "Deviation" in metadata["dimensions"]
+ assert "confidence" in metadata["dimensions"]
+
+ # Test Python PDAL bindings
+ pipeline = pdal.Reader.las(filename=test_file).pipeline()
+ num_points = pipeline.execute()
+ assert num_points > 0
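
Note: pdal_custom is not a built-in pytest marker, so presumably it is registered in the project's pytest configuration and deselected in a normal run (e.g. pytest -m "not pdal_custom"). A hypothetical conftest.py sketch for registering it so pytest does not warn about an unknown marker:

    # conftest.py (sketch, not part of this package diff)
    def pytest_configure(config):
        # Register the custom marker used by test_pdal_custom.py.
        config.addinivalue_line(
            "markers", "pdal_custom: requires PDAL built from a custom fork/branch"
        )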
@@ -0,0 +1,339 @@
+ import logging
+ import os
+ import platform
+ import shutil
+ import subprocess as sp
+ from test.utils import EXPECTED_DIMS_BY_DATAFORMAT, get_pdal_infos_summary
+ import laspy
+ import pdal
+ import pytest
+ import tempfile
+ import sys
+
+ from pdaltools.count_occurences.count_occurences_for_attribute import (
+ compute_count_one_file,
+ )
+ from pdaltools.standardize_format import exec_las2las, rewrite_with_pdal, standardize, main
+
+ TEST_PATH = os.path.dirname(os.path.abspath(__file__))
+ TMP_PATH = os.path.join(TEST_PATH, "tmp")
+ INPUT_DIR = os.path.join(TEST_PATH, "data")
+
+ DEFAULT_PARAMS = {"dataformat_id": 6, "a_srs": "EPSG:2154", "extra_dims": []}
+ DEFAULT_PARAMS_WITH_ALL_EXTRA_DIMS = {"dataformat_id": 6, "a_srs": "EPSG:2154", "extra_dims": "all"}
+
+ MUTLIPLE_PARAMS = [
+ DEFAULT_PARAMS,
+ {"dataformat_id": 8, "a_srs": "EPSG:4326", "extra_dims": []},
+ {"dataformat_id": 8, "a_srs": "EPSG:2154", "extra_dims": ["dtm_marker=double", "dsm_marker=double"]},
+ {"dataformat_id": 8, "a_srs": "EPSG:2154", "extra_dims": "all"},
+ ]
+
+ def setup_module(module):
+ try:
+ shutil.rmtree(TMP_PATH)
+
+ except FileNotFoundError:
+ pass
+ os.mkdir(TMP_PATH)
+
+
+ @pytest.mark.parametrize(
+ "params",
+ [
+ DEFAULT_PARAMS,
+ DEFAULT_PARAMS_WITH_ALL_EXTRA_DIMS,
+ {"dataformat_id": 8, "a_srs": "EPSG:4326", "extra_dims": []},
+ {"dataformat_id": 8, "a_srs": "EPSG:2154", "extra_dims": ["dtm_marker=double", "dsm_marker=double"]},
+ ],
+ )
+ def test_rewrite_with_pdal_format(params):
+ input_file = os.path.join(INPUT_DIR, "test_data_77055_627755_LA93_IGN69_extra_dims.laz")
+ output_file = os.path.join(TMP_PATH, "formatted.laz")
+ rewrite_with_pdal(input_file, output_file, params, [])
+ # check file exists
+ assert os.path.isfile(output_file)
+ # check values from metadata
+ json_info = get_pdal_infos_summary(output_file)
+ if pdal.info.version < "2.5":
+ raise NotImplementedError("This test is not implemented for pdal < 2.5")
+ elif pdal.info.version <= "2.5.2":
+ metadata = json_info["summary"]["metadata"][1]
+ else:
+ metadata = json_info["summary"]["metadata"]
+ assert metadata["compressed"] is True
+ assert metadata["minor_version"] == 4
+ assert metadata["global_encoding"] == 17
+ assert metadata["dataformat_id"] == params["dataformat_id"]
+ # Check that there is no extra dim
+ dimensions = set([d.strip() for d in json_info["summary"]["dimensions"].split(",")])
+ if params["extra_dims"] == "all":
+ assert EXPECTED_DIMS_BY_DATAFORMAT[params["dataformat_id"]].issubset(dimensions)
+ else:
+ extra_dims_names = [dim.split("=")[0] for dim in params["extra_dims"]]
+ assert dimensions == EXPECTED_DIMS_BY_DATAFORMAT[params["dataformat_id"]].union(extra_dims_names)
+
+ # Check that there is the expected number of points for each class
+ expected_points_counts = compute_count_one_file(input_file)
+
+ output_points_counts = compute_count_one_file(output_file)
+ assert output_points_counts == expected_points_counts
+
+ # TODO: Check srs
+ # TODO: check precision
+
+ @pytest.mark.parametrize(
+ "params, rename_dims",
+ [
+ (DEFAULT_PARAMS_WITH_ALL_EXTRA_DIMS, []), # No renaming
+ (DEFAULT_PARAMS, ["dtm_marker", "new_dtm_marker"]), # Single dimension rename
+ (DEFAULT_PARAMS_WITH_ALL_EXTRA_DIMS, ["dtm_marker", "new_dtm_marker"]), # Single dimension rename
+ (DEFAULT_PARAMS_WITH_ALL_EXTRA_DIMS, ["dtm_marker", "new_dtm_marker", "dsm_marker", "new_dsm_marker"]), # Multiple dimensions rename
+ ],
+ )
+ def test_rewrite_with_pdal_rename_dimensions(params, rename_dims):
+ input_file = os.path.join(INPUT_DIR, "test_data_77055_627755_LA93_IGN69_extra_dims.laz")
+ output_file = os.path.join(TMP_PATH, "formatted_with_rename.laz")
+
+ # Check if we export all extra dims
+ export_with_all_extra_dims = params["extra_dims"] == "all"
+
+ # Get original dimensions
+ with laspy.open(input_file) as las_file:
+ las = las_file.read()
+ original_dims = las.point_format.dimension_names
+
+ # Standardize with dimension renaming
+ rewrite_with_pdal(input_file, output_file, params, [], rename_dims)
+
+ # Verify dimensions were renamed
+ with laspy.open(output_file) as las_file:
+ las = las_file.read()
+
+ for i in range(0, len(rename_dims), 2):
+ old_dim = rename_dims[i]
+ new_dim = rename_dims[i + 1]
+ if export_with_all_extra_dims:
+ assert new_dim in las.point_format.dimension_names
+ assert old_dim not in las.point_format.dimension_names
+ else:
+ assert new_dim not in las.point_format.dimension_names
+ assert old_dim not in las.point_format.dimension_names
+
+ new_dims = las.point_format.dimension_names
+
+ # Make dimensions case-insensitive (ex : red => Red with pdal transform)
+ new_dims_lowercase = [dim.casefold() for dim in new_dims]
+ original_dims_lowercase = [dim.casefold() for dim in original_dims]
+
+ # Check that other dimensions are preserved
+ if export_with_all_extra_dims:
+ for dim in original_dims_lowercase:
+ old_dims_renammed = rename_dims[::2]
+ # If dimension wasn't renamed and is not NIR (wich is 'infrared' in Some las files)
+ if dim not in old_dims_renammed and dim != 'nir':
+ assert dim in new_dims_lowercase, f"Original dimension {dim} was removed unexpectedly"
+
+
+ # Verify points count is preserved
+ original_count = compute_count_one_file(input_file)
+ new_count = compute_count_one_file(output_file)
+ assert original_count == new_count, "Points count changed unexpectedly"
+
+
+ @pytest.mark.parametrize(
+ "classes_to_remove",
+ [
+ [],
+ [2, 3],
+ [1, 2, 3, 4, 5, 6, 64], # remove all classes
+ ],
+ )
+ def test_standardize_classes(classes_to_remove):
+ input_file = os.path.join(INPUT_DIR, "test_data_77055_627755_LA93_IGN69_extra_dims.laz")
+ output_file = os.path.join(TMP_PATH, "formatted.laz")
+ rewrite_with_pdal(input_file, output_file, DEFAULT_PARAMS, classes_to_remove)
+ # Check that there is the expected number of points for each class
+ expected_points_counts = compute_count_one_file(input_file)
+ for cl in classes_to_remove:
+ expected_points_counts.pop(str(cl))
+
+ output_points_counts = compute_count_one_file(output_file)
+ assert output_points_counts == expected_points_counts
+
+
+ def exec_lasinfo(input_file: str):
+ if platform.processor() == "arm" and platform.architecture()[0] == "64bit":
+ lasinfo = "lasinfo64"
+ else:
+ lasinfo = "lasinfo"
+ r = sp.run([lasinfo, "-stdout", input_file], stderr=sp.PIPE, stdout=sp.PIPE)
+ if r.returncode == 1:
+ msg = r.stderr.decode()
+ print(msg)
+ raise RuntimeError(msg)
+
+ output = r.stdout.decode()
+ return output
+
+
+ def assert_lasinfo_no_warning(input_file: str):
+ errors = [line for line in exec_lasinfo(input_file).splitlines() if "WARNING" in line]
+
+ for line in errors:
+ print(line)
+
+ assert errors == [], errors
+
+
+ def test_exec_las2las_error():
+ with pytest.raises(RuntimeError):
+ exec_las2las("not_existing_input_file", "output_file")
+
+ def test_standardize_with_all_options():
+ """
+ Test standardize with all option
+ """
+ input_file = os.path.join(INPUT_DIR, "test_data_77055_627755_LA93_IGN69_extra_dims.laz")
+ output_file = os.path.join(TMP_PATH, "test_standardize_with_all_extra_dims.laz")
+
+ rename_dims = ["dtm_marker", "new_dtm_marker", "dsm_marker", "new_dsm_marker"]
+ remove_classes = ["64"]
+
+ # Standardize with all extra dimensions
+ params = DEFAULT_PARAMS_WITH_ALL_EXTRA_DIMS
+ standardize(input_file, output_file, params, remove_classes, rename_dims)
+
+ # Check that there is the expected number of points for each class
+ expected_points_counts = compute_count_one_file(input_file)
+ for cl in remove_classes:
+ expected_points_counts.pop(str(cl))
+ output_points_counts = compute_count_one_file(output_file)
+ assert output_points_counts == expected_points_counts
+
+ # Get original dimensions
+ with laspy.open(input_file) as las_file:
+ original_las = las_file.read()
+ original_dims = original_las.point_format.dimension_names
+
+ # Verify all extra dimensions are preserved and renamed if needed
+ with laspy.open(output_file) as las_file:
+ las = las_file.read()
+ new_dims = las.point_format.dimension_names
+
+ # Make dimensions case-insensitive (ex : red => Red with pdal transform)
+ new_dims_lowercase = [dim.casefold() for dim in new_dims]
+ original_dims_lowercase = [dim.casefold() for dim in original_dims]
+
+ # Verify all original dimensions are present
+ for dim in original_dims_lowercase:
+ old_dims_renammed = rename_dims[::2]
+ # If dimension wasn't renamed and is not NIR (wich is 'infrared' in Some las files)
+ if dim not in old_dims_renammed and dim != 'nir':
+ assert dim in new_dims_lowercase, f"Original dimension {dim} was removed unexpectedly"
+
+
+ def test_standardize_does_NOT_produce_any_warning_with_Lasinfo():
+ # bad file on the store (44 Mo)
+ # input_file = (
+ # "/var/data/store-lidarhd/developpement/standaLAS/demo_standardization/Semis_2022_0584_6880_LA93_IGN69.laz"
+ # )
+
+ input_file = os.path.join(TEST_PATH, "data/classified_laz/test_data_77050_627755_LA93_IGN69.laz")
+ output_file = os.path.join(TMP_PATH, "test_standardize_produce_no_warning_with_lasinfo.las")
+
+ # if you want to see input_file warnings
+ # assert_lasinfo_no_warning(input_file)
+
+ standardize(input_file, output_file, DEFAULT_PARAMS, [], [])
+ assert_lasinfo_no_warning(output_file)
+
+
+ def test_standardize_malformed_laz():
+ input_file = os.path.join(TEST_PATH, "data/test_pdalfail_0643_6319_LA93_IGN69.laz")
+ output_file = os.path.join(TMP_PATH, "standardize_pdalfail_0643_6319_LA93_IGN69.laz")
+ standardize(input_file, output_file, DEFAULT_PARAMS, [], [])
+ assert os.path.isfile(output_file)
+
+
+ def test_main_with_rename_dimensions():
+ """
+ Test the main function with dimension renaming
+ """
+ input_file = os.path.join(INPUT_DIR, "test_data_77055_627755_LA93_IGN69_extra_dims.laz")
+ output_file = os.path.join(TMP_PATH, "test_main_with_rename.laz")
+
+ # Save original sys.argv
+ original_argv = sys.argv
+
+ try:
+ # Set up mock command-line arguments
+ sys.argv = [
+ "standardize_format",
+ "--input_file", input_file,
+ "--output_file", output_file,
+ "--record_format", "6",
+ "--projection", "EPSG:2154",
+ "--extra_dims", "all",
+ "--rename_dims", "dtm_marker", "new_dtm_marker", "dsm_marker", "new_dsm_marker"
+ ]
+
+ # Run main function
+ main()
+
+ # Verify output file exists
+ assert os.path.isfile(output_file)
+
+ # Verify dimensions were renamed
+ with laspy.open(output_file) as las_file:
+ las = las_file.read()
+ assert "new_dsm_marker" in las.point_format.dimension_names
+ assert "new_dtm_marker" in las.point_format.dimension_names
+ assert "dtm_marker" not in las.point_format.dimension_names
+ assert "dsm_marker" not in las.point_format.dimension_names
+
+ finally:
+ # Restore original sys.argv
+ sys.argv = original_argv
+
+
+ def test_main_with_class_points_removed():
+ """
+ Test the main function with class points removed
+ """
+ input_file = os.path.join(INPUT_DIR, "test_data_77055_627755_LA93_IGN69_extra_dims.laz")
+ output_file = os.path.join(TMP_PATH, "test_main_with_class_remove.laz")
+
+ # Save original sys.argv
+ original_argv = sys.argv
+
+ try:
+ # Set up mock command-line arguments
+ sys.argv = [
+ "standardize_format",
+ "--input_file", input_file,
+ "--output_file", output_file,
+ "--record_format", "6",
+ "--projection", "EPSG:2154",
+ "--class_points_removed", "64"
+ ]
+
+ # Run main function
+ main()
+
+ # Verify output file exists
+ assert os.path.isfile(output_file)
+
+ # Verify points count is reduced
+ original_count = compute_count_one_file(input_file)
+ new_count = compute_count_one_file(output_file)
+ assert new_count < original_count, "Points count should be reduced after removing class 64"
+
+ finally:
+ # Restore original sys.argv
+ sys.argv = original_argv
+
+
+ if __name__ == "__main__":
+ logging.basicConfig(level=logging.INFO)
+ test_standardize_format()
@@ -1,159 +0,0 @@
- import logging
- import os
- import platform
- import shutil
- import subprocess as sp
- from test.utils import EXPECTED_DIMS_BY_DATAFORMAT, get_pdal_infos_summary
-
- import pdal
- import pytest
-
- from pdaltools.count_occurences.count_occurences_for_attribute import (
- compute_count_one_file,
- )
- from pdaltools.standardize_format import exec_las2las, rewrite_with_pdal, standardize
-
- TEST_PATH = os.path.dirname(os.path.abspath(__file__))
- TMP_PATH = os.path.join(TEST_PATH, "tmp")
- INPUT_DIR = os.path.join(TEST_PATH, "data")
-
- DEFAULT_PARAMS = {"dataformat_id": 6, "a_srs": "EPSG:2154", "extra_dims": []}
-
- MUTLIPLE_PARAMS = [
- DEFAULT_PARAMS,
- {"dataformat_id": 8, "a_srs": "EPSG:4326", "extra_dims": []},
- {"dataformat_id": 8, "a_srs": "EPSG:2154", "extra_dims": ["dtm_marker=double", "dsm_marker=double"]},
- {"dataformat_id": 8, "a_srs": "EPSG:2154", "extra_dims": "all"},
- ]
-
-
- def setup_module(module):
- try:
- shutil.rmtree(TMP_PATH)
-
- except FileNotFoundError:
- pass
- os.mkdir(TMP_PATH)
-
-
- @pytest.mark.parametrize(
- "params",
- [
- DEFAULT_PARAMS,
- {"dataformat_id": 8, "a_srs": "EPSG:4326", "extra_dims": []},
- {"dataformat_id": 8, "a_srs": "EPSG:2154", "extra_dims": ["dtm_marker=double", "dsm_marker=double"]},
- {"dataformat_id": 8, "a_srs": "EPSG:2154", "extra_dims": "all"},
- ],
- )
- def test_standardize_format(params):
- input_file = os.path.join(INPUT_DIR, "test_data_77055_627755_LA93_IGN69_extra_dims.laz")
- output_file = os.path.join(TMP_PATH, "formatted.laz")
- rewrite_with_pdal(input_file, output_file, params, [])
- # check file exists
- assert os.path.isfile(output_file)
- # check values from metadata
- json_info = get_pdal_infos_summary(output_file)
- if pdal.info.version < "2.5":
- raise NotImplementedError("This test is not implemented for pdal < 2.5")
- elif pdal.info.version <= "2.5.2":
- metadata = json_info["summary"]["metadata"][1]
- else:
- metadata = json_info["summary"]["metadata"]
- assert metadata["compressed"] is True
- assert metadata["minor_version"] == 4
- assert metadata["global_encoding"] == 17
- assert metadata["dataformat_id"] == params["dataformat_id"]
- # Check that there is no extra dim
- dimensions = set([d.strip() for d in json_info["summary"]["dimensions"].split(",")])
- if params["extra_dims"] == "all":
- assert EXPECTED_DIMS_BY_DATAFORMAT[params["dataformat_id"]].issubset(dimensions)
- else:
- extra_dims_names = [dim.split("=")[0] for dim in params["extra_dims"]]
- assert dimensions == EXPECTED_DIMS_BY_DATAFORMAT[params["dataformat_id"]].union(extra_dims_names)
-
- # Check that there is the expected number of points for each class
- expected_points_counts = compute_count_one_file(input_file)
-
- output_points_counts = compute_count_one_file(output_file)
- assert output_points_counts == expected_points_counts
-
- # TODO: Check srs
- # TODO: check precision
-
-
- @pytest.mark.parametrize(
- "classes_to_remove",
- [
- [],
- [2, 3],
- [1, 2, 3, 4, 5, 6, 64], # remove all classes
- ],
- )
- def test_standardize_classes(classes_to_remove):
- input_file = os.path.join(INPUT_DIR, "test_data_77055_627755_LA93_IGN69_extra_dims.laz")
- output_file = os.path.join(TMP_PATH, "formatted.laz")
- rewrite_with_pdal(input_file, output_file, DEFAULT_PARAMS, classes_to_remove)
- # Check that there is the expected number of points for each class
- expected_points_counts = compute_count_one_file(input_file)
- for cl in classes_to_remove:
- expected_points_counts.pop(str(cl))
-
- output_points_counts = compute_count_one_file(output_file)
- assert output_points_counts == expected_points_counts
-
-
- def exec_lasinfo(input_file: str):
- if platform.processor() == "arm" and platform.architecture()[0] == "64bit":
- lasinfo = "lasinfo64"
- else:
- lasinfo = "lasinfo"
- r = sp.run([lasinfo, "-stdout", input_file], stderr=sp.PIPE, stdout=sp.PIPE)
- if r.returncode == 1:
- msg = r.stderr.decode()
- print(msg)
- raise RuntimeError(msg)
-
- output = r.stdout.decode()
- return output
-
-
- def assert_lasinfo_no_warning(input_file: str):
- errors = [line for line in exec_lasinfo(input_file).splitlines() if "WARNING" in line]
-
- for line in errors:
- print(line)
-
- assert errors == [], errors
-
-
- def test_exec_las2las_error():
- with pytest.raises(RuntimeError):
- exec_las2las("not_existing_input_file", "output_file")
-
-
- def test_standardize_does_NOT_produce_any_warning_with_Lasinfo():
- # bad file on the store (44 Mo)
- # input_file = (
- # "/var/data/store-lidarhd/developpement/standaLAS/demo_standardization/Semis_2022_0584_6880_LA93_IGN69.laz"
- # )
-
- input_file = os.path.join(TEST_PATH, "data/classified_laz/test_data_77050_627755_LA93_IGN69.laz")
- output_file = os.path.join(TMP_PATH, "test_standardize_produce_no_warning_with_lasinfo.las")
-
- # if you want to see input_file warnings
- # assert_lasinfo_no_warning(input_file)
-
- standardize(input_file, output_file, DEFAULT_PARAMS, [])
- assert_lasinfo_no_warning(output_file)
-
-
- def test_standardize_malformed_laz():
- input_file = os.path.join(TEST_PATH, "data/test_pdalfail_0643_6319_LA93_IGN69.laz")
- output_file = os.path.join(TMP_PATH, "standardize_pdalfail_0643_6319_LA93_IGN69.laz")
- standardize(input_file, output_file, DEFAULT_PARAMS, [])
- assert os.path.isfile(output_file)
-
-
- if __name__ == "__main__":
- logging.basicConfig(level=logging.INFO)
- test_standardize_format()