ign-pdal-tools 1.15.5__tar.gz → 1.15.7__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/PKG-INFO +1 -1
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/ign_pdal_tools.egg-info/PKG-INFO +1 -1
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/pdaltools/_version.py +1 -1
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/pdaltools/add_points_in_pointcloud.py +1 -1
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/pdaltools/color.py +110 -28
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/pdaltools/create_random_laz.py +14 -18
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/pdaltools/download_image.py +1 -1
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/pdaltools/las_add_buffer.py +3 -3
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/pdaltools/las_comparison.py +72 -15
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/pdaltools/las_rename_dimension.py +4 -2
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/pdaltools/replace_area_in_pointcloud.py +1 -1
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/pdaltools/standardize_format.py +42 -25
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/pdaltools/unlock_file.py +1 -2
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/test/test_add_points_in_pointcloud.py +48 -0
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/test/test_color.py +72 -16
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/test/test_create_random_laz.py +30 -17
- ign_pdal_tools-1.15.7/test/test_las_comparison.py +585 -0
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/test/test_las_remove_dimensions.py +4 -4
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/test/test_las_rename_dimension.py +37 -36
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/test/test_pdal.py +4 -4
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/test/test_standardize_format.py +4 -3
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/test/test_unlock.py +2 -2
- ign_pdal_tools-1.15.5/test/test_las_comparison.py +0 -284
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/LICENSE.md +0 -0
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/README.md +0 -0
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/ign_pdal_tools.egg-info/SOURCES.txt +0 -0
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/ign_pdal_tools.egg-info/dependency_links.txt +0 -0
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/ign_pdal_tools.egg-info/top_level.txt +0 -0
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/pdaltools/las_clip.py +0 -0
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/pdaltools/las_info.py +0 -0
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/pdaltools/las_merge.py +0 -0
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/pdaltools/las_remove_dimensions.py +0 -0
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/pdaltools/pcd_info.py +0 -0
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/pdaltools/replace_attribute_in_las.py +0 -0
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/pyproject.toml +0 -0
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/setup.cfg +0 -0
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/test/test_download_image.py +0 -0
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/test/test_las_add_buffer.py +0 -0
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/test/test_las_clip.py +0 -0
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/test/test_las_info.py +0 -0
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/test/test_las_merge.py +0 -0
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/test/test_pcd_info.py +0 -0
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/test/test_replace_area_in_pointcloud.py +0 -0
- {ign_pdal_tools-1.15.5 → ign_pdal_tools-1.15.7}/test/test_replace_attribute_in_las.py +0 -0
--- a/pdaltools/add_points_in_pointcloud.py
+++ b/pdaltools/add_points_in_pointcloud.py
@@ -156,7 +156,7 @@ def add_points_to_las(
     new_points.z = z_coords.astype(new_points.z.dtype)
     new_points.classification = classes.astype(new_points.classification.dtype)

-    with tempfile.NamedTemporaryFile(suffix="_new_points.las") as tmp:
+    with tempfile.NamedTemporaryFile(suffix="_new_points.las", delete_on_close=False) as tmp:
         with laspy.open(tmp.name, mode="w", header=header) as las_file:
             las_file.write_points(new_points)

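Most of the changes in this release replace bare `tempfile.NamedTemporaryFile(...)` calls with `delete_on_close=False`, as in the hunk above. This keyword was added to `tempfile.NamedTemporaryFile` in Python 3.12: the file stays on disk when its handle is closed and is only removed when the context manager exits, which lets laspy, PDAL or GDAL reopen it by name on Windows. A minimal sketch of the pattern (the helper name `write_then_reopen` is illustrative, not part of the package):

```python
import tempfile


def write_then_reopen() -> bytes:
    # delete_on_close=False (Python 3.12+): closing the handle keeps the file on disk,
    # so another writer/reader can open it by name; it is removed when the context exits.
    with tempfile.NamedTemporaryFile(suffix=".las", delete_on_close=False) as tmp:
        tmp.close()  # on Windows the handle must be closed before reopening by name
        with open(tmp.name, "wb") as f:
            f.write(b"placeholder payload")
        with open(tmp.name, "rb") as f:
            data = f.read()
    # here tmp.name has already been deleted
    return data
```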
--- a/pdaltools/color.py
+++ b/pdaltools/color.py
@@ -31,7 +31,7 @@ def match_min_max_with_pixel_size(min_d: float, max_d: float, pixel_per_meter: f
     return min_d, max_d


-def
+def color_from_stream(
     input_file: str,
     output_file: str,
     proj="",
@@ -101,7 +101,7 @@ def color(

     tmp_ortho = None
     if color_rvb_enabled:
-        tmp_ortho = tempfile.NamedTemporaryFile(suffix="_rvb.tif")
+        tmp_ortho = tempfile.NamedTemporaryFile(suffix="_rvb.tif", delete_on_close=False)
         download_image(
             proj,
             stream_RGB,
@@ -124,7 +124,7 @@ def color(

     tmp_ortho_irc = None
     if color_ir_enabled:
-        tmp_ortho_irc = tempfile.NamedTemporaryFile(suffix="_irc.tif")
+        tmp_ortho_irc = tempfile.NamedTemporaryFile(suffix="_irc.tif", delete_on_close=False)
         download_image(
             proj,
             stream_IRC,
@@ -156,28 +156,58 @@ def color(
     return tmp_ortho, tmp_ortho_irc


-def
-
-
-
-
-
+def color_from_files(
+    input_file: str,
+    output_file: str,
+    rgb_image: str,
+    irc_image: str,
+    color_rvb_enabled=True,
+    color_ir_enabled=True,
+    veget_index_file="",
+    vegetation_dim="Deviation",
+):
+    pipeline = pdal.Reader.las(filename=input_file)
+
+    writer_extra_dims = "all"
+
+    if veget_index_file and veget_index_file != "":
+        print(f"Remplissage du champ {vegetation_dim} à partir du fichier {veget_index_file}")
+        pipeline |= pdal.Filter.colorization(raster=veget_index_file, dimensions=f"{vegetation_dim}:1:256.0")
+        writer_extra_dims = [f"{vegetation_dim}=ushort"]
+
+    # Warning: the initial color is multiplied by 256 despite its initial 8-bits encoding
+    # which turns it to a 0 to 255*256 range.
+    # It is kept this way because of other dependencies that have been tuned to fit this range
+    if color_rvb_enabled:
+        pipeline |= pdal.Filter.colorization(raster=rgb_image, dimensions="Red:1:256.0, Green:2:256.0, Blue:3:256.0")
+    if color_ir_enabled:
+        pipeline |= pdal.Filter.colorization(raster=irc_image, dimensions="Infrared:1:256.0")
+
+    pipeline |= pdal.Writer.las(
+        filename=output_file, extra_dims=writer_extra_dims, minor_version="4", dataformat_id="8", forward="all"
     )
-
-
-
-
-
-
-
-
-
+
+    print("Traitement du nuage de point")
+    pipeline.execute()
+
+
+def argument_parser():
+    parser = argparse.ArgumentParser("Colorize tool")
+    subparsers = parser.add_subparsers(required=True)
+
+    # first command is 'from_stream'
+    from_stream = subparsers.add_parser("from_stream", help="Images are downloaded from streams")
+    from_stream.add_argument(
+        "--proj", "-p", type=str, default="", help="Projection, default will use projection from metadata input"
     )
-
-
+    from_stream.add_argument("--timeout", "-t", type=int, default=300, help="Timeout, in seconds")
+    from_stream.add_argument("--rvb", action="store_true", help="Colorize RVB")
+    from_stream.add_argument("--ir", action="store_true", help="Colorize IR")
+    from_stream.add_argument("--resolution", "-r", type=float, default=5, help="Resolution, in pixel per meter")
+    from_stream.add_argument(
+        "--check-images", "-c", action="store_true", help="Check that downloaded image is not white"
     )
-
-    parser.add_argument(
+    from_stream.add_argument(
         "--stream-RGB",
         type=str,
         default="ORTHOIMAGERY.ORTHOPHOTOS",
@@ -186,27 +216,49 @@ default stream (ORTHOIMAGERY.ORTHOPHOTOS) let the server choose the resolution
     for 20cm resolution rasters, use HR.ORTHOIMAGERY.ORTHOPHOTOS
     for 50 cm resolution rasters, use ORTHOIMAGERY.ORTHOPHOTOS.BDORTHO""",
     )
-
+    from_stream.add_argument(
         "--stream-IRC",
         type=str,
         default="ORTHOIMAGERY.ORTHOPHOTOS.IRC",
         help="""WMS raster stream for IRC colorization. Default to ORTHOIMAGERY.ORTHOPHOTOS.IRC
     Documentation about possible stream : https://geoservices.ign.fr/services-web-experts-ortho""",
     )
-
+    from_stream.add_argument(
         "--size-max-GPF",
         type=int,
         default=5000,
         help="Maximum edge size (in pixels) of downloaded images."
         " If input file needs more, several images are downloaded and merged.",
     )
+    add_common_options(from_stream)
+    from_stream.set_defaults(func=from_stream_func)

-
+    # second command is 'from_files'
+    from_files = subparsers.add_parser("from_files", help="Images are in directories from RGB/IRC")
+    from_files.add_argument("--image_RGB", type=str, required=True, help="RGB image filepath")
+    from_files.add_argument("--image_IRC", type=str, required=True, help="IRC image filepath")
+    add_common_options(from_files)
+    from_files.set_defaults(func=from_files_func)

+    return parser

-
-
-
+
+def add_common_options(parser):
+    parser.add_argument("--input", "-i", type=str, required=True, help="Input file")
+    parser.add_argument("--output", "-o", type=str, default="", help="Output file")
+    parser.add_argument(
+        "--vegetation",
+        type=str,
+        default="",
+        help="Vegetation file (raster), value will be stored in 'vegetation_dim' field",
+    )
+    parser.add_argument(
+        "--vegetation_dim", type=str, default="Deviation", help="name of the extra_dim uses for the vegetation value"
+    )
+
+
+def from_stream_func(args):
+    color_from_stream(
         input_file=args.input,
         output_file=args.output,
         proj=args.proj,
@@ -221,3 +273,33 @@ if __name__ == "__main__":
         stream_IRC=args.stream_IRC,
         size_max_gpf=args.size_max_GPF,
     )
+
+
+def from_files_func(args):
+    if args.image_RGB and args.image_RGB != "":
+        color_rvb_enabled = True
+    else:
+        color_rvb_enabled = False
+    if args.image_IRC and args.image_IRC != "":
+        color_irc_enabled = True
+    else:
+        color_irc_enabled = False
+
+    if not color_rvb_enabled and not color_irc_enabled:
+        raise ValueError("At least one of --rvb or --ir must be provided")
+
+    color_from_files(
+        input_file=args.input,
+        output_file=args.output,
+        rgb_image=args.image_RGB,
+        irc_image=args.image_IRC,
+        color_rvb_enabled=color_rvb_enabled,
+        color_ir_enabled=color_irc_enabled,
+        veget_index_file=args.vegetation,
+        vegetation_dim=args.vegetation_dim,
+    )
+
+
+if __name__ == "__main__":
+    args = argument_parser.parse_args()
+    args.func(args)
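color.py now exposes the colorization CLI through `from_stream` and `from_files` subcommands that share `--input`, `--output`, `--vegetation` and `--vegetation_dim`. A hedged sketch of driving the new parser programmatically (file paths are placeholders):

```python
from pdaltools.color import argument_parser

# Build the subcommand parser added in 1.15.7 and dispatch to its handler.
parser = argument_parser()
args = parser.parse_args(
    [
        "from_files",
        "--input", "tile.laz",            # placeholder paths
        "--output", "tile_colorized.laz",
        "--image_RGB", "ortho_rgb.tif",
        "--image_IRC", "ortho_irc.tif",
    ]
)
args.func(args)  # from_files_func -> color_from_files
```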
--- a/pdaltools/create_random_laz.py
+++ b/pdaltools/create_random_laz.py
@@ -1,10 +1,11 @@
-import numpy as np
-import laspy
-from pathlib import Path
 import argparse
-from
+from pathlib import Path
 from typing import List, Tuple

+import laspy
+import numpy as np
+from pyproj import CRS
+

 def create_random_laz(
     output_file: str,
@@ -27,7 +28,7 @@ def create_random_laz(
                 (default: (650000, 6810000) ; around Paris)
         extra_dims: List of tuples (dimension_name, dimension_type) where type can be:
                     'float32', 'float64', 'int8', 'int16', 'int32', 'int64', 'uint8', 'uint16', 'uint32', 'uint64'
-        classifications: Optional list of classification values.
+        classifications: Optional list of classification values.
     """

     # Create a new point cloud
@@ -103,7 +104,6 @@ def create_random_laz(


 def test_output_file(result: dict, output_file: str):
-
     # Validate output file path
     output_path = Path(output_file)
     if not output_path.exists():
@@ -126,12 +126,14 @@ def parse_args():
     )
     parser.add_argument("--crs", type=int, default=2154, help="Projection code")
     parser.add_argument(
-        "--center",
-
+        "--center",
+        type=float,
+        nargs=2,
+        default=[650000.0, 6810000.0],
+        help="Center coordinates (x y) of the area to generate points in (space-separated)",
     )
     parser.add_argument(
-        "--classifications", type=int, nargs=
-        help="List of classification values (space-separated)"
+        "--classifications", type=int, nargs="+", help="List of classification values (space-separated)"
     )
     return parser.parse_args()

@@ -145,19 +147,13 @@ def main():

     # Parse center
     center = tuple(args.center[:2])  # Only take first 2 values if more are provided
-
+
     # Parse classifications if provided
     classifications = args.classifications

     # Call create_random_laz
     result = create_random_laz(
-        args.output_file,
-        args.point_format,
-        args.num_points,
-        args.crs,
-        center,
-        extra_dims,
-        classifications
+        args.output_file, args.point_format, args.num_points, args.crs, center, extra_dims, classifications
     )

     # Test output file
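create_random_laz.py now passes the generator arguments positionally on a single line and fixes the `--center`/`--classifications` options. A hedged sketch of an equivalent direct call, following the positional order used in `main()` (all values are placeholders):

```python
from pdaltools.create_random_laz import create_random_laz

# Positional order as in main(): output_file, point_format, num_points, crs,
# center, extra_dims, classifications. Values below are illustrative only.
result = create_random_laz(
    "random_points.laz",
    6,                           # LAS point format
    1000,                        # number of points
    2154,                        # EPSG code (CLI default)
    (650000.0, 6810000.0),       # center, around Paris (CLI default)
    [("Deviation", "uint16")],   # extra_dims as (name, type) tuples
    [1, 2, 5],                   # classification values
)
```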
--- a/pdaltools/download_image.py
+++ b/pdaltools/download_image.py
@@ -185,7 +185,7 @@ def download_image(proj, layer, minx, miny, maxx, maxy, pixel_per_meter, outfile
         tmp_gpg_ortho.append(cells_ortho_paths)

     # merge the cells
-    with tempfile.NamedTemporaryFile(suffix="_gpf.vrt") as tmp_vrt:
+    with tempfile.NamedTemporaryFile(suffix="_gpf.vrt", delete_on_close=False) as tmp_vrt:
         gdal.BuildVRT(tmp_vrt.name, tmp_gpg_ortho)
         gdal.Translate(outfile, tmp_vrt.name)

--- a/pdaltools/las_add_buffer.py
+++ b/pdaltools/las_add_buffer.py
@@ -153,7 +153,7 @@ def remove_points_from_buffer(input_file: str, output_file: str):
         input_file (str): path to the input file containing the "is_in_original" dimension
         output_file (str): path to the output_file
     """
-    with tempfile.NamedTemporaryFile(suffix="_with_additional_dim.las") as tmp_las:
+    with tempfile.NamedTemporaryFile(suffix="_with_additional_dim.las", delete_on_close=False) as tmp_las:
         pipeline = pdal.Pipeline() | pdal.Reader.las(input_file)
         pipeline |= pdal.Filter.range(limits=f"{ORIGINAL_TILE_TAG}[1:1]")
         pipeline |= pdal.Writer.las(filename=tmp_las.name, forward="all", extra_dims="all")
@@ -217,8 +217,8 @@ def run_on_buffered_las(
     )

     with (
-        tempfile.NamedTemporaryFile(suffix="_buffered_input.laz", dir=".") as buf_in,
-        tempfile.NamedTemporaryFile(suffix="_buffered_output.laz", dir=".") as buf_out,
+        tempfile.NamedTemporaryFile(suffix="_buffered_input.laz", dir=".", delete_on_close=False) as buf_in,
+        tempfile.NamedTemporaryFile(suffix="_buffered_output.laz", dir=".", delete_on_close=False) as buf_out,
     ):
         create_las_with_buffer(
             Path(input_file).parent,
--- a/pdaltools/las_comparison.py
+++ b/pdaltools/las_comparison.py
@@ -1,11 +1,13 @@
+import argparse
+from typing import Tuple, Dict, Optional
+
 import laspy
-from pathlib import Path
 import numpy as np
-
-from
+
+from pathlib import Path


-def compare_las_dimensions(file1: Path, file2: Path, dimensions: list = None) -> Tuple[bool, int, float]:
+def compare_las_dimensions(file1: Path, file2: Path, dimensions: list = None, precision: Optional[Dict[str, float]] = None) -> Tuple[bool, int, float]:
     """
     Compare specified dimensions between two LAS files.
     If no dimensions are specified, compares all available dimensions.
@@ -15,6 +17,8 @@ def compare_las_dimensions(file1: Path, file2: Path, dimensions: list = None) ->
         file1: Path to the first LAS file
         file2: Path to the second LAS file
         dimensions: List of dimension names to compare (optional)
+        precision: Dictionary mapping dimension names to tolerance values for float comparison.
+                   If None or dimension not in dict, uses exact comparison (default: None)

     Returns:
         bool: True if all specified dimensions are identical, False otherwise
@@ -58,20 +62,42 @@ def compare_las_dimensions(file1: Path, file2: Path, dimensions: list = None) ->
     # Compare each dimension
     for dim in dimensions:
         try:
+
             # Get sorted dimension arrays
             dim1 = np.array(las1[dim])[sort_idx1]
             dim2 = np.array(las2[dim])[sort_idx2]

+            # Get precision for this dimension (if specified)
+            dim_precision = None
+            if precision is not None and dim in precision:
+                dim_precision = precision[dim]
+
             # Compare dimensions
-            if not
-            #
-
-
-
-
-
-            print(f"
-
+            if dim_precision is not None:
+                # Use tolerance-based comparison for floats
+                are_equal = np.allclose(dim1, dim2, rtol=0, atol=dim_precision)
+                if not are_equal:
+                    # Find differences
+                    diff_mask = ~np.isclose(dim1, dim2, rtol=0, atol=dim_precision)
+                    diff_indices = np.where(diff_mask)[0]
+                    print(f"Found {len(diff_indices)} points with different {dim} (tolerance={dim_precision}):")
+                    for idx in diff_indices[:10]:  # Show first 10 differences
+                        diff_value = abs(dim1[idx] - dim2[idx])
+                        print(f"Point {idx}: file1={dim1[idx]}, file2={dim2[idx]}, diff={diff_value}")
+                    if len(diff_indices) > 10:
+                        print(f"... and {len(diff_indices) - 10} more differences")
+                    return False, len(diff_indices), 100 * len(diff_indices) / len(las1)
+            else:
+                # Exact comparison
+                if not np.array_equal(dim1, dim2):
+                    # Find differences
+                    diff_indices = np.where(dim1 != dim2)[0]
+                    print(f"Found {len(diff_indices)} points with different {dim}:")
+                    for idx in diff_indices[:10]:  # Show first 10 differences
+                        print(f"Point {idx}: file1={dim1[idx]}, file2={dim2[idx]}")
+                    if len(diff_indices) > 10:
+                        print(f"... and {len(diff_indices) - 10} more differences")
+                    return False, len(diff_indices), 100 * len(diff_indices) / len(las1)

         except KeyError:
             print(f"Dimension '{dim}' not found in one or both files")
@@ -92,12 +118,32 @@ def compare_las_dimensions(file1: Path, file2: Path, dimensions: list = None) ->

 # Update main function to use the new compare function
 def main():
-    parser = argparse.ArgumentParser(
+    parser = argparse.ArgumentParser(
+        description="Compare dimensions between two LAS files",
+        formatter_class=argparse.RawDescriptionHelpFormatter,
+        epilog="""
+Examples:
+  # Compare all dimensions with exact match
+  python las_comparison.py file1.las file2.las
+
+  # Compare specific dimensions with precision per dimension
+  python las_comparison.py file1.las file2.las --dimensions X Y Z --precision X=0.001 Y=0.001 Z=0.0001
+
+  # Compare all dimensions with precision for specific ones
+  python las_comparison.py file1.las file2.las --precision X=0.001 Y=0.001
+        """
+    )
     parser.add_argument("file1", type=str, help="Path to first LAS file")
     parser.add_argument("file2", type=str, help="Path to second LAS file")
     parser.add_argument(
         "--dimensions", nargs="*", help="List of dimensions to compare. If not specified, compares all dimensions."
     )
+    parser.add_argument(
+        "--precision", nargs="*", metavar="DIM=VAL",
+        help="Tolerance for float comparison per dimension (format: DIMENSION=PRECISION). "
+        "Example: --precision X=0.001 Y=0.001 Z=0.0001. "
+        "Dimensions not specified will use exact comparison."
+    )

     args = parser.parse_args()

@@ -108,7 +154,18 @@ def main():
         print("Error: One or both files do not exist")
         exit(1)

-
+    # Parse precision dictionary from command line arguments
+    precision_dict = None
+    if args.precision:
+        precision_dict = {}
+        for prec_spec in args.precision:
+            try:
+                dim_name, prec_value = prec_spec.split('=', 1)
+                precision_dict[dim_name] = float(prec_value)
+            except ValueError:
+                parser.error(f"Invalid precision format: '{prec_spec}'. Expected format: DIMENSION=PRECISION (e.g., X=0.001)")
+
+    result = compare_las_dimensions(file1, file2, args.dimensions, precision_dict)
     print(f"Dimensions comparison result: {'identical' if result[0] else 'different'}")
     return result

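`compare_las_dimensions` now takes an optional per-dimension tolerance dictionary in addition to the dimension list. A hedged sketch of a direct call (file names are placeholders):

```python
from pathlib import Path

from pdaltools.las_comparison import compare_las_dimensions

# Allow 1 mm of slack on X and Y; dimensions absent from the dict are compared exactly.
identical, n_diff, pct_diff = compare_las_dimensions(
    Path("tile_a.laz"),
    Path("tile_b.laz"),
    dimensions=["X", "Y", "Classification"],
    precision={"X": 0.001, "Y": 0.001},
)
print(identical, n_diff, pct_diff)
```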
--- a/pdaltools/las_rename_dimension.py
+++ b/pdaltools/las_rename_dimension.py
@@ -6,11 +6,13 @@ This script allows renaming dimensions in a LAS file while preserving all other

 import argparse
 import logging
-import pdal
 import sys
 from pathlib import Path
-
+
+import pdal
+
 from pdaltools.las_info import las_info_metadata
+from pdaltools.las_remove_dimensions import remove_dimensions_from_points


 def rename_dimension(input_file: str, output_file: str, old_dims: list[str], new_dims: list[str]):
--- a/pdaltools/replace_area_in_pointcloud.py
+++ b/pdaltools/replace_area_in_pointcloud.py
@@ -271,7 +271,7 @@ def replace_area(
     points = pipeline.execute()

     end = time.time()
-    print(f"Step 3: merge: {
+    print(f"Step 3: merge: {points}, points in {end-t3:.2f} seconds")
     print("all steps done in ", f"{end-start:.2f}", " seconds")


--- a/pdaltools/standardize_format.py
+++ b/pdaltools/standardize_format.py
@@ -9,13 +9,14 @@
 """

 import argparse
+import os
 import tempfile
 from typing import Dict, List

 import pdal

-from pdaltools.unlock_file import copy_and_hack_decorator
 from pdaltools.las_rename_dimension import rename_dimension
+from pdaltools.unlock_file import copy_and_hack_decorator

 # Standard parameters to pass to the pdal writer
 STANDARD_PARAMETERS = dict(
@@ -76,34 +77,50 @@ def get_writer_parameters(new_parameters: Dict) -> Dict:
     params = STANDARD_PARAMETERS | new_parameters
     return params

+
 @copy_and_hack_decorator
 def standardize(
-    input_file: str,
+    input_file: str,
+    output_file: str,
+    params_from_parser: Dict,
+    classes_to_remove: List = [],
+    rename_dims: List = []
 ) -> None:
+    """
+    Standardize a LAS/LAZ file with improved error handling and resource management.
+
+    Args:
+        input_file: Input file path
+        output_file: Output file path
+        params_from_parser: Parameters for the PDAL writer
+        classes_to_remove: List of classification classes to remove
+        rename_dims: List of dimension names to rename (pairs of old_name, new_name)
+    """
     params = get_writer_parameters(params_from_parser)
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    tmp_file_name = None
+
+    try:
+        # Create temporary file for dimension renaming if needed
+        if rename_dims:
+            with tempfile.NamedTemporaryFile(suffix=".laz", delete=False) as tmp_file:
+                tmp_file_name = tmp_file.name
+            old_dims = rename_dims[::2]
+            new_dims = rename_dims[1::2]
+            rename_dimension(input_file, tmp_file_name, old_dims, new_dims)
+            input_file = tmp_file_name
+
+        pipeline = pdal.Pipeline()
+        pipeline |= pdal.Reader.las(input_file)
+        if classes_to_remove:
+            expression = "&&".join([f"Classification != {c}" for c in classes_to_remove])
+            pipeline |= pdal.Filter.expression(expression=expression)
+        pipeline |= pdal.Writer(filename=output_file, forward="all", **params)
+        pipeline.execute()
+
+    finally:
+        # Clean up temporary file
+        if tmp_file_name and os.path.exists(tmp_file_name):
+            os.remove(tmp_file_name)


 def main():
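`standardize` now receives the writer parameters plus optional class filtering and dimension renaming, cleaning up its temporary file in a `finally` block. A hedged sketch of a direct call (paths, class codes and dimension names are placeholders):

```python
from pdaltools.standardize_format import standardize

standardize(
    input_file="tile_in.laz",
    output_file="tile_out.laz",
    params_from_parser={},                    # merged into STANDARD_PARAMETERS
    classes_to_remove=[65, 66],               # drop these Classification values
    rename_dims=["Deviation", "Confidence"],  # pairs: old_name, new_name
)
```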
--- a/pdaltools/unlock_file.py
+++ b/pdaltools/unlock_file.py
@@ -39,10 +39,9 @@ def copy_and_hack_decorator(func):
         if "readers.las: Global encoding WKT flag not set for point format 6 - 10." in str(e):
             args = list(args)
             in_file = args[0]
-            with tempfile.NamedTemporaryFile(suffix=os.path.splitext(in_file)[-1]) as tmp:
+            with tempfile.NamedTemporaryFile(suffix=os.path.splitext(in_file)[-1], delete_on_close=False) as tmp:
                 copy_and_hack_input_file(in_file, tmp.name)
                 args[0] = tmp.name
-
                 return func(*args, **kwargs)

         else:
--- a/test/test_add_points_in_pointcloud.py
+++ b/test/test_add_points_in_pointcloud.py
@@ -1,10 +1,12 @@
 import inspect
 import os
+import tempfile
 from pathlib import Path

 import geopandas as gpd
 import laspy
 import numpy as np
+import pdal
 import pytest
 from shapely.geometry import LineString, MultiPoint, Point

@@ -424,3 +426,49 @@ def test_parse_args():
     parsed_args_keys = args.__dict__.keys()
     main_parameters = inspect.signature(add_points_in_pointcloud.add_points_from_geometry_to_las).parameters.keys()
     assert set(parsed_args_keys) == set(main_parameters)
+
+
+def test_namedtemporaryfile_delete_on_close_false():
+    """Test that NamedTemporaryFile could be used on windows in the context with delete_on_close=False"""
+    temp_file_path = None
+
+    with tempfile.NamedTemporaryFile(suffix="_test.las", delete_on_close=False) as tmp:
+        temp_file_path = tmp.name
+        # Verify that the file exists during the context
+        assert os.path.exists(temp_file_path)
+
+        # Write some data to the temporary file
+        with open(temp_file_path, "w") as f:
+            f.write("test data")
+            f.close()
+
+    # Verify that the file still exists after exiting the context
+    assert not os.path.exists(
+        temp_file_path
+    ), f"Temporary file {temp_file_path} should not exist oustside the context with delete_on_close=False"
+
+
+def test_namedtemporaryfile_delete_false_with_pdal():
+    """Test that NamedTemporaryFile could be used on windows in the context
+    with delete_on_close=False and some pdal operations"""
+
+    def read_las(input_las, tmp):
+        pipeline = pdal.Pipeline()
+        pipeline |= pdal.Reader.las(filename=input_las)
+        pipeline |= pdal.Writer.las(filename=tmp.name, forward="all", extra_dims="all")
+        pipeline.execute()
+
+    input_las = os.path.join(TEST_PATH, "data/crop_duplicate.laz")
+    temp_file_path = None
+    with tempfile.NamedTemporaryFile(suffix="_test.las", delete_on_close=False) as tmp:
+        temp_file_path = tmp.name
+        read_las(input_las, tmp)
+        pipeline = pdal.Pipeline()
+        pipeline |= pdal.Reader.las(filename=tmp.name)
+        pipeline.execute()
+        assert os.path.exists(tmp.name)
+
+    # Verify that the file still exists after exiting the context
+    assert not os.path.exists(
+        temp_file_path
+    ), f"Temporary file {temp_file_path} should not exist oustside the context with delete_on_close=False"