ign-pdal-tools 1.14.0__tar.gz → 1.15.1__tar.gz

This diff shows the changes between two publicly released versions of the package, as published to their public registry; it is provided for informational purposes only.
Files changed (45)
  1. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/PKG-INFO +3 -1
  2. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/README.md +2 -0
  3. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/ign_pdal_tools.egg-info/PKG-INFO +3 -1
  4. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/ign_pdal_tools.egg-info/SOURCES.txt +1 -1
  5. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/pdaltools/_version.py +1 -1
  6. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/pdaltools/las_info.py +16 -0
  7. ign_pdal_tools-1.15.1/pdaltools/replace_area_in_pointcloud.py +213 -0
  8. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/pyproject.toml +0 -1
  9. ign_pdal_tools-1.14.0/test/test_pdal_custom.py → ign_pdal_tools-1.15.1/test/test_pdal.py +3 -2
  10. ign_pdal_tools-1.15.1/test/test_replace_area_in_pointcloud.py +339 -0
  11. ign_pdal_tools-1.14.0/pdaltools/replace_area_in_pointcloud.py +0 -79
  12. ign_pdal_tools-1.14.0/test/test_replace_area_in_pointcloud.py +0 -118
  13. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/LICENSE.md +0 -0
  14. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/ign_pdal_tools.egg-info/dependency_links.txt +0 -0
  15. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/ign_pdal_tools.egg-info/top_level.txt +0 -0
  16. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/pdaltools/add_points_in_pointcloud.py +0 -0
  17. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/pdaltools/color.py +0 -0
  18. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/pdaltools/create_random_laz.py +0 -0
  19. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/pdaltools/download_image.py +0 -0
  20. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/pdaltools/las_add_buffer.py +0 -0
  21. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/pdaltools/las_clip.py +0 -0
  22. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/pdaltools/las_comparison.py +0 -0
  23. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/pdaltools/las_merge.py +0 -0
  24. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/pdaltools/las_remove_dimensions.py +0 -0
  25. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/pdaltools/las_rename_dimension.py +0 -0
  26. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/pdaltools/pcd_info.py +0 -0
  27. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/pdaltools/replace_attribute_in_las.py +0 -0
  28. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/pdaltools/standardize_format.py +0 -0
  29. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/pdaltools/unlock_file.py +0 -0
  30. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/setup.cfg +0 -0
  31. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/test/test_add_points_in_pointcloud.py +0 -0
  32. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/test/test_color.py +0 -0
  33. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/test/test_create_random_laz.py +0 -0
  34. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/test/test_download_image.py +0 -0
  35. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/test/test_las_add_buffer.py +0 -0
  36. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/test/test_las_clip.py +0 -0
  37. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/test/test_las_comparison.py +0 -0
  38. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/test/test_las_info.py +0 -0
  39. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/test/test_las_merge.py +0 -0
  40. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/test/test_las_remove_dimensions.py +0 -0
  41. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/test/test_las_rename_dimension.py +0 -0
  42. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/test/test_pcd_info.py +0 -0
  43. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/test/test_replace_attribute_in_las.py +0 -0
  44. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/test/test_standardize_format.py +0 -0
  45. {ign_pdal_tools-1.14.0 → ign_pdal_tools-1.15.1}/test/test_unlock.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ign-pdal-tools
- Version: 1.14.0
+ Version: 1.15.1
  Summary: Library for common LAS files manipulation with PDAL
  Author-email: Guillaume Liegard <guillaume.liegard@ign.fr>
  Description-Content-Type: text/markdown
@@ -128,3 +128,5 @@ To generate a pip package and deploy it on pypi, use the [Makefile](Makefile) at
  To build a docker image with the library installed: `make docker-build`

  To test the docker image: `make docker-test`
+
+ To build a docker image with a custom version of PDAL: `make docker-build-custom-pdal`; the custom PDAL version is defined in the Makefile.
@@ -119,3 +119,5 @@ To generate a pip package and deploy it on pypi, use the [Makefile](Makefile) at
  To build a docker image with the library installed: `make docker-build`

  To test the docker image: `make docker-test`
+
+ To build a docker image with a custom version of PDAL: `make docker-build-custom-pdal`; the custom PDAL version is defined in the Makefile.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ign-pdal-tools
- Version: 1.14.0
+ Version: 1.15.1
  Summary: Library for common LAS files manipulation with PDAL
  Author-email: Guillaume Liegard <guillaume.liegard@ign.fr>
  Description-Content-Type: text/markdown
@@ -128,3 +128,5 @@ To generate a pip package and deploy it on pypi, use the [Makefile](Makefile) at
  To build a docker image with the library installed: `make docker-build`

  To test the docker image: `make docker-test`
+
+ To build a docker image with a custom version of PDAL: `make docker-build-custom-pdal`; the custom PDAL version is defined in the Makefile.
@@ -34,7 +34,7 @@ test/test_las_merge.py
  test/test_las_remove_dimensions.py
  test/test_las_rename_dimension.py
  test/test_pcd_info.py
- test/test_pdal_custom.py
+ test/test_pdal.py
  test/test_replace_area_in_pointcloud.py
  test/test_replace_attribute_in_las.py
  test/test_standardize_format.py
@@ -1,4 +1,4 @@
- __version__ = "1.14.0"
+ __version__ = "1.15.1"


  if __name__ == "__main__":
@@ -254,3 +254,19 @@ def get_epsg_from_las(filename: str) -> str:
      return None # Return None if CRS is not defined
      epsg_code = crs.to_epsg()
      return f"EPSG:{epsg_code}" if epsg_code else None
+
+
+ def list_dims(las_filename):
+     """List the dimension names of a LAS/LAZ file
+
+     Args:
+         las_filename (str): path to the LAS/LAZ file
+
+     Returns:
+         List[str]: dimension names
+     """
+     pipeline = pdal.Pipeline()
+     pipeline |= pdal.Reader.las(filename=las_filename)
+     pipeline.execute()
+
+     return list(pipeline.arrays[0].dtype.fields.keys())
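A minimal usage sketch of the new helper added above (the input path is a placeholder):

```python
from pdaltools.las_info import list_dims

# Placeholder path; any LAS/LAZ file works.
dims = list_dims("input.laz")
print(dims)  # e.g. ['X', 'Y', 'Z', 'Intensity', ...] plus any extra dims
```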
@@ -0,0 +1,213 @@
+ import argparse
+ import warnings
+
+ import numpy as np
+ import pdal
+ from numpy.lib import recfunctions as rfn
+ from osgeo import gdal
+
+ from pdaltools.las_info import get_writer_parameters_from_reader_metadata
+
+
+ def argument_parser():
+     parser = argparse.ArgumentParser(
+         "Replace points in a pointcloud, based on an area. "
+         "Source may come from another pointcloud (command from_cloud), "
+         "or may be derived from a digital surface model (command from_DSM).\n"
+     )
+     subparsers = parser.add_subparsers(required=True)
+
+     # first command is 'from_cloud'
+     from_cloud = subparsers.add_parser("from_cloud", help="Source is a point cloud")
+     from_cloud.add_argument("--source_cloud", "-s", required=True, type=str, help="path of source point cloud")
+     add_common_options(from_cloud)
+     from_cloud.set_defaults(func=from_cloud_func)
+
+     # second command is 'from_DSM'
+     from_DSM = subparsers.add_parser("from_DSM", help="Source is a digital surface model (DSM)")
+     from_DSM.add_argument(
+         "--source_dsm",
+         "-d",
+         required=True,
+         type=str,
+         help="path of the source digital surface model (DSM), used to generate source points",
+     )
+     from_DSM.add_argument(
+         "--source_ground_area",
+         "-g",
+         required=True,
+         type=str,
+         help=(
+             "area of the ground, used to intersect the source cloud "
+             "(shapefile, geojson or other format readable by GDAL)"
+         ),
+     )
+     from_DSM.add_argument(
+         "--source_classification",
+         "-c",
+         required=True,
+         type=int,
+         help="classification to apply to the points extracted from the DSM",
+     )
+     add_common_options(from_DSM)
+     from_DSM.set_defaults(func=from_DSM_func)
+
+     return parser
+
+
+ def add_common_options(parser):
+     parser.add_argument(
+         "--source_pdal_filter", "-f", type=str, help="pdal filter expression to apply to the source point cloud"
+     )
+     parser.add_argument("--target_cloud", "-t", type=str, required=True, help="path of target cloud to be modified")
+     parser.add_argument(
+         "--replacement_area",
+         "-r",
+         required=True,
+         type=str,
+         help="area to replace (shapefile, geojson or other format readable by GDAL)",
+     )
+     parser.add_argument("--output_cloud", "-o", required=True, type=str, help="output cloud file")
+
+
+ def from_cloud_func(args):
+     replace_area(
+         target_cloud=args.target_cloud,
+         pipeline_source=pipeline_read_from_cloud(args.source_cloud),
+         replacement_area=args.replacement_area,
+         output_cloud=args.output_cloud,
+         source_pdal_filter=args.source_pdal_filter,
+     )
+
+
+ def from_DSM_func(args):
+     replace_area(
+         target_cloud=args.target_cloud,
+         pipeline_source=pipeline_read_from_DSM(
+             dsm=args.source_dsm, ground_area=args.source_ground_area, classification=args.source_classification
+         ),
+         replacement_area=args.replacement_area,
+         output_cloud=args.output_cloud,
+         source_pdal_filter=args.source_pdal_filter,
+     )
+
+
+ def get_writer_params(input_file):
+     pipeline = pdal.Pipeline()
+     pipeline |= pdal.Reader.las(filename=input_file)
+     pipeline.execute()
+     params = get_writer_parameters_from_reader_metadata(pipeline.metadata)
+     return params
+
+
+ def pipeline_read_from_cloud(filename):
+     pipeline_source = pdal.Pipeline()
+     pipeline_source |= pdal.Reader.las(filename=filename)
+     return pipeline_source
+
+
+ def pipeline_read_from_DSM(dsm, ground_area, classification):
+     # get the nodata value of the DSM
+     ds = gdal.Open(dsm)
+     band = ds.GetRasterBand(1)
+     nodata_value = band.GetNoDataValue()
+     ds.Close()
+
+     pipeline = pdal.Pipeline()
+     pipeline |= pdal.Reader.gdal(filename=dsm, header="Z")
+     pipeline |= pdal.Filter.expression(expression=f"Z != {nodata_value}")
+
+     pipeline |= pdal.Filter.ferry(dimensions="=> geometryFid")
+     pipeline |= pdal.Filter.assign(assignment="geometryFid[:]=-1")
+     pipeline |= pdal.Filter.overlay(column="fid", dimension="geometryFid", datasource=ground_area)
+     # Keep only points in the area
+     pipeline |= pdal.Filter.expression(expression="geometryFid>=0")
+
+     # assign the requested classification
+     pipeline |= pdal.Filter.ferry(dimensions="=>Classification")
+     pipeline |= pdal.Filter.assign(assignment=f"Classification[:]={classification}")
+
+     return pipeline
+
+
+ def replace_area(
+     target_cloud, pipeline_source, replacement_area, output_cloud, source_pdal_filter="", target_pdal_filter=""
+ ):
+     crops = []
+     # pipeline to read target_cloud and remove points inside the polygon
+     pipeline_target = pdal.Pipeline()
+     pipeline_target |= pdal.Reader.las(filename=target_cloud)
+     pipeline_target |= pdal.Filter.ferry(dimensions="=> geometryFid")
+     # Assign -1 to all points because overlay only writes values of 0 and above
+     pipeline_target |= pdal.Filter.assign(assignment="geometryFid[:]=-1")
+     if target_pdal_filter:
+         pipeline_target |= pdal.Filter.expression(expression=target_pdal_filter)
+     pipeline_target |= pdal.Filter.overlay(column="fid", dimension="geometryFid", datasource=replacement_area)
+     # Keep only points out of the area
+     pipeline_target |= pdal.Filter.expression(expression="geometryFid==-1", tag="A")
+     pipeline_target.execute()
+
+     # get the input dimensions dtype from the target
+     if pipeline_target.arrays:
+         input_dim_dtype = pipeline_target.arrays[0].dtype
+     else:
+         # re-read the LAS only if the previous pipeline cannot provide dimensions (empty output)
+         pipeline_target2 = pdal.Pipeline()
+         pipeline_target2 |= pdal.Reader.las(filename=target_cloud)
+         pipeline_target2.execute()
+         input_dim_dtype = pipeline_target2.arrays[0].dtype
+
+     # get the input dimension names
+     input_dimensions = list(input_dim_dtype.fields.keys())
+
+     # do not keep geometryFid
+     output_dimensions = [dim for dim in input_dimensions if dim != "geometryFid"]
+
+     # add the target to the result after keeping only the expected dimensions
+     if pipeline_target.arrays:
+         target_cloud_pruned = pipeline_target.arrays[0][output_dimensions]
+         crops.append(target_cloud_pruned)
+
+     # pipeline to read source_cloud and remove points outside the polygon
+     pipeline_source |= pdal.Filter.ferry(dimensions="=> geometryFid")
+     pipeline_source |= pdal.Filter.assign(assignment="geometryFid[:]=-1")
+     if source_pdal_filter:
+         pipeline_source |= pdal.Filter.expression(expression=source_pdal_filter)
+     pipeline_source |= pdal.Filter.overlay(column="fid", dimension="geometryFid", datasource=replacement_area)
+     # Keep only points in the area
+     pipeline_source |= pdal.Filter.expression(expression="geometryFid>=0", tag="B")
+     pipeline_source.execute()
+
+     # add the source to the result
+     if pipeline_source.arrays:
+         # if needed, add dimensions to the source so that it has the same dimensions as the target cloud
+         # we do that in numpy (instead of a PDAL filter) to keep the dimension types
+         source_cloud_crop = pipeline_source.arrays[0]
+         nb_points = source_cloud_crop.shape[0]
+         source_dims = source_cloud_crop.dtype.fields.keys()
+         for dim_name, dim_type in input_dim_dtype.fields.items():
+             if dim_name not in source_dims:
+                 source_cloud_crop = rfn.append_fields(
+                     base=source_cloud_crop,
+                     names=dim_name,
+                     data=np.zeros(nb_points, dtype=dim_type[0]),
+                     dtypes=dim_type[0],
+                 )
+
+         source_cloud_pruned = source_cloud_crop[output_dimensions]
+         crops.append(source_cloud_pruned)
+
+     # Merge
+     if not crops:
+         warnings.warn("WARNING: Empty LAS, extra dims are lost")
+
+     pipeline = pdal.Filter.merge().pipeline(*crops)
+
+     writer_params = get_writer_params(target_cloud)
+     pipeline |= pdal.Writer.las(filename=output_cloud, **writer_params)
+     pipeline.execute()
+
+
+ if __name__ == "__main__":
+     args = argument_parser().parse_args()
+     args.func(args)
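For orientation, a minimal sketch of calling the new module programmatically rather than through the CLI. The keyword signature is taken from the hunk above; the file paths are placeholders:

```python
from pdaltools.replace_area_in_pointcloud import (
    pipeline_read_from_cloud,
    replace_area,
)

# Placeholder paths: any LAS/LAZ clouds plus a polygon file readable by GDAL.
replace_area(
    target_cloud="target.laz",  # cloud whose points inside the area are dropped
    pipeline_source=pipeline_read_from_cloud("source.laz"),  # cloud providing replacement points
    replacement_area="replace_area.geojson",  # polygon(s) defining the replaced area
    output_cloud="replaced.laz",
    source_pdal_filter="Classification==2",  # optional PDAL expression applied to the source
)
```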
@@ -13,7 +13,6 @@ packages = ["pdaltools"]
  [tool.pytest.ini_options]
  markers = [
      "geopf: marks tests that request the (sometimes unreliable) data.geopf.fr",
-     "pdal_custom: marks tests that only work with PDAL compiled on a custom fork and branch",
  ]

  [tool.black]
@@ -5,10 +5,11 @@ import pytest

  TEST_PATH = os.path.dirname(os.path.abspath(__file__))

+ # this test file gathers some tests of PDAL features
+ # it allows us to check that the PDAL version used in the library is modern enough

- # this test only works with PDAL compiled on a custom fork and branch, so we mark it to avoid running it.
- @pytest.mark.pdal_custom
  def test_pdal_read_severals_extra_dims():
+     # test that we can read a las file with several extra dims
      test_file = os.path.join(TEST_PATH, "data/las_with_several_extra_byte_bloc.laz")

      pipeline = pdal.Reader.las(filename=test_file).pipeline()
@@ -0,0 +1,339 @@
+ import os
+ import shutil
+ import test.utils as tu
+
+ import laspy
+ import numpy as np
+ import pdal
+ import pytest
+
+ from pdaltools.count_occurences.count_occurences_for_attribute import (
+     compute_count_one_file,
+ )
+ from pdaltools.las_info import list_dims
+ from pdaltools.replace_area_in_pointcloud import (
+     argument_parser,
+     pipeline_read_from_cloud,
+     pipeline_read_from_DSM,
+     replace_area,
+ )
+
+ TEST_PATH = os.path.dirname(os.path.abspath(__file__))
+ TMP_PATH = os.path.join(TEST_PATH, "tmp/replace_area_in_pointcloud")
+ INPUT_DIR = os.path.join(TEST_PATH, "data/replace_area_in_pointcloud")
+
+ TARGET_CLOUD = os.path.join(INPUT_DIR, "target_cloud_crop.laz")
+ REPLACE_AREA = os.path.join(INPUT_DIR, "replace_area.geojson")
+
+ # source may be a cloud
+ SOURCE_CLOUD = os.path.join(INPUT_DIR, "source_cloud_crop.laz")
+
+ # source may be a digital surface model
+ SOURCE_DSM = os.path.join(INPUT_DIR, "DSM.tif")
+ SOURCE_GROUND_AREA = os.path.join(INPUT_DIR, "ground_area.geojson")
+ SOURCE_CLASSIF = 68
+
+ TMP_EXTRA_DIMS = os.path.join(TMP_PATH, "input_with_extra_dims")
+ TARGET_EXTRA_DIM = os.path.join(TMP_EXTRA_DIMS, "target_cloud_crop_extra_dim.laz")
+
+
+ def setup_module(module):
+     try:
+         shutil.rmtree(TMP_PATH)
+
+     except FileNotFoundError:
+         pass
+     os.mkdir(TMP_PATH)
+
+     # the target file with extra dims is used in several tests.
+     generate_target_extra_dim()
+
+
+ def generate_extra_dim(input, output, new_dims):
+     pipeline = pdal.Pipeline() | pdal.Reader.las(input)
+     for dim in new_dims.keys():
+         pipeline |= pdal.Filter.ferry(dimensions=f"=>{dim}")
+         pipeline |= pdal.Filter.assign(assignment=f"{dim}[:]=1")
+
+     extra_dims = ",".join(f"{k}={v}" for k, v in new_dims.items())
+     pipeline |= pdal.Writer.las(output, forward="all", extra_dims=extra_dims)
+     pipeline.execute()
+
+
+ def generate_target_extra_dim():
+     """generate a target cloud with extra dims 'target' and 'target2', both set to 1"""
+
+     os.makedirs(TMP_EXTRA_DIMS)
+     generate_extra_dim(input=TARGET_CLOUD, output=TARGET_EXTRA_DIM, new_dims={"target": "uint16", "target2": "uint8"})
+
+     target_dims = list_dims(TARGET_EXTRA_DIM)
+     assert "target" in target_dims, "target should have 'target' dimension"
+     assert "target2" in target_dims, "target should have 'target2' dimension"
+
+
+ # Utils functions
+ def get_nb_points(path):
+     """Get number of points in a las file"""
+     with laspy.open(path) as f:
+         nb_points = f.header.point_count
+
+     return nb_points
+
+
+ def test_replace_area_base():
+     output_file = os.path.join(TMP_PATH, "test_replace_area", "replaced.laz")
+     os.makedirs(os.path.dirname(output_file))
+
+     replace_area(
+         target_cloud=TARGET_CLOUD,
+         pipeline_source=pipeline_read_from_cloud(SOURCE_CLOUD),
+         replacement_area=REPLACE_AREA,
+         output_cloud=output_file,
+     )
+     # Check that we have the expected number of points in the output
+     assert get_nb_points(output_file) == 6461
+
+     # Check that there are only points from the target pointcloud outside the replacement geometry
+     output_file_outside_geometry = os.path.join(os.path.dirname(output_file), "replaced_outside_area.laz")
+     pipeline = pdal.Pipeline()
+     pipeline |= pdal.Reader.las(filename=output_file)
+     pipeline |= pdal.Filter.ferry(dimensions="=> geometryFid")
+     pipeline |= pdal.Filter.assign(assignment="geometryFid[:]=-1")
+     pipeline |= pdal.Filter.overlay(column="fid", dimension="geometryFid", datasource=REPLACE_AREA)
+     pipeline |= pdal.Filter.expression(expression="geometryFid==-1")
+     pipeline |= pdal.Writer.las(filename=output_file_outside_geometry)
+     pipeline.execute()
+     counts_outside_area = compute_count_one_file(output_file_outside_geometry, "Classification")
+     assert counts_outside_area == {"1": 3841, "2": 2355}
+
+     # Check that there are only points from the source pointcloud inside the replacement geometry
+     output_file_in_area = os.path.join(os.path.dirname(output_file), "replaced_in_area.laz")
+     pipeline = pdal.Pipeline()
+     pipeline |= pdal.Reader.las(filename=output_file)
+     pipeline |= pdal.Filter.ferry(dimensions="=> geometryFid")
+     pipeline |= pdal.Filter.assign(assignment="geometryFid[:]=-1")
+     pipeline |= pdal.Filter.overlay(column="fid", dimension="geometryFid", datasource=REPLACE_AREA)
+     pipeline |= pdal.Filter.expression(expression="geometryFid>=0")
+     pipeline |= pdal.Writer.las(filename=output_file_in_area)
+     pipeline.execute()
+     counts_in_area = compute_count_one_file(output_file_in_area, "Classification")
+
+     assert counts_in_area == {"0": 265}
+
+     # Check output dimensions are the same as input dimensions
+     target_dimensions = tu.get_pdal_infos_summary(TARGET_CLOUD)["summary"]["dimensions"].split(",")
+     output_dimensions = tu.get_pdal_infos_summary(output_file)["summary"]["dimensions"].split(",")
+
+     assert output_dimensions == target_dimensions
+
+
+ def test_replace_area_with_target_filter():
+     filter = "Classification==2"
+     output_file = os.path.join(TMP_PATH, "test_replace_with_target_filter", "replaced.laz")
+     os.makedirs(os.path.dirname(output_file))
+     replace_area(
+         target_cloud=TARGET_CLOUD,
+         pipeline_source=pipeline_read_from_cloud(SOURCE_CLOUD),
+         replacement_area=REPLACE_AREA,
+         output_cloud=output_file,
+         target_pdal_filter=filter,
+     )
+     assert get_nb_points(output_file) == 2620
+
+
+ def test_replace_area_with_source_filter():
+     filter = "Z>=2550"
+     output_file = os.path.join(TMP_PATH, "test_replace_with_source_filter", "replaced.laz")
+     os.makedirs(os.path.dirname(output_file))
+     replace_area(
+         target_cloud=TARGET_CLOUD,
+         pipeline_source=pipeline_read_from_cloud(SOURCE_CLOUD),
+         replacement_area=REPLACE_AREA,
+         output_cloud=output_file,
+         source_pdal_filter=filter,
+     )
+     assert get_nb_points(output_file) == 6390
+
+
+ def test_replace_area_two_datasources():
+     target_file_class1 = os.path.join(INPUT_DIR, "target_cloud_crop_class1.laz")  # Classification=2
+     source_file_class2 = os.path.join(INPUT_DIR, "source_cloud_crop_class2.laz")  # Classification=1
+     output_file = os.path.join(TMP_PATH, "test_replace_two_datasources", "replaced.laz")
+     os.makedirs(os.path.dirname(output_file))
+     replace_area(
+         target_cloud=target_file_class1,
+         pipeline_source=pipeline_read_from_cloud(source_file_class2),
+         replacement_area=REPLACE_AREA,
+         output_cloud=output_file,
+     )
+
+     # Check if there is data from both input sources
+     counts = compute_count_one_file(output_file, "Classification")
+
+     assert counts == {"1": 6196, "2": 265}
+
+
+ def test_replace_area_extra_dims():
+     tmp_extra_dim = os.path.join(TMP_PATH, "test_replace_extra_dims")
+     os.makedirs(tmp_extra_dim)
+
+     # generate a source cloud with an extra dim 'source'
+     source_file_extra_dim = os.path.join(tmp_extra_dim, "source_cloud_crop_extra_dim.laz")
+     generate_extra_dim(input=SOURCE_CLOUD, output=source_file_extra_dim, new_dims={"source": "float"})
+
+     source_dims = list_dims(source_file_extra_dim)
+     assert "source" in source_dims, "source should have 'source' dimension"
+     assert "target" not in source_dims, "source should not have 'target' dimension"
+
+     output_file = os.path.join(tmp_extra_dim, "replaced.laz")
+
+     replace_area(
+         target_cloud=TARGET_EXTRA_DIM,
+         pipeline_source=pipeline_read_from_cloud(source_file_extra_dim),
+         replacement_area=REPLACE_AREA,
+         output_cloud=output_file,
+     )
+
+     replaced_dims = list_dims(output_file)
+
+     assert "target" in replaced_dims  # dimension from target cloud
+     assert "target2" in replaced_dims  # dimension from target cloud
+     assert "source" not in replaced_dims  # dimension from source cloud should not be kept
+
+     # check dimension dtypes
+     las = laspy.read(output_file)
+     assert las["target"].dtype == np.uint16
+     assert las["target2"].dtype == np.uint8
+
+
+ def test_replace_area_with_no_point_on_target():
+     output_file = os.path.join(TMP_PATH, "test_replace_area_no_point_on_target", "replaced.laz")
+     os.makedirs(os.path.dirname(output_file))
+
+     replace_area(
+         target_cloud=TARGET_EXTRA_DIM,
+         pipeline_source=pipeline_read_from_cloud(SOURCE_CLOUD),
+         replacement_area=REPLACE_AREA,
+         output_cloud=output_file,
+         target_pdal_filter="Classification==3",
+     )
+     assert get_nb_points(output_file) == 265
+
+     # check dimension dtypes
+     las = laspy.read(output_file)
+     assert las["target"].dtype == np.uint16
+     assert las["target2"].dtype == np.uint8
+
+
+ def test_replace_area_with_no_point_on_source():
+     output_file = os.path.join(TMP_PATH, "test_replace_area_no_point_on_source", "replaced.laz")
+     os.makedirs(os.path.dirname(output_file))
+
+     replace_area(
+         target_cloud=TARGET_EXTRA_DIM,
+         pipeline_source=pipeline_read_from_cloud(SOURCE_CLOUD),
+         replacement_area=REPLACE_AREA,
+         output_cloud=output_file,
+         source_pdal_filter="Classification==3",
+     )
+     assert get_nb_points(output_file) == 6196
+
+     # check dimension dtypes
+     las = laspy.read(output_file)
+     assert las["target"].dtype == np.uint16
+     assert las["target2"].dtype == np.uint8
+
+
+ @pytest.mark.filterwarnings("ignore")
+ def test_replace_area_with_no_output_point_base():
+     output_file = os.path.join(TMP_PATH, "test_replace_area_no_point_at_all", "replaced.laz")
+     os.makedirs(os.path.dirname(output_file))
+
+     replace_area(
+         target_cloud=TARGET_CLOUD,
+         pipeline_source=pipeline_read_from_cloud(SOURCE_CLOUD),
+         replacement_area=REPLACE_AREA,
+         output_cloud=output_file,
+         source_pdal_filter="Classification==3",
+         target_pdal_filter="Classification==3",
+     )
+     assert get_nb_points(output_file) == 0
+
+
+ @pytest.mark.xfail(reason="when PDAL writes an empty LAS, extra dims are not written")
+ @pytest.mark.filterwarnings("ignore")
+ def test_replace_area_with_no_output_point_with_extra_dims():
+     output_file = os.path.join(TMP_PATH, "test_replace_area_no_point_at_all_with_extra_dims", "replaced.laz")
+     os.makedirs(os.path.dirname(output_file))
+
+     replace_area(
+         target_cloud=TARGET_EXTRA_DIM,
+         pipeline_source=pipeline_read_from_cloud(SOURCE_CLOUD),
+         replacement_area=REPLACE_AREA,
+         output_cloud=output_file,
+         source_pdal_filter="Classification==3",
+         target_pdal_filter="Classification==3",
+     )
+     assert get_nb_points(output_file) == 0
+
+     # check dimension dtypes
+     las = laspy.read(output_file)
+     assert las["target"].dtype == np.uint16
+     assert las["target2"].dtype == np.uint8
+
+
+ def test_pipeline_read_from_DSM():
+     cloud_from_DSM = os.path.join(TMP_PATH, "las_from_DSM.laz")
+
+     pipeline = pipeline_read_from_DSM(dsm=SOURCE_DSM, ground_area=SOURCE_GROUND_AREA, classification=SOURCE_CLASSIF)
+     pipeline |= pdal.Writer.las(cloud_from_DSM, forward="all", extra_dims="all")
+     pipeline.execute()
+
+     # we have 27 columns of points over 3 rows
+     num_points = get_nb_points(cloud_from_DSM)
+     assert num_points == 27 * 3
+
+     # points are assigned class 68
+     counts = compute_count_one_file(cloud_from_DSM, "Classification")
+     assert counts == {str(SOURCE_CLASSIF): 27 * 3}
+
+     # nodata values have been filtered out
+     stats_Z = compute_count_one_file(cloud_from_DSM, "Z", type=float)
+     assert stats_Z["-9999.0"] == 0
+
+
+ def test_main_from_cloud_base():
+     output_file = os.path.join(TMP_PATH, "main_from_cloud", "output_main_from_cloud.laz")
+     os.makedirs(os.path.dirname(output_file))
+     cmd = f"from_cloud -s {SOURCE_CLOUD} -t {TARGET_CLOUD} -r {REPLACE_AREA} -o {output_file}".split()
+     args = argument_parser().parse_args(cmd)
+     args.func(args)
+
+     # Check that we have the expected number of points in the output
+     assert get_nb_points(output_file) == 6461
+
+
+ def test_main_from_cloud_with_filter():
+     output_file = os.path.join(TMP_PATH, "main_from_cloud_with_filter", "output_main_from_cloud.laz")
+     os.makedirs(os.path.dirname(output_file))
+     cmd = (f"from_cloud -s {SOURCE_CLOUD} -t {TARGET_CLOUD} -r {REPLACE_AREA} -o {output_file} " "-f Z>=2550").split()
+
+     args = argument_parser().parse_args(cmd)
+     args.func(args)
+
+     # Check that we have the expected number of points in the output
+     assert get_nb_points(output_file) == 6390
+
+
+ def test_main_from_DSM():
+     output_file = os.path.join(TMP_PATH, "output_main_from_cloud.laz")
+     cmd = (
+         f"from_DSM -d {SOURCE_DSM} -g {SOURCE_GROUND_AREA} -c {SOURCE_CLASSIF} -t {TARGET_CLOUD} -r {REPLACE_AREA}"
+         f" -o {output_file}"
+     ).split()
+     args = argument_parser().parse_args(cmd)
+     args.func(args)
+
+     # same result as test_from_DSM
+     counts = compute_count_one_file(output_file, "Classification")
+     assert counts == {"1": 3841, "2": 2355, str(SOURCE_CLASSIF): 45}
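The same operations are exposed on the command line through the module's argument parser; a sketch mirroring the tests above, with placeholder paths:

```python
from pdaltools.replace_area_in_pointcloud import argument_parser

# Placeholder paths; the flags are the ones defined by the new module's parser.
args = argument_parser().parse_args(
    "from_DSM -d DSM.tif -g ground_area.geojson -c 68 "
    "-t target.laz -r replace_area.geojson -o replaced.laz".split()
)
args.func(args)  # runs the from_DSM replacement end to end
```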
@@ -1,79 +0,0 @@
- import argparse
-
- import pdal
-
- from pdaltools.las_info import get_writer_parameters_from_reader_metadata
-
-
- def parse_args():
-     parser = argparse.ArgumentParser(
-         "Replace points in a pointcloud with points from another pointcloud based on a area"
-     )
-     parser.add_argument("--target_cloud", "-t", type=str, help="filepath of target cloud to be modified")
-     parser.add_argument("--source_cloud", "-s", type=str, help="filepath of source cloud to use for replacement")
-     parser.add_argument("--replacement_area_file", "-r", type=str, help="filepath of file containing areas to replace")
-     parser.add_argument("--filter", "-f", type=str, help="pdal filter expression to apply to target_cloud")
-     parser.add_argument("--outfile", "-o", type=str, help="output file")
-     return parser.parse_args()
-
-
- def get_writer_params(input_file):
-     pipeline = pdal.Pipeline()
-     pipeline |= pdal.Reader.las(filename=input_file)
-     pipeline.execute()
-     params = get_writer_parameters_from_reader_metadata(pipeline.metadata)
-     return params
-
-
- def replace_area(target_cloud, source_cloud, replacement_area_file, outfile, writer_params, filter=""):
-     crops = []
-     pipeline_target = pdal.Pipeline()
-     pipeline_target |= pdal.Reader.las(filename=target_cloud)
-     pipeline_target |= pdal.Filter.ferry(dimensions="=> geometryFid")
-     # Assign -1 to all points because overlay replaces values from 0 and more
-     pipeline_target |= pdal.Filter.assign(assignment="geometryFid[:]=-1")
-     if filter:
-         pipeline_target |= pdal.Filter.expression(expression=filter)
-     pipeline_target |= pdal.Filter.overlay(column="fid", dimension="geometryFid", datasource=replacement_area_file)
-     # Keep only points out of the area
-     pipeline_target |= pdal.Filter.expression(expression="geometryFid==-1", tag="A")
-     pipeline_target.execute()
-
-     input_dimensions = list(pipeline_target.arrays[0].dtype.fields.keys())
-     # do not keep geometryFid
-     output_dimensions = [dim for dim in input_dimensions if dim not in "geometryFid"]
-     target_cloud_pruned = pipeline_target.arrays[0][output_dimensions]
-     crops.append(target_cloud_pruned)
-
-     pipeline_source = pdal.Pipeline()
-     pipeline_source |= pdal.Reader.las(filename=source_cloud)
-     pipeline_source |= pdal.Filter.ferry(dimensions="=> geometryFid")
-     pipeline_source |= pdal.Filter.assign(assignment="geometryFid[:]=-1")
-     pipeline_source |= pdal.Filter.overlay(column="fid", dimension="geometryFid", datasource=replacement_area_file)
-     # Keep only points in the area
-     pipeline_source |= pdal.Filter.expression(expression="geometryFid>=0", tag="B")
-     pipeline_source.execute()
-
-     # delete geometryFid from source_cloud
-     source_cloud_pruned = pipeline_source.arrays[0][output_dimensions]
-     crops.append(source_cloud_pruned)
-
-     # Merge
-     pipeline = pdal.Filter.merge().pipeline(*crops)
-     pipeline |= pdal.Writer.las(filename=outfile, **writer_params)
-     pipeline.execute()
-
-
- def main():
-     args = parse_args()
-
-     writer_parameters = get_writer_params(args.target_cloud)
-     # writer_parameters["extra_dims"] = ""  # no extra-dim by default
-
-     replace_area(
-         args.target_cloud, args.source_cloud, args.replacement_area_file, args.outfile, writer_parameters, args.filter
-     )
-
-
- if __name__ == "__main__":
-     main()
@@ -1,118 +0,0 @@
- import os
- import shutil
- import test.utils as tu
-
- import laspy
- import pdal
-
- from pdaltools.count_occurences.count_occurences_for_attribute import (
-     compute_count_one_file,
- )
- from pdaltools.replace_area_in_pointcloud import get_writer_params, replace_area
-
- TEST_PATH = os.path.dirname(os.path.abspath(__file__))
- TMP_PATH = os.path.join(TEST_PATH, "tmp/replace_area_in_pointcloud")
- INPUT_DIR = os.path.join(TEST_PATH, "data/replace_area_in_pointcloud")
- TARGET_FILE = os.path.join(INPUT_DIR, "target_cloud_crop.laz")
- SOURCE_FILE = os.path.join(INPUT_DIR, "source_cloud_crop.laz")
- SHAPEFILE = os.path.join(INPUT_DIR, "ground_area.shp")
- WRITER_PARAMS = get_writer_params(TARGET_FILE)
-
-
- def setup_module(module):
-     try:
-         shutil.rmtree(TMP_PATH)
-
-     except FileNotFoundError:
-         pass
-     os.mkdir(TMP_PATH)
-
-
- # Utils functions
- def get_nb_points(path):
-     """Get number of points in a las file"""
-     with laspy.open(path) as f:
-         nb_points = f.header.point_count
-
-     return nb_points
-
-
- def test_replace_area():
-     output_file = os.path.join(TMP_PATH, "test_replace_area", "replaced.laz")
-     os.makedirs(os.path.dirname(output_file))
-     replace_area(TARGET_FILE, SOURCE_FILE, SHAPEFILE, output_file, WRITER_PARAMS)
-     # Check that we have the expected number of points in the output
-     assert get_nb_points(output_file) == 6461
-
-     # Check that there are only points from the target pointcloud outside the replacement geometry
-     output_file_outside_geometry = os.path.join(os.path.dirname(output_file), "replaced_outside_area.laz")
-     pipeline = pdal.Pipeline()
-     pipeline |= pdal.Reader.las(filename=output_file)
-     pipeline |= pdal.Filter.ferry(dimensions="=> geometryFid")
-     pipeline |= pdal.Filter.assign(assignment="geometryFid[:]=-1")
-     pipeline |= pdal.Filter.overlay(column="fid", dimension="geometryFid", datasource=SHAPEFILE)
-     pipeline |= pdal.Filter.expression(expression="geometryFid==-1")
-     pipeline |= pdal.Writer.las(filename=output_file_outside_geometry)
-     pipeline.execute()
-     counts_outside_area = compute_count_one_file(output_file_outside_geometry, "Classification")
-     assert counts_outside_area == {"1": 3841, "2": 2355}
-
-     # Check that there are only points from the source pointcloud inside the replacement geometry
-     output_file_in_area = os.path.join(os.path.dirname(output_file), "replaced_in_area.laz")
-     pipeline = pdal.Pipeline()
-     pipeline |= pdal.Reader.las(filename=output_file)
-     pipeline |= pdal.Filter.ferry(dimensions="=> geometryFid")
-     pipeline |= pdal.Filter.assign(assignment="geometryFid[:]=-1")
-     pipeline |= pdal.Filter.overlay(column="fid", dimension="geometryFid", datasource=SHAPEFILE)
-     pipeline |= pdal.Filter.expression(expression="geometryFid>=0")
-     pipeline |= pdal.Writer.las(filename=output_file_in_area)
-     pipeline.execute()
-     counts_in_area = compute_count_one_file(output_file_in_area, "Classification")
-
-     assert counts_in_area == {"0": 265}
-
-     # Check output dimensions are the same as input dimensions
-     target_dimensions = tu.get_pdal_infos_summary(TARGET_FILE)["summary"]["dimensions"].split(",")
-     output_dimensions = tu.get_pdal_infos_summary(output_file)["summary"]["dimensions"].split(",")
-
-     assert output_dimensions == target_dimensions
-
-
- def test_replace_with_filter():
-     filter = "Classification==2"
-     output_file = os.path.join(TMP_PATH, "test_replace_with_filter", "replaced.laz")
-     os.makedirs(os.path.dirname(output_file))
-     replace_area(TARGET_FILE, SOURCE_FILE, SHAPEFILE, output_file, WRITER_PARAMS, filter)
-     assert get_nb_points(output_file) == 2620
-
-
- def test_replace_two_datasources():
-     target_file_class1 = os.path.join(INPUT_DIR, "target_cloud_crop_class1.laz")  # Classification=2
-     source_file_class2 = os.path.join(INPUT_DIR, "source_cloud_crop_class2.laz")  # Classification=1
-     writer_params = get_writer_params(target_file_class1)
-     output_file = os.path.join(TMP_PATH, "test_replace_two_datasources", "replaced.laz")
-     os.makedirs(os.path.dirname(output_file))
-     replace_area(target_file_class1, source_file_class2, SHAPEFILE, output_file, writer_params)
-
-     # Check if there is data from both input sources
-     counts = compute_count_one_file(output_file, "Classification")
-
-     assert counts == {"1": 6196, "2": 265}
-
-
- def test_replace_extra_dims():
-     target_file_extra_dim = os.path.join(INPUT_DIR, "target_cloud_crop_extra_dim.laz")  # has target extra dimension
-     source_file_extra_dim = os.path.join(INPUT_DIR, "source_cloud_crop_extra_dim.laz")  # has source extra dimension
-     writer_params = get_writer_params(target_file_extra_dim)
-     output_file = os.path.join(TMP_PATH, "test_replace_extra_dims", "replaced.laz")
-     os.makedirs(os.path.dirname(output_file))
-     replace_area(target_file_extra_dim, source_file_extra_dim, SHAPEFILE, output_file, writer_params)
-
-     pipeline = pdal.Pipeline()
-     pipeline |= pdal.Reader.las(filename=output_file)
-     pipeline.execute()
-
-     output_dimensions = list(pipeline.arrays[0].dtype.fields.keys())
-
-     assert "target" in output_dimensions  # dimension from target cloud
-     assert "source" not in output_dimensions  # dimension from source cloud should not be kept