ign-pdal-tools 1.14.0__py3-none-any.whl → 1.15.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ign_pdal_tools-1.14.0.dist-info → ign_pdal_tools-1.15.1.dist-info}/METADATA +3 -1
- {ign_pdal_tools-1.14.0.dist-info → ign_pdal_tools-1.15.1.dist-info}/RECORD +8 -8
- pdaltools/_version.py +1 -1
- pdaltools/las_info.py +16 -0
- pdaltools/replace_area_in_pointcloud.py +168 -34
- {ign_pdal_tools-1.14.0.dist-info → ign_pdal_tools-1.15.1.dist-info}/WHEEL +0 -0
- {ign_pdal_tools-1.14.0.dist-info → ign_pdal_tools-1.15.1.dist-info}/licenses/LICENSE.md +0 -0
- {ign_pdal_tools-1.14.0.dist-info → ign_pdal_tools-1.15.1.dist-info}/top_level.txt +0 -0
{ign_pdal_tools-1.14.0.dist-info → ign_pdal_tools-1.15.1.dist-info}/METADATA CHANGED

```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ign-pdal-tools
-Version: 1.14.0
+Version: 1.15.1
 Summary: Library for common LAS files manipulation with PDAL
 Author-email: Guillaume Liegard <guillaume.liegard@ign.fr>
 Description-Content-Type: text/markdown
@@ -128,3 +128,5 @@ To generate a pip package and deploy it on pypi, use the [Makefile](Makefile) at
 To build a docker image with the library installed: `make docker-build`

 To test the docker image: `make docker-test`
+
+To build a docker image with a custom version of PDAL: `make docker-build-custom-pdal` ; the custom version is defined in the Makefile (see Makefile for details)
```
{ign_pdal_tools-1.14.0.dist-info → ign_pdal_tools-1.15.1.dist-info}/RECORD CHANGED

```diff
@@ -1,5 +1,5 @@
-ign_pdal_tools-1.
-pdaltools/_version.py,sha256=
+ign_pdal_tools-1.15.1.dist-info/licenses/LICENSE.md,sha256=iVzCFZTUXeiqP8bP474iuWZiWO_kDCD4SPh1Wiw125Y,1120
+pdaltools/_version.py,sha256=Ifg5508wmTI_52FvVJqD1IBETM-8iJe8gIBnxxVL1UI,75
 pdaltools/add_points_in_pointcloud.py,sha256=VM2HW2b1Ul_I8jtXaOpTsmyGjiEFgoSi8AmCLuj6gH8,12697
 pdaltools/color.py,sha256=s-_rmLK6fIK3UwkUzHVZPEkm6r1LliG5ftGr-jkqyjM,9549
 pdaltools/create_random_laz.py,sha256=XuHH4G8Nrs8DB-F8bkcIeto7JtmrlrNGF_R66oxGCbQ,6069
@@ -7,16 +7,16 @@ pdaltools/download_image.py,sha256=DG9PunQsjw7Uyyf4YMVp8LMH0G3Uo4cahx5EZbdi3so,7
 pdaltools/las_add_buffer.py,sha256=rnFExAfi0KqlQpL4hDMh2aC08AcYdSHSB6WPG5RyFIc,11274
 pdaltools/las_clip.py,sha256=GvEOYu8RXV68e35kU8i42GwSkbo4P9TvmS6rkrdPmFM,1034
 pdaltools/las_comparison.py,sha256=B9hFGbmD0x4JEN4oHbiQFNbd0T-9P3mnAN67Czu0pZk,4505
-pdaltools/las_info.py,sha256=
+pdaltools/las_info.py,sha256=xZlTsdLS3I9_xeqGJyOOpJNJrqF82JBhlMhtYabOuw0,9845
 pdaltools/las_merge.py,sha256=tcFVueV9X9nNEaoAl5zCduY5DETlBg63MAgP2SuKiNo,4121
 pdaltools/las_remove_dimensions.py,sha256=f8imGhN6LNTuQ1GMJQRzIIV3Wab_oRPOyEnKi1CgfiM,2318
 pdaltools/las_rename_dimension.py,sha256=FEWIcq0ZZiv9xWbCLDRE9Hzb5K0YYfoi3Z8IZFEs-uU,2887
 pdaltools/pcd_info.py,sha256=NIAH5KGikVDQLlbCcw9FuaPqe20UZvRfkHsDZd5kmZA,3210
-pdaltools/replace_area_in_pointcloud.py,sha256=
+pdaltools/replace_area_in_pointcloud.py,sha256=VyLMDItP-FU4muRV01vbetf5ySgdKeOpc55YESicJ7U,8008
 pdaltools/replace_attribute_in_las.py,sha256=MHpIizSupgWtbizteoRH8FKDE049hrAh4v_OhmRmSPU,4318
 pdaltools/standardize_format.py,sha256=I2oNiwhSMtr4e5ZK9qbB_yKmy3twOoO6QLiSFu4_AaI,3905
 pdaltools/unlock_file.py,sha256=G2odk0cpp_X9r49Y90oK88v3qlihaMfg6acwmWqblik,1958
-ign_pdal_tools-1.
-ign_pdal_tools-1.
-ign_pdal_tools-1.
-ign_pdal_tools-1.
+ign_pdal_tools-1.15.1.dist-info/METADATA,sha256=3mwPLGrnvNs6FuL_f6iSbFq1Zb5mlVnTjdGidcs8SLI,6146
+ign_pdal_tools-1.15.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ign_pdal_tools-1.15.1.dist-info/top_level.txt,sha256=KvGW0ZzqQbhCKzB5_Tp_buWMZyIgiO2M2krWF_ecOZc,10
+ign_pdal_tools-1.15.1.dist-info/RECORD,,
```
pdaltools/_version.py CHANGED

pdaltools/las_info.py CHANGED

```diff
@@ -254,3 +254,19 @@ def get_epsg_from_las(filename: str) -> str:
         return None  # Return None if CRS is not defined
     epsg_code = crs.to_epsg()
     return f"EPSG:{epsg_code}" if epsg_code else None
+
+
+def list_dims(las_filename):
+    """List dimensions
+
+    Args:
+        las_file (_type_): _description_
+
+    Returns:
+        List<String>: Dimensions names
+    """
+    pipeline = pdal.Pipeline()
+    pipeline |= pdal.Reader.las(filename=las_filename)
+    pipeline.execute()
+
+    return list(pipeline.arrays[0].dtype.fields.keys())
```
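The new `list_dims` helper executes a single-stage PDAL pipeline on the input file and reads the dimension names off the resulting numpy structured array. A minimal usage sketch (the `sample.las` path is a placeholder, not part of the package):

```python
from pdaltools.las_info import list_dims

# Returns the dimension names stored in the LAS file,
# e.g. ["X", "Y", "Z", "Intensity", "Classification", ...]
print(list_dims("sample.las"))
```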
pdaltools/replace_area_in_pointcloud.py CHANGED

```diff
@@ -1,20 +1,95 @@
 import argparse
+import warnings

+import numpy as np
 import pdal
+from numpy.lib import recfunctions as rfn
+from osgeo import gdal

 from pdaltools.las_info import get_writer_parameters_from_reader_metadata


-def
+def argument_parser():
     parser = argparse.ArgumentParser(
-        "Replace points in a pointcloud
+        "Replace points in a pointcloud, based on an area. "
+        "Source may come from from another pointcloud (command from_cloud), "
+        "or may be derivated from a digital surface model (command from_DSM).\n"
+    )
+    subparsers = parser.add_subparsers(required=True)
+
+    # first command is 'from_cloud'
+    from_cloud = subparsers.add_parser("from_cloud", help="Source is a point cloud")
+    from_cloud.add_argument("--source_cloud", "-s", required=True, type=str, help="path of source point cloud")
+    add_common_options(from_cloud)
+    from_cloud.set_defaults(func=from_cloud_func)
+
+    # second command is 'from_DSM'
+    from_DSM = subparsers.add_parser("from_DSM", help="Source is a digital surface model (DSM)")
+    from_DSM.add_argument(
+        "--source_dsm",
+        "-d",
+        required=True,
+        type=str,
+        help="path of the source digital surface model (DSM), used to generate source points",
+    )
+    from_DSM.add_argument(
+        "--source_ground_area",
+        "-g",
+        required=True,
+        type=str,
+        help=(
+            "area of the ground, used to intersect source cloud. "
+            "(shapefile, geojson or other format readable by GDAL)"
+        ),
+    )
+    from_DSM.add_argument(
+        "--source_classification",
+        "-c",
+        required=True,
+        type=int,
+        help="classification to apply to the points extracted from the DSM",
+    )
+    add_common_options(from_DSM)
+    from_DSM.set_defaults(func=from_DSM_func)
+
+    return parser
+
+
+def add_common_options(parser):
+    parser.add_argument(
+        "--source_pdal_filter", "-f", type=str, help="pdal filter expression to apply to source point cloud"
+    )
+    parser.add_argument("--target_cloud", "-t", type=str, required=True, help="path of target cloud to be modified")
+    parser.add_argument(
+        "--replacement_area",
+        "-r",
+        required=True,
+        type=str,
+        help="area to replace (shapefile, geojson or other format readable by GDAL)",
+    )
+    parser.add_argument("--output_cloud", "-o", required=True, type=str, help="output cloud file")
+
+
+def from_cloud_func(args):
+    replace_area(
+        target_cloud=args.target_cloud,
+        pipeline_source=pipeline_read_from_cloud(args.source_cloud),
+        replacement_area=args.replacement_area,
+        output_cloud=args.output_cloud,
+        source_pdal_filter=args.source_pdal_filter,
+    )
+
+
+def from_DSM_func(args):
+    replace_area(
+        target_cloud=args.target_cloud,
+        pipeline_source=pipeline_read_from_DSM(
+            dsm=args.source_dsm, ground_area=args.source_ground_area, classification=args.source_classification
+        ),
+        replacement_area=args.replacement_area,
+        output_cloud=args.output_cloud,
+        source_pdal_filter=args.source_pdal_filter,
     )
-    parser.add_argument("--target_cloud", "-t", type=str, help="filepath of target cloud to be modified")
-    parser.add_argument("--source_cloud", "-s", type=str, help="filepath of source cloud to use for replacement")
-    parser.add_argument("--replacement_area_file", "-r", type=str, help="filepath of file containing areas to replace")
-    parser.add_argument("--filter", "-f", type=str, help="pdal filter expression to apply to target_cloud")
-    parser.add_argument("--outfile", "-o", type=str, help="output file")
-    return parser.parse_args()


 def get_writer_params(input_file):
@@ -25,55 +100,114 @@ def get_writer_params(input_file):
     return params


-def
+def pipeline_read_from_cloud(filename):
+    pipeline_source = pdal.Pipeline()
+    pipeline_source |= pdal.Reader.las(filename=filename)
+    return pipeline_source
+
+
+def pipeline_read_from_DSM(dsm, ground_area, classification):
+    # get nodata value
+    ds = gdal.Open(dsm)
+    band = ds.GetRasterBand(1)
+    nodata_value = band.GetNoDataValue()
+    ds.Close()
+
+    pipeline = pdal.Pipeline()
+    pipeline |= pdal.Reader.gdal(filename=dsm, header="Z")
+    pipeline |= pdal.Filter.expression(expression=f"Z != {nodata_value}")
+
+    pipeline |= pdal.Filter.ferry(dimensions="=> geometryFid")
+    pipeline |= pdal.Filter.assign(assignment="geometryFid[:]=-1")
+    pipeline |= pdal.Filter.overlay(column="fid", dimension="geometryFid", datasource=ground_area)
+    # Keep only points in the area
+    pipeline |= pdal.Filter.expression(expression="geometryFid>=0")
+
+    # assign class
+    pipeline |= pdal.Filter.ferry(dimensions="=>Classification")
+    pipeline |= pdal.Filter.assign(assignment=f"Classification[:]={classification}")
+
+    return pipeline
+
+
+def replace_area(
+    target_cloud, pipeline_source, replacement_area, output_cloud, source_pdal_filter="", target_pdal_filter=""
+):
     crops = []
+    # pipeline to read target_cloud and remove points inside the polygon
     pipeline_target = pdal.Pipeline()
     pipeline_target |= pdal.Reader.las(filename=target_cloud)
     pipeline_target |= pdal.Filter.ferry(dimensions="=> geometryFid")
     # Assign -1 to all points because overlay replaces values from 0 and more
     pipeline_target |= pdal.Filter.assign(assignment="geometryFid[:]=-1")
-    if
-        pipeline_target |= pdal.Filter.expression(expression=
-    pipeline_target |= pdal.Filter.overlay(column="fid", dimension="geometryFid", datasource=
+    if target_pdal_filter:
+        pipeline_target |= pdal.Filter.expression(expression=target_pdal_filter)
+    pipeline_target |= pdal.Filter.overlay(column="fid", dimension="geometryFid", datasource=replacement_area)
     # Keep only points out of the area
     pipeline_target |= pdal.Filter.expression(expression="geometryFid==-1", tag="A")
     pipeline_target.execute()

-
+    # get input dimensions dtype from target
+    if pipeline_target.arrays:
+        input_dim_dtype = pipeline_target.arrays[0].dtype
+    else:
+        # re-read the LAS only if we cant have dimensions with previous pipeline (empty output)
+        pipeline_target2 = pdal.Pipeline()
+        pipeline_target2 |= pdal.Reader.las(filename=target_cloud)
+        pipeline_target2.execute()
+        input_dim_dtype = pipeline_target2.arrays[0].dtype
+
+    # get input dimensions names
+    input_dimensions = list(input_dim_dtype.fields.keys())
+
     # do not keep geometryFid
     output_dimensions = [dim for dim in input_dimensions if dim not in "geometryFid"]
-    target_cloud_pruned = pipeline_target.arrays[0][output_dimensions]
-    crops.append(target_cloud_pruned)

-
-
+    # add target to the result after keeping only the expected dimensions
+    if pipeline_target.arrays:
+        target_cloud_pruned = pipeline_target.arrays[0][output_dimensions]
+        crops.append(target_cloud_pruned)
+
+    # pipeline to read source_cloud and remove points outside the polygon
     pipeline_source |= pdal.Filter.ferry(dimensions="=> geometryFid")
     pipeline_source |= pdal.Filter.assign(assignment="geometryFid[:]=-1")
-
+    if source_pdal_filter:
+        pipeline_source |= pdal.Filter.expression(expression=source_pdal_filter)
+    pipeline_source |= pdal.Filter.overlay(column="fid", dimension="geometryFid", datasource=replacement_area)
     # Keep only points in the area
     pipeline_source |= pdal.Filter.expression(expression="geometryFid>=0", tag="B")
     pipeline_source.execute()

-    #
-
-
+    # add source to the result
+    if pipeline_source.arrays:
+        # eventually add dimensions in source to have same dimensions as target cloud
+        # we do that in numpy (instead of PDAL filter) to keep dimension types
+        source_cloud_crop = pipeline_source.arrays[0]
+        nb_points = source_cloud_crop.shape[0]
+        source_dims = source_cloud_crop.dtype.fields.keys()
+        for dim_name, dim_type in input_dim_dtype.fields.items():
+            if dim_name not in source_dims:
+                source_cloud_crop = rfn.append_fields(
+                    base=source_cloud_crop,
+                    names=dim_name,
+                    data=np.zeros(nb_points, dtype=dim_type[0]),
+                    dtypes=dim_type[0],
+                )
+
+        source_cloud_pruned = source_cloud_crop[output_dimensions]
+        crops.append(source_cloud_pruned)

     # Merge
-
-
-    pipeline.execute()
-
-
-def main():
-    args = parse_args()
+    if not crops:
+        warnings.warn("WARNING: Empty LAS, extra dims are lost")

-
-    # writer_parameters["extra_dims"] = "" # no extra-dim by default
+    pipeline = pdal.Filter.merge().pipeline(*crops)

-
-
-    )
+    writer_params = get_writer_params(target_cloud)
+    pipeline |= pdal.Writer.las(filename=output_cloud, **writer_params)
+    pipeline.execute()


 if __name__ == "__main__":
-
+    args = argument_parser().parse_args()
+    args.func(args)
```
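Taken together, the rewritten module builds one PDAL pipeline for the target cloud and one for the source (either another LAS file or points derived from a DSM), uses `filters.overlay` against `replacement_area` to keep target points outside the polygons and source points inside them, then merges both crops and writes `output_cloud` with the target's writer parameters. A minimal sketch of calling the new API directly; the input paths and the filter expression are hypothetical placeholders, not files or defaults shipped with the package:

```python
from pdaltools.replace_area_in_pointcloud import pipeline_read_from_cloud, replace_area

# Replace the points of target.laz that fall inside the polygons of area.geojson
# with the points of source.laz found in that same area (equivalent to the
# from_cloud subcommand defined above).
replace_area(
    target_cloud="target.laz",
    pipeline_source=pipeline_read_from_cloud("source.laz"),
    replacement_area="area.geojson",
    output_cloud="out.laz",
    source_pdal_filter="Classification == 2",  # optional PDAL expression applied to the source points
)
```

The same operation run from the command line goes through `argument_parser()`, whose `set_defaults(func=...)` entries dispatch to `from_cloud_func` or `from_DSM_func`.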
{ign_pdal_tools-1.14.0.dist-info → ign_pdal_tools-1.15.1.dist-info}/WHEEL: File without changes

{ign_pdal_tools-1.14.0.dist-info → ign_pdal_tools-1.15.1.dist-info}/licenses/LICENSE.md: File without changes

{ign_pdal_tools-1.14.0.dist-info → ign_pdal_tools-1.15.1.dist-info}/top_level.txt: File without changes