ign-pdal-tools 1.7.11__tar.gz → 1.8.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35) hide show
  1. {ign_pdal_tools-1.7.11 → ign_pdal_tools-1.8.1}/PKG-INFO +9 -4
  2. {ign_pdal_tools-1.7.11 → ign_pdal_tools-1.8.1}/README.md +9 -4
  3. {ign_pdal_tools-1.7.11 → ign_pdal_tools-1.8.1}/ign_pdal_tools.egg-info/PKG-INFO +9 -4
  4. {ign_pdal_tools-1.7.11 → ign_pdal_tools-1.8.1}/ign_pdal_tools.egg-info/SOURCES.txt +0 -2
  5. {ign_pdal_tools-1.7.11 → ign_pdal_tools-1.8.1}/pdaltools/_version.py +1 -1
  6. {ign_pdal_tools-1.7.11 → ign_pdal_tools-1.8.1}/pdaltools/add_points_in_pointcloud.py +4 -5
  7. {ign_pdal_tools-1.7.11 → ign_pdal_tools-1.8.1}/pdaltools/color.py +169 -21
  8. {ign_pdal_tools-1.7.11 → ign_pdal_tools-1.8.1}/pdaltools/las_remove_dimensions.py +1 -0
  9. {ign_pdal_tools-1.7.11 → ign_pdal_tools-1.8.1}/test/test_add_points_in_pointcloud.py +9 -6
  10. ign_pdal_tools-1.8.1/test/test_color.py +379 -0
  11. {ign_pdal_tools-1.7.11 → ign_pdal_tools-1.8.1}/test/test_las_remove_dimensions.py +4 -5
  12. ign_pdal_tools-1.7.11/pdaltools/add_points_in_las.py +0 -104
  13. ign_pdal_tools-1.7.11/test/test_add_points_in_las.py +0 -72
  14. ign_pdal_tools-1.7.11/test/test_color.py +0 -148
  15. {ign_pdal_tools-1.7.11 → ign_pdal_tools-1.8.1}/LICENSE.md +0 -0
  16. {ign_pdal_tools-1.7.11 → ign_pdal_tools-1.8.1}/ign_pdal_tools.egg-info/dependency_links.txt +0 -0
  17. {ign_pdal_tools-1.7.11 → ign_pdal_tools-1.8.1}/ign_pdal_tools.egg-info/top_level.txt +0 -0
  18. {ign_pdal_tools-1.7.11 → ign_pdal_tools-1.8.1}/pdaltools/las_add_buffer.py +0 -0
  19. {ign_pdal_tools-1.7.11 → ign_pdal_tools-1.8.1}/pdaltools/las_clip.py +0 -0
  20. {ign_pdal_tools-1.7.11 → ign_pdal_tools-1.8.1}/pdaltools/las_info.py +0 -0
  21. {ign_pdal_tools-1.7.11 → ign_pdal_tools-1.8.1}/pdaltools/las_merge.py +0 -0
  22. {ign_pdal_tools-1.7.11 → ign_pdal_tools-1.8.1}/pdaltools/pcd_info.py +0 -0
  23. {ign_pdal_tools-1.7.11 → ign_pdal_tools-1.8.1}/pdaltools/replace_attribute_in_las.py +0 -0
  24. {ign_pdal_tools-1.7.11 → ign_pdal_tools-1.8.1}/pdaltools/standardize_format.py +0 -0
  25. {ign_pdal_tools-1.7.11 → ign_pdal_tools-1.8.1}/pdaltools/unlock_file.py +0 -0
  26. {ign_pdal_tools-1.7.11 → ign_pdal_tools-1.8.1}/pyproject.toml +0 -0
  27. {ign_pdal_tools-1.7.11 → ign_pdal_tools-1.8.1}/setup.cfg +0 -0
  28. {ign_pdal_tools-1.7.11 → ign_pdal_tools-1.8.1}/test/test_las_add_buffer.py +0 -0
  29. {ign_pdal_tools-1.7.11 → ign_pdal_tools-1.8.1}/test/test_las_clip.py +0 -0
  30. {ign_pdal_tools-1.7.11 → ign_pdal_tools-1.8.1}/test/test_las_info.py +0 -0
  31. {ign_pdal_tools-1.7.11 → ign_pdal_tools-1.8.1}/test/test_las_merge.py +0 -0
  32. {ign_pdal_tools-1.7.11 → ign_pdal_tools-1.8.1}/test/test_pcd_info.py +0 -0
  33. {ign_pdal_tools-1.7.11 → ign_pdal_tools-1.8.1}/test/test_replace_attribute_in_las.py +0 -0
  34. {ign_pdal_tools-1.7.11 → ign_pdal_tools-1.8.1}/test/test_standardize_format.py +0 -0
  35. {ign_pdal_tools-1.7.11 → ign_pdal_tools-1.8.1}/test/test_unlock.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: ign-pdal-tools
3
- Version: 1.7.11
3
+ Version: 1.8.1
4
4
  Summary: Library for common LAS files manipulation with PDAL
5
5
  Author-email: Guillaume Liegard <guillaume.liegard@ign.fr>
6
6
  Description-Content-Type: text/markdown
@@ -88,10 +88,15 @@ By default, `xcoord` and `ycoord` are given in kilometers and the shape of the t
88
88
  `readers.las: Global encoding WKT flag not set for point format 6 - 10.` which is due to TerraSolid
89
89
  malformed LAS output for LAS1.4 files with point format 6 to 10.
90
90
 
91
- ## Add points in Las
92
-
93
- [add_points_in_las.py](pdaltools/add_points_in_las.py): add points from some vector files (ex: shp, geojson, ...) inside Las. New points will have X,Y and Z coordinates. Other attributes values given by the initial las file are null (ex: classification at 0). These others attributes could be forced by using the '--dimensions/-d' option in the command line (ex : 'add_points_in_las.py -i myLas.las -g myPoints.json -d classification=64' - points will have their classification set to 64). The dimension should be present in the initial las ; this is not allowed to add new dimension.
91
+ ## Add points in pointcloud
94
92
 
93
+ [add_points_in_pointcloud.py](pdaltools/add_points_in_pointcloud.py): add points from some vector files (ex: shp, geojson, ...) inside Las/Laz:
94
+ - 2 kinds of geometries are handled:
95
+ - if the geometries in the vector file are points, they are added directly to the las file
96
+ - if the geometries are lines, points are added along this line using a `spacing` parameter
97
+ - In case the points are 2D only, Z can be provided as a feature property (parametrized via `altitude_column`)
98
+ - The Classification attribute for these points is parametrized via `virtual_points_classes`
99
+ - All the other attributes are set to 0.
95
100
 
96
101
  # Dev / Build
97
102
 
@@ -79,10 +79,15 @@ By default, `xcoord` and `ycoord` are given in kilometers and the shape of the t
79
79
  `readers.las: Global encoding WKT flag not set for point format 6 - 10.` which is due to TerraSolid
80
80
  malformed LAS output for LAS1.4 files with point format 6 to 10.
81
81
 
82
- ## Add points in Las
83
-
84
- [add_points_in_las.py](pdaltools/add_points_in_las.py): add points from some vector files (ex: shp, geojson, ...) inside Las. New points will have X,Y and Z coordinates. Other attributes values given by the initial las file are null (ex: classification at 0). These others attributes could be forced by using the '--dimensions/-d' option in the command line (ex : 'add_points_in_las.py -i myLas.las -g myPoints.json -d classification=64' - points will have their classification set to 64). The dimension should be present in the initial las ; this is not allowed to add new dimension.
85
-
82
+ ## Add points in pointcloud
83
+
84
+ [add_points_in_pointcloud.py](pdaltools/add_points_in_pointcloud.py): add points from some vector files (ex: shp, geojson, ...) inside Las/Laz:
85
+ - 2 kinds of geometries are handled:
86
+ - if the geometries in the vector file are points, they are added directly to the las file
87
+ - if the geometries are lines, points are added along this line using a `spacing` parameter
88
+ - In case the points are 2D only, Z can be provided as a feature property (parametrized via `altitude_column`)
89
+ - The Classification attribute for these points is parametrized via `virtual_points_classes`
90
+ - All the other attributes are set to 0.
86
91
 
87
92
  # Dev / Build
88
93
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: ign-pdal-tools
3
- Version: 1.7.11
3
+ Version: 1.8.1
4
4
  Summary: Library for common LAS files manipulation with PDAL
5
5
  Author-email: Guillaume Liegard <guillaume.liegard@ign.fr>
6
6
  Description-Content-Type: text/markdown
@@ -88,10 +88,15 @@ By default, `xcoord` and `ycoord` are given in kilometers and the shape of the t
88
88
  `readers.las: Global encoding WKT flag not set for point format 6 - 10.` which is due to TerraSolid
89
89
  malformed LAS output for LAS1.4 files with point format 6 to 10.
90
90
 
91
- ## Add points in Las
92
-
93
- [add_points_in_las.py](pdaltools/add_points_in_las.py): add points from some vector files (ex: shp, geojson, ...) inside Las. New points will have X,Y and Z coordinates. Other attributes values given by the initial las file are null (ex: classification at 0). These others attributes could be forced by using the '--dimensions/-d' option in the command line (ex : 'add_points_in_las.py -i myLas.las -g myPoints.json -d classification=64' - points will have their classification set to 64). The dimension should be present in the initial las ; this is not allowed to add new dimension.
91
+ ## Add points in pointcloud
94
92
 
93
+ [add_points_in_pointcloud.py](pdaltools/add_points_in_pointcloud.py): add points from some vector files (ex: shp, geojson, ...) inside Las/Laz:
94
+ - 2 kinds of geometries are handled:
95
+ - if the geometries in the vector file are points, they are added directly to the las file
96
+ - if the geometries are lines, points are added along this line using a `spacing` parameter
97
+ - In case the points are 2D only, Z can be provided as a feature property (parametrized via `altitude_column`)
98
+ - The Classification attribute for these points is parametrized via `virtual_points_classes`
99
+ - All the other attributes are set to 0.
95
100
 
96
101
  # Dev / Build
97
102
 
@@ -6,7 +6,6 @@ ign_pdal_tools.egg-info/SOURCES.txt
6
6
  ign_pdal_tools.egg-info/dependency_links.txt
7
7
  ign_pdal_tools.egg-info/top_level.txt
8
8
  pdaltools/_version.py
9
- pdaltools/add_points_in_las.py
10
9
  pdaltools/add_points_in_pointcloud.py
11
10
  pdaltools/color.py
12
11
  pdaltools/las_add_buffer.py
@@ -18,7 +17,6 @@ pdaltools/pcd_info.py
18
17
  pdaltools/replace_attribute_in_las.py
19
18
  pdaltools/standardize_format.py
20
19
  pdaltools/unlock_file.py
21
- test/test_add_points_in_las.py
22
20
  test/test_add_points_in_pointcloud.py
23
21
  test/test_color.py
24
22
  test/test_las_add_buffer.py
@@ -1,4 +1,4 @@
1
- __version__ = "1.7.11"
1
+ __version__ = "1.8.1"
2
2
 
3
3
 
4
4
  if __name__ == "__main__":
@@ -127,6 +127,9 @@ def add_points_to_las(
127
127
  crs (str): CRS of the data.
128
128
  virtual_points_classes (int): The classification value to assign to those virtual points (default: 66).
129
129
  """
130
+ # Copy data pointcloud
131
+ copy2(input_las, output_las)
132
+
130
133
  if input_points_with_z.empty:
131
134
  print(
132
135
  "No points to add. All points of the geojson file are outside the tile. Copying the input file to output"
@@ -141,7 +144,7 @@ def add_points_to_las(
141
144
  classes = virtual_points_classes * np.ones(nb_points)
142
145
 
143
146
  # Open the input LAS file to check and possibly update the header of the output
144
- with laspy.open(input_las) as las:
147
+ with laspy.open(input_las, "r") as las:
145
148
  header = las.header
146
149
  if not header:
147
150
  header = laspy.LasHeader(point_format=8, version="1.4")
@@ -152,11 +155,7 @@ def add_points_to_las(
152
155
  raise ValueError(f"Invalid CRS: {crs}")
153
156
  header.add_crs(crs_obj)
154
157
 
155
- # Copy data pointcloud
156
- copy2(input_las, output_las)
157
-
158
158
  # Add the new points with 3D points
159
- nb_points = len(x_coords)
160
159
  with laspy.open(output_las, mode="a", header=header) as output_las: # mode `a` for adding points
161
160
  # create nb_points points with "0" everywhere
162
161
  new_points = laspy.ScaleAwarePointRecord.zeros(nb_points, header=header) # use header for input_las
@@ -1,12 +1,14 @@
1
1
  import argparse
2
2
  import tempfile
3
3
  import time
4
- from math import ceil
4
+ from math import ceil, floor
5
+ from pathlib import Path
6
+ from typing import Tuple
5
7
 
6
8
  import numpy as np
7
9
  import pdal
8
10
  import requests
9
- from osgeo import gdal_array
11
+ from osgeo import gdal, gdal_array
10
12
 
11
13
  import pdaltools.las_info as las_info
12
14
  from pdaltools.unlock_file import copy_and_hack_decorator
@@ -28,7 +30,7 @@ def pretty_time_delta(seconds):
28
30
  return "%s%ds" % (sign_string, seconds)
29
31
 
30
32
 
31
- def retry(times, delay, factor=2, debug=False):
33
+ def retry(times, delay, factor, debug=False):
32
34
  def decorator(func):
33
35
  def newfn(*args, **kwargs):
34
36
  attempt = 1
@@ -61,25 +63,28 @@ def is_image_white(filename: str):
61
63
 
62
64
 
63
65
  def download_image_from_geoplateforme(
64
- proj, layer, minx, miny, maxx, maxy, pixel_per_meter, outfile, timeout, check_images
66
+ proj, layer, minx, miny, maxx, maxy, width_pixels, height_pixels, outfile, timeout, check_images
65
67
  ):
66
- # Force a 1-pixel margin in the east and south borders
67
- # to make sure that no point of the pointcloud is on the limit of the last pixel
68
- # to prevent interpolation issues
69
- maxx = maxx + 1 / pixel_per_meter
70
- miny = miny - 1 / pixel_per_meter
68
+ """
69
+ Download image using a wms request to geoplateforme.
70
+
71
+ Args:
72
+ proj (int): epsg code for the projection of the downloaded image.
73
+ layer: which kind of image is downloaded (ORTHOIMAGERY.ORTHOPHOTOS, ORTHOIMAGERY.ORTHOPHOTOS.IRC, ...).
74
+ minx, miny, maxx, maxy: box of the downloaded image.
75
+ width_pixels: width in pixels of the downloaded image.
76
+ height_pixels: height in pixels of the downloaded image.
77
+ outfile: file name of the downloaded file
78
+ timeout: delay after which the request is canceled (in seconds)
79
+ check_images (bool): enable checking if the output image is not a white image
80
+ """
71
81
 
72
82
  # for layer in layers:
73
83
  URL_GPP = "https://data.geopf.fr/wms-r/wms?"
74
84
  URL_FORMAT = "&EXCEPTIONS=text/xml&FORMAT=image/geotiff&SERVICE=WMS&VERSION=1.3.0&REQUEST=GetMap&STYLES="
75
85
  URL_EPSG = "&CRS=EPSG:" + str(proj)
76
86
  URL_BBOX = "&BBOX=" + str(minx) + "," + str(miny) + "," + str(maxx) + "," + str(maxy)
77
- URL_SIZE = (
78
- "&WIDTH="
79
- + str(ceil((maxx - minx) * pixel_per_meter))
80
- + "&HEIGHT="
81
- + str(ceil((maxy - miny) * pixel_per_meter))
82
- )
87
+ URL_SIZE = "&WIDTH=" + str(width_pixels) + "&HEIGHT=" + str(height_pixels)
83
88
 
84
89
  URL = URL_GPP + "LAYERS=" + layer + URL_FORMAT + URL_EPSG + URL_BBOX + URL_SIZE
85
90
 
@@ -96,7 +101,120 @@ def download_image_from_geoplateforme(
96
101
  raise ValueError(f"Downloaded image is white, with stream: {layer}")
97
102
 
98
103
 
104
def compute_cells_size(mind: float, maxd: float, pixel_per_meter: float, size_max_gpf: int) -> Tuple[int, int, int]:
    """Split one dimension of an image into cells of (almost) equal, integer pixel size.

    The cells stay phased exactly as if no paving had occurred, because the
    per-cell size (in pixels) is forced to an integer by rounding up.

    Args:
        mind (float): minimum coordinate along the dimension, in meters
        maxd (float): maximum coordinate along the dimension, in meters
        pixel_per_meter (float): resolution (in number of pixels per meter)
        size_max_gpf (int): maximum allowed image size, in pixels

    Returns:
        Tuple[int, int, int]: (total number of pixels along the dimension,
        number of cells along the dimension, cell size in pixels)
    """
    total_pixels = ceil((maxd - mind) * pixel_per_meter)
    cell_count = ceil(total_pixels / size_max_gpf)
    # Rounding up keeps every cell an integer number of pixels wide.
    pixels_per_cell = ceil(total_pixels / cell_count)
    return total_pixels, cell_count, pixels_per_cell
122
+
123
+
99
124
@copy_and_hack_decorator
def download_image(proj, layer, minx, miny, maxx, maxy, pixel_per_meter, outfile, timeout, check_images, size_max_gpf):
    """
    Download an image through WMS requests to the geoplateforme, using
    download_image_from_geoplateforme(): when the requested image is bigger than
    size_max_gpf, it is downloaded as a grid of cells that are merged afterwards,
    in order to limit the size of each geoplateforme request.

    Args:
        proj: projection (epsg code) of the downloaded image.
        layer: which kind of image is downloaded (ORTHOIMAGERY.ORTHOPHOTOS, ORTHOIMAGERY.ORTHOPHOTOS.IRC, ...).
        minx, miny, maxx, maxy: bounding box of the downloaded image.
        pixel_per_meter: resolution of the downloaded image.
        outfile: file name of the downloaded file.
        timeout: delay after which each request is canceled (in seconds).
        check_images: check that each downloaded image is not a white image.
        size_max_gpf: maximum edge size (in pixels) of each downloaded block.

    Returns:
        int: the number of effective requests
    """

    download_image_from_geoplateforme_retrying = retry(times=9, delay=5, factor=2)(download_image_from_geoplateforme)

    size_x_p, nb_cells_x, cell_size_x = compute_cells_size(minx, maxx, pixel_per_meter, size_max_gpf)
    # Bugfix: the y dimension must be computed from miny/maxy (was minx/maxx,
    # which was only correct for square bounding boxes)
    size_y_p, nb_cells_y, cell_size_y = compute_cells_size(miny, maxy, pixel_per_meter, size_max_gpf)

    # The image size is under size_max_gpf: a single request is enough
    if (size_x_p <= size_max_gpf) and (size_y_p <= size_max_gpf):
        download_image_from_geoplateforme_retrying(
            proj, layer, minx, miny, maxx, maxy, size_x_p, size_y_p, outfile, timeout, check_images
        )
        return 1

    # The image is bigger than size_max_gpf:
    # download it cell by cell, then merge the cells
    with tempfile.TemporaryDirectory() as tmp_dir:
        cells_ortho_paths = []
        for line in range(nb_cells_y):
            for col in range(nb_cells_x):
                # Cope for last line/col that can be slightly smaller than the other cells
                remaining_pixels_x = size_x_p - col * cell_size_x
                remaining_pixels_y = size_y_p - line * cell_size_y
                cell_size_x_local = min(cell_size_x, remaining_pixels_x)
                cell_size_y_local = min(cell_size_y, remaining_pixels_y)

                minx_cell = minx + col * cell_size_x / pixel_per_meter
                maxx_cell = minx_cell + cell_size_x_local / pixel_per_meter
                miny_cell = miny + line * cell_size_y / pixel_per_meter
                maxy_cell = miny_cell + cell_size_y_local / pixel_per_meter

                # Bugfix: join the file name to the directory (was a plain string
                # concatenation without separator, which wrote the cell files
                # outside the temporary directory)
                cell_path = str(Path(tmp_dir) / f"cell_{col}_{line}.tif")
                download_image_from_geoplateforme_retrying(
                    proj,
                    layer,
                    minx_cell,
                    miny_cell,
                    maxx_cell,
                    maxy_cell,
                    cell_size_x_local,
                    cell_size_y_local,
                    cell_path,
                    timeout,
                    check_images,
                )
                cells_ortho_paths.append(cell_path)

        # Merge the cells through a temporary virtual raster
        with tempfile.NamedTemporaryFile(suffix="_gpf.vrt") as tmp_vrt:
            gdal.BuildVRT(tmp_vrt.name, cells_ortho_paths)
            gdal.Translate(outfile, tmp_vrt.name)

    return nb_cells_x * nb_cells_y
194
+
195
+
196
def match_min_max_with_pixel_size(min_d: float, max_d: float, pixel_per_meter: float) -> Tuple[float, float]:
    """Snap a min/max pair along one dimension onto the grid of step 1 / pixel_per_meter.

    This should prevent having to interpolate during a request to the
    geoplateforme when a native resolution is used.

    Args:
        min_d (float): minimum value along the dimension, in meters
        max_d (float): maximum value along the dimension, in meters
        pixel_per_meter (float): resolution (in number of pixels per meter)

    Returns:
        Tuple[float, float]: adapted min / max value
    """
    # ceil(x) - 1 (rather than floor) guarantees that no point of the pointcloud
    # lies exactly on the limit of the first pixel.
    snapped_min = (ceil(min_d * pixel_per_meter) - 1) / pixel_per_meter
    # Symmetrically, floor(x) + 1 (rather than ceil) keeps every point strictly
    # inside the limit of the last pixel.
    snapped_max = (floor(max_d * pixel_per_meter) + 1) / pixel_per_meter
    return snapped_min, snapped_max
216
+
217
+
100
218
  def color(
101
219
  input_file: str,
102
220
  output_file: str,
@@ -109,10 +227,14 @@ def color(
109
227
  check_images=False,
110
228
  stream_RGB="ORTHOIMAGERY.ORTHOPHOTOS",
111
229
  stream_IRC="ORTHOIMAGERY.ORTHOPHOTOS.IRC",
230
+ size_max_gpf=5000,
112
231
  ):
113
232
  metadata = las_info.las_info_metadata(input_file)
114
233
  minx, maxx, miny, maxy = las_info.get_bounds_from_header_info(metadata)
115
234
 
235
+ minx, maxx = match_min_max_with_pixel_size(minx, maxx, pixel_per_meter)
236
+ miny, maxy = match_min_max_with_pixel_size(miny, maxy, pixel_per_meter)
237
+
116
238
  if proj == "":
117
239
  proj = las_info.get_epsg_from_header_info(metadata)
118
240
 
@@ -120,8 +242,6 @@ def color(
120
242
 
121
243
  writer_extra_dims = "all"
122
244
 
123
- download_image_from_geoplateforme_retrying = retry(times=9, delay=5, factor=2)(download_image_from_geoplateforme)
124
-
125
245
  if veget_index_file and veget_index_file != "":
126
246
  print(f"Remplissage du champ Deviation à partir du fichier {veget_index_file}")
127
247
  pipeline |= pdal.Filter.colorization(raster=veget_index_file, dimensions="Deviation:1:256.0")
@@ -130,8 +250,18 @@ def color(
130
250
  tmp_ortho = None
131
251
  if color_rvb_enabled:
132
252
  tmp_ortho = tempfile.NamedTemporaryFile(suffix="_rvb.tif")
133
- download_image_from_geoplateforme_retrying(
134
- proj, stream_RGB, minx, miny, maxx, maxy, pixel_per_meter, tmp_ortho.name, timeout_second, check_images
253
+ download_image(
254
+ proj,
255
+ stream_RGB,
256
+ minx,
257
+ miny,
258
+ maxx,
259
+ maxy,
260
+ pixel_per_meter,
261
+ tmp_ortho.name,
262
+ timeout_second,
263
+ check_images,
264
+ size_max_gpf,
135
265
  )
136
266
  # Warning: the initial color is multiplied by 256 despite its initial 8-bits encoding
137
267
  # which turns it to a 0 to 255*256 range.
@@ -143,8 +273,18 @@ def color(
143
273
  tmp_ortho_irc = None
144
274
  if color_ir_enabled:
145
275
  tmp_ortho_irc = tempfile.NamedTemporaryFile(suffix="_irc.tif")
146
- download_image_from_geoplateforme_retrying(
147
- proj, stream_IRC, minx, miny, maxx, maxy, pixel_per_meter, tmp_ortho_irc.name, timeout_second, check_images
276
+ download_image(
277
+ proj,
278
+ stream_IRC,
279
+ minx,
280
+ miny,
281
+ maxx,
282
+ maxy,
283
+ pixel_per_meter,
284
+ tmp_ortho_irc.name,
285
+ timeout_second,
286
+ check_images,
287
+ size_max_gpf,
148
288
  )
149
289
  # Warning: the initial color is multiplied by 256 despite its initial 8-bits encoding
150
290
  # which turns it to a 0 to 255*256 range.
@@ -195,6 +335,13 @@ for 50 cm resolution rasters, use ORTHOIMAGERY.ORTHOPHOTOS.BDORTHO""",
195
335
  help="""WMS raster stream for IRC colorization. Default to ORTHOIMAGERY.ORTHOPHOTOS.IRC
196
336
  Documentation about possible stream : https://geoservices.ign.fr/services-web-experts-ortho""",
197
337
  )
338
+ parser.add_argument(
339
+ "--size-max-GPF",
340
+ type=int,
341
+ default=5000,
342
+ help="Maximum edge size (in pixels) of downloaded images."
343
+ " If input file needs more, several images are downloaded and merged.",
344
+ )
198
345
 
199
346
  return parser.parse_args()
200
347
 
@@ -213,4 +360,5 @@ if __name__ == "__main__":
213
360
  check_images=args.check_images,
214
361
  stream_RGB=args.stream_RGB,
215
362
  stream_IRC=args.stream_IRC,
363
+ size_max_gpf=args.size_max_GPF,
216
364
  )
@@ -1,6 +1,7 @@
1
1
  import argparse
2
2
 
3
3
  import pdal
4
+
4
5
  from pdaltools.las_info import get_writer_parameters_from_reader_metadata
5
6
 
6
7
 
@@ -26,6 +26,7 @@ INPUT_LIGNES_2D_GEOJSON = os.path.join(DATA_LIGNES_PATH, "Lignes_2d_0292_6833.ge
26
26
  INPUT_LIGNES_3D_GEOJSON = os.path.join(DATA_LIGNES_PATH, "Lignes_3d_0292_6833.geojson")
27
27
  INPUT_LIGNES_SHAPE = os.path.join(DATA_LIGNES_PATH, "Lignes_3d_0292_6833.shp")
28
28
  OUTPUT_FILE = os.path.join(TMP_PATH, "test_semis_2023_0292_6833_LA93_IGN69.laz")
29
+ INPUT_EMPTY_POINTS_2D = os.path.join(DATA_POINTS_3D_PATH, "Points_virtuels_2d_empty.geojson")
29
30
 
30
31
  # Cropped las tile used to test adding points that belong to the theorical tile but not to the
31
32
  # effective las file extent
@@ -81,19 +82,21 @@ def test_clip_3d_lines_to_tile(input_file, epsg):
81
82
 
82
83
 
83
84
  @pytest.mark.parametrize(
84
- "input_file, epsg, expected_nb_points",
85
+ "input_file, epsg, input_points_2d, expected_nb_points",
85
86
  [
86
- (INPUT_PCD, "EPSG:2154", 2423), # should work when providing an epsg value
87
- (INPUT_PCD, None, 2423), # Should also work with no epsg value (get from las file)
88
- (INPUT_PCD_CROPPED, None, 2423),
87
+ (INPUT_PCD, "EPSG:2154", INPUT_POINTS_2D, 2423), # should work when providing an epsg value
88
+ (INPUT_PCD, None, INPUT_POINTS_2D, 2423), # Should also work with no epsg value (get from las file)
89
+ (INPUT_PCD_CROPPED, None, INPUT_POINTS_2D_FOR_CROPPED_PCD, 451),
90
+ # Should also work if there is no points (direct copy of the input file)
91
+ (INPUT_PCD_CROPPED, None, INPUT_EMPTY_POINTS_2D, 0),
89
92
  ],
90
93
  )
91
- def test_add_points_to_las(input_file, epsg, expected_nb_points):
94
+ def test_add_points_to_las(input_file, epsg, input_points_2d, expected_nb_points):
92
95
  # Ensure the output file doesn't exist before the test
93
96
  if Path(OUTPUT_FILE).exists():
94
97
  os.remove(OUTPUT_FILE)
95
98
 
96
- points = gpd.read_file(INPUT_POINTS_2D)
99
+ points = gpd.read_file(input_points_2d)
97
100
  add_points_in_pointcloud.add_points_to_las(points, input_file, OUTPUT_FILE, epsg, 68)
98
101
  assert Path(OUTPUT_FILE).exists() # check output exists
99
102
 
@@ -0,0 +1,379 @@
1
+ import math
2
+ import os
3
+ import shutil
4
+ from pathlib import Path
5
+
6
+ import laspy
7
+ import numpy as np
8
+ import pytest
9
+ import requests
10
+ import requests_mock
11
+ from osgeo import gdal
12
+
13
+ from pdaltools import color
14
+
15
+ cwd = os.getcwd()
16
+
17
+ TEST_PATH = os.path.dirname(os.path.abspath(__file__))
18
+ TMPDIR = os.path.join(TEST_PATH, "tmp")
19
+
20
+ INPUT_PATH = os.path.join(TEST_PATH, "data/test_noepsg_043500_629205_IGN69.laz")
21
+
22
+ OUTPUT_FILE = os.path.join(TMPDIR, "Semis_2021_0435_6292_LA93_IGN69.colorized.las")
23
+ EPSG = "2154"
24
+ LAYER = "ORTHOIMAGERY.ORTHOPHOTOS"
25
+ MINX = 435000
26
+ MINY = 6291000
27
+ MAXX = 436000
28
+ MAXY = 6292000
29
+ PIXEL_PER_METER = 0.1
30
+ SIZE_MAX_IMAGE_GPF = 500
31
+
32
+
33
+ def setup_module(module):
34
+ try:
35
+ shutil.rmtree(TMPDIR)
36
+ except FileNotFoundError:
37
+ pass
38
+ os.mkdir(TMPDIR)
39
+
40
+
41
+ @pytest.mark.geopf
42
+ def test_epsg_fail():
43
+ with pytest.raises(
44
+ RuntimeError,
45
+ match="EPSG could not be inferred from metadata: No 'srs' key in metadata.",
46
+ ):
47
+ color.color(INPUT_PATH, OUTPUT_FILE, "", 0.1, 15)
48
+
49
+
50
+ @pytest.mark.geopf
51
+ def test_color_and_keeping_orthoimages():
52
+ tmp_ortho, tmp_ortho_irc = color.color(INPUT_PATH, OUTPUT_FILE, EPSG, check_images=True)
53
+ assert Path(tmp_ortho.name).exists()
54
+ assert Path(tmp_ortho_irc.name).exists()
55
+
56
+
57
+ @pytest.mark.parametrize(
58
+ "mind, maxd, pixel_per_meter, size_max_gpf, expected_nb_pixels, expected_nb_cells, expected_cell_size",
59
+ [
60
+ (0, 1000, 1, 500, 1000, 2, 500), # Easy case, sizes match perfectly
61
+ (0, 1001, 1, 1000, 1001, 2, 501), # Image slightly bigger than size_max_gpf
62
+ (0.1, 999.2, 1, 500, 1000, 2, 500), # floating value for min/max
63
+ ],
64
+ )
65
+ def test_compute_cells_size(
66
+ mind, maxd, pixel_per_meter, size_max_gpf, expected_nb_pixels, expected_nb_cells, expected_cell_size
67
+ ):
68
+ nb_pixels, nb_cells, cell_size = color.compute_cells_size(mind, maxd, pixel_per_meter, size_max_gpf)
69
+ assert nb_pixels == expected_nb_pixels
70
+ assert nb_cells == expected_nb_cells
71
+ assert cell_size == expected_cell_size
72
+
73
+
74
+ @pytest.mark.geopf
75
+ def test_download_image_ok():
76
+ tif_output = os.path.join(TMPDIR, "download_image.tif")
77
+ color.download_image(
78
+ EPSG, LAYER, MINX, MINY, MAXX, MAXY, PIXEL_PER_METER, tif_output, 15, True, size_max_gpf=SIZE_MAX_IMAGE_GPF
79
+ )
80
+
81
+ # check there is no noData
82
+ raster = gdal.Open(tif_output)
83
+ assert np.any(raster.ReadAsArray()) # Check that the raster array is not empty
84
+ # TODO: Fix this test: it did not correspond to what was expected:
85
+ # - GetNoDataValue returns the value of no_data, not the number of occurrences
86
+ # - it is possible to have occasional no data values if no_data == 255. (white pixels)
87
+ # for i in range(raster.RasterCount):
88
+ # assert raster.GetRasterBand(i + 1).GetNoDataValue() is None
89
+
90
+
91
+ @pytest.mark.geopf
92
+ def test_download_image_ok_one_download():
93
+ tif_output = os.path.join(TMPDIR, "download_image.tif")
94
+ expected_pixel_size = 100 # (MAXX - MINX) * PIXEL_PER_METER
95
+ nb_request = color.download_image(
96
+ EPSG, LAYER, MINX, MINY, MAXX, MAXY, PIXEL_PER_METER, tif_output, 15, True, size_max_gpf=1000
97
+ )
98
+ assert nb_request == 1
99
+
100
+ # check there is no noData
101
+ raster = gdal.Open(tif_output)
102
+ assert raster.ReadAsArray().shape == (3, expected_pixel_size, expected_pixel_size)
103
+ assert np.any(raster.ReadAsArray()) # Check that the raster array is not empty
104
+ # TODO: Fix this test: it did not correspond to what was expected:
105
+ # - GetNoDataValue returns the value of no_data, not the number of occurrences
106
+ # - it is possible to have occasional no data values if no_data == 255. (white pixels)
107
+ # for i in range(raster.RasterCount):
108
+ # assert raster.GetRasterBand(i + 1).GetNoDataValue() is None
109
+
110
+
111
+ @pytest.mark.geopf
112
+ @pytest.mark.parametrize("pixel_per_meter, expected_pixel_size", [(0.5, 501), (1, 1001), (2, 2001)])
113
+ def test_download_image_ok_one_download_with_extra_pixel(pixel_per_meter, expected_pixel_size):
114
+ # test with 1 extra pixel to compensate the phase difference between raster and lidar
115
+ tif_output = os.path.join(TMPDIR, "download_image.tif")
116
+
117
+ maxx = MAXX + 1 / pixel_per_meter
118
+ maxy = MAXY + 1 / pixel_per_meter
119
+ nb_request = color.download_image(
120
+ EPSG, LAYER, MINX, MINY, maxx, maxy, pixel_per_meter, tif_output, 15, True, size_max_gpf=5000
121
+ )
122
+ assert nb_request == 1
123
+
124
+ # check there is no noData
125
+ raster = gdal.Open(tif_output)
126
+
127
+ assert raster.ReadAsArray().shape == (3, expected_pixel_size, expected_pixel_size)
128
+ assert np.any(raster.ReadAsArray()) # Check that the raster array is not empty
129
+ # TODO: Fix this test: it did not correspond to what was expected:
130
+ # - GetNoDataValue returns the value of no_data, not the number of occurrences
131
+ # - it is possible to have occasional no data values if no_data == 255. (white pixels)
132
+ # for i in range(raster.RasterCount):
133
+ # assert raster.GetRasterBand(i + 1).GetNoDataValue() is None
134
+
135
+
136
+ @pytest.mark.geopf
137
+ @pytest.mark.parametrize("pixel_per_meter, expected_pixel_size", [(0.5, 500), (1, 1000), (2, 2000), (5, 5000)])
138
+ def test_download_image_ok_more_downloads(pixel_per_meter, expected_pixel_size):
139
+ tif_output = os.path.join(TMPDIR, f"download_image_resolution_{pixel_per_meter}.tif")
140
+
141
+ nb_request = color.download_image(
142
+ EPSG,
143
+ LAYER,
144
+ MINX,
145
+ MINY,
146
+ MAXX,
147
+ MAXY,
148
+ pixel_per_meter,
149
+ tif_output,
150
+ 15,
151
+ True,
152
+ size_max_gpf=1000,
153
+ )
154
+ assert nb_request == max(1, 1 * pixel_per_meter * pixel_per_meter)
155
+
156
+ # check there is no noData
157
+ raster = gdal.Open(tif_output)
158
+ assert raster.ReadAsArray().shape == (3, expected_pixel_size, expected_pixel_size)
159
+ assert np.any(raster.ReadAsArray()) # Check that the raster array is not empty
160
+ # TODO: Fix this test: it did not correspond to what was expected:
161
+ # - GetNoDataValue returns the value of no_data, not the number of occurrences
162
+ # - it is possible to have occasional no data values if no_data == 255. (white pixels)
163
+ # for i in range(raster.RasterCount):
164
+ # assert raster.GetRasterBand(i + 1).GetNoDataValue() is None
165
+
166
+
167
+ @pytest.mark.geopf
168
+ @pytest.mark.parametrize(
169
+ "pixel_per_meter, expected_nb_requests, expected_pixel_size",
170
+ [(0.5, 1, 501), (1, 4, 1001), (2, 9, 2001), (4, 25, 4001)],
171
+ )
172
+ def test_download_image_ok_more_downloads_with_extra_pixel(pixel_per_meter, expected_nb_requests, expected_pixel_size):
173
+ # test with 1 extra pixel to compensate the phase difference between raster and lidar
174
+ tif_output = os.path.join(TMPDIR, "download_image.tif")
175
+ maxx = MAXX + 1 / pixel_per_meter
176
+ maxy = MAXY + 1 / pixel_per_meter
177
+ nb_request = color.download_image(
178
+ EPSG, LAYER, MINX, MINY, maxx, maxy, pixel_per_meter, tif_output, 15, True, size_max_gpf=1000
179
+ )
180
+ assert nb_request == expected_nb_requests
181
+
182
+ # check there is no noData
183
+ raster = gdal.Open(tif_output)
184
+ assert raster.ReadAsArray().shape == (3, expected_pixel_size, expected_pixel_size)
185
+ assert np.any(raster.ReadAsArray()) # Check that the raster array is not empty
186
+ # TODO: Fix this test: it did not correspond to what was expected:
187
+ # - GetNoDataValue returns the value of no_data, not the number of occurrences
188
+ # - it is possible to have occasional no data values if no_data == 255. (white pixels)
189
+ # for i in range(raster.RasterCount):
190
+ # assert raster.GetRasterBand(i + 1).GetNoDataValue() is None
191
+
192
+
193
+ @pytest.mark.geopf
194
+ def test_download_image_download_size_gpf_bigger():
195
+ tif_output = os.path.join(TMPDIR, "download_image_bigger.tif")
196
+ color.download_image(EPSG, LAYER, MINX, MINY, MAXX, MAXY, PIXEL_PER_METER, tif_output, 15, True, size_max_gpf=1005)
197
+
198
+ # check there is no noData
199
+ raster = gdal.Open(tif_output)
200
+ assert np.any(raster.ReadAsArray()) # Check that the raster array is not empty
201
+ # TODO: Fix this test: it did not correspond to what was expected:
202
+ # - GetNoDataValue returns the value of no_data, not the number of occurrences
203
+ # - it is possible to have occasional no data values if no_data == 255. (white pixels)
204
+ # for i in range(raster.RasterCount):
205
+ # assert raster.GetRasterBand(i + 1).GetNoDataValue() is None
206
+
207
+
208
+ @pytest.mark.geopf
209
+ def test_download_image_download_size_gpf_size_almost_ok():
210
+ tif_output = os.path.join(TMPDIR, "download_image_bigger.tif")
211
+ nb_request = color.download_image(
212
+ EPSG, LAYER, MINX, MINY, MAXX, MAXY, PIXEL_PER_METER, tif_output, 15, True, size_max_gpf=99
213
+ )
214
+ assert nb_request == 4
215
+
216
+ # check there is no noData
217
+ raster = gdal.Open(tif_output)
218
+ assert np.any(raster.ReadAsArray()) # Check that the raster array is not empty
219
+ # TODO: Fix this test: it did not correspond to what was expected:
220
+ # - GetNoDataValue returns the value of no_data, not the number of occurrences
221
+ # - it is possible to have occasional no data values if no_data == 255. (white pixels)
222
+ # for i in range(raster.RasterCount):
223
+ # assert raster.GetRasterBand(i + 1).GetNoDataValue() is None
224
+
225
+
226
+ @pytest.mark.geopf
227
+ @pytest.mark.parametrize("size_block", [100, 50, 25])
228
+ def test_download_image_compare_one_and_block(size_block):
229
+ tif_output_one = os.path.join(TMPDIR, "download_image_one.tif")
230
+ nb_request = color.download_image(
231
+ EPSG, LAYER, MINX, MINY, MAXX, MAXY, PIXEL_PER_METER, tif_output_one, 15, True, 100
232
+ )
233
+ assert nb_request == 1
234
+
235
+ tif_output_blocks = os.path.join(TMPDIR, "download_image_block.tif")
236
+ nb_request = color.download_image(
237
+ EPSG, LAYER, MINX, MINY, MAXX, MAXY, PIXEL_PER_METER, tif_output_blocks, 100, True, size_block
238
+ )
239
+ assert nb_request == math.pow(1000 * PIXEL_PER_METER / size_block, 2)
240
+
241
+ # due to GeoPlateforme interpolation, images could have small differences
242
+ # check images are almost the same
243
+
244
+ raster_one = gdal.Open(tif_output_one)
245
+ raster_blocks = gdal.Open(tif_output_blocks)
246
+
247
+ r_one = np.array(raster_one.ReadAsArray())
248
+ r_blocks = np.array(raster_blocks.ReadAsArray())
249
+ assert r_one.size == r_blocks.size
250
+ r_diff = r_one - r_blocks
251
+
252
+ # images should be same as 5/1000 (tolerance)
253
+ assert np.count_nonzero(r_diff) < 0.005 * ((MAXX - MINX) * (MAXY - MINY) * math.pow(PIXEL_PER_METER, 2))
254
+
255
+ # differences should be 1 or 255 (eq a variation of one on one RVB canal)
256
+ r_diff_nonzero = np.nonzero(r_diff)
257
+ for i in range(0, r_diff_nonzero[0].size):
258
+ diff = r_diff[r_diff_nonzero[0][i], r_diff_nonzero[1][i], r_diff_nonzero[2][i]]
259
+ assert diff == 1 or diff == 255
260
+
261
+
262
+ @pytest.mark.parametrize(
263
+ "minx, maxx, pixel_per_meter, expected_minx, expected_maxx",
264
+ [(500, 1000, 5, 499.8, 1000.2), [1.1, 999.9, 5, 1, 1000]],
265
+ )
266
+ def test_match_min_max_with_pixel_size(minx, maxx, pixel_per_meter, expected_minx, expected_maxx):
267
+ out_minx, out_maxx = color.match_min_max_with_pixel_size(minx, maxx, pixel_per_meter)
268
+ assert (out_minx, out_maxx) == (expected_minx, expected_maxx)
269
+
270
+
271
+ @pytest.mark.geopf
272
+ def test_color_narrow_cloud():
273
+ input_path = os.path.join(TEST_PATH, "data/test_data_0436_6384_LA93_IGN69_single_point.laz")
274
+ output_path = os.path.join(TMPDIR, "color_narrow_cloud_test_data_0436_6384_LA93_IGN69_single_point.colorized.laz")
275
+ # Test that clouds that are smaller in width or height to 20cm are still colorized without an error.
276
+ color.color(input_path, output_path, EPSG)
277
+ with laspy.open(output_path, "r") as las:
278
+ las_data = las.read()
279
+ # Check all points are colored
280
+ assert not np.any(las_data.red == 0)
281
+ assert not np.any(las_data.green == 0)
282
+ assert not np.any(las_data.blue == 0)
283
+ assert not np.any(las_data.nir == 0)
284
+
285
+
286
+ @pytest.mark.geopf
287
+ def test_color_standard_cloud():
288
+ input_path = os.path.join(TEST_PATH, "data/test_data_77055_627760_LA93_IGN69.laz")
289
+ output_path = os.path.join(TMPDIR, "color_standard_cloud_test_data_77055_627760_LA93_IGN69.colorized.laz")
290
+ # Test that clouds that are smaller in width or height to 20cm are still colorized without an error.
291
+ color.color(input_path, output_path, EPSG)
292
+ with laspy.open(output_path, "r") as las:
293
+ las_data = las.read()
294
+ # Check all points are colored
295
+ las_rgb_missing = (las_data.red == 0) & (las_data.green == 0) & (las_data.blue == 0)
296
+ assert not np.any(las_rgb_missing), f"Should be no missing RGB value, got {np.count_nonzero(las_rgb_missing)} "
297
+ assert not np.any(las_data.nir == 0)
298
+
299
+
300
+ @pytest.mark.geopf
301
+ def test_color_epsg_2975_forced():
302
+ input_path = os.path.join(TEST_PATH, "data/sample_lareunion_epsg2975.laz")
303
+ output_path = os.path.join(TMPDIR, "color_epsg_2975_forced_sample_lareunion_epsg2975.colorized.laz")
304
+
305
+ color.color(input_path, output_path, 2975)
306
+
307
+
308
+ def test_is_image_white_true():
309
+ input_path = os.path.join(TEST_PATH, "data/image/white.tif")
310
+ assert color.is_image_white(input_path), "This image should be detected as white"
311
+
312
+
313
+ def test_is_image_white_false():
314
+ input_path = os.path.join(TEST_PATH, "data/image/colored.tif")
315
+ assert not color.is_image_white(input_path), "This image should NOT be detected as white"
316
+
317
+
318
+ @pytest.mark.geopf
319
+ def test_color_raise_for_white_image():
320
+ input_path = os.path.join(TEST_PATH, "data/sample_lareunion_epsg2975.laz")
321
+ output_path = os.path.join(TMPDIR, "sample_lareunion_epsg2975.colorized.white.laz")
322
+
323
+ with pytest.raises(ValueError) as excinfo:
324
+ color.color(input_path, output_path, check_images=True)
325
+
326
+ assert "Downloaded image is white" in str(excinfo.value)
327
+
328
+
329
+ @pytest.mark.geopf
330
+ def test_color_epsg_2975_detected():
331
+ input_path = os.path.join(TEST_PATH, "data/sample_lareunion_epsg2975.laz")
332
+ output_path = os.path.join(TMPDIR, "color_epsg_2975_detected_sample_lareunion_epsg2975.colorized.laz")
333
+ # Test that clouds that are smaller in width or height to 20cm are still clorized without an error.
334
+ color.color(input_path, output_path)
335
+
336
+
337
+ @pytest.mark.geopf
338
+ def test_download_image_raise1():
339
+ retry_download = color.retry(times=2, delay=5, factor=2)(color.download_image_from_geoplateforme)
340
+ with pytest.raises(requests.exceptions.HTTPError):
341
+ retry_download(EPSG, "MAUVAISE_COUCHE", MINX, MINY, MAXX, MAXY, 100, 100, OUTPUT_FILE, 15, True)
342
+
343
+
344
+ @pytest.mark.geopf
345
+ def test_download_image_raise2():
346
+ retry_download = color.retry(times=2, delay=5, factor=2)(color.download_image_from_geoplateforme)
347
+ with pytest.raises(requests.exceptions.HTTPError):
348
+ retry_download("9001", LAYER, MINX, MINY, MAXX, MAXY, 100, 100, OUTPUT_FILE, 15, True)
349
+
350
+
351
+ def test_retry_on_server_error():
352
+ with requests_mock.Mocker() as mock:
353
+ mock.get(requests_mock.ANY, status_code=502, reason="Bad Gateway")
354
+ with pytest.raises(requests.exceptions.HTTPError):
355
+ retry_download = color.retry(times=2, delay=1, factor=2)(color.download_image_from_geoplateforme)
356
+ retry_download(EPSG, LAYER, MINX, MINY, MAXX, MAXY, 100, 100, OUTPUT_FILE, 15, True)
357
+ history = mock.request_history
358
+ assert len(history) == 3
359
+
360
+
361
+ def test_retry_on_connection_error():
362
+ with requests_mock.Mocker() as mock:
363
+ mock.get(requests_mock.ANY, exc=requests.exceptions.ConnectionError)
364
+ with pytest.raises(requests.exceptions.ConnectionError):
365
+ retry_download = color.retry(times=2, delay=1, factor=2)(color.download_image_from_geoplateforme)
366
+ retry_download(EPSG, LAYER, MINX, MINY, MAXX, MAXY, 100, 100, OUTPUT_FILE, 15, True)
367
+
368
+ history = mock.request_history
369
+ assert len(history) == 3
370
+
371
+
372
+ def test_retry_param():
373
+ # Here you can change retry params
374
+ @color.retry(times=9, delay=5, factor=2, debug=True)
375
+ def raise_server_error():
376
+ raise requests.exceptions.HTTPError("Server Error")
377
+
378
+ with pytest.raises(requests.exceptions.HTTPError):
379
+ raise_server_error()
@@ -1,8 +1,9 @@
1
+ import logging
2
+ import os
1
3
  import tempfile
2
- import pdal
4
+
3
5
  import numpy
4
- import os
5
- import logging
6
+ import pdal
6
7
  import pytest
7
8
 
8
9
  from pdaltools import las_remove_dimensions
@@ -33,7 +34,6 @@ def append_dimension(input_las: str, output_las: str):
33
34
 
34
35
 
35
36
  def test_remove_all_dimension():
36
-
37
37
  # get initial data
38
38
  points_ini = get_points(ini_las)
39
39
 
@@ -47,7 +47,6 @@ def test_remove_all_dimension():
47
47
 
48
48
 
49
49
  def test_remove_one_dimension():
50
-
51
50
  # get initial data
52
51
  points_ini = get_points(ini_las)
53
52
 
@@ -1,104 +0,0 @@
1
- import argparse
2
-
3
- import geopandas
4
- import numpy as np
5
- import pdal
6
-
7
- from pdaltools.las_info import get_writer_parameters_from_reader_metadata, las_info_metadata, get_bounds_from_header_info
8
-
9
-
10
- def extract_points_from_geo(input_geo: str):
11
- file = open(input_geo)
12
- df = geopandas.read_file(file)
13
- return df.get_coordinates(ignore_index=True, include_z=True)
14
-
15
- def point_in_bound(bound_minx, bound_maxx, bound_miny, bound_maxy, pt_x, pt_y):
16
- return pt_x >= bound_minx and pt_x <= bound_maxx and pt_y >= bound_miny and pt_y <= bound_maxy
17
-
18
- def add_points_in_las(input_las: str, input_geo: str, output_las: str, inside_las: bool, values_dimensions: {}):
19
- points_geo = extract_points_from_geo(input_geo)
20
- pipeline = pdal.Pipeline() | pdal.Reader.las(input_las)
21
- pipeline.execute()
22
- points_las = pipeline.arrays[0]
23
- dimensions = list(points_las.dtype.fields.keys())
24
-
25
- if inside_las:
26
- mtd = las_info_metadata(input_las)
27
- bound_minx, bound_maxx, bound_miny, bound_maxy = get_bounds_from_header_info(mtd)
28
-
29
- for i in points_geo.index:
30
- if inside_las :
31
- if not point_in_bound(bound_minx, bound_maxx, bound_miny, bound_maxy, points_geo["x"][i], points_geo["y"][i]):
32
- continue
33
- pt_las = np.empty(1, dtype=points_las.dtype)
34
- pt_las[0][dimensions.index("X")] = points_geo["x"][i]
35
- pt_las[0][dimensions.index("Y")] = points_geo["y"][i]
36
- pt_las[0][dimensions.index("Z")] = points_geo["z"][i]
37
- for val in values_dimensions:
38
- pt_las[0][dimensions.index(val)] = values_dimensions[val]
39
- points_las = np.append(points_las, pt_las, axis=0)
40
-
41
- params = get_writer_parameters_from_reader_metadata(pipeline.metadata)
42
- pipeline_end = pdal.Pipeline(arrays=[points_las])
43
- pipeline_end |= pdal.Writer.las(output_las, forward="all", **params)
44
- pipeline_end.execute()
45
-
46
-
47
- def parse_args():
48
- parser = argparse.ArgumentParser("Add points from geometry file in a las/laz file.")
49
- parser.add_argument("--input_file", "-i", type=str, help="Las/Laz input file")
50
- parser.add_argument("--output_file", "-o", type=str, help="Las/Laz output file.")
51
- parser.add_argument("--input_geo_file", "-g", type=str, help="Geometry input file.")
52
- parser.add_argument("--inside_las", "-l", type=str, help="Keep points only inside the las boundary.")
53
- parser.add_argument(
54
- "--dimensions",
55
- "-d",
56
- metavar="KEY=VALUE",
57
- nargs="+",
58
- help="Set a number of key-value pairs corresponding to value "
59
- "needed in points added in the output las; key should be included in the input las.",
60
- )
61
- return parser.parse_args()
62
-
63
-
64
- def is_nature(value, nature):
65
- if value is None:
66
- return False
67
- try:
68
- nature(value)
69
- return True
70
- except:
71
- return False
72
-
73
-
74
- def parse_var(s):
75
- items = s.split("=")
76
- key = items[0].strip()
77
- if len(items) > 1:
78
- value = "=".join(items[1:])
79
- if is_nature(value, int):
80
- value = int(value)
81
- elif is_nature(value, float):
82
- value = float(value)
83
- return (key, value)
84
-
85
-
86
- def parse_vars(items):
87
- d = {}
88
- if items:
89
- for item in items:
90
- key, value = parse_var(item)
91
- d[key] = value
92
- return d
93
-
94
-
95
- if __name__ == "__main__":
96
- args = parse_args()
97
- added_dimensions = parse_vars(args.dimensions)
98
- add_points_in_las(
99
- input_las=args.input_file,
100
- input_geo=args.input_geo_file,
101
- output_las=args.input_file if args.output_file is None else args.output_file,
102
- inside_las=args.inside_las,
103
- values_dimensions=added_dimensions,
104
- )
@@ -1,72 +0,0 @@
1
- import pytest
2
- import os
3
- import random as rand
4
- import tempfile
5
- import math
6
-
7
- import pdal
8
-
9
- import geopandas as gpd
10
- from shapely.geometry import Point
11
-
12
- from pdaltools import add_points_in_las
13
-
14
- numeric_precision = 4
15
-
16
- TEST_PATH = os.path.dirname(os.path.abspath(__file__))
17
- INPUT_DIR = os.path.join(TEST_PATH, "data")
18
- INPUT_LAS = os.path.join(INPUT_DIR, "test_data_77055_627760_LA93_IGN69.laz")
19
-
20
- Xmin = 770575
21
- Ymin = 6277575
22
- Zmin = 20
23
- Size = 20
24
-
25
- def distance3D(pt_geo, pt_las):
26
- return round(
27
- math.sqrt((pt_geo.x - pt_las['X']) ** 2 + (pt_geo.y - pt_las['Y']) ** 2 + (pt_geo.z - pt_las['Z']) ** 2),
28
- numeric_precision,
29
- )
30
-
31
- def add_point_in_las(pt_geo, inside_las):
32
- geom = [pt_geo]
33
- series = gpd.GeoSeries(geom, crs="2154")
34
-
35
- with tempfile.NamedTemporaryFile(suffix="_geom_tmp.las") as out_las_file:
36
- with tempfile.NamedTemporaryFile(suffix="_geom_tmp.geojson") as geom_file:
37
- series.to_file(geom_file.name)
38
-
39
- added_dimensions = {"Classification":64, "Intensity":1.}
40
- add_points_in_las.add_points_in_las(INPUT_LAS, geom_file.name, out_las_file.name, inside_las, added_dimensions)
41
-
42
- pipeline = pdal.Pipeline() | pdal.Reader.las(out_las_file.name)
43
- pipeline.execute()
44
- points_las = pipeline.arrays[0]
45
- points_las = [e for e in points_las if all(e[val] == added_dimensions[val] for val in added_dimensions)]
46
- return points_las
47
-
48
- def test_add_point_inside_las():
49
- X = Xmin + rand.uniform(0, 1) * Size
50
- Y = Ymin + rand.uniform(0, 1) * Size
51
- Z = Zmin + rand.uniform(0, 1) * 10
52
- pt_geo = Point(X, Y, Z)
53
- points_las = add_point_in_las(pt_geo=pt_geo, inside_las=True)
54
- assert len(points_las) == 1
55
- assert distance3D(pt_geo, points_las[0]) < 1 / numeric_precision
56
-
57
- def test_add_point_outside_las_no_control():
58
- X = Xmin + rand.uniform(2, 3) * Size
59
- Y = Ymin + rand.uniform(0, 1) * Size
60
- Z = Zmin + rand.uniform(0, 1) * 10
61
- pt_geo = Point(X, Y, Z)
62
- points_las = add_point_in_las(pt_geo=pt_geo, inside_las=False)
63
- assert len(points_las) == 1
64
- assert distance3D(pt_geo, points_las[0]) < 1 / numeric_precision
65
-
66
- def test_add_point_outside_las_with_control():
67
- X = Xmin + rand.uniform(2, 3) * Size
68
- Y = Ymin + rand.uniform(2, 3) * Size
69
- Z = Zmin + rand.uniform(0, 1) * 10
70
- pt_geo = Point(X, Y, Z)
71
- points_las = add_point_in_las(pt_geo=pt_geo, inside_las=True)
72
- assert len(points_las) == 0
@@ -1,148 +0,0 @@
1
- import os
2
- import shutil
3
- from pathlib import Path
4
-
5
- import pytest
6
- import requests
7
- import requests_mock
8
-
9
- from pdaltools import color
10
-
11
- cwd = os.getcwd()
12
-
13
- TEST_PATH = os.path.dirname(os.path.abspath(__file__))
14
- TMPDIR = os.path.join(TEST_PATH, "tmp")
15
-
16
-
17
- def setup_module(module):
18
- try:
19
- shutil.rmtree(TMPDIR)
20
- except FileNotFoundError:
21
- pass
22
- os.mkdir(TMPDIR)
23
-
24
-
25
- INPUT_PATH = os.path.join(TEST_PATH, "data/test_noepsg_043500_629205_IGN69.laz")
26
-
27
- OUTPUT_FILE = os.path.join(TMPDIR, "Semis_2021_0435_6292_LA93_IGN69.colorized.las")
28
-
29
-
30
- @pytest.mark.geopf
31
- def test_epsg_fail():
32
- with pytest.raises(
33
- RuntimeError,
34
- match="EPSG could not be inferred from metadata: No 'srs' key in metadata.",
35
- ):
36
- color.color(INPUT_PATH, OUTPUT_FILE, "", 0.1, 15)
37
-
38
-
39
- epsg = "2154"
40
- layer = "ORTHOIMAGERY.ORTHOPHOTOS"
41
- minx = 435000
42
- miny = 6291000
43
- maxx = 436000
44
- maxy = 6292000
45
- pixel_per_meter = 0.1
46
-
47
-
48
- @pytest.mark.geopf
49
- def test_color_and_keeping_orthoimages():
50
- tmp_ortho, tmp_ortho_irc = color.color(INPUT_PATH, OUTPUT_FILE, epsg, check_images=True)
51
- assert Path(tmp_ortho.name).exists()
52
- assert Path(tmp_ortho_irc.name).exists()
53
-
54
-
55
- @pytest.mark.geopf
56
- def test_color_narrow_cloud():
57
- input_path = os.path.join(TEST_PATH, "data/test_data_0436_6384_LA93_IGN69_single_point.laz")
58
- output_path = os.path.join(TMPDIR, "test_data_0436_6384_LA93_IGN69_single_point.colorized.laz")
59
- # Test that clouds that are smaller in width or height to 20cm are still clorized without an error.
60
- color.color(input_path, output_path, epsg)
61
-
62
-
63
- @pytest.mark.geopf
64
- def test_download_image_ok():
65
- tif_output = os.path.join(TMPDIR, "download_image.tif")
66
- color.download_image_from_geoplateforme(epsg, layer, minx, miny, maxx, maxy, pixel_per_meter, tif_output, 15, True)
67
-
68
-
69
- @pytest.mark.geopf
70
- def test_color_epsg_2975_forced():
71
- input_path = os.path.join(TEST_PATH, "data/sample_lareunion_epsg2975.laz")
72
- output_path = os.path.join(TMPDIR, "sample_lareunion_epsg2975.colorized.laz")
73
- # Test that clouds that are smaller in width or height to 20cm are still clorized without an error.
74
- color.color(input_path, output_path, 2975)
75
-
76
-
77
- def test_is_image_white_true():
78
- input_path = os.path.join(TEST_PATH, "data/image/white.tif")
79
- assert color.is_image_white(input_path), "This image should be detected as white"
80
-
81
-
82
- def test_is_image_white_false():
83
- input_path = os.path.join(TEST_PATH, "data/image/colored.tif")
84
- assert not color.is_image_white(input_path), "This image should NOT be detected as white"
85
-
86
-
87
- @pytest.mark.geopf
88
- def test_color_raise_for_white_image():
89
- input_path = os.path.join(TEST_PATH, "data/sample_lareunion_epsg2975.laz")
90
- output_path = os.path.join(TMPDIR, "sample_lareunion_epsg2975.colorized.white.laz")
91
-
92
- with pytest.raises(ValueError) as excinfo:
93
- color.color(input_path, output_path, check_images=True)
94
-
95
- assert "Downloaded image is white" in str(excinfo.value)
96
-
97
-
98
- @pytest.mark.geopf
99
- def test_color_epsg_2975_detected():
100
- input_path = os.path.join(TEST_PATH, "data/sample_lareunion_epsg2975.laz")
101
- output_path = os.path.join(TMPDIR, "sample_lareunion_epsg2975.colorized.laz")
102
- # Test that clouds that are smaller in width or height to 20cm are still clorized without an error.
103
- color.color(input_path, output_path)
104
-
105
-
106
- @pytest.mark.geopf
107
- def test_download_image_raise1():
108
- retry_download = color.retry(2, 5)(color.download_image_from_geoplateforme)
109
- with pytest.raises(requests.exceptions.HTTPError):
110
- retry_download(epsg, "MAUVAISE_COUCHE", minx, miny, maxx, maxy, pixel_per_meter, OUTPUT_FILE, 15, True)
111
-
112
-
113
- @pytest.mark.geopf
114
- def test_download_image_raise2():
115
- retry_download = color.retry(2, 5)(color.download_image_from_geoplateforme)
116
- with pytest.raises(requests.exceptions.HTTPError):
117
- retry_download("9001", layer, minx, miny, maxx, maxy, pixel_per_meter, OUTPUT_FILE, 15, True)
118
-
119
-
120
- def test_retry_on_server_error():
121
- with requests_mock.Mocker() as mock:
122
- mock.get(requests_mock.ANY, status_code=502, reason="Bad Gateway")
123
- with pytest.raises(requests.exceptions.HTTPError):
124
- retry_download = color.retry(2, 1, 2)(color.download_image_from_geoplateforme)
125
- retry_download(epsg, layer, minx, miny, maxx, maxy, pixel_per_meter, OUTPUT_FILE, 15, True)
126
- history = mock.request_history
127
- assert len(history) == 3
128
-
129
-
130
- def test_retry_on_connection_error():
131
- with requests_mock.Mocker() as mock:
132
- mock.get(requests_mock.ANY, exc=requests.exceptions.ConnectionError)
133
- with pytest.raises(requests.exceptions.ConnectionError):
134
- retry_download = color.retry(2, 1)(color.download_image_from_geoplateforme)
135
- retry_download(epsg, layer, minx, miny, maxx, maxy, pixel_per_meter, OUTPUT_FILE, 15, True)
136
-
137
- history = mock.request_history
138
- assert len(history) == 3
139
-
140
-
141
- def test_retry_param():
142
- # Here you can change retry params
143
- @color.retry(times=9, delay=5, factor=2, debug=True)
144
- def raise_server_error():
145
- raise requests.exceptions.HTTPError("Server Error")
146
-
147
- with pytest.raises(requests.exceptions.HTTPError):
148
- raise_server_error()