cars 1.0.0a3__cp313-cp313-win_amd64.whl → 1.0.0a4__cp313-cp313-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of cars might be problematic. Click here for more details.

Files changed (139) hide show
  1. cars/__init__.py +3 -3
  2. cars/applications/__init__.py +0 -3
  3. cars/applications/application_template.py +20 -0
  4. cars/applications/auxiliary_filling/abstract_auxiliary_filling_app.py +12 -2
  5. cars/applications/auxiliary_filling/auxiliary_filling_algo.py +2 -2
  6. cars/applications/auxiliary_filling/auxiliary_filling_from_sensors_app.py +80 -36
  7. cars/applications/dem_generation/dem_generation_algo.py +1 -1
  8. cars/applications/dem_generation/dem_generation_wrappers.py +23 -57
  9. cars/applications/dem_generation/dichotomic_generation_app.py +3 -3
  10. cars/applications/dem_generation/rasterization_app.py +100 -41
  11. cars/applications/dense_match_filling/__init__.py +1 -1
  12. cars/applications/dense_match_filling/abstract_dense_match_filling_app.py +2 -15
  13. cars/applications/dense_match_filling/fill_disp_algo.py +32 -373
  14. cars/applications/dense_match_filling/fill_disp_wrappers.py +0 -343
  15. cars/applications/dense_match_filling/zero_padding_app.py +10 -5
  16. cars/applications/dense_matching/abstract_dense_matching_app.py +2 -1
  17. cars/applications/dense_matching/census_mccnn_sgm_app.py +38 -39
  18. cars/applications/dense_matching/cpp/dense_matching_cpp.cp313-win_amd64.dll.a +0 -0
  19. cars/applications/dense_matching/cpp/dense_matching_cpp.cp313-win_amd64.pyd +0 -0
  20. cars/applications/dense_matching/dense_matching_algo.py +48 -14
  21. cars/applications/dense_matching/dense_matching_wrappers.py +11 -3
  22. cars/applications/dense_matching/disparity_grid_algo.py +84 -62
  23. cars/applications/dense_matching/loaders/pandora_loader.py +91 -33
  24. cars/applications/dsm_filling/border_interpolation_app.py +1 -7
  25. cars/applications/dsm_filling/bulldozer_filling_app.py +2 -8
  26. cars/applications/dsm_filling/exogenous_filling_app.py +4 -9
  27. cars/applications/grid_generation/abstract_grid_generation_app.py +1 -1
  28. cars/applications/grid_generation/epipolar_grid_generation_app.py +4 -2
  29. cars/applications/grid_generation/grid_correction_app.py +4 -1
  30. cars/applications/grid_generation/grid_generation_algo.py +7 -2
  31. cars/applications/ground_truth_reprojection/abstract_ground_truth_reprojection_app.py +1 -1
  32. cars/applications/ground_truth_reprojection/direct_localization_app.py +2 -2
  33. cars/applications/ground_truth_reprojection/ground_truth_reprojection_algo.py +2 -1
  34. cars/applications/point_cloud_fusion/abstract_pc_fusion_app.py +0 -155
  35. cars/applications/point_cloud_fusion/mapping_to_terrain_tiles_app.py +0 -658
  36. cars/applications/point_cloud_fusion/pc_fusion_algo.py +0 -1339
  37. cars/applications/point_cloud_fusion/pc_fusion_wrappers.py +0 -869
  38. cars/applications/point_cloud_outlier_removal/abstract_outlier_removal_app.py +2 -1
  39. cars/applications/point_cloud_outlier_removal/outlier_removal_algo.py +9 -8
  40. cars/applications/point_cloud_outlier_removal/small_components_app.py +96 -267
  41. cars/applications/point_cloud_outlier_removal/statistical_app.py +116 -275
  42. cars/applications/rasterization/abstract_pc_rasterization_app.py +1 -1
  43. cars/applications/rasterization/rasterization_algo.py +18 -6
  44. cars/applications/rasterization/rasterization_wrappers.py +2 -1
  45. cars/applications/rasterization/simple_gaussian_app.py +60 -113
  46. cars/applications/resampling/abstract_resampling_app.py +1 -1
  47. cars/applications/resampling/bicubic_resampling_app.py +3 -1
  48. cars/applications/resampling/resampling_algo.py +16 -4
  49. cars/applications/resampling/resampling_wrappers.py +3 -1
  50. cars/applications/sparse_matching/abstract_sparse_matching_app.py +1 -1
  51. cars/applications/sparse_matching/sift_app.py +3 -3
  52. cars/applications/sparse_matching/sparse_matching_algo.py +3 -2
  53. cars/applications/sparse_matching/sparse_matching_wrappers.py +1 -1
  54. cars/applications/triangulation/abstract_triangulation_app.py +1 -1
  55. cars/applications/triangulation/line_of_sight_intersection_app.py +13 -11
  56. cars/applications/triangulation/pc_transform.py +552 -0
  57. cars/applications/triangulation/triangulation_algo.py +6 -4
  58. cars/applications/triangulation/triangulation_wrappers.py +1 -0
  59. cars/bundleadjustment.py +6 -6
  60. cars/cars.py +11 -9
  61. cars/core/cars_logging.py +80 -49
  62. cars/core/constants.py +0 -1
  63. cars/core/datasets.py +5 -2
  64. cars/core/geometry/abstract_geometry.py +256 -25
  65. cars/core/geometry/shareloc_geometry.py +110 -82
  66. cars/core/inputs.py +57 -19
  67. cars/core/outputs.py +1 -1
  68. cars/core/preprocessing.py +17 -3
  69. cars/core/projection.py +9 -6
  70. cars/core/tiling.py +10 -3
  71. cars/data_structures/cars_dataset.py +5 -5
  72. cars/data_structures/corresponding_tiles_tools.py +0 -103
  73. cars/data_structures/format_transformation.py +4 -1
  74. cars/devibrate.py +6 -3
  75. cars/extractroi.py +20 -21
  76. cars/orchestrator/cluster/abstract_cluster.py +15 -5
  77. cars/orchestrator/cluster/abstract_dask_cluster.py +6 -2
  78. cars/orchestrator/cluster/dask_jobqueue_utils.py +1 -1
  79. cars/orchestrator/cluster/log_wrapper.py +148 -21
  80. cars/orchestrator/cluster/mp_cluster/multiprocessing_cluster.py +11 -3
  81. cars/orchestrator/cluster/mp_cluster/multiprocessing_profiler.py +2 -2
  82. cars/orchestrator/cluster/pbs_dask_cluster.py +1 -1
  83. cars/orchestrator/cluster/sequential_cluster.py +5 -4
  84. cars/orchestrator/cluster/slurm_dask_cluster.py +1 -1
  85. cars/orchestrator/orchestrator.py +14 -3
  86. cars/orchestrator/registry/id_generator.py +1 -0
  87. cars/orchestrator/registry/saver_registry.py +2 -2
  88. cars/pipelines/conf_resolution/conf_final_resolution.json +5 -3
  89. cars/pipelines/default/default_pipeline.py +462 -1073
  90. cars/pipelines/parameters/advanced_parameters.py +74 -64
  91. cars/pipelines/parameters/advanced_parameters_constants.py +2 -5
  92. cars/pipelines/parameters/application_parameters.py +71 -0
  93. cars/pipelines/parameters/depth_map_inputs.py +0 -314
  94. cars/pipelines/parameters/dsm_inputs.py +40 -4
  95. cars/pipelines/parameters/output_parameters.py +2 -2
  96. cars/pipelines/parameters/sensor_inputs.py +30 -75
  97. cars/pipelines/parameters/sensor_inputs_constants.py +0 -2
  98. cars/pipelines/parameters/sensor_loaders/__init__.py +4 -3
  99. cars/pipelines/parameters/sensor_loaders/basic_classif_loader.py +106 -0
  100. cars/pipelines/parameters/sensor_loaders/{basic_sensor_loader.py → basic_image_loader.py} +16 -22
  101. cars/pipelines/parameters/sensor_loaders/pivot_classif_loader.py +121 -0
  102. cars/pipelines/parameters/sensor_loaders/{pivot_sensor_loader.py → pivot_image_loader.py} +10 -21
  103. cars/pipelines/parameters/sensor_loaders/sensor_loader.py +4 -6
  104. cars/pipelines/parameters/sensor_loaders/sensor_loader_template.py +1 -3
  105. cars/pipelines/pipeline_template.py +1 -3
  106. cars/pipelines/unit/unit_pipeline.py +527 -1016
  107. cars/starter.py +4 -3
  108. cars-1.0.0a4.dist-info/DELVEWHEEL +2 -0
  109. {cars-1.0.0a3.dist-info → cars-1.0.0a4.dist-info}/METADATA +135 -53
  110. {cars-1.0.0a3.dist-info → cars-1.0.0a4.dist-info}/RECORD +115 -131
  111. cars.libs/libgcc_s_seh-1-b2494fcbd4d80cf2c98fdd5261f6d850.dll +0 -0
  112. cars.libs/libstdc++-6-e9b0d12ae0e9555bbae55e8dfd08c3f7.dll +0 -0
  113. cars.libs/libwinpthread-1-7882d1b093714ccdfaf4e0789a817792.dll +0 -0
  114. cars/applications/dense_match_filling/cpp/__init__.py +0 -0
  115. cars/applications/dense_match_filling/cpp/dense_match_filling_cpp.cp313-win_amd64.dll.a +0 -0
  116. cars/applications/dense_match_filling/cpp/dense_match_filling_cpp.cp313-win_amd64.pyd +0 -0
  117. cars/applications/dense_match_filling/cpp/dense_match_filling_cpp.py +0 -72
  118. cars/applications/dense_match_filling/cpp/includes/dense_match_filling.hpp +0 -46
  119. cars/applications/dense_match_filling/cpp/meson.build +0 -9
  120. cars/applications/dense_match_filling/cpp/src/bindings.cpp +0 -11
  121. cars/applications/dense_match_filling/cpp/src/dense_match_filling.cpp +0 -142
  122. cars/applications/dense_match_filling/plane_app.py +0 -556
  123. cars/applications/hole_detection/__init__.py +0 -30
  124. cars/applications/hole_detection/abstract_hole_detection_app.py +0 -125
  125. cars/applications/hole_detection/cloud_to_bbox_app.py +0 -346
  126. cars/applications/hole_detection/hole_detection_algo.py +0 -144
  127. cars/applications/hole_detection/hole_detection_wrappers.py +0 -53
  128. cars/applications/point_cloud_denoising/__init__.py +0 -29
  129. cars/applications/point_cloud_denoising/abstract_pc_denoising_app.py +0 -273
  130. cars/applications/point_cloud_fusion/__init__.py +0 -30
  131. cars/applications/point_cloud_fusion/cloud_fusion_constants.py +0 -39
  132. cars/applications/sparse_matching/pandora_sparse_matching_app.py +0 -0
  133. cars/pipelines/parameters/depth_map_inputs_constants.py +0 -25
  134. cars-1.0.0a3.dist-info/DELVEWHEEL +0 -2
  135. cars.libs/libgcc_s_seh-1-ca70890bbc5723b6d0ea31e9c9cded2b.dll +0 -0
  136. cars.libs/libstdc++-6-00ee19f73d5122a1277c137b1c218401.dll +0 -0
  137. cars.libs/libwinpthread-1-f5042e8e3d21edce20c1bc99445f551b.dll +0 -0
  138. {cars-1.0.0a3.dist-info → cars-1.0.0a4.dist-info}/WHEEL +0 -0
  139. {cars-1.0.0a3.dist-info → cars-1.0.0a4.dist-info}/entry_points.txt +0 -0
@@ -1,1339 +0,0 @@
1
- #!/usr/bin/env python
2
- # coding: utf8
3
- #
4
- # Copyright (c) 2020 Centre National d'Etudes Spatiales (CNES).
5
- #
6
- # This file is part of CARS
7
- # (see https://github.com/CNES/cars).
8
- #
9
- # Licensed under the Apache License, Version 2.0 (the "License");
10
- # you may not use this file except in compliance with the License.
11
- # You may obtain a copy of the License at
12
- #
13
- # http://www.apache.org/licenses/LICENSE-2.0
14
- #
15
- # Unless required by applicable law or agreed to in writing, software
16
- # distributed under the License is distributed on an "AS IS" BASIS,
17
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
18
- # See the License for the specific language governing permissions and
19
- # limitations under the License.
20
- #
21
- # pylint: disable=too-many-lines
22
- """
23
- This module is responsible for the transition between triangulation and
24
- rasterization steps
25
- """
26
-
27
- # pylint: disable=C0302
28
-
29
- # Standard imports
30
- import logging
31
- from typing import List, Tuple
32
-
33
- # Third party imports
34
- import numpy as np
35
- import pandas
36
- import rasterio as rio
37
- import xarray as xr
38
-
39
- import cars.orchestrator.orchestrator as ocht
40
- from cars.applications.dense_matching import dense_matching_wrappers
41
- from cars.applications.point_cloud_fusion import pc_fusion_wrappers as pc_wrap
42
-
43
- # CARS imports
44
- from cars.core import constants as cst
45
- from cars.core import inputs, preprocessing, projection, tiling
46
- from cars.data_structures import cars_dataset, cars_dict
47
-
48
-
49
- def create_combined_cloud( # noqa: C901
50
- cloud_list: List[xr.Dataset] or List[pandas.DataFrame],
51
- cloud_ids: List[int],
52
- dsm_epsg: int,
53
- xmin: float = None,
54
- xmax: float = None,
55
- ymin: int = None,
56
- ymax: int = None,
57
- margin: float = 0,
58
- with_coords: bool = False,
59
- ) -> Tuple[pandas.DataFrame, int]:
60
- """
61
- Combine a list of clouds from sparse or dense matching
62
- into a pandas dataframe.
63
- The detailed cases for each cloud type are in the derived function
64
- create_combined_sparse_cloud and create_combined_dense_cloud.
65
-
66
- :param cloud_list: list of every point cloud to merge
67
- :param cloud_ids: list of global identificators of clouds in cloud_list
68
- :param dsm_epsg: epsg code for the CRS of the final output raster
69
- :param xmin: xmin of the rasterization grid
70
- (if None, the whole clouds are combined)
71
- :param xmax: xmax of the rasterization grid
72
- (if None, the whole clouds are combined)
73
- :param ymin: ymin of the rasterization grid
74
- (if None, the whole clouds are combined)
75
- :param ymax: ymax of the rasterization grid
76
- (if None, the whole clouds are combined)
77
- :param margin: Margin added for each tile, in meter or degree.
78
- (default value: 0)
79
- :param with_coords: Option enabling the adding to the combined cloud
80
- of information of each point to retrieve their positions
81
- in the original epipolar images
82
- :return: Tuple formed with the combined clouds and color
83
- in a single pandas dataframe and the epsg code
84
- """
85
- if isinstance(cloud_list[0], xr.Dataset):
86
- return create_combined_dense_cloud(
87
- cloud_list,
88
- cloud_ids,
89
- dsm_epsg,
90
- xmin,
91
- xmax,
92
- ymin,
93
- ymax,
94
- margin,
95
- with_coords,
96
- )
97
- # case of pandas.DataFrame cloud
98
- return create_combined_sparse_cloud(
99
- cloud_list,
100
- cloud_ids,
101
- dsm_epsg,
102
- xmin,
103
- xmax,
104
- ymin,
105
- ymax,
106
- margin,
107
- with_coords,
108
- )
109
-
110
-
111
- def create_combined_sparse_cloud( # noqa: C901
112
- cloud_list: List[pandas.DataFrame],
113
- cloud_ids: List[int],
114
- dsm_epsg: int,
115
- xmin: float = None,
116
- xmax: float = None,
117
- ymin: int = None,
118
- ymax: int = None,
119
- margin: float = 0,
120
- with_coords: bool = False,
121
- ) -> Tuple[pandas.DataFrame, int]:
122
- """
123
- Combine a list of clouds (and their colors) into a pandas dataframe
124
- structured with the following labels:
125
-
126
- - if no mask data present in cloud_list datasets:
127
- labels=[ cst.X, cst.Y, cst.Z] \
128
- The combined cloud has x, y, z columns
129
-
130
- - if mask data present in cloud_list datasets:
131
- labels=[cst.X, cst.Y, cst.Z, cst.POINT_CLOUD_MSK]\
132
- The mask values are added to the dataframe.
133
-
134
- :param dsm_epsg: epsg code for the CRS of the final output raster
135
- :param xmin: xmin of the rasterization grid
136
- (if None, the whole clouds are combined)
137
- :param xmax: xmax of the rasterization grid
138
- (if None, the whole clouds are combined)
139
- :param ymin: ymin of the rasterization grid
140
- (if None, the whole clouds are combined)
141
- :param ymax: ymax of the rasterization grid
142
- (if None, the whole clouds are combined)
143
- :param margin: Margin added for each tile, in meter or degree.
144
- (default value: 0)
145
- :param with_coords: Option enabling the adding to the combined cloud
146
- of information of each point to retrieve their positions
147
- in the original epipolar images
148
- :return: Tuple formed with the combined clouds and color
149
- in a single pandas dataframe and the epsg code
150
- """
151
-
152
- epsg = pc_wrap.get_epsg(cloud_list)
153
-
154
- # compute margin/roi and final number of data to add to the combined cloud
155
- roi = (
156
- xmin is not None
157
- and xmax is not None
158
- and ymin is not None
159
- and ymax is not None
160
- )
161
-
162
- cloud_indexes_with_types = pc_wrap.create_point_cloud_index(cloud_list[0])
163
-
164
- if with_coords:
165
- cloud_indexes_with_types.update(
166
- {cst.POINT_CLOUD_COORD_EPI_GEOM_I: "uint16"}
167
- )
168
-
169
- cloud_indexes = list(cloud_indexes_with_types.keys())
170
-
171
- # iterate through input clouds
172
- combined_cloud = np.zeros((0, len(cloud_indexes)))
173
- nb_points = 0
174
- for cloud_global_id, point_cloud in zip( # noqa: B905
175
- cloud_ids, cloud_list
176
- ):
177
- full_x = point_cloud[cst.X]
178
- full_y = point_cloud[cst.Y]
179
- full_z = point_cloud[cst.Z]
180
-
181
- # get mask of points inside the roi (plus margins)
182
- if roi:
183
- # Compute terrain tile bounds
184
- # if the point clouds are not in the same referential as the roi,
185
- # it is converted using the dsm_epsg
186
- (
187
- terrain_tile_data_msk,
188
- terrain_tile_data_msk_pos,
189
- ) = pc_wrap.compute_terrain_msk(
190
- dsm_epsg,
191
- xmin,
192
- xmax,
193
- ymin,
194
- ymax,
195
- margin,
196
- epsg,
197
- point_cloud,
198
- full_x,
199
- full_y,
200
- )
201
-
202
- # if the point clouds are not in the same referential as the roi,
203
- # retrieve the initial values
204
- if epsg != dsm_epsg:
205
- full_x = point_cloud[cst.X]
206
- full_y = point_cloud[cst.Y]
207
-
208
- # if no point is found, continue
209
- if terrain_tile_data_msk_pos[0].shape[0] == 0:
210
- continue
211
- # get useful data bounding box
212
- bbox = [
213
- np.min(terrain_tile_data_msk_pos),
214
- np.max(terrain_tile_data_msk_pos),
215
- ]
216
-
217
- else:
218
- bbox = [0, full_y.shape[0] - 1]
219
-
220
- # add (x, y, z) information to the current cloud
221
- crop_x = full_x[bbox[0] : bbox[1] + 1]
222
- crop_y = full_y[bbox[0] : bbox[1] + 1]
223
- crop_z = full_z[bbox[0] : bbox[1] + 1]
224
-
225
- crop_cloud = np.zeros((len(cloud_indexes), (bbox[1] - bbox[0] + 1)))
226
- crop_cloud[cloud_indexes.index(cst.X), :] = crop_x
227
- crop_cloud[cloud_indexes.index(cst.Y), :] = crop_y
228
- crop_cloud[cloud_indexes.index(cst.Z), :] = crop_z
229
-
230
- # add index of original point cloud
231
- crop_cloud[cloud_indexes.index(cst.POINT_CLOUD_GLOBAL_ID), :] = (
232
- cloud_global_id
233
- )
234
-
235
- # add the original image coordinates information to the current cloud
236
- if with_coords:
237
- coords_line = np.linspace(
238
- bbox[0], bbox[1], num=bbox[1] - bbox[0] + 1
239
- )
240
- crop_cloud[
241
- cloud_indexes.index(cst.POINT_CLOUD_COORD_EPI_GEOM_I), :
242
- ] = coords_line
243
-
244
- # Transpose point cloud
245
- crop_cloud = crop_cloud.transpose()
246
-
247
- # remove masked data (pandora + out of the terrain tile points)
248
- crop_terrain_tile_data_msk = (
249
- point_cloud[cst.POINT_CLOUD_CORR_MSK][bbox[0] : bbox[1]] == 255
250
- )
251
- if roi:
252
- crop_terrain_tile_data_msk = np.logical_and(
253
- crop_terrain_tile_data_msk,
254
- terrain_tile_data_msk[bbox[0] : bbox[1]],
255
- )
256
-
257
- crop_cloud = pc_wrap.filter_cloud_with_mask(
258
- nb_points, crop_cloud, crop_terrain_tile_data_msk
259
- )
260
-
261
- # add current cloud to the combined one
262
- combined_cloud = np.concatenate([combined_cloud, crop_cloud], axis=0)
263
-
264
- logging.debug("Received {} points to rasterize".format(nb_points))
265
- logging.debug(
266
- "Keeping {}/{} points "
267
- "inside rasterization grid".format(combined_cloud.shape[0], nb_points)
268
- )
269
-
270
- pd_cloud = pandas.DataFrame(combined_cloud, columns=cloud_indexes)
271
-
272
- return pd_cloud, epsg
273
-
274
-
275
- def create_combined_dense_cloud( # noqa: C901
276
- cloud_list: List[xr.Dataset],
277
- cloud_id: List[int],
278
- dsm_epsg: int,
279
- xmin: float = None,
280
- xmax: float = None,
281
- ymin: int = None,
282
- ymax: int = None,
283
- margin: float = 0,
284
- with_coords: bool = False,
285
- ) -> Tuple[pandas.DataFrame, int]:
286
- """
287
- Combine a list of clouds (and their colors) into a pandas dataframe
288
- structured with the following labels:
289
-
290
- - if no colors in input and no mask data present in cloud_list datasets:
291
- labels=[cst.X, cst.Y, cst.Z] \
292
- The combined cloud has x, y, z columns
293
-
294
- - if no colors in input and mask data present in cloud_list datasets:
295
- labels=[cst.X, cst.Y, cst.Z, cst.POINT_CLOUD_MSK]\
296
- The mask values are added to the dataframe.
297
-
298
- - if colors are set in input and mask data are present \
299
- in the cloud_list datasets:
300
- labels=[cst.X, cst.Y, cst.Z, cst.POINT_CLOUD_MSK,\
301
- cst.POINT_CLOUD_CLR_KEY_ROOT+"0",\
302
- cst.POINT_CLOUD_CLR_KEY_ROOT+"1",\
303
- cst.POINT_CLOUD_CLR_KEY_ROOT+"2"]\
304
- Color channels information are added to the dataframe.
305
-
306
- - if colors in input, mask data present in the cloud_list datasets and\
307
- the with_coords option is activated:
308
- labels=[cst.X, cst.Y, cst.Z, cst.POINT_CLOUD_MSK,\
309
- cst.POINT_CLOUD_CLR_KEY_ROOT+"0",\
310
- cst.POINT_CLOUD_CLR_KEY_ROOT+"1",\
311
- cst.POINT_CLOUD_CLR_KEY_ROOT+"2"\
312
- cst.POINT_CLOUD_COORD_EPI_GEOM_I,\
313
- cst.POINT_CLOUD_COORD_EPI_GEOM_J,\
314
- cst.POINT_CLOUD_ID_IM_EPI]\
315
- The pixel position of the xyz point in the original epipolar\
316
- image (coord_epi_geom_i, coord_epi_geom_j) are added\
317
- to the dataframe along with the index of its original cloud\
318
- in the cloud_list input.
319
- - if confidence intervals on Z in input, then\
320
- [cst.Z_INF, cst.Z_SUP] are also added to the labels
321
-
322
-
323
- :param dsm_epsg: epsg code for the CRS of the final output raster
324
- :param xmin: xmin of the rasterization grid
325
- (if None, the whole clouds are combined)
326
- :param xmax: xmax of the rasterization grid
327
- (if None, the whole clouds are combined)
328
- :param ymin: ymin of the rasterization grid
329
- (if None, the whole clouds are combined)
330
- :param ymax: ymax of the rasterization grid
331
- (if None, the whole clouds are combined)
332
- :param margin: Margin added for each tile, in meter or degree.
333
- (default value: 0)
334
- :param with_coords: Option enabling the adding to the combined cloud
335
- of information of each point to retrieve their positions
336
- in the original epipolar images
337
- :return: Tuple formed with the combined clouds and color
338
- in a single pandas dataframe and the epsg code
339
- """
340
- epsg = pc_wrap.get_epsg(cloud_list)
341
-
342
- # Compute margin/roi and final number of data to add to the combined cloud
343
- roi = (
344
- xmin is not None
345
- and xmax is not None
346
- and ymin is not None
347
- and ymax is not None
348
- )
349
-
350
- # Create point cloud index
351
- cloud_indexes_with_types = pc_wrap.create_point_cloud_index(cloud_list[0])
352
-
353
- # Add coords
354
- if with_coords:
355
- cloud_indexes_with_types.update(
356
- {
357
- cst.POINT_CLOUD_COORD_EPI_GEOM_I: "uint16",
358
- cst.POINT_CLOUD_COORD_EPI_GEOM_J: "uint16",
359
- cst.POINT_CLOUD_ID_IM_EPI: "uint16",
360
- }
361
- )
362
-
363
- cloud_indexes = list(cloud_indexes_with_types.keys())
364
-
365
- # Iterate through input clouds
366
- combined_cloud = np.zeros((0, len(cloud_indexes)))
367
- nb_points = 0
368
- for cloud_global_id, (cloud_list_id, point_cloud) in zip( # noqa: B905
369
- cloud_id, enumerate(cloud_list)
370
- ):
371
- # crop point cloud if is not created from tif depth maps
372
- if (
373
- cst.EPI_MARGINS in point_cloud.attrs
374
- and cst.ROI in point_cloud.attrs
375
- ):
376
- ref_roi, _, _ = dense_matching_wrappers.compute_cropped_roi(
377
- point_cloud.attrs[cst.EPI_MARGINS],
378
- 0,
379
- point_cloud.attrs[cst.ROI],
380
- point_cloud.sizes[cst.ROW],
381
- point_cloud.sizes[cst.COL],
382
- )
383
- point_cloud = point_cloud.isel(
384
- row=slice(ref_roi[1], ref_roi[3]),
385
- col=slice(ref_roi[0], ref_roi[2]),
386
- )
387
-
388
- full_x = point_cloud[cst.X].values
389
- full_y = point_cloud[cst.Y].values
390
- full_z = point_cloud[cst.Z].values
391
-
392
- # get mask of points inside the roi (plus margins)
393
- if roi:
394
- # Compute terrain tile bounds
395
- # if the point clouds are not in the same referential as the roi,
396
- # it is converted using the dsm_epsg
397
- (
398
- terrain_tile_data_msk,
399
- terrain_tile_data_msk_pos,
400
- ) = pc_wrap.compute_terrain_msk(
401
- dsm_epsg,
402
- xmin,
403
- xmax,
404
- ymin,
405
- ymax,
406
- margin,
407
- epsg,
408
- point_cloud,
409
- full_x,
410
- full_y,
411
- )
412
-
413
- # if the point clouds are not in the same referential as the roi,
414
- # retrieve the initial values
415
- if epsg != dsm_epsg:
416
- full_x = point_cloud[cst.X].values
417
- full_y = point_cloud[cst.Y].values
418
-
419
- # if no point is found, continue
420
- if terrain_tile_data_msk_pos[0].shape[0] == 0:
421
- continue
422
-
423
- # get useful data bounding box
424
- bbox = [
425
- np.min(terrain_tile_data_msk_pos[0]),
426
- np.min(terrain_tile_data_msk_pos[1]),
427
- np.max(terrain_tile_data_msk_pos[0]),
428
- np.max(terrain_tile_data_msk_pos[1]),
429
- ]
430
- else:
431
- bbox = [0, 0, full_y.shape[0] - 1, full_y.shape[1] - 1]
432
-
433
- # add (x, y, z) information to the current cloud
434
- crop_x = full_x[bbox[0] : bbox[2] + 1, bbox[1] : bbox[3] + 1]
435
- crop_y = full_y[bbox[0] : bbox[2] + 1, bbox[1] : bbox[3] + 1]
436
- crop_z = full_z[bbox[0] : bbox[2] + 1, bbox[1] : bbox[3] + 1]
437
-
438
- flatten_cloud = np.zeros(
439
- (
440
- len(cloud_indexes),
441
- (bbox[2] - bbox[0] + 1) * (bbox[3] - bbox[1] + 1),
442
- )
443
- )
444
- flatten_cloud[cloud_indexes.index(cst.X), :] = np.ravel(crop_x)
445
- flatten_cloud[cloud_indexes.index(cst.Y), :] = np.ravel(crop_y)
446
- flatten_cloud[cloud_indexes.index(cst.Z), :] = np.ravel(crop_z)
447
-
448
- # add index of original point cloud
449
- flatten_cloud[cloud_indexes.index(cst.POINT_CLOUD_GLOBAL_ID), :] = (
450
- cloud_global_id
451
- )
452
-
453
- # add additional information to point cloud
454
- arrays_to_add_to_point_cloud = [
455
- (cst.EPI_TEXTURE, cst.POINT_CLOUD_CLR_KEY_ROOT),
456
- (cst.EPI_MSK, cst.POINT_CLOUD_MSK),
457
- (cst.EPI_CLASSIFICATION, cst.POINT_CLOUD_CLASSIF_KEY_ROOT),
458
- (cst.EPI_FILLING, cst.POINT_CLOUD_FILLING_KEY_ROOT),
459
- ]
460
-
461
- # Add layer inf and sup
462
- for array_name in point_cloud:
463
- if cst.POINT_CLOUD_LAYER_SUP_OR_INF_ROOT in array_name:
464
- arrays_to_add_to_point_cloud.append((array_name, array_name))
465
-
466
- # add performance map
467
- for array_name in point_cloud:
468
- if cst.POINT_CLOUD_PERFORMANCE_MAP_ROOT in array_name:
469
- arrays_to_add_to_point_cloud.append((array_name, array_name))
470
-
471
- # add ambiguity layer, drop confidence_* layers
472
- for array_name in point_cloud:
473
- if (
474
- cst.EPI_AMBIGUITY in array_name
475
- and cst.EPI_CONFIDENCE_KEY_ROOT not in array_name
476
- ):
477
- arrays_to_add_to_point_cloud.append((array_name, array_name))
478
-
479
- # add denoising info layers
480
- for array_name in point_cloud:
481
- if cst.EPI_DENOISING_INFO_KEY_ROOT in array_name:
482
- arrays_to_add_to_point_cloud.append((array_name, array_name))
483
-
484
- for input_band, output_column in arrays_to_add_to_point_cloud:
485
- pc_wrap.add_information_to_cloud(
486
- point_cloud,
487
- cloud_indexes,
488
- bbox,
489
- flatten_cloud,
490
- input_band,
491
- output_column,
492
- )
493
-
494
- # add the original image coordinates information to the current cloud
495
- if with_coords:
496
- coords_line = np.linspace(bbox[0], bbox[2], bbox[2] - bbox[0] + 1)
497
- coords_col = np.linspace(bbox[1], bbox[3], bbox[3] - bbox[1] + 1)
498
- coords_col, coords_line = np.meshgrid(coords_col, coords_line)
499
-
500
- flatten_cloud[
501
- cloud_indexes.index(cst.POINT_CLOUD_COORD_EPI_GEOM_I), :
502
- ] = np.ravel(coords_line)
503
- flatten_cloud[
504
- cloud_indexes.index(cst.POINT_CLOUD_COORD_EPI_GEOM_J), :
505
- ] = np.ravel(coords_col)
506
- flatten_cloud[cloud_indexes.index(cst.POINT_CLOUD_ID_IM_EPI), :] = (
507
- cloud_list_id
508
- )
509
-
510
- # Transpose point cloud
511
- flatten_cloud = flatten_cloud.transpose()
512
-
513
- # remove masked data (pandora + out of the terrain tile points)
514
- crop_terrain_tile_data_msk = (
515
- point_cloud[cst.POINT_CLOUD_CORR_MSK].values[
516
- bbox[0] : bbox[2] + 1, bbox[1] : bbox[3] + 1
517
- ]
518
- == 255
519
- )
520
-
521
- if roi:
522
- crop_terrain_tile_data_msk = np.logical_and(
523
- crop_terrain_tile_data_msk,
524
- terrain_tile_data_msk[
525
- bbox[0] : bbox[2] + 1, bbox[1] : bbox[3] + 1
526
- ],
527
- )
528
-
529
- flatten_cloud = pc_wrap.filter_cloud_with_mask(
530
- nb_points, flatten_cloud, crop_terrain_tile_data_msk
531
- )
532
-
533
- # Remove points with nan values on X, Y or Z
534
- xyz_indexes = np.array(
535
- [
536
- cloud_indexes.index(cst.X),
537
- cloud_indexes.index(cst.Y),
538
- cloud_indexes.index(cst.Z),
539
- ]
540
- )
541
- flatten_cloud = flatten_cloud[
542
- ~np.any(np.isnan(flatten_cloud[:, xyz_indexes]), axis=1)
543
- ]
544
-
545
- # Add current cloud to the combined one
546
- combined_cloud = np.concatenate([combined_cloud, flatten_cloud], axis=0)
547
-
548
- logging.debug("Received {} points to rasterize".format(nb_points))
549
- logging.debug(
550
- "Keeping {}/{} points "
551
- "inside rasterization grid".format(combined_cloud.shape[0], nb_points)
552
- )
553
-
554
- pd_cloud = pandas.DataFrame(combined_cloud, columns=cloud_indexes)
555
- pd_cloud = pd_cloud.astype(cloud_indexes_with_types)
556
-
557
- return pd_cloud, epsg
558
-
559
-
560
- def create_combined_cloud_from_tif(
561
- clouds,
562
- clouds_id,
563
- epsg,
564
- xmin=None,
565
- xmax=None,
566
- ymin=None,
567
- ymax=None,
568
- margin=0,
569
- ):
570
- """
571
- Create combined cloud from tif point clouds
572
-
573
- :param clouds: list of clouds
574
- :type clouds: list(dict)
575
- :param clouds_id: list of global identificators associated to clouds
576
- :type clouds_id: list(str)
577
- :param epsg: epsg to convert point clouds to
578
- :type epsg: int or str
579
- :param xmin: min x coordinate
580
- :type xmin: float
581
- :param xmax: max x coordinate
582
- :type xmax: float
583
- :param ymin: min y coordinate
584
- :type ymin: float
585
- :param ymax: max y coordinate
586
- :type ymax: float
587
-
588
- :return: combined cloud, point cloud epsg
589
- :rtype: pandas Dataframe, int
590
- """
591
- clouds_pd_list = []
592
- color_types = []
593
- for cloud in clouds:
594
- for band_name in cloud["data"].keys():
595
- band_path = cloud["data"][band_name]
596
- # Create multiple pc pandas dataframes
597
- for cloud_file_id, cloud in zip(clouds_id, clouds): # noqa: B905
598
- window = cloud["window"]
599
- cloud_epsg = cloud["cloud_epsg"]
600
- cloud_data_bands = []
601
- cloud_data_types = []
602
- cloud_data = {}
603
- for band_name in cloud["data"].keys():
604
- # open file and get data
605
- band_path = cloud["data"][band_name]
606
-
607
- if band_path is not None:
608
- if cst.POINT_CLOUD_CLR_KEY_ROOT in band_name:
609
- # Get color type
610
- color_types.append(
611
- inputs.rasterio_get_image_type(band_path)
612
- )
613
-
614
- if isinstance(band_path, dict):
615
- for key in band_path:
616
- sub_band_path = band_path[key]
617
- sub_band_name = key
618
- pc_wrap.read_band(
619
- sub_band_name,
620
- sub_band_path,
621
- window,
622
- cloud_data_bands,
623
- cloud_data_types,
624
- cloud_data,
625
- )
626
- else:
627
- pc_wrap.read_band(
628
- band_name,
629
- band_path,
630
- window,
631
- cloud_data_bands,
632
- cloud_data_types,
633
- cloud_data,
634
- )
635
-
636
- # add source file id
637
- cloud_data[cst.POINT_CLOUD_GLOBAL_ID] = (
638
- np.ones(cloud_data[cst.X].shape) * cloud_file_id
639
- )
640
- cloud_data_bands.append(cst.POINT_CLOUD_GLOBAL_ID)
641
- cloud_data_types.append("uint16")
642
-
643
- # Create cloud pandas
644
- cloud_pd = pandas.DataFrame(cloud_data, columns=cloud_data_bands)
645
-
646
- # Post processing if 0 in data
647
- cloud_pd = cloud_pd.drop(
648
- cloud_pd.index[
649
- (cloud_pd[cst.X] == 0.0) # pylint: disable=E1136
650
- | (cloud_pd[cst.Y] == 0.0) # pylint: disable=E1136
651
- ]
652
- )
653
-
654
- cloud_pd = cloud_pd.drop(
655
- cloud_pd.index[
656
- (np.isnan(cloud_pd[cst.X])) # pylint: disable=E1136
657
- | (np.isnan(cloud_pd[cst.Y])) # pylint: disable=E1136
658
- ]
659
- )
660
-
661
- # Cast types according to band
662
- cloud_data_types = dict(
663
- zip(cloud_data_bands, cloud_data_types) # noqa: B905
664
- )
665
- cloud_pd = cloud_pd.astype(cloud_data_types)
666
-
667
- # Convert pc if necessary
668
- if cloud_epsg != epsg:
669
- projection.point_cloud_conversion_dataframe(
670
- cloud_pd, cloud_epsg, epsg
671
- )
672
-
673
- # filter outside points considering mmargins
674
- pc_wrap.filter_cloud_tif(
675
- cloud_pd,
676
- list(
677
- np.array([xmin, xmax, ymin, ymax])
678
- + np.array([-margin, margin, -margin, margin])
679
- ),
680
- )
681
-
682
- # add to list of pandas pc
683
- clouds_pd_list.append(cloud_pd)
684
-
685
- # Merge pandas point clouds
686
- combined_pd_cloud = pandas.concat(
687
- clouds_pd_list,
688
- axis=0,
689
- join="outer",
690
- )
691
-
692
- # Get color type
693
- color_type_set = set(color_types)
694
- if len(color_type_set) > 1:
695
- logging.warning("The tiles colors don't have the same type.")
696
- color_type = None
697
- if len(color_types) > 0:
698
- color_type = color_types[0]
699
-
700
- return combined_pd_cloud, epsg, color_type
701
-
702
-
703
def generate_point_clouds(list_clouds, orchestrator, tile_size=1000):
    """
    Generate point cloud cars Datasets from list.

    Builds one "arrays" CarsDataset per input cloud, tiled with
    ``tile_size``; each tile is a delayed ``generate_pc_wrapper`` task
    submitted to the orchestrator cluster.

    :param list_clouds: list of clouds
    :type list_clouds: dict
    :param orchestrator: orchestrator
    :type orchestrator: Orchestrator
    :param tile_size: tile size
    :type tile_size: int

    :return list of point clouds
    :rtype: list(CarsDataset)
    """
    source_names = list(list_clouds.keys())
    epipolar_point_clouds = []

    for cloud_id, cloud in enumerate(list_clouds.values()):
        pc_cars_ds = cars_dataset.CarsDataset(dataset_type="arrays")

        # Tiling grid derived from the size of the "x" coordinate raster
        epipolar_size_x, epipolar_size_y = inputs.rasterio_get_size(
            cloud["x"]
        )
        pc_cars_ds.tiling_grid = tiling.generate_tiling_grid(
            0,
            0,
            epipolar_size_y,
            epipolar_size_x,
            tile_size,
            tile_size,
        )

        # Color type is read from the color raster when one is provided
        color_type = None
        if cst.POINT_CLOUD_CLR_KEY_ROOT in cloud:
            color_type = inputs.rasterio_get_image_type(
                cloud[cst.POINT_CLOUD_CLR_KEY_ROOT]
            )
        pc_cars_ds.attributes = {
            "color_type": color_type,
            "source_pc_names": source_names,
        }

        # One delayed task per tile
        for col in range(pc_cars_ds.shape[1]):
            for row in range(pc_cars_ds.shape[0]):
                rio_window = cars_dataset.generate_rasterio_window(
                    pc_cars_ds.get_window_as_dict(row, col)
                )
                pc_cars_ds[row, col] = orchestrator.cluster.create_task(
                    generate_pc_wrapper, nout=1
                )(
                    cloud,
                    rio_window,
                    color_type=color_type,
                    cloud_id=cloud_id,
                    list_cloud_ids=source_names,
                )

        epipolar_point_clouds.append(pc_cars_ds)

    return epipolar_point_clouds
772
def generate_pc_wrapper(  # noqa: C901
    cloud, window, color_type=None, cloud_id=None, list_cloud_ids=None
):
    """
    Generate point cloud dataset.

    Reads every band of the cloud dict on the given window and assembles
    an xarray Dataset with x/y/z, optional inf/sup z layers, mask,
    classification, texture, ambiguity, filling and performance map.

    :param cloud: cloud dict (band name -> file path, plus metadata keys)
    :param window: window
    :param color_type: color type
    :param cloud_id: cloud id
    :param list_cloud_ids: list of global cloud ids

    :return cloud
    :rtype: xr.Dataset
    """

    list_keys = cloud.keys()
    # x y z
    data_x = pc_wrap.read_image_full(cloud["x"], window=window, squeeze=True)
    data_y = pc_wrap.read_image_full(cloud["y"], window=window, squeeze=True)
    data_z = pc_wrap.read_image_full(cloud["z"], window=window, squeeze=True)

    shape = data_x.shape

    row = np.arange(0, shape[0])
    col = np.arange(0, shape[1])

    values = {
        cst.X: ([cst.ROW, cst.COL], data_x),  # longitudes
        cst.Y: ([cst.ROW, cst.COL], data_y),  # latitudes
        cst.Z: ([cst.ROW, cst.COL], data_z),
    }

    coords = {cst.ROW: row, cst.COL: col}

    attributes = {"cloud_id": cloud_id, "number_of_pc": len(list_cloud_ids)}

    for key in list_keys:
        if cloud[key] is None and key != "mask":
            # missing optional band: nothing to read
            pass
        elif key in ["x", "y", "z"]:
            # already read above
            pass
        elif key == cst.POINT_CLOUD_LAYER_INF:
            data_z_inf = pc_wrap.read_image_full(
                cloud[cst.POINT_CLOUD_LAYER_INF], window=window, squeeze=True
            )
            values[cst.POINT_CLOUD_LAYER_INF] = ([cst.ROW, cst.COL], data_z_inf)
        elif key == cst.POINT_CLOUD_LAYER_SUP:
            data_z_sup = pc_wrap.read_image_full(
                cloud[cst.POINT_CLOUD_LAYER_SUP], window=window, squeeze=True
            )
            values[cst.POINT_CLOUD_LAYER_SUP] = ([cst.ROW, cst.COL], data_z_sup)
        elif key == "point_cloud_epsg":
            attributes["epsg"] = cloud[key]
        elif key == "mask":
            if cloud[key] is None:
                # no mask file: derive validity from finite x values
                data = ~np.isnan(data_x) * 255
            else:
                data = pc_wrap.read_image_full(
                    cloud[key], window=window, squeeze=True
                )
            values[cst.POINT_CLOUD_CORR_MSK] = ([cst.ROW, cst.COL], data)

        elif key == cst.EPI_CLASSIFICATION:
            data = pc_wrap.read_image_full(
                cloud[key], window=window, squeeze=False
            )
            descriptions = list(inputs.get_descriptions_bands(cloud[key]))
            values[cst.EPI_CLASSIFICATION] = (
                [cst.BAND_CLASSIF, cst.ROW, cst.COL],
                data,
            )
            if cst.BAND_CLASSIF not in coords:
                coords[cst.BAND_CLASSIF] = descriptions

        elif key == cst.EPI_TEXTURE:
            data = pc_wrap.read_image_full(
                cloud[key], window=window, squeeze=False
            )
            descriptions = list(inputs.get_descriptions_bands(cloud[key]))
            attributes["color_type"] = color_type
            values[cst.EPI_TEXTURE] = ([cst.BAND_IM, cst.ROW, cst.COL], data)

            # BUG FIX: the guard previously tested "cst.EPI_TEXTURE not in
            # coords" while the coordinate being set is cst.BAND_IM, so the
            # check could never match the key it protects.
            if cst.BAND_IM not in coords:
                coords[cst.BAND_IM] = descriptions

        elif key == cst.EPI_AMBIGUITY:
            data = pc_wrap.read_image_full(
                cloud[key], window=window, squeeze=True
            )
            descriptions = list(inputs.get_descriptions_bands(cloud[key]))
            values[cst.EPI_AMBIGUITY] = (
                [cst.ROW, cst.COL],
                data,
            )

        elif key == cst.EPI_FILLING:
            data = pc_wrap.read_image_full(
                cloud[key], window=window, squeeze=False
            )
            descriptions = list(inputs.get_descriptions_bands(cloud[key]))
            values[cst.EPI_FILLING] = (
                [cst.BAND_FILLING, cst.ROW, cst.COL],
                data,
            )
            if cst.BAND_FILLING not in coords:
                coords[cst.BAND_FILLING] = descriptions

        elif key == cst.EPI_PERFORMANCE_MAP:
            data = pc_wrap.read_image_full(
                cloud[key], window=window, squeeze=True
            )
            descriptions = list(inputs.get_descriptions_bands(cloud[key]))
            values[cst.EPI_PERFORMANCE_MAP] = (
                [cst.ROW, cst.COL],
                data,
            )
            if cst.BAND_PERFORMANCE_MAP not in coords:
                coords[cst.BAND_PERFORMANCE_MAP] = descriptions

        else:
            data = pc_wrap.read_image_full(
                cloud[key], window=window, squeeze=True
            )
            # BUG FIX: original compared the shape *tuple* to the int 2
            # ("data.shape == 2"), which is always False, so every generic
            # 2-D band was dropped and logged as "not managed".
            if len(data.shape) == 2:
                values[key] = ([cst.ROW, cst.COL], data)
            else:
                logging.error(" {} data not managed".format(key))

    xr_cloud = xr.Dataset(values, coords=coords)
    xr_cloud.attrs = attributes

    return xr_cloud
907
def transform_input_pc(
    list_epipolar_point_clouds,
    epsg,
    roi_poly=None,
    epipolar_tile_size=600,
    orchestrator=None,
):
    """
    Transform point clouds from inputs into point cloud fusion application
    format.
    Create tiles, with x y min max informations.

    :param list_epipolar_point_clouds: list of epipolar point clouds
    :type list_epipolar_point_clouds: dict
    :param epsg: epsg
    :type epsg: int, str
    :param roi_poly: roi polygon
    :type roi_poly: Polygon
    :param epipolar_tile_size: size of tile used for tiling the tif files
    :type epipolar_tile_size: int

    :return list of point clouds
    :rtype: list(CarsDataset type dict)

    """

    # Fall back on a local sequential orchestrator when none is given:
    # no out_json is shared between orchestrators, no file is saved
    if orchestrator is not None:
        used_orchestrator = orchestrator
    else:
        used_orchestrator = ocht.Orchestrator(
            orchestrator_conf={"mode": "sequential"}
        )

    tiled_point_clouds = []

    # Global bounds accumulated over every stereo pair
    all_xmins, all_xmaxs = [], []
    all_ymins, all_ymaxs = [], []

    for pair_key, items in list_epipolar_point_clouds.items():
        # Generate one "dict" CarsDataset per pair, tiled on the tif size
        epi_pc = cars_dataset.CarsDataset("dict")
        tif_size = inputs.rasterio_get_size(items[cst.X])
        epi_pc.tiling_grid = tiling.generate_tiling_grid(
            0,
            0,
            tif_size[0],
            tif_size[1],
            epipolar_tile_size,
            epipolar_tile_size,
        )

        # Register in replace list so tiles are readable at the same time
        [saving_info_pc] = used_orchestrator.get_saving_infos([epi_pc])
        used_orchestrator.add_to_replace_lists(
            epi_pc, cars_ds_name="epi_pc_min_max"
        )

        # One delayed bounds-computation task per tile
        for row in range(epi_pc.shape[0]):
            for col in range(epi_pc.shape[1]):
                window = rio.windows.Window.from_slices(
                    (
                        epi_pc.tiling_grid[row, col, 0],
                        epi_pc.tiling_grid[row, col, 1],
                    ),
                    (
                        epi_pc.tiling_grid[row, col, 2],
                        epi_pc.tiling_grid[row, col, 3],
                    ),
                )

                # /!\ BE AWARE : this is not the conventional way
                # to parallelise tasks in CARS
                full_saving_info_pc = ocht.update_saving_infos(
                    saving_info_pc, row=row, col=col
                )

                epi_pc[row, col] = used_orchestrator.cluster.create_task(
                    compute_x_y_min_max_wrapper, nout=1
                )(
                    items,
                    epsg,
                    window,
                    saving_info=full_saving_info_pc,
                )
        epi_pc.attributes["source_pc_name"] = pair_key
        tiled_point_clouds.append(epi_pc)

    # Breakpoint: trigger computation of all tasks
    # /!\ BE AWARE : this is not the conventional way
    # to parallelise tasks in CARS
    used_orchestrator.breakpoint()

    # Collect local min/max of every tile
    for computed_epi_pc in tiled_point_clouds:
        pc_xmins, pc_ymins, pc_xmaxs, pc_ymaxs = [], [], [], []
        for row in range(computed_epi_pc.shape[0]):
            for col in range(computed_epi_pc.shape[1]):
                local_bounds = computed_epi_pc[row, col].data["x_y_min_max"]

                # Only finite bounds contribute (nan tiles are skipped)
                if np.all(np.isfinite(local_bounds)):
                    # Global accumulation
                    all_xmins.append(local_bounds[0])
                    all_xmaxs.append(local_bounds[1])
                    all_ymins.append(local_bounds[2])
                    all_ymaxs.append(local_bounds[3])
                    # Per-CarsDataset accumulation
                    pc_xmins.append(local_bounds[0])
                    pc_xmaxs.append(local_bounds[1])
                    pc_ymins.append(local_bounds[2])
                    pc_ymaxs.append(local_bounds[3])

                # Replace the CarsDict wrapper by its plain dict payload
                computed_epi_pc[row, col] = computed_epi_pc[row, col].data

        # Attach min/max attributes to the current point cloud CarsDataset
        if len(pc_xmins) > 0:
            computed_epi_pc.attributes["xmin"] = min(pc_xmins)
            computed_epi_pc.attributes["ymin"] = min(pc_ymins)
            computed_epi_pc.attributes["xmax"] = max(pc_xmaxs)
            computed_epi_pc.attributes["ymax"] = max(pc_ymaxs)
        computed_epi_pc.attributes["epsg"] = epsg

    # Terrain bounds (in terrain epsg) from all finite tiles
    if len(all_xmins) == 0:
        raise RuntimeError("All the depth maps are full of nan")
    global_xmin = min(all_xmins)
    global_xmax = max(all_xmaxs)
    global_ymin = min(all_ymins)
    global_ymax = max(all_ymaxs)

    if roi_poly is not None:
        (
            global_xmin,
            global_ymin,
            global_xmax,
            global_ymax,
        ) = preprocessing.crop_terrain_bounds_with_roi(
            roi_poly, global_xmin, global_ymin, global_xmax, global_ymax
        )

    terrain_bbox = [global_xmin, global_ymin, global_xmax, global_ymax]

    logging.info("terrain bbox in epsg {}: {}".format(str(epsg), terrain_bbox))

    return (terrain_bbox, tiled_point_clouds)
1064
def compute_x_y_min_max_wrapper(items, epsg, window, saving_info=None):
    """
    Compute bounds from item and create CarsDict filled with point cloud
    information: file paths, bounds, epsg, window

    :param items: point cloud
    :type items: dict
    :param epsg: epsg
    :type epsg: int
    :param window: window to use
    :type window: dict
    :param saving_info: saving infos
    :type saving_info: dict

    :return: Tile ready to use
    :rtype: CarsDict

    """
    # Bounds of this window in the target epsg
    x_y_min_max = pc_wrap.get_min_max_band(
        items[cst.X],
        items[cst.Y],
        items[cst.Z],
        items[cst.PC_EPSG],
        epsg,
        window=window,
    )

    # Mandatory bands
    data_dict = {
        cst.X: items[cst.X],
        cst.Y: items[cst.Y],
        cst.Z: items[cst.Z],
        cst.POINT_CLOUD_CLR_KEY_ROOT: items[cst.POINT_CLOUD_CLR_KEY_ROOT],
    }
    # Optional bands, forwarded only when present
    if cst.POINT_CLOUD_MSK in items:
        data_dict[cst.POINT_CLOUD_MSK] = items[cst.POINT_CLOUD_MSK]
    if cst.POINT_CLOUD_CLASSIF_KEY_ROOT in items:
        data_dict[cst.POINT_CLOUD_CLASSIF_KEY_ROOT] = items[
            cst.POINT_CLOUD_CLASSIF_KEY_ROOT
        ]
    if cst.POINT_CLOUD_FILLING_KEY_ROOT in items:
        data_dict[cst.POINT_CLOUD_FILLING_KEY_ROOT] = items[
            cst.POINT_CLOUD_FILLING_KEY_ROOT
        ]
    if cst.POINT_CLOUD_AMBIGUITY_KEY_ROOT in items:
        data_dict[cst.POINT_CLOUD_AMBIGUITY_KEY_ROOT] = items[
            cst.POINT_CLOUD_AMBIGUITY_KEY_ROOT
        ]
    if cst.POINT_CLOUD_PERFORMANCE_MAP_ROOT in items:
        data_dict[cst.POINT_CLOUD_PERFORMANCE_MAP_ROOT] = items[
            cst.POINT_CLOUD_PERFORMANCE_MAP_ROOT
        ]
    # BUG FIX: EPI_Z_INF was previously stored under POINT_CLOUD_LAYER_SUP
    # and EPI_Z_SUP under POINT_CLOUD_LAYER_INF (cross-wired). Map each
    # z bound to its matching layer key.
    if cst.EPI_Z_INF in items:
        data_dict[cst.POINT_CLOUD_LAYER_INF] = items[cst.EPI_Z_INF]
    if cst.EPI_Z_SUP in items:
        data_dict[cst.POINT_CLOUD_LAYER_SUP] = items[cst.EPI_Z_SUP]

    # create dict
    tile = {
        "data": data_dict,
        "x_y_min_max": x_y_min_max,
        "window": window,
        "cloud_epsg": items[cst.PC_EPSG],
    }

    # add saving infos
    res = cars_dict.CarsDict(tile)
    cars_dataset.fill_dict(res, saving_info=saving_info)

    return res
1135
def get_corresponding_tiles_tif(
    terrain_tiling_grid,
    list_epipolar_point_clouds_with_loc,
    margins=0,
    orchestrator=None,
):
    """
    Get point cloud tiles to use for terrain region

    :param terrain_tiling_grid: tiling grid
    :type terrain_tiling_grid: np.ndarray
    :param row: tiling row
    :type row: int
    :param col: col
    :type col: int
    :param list_epipolar_point_clouds_with_loc: list of left point clouds
    :type list_epipolar_point_clouds_with_loc: list(CarsDataset)
    :param margins: margin to use in point clouds
    :type margins: float

    :return: CarsDataset containing list of point cloud tiles to use
        to terrain tile
    :rtype: CarsDataset

    """

    # Fall back on a local sequential orchestrator when none is given:
    # no out_json shared between orchestrators, no file saved
    if orchestrator is not None:
        used_orchestrator = orchestrator
    else:
        used_orchestrator = ocht.Orchestrator(
            orchestrator_conf={"mode": "sequential"}
        )

    nb_point_clouds = len(list_epipolar_point_clouds_with_loc)

    # CarsDataset holding one correspondence task per point cloud
    # (its tiling grid is a placeholder, never used afterwards)
    correspondences_ds = cars_dataset.CarsDataset("dict")
    correspondences_ds.tiling_grid = tiling.generate_tiling_grid(
        0,
        0,
        nb_point_clouds,
        nb_point_clouds,
        1,
        nb_point_clouds,
    )
    # Register in replace list so tiles are readable at the same time
    [saving_info_pc] = used_orchestrator.get_saving_infos(
        [correspondences_ds]
    )
    used_orchestrator.add_to_replace_lists(
        correspondences_ds, cars_ds_name="epi_pc_corresp"
    )

    # /!\ BE AWARE : this is not the conventional way
    # to parallelise tasks in CARS
    for pc_index in range(correspondences_ds.shape[0]):
        full_saving_info_pc = ocht.update_saving_infos(
            saving_info_pc, row=pc_index, col=0
        )

        correspondences_ds[
            pc_index, 0
        ] = used_orchestrator.cluster.create_task(
            compute_correspondance_single_pc_terrain, nout=1
        )(
            list_epipolar_point_clouds_with_loc[pc_index],
            pc_index,
            terrain_tiling_grid,
            margins=margins,
            saving_info=full_saving_info_pc,
        )

    # Breakpoint: trigger computation of all tasks
    # /!\ BE AWARE : this is not the conventional way
    # to parallelise tasks in CARS
    used_orchestrator.breakpoint()

    # Merge per-point-cloud correspondences into one terrain CarsDataset
    terrain_correspondances = cars_dataset.CarsDataset("dict")
    terrain_correspondances.tiling_grid = terrain_tiling_grid

    for row in range(terrain_correspondances.shape[0]):
        for col in range(terrain_correspondances.shape[1]):
            # Terrain grid [row, j, :] = [xmin, xmax, ymin, ymax]
            # terrain region = [xmin, ymin, xmax, ymax]
            terrain_region = [
                terrain_tiling_grid[row, col, 0],
                terrain_tiling_grid[row, col, 2],
                terrain_tiling_grid[row, col, 1],
                terrain_tiling_grid[row, col, 3],
            ]

            # Concatenate the required tiles of every point cloud:
            # each task result is a CarsDict holding a CarsDataset of lists
            required_point_clouds = []
            for pc_index in range(correspondences_ds.shape[0]):
                required_point_clouds += correspondences_ds[
                    pc_index, 0
                ].data["corresp_cars_ds"][row, col]

            terrain_correspondances[row, col] = {
                "terrain_region": terrain_region,
                "required_point_clouds": required_point_clouds,
            }

    return terrain_correspondances
1252
-
1253
def compute_correspondance_single_pc_terrain(
    epi_pc,
    epi_pc_id,
    terrain_tiling_grid,
    margins=0,
    saving_info=None,
):
    """
    Compute correspondances for each terrain tile, with current point cloud

    :param epi_pc: point cloud
    :type epi_pc: dict
    :param epi_pc_id: identificator of the file of the point cloud
    :type epi_pc_id: int
    :param terrain_tiling_grid: tiling grid
    :type terrain_tiling_grid: np.ndarray
    :param margins: margin to use in point clouds
    :type margins: float

    :return: CarsDict containing list of point cloud tiles to use for each
        terrain tile:

    :rtype: CarsDict

    """

    # Fake CarsDataset used only for its 2d tile structure
    corresp_grid = cars_dataset.CarsDataset("dict")
    corresp_grid.tiling_grid = terrain_tiling_grid

    margin_offsets = np.array([-margins, margins, -margins, margins])

    for terrain_row in range(corresp_grid.shape[0]):
        for terrain_col in range(corresp_grid.shape[1]):
            # Start from an empty correspondence list
            matching_tiles = []
            corresp_grid[terrain_row, terrain_col] = matching_tiles

            # Terrain grid [row, j, :] = [xmin, xmax, ymin, ymax]
            # terrain region = [xmin, ymin, xmax, ymax]
            terrain_region = [
                terrain_tiling_grid[terrain_row, terrain_col, 0],
                terrain_tiling_grid[terrain_row, terrain_col, 2],
                terrain_tiling_grid[terrain_row, terrain_col, 1],
                terrain_tiling_grid[terrain_row, terrain_col, 3],
            ]
            region_with_margin = list(
                np.array(terrain_region) + margin_offsets
            )

            # Terrain tile bounds as a shapely polygon
            # region: [xmin, ymin, xmax, ymax],
            # convert_to_polygon needs : [xmin, xmax, ymin, ymax]
            terrain_tile_polygon = pc_wrap.convert_to_polygon(
                [
                    region_with_margin[0],
                    region_with_margin[2],
                    region_with_margin[1],
                    region_with_margin[3],
                ]
            )

            # Keep every point cloud tile whose bounds intersect the
            # terrain tile polygon (tiles with non-finite bounds are
            # empty and skipped)
            for tile_row in range(epi_pc.shape[0]):
                for tile_col in range(epi_pc.shape[1]):
                    x_y_min_max = epi_pc[tile_row, tile_col]["x_y_min_max"]

                    if not np.all(np.isfinite(x_y_min_max)):
                        continue

                    pc_tile_polygon = pc_wrap.convert_to_polygon(
                        x_y_min_max
                    )

                    if pc_wrap.intersect_polygons(
                        terrain_tile_polygon, pc_tile_polygon
                    ):
                        matching_tiles.append(
                            (epi_pc[tile_row, tile_col], epi_pc_id)
                        )

    # Wrap into a CarsDict and attach saving infos
    dict_with_corresp_cars_ds = cars_dict.CarsDict(
        {"corresp_cars_ds": corresp_grid}
    )
    cars_dataset.fill_dict(dict_with_corresp_cars_ds, saving_info=saving_info)

    return dict_with_corresp_cars_ds