cars-1.0.0a2-cp312-cp312-win_amd64.whl → cars-1.0.0a4-cp312-cp312-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of cars might be problematic.

Files changed (144)
  1. cars/__init__.py +3 -3
  2. cars/applications/__init__.py +0 -3
  3. cars/applications/application.py +14 -6
  4. cars/applications/application_template.py +42 -0
  5. cars/applications/auxiliary_filling/abstract_auxiliary_filling_app.py +12 -2
  6. cars/applications/auxiliary_filling/auxiliary_filling_algo.py +2 -2
  7. cars/applications/auxiliary_filling/auxiliary_filling_from_sensors_app.py +95 -46
  8. cars/applications/auxiliary_filling/auxiliary_filling_wrappers.py +7 -6
  9. cars/applications/dem_generation/abstract_dem_generation_app.py +9 -5
  10. cars/applications/dem_generation/dem_generation_algo.py +1 -1
  11. cars/applications/dem_generation/dem_generation_wrappers.py +44 -59
  12. cars/applications/dem_generation/dichotomic_generation_app.py +9 -6
  13. cars/applications/dem_generation/rasterization_app.py +112 -43
  14. cars/applications/dense_match_filling/__init__.py +1 -1
  15. cars/applications/dense_match_filling/abstract_dense_match_filling_app.py +2 -15
  16. cars/applications/dense_match_filling/fill_disp_algo.py +32 -373
  17. cars/applications/dense_match_filling/fill_disp_wrappers.py +0 -343
  18. cars/applications/dense_match_filling/zero_padding_app.py +10 -5
  19. cars/applications/dense_matching/abstract_dense_matching_app.py +2 -1
  20. cars/applications/dense_matching/census_mccnn_sgm_app.py +48 -60
  21. cars/applications/dense_matching/cpp/dense_matching_cpp.cp312-win_amd64.dll.a +0 -0
  22. cars/applications/dense_matching/cpp/dense_matching_cpp.cp312-win_amd64.pyd +0 -0
  23. cars/applications/dense_matching/dense_matching_algo.py +48 -14
  24. cars/applications/dense_matching/dense_matching_wrappers.py +11 -3
  25. cars/applications/dense_matching/disparity_grid_algo.py +95 -79
  26. cars/applications/dense_matching/loaders/config_mapping.json +13 -0
  27. cars/applications/dense_matching/loaders/global_land_cover_map.tif +0 -0
  28. cars/applications/dense_matching/loaders/pandora_loader.py +169 -34
  29. cars/applications/dsm_filling/border_interpolation_app.py +11 -12
  30. cars/applications/dsm_filling/bulldozer_filling_app.py +16 -15
  31. cars/applications/dsm_filling/exogenous_filling_app.py +14 -14
  32. cars/applications/grid_generation/abstract_grid_generation_app.py +1 -1
  33. cars/applications/grid_generation/epipolar_grid_generation_app.py +4 -2
  34. cars/applications/grid_generation/grid_correction_app.py +4 -1
  35. cars/applications/grid_generation/grid_generation_algo.py +7 -2
  36. cars/applications/ground_truth_reprojection/abstract_ground_truth_reprojection_app.py +1 -1
  37. cars/applications/ground_truth_reprojection/direct_localization_app.py +2 -2
  38. cars/applications/ground_truth_reprojection/ground_truth_reprojection_algo.py +2 -1
  39. cars/applications/point_cloud_fusion/abstract_pc_fusion_app.py +0 -155
  40. cars/applications/point_cloud_fusion/mapping_to_terrain_tiles_app.py +0 -658
  41. cars/applications/point_cloud_fusion/pc_fusion_algo.py +0 -1339
  42. cars/applications/point_cloud_fusion/pc_fusion_wrappers.py +0 -869
  43. cars/applications/point_cloud_outlier_removal/abstract_outlier_removal_app.py +11 -6
  44. cars/applications/point_cloud_outlier_removal/outlier_removal_algo.py +9 -8
  45. cars/applications/point_cloud_outlier_removal/small_components_app.py +101 -270
  46. cars/applications/point_cloud_outlier_removal/statistical_app.py +120 -277
  47. cars/applications/rasterization/abstract_pc_rasterization_app.py +2 -1
  48. cars/applications/rasterization/rasterization_algo.py +18 -6
  49. cars/applications/rasterization/rasterization_wrappers.py +2 -1
  50. cars/applications/rasterization/simple_gaussian_app.py +88 -116
  51. cars/applications/resampling/abstract_resampling_app.py +1 -1
  52. cars/applications/resampling/bicubic_resampling_app.py +3 -1
  53. cars/applications/resampling/resampling_algo.py +60 -53
  54. cars/applications/resampling/resampling_wrappers.py +3 -1
  55. cars/applications/sparse_matching/abstract_sparse_matching_app.py +1 -1
  56. cars/applications/sparse_matching/sift_app.py +5 -25
  57. cars/applications/sparse_matching/sparse_matching_algo.py +3 -2
  58. cars/applications/sparse_matching/sparse_matching_wrappers.py +1 -1
  59. cars/applications/triangulation/abstract_triangulation_app.py +1 -1
  60. cars/applications/triangulation/line_of_sight_intersection_app.py +13 -11
  61. cars/applications/triangulation/pc_transform.py +552 -0
  62. cars/applications/triangulation/triangulation_algo.py +6 -4
  63. cars/applications/triangulation/triangulation_wrappers.py +1 -0
  64. cars/bundleadjustment.py +6 -6
  65. cars/cars.py +11 -9
  66. cars/core/cars_logging.py +80 -49
  67. cars/core/constants.py +0 -1
  68. cars/core/datasets.py +5 -2
  69. cars/core/geometry/abstract_geometry.py +364 -22
  70. cars/core/geometry/shareloc_geometry.py +112 -82
  71. cars/core/inputs.py +72 -19
  72. cars/core/outputs.py +1 -1
  73. cars/core/preprocessing.py +17 -3
  74. cars/core/projection.py +126 -6
  75. cars/core/tiling.py +10 -3
  76. cars/data_structures/cars_dataset.py +12 -10
  77. cars/data_structures/corresponding_tiles_tools.py +0 -103
  78. cars/data_structures/format_transformation.py +4 -1
  79. cars/devibrate.py +6 -3
  80. cars/extractroi.py +20 -21
  81. cars/orchestrator/cluster/abstract_cluster.py +15 -5
  82. cars/orchestrator/cluster/abstract_dask_cluster.py +6 -2
  83. cars/orchestrator/cluster/dask_jobqueue_utils.py +1 -1
  84. cars/orchestrator/cluster/log_wrapper.py +149 -22
  85. cars/orchestrator/cluster/mp_cluster/multiprocessing_cluster.py +12 -4
  86. cars/orchestrator/cluster/mp_cluster/multiprocessing_profiler.py +2 -2
  87. cars/orchestrator/cluster/pbs_dask_cluster.py +1 -1
  88. cars/orchestrator/cluster/sequential_cluster.py +5 -4
  89. cars/orchestrator/cluster/slurm_dask_cluster.py +1 -1
  90. cars/orchestrator/orchestrator.py +15 -4
  91. cars/orchestrator/registry/id_generator.py +1 -0
  92. cars/orchestrator/registry/saver_registry.py +2 -2
  93. cars/pipelines/conf_resolution/conf_final_resolution.json +5 -3
  94. cars/pipelines/default/default_pipeline.py +461 -1052
  95. cars/pipelines/parameters/advanced_parameters.py +91 -64
  96. cars/pipelines/parameters/advanced_parameters_constants.py +6 -5
  97. cars/pipelines/parameters/application_parameters.py +71 -0
  98. cars/pipelines/parameters/depth_map_inputs.py +0 -314
  99. cars/pipelines/parameters/dsm_inputs.py +40 -4
  100. cars/pipelines/parameters/output_parameters.py +44 -8
  101. cars/pipelines/parameters/sensor_inputs.py +122 -73
  102. cars/pipelines/parameters/sensor_inputs_constants.py +0 -2
  103. cars/pipelines/parameters/sensor_loaders/__init__.py +4 -3
  104. cars/pipelines/parameters/sensor_loaders/basic_classif_loader.py +106 -0
  105. cars/pipelines/parameters/sensor_loaders/{basic_sensor_loader.py → basic_image_loader.py} +16 -22
  106. cars/pipelines/parameters/sensor_loaders/pivot_classif_loader.py +121 -0
  107. cars/pipelines/parameters/sensor_loaders/{pivot_sensor_loader.py → pivot_image_loader.py} +10 -21
  108. cars/pipelines/parameters/sensor_loaders/sensor_loader.py +4 -6
  109. cars/pipelines/parameters/sensor_loaders/sensor_loader_template.py +1 -3
  110. cars/pipelines/pipeline_template.py +1 -3
  111. cars/pipelines/unit/unit_pipeline.py +676 -1070
  112. cars/starter.py +4 -3
  113. cars-1.0.0a4.dist-info/DELVEWHEEL +2 -0
  114. {cars-1.0.0a2.dist-info → cars-1.0.0a4.dist-info}/METADATA +135 -53
  115. {cars-1.0.0a2.dist-info → cars-1.0.0a4.dist-info}/RECORD +120 -134
  116. cars.libs/libgcc_s_seh-1-b2494fcbd4d80cf2c98fdd5261f6d850.dll +0 -0
  117. cars.libs/libstdc++-6-e9b0d12ae0e9555bbae55e8dfd08c3f7.dll +0 -0
  118. cars.libs/libwinpthread-1-7882d1b093714ccdfaf4e0789a817792.dll +0 -0
  119. cars/applications/dense_match_filling/cpp/__init__.py +0 -0
  120. cars/applications/dense_match_filling/cpp/dense_match_filling_cpp.cp312-win_amd64.dll.a +0 -0
  121. cars/applications/dense_match_filling/cpp/dense_match_filling_cpp.cp312-win_amd64.pyd +0 -0
  122. cars/applications/dense_match_filling/cpp/dense_match_filling_cpp.py +0 -72
  123. cars/applications/dense_match_filling/cpp/includes/dense_match_filling.hpp +0 -46
  124. cars/applications/dense_match_filling/cpp/meson.build +0 -9
  125. cars/applications/dense_match_filling/cpp/src/bindings.cpp +0 -11
  126. cars/applications/dense_match_filling/cpp/src/dense_match_filling.cpp +0 -142
  127. cars/applications/dense_match_filling/plane_app.py +0 -556
  128. cars/applications/hole_detection/__init__.py +0 -30
  129. cars/applications/hole_detection/abstract_hole_detection_app.py +0 -125
  130. cars/applications/hole_detection/cloud_to_bbox_app.py +0 -346
  131. cars/applications/hole_detection/hole_detection_algo.py +0 -144
  132. cars/applications/hole_detection/hole_detection_wrappers.py +0 -53
  133. cars/applications/point_cloud_denoising/__init__.py +0 -29
  134. cars/applications/point_cloud_denoising/abstract_pc_denoising_app.py +0 -273
  135. cars/applications/point_cloud_fusion/__init__.py +0 -30
  136. cars/applications/point_cloud_fusion/cloud_fusion_constants.py +0 -39
  137. cars/applications/sparse_matching/pandora_sparse_matching_app.py +0 -0
  138. cars/pipelines/parameters/depth_map_inputs_constants.py +0 -25
  139. cars-1.0.0a2.dist-info/DELVEWHEEL +0 -2
  140. cars.libs/libgcc_s_seh-1-f2b6825d483bdf14050493af93b5997d.dll +0 -0
  141. cars.libs/libstdc++-6-6b0059df6bc601df5a0f18a5805eea05.dll +0 -0
  142. cars.libs/libwinpthread-1-e01b8e85fd67c2b861f64d4ccc7df607.dll +0 -0
  143. {cars-1.0.0a2.dist-info → cars-1.0.0a4.dist-info}/WHEEL +0 -0
  144. {cars-1.0.0a2.dist-info → cars-1.0.0a4.dist-info}/entry_points.txt +0 -0
@@ -27,9 +27,7 @@ import copy
 
 # Standard imports
 import logging
-import math
 import os
-import time
 
 import numpy as np
 
@@ -40,14 +38,13 @@ from pyproj import CRS
 # CARS imports
 import cars.orchestrator.orchestrator as ocht
 from cars.applications import application_constants
-from cars.applications.point_cloud_fusion import (
-    pc_fusion_algo,
-    pc_fusion_wrappers,
-)
 from cars.applications.point_cloud_outlier_removal import (
     abstract_outlier_removal_app as pc_removal,
 )
-from cars.applications.point_cloud_outlier_removal import outlier_removal_algo
+from cars.applications.point_cloud_outlier_removal import (
+    outlier_removal_algo,
+)
+from cars.applications.triangulation import pc_transform
 from cars.applications.triangulation.triangulation_wrappers import (
     generate_point_cloud_file_names,
 )
@@ -68,20 +65,21 @@ class Statistical(
 
     # pylint: disable=too-many-instance-attributes
 
-    def __init__(self, conf=None):
+    def __init__(self, scaling_coeff, conf=None):
         """
         Init function of Statistical
 
+        :param scaling_coeff: scaling factor for resolution
+        :type scaling_coeff: float
         :param conf: configuration for points outlier removal
         :return: a application_to_use object
         """
 
-        super().__init__(conf=conf)
+        super().__init__(scaling_coeff, conf=conf)
 
         self.used_method = self.used_config["method"]
 
         # statistical outliers
-        self.activated = self.used_config["activated"]
         self.k = self.used_config["k"]
         self.filtering_constant = self.used_config["filtering_constant"]
         self.mean_factor = self.used_config["mean_factor"]
@@ -124,10 +122,6 @@ class Statistical(
         overloaded_conf["use_median"] = conf.get("use_median", True)
 
         # statistical outlier filtering
-        overloaded_conf["activated"] = conf.get(
-            "activated", True
-        )  # if false, the following
-        # parameters are unused
         # k: number of neighbors
         overloaded_conf["k"] = conf.get("k", 50)
         # filtering_constant: constant to apply in the distance threshold
@@ -152,7 +146,6 @@ class Statistical(
         point_cloud_outlier_removal_schema = {
             "method": str,
             "save_by_pair": bool,
-            "activated": bool,
             "k": And(int, lambda x: x > 0),
             "filtering_constant": And(Or(float, int), lambda x: x >= 0),
             "mean_factor": And(Or(float, int), lambda x: x >= 0),
@@ -189,17 +182,12 @@ class Statistical(
 
         """
 
-        if not self.activated:
-            # if not activated, this tile size must not be taken into acount
-            # during the min(*tile_sizes) operations
-            tile_size = math.inf
-        else:
-            tot = 10000 * superposing_point_clouds / point_cloud_resolution
+        tot = 10000 * superposing_point_clouds / point_cloud_resolution
 
-            import_ = 200  # MiB
-            tile_size = int(
-                np.sqrt(float(((max_ram_per_worker - import_) * 2**23)) / tot)
-            )
+        import_ = 200  # MiB
+        tile_size = int(
+            np.sqrt(float(((max_ram_per_worker - import_) * 2**23)) / tot)
+        )
 
         logging.info(
             "Estimated optimal tile size for statistical "
@@ -226,11 +214,7 @@ class Statistical(
         :return: margin
         :rtype: int
         """
-
-        margin = 0
-
-        if self.activated:
-            margin = self.half_epipolar_size
+        margin = self.half_epipolar_size
 
         return margin
 
@@ -245,7 +229,7 @@ class Statistical(
 
         return 0
 
-    def run(
+    def run(  # pylint: disable=too-many-positional-arguments
         self,
         merged_point_cloud,
         orchestrator=None,
@@ -298,9 +282,6 @@ class Statistical(
         :rtype : CarsDataset filled with xr.Dataset
         """
 
-        if not self.activated:
-            return merged_point_cloud
-
         # Default orchestrator
         if orchestrator is None:
             # Create default sequential orchestrator for current application
@@ -312,263 +293,103 @@ class Statistical(
         else:
             self.orchestrator = orchestrator
 
-        if merged_point_cloud.dataset_type == "points":
-            (
-                filtered_point_cloud,
-                laz_pc_file_name,
-                csv_pc_file_name,
-                saving_info,
-            ) = self.__register_pc_dataset__(
-                merged_point_cloud,
-                point_cloud_dir,
-                dump_dir,
-                app_name="statistical",
-            )
-
-            logging.info(
-                "Cloud filtering: Filtered points number: {}".format(
-                    filtered_point_cloud.shape[1]
-                    * filtered_point_cloud.shape[0]
-                )
-            )
+        if dump_dir is None:
+            dump_dir = self.generate_unknown_dump_dir(self.orchestrator)
 
-            # Generate rasters
-            for col in range(filtered_point_cloud.shape[1]):
-                for row in range(filtered_point_cloud.shape[0]):
-                    # update saving infos for potential replacement
-                    full_saving_info = ocht.update_saving_infos(
-                        saving_info, row=row, col=col
-                    )
-                    if merged_point_cloud.tiles[row][col] is not None:
-                        # Delayed call to cloud filtering
-                        filtered_point_cloud[
-                            row, col
-                        ] = self.orchestrator.cluster.create_task(
-                            statistical_removal_wrapper
-                        )(
-                            merged_point_cloud[row, col],
-                            self.k,
-                            self.filtering_constant,
-                            self.mean_factor,
-                            self.std_dev_factor,
-                            self.use_median,
-                            save_by_pair=(self.save_by_pair),
-                            point_cloud_csv_file_name=csv_pc_file_name,
-                            point_cloud_laz_file_name=laz_pc_file_name,
-                            saving_info=full_saving_info,
-                        )
-
-        elif merged_point_cloud.dataset_type == "arrays":
-            prefix = os.path.basename(dump_dir)
-            # Save as depth map
-            filtered_point_cloud, saving_info_epipolar = (
-                self.__register_epipolar_dataset__(
-                    merged_point_cloud,
-                    depth_map_dir,
-                    dump_dir,
-                    app_name="statistical",
-                    pair_key=prefix,
-                )
+        if merged_point_cloud.dataset_type != "arrays":
+            raise RuntimeError(
+                "Only arrays is supported in statistical removal"
             )
 
-            # Save as point cloud
-            (
-                flatten_filtered_point_cloud,
-                laz_pc_dir_name,
-                csv_pc_dir_name,
-                saving_info_flatten,
-            ) = self.__register_pc_dataset__(
+        prefix = os.path.basename(dump_dir)
+        # Save as depth map
+        filtered_point_cloud, saving_info_epipolar = (
+            self.__register_epipolar_dataset__(
                 merged_point_cloud,
-                point_cloud_dir,
+                depth_map_dir,
                 dump_dir,
                 app_name="statistical",
+                pair_key=prefix,
             )
+        )
 
-            # initialize empty index file for point cloud product if official
-            # product is requested
-            pc_index = None
-            if point_cloud_dir:
-                pc_index = {}
+        # Save as point cloud
+        (
+            flatten_filtered_point_cloud,
+            laz_pc_dir_name,
+            csv_pc_dir_name,
+            saving_info_flatten,
+        ) = self.__register_pc_dataset__(
+            merged_point_cloud,
+            point_cloud_dir,
+            dump_dir,
+            app_name="statistical",
+        )
+
+        # initialize empty index file for point cloud product if official
+        # product is requested
+        pc_index = None
+        if point_cloud_dir:
+            pc_index = {}
 
-            # Generate rasters
-            for col in range(filtered_point_cloud.shape[1]):
-                for row in range(filtered_point_cloud.shape[0]):
+        # Generate rasters
+        for col in range(filtered_point_cloud.shape[1]):
+            for row in range(filtered_point_cloud.shape[0]):
 
-                    # update saving infos for potential replacement
-                    full_saving_info_epipolar = ocht.update_saving_infos(
-                        saving_info_epipolar, row=row, col=col
+                # update saving infos for potential replacement
+                full_saving_info_epipolar = ocht.update_saving_infos(
+                    saving_info_epipolar, row=row, col=col
+                )
+                full_saving_info_flatten = None
+                if saving_info_flatten is not None:
+                    full_saving_info_flatten = ocht.update_saving_infos(
+                        saving_info_flatten, row=row, col=col
                     )
-                    full_saving_info_flatten = None
-                    if saving_info_flatten is not None:
-                        full_saving_info_flatten = ocht.update_saving_infos(
-                            saving_info_flatten, row=row, col=col
-                        )
 
-                    if merged_point_cloud[row][col] is not None:
-                        csv_pc_file_name, laz_pc_file_name = (
-                            generate_point_cloud_file_names(
-                                csv_pc_dir_name,
-                                laz_pc_dir_name,
-                                row,
-                                col,
-                                pc_index,
-                                pair_key=prefix,
-                            )
+                if merged_point_cloud[row][col] is not None:
+                    csv_pc_file_name, laz_pc_file_name = (
+                        generate_point_cloud_file_names(
+                            csv_pc_dir_name,
+                            laz_pc_dir_name,
+                            row,
+                            col,
+                            pc_index,
+                            pair_key=prefix,
                         )
-                        window = merged_point_cloud.tiling_grid[row, col]
-                        overlap = filtered_point_cloud.overlaps[row, col]
-                        # Delayed call to cloud filtering
-                        (
-                            filtered_point_cloud[row, col],
-                            flatten_filtered_point_cloud[row, col],
-                        ) = self.orchestrator.cluster.create_task(
-                            epipolar_statistical_removal_wrapper, nout=2
-                        )(
-                            merged_point_cloud[row, col],
-                            self.k,
-                            self.filtering_constant,
-                            self.mean_factor,
-                            self.std_dev_factor,
-                            self.use_median,
-                            self.half_epipolar_size,
-                            window,
-                            overlap,
-                            epsg=epsg,
-                            point_cloud_csv_file_name=csv_pc_file_name,
-                            point_cloud_laz_file_name=laz_pc_file_name,
-                            saving_info_epipolar=full_saving_info_epipolar,
-                            saving_info_flatten=full_saving_info_flatten,
-                        )
-
-            # update point cloud index
-            if point_cloud_dir:
-                self.orchestrator.update_index(pc_index)
+                    )
+                    window = merged_point_cloud.tiling_grid[row, col]
+                    overlap = filtered_point_cloud.overlaps[row, col]
+                    # Delayed call to cloud filtering
+                    (
+                        filtered_point_cloud[row, col],
+                        flatten_filtered_point_cloud[row, col],
+                    ) = self.orchestrator.cluster.create_task(
+                        epipolar_statistical_removal_wrapper, nout=2
+                    )(
+                        merged_point_cloud[row, col],
+                        self.k,
+                        self.filtering_constant,
+                        self.mean_factor,
+                        self.std_dev_factor,
+                        self.use_median,
+                        self.half_epipolar_size,
+                        window,
+                        overlap,
+                        epsg=epsg,
+                        point_cloud_csv_file_name=csv_pc_file_name,
+                        point_cloud_laz_file_name=laz_pc_file_name,
+                        saving_info_epipolar=full_saving_info_epipolar,
+                        saving_info_flatten=full_saving_info_flatten,
+                    )
 
-        else:
-            logging.error(
-                "PointCloudOutlierRemoval application doesn't support"
-                "this input data "
-                "format"
-            )
+        # update point cloud index
+        if point_cloud_dir:
+            self.orchestrator.update_index(pc_index)
 
         return filtered_point_cloud
 
 
-def statistical_removal_wrapper(
-    cloud,
-    statistical_k,
-    filtering_constant,
-    mean_factor,
-    std_dev_factor,
-    use_median,
-    save_by_pair: bool = False,
-    point_cloud_csv_file_name=None,
-    point_cloud_laz_file_name=None,
-    saving_info=None,
-):
-    """
-    Statistical outlier removal
-
-    :param cloud: cloud to filter
-    :type cloud: pandas DataFrame
-    :param statistical_k: k
-    :type statistical_k: int
-    :param filtering_constant: constant applied to the threshold
-    :type filtering_constant: float
-    :param mean_factor: mean factor
-    :type mean_factor: float
-    :param std_dev_factor: std factor
-    :type std_dev_factor: float
-    :param use_median: use median and quartile instead of mean and std
-    :type use median: bool
-    :param save_by_pair: save point cloud as pair
-    :type save_by_pair: bool
-    :param point_cloud_csv_file_name: write point cloud as CSV in filename
-        (if None, the point cloud is not written as csv)
-    :type point_cloud_csv_file_name: str
-    :param point_cloud_laz_file_name: write point cloud as laz in filename
-        (if None, the point cloud is not written as laz)
-    :type point_cloud_laz_file_name: str
-    :param saving_info: saving infos
-    :type saving_info: dict
-
-    :return: filtered cloud
-    :rtype: pandas DataFrame
-
-    """
-
-    # Copy input cloud
-    new_cloud = cloud.copy()
-    new_cloud.attrs = copy.deepcopy(cloud.attrs)
-
-    # Get current epsg
-    cloud_attributes = cars_dataset.get_attributes(new_cloud)
-    cloud_epsg = cloud_attributes["epsg"]
-    current_epsg = cloud_epsg
-
-    # Check if can be used to filter
-    spatial_ref = CRS.from_epsg(cloud_epsg)
-    if spatial_ref.is_geographic:
-        logging.debug(
-            "The point cloud to filter is not in a cartographic system. "
-            "The filter's default parameters might not be adapted "
-            "to this referential. Convert the points "
-            "cloud to ECEF to ensure a proper point_cloud."
-        )
-        # Convert to epsg = 4978
-        cartographic_epsg = 4978
-        projection.point_cloud_conversion_dataframe(
-            new_cloud, current_epsg, cartographic_epsg
-        )
-        current_epsg = cartographic_epsg
-
-    # Filter point cloud
-    tic = time.process_time()
-    (new_cloud, _) = outlier_removal_algo.statistical_outlier_filtering(
-        new_cloud,
-        statistical_k,
-        filtering_constant,
-        mean_factor,
-        std_dev_factor,
-        use_median,
-    )
-    toc = time.process_time()
-    logging.debug(
-        "Statistical cloud filtering done in {} seconds".format(toc - tic)
-    )
-
-    # Conversion to UTM
-    projection.point_cloud_conversion_dataframe(
-        new_cloud, cloud_epsg, current_epsg
-    )
-    # Update attributes
-    cloud_attributes["epsg"] = current_epsg
-
-    cars_dataset.fill_dataframe(
-        new_cloud, saving_info=saving_info, attributes=cloud_attributes
-    )
-
-    # save point cloud in worker
-    if point_cloud_csv_file_name:
-        cars_dataset.run_save_points(
-            new_cloud,
-            point_cloud_csv_file_name,
-            save_by_pair=save_by_pair,
-            overwrite=True,
-            point_cloud_format="csv",
-        )
-    if point_cloud_laz_file_name:
-        cars_dataset.run_save_points(
-            new_cloud,
-            point_cloud_laz_file_name,
-            save_by_pair=save_by_pair,
-            overwrite=True,
-            point_cloud_format="laz",
-        )
-
-    return new_cloud
-
-
+# pylint: disable=too-many-positional-arguments
 def epipolar_statistical_removal_wrapper(
     epipolar_ds,
     statistical_k,
@@ -617,9 +438,29 @@ def epipolar_statistical_removal_wrapper(
     # Copy input cloud
     filtered_cloud = copy.copy(epipolar_ds)
 
+    # Get current epsg
+    cloud_epsg = filtered_cloud.attrs["epsg"]
+    current_epsg = cloud_epsg
+
+    # Check if can be used to filter
+    spatial_ref = CRS.from_epsg(cloud_epsg)
+    if spatial_ref.is_geographic:
+        logging.debug(
+            "The point cloud to filter is not in a cartographic system. "
+            "The filter's default parameters might not be adapted "
+            "to this referential. Please, convert the point "
+            "cloud to ECEF to ensure a proper point_cloud."
+        )
+        # Convert to epsg = 4978
+        cartographic_epsg = 4978
+
+        projection.point_cloud_conversion_dataset(
+            filtered_cloud, cartographic_epsg
+        )
+        current_epsg = cartographic_epsg
+
     outlier_removal_algo.epipolar_statistical_filtering(
         filtered_cloud,
-        epsg,
         k=statistical_k,
         filtering_constant=filtering_constant,
         mean_factor=mean_factor,
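(Aside, not part of the diff: the block added above moves a geographic point cloud to ECEF, EPSG:4978, before filtering, because the statistical thresholds are metric distances. A minimal pyproj sketch of that kind of check and conversion, with illustrative coordinates; the CARS projection helpers themselves are not reproduced here:)

```python
from pyproj import CRS, Transformer

cloud_epsg = 4326  # example: WGS84 geographic coordinates (degrees)

# Distance-based filtering assumes metric coordinates, so geographic
# clouds are first converted to an Earth-centred cartesian frame.
if CRS.from_epsg(cloud_epsg).is_geographic:
    to_ecef = Transformer.from_crs(cloud_epsg, 4978, always_xy=True)
    # lon, lat in degrees and height in metres -> X, Y, Z in metres
    x, y, z = to_ecef.transform(1.4442, 43.6047, 150.0)
    print(x, y, z)
```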
@@ -643,9 +484,11 @@ def epipolar_statistical_removal_wrapper(
     if point_cloud_csv_file_name or point_cloud_laz_file_name:
         # Convert epipolar array into point cloud
         flatten_filtered_cloud, cloud_epsg = (
-            pc_fusion_algo.create_combined_cloud([filtered_cloud], ["0"], epsg)
+            pc_transform.depth_map_dataset_to_dataframe(
+                filtered_cloud, current_epsg
+            )
         )
-        # Convert to UTM
+        # Convert to wanted epsg
         if epsg is not None and cloud_epsg != epsg:
             projection.point_cloud_conversion_dataframe(
                 flatten_filtered_cloud, cloud_epsg, epsg
@@ -653,7 +496,7 @@ def epipolar_statistical_removal_wrapper(
             cloud_epsg = epsg
 
         # Fill attributes for LAZ saving
-        color_type = pc_fusion_wrappers.get_color_type([filtered_cloud])
+        color_type = pc_transform.get_color_type([filtered_cloud])
         attributes = {
             "epsg": cloud_epsg,
             "color_type": color_type,
@@ -127,10 +127,11 @@ class PointCloudRasterization(ApplicationTemplate, metaclass=ABCMeta):
         """
 
     @abstractmethod
-    def run(
+    def run(  # pylint: disable=too-many-positional-arguments
         self,
         point_clouds,
         epsg,
+        output_crs,
         resolution,
         orchestrator=None,
         dsm_file_name=None,
@@ -45,6 +45,7 @@ from cars.core import constants as cst
 from cars.data_structures import cars_dataset
 
 
+# pylint: disable=too-many-positional-arguments
 def simple_rasterization_dataset_wrapper(
     cloud: pandas.DataFrame,
     resolution: float,
@@ -61,6 +62,7 @@ def simple_rasterization_dataset_wrapper(
     list_computed_layers: List[str] = None,
     source_pc_names: List[str] = None,
     performance_map_classes: List[float] = None,
+    cloud_global_id: int = None,
 ) -> xr.Dataset:
     """
     Wrapper of simple_rasterization
@@ -91,6 +93,8 @@ def simple_rasterization_dataset_wrapper(
         name of sensors pair or name of point cloud file
     :param performance_map_classes: list for step defining border of class
     :type performance_map_classes: list or None
+    :param cloud_global_id: global id of pair
+    :type cloud_global_id: int
     :return: Rasterized cloud
     """
 
@@ -129,11 +133,13 @@ def simple_rasterization_dataset_wrapper(
         list_computed_layers=list_computed_layers,
         source_pc_names=source_pc_names,
         performance_map_classes=performance_map_classes,
+        cloud_global_id=cloud_global_id,
     )
 
     return raster
 
 
+# pylint: disable=too-many-positional-arguments
 def compute_vector_raster_and_stats(
     cloud: pandas.DataFrame,
     x_start: float,
@@ -144,6 +150,7 @@ def compute_vector_raster_and_stats(
     sigma: float,
     radius: int,
     list_computed_layers: List[str] = None,
+    cloud_global_id: int = None,
 ) -> Tuple[
     np.ndarray,
     np.ndarray,
@@ -168,6 +175,7 @@ def compute_vector_raster_and_stats(
     :param sigma: Sigma for gaussian interpolation. If None, set to resolution
     :param radius: Radius for hole filling.
     :param list_computed_layers: list of computed output data
+    :param cloud_global_id: global id of pair
     :return: a tuple with rasterization results and statistics.
     """
     # get points corresponding to (X, Y positions) + data_valid
@@ -225,8 +233,8 @@ def compute_vector_raster_and_stats(
     # Fill the dataframe with additional columns :
     # each column refers to a point cloud id
     number_of_pc = cars_dataset.get_attributes(cloud)["number_of_pc"]
-    if cst.POINT_CLOUD_GLOBAL_ID in cloud.columns and (
-        (list_computed_layers is None)
+    if (cloud_global_id is not None) and (
+        list_computed_layers is None
         or rast_wrap.substring_in_list(
             list_computed_layers, cst.POINT_CLOUD_SOURCE_KEY_ROOT
         )
@@ -234,9 +242,10 @@ def compute_vector_raster_and_stats(
         for pc_id in range(number_of_pc):
             # Create binary list that indicates from each point whether it comes
             # from point cloud number "pc_id"
-            point_is_from_pc = list(
-                map(int, cloud[cst.POINT_CLOUD_GLOBAL_ID] == pc_id)
-            )
+            if pc_id == cloud_global_id:
+                point_is_from_pc = np.ones(cloud.shape[0], dtype=int)
+            else:
+                point_is_from_pc = np.zeros(cloud.shape[0], dtype=int)
             pc_key = "{}{}".format(cst.POINT_CLOUD_SOURCE_KEY_ROOT, pc_id)
             cloud[pc_key] = point_is_from_pc
 
@@ -346,7 +355,7 @@ def compute_vector_raster_and_stats(
     )
 
 
-def rasterize(
+def rasterize(  # pylint: disable=too-many-positional-arguments
     cloud: pandas.DataFrame,
     resolution: float,
     epsg: int,
@@ -362,6 +371,7 @@ def rasterize(
     list_computed_layers: List[str] = None,
     source_pc_names: List[str] = None,
     performance_map_classes: List[float] = None,
+    cloud_global_id: int = None,
 ) -> Union[xr.Dataset, None]:
     """
     Rasterize a point cloud with its color bands to a Dataset
@@ -385,6 +395,7 @@ def rasterize(
     :param source_pc_names: list of source pc names
     :param performance_map_classes: list for step defining border of class
     :type performance_map_classes: list or None
+    :param cloud_global_id: global id of pair
     :return: Rasterized cloud color and statistics.
     """
 
@@ -432,6 +443,7 @@ def rasterize(
         sigma,
         radius,
         list_computed_layers,
+        cloud_global_id=cloud_global_id,
     )
 
     # reshape data as a 2d grid.
@@ -154,6 +154,7 @@ def find_indexes_in_point_cloud(
     return indexes
 
 
+# pylint: disable=too-many-positional-arguments
 def create_raster_dataset(  # noqa: C901
     raster: np.ndarray,
     weights_sum: np.ndarray,
@@ -548,7 +549,7 @@ def update_weights(old_weights, weights):
     return new_weights
 
 
-def update_data(
+def update_data(  # pylint: disable=too-many-positional-arguments
     old_data, current_data, weights, old_weights, nodata, method="basic"
 ):
     """