cars-1.0.0a3-cp311-cp311-win_amd64.whl → cars-1.0.0a4-cp311-cp311-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of cars might be problematic.
Files changed (139)
  1. cars/__init__.py +3 -3
  2. cars/applications/__init__.py +0 -3
  3. cars/applications/application_template.py +20 -0
  4. cars/applications/auxiliary_filling/abstract_auxiliary_filling_app.py +12 -2
  5. cars/applications/auxiliary_filling/auxiliary_filling_algo.py +2 -2
  6. cars/applications/auxiliary_filling/auxiliary_filling_from_sensors_app.py +80 -36
  7. cars/applications/dem_generation/dem_generation_algo.py +1 -1
  8. cars/applications/dem_generation/dem_generation_wrappers.py +23 -57
  9. cars/applications/dem_generation/dichotomic_generation_app.py +3 -3
  10. cars/applications/dem_generation/rasterization_app.py +100 -41
  11. cars/applications/dense_match_filling/__init__.py +1 -1
  12. cars/applications/dense_match_filling/abstract_dense_match_filling_app.py +2 -15
  13. cars/applications/dense_match_filling/fill_disp_algo.py +32 -373
  14. cars/applications/dense_match_filling/fill_disp_wrappers.py +0 -343
  15. cars/applications/dense_match_filling/zero_padding_app.py +10 -5
  16. cars/applications/dense_matching/abstract_dense_matching_app.py +2 -1
  17. cars/applications/dense_matching/census_mccnn_sgm_app.py +38 -39
  18. cars/applications/dense_matching/cpp/dense_matching_cpp.cp311-win_amd64.dll.a +0 -0
  19. cars/applications/dense_matching/cpp/dense_matching_cpp.cp311-win_amd64.pyd +0 -0
  20. cars/applications/dense_matching/dense_matching_algo.py +48 -14
  21. cars/applications/dense_matching/dense_matching_wrappers.py +11 -3
  22. cars/applications/dense_matching/disparity_grid_algo.py +84 -62
  23. cars/applications/dense_matching/loaders/pandora_loader.py +91 -33
  24. cars/applications/dsm_filling/border_interpolation_app.py +1 -7
  25. cars/applications/dsm_filling/bulldozer_filling_app.py +2 -8
  26. cars/applications/dsm_filling/exogenous_filling_app.py +4 -9
  27. cars/applications/grid_generation/abstract_grid_generation_app.py +1 -1
  28. cars/applications/grid_generation/epipolar_grid_generation_app.py +4 -2
  29. cars/applications/grid_generation/grid_correction_app.py +4 -1
  30. cars/applications/grid_generation/grid_generation_algo.py +7 -2
  31. cars/applications/ground_truth_reprojection/abstract_ground_truth_reprojection_app.py +1 -1
  32. cars/applications/ground_truth_reprojection/direct_localization_app.py +2 -2
  33. cars/applications/ground_truth_reprojection/ground_truth_reprojection_algo.py +2 -1
  34. cars/applications/point_cloud_fusion/abstract_pc_fusion_app.py +0 -155
  35. cars/applications/point_cloud_fusion/mapping_to_terrain_tiles_app.py +0 -658
  36. cars/applications/point_cloud_fusion/pc_fusion_algo.py +0 -1339
  37. cars/applications/point_cloud_fusion/pc_fusion_wrappers.py +0 -869
  38. cars/applications/point_cloud_outlier_removal/abstract_outlier_removal_app.py +2 -1
  39. cars/applications/point_cloud_outlier_removal/outlier_removal_algo.py +9 -8
  40. cars/applications/point_cloud_outlier_removal/small_components_app.py +96 -267
  41. cars/applications/point_cloud_outlier_removal/statistical_app.py +116 -275
  42. cars/applications/rasterization/abstract_pc_rasterization_app.py +1 -1
  43. cars/applications/rasterization/rasterization_algo.py +18 -6
  44. cars/applications/rasterization/rasterization_wrappers.py +2 -1
  45. cars/applications/rasterization/simple_gaussian_app.py +60 -113
  46. cars/applications/resampling/abstract_resampling_app.py +1 -1
  47. cars/applications/resampling/bicubic_resampling_app.py +3 -1
  48. cars/applications/resampling/resampling_algo.py +16 -4
  49. cars/applications/resampling/resampling_wrappers.py +3 -1
  50. cars/applications/sparse_matching/abstract_sparse_matching_app.py +1 -1
  51. cars/applications/sparse_matching/sift_app.py +3 -3
  52. cars/applications/sparse_matching/sparse_matching_algo.py +3 -2
  53. cars/applications/sparse_matching/sparse_matching_wrappers.py +1 -1
  54. cars/applications/triangulation/abstract_triangulation_app.py +1 -1
  55. cars/applications/triangulation/line_of_sight_intersection_app.py +13 -11
  56. cars/applications/triangulation/pc_transform.py +552 -0
  57. cars/applications/triangulation/triangulation_algo.py +6 -4
  58. cars/applications/triangulation/triangulation_wrappers.py +1 -0
  59. cars/bundleadjustment.py +6 -6
  60. cars/cars.py +11 -9
  61. cars/core/cars_logging.py +80 -49
  62. cars/core/constants.py +0 -1
  63. cars/core/datasets.py +5 -2
  64. cars/core/geometry/abstract_geometry.py +256 -25
  65. cars/core/geometry/shareloc_geometry.py +110 -82
  66. cars/core/inputs.py +57 -19
  67. cars/core/outputs.py +1 -1
  68. cars/core/preprocessing.py +17 -3
  69. cars/core/projection.py +9 -6
  70. cars/core/tiling.py +10 -3
  71. cars/data_structures/cars_dataset.py +5 -5
  72. cars/data_structures/corresponding_tiles_tools.py +0 -103
  73. cars/data_structures/format_transformation.py +4 -1
  74. cars/devibrate.py +6 -3
  75. cars/extractroi.py +20 -21
  76. cars/orchestrator/cluster/abstract_cluster.py +15 -5
  77. cars/orchestrator/cluster/abstract_dask_cluster.py +6 -2
  78. cars/orchestrator/cluster/dask_jobqueue_utils.py +1 -1
  79. cars/orchestrator/cluster/log_wrapper.py +148 -21
  80. cars/orchestrator/cluster/mp_cluster/multiprocessing_cluster.py +11 -3
  81. cars/orchestrator/cluster/mp_cluster/multiprocessing_profiler.py +2 -2
  82. cars/orchestrator/cluster/pbs_dask_cluster.py +1 -1
  83. cars/orchestrator/cluster/sequential_cluster.py +5 -4
  84. cars/orchestrator/cluster/slurm_dask_cluster.py +1 -1
  85. cars/orchestrator/orchestrator.py +14 -3
  86. cars/orchestrator/registry/id_generator.py +1 -0
  87. cars/orchestrator/registry/saver_registry.py +2 -2
  88. cars/pipelines/conf_resolution/conf_final_resolution.json +5 -3
  89. cars/pipelines/default/default_pipeline.py +462 -1073
  90. cars/pipelines/parameters/advanced_parameters.py +74 -64
  91. cars/pipelines/parameters/advanced_parameters_constants.py +2 -5
  92. cars/pipelines/parameters/application_parameters.py +71 -0
  93. cars/pipelines/parameters/depth_map_inputs.py +0 -314
  94. cars/pipelines/parameters/dsm_inputs.py +40 -4
  95. cars/pipelines/parameters/output_parameters.py +2 -2
  96. cars/pipelines/parameters/sensor_inputs.py +30 -75
  97. cars/pipelines/parameters/sensor_inputs_constants.py +0 -2
  98. cars/pipelines/parameters/sensor_loaders/__init__.py +4 -3
  99. cars/pipelines/parameters/sensor_loaders/basic_classif_loader.py +106 -0
  100. cars/pipelines/parameters/sensor_loaders/{basic_sensor_loader.py → basic_image_loader.py} +16 -22
  101. cars/pipelines/parameters/sensor_loaders/pivot_classif_loader.py +121 -0
  102. cars/pipelines/parameters/sensor_loaders/{pivot_sensor_loader.py → pivot_image_loader.py} +10 -21
  103. cars/pipelines/parameters/sensor_loaders/sensor_loader.py +4 -6
  104. cars/pipelines/parameters/sensor_loaders/sensor_loader_template.py +1 -3
  105. cars/pipelines/pipeline_template.py +1 -3
  106. cars/pipelines/unit/unit_pipeline.py +527 -1016
  107. cars/starter.py +4 -3
  108. cars-1.0.0a4.dist-info/DELVEWHEEL +2 -0
  109. {cars-1.0.0a3.dist-info → cars-1.0.0a4.dist-info}/METADATA +135 -53
  110. {cars-1.0.0a3.dist-info → cars-1.0.0a4.dist-info}/RECORD +115 -131
  111. cars.libs/libgcc_s_seh-1-b2494fcbd4d80cf2c98fdd5261f6d850.dll +0 -0
  112. cars.libs/libstdc++-6-e9b0d12ae0e9555bbae55e8dfd08c3f7.dll +0 -0
  113. cars.libs/libwinpthread-1-7882d1b093714ccdfaf4e0789a817792.dll +0 -0
  114. cars/applications/dense_match_filling/cpp/__init__.py +0 -0
  115. cars/applications/dense_match_filling/cpp/dense_match_filling_cpp.cp311-win_amd64.dll.a +0 -0
  116. cars/applications/dense_match_filling/cpp/dense_match_filling_cpp.cp311-win_amd64.pyd +0 -0
  117. cars/applications/dense_match_filling/cpp/dense_match_filling_cpp.py +0 -72
  118. cars/applications/dense_match_filling/cpp/includes/dense_match_filling.hpp +0 -46
  119. cars/applications/dense_match_filling/cpp/meson.build +0 -9
  120. cars/applications/dense_match_filling/cpp/src/bindings.cpp +0 -11
  121. cars/applications/dense_match_filling/cpp/src/dense_match_filling.cpp +0 -142
  122. cars/applications/dense_match_filling/plane_app.py +0 -556
  123. cars/applications/hole_detection/__init__.py +0 -30
  124. cars/applications/hole_detection/abstract_hole_detection_app.py +0 -125
  125. cars/applications/hole_detection/cloud_to_bbox_app.py +0 -346
  126. cars/applications/hole_detection/hole_detection_algo.py +0 -144
  127. cars/applications/hole_detection/hole_detection_wrappers.py +0 -53
  128. cars/applications/point_cloud_denoising/__init__.py +0 -29
  129. cars/applications/point_cloud_denoising/abstract_pc_denoising_app.py +0 -273
  130. cars/applications/point_cloud_fusion/__init__.py +0 -30
  131. cars/applications/point_cloud_fusion/cloud_fusion_constants.py +0 -39
  132. cars/applications/sparse_matching/pandora_sparse_matching_app.py +0 -0
  133. cars/pipelines/parameters/depth_map_inputs_constants.py +0 -25
  134. cars-1.0.0a3.dist-info/DELVEWHEEL +0 -2
  135. cars.libs/libgcc_s_seh-1-ca70890bbc5723b6d0ea31e9c9cded2b.dll +0 -0
  136. cars.libs/libstdc++-6-00ee19f73d5122a1277c137b1c218401.dll +0 -0
  137. cars.libs/libwinpthread-1-f5042e8e3d21edce20c1bc99445f551b.dll +0 -0
  138. {cars-1.0.0a3.dist-info → cars-1.0.0a4.dist-info}/WHEEL +0 -0
  139. {cars-1.0.0a3.dist-info → cars-1.0.0a4.dist-info}/entry_points.txt +0 -0
cars/applications/point_cloud_outlier_removal/statistical_app.py

@@ -27,9 +27,7 @@ import copy
 
 # Standard imports
 import logging
-import math
 import os
-import time
 
 import numpy as np
 
@@ -40,14 +38,13 @@ from pyproj import CRS
 # CARS imports
 import cars.orchestrator.orchestrator as ocht
 from cars.applications import application_constants
-from cars.applications.point_cloud_fusion import (
-    pc_fusion_algo,
-    pc_fusion_wrappers,
-)
 from cars.applications.point_cloud_outlier_removal import (
     abstract_outlier_removal_app as pc_removal,
 )
-from cars.applications.point_cloud_outlier_removal import outlier_removal_algo
+from cars.applications.point_cloud_outlier_removal import (
+    outlier_removal_algo,
+)
+from cars.applications.triangulation import pc_transform
 from cars.applications.triangulation.triangulation_wrappers import (
     generate_point_cloud_file_names,
 )
@@ -83,7 +80,6 @@ class Statistical(
         self.used_method = self.used_config["method"]
 
         # statistical outliers
-        self.activated = self.used_config["activated"]
         self.k = self.used_config["k"]
         self.filtering_constant = self.used_config["filtering_constant"]
         self.mean_factor = self.used_config["mean_factor"]
@@ -126,10 +122,6 @@ class Statistical(
         overloaded_conf["use_median"] = conf.get("use_median", True)
 
         # statistical outlier filtering
-        overloaded_conf["activated"] = conf.get(
-            "activated", True
-        )  # if false, the following
-        # parameters are unused
         # k: number of neighbors
         overloaded_conf["k"] = conf.get("k", 50)
         # filtering_constant: constant to apply in the distance threshold
@@ -154,7 +146,6 @@ class Statistical(
         point_cloud_outlier_removal_schema = {
            "method": str,
            "save_by_pair": bool,
-            "activated": bool,
            "k": And(int, lambda x: x > 0),
            "filtering_constant": And(Or(float, int), lambda x: x >= 0),
            "mean_factor": And(Or(float, int), lambda x: x >= 0),
@@ -191,17 +182,12 @@ class Statistical(
 
         """
 
-        if not self.activated:
-            # if not activated, this tile size must not be taken into acount
-            # during the min(*tile_sizes) operations
-            tile_size = math.inf
-        else:
-            tot = 10000 * superposing_point_clouds / point_cloud_resolution
+        tot = 10000 * superposing_point_clouds / point_cloud_resolution
 
-            import_ = 200  # MiB
-            tile_size = int(
-                np.sqrt(float(((max_ram_per_worker - import_) * 2**23)) / tot)
-            )
+        import_ = 200  # MiB
+        tile_size = int(
+            np.sqrt(float(((max_ram_per_worker - import_) * 2**23)) / tot)
+        )
 
         logging.info(
             "Estimated optimal tile size for statistical "
@@ -228,11 +214,7 @@ class Statistical(
         :return: margin
         :rtype: int
         """
-
-        margin = 0
-
-        if self.activated:
-            margin = self.half_epipolar_size
+        margin = self.half_epipolar_size
 
         return margin
 
@@ -247,7 +229,7 @@ class Statistical(
 
         return 0
 
-    def run(
+    def run(  # pylint: disable=too-many-positional-arguments
         self,
         merged_point_cloud,
         orchestrator=None,
@@ -300,9 +282,6 @@ class Statistical(
         :rtype : CarsDataset filled with xr.Dataset
         """
 
-        if not self.activated:
-            return merged_point_cloud
-
         # Default orchestrator
         if orchestrator is None:
             # Create default sequential orchestrator for current application
@@ -314,263 +293,103 @@ class Statistical(
         else:
             self.orchestrator = orchestrator
 
-        if merged_point_cloud.dataset_type == "points":
-            (
-                filtered_point_cloud,
-                laz_pc_file_name,
-                csv_pc_file_name,
-                saving_info,
-            ) = self.__register_pc_dataset__(
-                merged_point_cloud,
-                point_cloud_dir,
-                dump_dir,
-                app_name="statistical",
-            )
-
-            logging.info(
-                "Cloud filtering: Filtered points number: {}".format(
-                    filtered_point_cloud.shape[1]
-                    * filtered_point_cloud.shape[0]
-                )
-            )
+        if dump_dir is None:
+            dump_dir = self.generate_unknown_dump_dir(self.orchestrator)
 
-            # Generate rasters
-            for col in range(filtered_point_cloud.shape[1]):
-                for row in range(filtered_point_cloud.shape[0]):
-                    # update saving infos for potential replacement
-                    full_saving_info = ocht.update_saving_infos(
-                        saving_info, row=row, col=col
-                    )
-                    if merged_point_cloud.tiles[row][col] is not None:
-                        # Delayed call to cloud filtering
-                        filtered_point_cloud[
-                            row, col
-                        ] = self.orchestrator.cluster.create_task(
-                            statistical_removal_wrapper
-                        )(
-                            merged_point_cloud[row, col],
-                            self.k,
-                            self.filtering_constant,
-                            self.mean_factor,
-                            self.std_dev_factor,
-                            self.use_median,
-                            save_by_pair=(self.save_by_pair),
-                            point_cloud_csv_file_name=csv_pc_file_name,
-                            point_cloud_laz_file_name=laz_pc_file_name,
-                            saving_info=full_saving_info,
-                        )
-
-        elif merged_point_cloud.dataset_type == "arrays":
-            prefix = os.path.basename(dump_dir)
-            # Save as depth map
-            filtered_point_cloud, saving_info_epipolar = (
-                self.__register_epipolar_dataset__(
-                    merged_point_cloud,
-                    depth_map_dir,
-                    dump_dir,
-                    app_name="statistical",
-                    pair_key=prefix,
-                )
+        if merged_point_cloud.dataset_type != "arrays":
+            raise RuntimeError(
+                "Only arrays is supported in statistical removal"
            )
 
-            # Save as point cloud
-            (
-                flatten_filtered_point_cloud,
-                laz_pc_dir_name,
-                csv_pc_dir_name,
-                saving_info_flatten,
-            ) = self.__register_pc_dataset__(
+        prefix = os.path.basename(dump_dir)
+        # Save as depth map
+        filtered_point_cloud, saving_info_epipolar = (
+            self.__register_epipolar_dataset__(
                merged_point_cloud,
-                point_cloud_dir,
+                depth_map_dir,
                dump_dir,
                app_name="statistical",
+                pair_key=prefix,
            )
+        )
 
-            # initialize empty index file for point cloud product if official
-            # product is requested
-            pc_index = None
-            if point_cloud_dir:
-                pc_index = {}
+        # Save as point cloud
+        (
+            flatten_filtered_point_cloud,
+            laz_pc_dir_name,
+            csv_pc_dir_name,
+            saving_info_flatten,
+        ) = self.__register_pc_dataset__(
+            merged_point_cloud,
+            point_cloud_dir,
+            dump_dir,
+            app_name="statistical",
+        )
+
+        # initialize empty index file for point cloud product if official
+        # product is requested
+        pc_index = None
+        if point_cloud_dir:
+            pc_index = {}
 
-            # Generate rasters
-            for col in range(filtered_point_cloud.shape[1]):
-                for row in range(filtered_point_cloud.shape[0]):
+        # Generate rasters
+        for col in range(filtered_point_cloud.shape[1]):
+            for row in range(filtered_point_cloud.shape[0]):
 
-                    # update saving infos for potential replacement
-                    full_saving_info_epipolar = ocht.update_saving_infos(
-                        saving_info_epipolar, row=row, col=col
+                # update saving infos for potential replacement
+                full_saving_info_epipolar = ocht.update_saving_infos(
+                    saving_info_epipolar, row=row, col=col
+                )
+                full_saving_info_flatten = None
+                if saving_info_flatten is not None:
+                    full_saving_info_flatten = ocht.update_saving_infos(
+                        saving_info_flatten, row=row, col=col
                    )
-                    full_saving_info_flatten = None
-                    if saving_info_flatten is not None:
-                        full_saving_info_flatten = ocht.update_saving_infos(
-                            saving_info_flatten, row=row, col=col
-                        )
 
-                    if merged_point_cloud[row][col] is not None:
-                        csv_pc_file_name, laz_pc_file_name = (
-                            generate_point_cloud_file_names(
-                                csv_pc_dir_name,
-                                laz_pc_dir_name,
-                                row,
-                                col,
-                                pc_index,
-                                pair_key=prefix,
-                            )
+                if merged_point_cloud[row][col] is not None:
+                    csv_pc_file_name, laz_pc_file_name = (
+                        generate_point_cloud_file_names(
+                            csv_pc_dir_name,
+                            laz_pc_dir_name,
+                            row,
+                            col,
+                            pc_index,
+                            pair_key=prefix,
                        )
-                        window = merged_point_cloud.tiling_grid[row, col]
-                        overlap = filtered_point_cloud.overlaps[row, col]
-                        # Delayed call to cloud filtering
-                        (
-                            filtered_point_cloud[row, col],
-                            flatten_filtered_point_cloud[row, col],
-                        ) = self.orchestrator.cluster.create_task(
-                            epipolar_statistical_removal_wrapper, nout=2
-                        )(
-                            merged_point_cloud[row, col],
-                            self.k,
-                            self.filtering_constant,
-                            self.mean_factor,
-                            self.std_dev_factor,
-                            self.use_median,
-                            self.half_epipolar_size,
-                            window,
-                            overlap,
-                            epsg=epsg,
-                            point_cloud_csv_file_name=csv_pc_file_name,
-                            point_cloud_laz_file_name=laz_pc_file_name,
-                            saving_info_epipolar=full_saving_info_epipolar,
-                            saving_info_flatten=full_saving_info_flatten,
-                        )
-
-            # update point cloud index
-            if point_cloud_dir:
-                self.orchestrator.update_index(pc_index)
+                    )
+                    window = merged_point_cloud.tiling_grid[row, col]
+                    overlap = filtered_point_cloud.overlaps[row, col]
+                    # Delayed call to cloud filtering
+                    (
+                        filtered_point_cloud[row, col],
+                        flatten_filtered_point_cloud[row, col],
+                    ) = self.orchestrator.cluster.create_task(
+                        epipolar_statistical_removal_wrapper, nout=2
+                    )(
+                        merged_point_cloud[row, col],
+                        self.k,
+                        self.filtering_constant,
+                        self.mean_factor,
+                        self.std_dev_factor,
+                        self.use_median,
+                        self.half_epipolar_size,
+                        window,
+                        overlap,
+                        epsg=epsg,
+                        point_cloud_csv_file_name=csv_pc_file_name,
+                        point_cloud_laz_file_name=laz_pc_file_name,
+                        saving_info_epipolar=full_saving_info_epipolar,
+                        saving_info_flatten=full_saving_info_flatten,
+                    )
 
-        else:
-            logging.error(
-                "PointCloudOutlierRemoval application doesn't support"
-                "this input data "
-                "format"
-            )
+        # update point cloud index
+        if point_cloud_dir:
+            self.orchestrator.update_index(pc_index)
 
         return filtered_point_cloud
 
 
-def statistical_removal_wrapper(
-    cloud,
-    statistical_k,
-    filtering_constant,
-    mean_factor,
-    std_dev_factor,
-    use_median,
-    save_by_pair: bool = False,
-    point_cloud_csv_file_name=None,
-    point_cloud_laz_file_name=None,
-    saving_info=None,
-):
-    """
-    Statistical outlier removal
-
-    :param cloud: cloud to filter
-    :type cloud: pandas DataFrame
-    :param statistical_k: k
-    :type statistical_k: int
-    :param filtering_constant: constant applied to the threshold
-    :type filtering_constant: float
-    :param mean_factor: mean factor
-    :type mean_factor: float
-    :param std_dev_factor: std factor
-    :type std_dev_factor: float
-    :param use_median: use median and quartile instead of mean and std
-    :type use median: bool
-    :param save_by_pair: save point cloud as pair
-    :type save_by_pair: bool
-    :param point_cloud_csv_file_name: write point cloud as CSV in filename
-        (if None, the point cloud is not written as csv)
-    :type point_cloud_csv_file_name: str
-    :param point_cloud_laz_file_name: write point cloud as laz in filename
-        (if None, the point cloud is not written as laz)
-    :type point_cloud_laz_file_name: str
-    :param saving_info: saving infos
-    :type saving_info: dict
-
-    :return: filtered cloud
-    :rtype: pandas DataFrame
-
-    """
-
-    # Copy input cloud
-    new_cloud = cloud.copy()
-    new_cloud.attrs = copy.deepcopy(cloud.attrs)
-
-    # Get current epsg
-    cloud_attributes = cars_dataset.get_attributes(new_cloud)
-    cloud_epsg = cloud_attributes["epsg"]
-    current_epsg = cloud_epsg
-
-    # Check if can be used to filter
-    spatial_ref = CRS.from_epsg(cloud_epsg)
-    if spatial_ref.is_geographic:
-        logging.debug(
-            "The point cloud to filter is not in a cartographic system. "
-            "The filter's default parameters might not be adapted "
-            "to this referential. Convert the points "
-            "cloud to ECEF to ensure a proper point_cloud."
-        )
-        # Convert to epsg = 4978
-        cartographic_epsg = 4978
-        projection.point_cloud_conversion_dataframe(
-            new_cloud, current_epsg, cartographic_epsg
-        )
-        current_epsg = cartographic_epsg
-
-    # Filter point cloud
-    tic = time.process_time()
-    (new_cloud, _) = outlier_removal_algo.statistical_outlier_filtering(
-        new_cloud,
-        statistical_k,
-        filtering_constant,
-        mean_factor,
-        std_dev_factor,
-        use_median,
-    )
-    toc = time.process_time()
-    logging.debug(
-        "Statistical cloud filtering done in {} seconds".format(toc - tic)
-    )
-
-    # Conversion to UTM
-    projection.point_cloud_conversion_dataframe(
-        new_cloud, cloud_epsg, current_epsg
-    )
-    # Update attributes
-    cloud_attributes["epsg"] = current_epsg
-
-    cars_dataset.fill_dataframe(
-        new_cloud, saving_info=saving_info, attributes=cloud_attributes
-    )
-
-    # save point cloud in worker
-    if point_cloud_csv_file_name:
-        cars_dataset.run_save_points(
-            new_cloud,
-            point_cloud_csv_file_name,
-            save_by_pair=save_by_pair,
-            overwrite=True,
-            point_cloud_format="csv",
-        )
-    if point_cloud_laz_file_name:
-        cars_dataset.run_save_points(
-            new_cloud,
-            point_cloud_laz_file_name,
-            save_by_pair=save_by_pair,
-            overwrite=True,
-            point_cloud_format="laz",
-        )
-
-    return new_cloud
-
-
+# pylint: disable=too-many-positional-arguments
 def epipolar_statistical_removal_wrapper(
     epipolar_ds,
     statistical_k,
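
Not part of the diff: the practical effect of the rewritten run() above is that depth-map ("arrays") CarsDatasets are now the only accepted input; the former "points" branch and its statistical_removal_wrapper are removed. A minimal sketch of the new guard, using a stand-in class instead of the real cars_dataset.CarsDataset:

    class FakeCarsDataset:  # stand-in, not the CARS class
        def __init__(self, dataset_type):
            self.dataset_type = dataset_type

    def check_statistical_input(merged_point_cloud):
        # mirrors the guard added in the hunk above
        if merged_point_cloud.dataset_type != "arrays":
            raise RuntimeError("Only arrays is supported in statistical removal")

    check_statistical_input(FakeCarsDataset("arrays"))    # passes
    # check_statistical_input(FakeCarsDataset("points"))  # would now raise RuntimeError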
@@ -619,9 +438,29 @@ def epipolar_statistical_removal_wrapper(
     # Copy input cloud
     filtered_cloud = copy.copy(epipolar_ds)
 
+    # Get current epsg
+    cloud_epsg = filtered_cloud.attrs["epsg"]
+    current_epsg = cloud_epsg
+
+    # Check if can be used to filter
+    spatial_ref = CRS.from_epsg(cloud_epsg)
+    if spatial_ref.is_geographic:
+        logging.debug(
+            "The point cloud to filter is not in a cartographic system. "
+            "The filter's default parameters might not be adapted "
+            "to this referential. Please, convert the point "
+            "cloud to ECEF to ensure a proper point_cloud."
+        )
+        # Convert to epsg = 4978
+        cartographic_epsg = 4978
+
+        projection.point_cloud_conversion_dataset(
+            filtered_cloud, cartographic_epsg
+        )
+        current_epsg = cartographic_epsg
+
     outlier_removal_algo.epipolar_statistical_filtering(
         filtered_cloud,
-        epsg,
         k=statistical_k,
         filtering_constant=filtering_constant,
         mean_factor=mean_factor,
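
Not part of the diff: the block added above only converts when the tile's CRS is geographic. For reference, the pyproj predicate it relies on behaves as follows:

    from pyproj import CRS

    print(CRS.from_epsg(4326).is_geographic)   # True  -> converted to ECEF first
    print(CRS.from_epsg(32631).is_geographic)  # False -> projected (UTM 31N), left as-is
    print(CRS.from_epsg(4978).is_geographic)   # False -> ECEF, the conversion target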
@@ -645,9 +484,11 @@ def epipolar_statistical_removal_wrapper(
     if point_cloud_csv_file_name or point_cloud_laz_file_name:
         # Convert epipolar array into point cloud
         flatten_filtered_cloud, cloud_epsg = (
-            pc_fusion_algo.create_combined_cloud([filtered_cloud], ["0"], epsg)
+            pc_transform.depth_map_dataset_to_dataframe(
+                filtered_cloud, current_epsg
+            )
        )
-        # Convert to UTM
+        # Convert to wanted epsg
         if epsg is not None and cloud_epsg != epsg:
             projection.point_cloud_conversion_dataframe(
                 flatten_filtered_cloud, cloud_epsg, epsg
@@ -655,7 +496,7 @@ def epipolar_statistical_removal_wrapper(
             cloud_epsg = epsg
 
         # Fill attributes for LAZ saving
-        color_type = pc_fusion_wrappers.get_color_type([filtered_cloud])
+        color_type = pc_transform.get_color_type([filtered_cloud])
         attributes = {
             "epsg": cloud_epsg,
             "color_type": color_type,

cars/applications/rasterization/abstract_pc_rasterization_app.py

@@ -127,7 +127,7 @@ class PointCloudRasterization(ApplicationTemplate, metaclass=ABCMeta):
         """
 
     @abstractmethod
-    def run(
+    def run(  # pylint: disable=too-many-positional-arguments
         self,
         point_clouds,
         epsg,

cars/applications/rasterization/rasterization_algo.py

@@ -45,6 +45,7 @@ from cars.core import constants as cst
 from cars.data_structures import cars_dataset
 
 
+# pylint: disable=too-many-positional-arguments
 def simple_rasterization_dataset_wrapper(
     cloud: pandas.DataFrame,
     resolution: float,
@@ -61,6 +62,7 @@ def simple_rasterization_dataset_wrapper(
     list_computed_layers: List[str] = None,
     source_pc_names: List[str] = None,
     performance_map_classes: List[float] = None,
+    cloud_global_id: int = None,
 ) -> xr.Dataset:
     """
     Wrapper of simple_rasterization
@@ -91,6 +93,8 @@
         name of sensors pair or name of point cloud file
     :param performance_map_classes: list for step defining border of class
     :type performance_map_classes: list or None
+    :param cloud_global_id: global id of pair
+    :type cloud_global_id: int
     :return: Rasterized cloud
     """
 
@@ -129,11 +133,13 @@
         list_computed_layers=list_computed_layers,
         source_pc_names=source_pc_names,
         performance_map_classes=performance_map_classes,
+        cloud_global_id=cloud_global_id,
     )
 
     return raster
 
 
+# pylint: disable=too-many-positional-arguments
 def compute_vector_raster_and_stats(
     cloud: pandas.DataFrame,
     x_start: float,
@@ -144,6 +150,7 @@
     sigma: float,
     radius: int,
     list_computed_layers: List[str] = None,
+    cloud_global_id: int = None,
 ) -> Tuple[
     np.ndarray,
     np.ndarray,
@@ -168,6 +175,7 @@
     :param sigma: Sigma for gaussian interpolation. If None, set to resolution
     :param radius: Radius for hole filling.
     :param list_computed_layers: list of computed output data
+    :param cloud_global_id: global id of pair
     :return: a tuple with rasterization results and statistics.
     """
     # get points corresponding to (X, Y positions) + data_valid
@@ -225,8 +233,8 @@
     # Fill the dataframe with additional columns :
     # each column refers to a point cloud id
     number_of_pc = cars_dataset.get_attributes(cloud)["number_of_pc"]
-    if cst.POINT_CLOUD_GLOBAL_ID in cloud.columns and (
-        (list_computed_layers is None)
+    if (cloud_global_id is not None) and (
+        list_computed_layers is None
         or rast_wrap.substring_in_list(
             list_computed_layers, cst.POINT_CLOUD_SOURCE_KEY_ROOT
        )
@@ -234,9 +242,10 @@
         for pc_id in range(number_of_pc):
             # Create binary list that indicates from each point whether it comes
             # from point cloud number "pc_id"
-            point_is_from_pc = list(
-                map(int, cloud[cst.POINT_CLOUD_GLOBAL_ID] == pc_id)
-            )
+            if pc_id == cloud_global_id:
+                point_is_from_pc = np.ones(cloud.shape[0], dtype=int)
+            else:
+                point_is_from_pc = np.zeros(cloud.shape[0], dtype=int)
             pc_key = "{}{}".format(cst.POINT_CLOUD_SOURCE_KEY_ROOT, pc_id)
             cloud[pc_key] = point_is_from_pc
 
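Not part of the diff: because each rasterized tile now carries a single cloud_global_id instead of a per-point cst.POINT_CLOUD_GLOBAL_ID column, the per-source indicator columns above become constant vectors. A small illustration with placeholder column names (the real key root is cst.POINT_CLOUD_SOURCE_KEY_ROOT; all values are hypothetical):

    import numpy as np
    import pandas as pd

    cloud = pd.DataFrame({"x": [0.0, 1.0, 2.0]})   # toy point cloud tile
    number_of_pc, cloud_global_id = 2, 1           # hypothetical values
    for pc_id in range(number_of_pc):
        cloud[f"source_pc{pc_id}"] = (
            np.ones(cloud.shape[0], dtype=int)
            if pc_id == cloud_global_id
            else np.zeros(cloud.shape[0], dtype=int)
        )
    print(cloud)  # source_pc0 is all zeros, source_pc1 all ones
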
@@ -346,7 +355,7 @@
     )
 
 
-def rasterize(
+def rasterize(  # pylint: disable=too-many-positional-arguments
     cloud: pandas.DataFrame,
     resolution: float,
     epsg: int,
@@ -362,6 +371,7 @@ def rasterize(
     list_computed_layers: List[str] = None,
     source_pc_names: List[str] = None,
     performance_map_classes: List[float] = None,
+    cloud_global_id: int = None,
 ) -> Union[xr.Dataset, None]:
     """
     Rasterize a point cloud with its color bands to a Dataset
@@ -385,6 +395,7 @@
     :param source_pc_names: list of source pc names
     :param performance_map_classes: list for step defining border of class
     :type performance_map_classes: list or None
+    :param cloud_global_id: global id of pair
     :return: Rasterized cloud color and statistics.
     """
 
@@ -432,6 +443,7 @@
         sigma,
         radius,
         list_computed_layers,
+        cloud_global_id=cloud_global_id,
     )
 
     # reshape data as a 2d grid.

cars/applications/rasterization/rasterization_wrappers.py

@@ -154,6 +154,7 @@ def find_indexes_in_point_cloud(
     return indexes
 
 
+# pylint: disable=too-many-positional-arguments
 def create_raster_dataset(  # noqa: C901
     raster: np.ndarray,
     weights_sum: np.ndarray,
@@ -548,7 +549,7 @@ def update_weights(old_weights, weights):
     return new_weights
 
 
-def update_data(
+def update_data(  # pylint: disable=too-many-positional-arguments
     old_data, current_data, weights, old_weights, nodata, method="basic"
 ):
     """