cars 1.0.0a1__cp313-cp313-win_amd64.whl → 1.0.0a3__cp313-cp313-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of cars might be problematic. Click here for more details.

Files changed (81)
  1. cars/__init__.py +4 -4
  2. cars/applications/application.py +14 -6
  3. cars/applications/application_template.py +22 -0
  4. cars/applications/auxiliary_filling/auxiliary_filling_from_sensors_app.py +15 -10
  5. cars/applications/auxiliary_filling/auxiliary_filling_wrappers.py +7 -6
  6. cars/applications/dem_generation/abstract_dem_generation_app.py +9 -5
  7. cars/applications/dem_generation/dem_generation_wrappers.py +48 -25
  8. cars/applications/dem_generation/dichotomic_generation_app.py +27 -9
  9. cars/applications/dem_generation/rasterization_app.py +85 -32
  10. cars/applications/dense_match_filling/abstract_dense_match_filling_app.py +4 -0
  11. cars/applications/dense_match_filling/cpp/dense_match_filling_cpp.cp313-win_amd64.dll.a +0 -0
  12. cars/applications/dense_match_filling/cpp/dense_match_filling_cpp.cp313-win_amd64.pyd +0 -0
  13. cars/applications/dense_match_filling/fill_disp_algo.py +41 -12
  14. cars/applications/dense_match_filling/plane_app.py +11 -0
  15. cars/applications/dense_match_filling/zero_padding_app.py +11 -1
  16. cars/applications/dense_matching/census_mccnn_sgm_app.py +254 -548
  17. cars/applications/dense_matching/cpp/dense_matching_cpp.cp313-win_amd64.dll.a +0 -0
  18. cars/applications/dense_matching/cpp/dense_matching_cpp.cp313-win_amd64.pyd +0 -0
  19. cars/applications/dense_matching/dense_matching_algo.py +59 -11
  20. cars/applications/dense_matching/dense_matching_wrappers.py +51 -31
  21. cars/applications/dense_matching/disparity_grid_algo.py +566 -0
  22. cars/applications/dense_matching/loaders/config_mapping.json +13 -0
  23. cars/applications/dense_matching/loaders/global_land_cover_map.tif +0 -0
  24. cars/applications/dense_matching/loaders/pandora_loader.py +78 -1
  25. cars/applications/dsm_filling/border_interpolation_app.py +10 -5
  26. cars/applications/dsm_filling/bulldozer_filling_app.py +14 -7
  27. cars/applications/dsm_filling/exogenous_filling_app.py +10 -5
  28. cars/applications/grid_generation/grid_correction_app.py +0 -53
  29. cars/applications/grid_generation/transform_grid.py +5 -5
  30. cars/applications/point_cloud_fusion/pc_fusion_algo.py +17 -11
  31. cars/applications/point_cloud_fusion/pc_fusion_wrappers.py +3 -4
  32. cars/applications/point_cloud_outlier_removal/abstract_outlier_removal_app.py +9 -5
  33. cars/applications/point_cloud_outlier_removal/small_components_app.py +5 -3
  34. cars/applications/point_cloud_outlier_removal/statistical_app.py +4 -2
  35. cars/applications/rasterization/abstract_pc_rasterization_app.py +1 -0
  36. cars/applications/rasterization/rasterization_algo.py +20 -27
  37. cars/applications/rasterization/rasterization_wrappers.py +6 -5
  38. cars/applications/rasterization/simple_gaussian_app.py +30 -17
  39. cars/applications/resampling/resampling_algo.py +44 -49
  40. cars/applications/sparse_matching/sift_app.py +2 -22
  41. cars/applications/sparse_matching/sparse_matching_wrappers.py +0 -49
  42. cars/applications/triangulation/line_of_sight_intersection_app.py +1 -1
  43. cars/applications/triangulation/triangulation_wrappers.py +2 -1
  44. cars/bundleadjustment.py +51 -11
  45. cars/cars.py +15 -5
  46. cars/core/constants.py +1 -1
  47. cars/core/geometry/abstract_geometry.py +166 -12
  48. cars/core/geometry/shareloc_geometry.py +61 -14
  49. cars/core/inputs.py +15 -0
  50. cars/core/projection.py +117 -0
  51. cars/data_structures/cars_dataset.py +7 -5
  52. cars/orchestrator/cluster/log_wrapper.py +1 -1
  53. cars/orchestrator/cluster/mp_cluster/multiprocessing_cluster.py +1 -1
  54. cars/orchestrator/orchestrator.py +1 -1
  55. cars/orchestrator/registry/saver_registry.py +0 -78
  56. cars/pipelines/default/default_pipeline.py +69 -52
  57. cars/pipelines/parameters/advanced_parameters.py +17 -0
  58. cars/pipelines/parameters/advanced_parameters_constants.py +4 -0
  59. cars/pipelines/parameters/depth_map_inputs.py +22 -67
  60. cars/pipelines/parameters/dsm_inputs.py +16 -29
  61. cars/pipelines/parameters/output_parameters.py +44 -8
  62. cars/pipelines/parameters/sensor_inputs.py +117 -24
  63. cars/pipelines/parameters/sensor_loaders/basic_sensor_loader.py +3 -3
  64. cars/pipelines/parameters/sensor_loaders/pivot_sensor_loader.py +2 -2
  65. cars/pipelines/parameters/sensor_loaders/sensor_loader.py +4 -6
  66. cars/pipelines/parameters/sensor_loaders/sensor_loader_template.py +2 -2
  67. cars/pipelines/pipeline.py +8 -8
  68. cars/pipelines/unit/unit_pipeline.py +276 -274
  69. cars/starter.py +20 -1
  70. cars-1.0.0a3.dist-info/DELVEWHEEL +2 -0
  71. {cars-1.0.0a1.dist-info → cars-1.0.0a3.dist-info}/METADATA +3 -2
  72. {cars-1.0.0a1.dist-info → cars-1.0.0a3.dist-info}/RECORD +77 -74
  73. cars.libs/libgcc_s_seh-1-ca70890bbc5723b6d0ea31e9c9cded2b.dll +0 -0
  74. cars.libs/libstdc++-6-00ee19f73d5122a1277c137b1c218401.dll +0 -0
  75. cars.libs/libwinpthread-1-f5042e8e3d21edce20c1bc99445f551b.dll +0 -0
  76. cars-1.0.0a1.dist-info/DELVEWHEEL +0 -2
  77. cars.libs/libgcc_s_seh-1-f2b6825d483bdf14050493af93b5997d.dll +0 -0
  78. cars.libs/libstdc++-6-6b0059df6bc601df5a0f18a5805eea05.dll +0 -0
  79. cars.libs/libwinpthread-1-e01b8e85fd67c2b861f64d4ccc7df607.dll +0 -0
  80. {cars-1.0.0a1.dist-info → cars-1.0.0a3.dist-info}/WHEEL +0 -0
  81. {cars-1.0.0a1.dist-info → cars-1.0.0a3.dist-info}/entry_points.txt +0 -0
@@ -31,6 +31,7 @@ import rasterio as rio
31
31
  import scipy
32
32
  import skimage
33
33
  from json_checker import Checker, Or
34
+ from pyproj import CRS
34
35
  from shapely import Polygon
35
36
 
36
37
  from cars.core import inputs, projection
@@ -74,7 +75,11 @@ class BorderInterpolation(DsmFilling, short_name="border_interpolation"):
74
75
  # Overload conf
75
76
  overloaded_conf["method"] = conf.get("method", "bulldozer")
76
77
  overloaded_conf["activated"] = conf.get("activated", False)
77
- overloaded_conf["classification"] = conf.get("classification", None)
78
+ overloaded_conf["classification"] = conf.get("classification", "nodata")
79
+ if isinstance(overloaded_conf["classification"], str):
80
+ overloaded_conf["classification"] = [
81
+ overloaded_conf["classification"]
82
+ ]
78
83
  overloaded_conf["component_min_size"] = conf.get(
79
84
  "component_min_size", 5
80
85
  )
@@ -151,8 +156,8 @@ class BorderInterpolation(DsmFilling, short_name="border_interpolation"):
151
156
  roi_polys_outepsg = []
152
157
  for poly in roi_polys:
153
158
  if isinstance(poly, Polygon):
154
- roi_poly_outepsg = projection.polygon_projection(
155
- poly, roi_epsg, dsm_crs.to_epsg()
159
+ roi_poly_outepsg = projection.polygon_projection_crs(
160
+ poly, CRS(roi_epsg), dsm_crs
156
161
  )
157
162
  roi_polys_outepsg.append(roi_poly_outepsg)
158
163
 
@@ -160,8 +165,8 @@ class BorderInterpolation(DsmFilling, short_name="border_interpolation"):
160
165
  roi_polys_outepsg, out_shape=roi_raster.shape, transform=dsm_tr
161
166
  )
162
167
  elif isinstance(roi_polys, Polygon):
163
- roi_poly_outepsg = projection.polygon_projection(
164
- roi_polys, roi_epsg, dsm_crs.to_epsg()
168
+ roi_poly_outepsg = projection.polygon_projection_crs(
169
+ roi_polys, CRS(roi_epsg), dsm_crs
165
170
  )
166
171
  roi_raster = rio.features.rasterize(
167
172
  [roi_poly_outepsg], out_shape=roi_raster.shape, transform=dsm_tr
@@ -32,6 +32,7 @@ import rasterio as rio
32
32
  import yaml
33
33
  from bulldozer.pipeline.bulldozer_pipeline import dsm_to_dtm
34
34
  from json_checker import Checker, Or
35
+ from pyproj import CRS
35
36
  from shapely import Polygon
36
37
 
37
38
  from cars.core import inputs, projection
@@ -72,7 +73,13 @@ class BulldozerFilling(DsmFilling, short_name="bulldozer"):
72
73
  # Overload conf
73
74
  overloaded_conf["method"] = conf.get("method", "bulldozer")
74
75
  overloaded_conf["activated"] = conf.get("activated", False)
75
- overloaded_conf["classification"] = conf.get("classification", None)
76
+ overloaded_conf["classification"] = conf.get("classification", "nodata")
77
+
78
+ if isinstance(overloaded_conf["classification"], str):
79
+ overloaded_conf["classification"] = [
80
+ overloaded_conf["classification"]
81
+ ]
82
+
76
83
  overloaded_conf["save_intermediate_data"] = conf.get(
77
84
  "save_intermediate_data", False
78
85
  )
@@ -162,8 +169,8 @@ class BulldozerFilling(DsmFilling, short_name="bulldozer"):
162
169
  roi_polys_outepsg = []
163
170
  for poly in roi_polys:
164
171
  if isinstance(poly, Polygon):
165
- roi_poly_outepsg = projection.polygon_projection(
166
- poly, roi_epsg, dsm_crs.to_epsg()
172
+ roi_poly_outepsg = projection.polygon_projection_crs(
173
+ poly, CRS(roi_epsg), dsm_crs
167
174
  )
168
175
  roi_polys_outepsg.append(roi_poly_outepsg)
169
176
 
@@ -171,8 +178,8 @@ class BulldozerFilling(DsmFilling, short_name="bulldozer"):
171
178
  roi_polys_outepsg, out_shape=roi_raster.shape, transform=dsm_tr
172
179
  )
173
180
  elif isinstance(roi_polys, Polygon):
174
- roi_poly_outepsg = projection.polygon_projection(
175
- roi_polys, roi_epsg, dsm_crs.to_epsg()
181
+ roi_poly_outepsg = projection.polygon_projection_crs(
182
+ roi_polys, CRS(roi_epsg), dsm_crs
176
183
  )
177
184
  roi_raster = rio.features.rasterize(
178
185
  [roi_poly_outepsg], out_shape=roi_raster.shape, transform=dsm_tr
@@ -211,7 +218,7 @@ class BulldozerFilling(DsmFilling, short_name="bulldozer"):
211
218
  with rio.open(old_dsm_path, "w", **dsm_meta) as out_dsm:
212
219
  out_dsm.write(dsm, 1)
213
220
 
214
- if classif_file is not None:
221
+ if classif_file is not None and os.path.exists(classif_file):
215
222
  classif_descriptions = inputs.get_descriptions_bands(classif_file)
216
223
  else:
217
224
  classif_descriptions = []
@@ -225,7 +232,7 @@ class BulldozerFilling(DsmFilling, short_name="bulldozer"):
225
232
  classif[classif_msk == 0] = 0
226
233
  filling_mask = np.logical_and(classif, roi_raster > 0)
227
234
  elif label == "nodata":
228
- if classif_file is not None:
235
+ if classif_file is not None and os.path.exists(classif_file):
229
236
  with rio.open(classif_file) as in_classif:
230
237
  classif_msk = in_classif.read_masks(1)
231
238
  classif = ~classif_msk
@@ -29,6 +29,7 @@ import shutil
29
29
  import numpy as np
30
30
  import rasterio as rio
31
31
  from json_checker import Checker, Or
32
+ from pyproj import CRS
32
33
  from rasterio.enums import Resampling
33
34
  from rasterio.warp import reproject
34
35
  from shapely import Polygon
@@ -73,7 +74,11 @@ class ExogenousFilling(DsmFilling, short_name="exogenous_filling"):
73
74
  # Overload conf
74
75
  overloaded_conf["method"] = conf.get("method", "bulldozer")
75
76
  overloaded_conf["activated"] = conf.get("activated", False)
76
- overloaded_conf["classification"] = conf.get("classification", None)
77
+ overloaded_conf["classification"] = conf.get("classification", "nodata")
78
+ if isinstance(overloaded_conf["classification"], str):
79
+ overloaded_conf["classification"] = [
80
+ overloaded_conf["classification"]
81
+ ]
77
82
  overloaded_conf["fill_with_geoid"] = conf.get("fill_with_geoid", None)
78
83
  overloaded_conf["interpolation_method"] = conf.get(
79
84
  "interpolation_method", "bilinear"
@@ -170,8 +175,8 @@ class ExogenousFilling(DsmFilling, short_name="exogenous_filling"):
170
175
  roi_polys_outepsg = []
171
176
  for poly in roi_polys:
172
177
  if isinstance(poly, Polygon):
173
- roi_poly_outepsg = projection.polygon_projection(
174
- poly, roi_epsg, dsm_crs.to_epsg()
178
+ roi_poly_outepsg = projection.polygon_projection_crs(
179
+ poly, CRS(roi_epsg), dsm_crs
175
180
  )
176
181
  roi_polys_outepsg.append(roi_poly_outepsg)
177
182
 
@@ -179,8 +184,8 @@ class ExogenousFilling(DsmFilling, short_name="exogenous_filling"):
179
184
  roi_polys_outepsg, out_shape=roi_raster.shape, transform=dsm_tr
180
185
  )
181
186
  elif isinstance(roi_polys, Polygon):
182
- roi_poly_outepsg = projection.polygon_projection(
183
- roi_polys, roi_epsg, dsm_crs.to_epsg()
187
+ roi_poly_outepsg = projection.polygon_projection_crs(
188
+ roi_polys, CRS(roi_epsg), dsm_crs
184
189
  )
185
190
  roi_raster = rio.features.rasterize(
186
191
  [roi_poly_outepsg], out_shape=roi_raster.shape, transform=dsm_tr
@@ -31,7 +31,6 @@ import os
31
31
 
32
32
  # Third party imports
33
33
  import numpy as np
34
- import pandas
35
34
  import rasterio as rio
36
35
  from scipy.interpolate import LinearNDInterpolator
37
36
  from scipy.spatial import Delaunay # pylint: disable=E0611
@@ -45,7 +44,6 @@ from cars.applications.grid_generation import (
45
44
  from cars.core.utils import safe_makedirs
46
45
 
47
46
  # CARS imports
48
- from cars.data_structures import cars_dataset
49
47
  from cars.orchestrator.cluster.log_wrapper import cars_profile
50
48
 
51
49
 
@@ -171,7 +169,6 @@ def correct_grid(grid, grid_correction, pair_folder, save_grid=None):
171
169
  def estimate_right_grid_correction(
172
170
  matches,
173
171
  grid_right,
174
- initial_cars_ds=None,
175
172
  save_matches=False,
176
173
  minimum_nb_matches=100,
177
174
  pair_folder="",
@@ -480,13 +477,6 @@ def estimate_right_grid_correction(
480
477
  )
481
478
  np.save(matches_array_path, corrected_matches)
482
479
 
483
- # Create CarsDataset containing corrected matches, with same tiling as input
484
- corrected_matches_cars_ds = None
485
- if initial_cars_ds is not None:
486
- corrected_matches_cars_ds = create_matches_cars_ds(
487
- corrected_matches, initial_cars_ds
488
- )
489
-
490
480
  # Update orchestrator out_json
491
481
  corrected_matches_infos = {
492
482
  application_constants.APPLICATION_TAG: {
@@ -498,49 +488,6 @@ def estimate_right_grid_correction(
498
488
  return (
499
489
  grid_correction,
500
490
  corrected_matches,
501
- corrected_matches_cars_ds,
502
491
  in_stats,
503
492
  out_stats,
504
493
  )
505
-
506
-
507
- def create_matches_cars_ds(corrected_matches, initial_cars_ds):
508
- """
509
- Create CarsDataset representing matches, from numpy matches.
510
- Matches are split into tiles, stored in pandas DataFrames
511
-
512
- Right CarsDataset is filled with Nones
513
-
514
- :param corrected_matches: matches
515
- :type corrected_matches: numpy array
516
- :param initial_cars_ds: cars dataset to use tiling from
517
- :type initial_cars_ds: CarsDataset
518
-
519
- :return new_matches_cars_ds
520
- :rtype: CarsDataset
521
- """
522
-
523
- # initialize CarsDataset
524
- new_matches_cars_ds = cars_dataset.CarsDataset("points")
525
- new_matches_cars_ds.create_empty_copy(initial_cars_ds)
526
- new_matches_cars_ds.attributes = initial_cars_ds.attributes
527
-
528
- for row in range(new_matches_cars_ds.shape[0]):
529
- for col in range(new_matches_cars_ds.shape[1]):
530
- [
531
- row_min,
532
- row_max,
533
- col_min,
534
- col_max,
535
- ] = new_matches_cars_ds.tiling_grid[row, col, :]
536
-
537
- # Get corresponding matches
538
- tile_matches = corrected_matches[corrected_matches[:, 1] > row_min]
539
- tile_matches = tile_matches[tile_matches[:, 1] < row_max]
540
- tile_matches = tile_matches[tile_matches[:, 0] > col_min]
541
- tile_matches = tile_matches[tile_matches[:, 0] < col_max]
542
-
543
- # Create pandas DataFrame
544
- new_matches_cars_ds[row, col] = pandas.DataFrame(tile_matches)
545
-
546
- return new_matches_cars_ds
@@ -41,9 +41,9 @@ def transform_grid_func(grid, resolution, right=False):
41
41
  for key, value in grid.items():
42
42
  if right:
43
43
  if key not in ("grid_origin", "grid_spacing"):
44
- divide(key, value, grid, resolution)
44
+ scale(key, value, grid, resolution)
45
45
  else:
46
- divide(key, value, grid, resolution)
46
+ scale(key, value, grid, resolution)
47
47
 
48
48
  # we need to charge the data to override it
49
49
  with rasterio.open(grid["path"]) as src:
@@ -59,9 +59,9 @@ def transform_grid_func(grid, resolution, right=False):
59
59
  return grid
60
60
 
61
61
 
62
- def divide(key, value, grid, resolution):
62
+ def scale(key, value, grid, resolution):
63
63
  """
64
- Divide attributs by the resolution
64
+ Scale attributes by the resolution
65
65
  """
66
66
 
67
67
  if key == "grid_origin":
@@ -71,7 +71,7 @@ def divide(key, value, grid, resolution):
71
71
  for i, _ in enumerate(value):
72
72
  grid[key][i] = np.floor(value[i] / resolution)
73
73
  elif key == "disp_to_alt_ratio":
74
- grid[key] = value / resolution
74
+ grid[key] = value * resolution
75
75
  elif key == "epipolar_size_x":
76
76
  grid[key] = np.floor(value / resolution)
77
77
  elif key == "epipolar_size_y":
@@ -468,9 +468,12 @@ def create_combined_dense_cloud( # noqa: C901
468
468
  if cst.POINT_CLOUD_PERFORMANCE_MAP_ROOT in array_name:
469
469
  arrays_to_add_to_point_cloud.append((array_name, array_name))
470
470
 
471
- # add confidence layers
471
+ # add ambiguity layer, drop confidence_* layers
472
472
  for array_name in point_cloud:
473
- if cst.EPI_CONFIDENCE_KEY_ROOT in array_name:
473
+ if (
474
+ cst.EPI_AMBIGUITY in array_name
475
+ and cst.EPI_CONFIDENCE_KEY_ROOT not in array_name
476
+ ):
474
477
  arrays_to_add_to_point_cloud.append((array_name, array_name))
475
478
 
476
479
  # add denoising info layers
@@ -852,12 +855,15 @@ def generate_pc_wrapper( # noqa: C901
852
855
  if cst.EPI_TEXTURE not in coords:
853
856
  coords[cst.BAND_IM] = descriptions
854
857
 
855
- elif key == cst.EPI_CONFIDENCE_KEY_ROOT:
856
- for sub_key in cloud[key].keys():
857
- data = pc_wrap.read_image_full(
858
- cloud[key][sub_key], window=window, squeeze=True
859
- )
860
- values[sub_key] = ([cst.ROW, cst.COL], data)
858
+ elif key == cst.EPI_AMBIGUITY:
859
+ data = pc_wrap.read_image_full(
860
+ cloud[key], window=window, squeeze=True
861
+ )
862
+ descriptions = list(inputs.get_descriptions_bands(cloud[key]))
863
+ values[cst.EPI_AMBIGUITY] = (
864
+ [cst.ROW, cst.COL],
865
+ data,
866
+ )
861
867
 
862
868
  elif key == cst.EPI_FILLING:
863
869
  data = pc_wrap.read_image_full(
@@ -1098,9 +1104,9 @@ def compute_x_y_min_max_wrapper(items, epsg, window, saving_info=None):
1098
1104
  data_dict[cst.POINT_CLOUD_FILLING_KEY_ROOT] = items[
1099
1105
  cst.POINT_CLOUD_FILLING_KEY_ROOT
1100
1106
  ]
1101
- if cst.POINT_CLOUD_CONFIDENCE_KEY_ROOT in items:
1102
- data_dict[cst.POINT_CLOUD_CONFIDENCE_KEY_ROOT] = items[
1103
- cst.POINT_CLOUD_CONFIDENCE_KEY_ROOT
1107
+ if cst.POINT_CLOUD_AMBIGUITY_KEY_ROOT in items:
1108
+ data_dict[cst.POINT_CLOUD_AMBIGUITY_KEY_ROOT] = items[
1109
+ cst.POINT_CLOUD_AMBIGUITY_KEY_ROOT
1104
1110
  ]
1105
1111
  if cst.POINT_CLOUD_PERFORMANCE_MAP_ROOT in items:
1106
1112
  data_dict[cst.POINT_CLOUD_PERFORMANCE_MAP_ROOT] = items[
@@ -180,10 +180,9 @@ def create_point_cloud_index(cloud_sample):
180
180
  band_index = "{}_{}".format(cst.POINT_CLOUD_FILLING_KEY_ROOT, band)
181
181
  cloud_indexes_with_types[band_index] = "uint8"
182
182
 
183
- # Add confidence indexes
184
- for key in cloud_sample:
185
- if cst.EPI_CONFIDENCE_KEY_ROOT in key:
186
- cloud_indexes_with_types[key] = "float32"
183
+ # Add ambiguity information index
184
+ if cst.EPI_AMBIGUITY in cloud_sample:
185
+ cloud_indexes_with_types[cst.EPI_AMBIGUITY] = "float32"
187
186
 
188
187
  return cloud_indexes_with_types
189
188
 
@@ -31,7 +31,7 @@ import numpy as np
31
31
 
32
32
  from cars.applications import application_constants
33
33
  from cars.applications.application import Application
34
- from cars.applications.application_template import ApplicationTemplate
34
+ from cars.applications.application_template import ScalingApplicationTemplate
35
35
  from cars.applications.point_cloud_outlier_removal import (
36
36
  outlier_removal_constants as pr_cst,
37
37
  )
@@ -41,7 +41,7 @@ from cars.data_structures import cars_dataset
41
41
 
42
42
 
43
43
  @Application.register("point_cloud_outlier_removal")
44
- class PointCloudOutlierRemoval(ApplicationTemplate, metaclass=ABCMeta):
44
+ class PointCloudOutlierRemoval(ScalingApplicationTemplate, metaclass=ABCMeta):
45
45
  """
46
46
  PointCloudOutlierRemoval
47
47
  """
@@ -49,12 +49,14 @@ class PointCloudOutlierRemoval(ApplicationTemplate, metaclass=ABCMeta):
49
49
  available_applications: Dict = {}
50
50
  default_application = "statistical"
51
51
 
52
- def __new__(cls, conf=None): # pylint: disable=W0613
52
+ def __new__(cls, scaling_coeff, conf=None): # pylint: disable=W0613
53
53
  """
54
54
  Return the required application
55
55
  :raises:
56
56
  - KeyError when the required application is not registered
57
57
 
58
+ :param scaling_coeff: scaling factor for resolution
59
+ :type scaling_coeff: float
58
60
  :param conf: configuration for points removal
59
61
  :return: a application_to_use object
60
62
  """
@@ -95,15 +97,17 @@ class PointCloudOutlierRemoval(ApplicationTemplate, metaclass=ABCMeta):
95
97
  cls.orchestrator = None
96
98
  cls.available_applications[short_name] = cls
97
99
 
98
- def __init__(self, conf=None):
100
+ def __init__(self, scaling_coeff, conf=None):
99
101
  """
100
102
  Init function of PointCloudOutlierRemoval
101
103
 
104
+ :param scaling_coeff: scaling factor for resolution
105
+ :type scaling_coeff: float
102
106
  :param conf: configuration
103
107
  :return: an application_to_use object
104
108
  """
105
109
 
106
- super().__init__(conf=conf)
110
+ super().__init__(scaling_coeff, conf=conf)
107
111
 
108
112
  @abstractmethod
109
113
  def get_on_ground_margin(self, resolution=0.5):
@@ -64,15 +64,17 @@ class SmallComponents(
64
64
 
65
65
  # pylint: disable=too-many-instance-attributes
66
66
 
67
- def __init__(self, conf=None):
67
+ def __init__(self, scaling_coeff, conf=None):
68
68
  """
69
69
  Init function of SmallComponents
70
70
 
71
+ :param scaling_coeff: scaling factor for resolution
72
+ :type scaling_coeff: float
71
73
  :param conf: configuration for points outlier removal
72
74
  :return: an application_to_use object
73
75
  """
74
76
 
75
- super().__init__(conf=conf)
77
+ super().__init__(scaling_coeff, conf=conf)
76
78
 
77
79
  self.used_method = self.used_config["method"]
78
80
 
@@ -131,7 +133,7 @@ class SmallComponents(
131
133
  # pts_connection_dist:
132
134
  # distance to use to consider that two points are connected
133
135
  overloaded_conf["connection_distance"] = conf.get(
134
- "connection_distance", 3.0
136
+ "connection_distance", self.scaling_coeff * 3.0
135
137
  )
136
138
  # nb_pts_threshold:
137
139
  # points clusters that have less than this number of points
@@ -68,15 +68,17 @@ class Statistical(
68
68
 
69
69
  # pylint: disable=too-many-instance-attributes
70
70
 
71
- def __init__(self, conf=None):
71
+ def __init__(self, scaling_coeff, conf=None):
72
72
  """
73
73
  Init function of Statistical
74
74
 
75
+ :param scaling_coeff: scaling factor for resolution
76
+ :type scaling_coeff: float
75
77
  :param conf: configuration for points outlier removal
76
78
  :return: a application_to_use object
77
79
  """
78
80
 
79
- super().__init__(conf=conf)
81
+ super().__init__(scaling_coeff, conf=conf)
80
82
 
81
83
  self.used_method = self.used_config["method"]
82
84
 
@@ -131,6 +131,7 @@ class PointCloudRasterization(ApplicationTemplate, metaclass=ABCMeta):
131
131
  self,
132
132
  point_clouds,
133
133
  epsg,
134
+ output_crs,
134
135
  resolution,
135
136
  orchestrator=None,
136
137
  dsm_file_name=None,
@@ -174,7 +174,7 @@ def compute_vector_raster_and_stats(
174
174
  points = cloud.loc[:, [cst.X, cst.Y]].values.T
175
175
  nb_points = points.shape[1]
176
176
  valid = np.ones((1, nb_points))
177
- # create values: 1. altitudes and colors, 2. confidences, 3. masks
177
+ # create values: 1. altitudes and colors, 2. ambiguity, 3. masks
178
178
  # split_indexes allows to keep indexes separating values
179
179
  split_indexes = []
180
180
 
@@ -187,21 +187,16 @@ def compute_vector_raster_and_stats(
187
187
  values_bands.extend(clr_indexes)
188
188
  split_indexes.append(len(values_bands))
189
189
 
190
- # 2. confidences
191
- if list_computed_layers is not None:
192
- if cst.POINT_CLOUD_CONFIDENCE_KEY_ROOT not in list_computed_layers:
193
- confidences_indexes = rast_wrap.find_indexes_in_point_cloud(
194
- cloud, cst.POINT_CLOUD_AMBIGUITY_KEY_ROOT, list_computed_layers
195
- )
196
- else:
197
- confidences_indexes = rast_wrap.find_indexes_in_point_cloud(
198
- cloud, cst.POINT_CLOUD_CONFIDENCE_KEY_ROOT, list_computed_layers
199
- )
200
- else:
201
- confidences_indexes = []
190
+ # 2. ambiguity
191
+ ambiguity_indexes = rast_wrap.find_indexes_in_point_cloud(
192
+ cloud, cst.POINT_CLOUD_AMBIGUITY_KEY_ROOT, list_computed_layers
193
+ )
194
+
195
+ values_bands.extend(ambiguity_indexes)
196
+ split_indexes.append(len(ambiguity_indexes))
202
197
 
203
- values_bands.extend(confidences_indexes)
204
- split_indexes.append(len(confidences_indexes))
198
+ # sanity check
199
+ assert len(ambiguity_indexes) <= 1
205
200
 
206
201
  # 3. sup and inf layers interval
207
202
  layer_inf_sup_indexes = rast_wrap.find_indexes_in_point_cloud(
@@ -288,7 +283,7 @@ def compute_vector_raster_and_stats(
288
283
  # pylint: disable=unbalanced-tuple-unpacking
289
284
  (
290
285
  out,
291
- confidences,
286
+ ambiguity,
292
287
  interval,
293
288
  msk,
294
289
  classif,
@@ -297,11 +292,9 @@ def compute_vector_raster_and_stats(
297
292
  performance_map,
298
293
  ) = np.split(out, np.cumsum(split_indexes), axis=-1)
299
294
 
300
- confidences_out = None
301
- if len(confidences_indexes) > 0:
302
- confidences_out = {}
303
- for k, key in enumerate(confidences_indexes):
304
- confidences_out[key] = confidences[..., k]
295
+ ambiguity_out = None
296
+ if len(ambiguity_indexes) > 0:
297
+ ambiguity_out = ambiguity
305
298
 
306
299
  layers_inf_sup_out = None
307
300
  layers_inf_sup_stat_index = None
@@ -341,7 +334,7 @@ def compute_vector_raster_and_stats(
341
334
  clr_indexes,
342
335
  classif_out,
343
336
  classif_indexes,
344
- confidences_out,
337
+ ambiguity_out,
345
338
  layers_inf_sup_out,
346
339
  layers_inf_sup_stat_index,
347
340
  layer_inf_sup_indexes,
@@ -420,7 +413,7 @@ def rasterize(
420
413
  clr_indexes,
421
414
  classif,
422
415
  classif_indexes,
423
- confidences,
416
+ ambiguity,
424
417
  layer_inf_sup,
425
418
  layer_inf_sup_stats_indexes,
426
419
  layer_inf_sup_indexes,
@@ -463,9 +456,9 @@ def rasterize(
463
456
  else:
464
457
  msk = np.isnan(out[0, :, :])
465
458
 
466
- if confidences is not None:
467
- for key, value in confidences.items():
468
- confidences[key] = value.reshape(shape_out)
459
+ if ambiguity is not None:
460
+ ambiguity = ambiguity.reshape(shape_out + (-1,))
461
+ ambiguity = np.moveaxis(ambiguity, 2, 0)
469
462
 
470
463
  if layer_inf_sup is not None:
471
464
  layer_inf_sup = layer_inf_sup.reshape(shape_out + (-1,))
@@ -512,7 +505,7 @@ def rasterize(
512
505
  clr_indexes,
513
506
  classif,
514
507
  classif_indexes,
515
- confidences,
508
+ ambiguity,
516
509
  layer_inf_sup,
517
510
  layer_inf_sup_stats_indexes,
518
511
  layer_inf_sup_indexes,
@@ -174,7 +174,7 @@ def create_raster_dataset( # noqa: C901
174
174
  band_im: List[str] = None,
175
175
  classif: np.ndarray = None,
176
176
  band_classif: List[str] = None,
177
- confidences: np.ndarray = None,
177
+ ambiguity: np.ndarray = None,
178
178
  layers_inf_sup: np.ndarray = None,
179
179
  layers_inf_sup_stat_index: List[int] = None,
180
180
  layer_inf_sup_indexes: List[str] = None,
@@ -207,7 +207,7 @@ def create_raster_dataset( # noqa: C901
207
207
  :param n_in_cell: number of points which contribute to a cell
208
208
  :param msk: raster msk
209
209
  :param classif: raster classif
210
- :param confidences: raster containing the confidences
210
+ :param ambiguity: raster containing the ambiguity
211
211
  :param layers_inf_sup: raster containing intervals inf and sup
212
212
  :param layers_inf_sup_stat_index: list containing index of
213
213
  intervals in mean and stdev rasters
@@ -310,9 +310,10 @@ def create_raster_dataset( # noqa: C901
310
310
  # update raster output with classification data
311
311
  raster_out = xr.merge((raster_out, classif_out))
312
312
 
313
- if confidences is not None: # rasterizer produced color output
314
- for key in confidences:
315
- raster_out[key] = xr.DataArray(confidences[key], dims=raster_dims)
313
+ if ambiguity is not None: # rasterizer produced color output
314
+ raster_out[cst.RASTER_AMBIGUITY] = xr.DataArray(
315
+ ambiguity[0], dims=raster_dims
316
+ )
316
317
 
317
318
  if layers_inf_sup is not None:
318
319
  # Get inf data