cars 1.0.0a1-cp39-cp39-win_amd64.whl → 1.0.0a2-cp39-cp39-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of cars might be problematic.

Files changed (50)
  1. cars/__init__.py +6 -6
  2. cars/applications/dem_generation/dem_generation_wrappers.py +5 -1
  3. cars/applications/dem_generation/dichotomic_generation_app.py +21 -6
  4. cars/applications/dem_generation/rasterization_app.py +70 -27
  5. cars/applications/dense_match_filling/abstract_dense_match_filling_app.py +4 -0
  6. cars/applications/dense_match_filling/cpp/dense_match_filling_cpp.cp39-win_amd64.dll.a +0 -0
  7. cars/applications/dense_match_filling/cpp/dense_match_filling_cpp.cp39-win_amd64.pyd +0 -0
  8. cars/applications/dense_match_filling/fill_disp_algo.py +41 -12
  9. cars/applications/dense_match_filling/plane_app.py +11 -0
  10. cars/applications/dense_match_filling/zero_padding_app.py +11 -1
  11. cars/applications/dense_matching/census_mccnn_sgm_app.py +262 -545
  12. cars/applications/dense_matching/cpp/dense_matching_cpp.cp39-win_amd64.dll.a +0 -0
  13. cars/applications/dense_matching/cpp/dense_matching_cpp.cp39-win_amd64.pyd +0 -0
  14. cars/applications/dense_matching/dense_matching_algo.py +59 -11
  15. cars/applications/dense_matching/dense_matching_wrappers.py +51 -31
  16. cars/applications/dense_matching/disparity_grid_algo.py +572 -0
  17. cars/applications/grid_generation/grid_correction_app.py +0 -53
  18. cars/applications/grid_generation/transform_grid.py +5 -5
  19. cars/applications/point_cloud_fusion/pc_fusion_algo.py +17 -11
  20. cars/applications/point_cloud_fusion/pc_fusion_wrappers.py +3 -4
  21. cars/applications/rasterization/rasterization_algo.py +20 -27
  22. cars/applications/rasterization/rasterization_wrappers.py +6 -5
  23. cars/applications/rasterization/simple_gaussian_app.py +2 -14
  24. cars/applications/sparse_matching/sparse_matching_wrappers.py +0 -49
  25. cars/applications/triangulation/line_of_sight_intersection_app.py +1 -1
  26. cars/applications/triangulation/triangulation_wrappers.py +2 -1
  27. cars/bundleadjustment.py +51 -11
  28. cars/cars.py +15 -5
  29. cars/core/constants.py +1 -1
  30. cars/core/geometry/abstract_geometry.py +54 -11
  31. cars/core/geometry/shareloc_geometry.py +59 -14
  32. cars/orchestrator/registry/saver_registry.py +0 -78
  33. cars/pipelines/default/default_pipeline.py +23 -26
  34. cars/pipelines/parameters/depth_map_inputs.py +22 -67
  35. cars/pipelines/parameters/dsm_inputs.py +16 -29
  36. cars/pipelines/parameters/sensor_inputs.py +20 -21
  37. cars/pipelines/parameters/sensor_loaders/basic_sensor_loader.py +3 -3
  38. cars/pipelines/parameters/sensor_loaders/pivot_sensor_loader.py +2 -2
  39. cars/pipelines/parameters/sensor_loaders/sensor_loader.py +4 -6
  40. cars/pipelines/parameters/sensor_loaders/sensor_loader_template.py +2 -2
  41. cars/pipelines/pipeline.py +8 -8
  42. cars/pipelines/unit/unit_pipeline.py +103 -196
  43. cars/starter.py +20 -1
  44. cars-1.0.0a2.dist-info/DELVEWHEEL +2 -0
  45. {cars-1.0.0a1.dist-info → cars-1.0.0a2.dist-info}/METADATA +3 -2
  46. {cars-1.0.0a1.dist-info → cars-1.0.0a2.dist-info}/RECORD +49 -48
  47. cars-1.0.0a1.dist-info/DELVEWHEEL +0 -2
  48. {cars-1.0.0a1.dist-info → cars-1.0.0a2.dist-info}/WHEEL +0 -0
  49. {cars-1.0.0a1.dist-info → cars-1.0.0a2.dist-info}/entry_points.txt +0 -0
  50. cars.libs/{.load-order-cars-1.0.0a1 → .load-order-cars-1.0.0a2} +1 -1
@@ -468,9 +468,12 @@ def create_combined_dense_cloud( # noqa: C901
         if cst.POINT_CLOUD_PERFORMANCE_MAP_ROOT in array_name:
             arrays_to_add_to_point_cloud.append((array_name, array_name))

-    # add confidence layers
+    # add ambiguity layer, drop confidence_* layers
     for array_name in point_cloud:
-        if cst.EPI_CONFIDENCE_KEY_ROOT in array_name:
+        if (
+            cst.EPI_AMBIGUITY in array_name
+            and cst.EPI_CONFIDENCE_KEY_ROOT not in array_name
+        ):
             arrays_to_add_to_point_cloud.append((array_name, array_name))

     # add denoising info layers
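The rewritten condition works by substring matching: the raw ambiguity band passes, while legacy `confidence_from_ambiguity*` bands, which also contain the ambiguity substring, are excluded by the second clause. A minimal sketch of that selection rule, assuming the values `"ambiguity"` and `"confidence"` for `cst.EPI_AMBIGUITY` and `cst.EPI_CONFIDENCE_KEY_ROOT` (the actual constant values are not shown in this diff):

```python
# Assumed constant values (not shown in this diff)
EPI_AMBIGUITY = "ambiguity"
EPI_CONFIDENCE_KEY_ROOT = "confidence"

layers = ["ambiguity", "confidence_from_ambiguity.cars_1", "z"]
kept = [
    name
    for name in layers
    if EPI_AMBIGUITY in name and EPI_CONFIDENCE_KEY_ROOT not in name
]
print(kept)  # ['ambiguity']: the legacy confidence_* layer is dropped
```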
@@ -852,12 +855,15 @@ def generate_pc_wrapper( # noqa: C901
             if cst.EPI_TEXTURE not in coords:
                 coords[cst.BAND_IM] = descriptions

-        elif key == cst.EPI_CONFIDENCE_KEY_ROOT:
-            for sub_key in cloud[key].keys():
-                data = pc_wrap.read_image_full(
-                    cloud[key][sub_key], window=window, squeeze=True
-                )
-                values[sub_key] = ([cst.ROW, cst.COL], data)
+        elif key == cst.EPI_AMBIGUITY:
+            data = pc_wrap.read_image_full(
+                cloud[key], window=window, squeeze=True
+            )
+            descriptions = list(inputs.get_descriptions_bands(cloud[key]))
+            values[cst.EPI_AMBIGUITY] = (
+                [cst.ROW, cst.COL],
+                data,
+            )

         elif key == cst.EPI_FILLING:
             data = pc_wrap.read_image_full(
@@ -1098,9 +1104,9 @@ def compute_x_y_min_max_wrapper(items, epsg, window, saving_info=None):
         data_dict[cst.POINT_CLOUD_FILLING_KEY_ROOT] = items[
             cst.POINT_CLOUD_FILLING_KEY_ROOT
         ]
-    if cst.POINT_CLOUD_CONFIDENCE_KEY_ROOT in items:
-        data_dict[cst.POINT_CLOUD_CONFIDENCE_KEY_ROOT] = items[
-            cst.POINT_CLOUD_CONFIDENCE_KEY_ROOT
+    if cst.POINT_CLOUD_AMBIGUITY_KEY_ROOT in items:
+        data_dict[cst.POINT_CLOUD_AMBIGUITY_KEY_ROOT] = items[
+            cst.POINT_CLOUD_AMBIGUITY_KEY_ROOT
         ]
     if cst.POINT_CLOUD_PERFORMANCE_MAP_ROOT in items:
         data_dict[cst.POINT_CLOUD_PERFORMANCE_MAP_ROOT] = items[
@@ -180,10 +180,9 @@ def create_point_cloud_index(cloud_sample):
         band_index = "{}_{}".format(cst.POINT_CLOUD_FILLING_KEY_ROOT, band)
         cloud_indexes_with_types[band_index] = "uint8"

-    # Add confidence indexes
-    for key in cloud_sample:
-        if cst.EPI_CONFIDENCE_KEY_ROOT in key:
-            cloud_indexes_with_types[key] = "float32"
+    # Add ambiguity information index
+    if cst.EPI_AMBIGUITY in cloud_sample:
+        cloud_indexes_with_types[cst.EPI_AMBIGUITY] = "float32"

     return cloud_indexes_with_types

@@ -174,7 +174,7 @@ def compute_vector_raster_and_stats(
     points = cloud.loc[:, [cst.X, cst.Y]].values.T
     nb_points = points.shape[1]
     valid = np.ones((1, nb_points))
-    # create values: 1. altitudes and colors, 2. confidences, 3. masks
+    # create values: 1. altitudes and colors, 2. ambiguity, 3. masks
     # split_indexes allows to keep indexes separating values
     split_indexes = []

@@ -187,21 +187,16 @@ def compute_vector_raster_and_stats(
     values_bands.extend(clr_indexes)
     split_indexes.append(len(values_bands))

-    # 2. confidences
-    if list_computed_layers is not None:
-        if cst.POINT_CLOUD_CONFIDENCE_KEY_ROOT not in list_computed_layers:
-            confidences_indexes = rast_wrap.find_indexes_in_point_cloud(
-                cloud, cst.POINT_CLOUD_AMBIGUITY_KEY_ROOT, list_computed_layers
-            )
-        else:
-            confidences_indexes = rast_wrap.find_indexes_in_point_cloud(
-                cloud, cst.POINT_CLOUD_CONFIDENCE_KEY_ROOT, list_computed_layers
-            )
-    else:
-        confidences_indexes = []
+    # 2. ambiguity
+    ambiguity_indexes = rast_wrap.find_indexes_in_point_cloud(
+        cloud, cst.POINT_CLOUD_AMBIGUITY_KEY_ROOT, list_computed_layers
+    )
+
+    values_bands.extend(ambiguity_indexes)
+    split_indexes.append(len(ambiguity_indexes))

-    values_bands.extend(confidences_indexes)
-    split_indexes.append(len(confidences_indexes))
+    # sanity check
+    assert len(ambiguity_indexes) <= 1

     # 3. sup and inf layers interval
     layer_inf_sup_indexes = rast_wrap.find_indexes_in_point_cloud(
@@ -288,7 +283,7 @@ def compute_vector_raster_and_stats(
     # pylint: disable=unbalanced-tuple-unpacking
     (
         out,
-        confidences,
+        ambiguity,
         interval,
         msk,
         classif,
@@ -297,11 +292,9 @@ def compute_vector_raster_and_stats(
         performance_map,
     ) = np.split(out, np.cumsum(split_indexes), axis=-1)

-    confidences_out = None
-    if len(confidences_indexes) > 0:
-        confidences_out = {}
-        for k, key in enumerate(confidences_indexes):
-            confidences_out[key] = confidences[..., k]
+    ambiguity_out = None
+    if len(ambiguity_indexes) > 0:
+        ambiguity_out = ambiguity

     layers_inf_sup_out = None
     layers_inf_sup_stat_index = None
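For readers unfamiliar with the `np.split(out, np.cumsum(split_indexes), axis=-1)` idiom used here: `split_indexes` records the width of each band group, so the cumulative sum gives the cut positions along the last axis. A self-contained illustration with made-up group sizes:

```python
import numpy as np

# 3 points, 4 stacked bands: 2 altitude/color, 1 ambiguity, 1 mask
# (sizes are illustrative only)
out = np.arange(12.0).reshape(3, 4)
split_indexes = [2, 1]  # widths of the first two groups

colors, ambiguity, msk = np.split(out, np.cumsum(split_indexes), axis=-1)
print(colors.shape, ambiguity.shape, msk.shape)  # (3, 2) (3, 1) (3, 1)
```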
@@ -341,7 +334,7 @@ def compute_vector_raster_and_stats(
         clr_indexes,
         classif_out,
         classif_indexes,
-        confidences_out,
+        ambiguity_out,
         layers_inf_sup_out,
         layers_inf_sup_stat_index,
         layer_inf_sup_indexes,
@@ -420,7 +413,7 @@ def rasterize(
     clr_indexes,
     classif,
     classif_indexes,
-    confidences,
+    ambiguity,
     layer_inf_sup,
     layer_inf_sup_stats_indexes,
     layer_inf_sup_indexes,
@@ -463,9 +456,9 @@ def rasterize(
     else:
         msk = np.isnan(out[0, :, :])

-    if confidences is not None:
-        for key, value in confidences.items():
-            confidences[key] = value.reshape(shape_out)
+    if ambiguity is not None:
+        ambiguity = ambiguity.reshape(shape_out + (-1,))
+        ambiguity = np.moveaxis(ambiguity, 2, 0)

     if layer_inf_sup is not None:
         layer_inf_sup = layer_inf_sup.reshape(shape_out + (-1,))
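The new reshape-and-moveaxis pair converts the rasterizer's flat per-cell ambiguity output into the band-major `(bands, rows, cols)` layout used for rasters. A small sketch on dummy data:

```python
import numpy as np

shape_out = (4, 5)                       # raster rows, cols
flat = np.random.rand(4 * 5, 1)          # one ambiguity band per cell
cube = flat.reshape(shape_out + (-1,))   # -> (4, 5, 1)
band_major = np.moveaxis(cube, 2, 0)     # -> (1, 4, 5), bands first
print(band_major.shape)
```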
@@ -512,7 +505,7 @@ def rasterize(
         clr_indexes,
         classif,
         classif_indexes,
-        confidences,
+        ambiguity,
         layer_inf_sup,
         layer_inf_sup_stats_indexes,
         layer_inf_sup_indexes,
@@ -174,7 +174,7 @@ def create_raster_dataset( # noqa: C901
     band_im: List[str] = None,
     classif: np.ndarray = None,
     band_classif: List[str] = None,
-    confidences: np.ndarray = None,
+    ambiguity: np.ndarray = None,
     layers_inf_sup: np.ndarray = None,
     layers_inf_sup_stat_index: List[int] = None,
     layer_inf_sup_indexes: List[str] = None,
@@ -207,7 +207,7 @@ def create_raster_dataset( # noqa: C901
     :param n_in_cell: number of points which contribute to a cell
     :param msk: raster msk
     :param classif: raster classif
-    :param confidences: raster containing the confidences
+    :param ambiguity: raster containing the ambiguity
     :param layers_inf_sup: raster containing intervals inf and sup
     :param layers_inf_sup_stat_index: list containing index of
         intervals in mean and stdev rasters
@@ -310,9 +310,10 @@ def create_raster_dataset( # noqa: C901
         # update raster output with classification data
         raster_out = xr.merge((raster_out, classif_out))

-    if confidences is not None:  # rasterizer produced color output
-        for key in confidences:
-            raster_out[key] = xr.DataArray(confidences[key], dims=raster_dims)
+    if ambiguity is not None:  # rasterizer produced ambiguity output
+        raster_out[cst.RASTER_AMBIGUITY] = xr.DataArray(
+            ambiguity[0], dims=raster_dims
+        )

     if layers_inf_sup is not None:
         # Get inf data
@@ -644,7 +644,7 @@ class SimpleGaussian(
             )
         elif save_intermediate_data:
             # File is not part of the official product, write it in dump_dir
-            out_source_pc = os.path.join(out_dump_dir, "source_pc.tif")
+            out_source_pc = os.path.join(out_dump_dir, "contributing_pair.tif")
             if out_source_pc:
                 list_computed_layers += ["source_pc"]
                 self.orchestrator.add_to_save_lists(
@@ -653,7 +653,7 @@ class SimpleGaussian(
                     terrain_raster,
                     dtype=np.uint8,
                     nodata=self.msk_no_data,
-                    cars_ds_name="source_pc",
+                    cars_ds_name="contributing_pair",
                     optional_data=True,
                 )

@@ -680,18 +680,6 @@ class SimpleGaussian(

         # TODO Check that intervals indeed exist!
         if save_intermediate_data:
-            out_confidence = os.path.join(out_dump_dir, "confidence.tif")
-            list_computed_layers += ["confidence"]
-            self.orchestrator.add_to_save_lists(
-                out_confidence,
-                cst.RASTER_CONFIDENCE,
-                terrain_raster,
-                dtype=np.float32,
-                nodata=self.msk_no_data,
-                cars_ds_name="confidence",
-                optional_data=True,
-            )
-
             list_computed_layers += [cst.POINT_CLOUD_LAYER_SUP_OR_INF_ROOT]
             out_dsm_inf_file_name = os.path.join(out_dump_dir, "dsm_inf.tif")
             self.orchestrator.add_to_save_lists(
@@ -170,55 +170,6 @@ def compute_disp_min_disp_max(
     return dmin, dmax


-@cars_profile(name="Clustering matches")
-def clustering_matches(
-    triangulated_matches,
-    connection_val=3.0,
-    nb_pts_threshold=80,
-    clusters_distance_threshold: float = None,
-    filtered_elt_pos: bool = False,
-):
-    """
-    Filter triangulated matches
-
-    :param pd_cloud: triangulated_matches
-    :type pd_cloud: pandas Dataframe
-    :param connection_val: distance to use
-        to consider that two points are connected
-    :param nb_pts_threshold: number of points to use
-        to identify small clusters to filter
-    :param clusters_distance_threshold: distance to use
-        to consider if two points clusters are far from each other or not
-        (set to None to deactivate this level of filtering)
-    :param filtered_elt_pos: if filtered_elt_pos is set to True,
-        the removed points positions in their original
-        epipolar images are returned, otherwise it is set to None
-
-    :return: filtered_matches
-    :rtype: pandas Dataframe
-
-    """
-
-    filtered_pandora_matches, _ = (
-        outlier_removal_algo.small_component_filtering(
-            triangulated_matches,
-            connection_val=connection_val,
-            nb_pts_threshold=nb_pts_threshold,
-            clusters_distance_threshold=clusters_distance_threshold,
-            filtered_elt_pos=filtered_elt_pos,
-        )
-    )
-
-    filtered_pandora_matches_dataframe = pandas.DataFrame(
-        filtered_pandora_matches
-    )
-    filtered_pandora_matches_dataframe.attrs["epsg"] = (
-        triangulated_matches.attrs["epsg"]
-    )
-
-    return filtered_pandora_matches_dataframe
-
-
 @cars_profile(name="filter_point_cloud_matches")
 def filter_point_cloud_matches(
     pd_cloud,
@@ -1044,7 +1044,7 @@ def triangulation_wrapper(
     ambiguity_map = None
     perf_ambiguity_threshold = None
     if use_ambiguity:
-        ambiguity_map = disp_ref["confidence_from_ambiguity.cars_1"]
+        ambiguity_map = disp_ref["ambiguity"]
         perf_ambiguity_threshold = performance_maps_parameters[
             "perf_ambiguity_threshold"
         ]
@@ -246,7 +246,8 @@ def compute_performance_map(
     )

     if ambiguity_map is not None:
-        ambiguity_map = 1 - ambiguity_map.values
+        # the map already holds ambiguity, not confidence from ambiguity
+        ambiguity_map = ambiguity_map.values
         mask_ambi = ambiguity_map > perf_ambiguity_threshold
         w_ambi = ambiguity_map / perf_ambiguity_threshold
         w_ambi[mask_ambi] = 1
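Dropping the `1 -` inversion is consistent with the triangulation change above: the old `confidence_from_ambiguity` layer stored `1 - ambiguity`, while the new `ambiguity` layer stores ambiguity directly. The weighting itself is a threshold-clamped ratio; a toy numeric sketch with made-up values:

```python
import numpy as np

ambiguity_map = np.array([0.2, 0.5, 0.9])
perf_ambiguity_threshold = 0.6

mask_ambi = ambiguity_map > perf_ambiguity_threshold
w_ambi = ambiguity_map / perf_ambiguity_threshold
w_ambi[mask_ambi] = 1  # clamp weights above the threshold to 1
print(w_ambi)  # [0.333... 0.833... 1.0]
```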
cars/bundleadjustment.py CHANGED
@@ -15,6 +15,7 @@ import geopandas as gpd
 import numpy as np
 import pandas as pd
 import rasterio as rio
+import yaml

 try:
     from rpcfit import rpc_fit
@@ -34,6 +35,7 @@ from shareloc.geomodels.geomodel import GeoModel
 from shareloc.geomodels.los import LOS
 from shareloc.proj_utils import coordinates_conversion

+from cars.pipelines.parameters import sensor_inputs
 from cars.pipelines.pipeline import Pipeline


@@ -550,14 +552,26 @@ def new_rpcs_from_matches(
     return None


-def cars_bundle_adjustment(conf, no_run_sparse):
+def cars_bundle_adjustment(conf, no_run_sparse, output_format="yaml"):
     """
     cars-bundleadjustment main:
     - Launch CARS to compute homologous points (run sparse matching)
     - Compute new RPCs
     """
-    with open(conf, encoding="utf-8") as reader:
-        conf_as_dict = json.load(reader)
+    _, ext = os.path.splitext(conf)
+    ext = ext.lower()
+
+    if ext == ".json":
+        with open(conf, encoding="utf-8") as reader:
+            conf_as_dict = json.load(reader)
+    elif ext in [".yaml", ".yml"]:
+        with open(conf, encoding="utf-8") as reader:
+            conf_as_dict = yaml.safe_load(reader)
+    else:
+        raise ValueError(
+            f"Unsupported configuration file format: {ext}. "
+            "Please use .json, .yaml, or .yml"
+        )

     conf_dirname = os.path.dirname(conf)
     out_dir = os.path.abspath(
@@ -576,6 +590,7 @@ def cars_bundle_adjustment(conf, no_run_sparse):
     ]
     sparse_matching_config["output"]["directory"] = sparse_matching
     sparse_matching_config["output"]["product_level"] = []
+    sparse_matching_config["advanced"] = {}
     sparse_matching_config["advanced"]["epipolar_resolutions"] = [1]
     if "sparse_matching.sift" not in sparse_matching_config["applications"]:
         sparse_matching_config["applications"]["sparse_matching.sift"] = {}
@@ -595,6 +610,9 @@ def cars_bundle_adjustment(conf, no_run_sparse):
         sparse_matching_pipeline.run()

     # create new refined rpcs
+    conf_as_dict["inputs"] = sensor_inputs.sensors_check_inputs(
+        conf_as_dict["inputs"], config_dir=conf_dirname
+    )
     separate = bundle_adjustment_config.pop("separate")
     refined_rpcs = new_rpcs_from_matches(
         conf_as_dict["inputs"]["sensors"],
@@ -623,9 +641,15 @@ def cars_bundle_adjustment(conf, no_run_sparse):
     raw_config["inputs"]["pairing"] = pairing
     raw_config["output"]["directory"] = raw

-    raw_cfg_file = raw_config["output"]["directory"] + ".json"
-    with open(raw_cfg_file, "w", encoding="utf8") as json_writer:
-        json.dump(raw_config, json_writer, indent=2)
+    # output config file
+    raw_cfg_file = raw_config["output"]["directory"] + (
+        ".yaml" if output_format == "yaml" else ".json"
+    )
+    with open(raw_cfg_file, "w", encoding="utf8") as writer:
+        if output_format == "yaml":
+            yaml.safe_dump(raw_config, writer, sort_keys=False)
+        else:
+            json.dump(raw_config, writer, indent=2)

     if refined_rpcs is not None:
         # create configuration file + launch cars dense matching
@@ -645,9 +669,14 @@ def cars_bundle_adjustment(conf, no_run_sparse):
         refined_config["inputs"]["pairing"] = pairing
         refined_config["output"]["directory"] = refined

-        refined_cfg_file = refined_config["output"]["directory"] + ".json"
-        with open(refined_cfg_file, "w", encoding="utf8") as json_writer:
-            json.dump(refined_config, json_writer, indent=2)
+        refined_cfg_file = refined_config["output"]["directory"] + (
+            ".yaml" if output_format == "yaml" else ".json"
+        )
+        with open(refined_cfg_file, "w", encoding="utf8") as writer:
+            if output_format == "yaml":
+                yaml.safe_dump(refined_config, writer, sort_keys=False)
+            else:
+                json.dump(refined_config, writer, indent=2)


 def cli():
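Note that `yaml.safe_dump(..., sort_keys=False)` keeps configuration keys in insertion order, where the default would sort them alphabetically. A quick sketch with an illustrative dict:

```python
import yaml

cfg = {"inputs": {"pairing": []}, "output": {"directory": "refined"}}
print(yaml.safe_dump(cfg, sort_keys=False))
# inputs:
#   pairing: []
# output:
#   directory: refined
```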
@@ -679,8 +708,8 @@ key and its associated value:
     ```

     - Parameters "pairing" and "separate" are mandatory.
-    - Parameters "nb_decimals" (default value: 0) and "min_matches" \
-    (default value: 100) are optional.
+    - Parameters "nb_decimals" (default value: 0), "min_matches" \
+    (default value: 100) and "output_format" (default value: yaml) are optional.

     ### Generation of homologous points calculated by pair

@@ -704,7 +733,18 @@ number of matches per zone required to calculate these statistics."""
     )
     parser.add_argument("conf", type=str, help="Configuration File")
     parser.add_argument("--no-run-sparse", action="store_true")
+    parser.add_argument(
+        "--output-format",
+        type=str,
+        default="json",
+        choices=["json", "yaml", "JSON", "YAML"],
+        help="Output format for generated configuration files "
+        "(json or yaml, case-insensitive). Default: json",
+    )
+
     args = parser.parse_args()
+    # normalize format to lowercase
+    args.output_format = args.output_format.lower()
     cars_bundle_adjustment(**vars(args))


cars/cars.py CHANGED
@@ -33,6 +33,8 @@ import os
 import sys
 import warnings

+import yaml
+
 # CARS imports
 from cars import __version__
 from cars.core import cars_logging
@@ -87,9 +89,17 @@ def main_cli(args, dry_run=False): # noqa: C901
     from cars.pipelines.pipeline import Pipeline

     try:
-        # Transform conf file to dict
-        with open(args.conf, "r", encoding="utf8") as fstream:
-            config = json.load(fstream)
+        # Check file extension and load configuration
+        config_path = args.conf
+        ext = os.path.splitext(config_path)[1].lower()
+        if ext == ".json":
+            with open(config_path, "r", encoding="utf8") as fstream:
+                config = json.load(fstream)
+        elif ext in [".yaml", ".yml"]:
+            with open(config_path, "r", encoding="utf8") as fstream:
+                config = yaml.safe_load(fstream)
+        else:
+            raise ValueError("Configuration file must be .json or .yaml/.yml")

         # Cars 0.9.0 API change, check if the configfile seems to use the old
         # API by looking for the deprecated out_dir key
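With this change a CARS configuration may be supplied as YAML or JSON; the extension picks the parser, and both yield the same dict. A minimal sketch of the equivalence (config content is illustrative only):

```python
import json
import yaml

yaml_text = """\
inputs:
  sensors: {}
output:
  directory: out
"""
json_text = '{"inputs": {"sensors": {}}, "output": {"directory": "out"}}'

assert yaml.safe_load(yaml_text) == json.loads(json_text)
```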
@@ -109,7 +119,7 @@ def main_cli(args, dry_run=False): # noqa: C901
             config["output"]["directory"] = config["output"]["out_dir"]
             del config["output"]["out_dir"]

-        config_json_dir = os.path.abspath(os.path.dirname(args.conf))
+        config_dir = os.path.abspath(os.path.dirname(config_path))
         pipeline_name = config.get("advanced", {}).get("pipeline", "default")

         # Logging configuration with args Loglevel
@@ -126,7 +136,7 @@ def main_cli(args, dry_run=False): # noqa: C901

         # Generate pipeline and check conf
         cars_logging.add_progress_message("Check configuration...")
-        used_pipeline = Pipeline(pipeline_name, config, config_json_dir)
+        used_pipeline = Pipeline(pipeline_name, config, config_dir)
         cars_logging.add_progress_message("CARS pipeline is started.")
         if not dry_run:
             # run pipeline
cars/core/constants.py CHANGED
@@ -184,7 +184,7 @@ DSM_INF_MEAN = "dsm_inf_mean"
 DSM_INF_STD = "dsm_inf_std"
 DSM_SUP_MEAN = "dsm_sup_mean"
 DSM_SUP_STD = "dsm_sup_std"
-DSM_CONFIDENCE_AMBIGUITY = "ambiguity"
+DSM_AMBIGUITY = "ambiguity"
 DSM_CONFIDENCE = "confidence"
 DSM_PERFORMANCE_MAP = "performance_map"
 DSM_SOURCE_PC = "source_pc"
@@ -30,7 +30,7 @@ from typing import Dict, List, Tuple, Union
 import numpy as np
 import rasterio as rio
 import xarray as xr
-from json_checker import And, Checker
+from json_checker import And, Checker, Or
 from scipy import interpolate
 from scipy.interpolate import LinearNDInterpolator
 from shapely.geometry import Polygon
@@ -65,7 +65,9 @@ class AbstractGeometry(metaclass=ABCMeta):
         if isinstance(geometry_plugin_conf, str):
             geometry_plugin = geometry_plugin_conf
         elif isinstance(geometry_plugin_conf, dict):
-            geometry_plugin = geometry_plugin_conf.get("plugin_name", None)
+            geometry_plugin = geometry_plugin_conf.get(
+                "plugin_name", "SharelocGeometry"
+            )
         else:
             raise RuntimeError("Not a supported type")

@@ -105,6 +107,7 @@ class AbstractGeometry(metaclass=ABCMeta):

         self.plugin_name = config["plugin_name"]
         self.interpolator = config["interpolator"]
+        self.dem_roi_margin = config["dem_roi_margin"]

         self.dem = dem
         self.dem_roi = None
153
156
  conf = {"plugin_name": conf}
154
157
 
155
158
  # overload conf
156
- overloaded_conf["plugin_name"] = conf.get("plugin_name", None)
159
+ overloaded_conf["plugin_name"] = conf.get(
160
+ "plugin_name", "SharelocGeometry"
161
+ )
157
162
  overloaded_conf["interpolator"] = conf.get("interpolator", "cubic")
163
+ overloaded_conf["dem_roi_margin"] = conf.get("dem_roi_margin", 0.012)
158
164
 
159
165
  geometry_schema = {
160
166
  "plugin_name": str,
161
167
  "interpolator": And(str, lambda x: x in ["cubic", "linear"]),
168
+ "dem_roi_margin": Or(float, int),
162
169
  }
163
170
 
164
171
  # Check conf
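The pattern here fills defaults with `dict.get` and then validates the overloaded dict against a `json_checker` schema. A hypothetical standalone version of the same check, with default values taken from this diff:

```python
from json_checker import And, Checker, Or


def check_geometry_conf(conf):
    """Sketch of the overload-then-validate pattern (not the CARS API)."""
    if isinstance(conf, str):
        conf = {"plugin_name": conf}
    overloaded = {
        "plugin_name": conf.get("plugin_name", "SharelocGeometry"),
        "interpolator": conf.get("interpolator", "cubic"),
        "dem_roi_margin": conf.get("dem_roi_margin", 0.012),
    }
    schema = {
        "plugin_name": str,
        "interpolator": And(str, lambda x: x in ["cubic", "linear"]),
        "dem_roi_margin": Or(float, int),
    }
    Checker(schema).validate(overloaded)  # raises on invalid conf
    return overloaded


print(check_geometry_conf("SharelocGeometry"))
```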
@@ -372,7 +379,6 @@ class AbstractGeometry(metaclass=ABCMeta):
             array of size [number of points, 2]. The last index indicates
             the 'x' coordinate (last index set to 0) or the 'y' coordinate
             (last index set to 1).
-        :param interpolator: interpolator to use
         :return: sensors positions as a numpy array of size
             [number of points, 2]. The last index indicates the 'x'
             coordinate (last index set to 0) or
387
393
  f"Grid type {type(grid)} not a dict or RectificationGrid"
388
394
  )
389
395
 
396
+ # Ensure positions is a numpy array
397
+ positions = np.asarray(positions)
398
+
390
399
  # Get data
391
400
  with rio.open(grid["path"]) as grid_data:
392
401
  row_dep = grid_data.read(2)
@@ -403,17 +412,42 @@ class AbstractGeometry(metaclass=ABCMeta):
         cols = np.arange(ori_col, last_col, step_col)
         rows = np.arange(ori_row, last_row, step_row)

-        # create regular grid points positions
-        sensor_row_positions = row_dep
-        sensor_col_positions = col_dep
+        # Determine margin based on interpolator type
+        margin = 6 if self.interpolator == "cubic" else 3
+
+        # Find the bounds of positions to determine crop region
+        min_col = np.nanmin(positions[:, 0])
+        max_col = np.nanmax(positions[:, 0])
+        min_row = np.nanmin(positions[:, 1])
+        max_row = np.nanmax(positions[:, 1])
+
+        # Convert position bounds to grid indices with margin
+        min_col_idx = max(0, int((min_col - ori_col) / step_col) - margin)
+        max_col_idx = min(
+            len(cols) - 1, int((max_col - ori_col) / step_col) + margin
+        )
+        min_row_idx = max(0, int((min_row - ori_row) / step_row) - margin)
+        max_row_idx = min(
+            len(rows) - 1, int((max_row - ori_row) / step_row) + margin
+        )
+
+        # Crop the grids and coordinate arrays
+        cols_cropped = cols[min_col_idx : max_col_idx + 1]
+        rows_cropped = rows[min_row_idx : max_row_idx + 1]
+        sensor_row_positions_cropped = row_dep[
+            min_row_idx : max_row_idx + 1, min_col_idx : max_col_idx + 1
+        ]
+        sensor_col_positions_cropped = col_dep[
+            min_row_idx : max_row_idx + 1, min_col_idx : max_col_idx + 1
+        ]

         # interpolate sensor positions
         interpolator = interpolate.RegularGridInterpolator(
-            (cols, rows),
+            (cols_cropped, rows_cropped),
             np.stack(
                 (
-                    sensor_row_positions.transpose(),
-                    sensor_col_positions.transpose(),
+                    sensor_row_positions_cropped.transpose(),
+                    sensor_col_positions_cropped.transpose(),
                 ),
                 axis=2,
             ),
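The cropping added here restricts the interpolation grid to the queried positions plus a safety margin (6 samples for cubic, 3 for linear), so `RegularGridInterpolator` is no longer built over the full grid. A reduced sketch of the bounds-to-indices arithmetic on dummy values:

```python
import numpy as np

ori_col, step_col, n_cols = 0.0, 10.0, 100  # grid origin, step, size
positions_col = np.array([245.0, 260.0])    # queried column coordinates
margin = 6                                  # cubic interpolator margin

min_idx = max(0, int((positions_col.min() - ori_col) / step_col) - margin)
max_idx = min(
    n_cols - 1, int((positions_col.max() - ori_col) / step_col) + margin
)
print(min_idx, max_idx)  # 18 32: only 15 of 100 grid samples are needed
```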
@@ -607,7 +641,12 @@ class AbstractGeometry(metaclass=ABCMeta):
         """

     def image_envelope(
-        self, sensor, geomodel, out_path=None, out_driver="ESRI Shapefile"
+        self,
+        sensor,
+        geomodel,
+        out_path=None,
+        out_driver="ESRI Shapefile",
+        elevation=None,
     ):
         """
         Export the image footprint to a vector file
@@ -629,24 +668,28 @@ class AbstractGeometry(metaclass=ABCMeta):
             geomodel,
             np.array(shift_x),
             np.array(shift_y),
+            elevation,
         )
         lat_upper_right, lon_upper_right, _ = self.direct_loc(
             sensor,
             geomodel,
             np.array(img_size_x + shift_x),
             np.array(shift_y),
+            elevation,
         )
         lat_bottom_left, lon_bottom_left, _ = self.direct_loc(
             sensor,
             geomodel,
             np.array(shift_x),
             np.array(img_size_y + shift_y),
+            elevation,
         )
         lat_bottom_right, lon_bottom_right, _ = self.direct_loc(
             sensor,
             geomodel,
             np.array(img_size_x + shift_x),
             np.array(img_size_y + shift_y),
+            elevation,
         )

         u_l = (lon_upper_left, lat_upper_left)