cars-1.0.0rc1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of cars might be problematic.
- cars/__init__.py +74 -0
- cars/applications/__init__.py +37 -0
- cars/applications/application.py +117 -0
- cars/applications/application_constants.py +29 -0
- cars/applications/application_template.py +146 -0
- cars/applications/auxiliary_filling/__init__.py +29 -0
- cars/applications/auxiliary_filling/abstract_auxiliary_filling_app.py +104 -0
- cars/applications/auxiliary_filling/auxiliary_filling_algo.py +475 -0
- cars/applications/auxiliary_filling/auxiliary_filling_from_sensors_app.py +630 -0
- cars/applications/auxiliary_filling/auxiliary_filling_wrappers.py +90 -0
- cars/applications/dem_generation/__init__.py +30 -0
- cars/applications/dem_generation/abstract_dem_generation_app.py +116 -0
- cars/applications/dem_generation/bulldozer_config/base_config.yaml +42 -0
- cars/applications/dem_generation/bulldozer_dem_app.py +655 -0
- cars/applications/dem_generation/bulldozer_memory.py +55 -0
- cars/applications/dem_generation/dem_generation_algo.py +107 -0
- cars/applications/dem_generation/dem_generation_constants.py +32 -0
- cars/applications/dem_generation/dem_generation_wrappers.py +323 -0
- cars/applications/dense_match_filling/__init__.py +30 -0
- cars/applications/dense_match_filling/abstract_dense_match_filling_app.py +242 -0
- cars/applications/dense_match_filling/fill_disp_algo.py +113 -0
- cars/applications/dense_match_filling/fill_disp_constants.py +39 -0
- cars/applications/dense_match_filling/fill_disp_wrappers.py +83 -0
- cars/applications/dense_match_filling/zero_padding_app.py +302 -0
- cars/applications/dense_matching/__init__.py +30 -0
- cars/applications/dense_matching/abstract_dense_matching_app.py +261 -0
- cars/applications/dense_matching/census_mccnn_sgm_app.py +1460 -0
- cars/applications/dense_matching/cpp/__init__.py +0 -0
- cars/applications/dense_matching/cpp/dense_matching_cpp.cpython-312-i386-linux-gnu.so +0 -0
- cars/applications/dense_matching/cpp/dense_matching_cpp.py +94 -0
- cars/applications/dense_matching/cpp/includes/dense_matching.hpp +58 -0
- cars/applications/dense_matching/cpp/meson.build +9 -0
- cars/applications/dense_matching/cpp/src/bindings.cpp +13 -0
- cars/applications/dense_matching/cpp/src/dense_matching.cpp +207 -0
- cars/applications/dense_matching/dense_matching_algo.py +401 -0
- cars/applications/dense_matching/dense_matching_constants.py +89 -0
- cars/applications/dense_matching/dense_matching_wrappers.py +951 -0
- cars/applications/dense_matching/disparity_grid_algo.py +588 -0
- cars/applications/dense_matching/loaders/__init__.py +23 -0
- cars/applications/dense_matching/loaders/config_census_sgm_default.json +31 -0
- cars/applications/dense_matching/loaders/config_census_sgm_homogeneous.json +30 -0
- cars/applications/dense_matching/loaders/config_census_sgm_mountain_and_vegetation.json +30 -0
- cars/applications/dense_matching/loaders/config_census_sgm_shadow.json +30 -0
- cars/applications/dense_matching/loaders/config_census_sgm_sparse.json +36 -0
- cars/applications/dense_matching/loaders/config_census_sgm_urban.json +30 -0
- cars/applications/dense_matching/loaders/config_mapping.json +13 -0
- cars/applications/dense_matching/loaders/config_mccnn.json +28 -0
- cars/applications/dense_matching/loaders/global_land_cover_map.tif +0 -0
- cars/applications/dense_matching/loaders/pandora_loader.py +593 -0
- cars/applications/dsm_filling/__init__.py +32 -0
- cars/applications/dsm_filling/abstract_dsm_filling_app.py +101 -0
- cars/applications/dsm_filling/border_interpolation_app.py +270 -0
- cars/applications/dsm_filling/bulldozer_config/base_config.yaml +44 -0
- cars/applications/dsm_filling/bulldozer_filling_app.py +279 -0
- cars/applications/dsm_filling/exogenous_filling_app.py +333 -0
- cars/applications/grid_generation/__init__.py +30 -0
- cars/applications/grid_generation/abstract_grid_generation_app.py +142 -0
- cars/applications/grid_generation/epipolar_grid_generation_app.py +327 -0
- cars/applications/grid_generation/grid_correction_app.py +496 -0
- cars/applications/grid_generation/grid_generation_algo.py +388 -0
- cars/applications/grid_generation/grid_generation_constants.py +46 -0
- cars/applications/grid_generation/transform_grid.py +88 -0
- cars/applications/ground_truth_reprojection/__init__.py +30 -0
- cars/applications/ground_truth_reprojection/abstract_ground_truth_reprojection_app.py +137 -0
- cars/applications/ground_truth_reprojection/direct_localization_app.py +629 -0
- cars/applications/ground_truth_reprojection/ground_truth_reprojection_algo.py +275 -0
- cars/applications/point_cloud_outlier_removal/__init__.py +30 -0
- cars/applications/point_cloud_outlier_removal/abstract_outlier_removal_app.py +385 -0
- cars/applications/point_cloud_outlier_removal/outlier_removal_algo.py +392 -0
- cars/applications/point_cloud_outlier_removal/outlier_removal_constants.py +43 -0
- cars/applications/point_cloud_outlier_removal/small_components_app.py +527 -0
- cars/applications/point_cloud_outlier_removal/statistical_app.py +531 -0
- cars/applications/rasterization/__init__.py +30 -0
- cars/applications/rasterization/abstract_pc_rasterization_app.py +183 -0
- cars/applications/rasterization/rasterization_algo.py +534 -0
- cars/applications/rasterization/rasterization_constants.py +38 -0
- cars/applications/rasterization/rasterization_wrappers.py +634 -0
- cars/applications/rasterization/simple_gaussian_app.py +1152 -0
- cars/applications/resampling/__init__.py +28 -0
- cars/applications/resampling/abstract_resampling_app.py +187 -0
- cars/applications/resampling/bicubic_resampling_app.py +762 -0
- cars/applications/resampling/resampling_algo.py +614 -0
- cars/applications/resampling/resampling_constants.py +36 -0
- cars/applications/resampling/resampling_wrappers.py +309 -0
- cars/applications/sparse_matching/__init__.py +30 -0
- cars/applications/sparse_matching/abstract_sparse_matching_app.py +498 -0
- cars/applications/sparse_matching/sift_app.py +735 -0
- cars/applications/sparse_matching/sparse_matching_algo.py +360 -0
- cars/applications/sparse_matching/sparse_matching_constants.py +68 -0
- cars/applications/sparse_matching/sparse_matching_wrappers.py +238 -0
- cars/applications/triangulation/__init__.py +32 -0
- cars/applications/triangulation/abstract_triangulation_app.py +227 -0
- cars/applications/triangulation/line_of_sight_intersection_app.py +1243 -0
- cars/applications/triangulation/pc_transform.py +552 -0
- cars/applications/triangulation/triangulation_algo.py +371 -0
- cars/applications/triangulation/triangulation_constants.py +38 -0
- cars/applications/triangulation/triangulation_wrappers.py +259 -0
- cars/bundleadjustment.py +757 -0
- cars/cars.py +177 -0
- cars/conf/__init__.py +23 -0
- cars/conf/geoid/egm96.grd +0 -0
- cars/conf/geoid/egm96.grd.hdr +15 -0
- cars/conf/input_parameters.py +156 -0
- cars/conf/mask_cst.py +35 -0
- cars/core/__init__.py +23 -0
- cars/core/cars_logging.py +402 -0
- cars/core/constants.py +191 -0
- cars/core/constants_disparity.py +50 -0
- cars/core/datasets.py +140 -0
- cars/core/geometry/__init__.py +27 -0
- cars/core/geometry/abstract_geometry.py +1119 -0
- cars/core/geometry/shareloc_geometry.py +598 -0
- cars/core/inputs.py +568 -0
- cars/core/outputs.py +176 -0
- cars/core/preprocessing.py +722 -0
- cars/core/projection.py +843 -0
- cars/core/roi_tools.py +215 -0
- cars/core/tiling.py +774 -0
- cars/core/utils.py +164 -0
- cars/data_structures/__init__.py +23 -0
- cars/data_structures/cars_dataset.py +1541 -0
- cars/data_structures/cars_dict.py +74 -0
- cars/data_structures/corresponding_tiles_tools.py +186 -0
- cars/data_structures/dataframe_converter.py +185 -0
- cars/data_structures/format_transformation.py +297 -0
- cars/devibrate.py +689 -0
- cars/extractroi.py +264 -0
- cars/orchestrator/__init__.py +23 -0
- cars/orchestrator/achievement_tracker.py +125 -0
- cars/orchestrator/cluster/__init__.py +37 -0
- cars/orchestrator/cluster/abstract_cluster.py +244 -0
- cars/orchestrator/cluster/abstract_dask_cluster.py +375 -0
- cars/orchestrator/cluster/dask_cluster_tools.py +103 -0
- cars/orchestrator/cluster/dask_config/README.md +94 -0
- cars/orchestrator/cluster/dask_config/dask.yaml +21 -0
- cars/orchestrator/cluster/dask_config/distributed.yaml +70 -0
- cars/orchestrator/cluster/dask_config/jobqueue.yaml +26 -0
- cars/orchestrator/cluster/dask_config/reference_confs/dask-schema.yaml +137 -0
- cars/orchestrator/cluster/dask_config/reference_confs/dask.yaml +26 -0
- cars/orchestrator/cluster/dask_config/reference_confs/distributed-schema.yaml +1009 -0
- cars/orchestrator/cluster/dask_config/reference_confs/distributed.yaml +273 -0
- cars/orchestrator/cluster/dask_config/reference_confs/jobqueue.yaml +212 -0
- cars/orchestrator/cluster/dask_jobqueue_utils.py +204 -0
- cars/orchestrator/cluster/local_dask_cluster.py +116 -0
- cars/orchestrator/cluster/log_wrapper.py +1075 -0
- cars/orchestrator/cluster/mp_cluster/__init__.py +27 -0
- cars/orchestrator/cluster/mp_cluster/mp_factorizer.py +212 -0
- cars/orchestrator/cluster/mp_cluster/mp_objects.py +535 -0
- cars/orchestrator/cluster/mp_cluster/mp_tools.py +93 -0
- cars/orchestrator/cluster/mp_cluster/mp_wrapper.py +505 -0
- cars/orchestrator/cluster/mp_cluster/multiprocessing_cluster.py +873 -0
- cars/orchestrator/cluster/mp_cluster/multiprocessing_profiler.py +399 -0
- cars/orchestrator/cluster/pbs_dask_cluster.py +207 -0
- cars/orchestrator/cluster/sequential_cluster.py +139 -0
- cars/orchestrator/cluster/slurm_dask_cluster.py +234 -0
- cars/orchestrator/orchestrator.py +905 -0
- cars/orchestrator/orchestrator_constants.py +29 -0
- cars/orchestrator/registry/__init__.py +23 -0
- cars/orchestrator/registry/abstract_registry.py +143 -0
- cars/orchestrator/registry/compute_registry.py +106 -0
- cars/orchestrator/registry/id_generator.py +116 -0
- cars/orchestrator/registry/replacer_registry.py +213 -0
- cars/orchestrator/registry/saver_registry.py +363 -0
- cars/orchestrator/registry/unseen_registry.py +118 -0
- cars/orchestrator/tiles_profiler.py +279 -0
- cars/pipelines/__init__.py +26 -0
- cars/pipelines/conf_resolution/conf_final_resolution.yaml +5 -0
- cars/pipelines/conf_resolution/conf_first_resolution.yaml +2 -0
- cars/pipelines/conf_resolution/conf_intermediate_resolution.yaml +2 -0
- cars/pipelines/default/__init__.py +26 -0
- cars/pipelines/default/default_pipeline.py +786 -0
- cars/pipelines/parameters/__init__.py +0 -0
- cars/pipelines/parameters/advanced_parameters.py +417 -0
- cars/pipelines/parameters/advanced_parameters_constants.py +69 -0
- cars/pipelines/parameters/application_parameters.py +71 -0
- cars/pipelines/parameters/depth_map_inputs.py +0 -0
- cars/pipelines/parameters/dsm_inputs.py +918 -0
- cars/pipelines/parameters/dsm_inputs_constants.py +25 -0
- cars/pipelines/parameters/output_constants.py +52 -0
- cars/pipelines/parameters/output_parameters.py +454 -0
- cars/pipelines/parameters/sensor_inputs.py +842 -0
- cars/pipelines/parameters/sensor_inputs_constants.py +49 -0
- cars/pipelines/parameters/sensor_loaders/__init__.py +29 -0
- cars/pipelines/parameters/sensor_loaders/basic_classif_loader.py +86 -0
- cars/pipelines/parameters/sensor_loaders/basic_image_loader.py +98 -0
- cars/pipelines/parameters/sensor_loaders/pivot_classif_loader.py +90 -0
- cars/pipelines/parameters/sensor_loaders/pivot_image_loader.py +105 -0
- cars/pipelines/parameters/sensor_loaders/sensor_loader.py +93 -0
- cars/pipelines/parameters/sensor_loaders/sensor_loader_template.py +71 -0
- cars/pipelines/parameters/sensor_loaders/slurp_classif_loader.py +86 -0
- cars/pipelines/pipeline.py +119 -0
- cars/pipelines/pipeline_constants.py +31 -0
- cars/pipelines/pipeline_template.py +139 -0
- cars/pipelines/unit/__init__.py +26 -0
- cars/pipelines/unit/unit_pipeline.py +2850 -0
- cars/starter.py +167 -0
- cars-1.0.0rc1.dist-info/METADATA +292 -0
- cars-1.0.0rc1.dist-info/RECORD +200 -0
- cars-1.0.0rc1.dist-info/WHEEL +6 -0
- cars-1.0.0rc1.dist-info/entry_points.txt +8 -0
|
@@ -0,0 +1,1541 @@
|
|
|
1
|
+
#!/usr/bin/env python
|
|
2
|
+
# coding: utf8
|
|
3
|
+
#
|
|
4
|
+
# Copyright (c) 2020 Centre National d'Etudes Spatiales (CNES).
|
|
5
|
+
#
|
|
6
|
+
# This file is part of CARS
|
|
7
|
+
# (see https://github.com/CNES/cars).
|
|
8
|
+
#
|
|
9
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
10
|
+
# you may not use this file except in compliance with the License.
|
|
11
|
+
# You may obtain a copy of the License at
|
|
12
|
+
#
|
|
13
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
14
|
+
#
|
|
15
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
16
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
17
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
18
|
+
# See the License for the specific language governing permissions and
|
|
19
|
+
# limitations under the License.
|
|
20
|
+
#
|
|
21
|
+
# pylint: disable=too-many-lines
|
|
22
|
+
"""
|
|
23
|
+
cars_dataset module:
|
|
24
|
+
|
|
25
|
+
"""
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
import copy
|
|
29
|
+
import logging
|
|
30
|
+
import math
|
|
31
|
+
|
|
32
|
+
# Standard imports
|
|
33
|
+
import os
|
|
34
|
+
import pickle
|
|
35
|
+
from typing import Dict
|
|
36
|
+
|
|
37
|
+
# Third party imports
|
|
38
|
+
import numpy as np
|
|
39
|
+
import pandas
|
|
40
|
+
import pyproj
|
|
41
|
+
import rasterio as rio
|
|
42
|
+
import xarray as xr
|
|
43
|
+
import yaml
|
|
44
|
+
from rasterio.profiles import DefaultGTiffProfile
|
|
45
|
+
from rasterio.windows import Window
|
|
46
|
+
|
|
47
|
+
# CARS imports
|
|
48
|
+
from cars.core import constants as cst
|
|
49
|
+
from cars.core import outputs
|
|
50
|
+
from cars.core.utils import safe_makedirs
|
|
51
|
+
from cars.data_structures import cars_dict, dataframe_converter
|
|
52
|
+
|
|
53
|
+
# cars dataset dtype
|
|
54
|
+
CARS_DS_TYPE_ARRAY = "arrays"
|
|
55
|
+
CARS_DS_TYPE_POINTS = "points"
|
|
56
|
+
CARS_DS_TYPE_DICT = "dict"
|
|
57
|
+
|
|
58
|
+
# cars_dataset names
|
|
59
|
+
TILES_INFO_FILE = "tiles_info.yaml"
|
|
60
|
+
OVERLAP_FILE = "overlaps.npy"
|
|
61
|
+
GRID_FILE = "grid.npy"
|
|
62
|
+
PROFILE_FILE = "profile.json"
|
|
63
|
+
|
|
64
|
+
# single tile names
|
|
65
|
+
ATTRIBUTE_FILE = "attributes.yaml"
|
|
66
|
+
DATASET_FILE = "dataset"
|
|
67
|
+
DATAFRAME_FILE = "dataframe.csv"
|
|
68
|
+
CARSDICT_FILE = "cars_dict"
|
|
69
|
+
|
|
70
|
+
PROFILE = "profile"
|
|
71
|
+
WINDOW = "window"
|
|
72
|
+
OVERLAPS = "overlaps"
|
|
73
|
+
ATTRIBUTES = "attributes"
|
|
74
|
+
SAVING_INFO = "saving_info"
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
class CarsDataset:
|
|
78
|
+
"""
|
|
79
|
+
CarsDataset.
|
|
80
|
+
|
|
81
|
+
Internal CARS structure for organazing tiles
|
|
82
|
+
(xr.Datasets or pd.DataFrames).
|
|
83
|
+
"""
|
|
84
|
+
|
|
85
|
+
def __init__(self, dataset_type, load_from_disk=None, name="unknown"):
|
|
86
|
+
"""
|
|
87
|
+
Init function of CarsDataset.
|
|
88
|
+
If a path is provided, restore CarsDataset saved on disk.
|
|
89
|
+
|
|
90
|
+
:param dataset_type: type of dataset : 'arrays' or 'points'
|
|
91
|
+
:type dataset_type: str
|
|
92
|
+
:param load_from_disk: path to saved CarsDataset
|
|
93
|
+
:type load_from_disk: str
|
|
94
|
+
|
|
95
|
+
"""
|
|
96
|
+
|
|
97
|
+
self.dataset_type = dataset_type
|
|
98
|
+
if dataset_type not in [
|
|
99
|
+
CARS_DS_TYPE_ARRAY,
|
|
100
|
+
CARS_DS_TYPE_POINTS,
|
|
101
|
+
CARS_DS_TYPE_DICT,
|
|
102
|
+
]:
|
|
103
|
+
raise ValueError("wrong dataset type")
|
|
104
|
+
|
|
105
|
+
self.tiles = None
|
|
106
|
+
self.tiles_info = {}
|
|
107
|
+
self._tiling_grid = None
|
|
108
|
+
self.overlaps = None
|
|
109
|
+
self.attributes = {}
|
|
110
|
+
self.name = name
|
|
111
|
+
|
|
112
|
+
# Method to apply at the reception of tile
|
|
113
|
+
self.final_function = None
|
|
114
|
+
|
|
115
|
+
if load_from_disk is not None:
|
|
116
|
+
self.load_cars_dataset_from_disk(load_from_disk)
|
|
117
|
+
|
|
118
|
+
def __repr__(self):
|
|
119
|
+
"""
|
|
120
|
+
Repr function
|
|
121
|
+
:return: printable self CarsDataset
|
|
122
|
+
"""
|
|
123
|
+
return self.custom_print()
|
|
124
|
+
|
|
125
|
+
def __str__(self):
|
|
126
|
+
"""
|
|
127
|
+
Str function
|
|
128
|
+
:return: printable self CarsDataset
|
|
129
|
+
"""
|
|
130
|
+
return self.custom_print()
|
|
131
|
+
|
|
132
|
+
def custom_print(self):
|
|
133
|
+
"""
|
|
134
|
+
Return string of self
|
|
135
|
+
:return: printable self
|
|
136
|
+
"""
|
|
137
|
+
|
|
138
|
+
res = str(self.__class__) + ": \n" "dataset_type: " + str(
|
|
139
|
+
self.dataset_type
|
|
140
|
+
) + "\n" + "shape: " + str(self.shape) + "\n" + "tiling_grid: " + str(
|
|
141
|
+
self._tiling_grid
|
|
142
|
+
) + "\n" + "overlaps: " + str(
|
|
143
|
+
self.overlaps
|
|
144
|
+
) + "\n" + "tiles_info: " + str(
|
|
145
|
+
self.tiles_info
|
|
146
|
+
) + "\n" + "attributes: " + str(
|
|
147
|
+
self.attributes
|
|
148
|
+
) + "\n" + "tiles:" + str(
|
|
149
|
+
self.tiles
|
|
150
|
+
)
|
|
151
|
+
return res
|
|
152
|
+
|
|
153
|
+
@property
|
|
154
|
+
def shape(self):
|
|
155
|
+
"""
|
|
156
|
+
Return the shape of tiling grid (nb_row, nb_col)
|
|
157
|
+
:return: shape of grid
|
|
158
|
+
"""
|
|
159
|
+
return self.tiling_grid.shape[0], self.tiling_grid.shape[1]
|
|
160
|
+
|
|
161
|
+
@property
|
|
162
|
+
def tiling_grid(self):
|
|
163
|
+
"""
|
|
164
|
+
Tiling grid, containing pixel windows of tiles
|
|
165
|
+
|
|
166
|
+
:return: tiling grid, of shape [N, M, 4],
|
|
167
|
+
containing [row_min, row_max, col_min, col_max]
|
|
168
|
+
:rtype: np.ndarray
|
|
169
|
+
"""
|
|
170
|
+
return self._tiling_grid
|
|
171
|
+
|
|
172
|
+
@tiling_grid.setter
|
|
173
|
+
def tiling_grid(self, new_grid):
|
|
174
|
+
"""
|
|
175
|
+
Set tiling_grid
|
|
176
|
+
|
|
177
|
+
:param new_grid: new grid
|
|
178
|
+
:type new_grid: np.ndarray
|
|
179
|
+
"""
|
|
180
|
+
self._tiling_grid = new_grid
|
|
181
|
+
# reset overlaps to zeros
|
|
182
|
+
self.overlaps = np.zeros(new_grid.shape)
|
|
183
|
+
# fill dataset grid with Nones
|
|
184
|
+
self.generate_none_tiles()
|
|
185
|
+
|
|
186
|
+
def __getitem__(self, key):
|
|
187
|
+
"""
|
|
188
|
+
Get item : return the [row, col] dataset
|
|
189
|
+
|
|
190
|
+
:param key: tuple index
|
|
191
|
+
|
|
192
|
+
:return: tile
|
|
193
|
+
:rtype: xr.Dataset or pd.DataFrame
|
|
194
|
+
"""
|
|
195
|
+
|
|
196
|
+
if isinstance(key, (tuple, list)):
|
|
197
|
+
if len(key) == 2:
|
|
198
|
+
res = self.tiles[key[0]][key[1]]
|
|
199
|
+
elif len(key) == 1:
|
|
200
|
+
res = self.tiles[key[0]]
|
|
201
|
+
else:
|
|
202
|
+
raise ValueError("Too many indexes, expected 1 or 2")
|
|
203
|
+
else:
|
|
204
|
+
if isinstance(key, int):
|
|
205
|
+
res = self.tiles[key]
|
|
206
|
+
else:
|
|
207
|
+
raise ValueError("Index type not supported")
|
|
208
|
+
|
|
209
|
+
return res
|
|
210
|
+
|
|
211
|
+
def __setitem__(self, key, newvalue):
|
|
212
|
+
"""
|
|
213
|
+
Set new tile
|
|
214
|
+
|
|
215
|
+
:param key: tuple of row and col indexes
|
|
216
|
+
:type key: tuple(int, int)
|
|
217
|
+
:param newvalue: tile to set
|
|
218
|
+
"""
|
|
219
|
+
if isinstance(key, (tuple, list)):
|
|
220
|
+
if len(key) == 2:
|
|
221
|
+
self.tiles[key[0]][key[1]] = newvalue
|
|
222
|
+
else:
|
|
223
|
+
raise ValueError("Too many indexes, expected 2")
|
|
224
|
+
else:
|
|
225
|
+
raise ValueError("Index type not supported")
|
|
226
|
+
|
|
227
|
+
def load_single_tile(self, tile_path_name: str):
|
|
228
|
+
"""
|
|
229
|
+
Load a single tile
|
|
230
|
+
|
|
231
|
+
:param tile_path_name: Path of tile to load
|
|
232
|
+
:type tile_path_name: str
|
|
233
|
+
|
|
234
|
+
:return: single tile
|
|
235
|
+
:rtype: xarray Dataset or Panda dataframe to file
|
|
236
|
+
|
|
237
|
+
"""
|
|
238
|
+
|
|
239
|
+
functions = {
|
|
240
|
+
CARS_DS_TYPE_ARRAY: load_single_tile_array,
|
|
241
|
+
CARS_DS_TYPE_POINTS: load_single_tile_points,
|
|
242
|
+
CARS_DS_TYPE_DICT: load_single_tile_dict,
|
|
243
|
+
}
|
|
244
|
+
|
|
245
|
+
return functions[self.dataset_type](tile_path_name)
|
|
246
|
+
|
|
247
|
+
def save_single_tile(self, tile, tile_path_name: str):
|
|
248
|
+
"""
|
|
249
|
+
Save xarray Dataset or Panda dataframe to file
|
|
250
|
+
|
|
251
|
+
:param tile: tile to save
|
|
252
|
+
:type tile: xr.Dataset or pd.DataFrame
|
|
253
|
+
:param tile_path_name: Path of file to save in
|
|
254
|
+
"""
|
|
255
|
+
|
|
256
|
+
functions = {
|
|
257
|
+
CARS_DS_TYPE_ARRAY: save_single_tile_array,
|
|
258
|
+
CARS_DS_TYPE_POINTS: save_single_tile_points,
|
|
259
|
+
CARS_DS_TYPE_DICT: save_single_tile_dict,
|
|
260
|
+
}
|
|
261
|
+
|
|
262
|
+
return functions[self.dataset_type](tile, tile_path_name)
|
|
263
|
+
|
|
264
|
+
def run_save(self, future_result, file_name: str, **kwargs):
|
|
265
|
+
"""
|
|
266
|
+
Save future result when arrived
|
|
267
|
+
|
|
268
|
+
:param future_result: xarray.Dataset received
|
|
269
|
+
:param file_name: filename to save data to
|
|
270
|
+
"""
|
|
271
|
+
|
|
272
|
+
functions = {
|
|
273
|
+
CARS_DS_TYPE_ARRAY: run_save_arrays,
|
|
274
|
+
CARS_DS_TYPE_POINTS: run_save_points,
|
|
275
|
+
}
|
|
276
|
+
|
|
277
|
+
return functions[self.dataset_type](future_result, file_name, **kwargs)
|
|
278
|
+
|
|
279
|
+
def get_window_as_dict(self, row, col, from_terrain=False, resolution=1):
|
|
280
|
+
"""
|
|
281
|
+
Get window in pixels for rasterio. Set from_terrain if tiling grid
|
|
282
|
+
was defined in geographic coordinates.
|
|
283
|
+
|
|
284
|
+
:param row: row
|
|
285
|
+
:type row: int
|
|
286
|
+
:param col: col
|
|
287
|
+
:type col: int
|
|
288
|
+
:param from_terrain: true if in terrain coordinates
|
|
289
|
+
:type from_terrain: bool
|
|
290
|
+
:param resolution: resolution
|
|
291
|
+
:type resolution: float
|
|
292
|
+
|
|
293
|
+
:return: New window : {
|
|
294
|
+
"row_min" : row_min ,
|
|
295
|
+
"row_max" : row_max
|
|
296
|
+
"col_min" : col_min
|
|
297
|
+
"col_max" : col_max
|
|
298
|
+
}
|
|
299
|
+
:rtype: Dict
|
|
300
|
+
|
|
301
|
+
"""
|
|
302
|
+
|
|
303
|
+
row_min = np.min(self.tiling_grid[:, :, 0])
|
|
304
|
+
col_min = np.min(self.tiling_grid[:, :, 2])
|
|
305
|
+
col_max = np.max(self.tiling_grid[:, :, 3])
|
|
306
|
+
|
|
307
|
+
window_arr = np.copy(self.tiling_grid[row, col, :])
|
|
308
|
+
|
|
309
|
+
if from_terrain:
|
|
310
|
+
# row -> y axis : reversed by convention
|
|
311
|
+
window = np.array(
|
|
312
|
+
[
|
|
313
|
+
col_max - window_arr[3],
|
|
314
|
+
col_max - window_arr[2],
|
|
315
|
+
window_arr[0] - row_min,
|
|
316
|
+
window_arr[1] - row_min,
|
|
317
|
+
]
|
|
318
|
+
)
|
|
319
|
+
|
|
320
|
+
else:
|
|
321
|
+
window = np.array(
|
|
322
|
+
[
|
|
323
|
+
window_arr[0] - row_min,
|
|
324
|
+
window_arr[1] - row_min,
|
|
325
|
+
window_arr[2] - col_min,
|
|
326
|
+
window_arr[3] - col_min,
|
|
327
|
+
]
|
|
328
|
+
)
|
|
329
|
+
|
|
330
|
+
# normalize with resolution
|
|
331
|
+
window = np.round(window / resolution)
|
|
332
|
+
|
|
333
|
+
new_window = {
|
|
334
|
+
"row_min": int(window[0]),
|
|
335
|
+
"row_max": int(window[1]),
|
|
336
|
+
"col_min": int(window[2]),
|
|
337
|
+
"col_max": int(window[3]),
|
|
338
|
+
}
|
|
339
|
+
|
|
340
|
+
return new_window
|
|
341
|
+
|
|
342
|
+
def create_grid( # pylint: disable=too-many-positional-arguments
|
|
343
|
+
self,
|
|
344
|
+
nb_col: int,
|
|
345
|
+
nb_row: int,
|
|
346
|
+
row_split: int,
|
|
347
|
+
col_split: int,
|
|
348
|
+
row_overlap: int,
|
|
349
|
+
col_overlap: int,
|
|
350
|
+
):
|
|
351
|
+
"""
|
|
352
|
+
Generate grid of positions by splitting [0, nb_row]x[0, nb_col]
|
|
353
|
+
in splits of xsplit x ysplit size
|
|
354
|
+
|
|
355
|
+
:param nb_col : number of columns
|
|
356
|
+
:param nb_row : number of lines
|
|
357
|
+
:param col_split: width of splits
|
|
358
|
+
:param row_split: height of splits
|
|
359
|
+
:param col_overlap: overlap to apply on rows
|
|
360
|
+
:param row_overlap: overlap to apply on cols
|
|
361
|
+
|
|
362
|
+
"""
|
|
363
|
+
nb_col_splits = math.ceil(nb_col / row_split)
|
|
364
|
+
nb_row_splits = math.ceil(nb_row / col_split)
|
|
365
|
+
|
|
366
|
+
row_min, row_max = 0, nb_row
|
|
367
|
+
col_min, col_max = 0, nb_col
|
|
368
|
+
|
|
369
|
+
out_grid = np.ndarray(
|
|
370
|
+
shape=(nb_row_splits, nb_col_splits, 4), dtype=int
|
|
371
|
+
)
|
|
372
|
+
|
|
373
|
+
out_overlap = np.ndarray(
|
|
374
|
+
shape=(nb_row_splits, nb_col_splits, 4), dtype=int
|
|
375
|
+
)
|
|
376
|
+
|
|
377
|
+
for i in range(0, nb_row_splits):
|
|
378
|
+
for j in range(0, nb_col_splits):
|
|
379
|
+
row_down = row_min + row_split * i
|
|
380
|
+
col_left = col_min + col_split * j
|
|
381
|
+
row_up = min(row_max, row_min + (i + 1) * row_split)
|
|
382
|
+
col_right = min(col_max, col_min + (j + 1) * col_split)
|
|
383
|
+
|
|
384
|
+
out_grid[i, j, 0] = row_down
|
|
385
|
+
out_grid[i, j, 1] = row_up
|
|
386
|
+
out_grid[i, j, 2] = col_left
|
|
387
|
+
out_grid[i, j, 3] = col_right
|
|
388
|
+
|
|
389
|
+
# fill overlap [OL_row_down, OL_row_up, OL_col_left,
|
|
390
|
+
# OL_col_right]
|
|
391
|
+
out_overlap[i, j, 0] = row_down - max(
|
|
392
|
+
row_min, row_down - row_overlap
|
|
393
|
+
)
|
|
394
|
+
out_overlap[i, j, 1] = (
|
|
395
|
+
min(row_max, row_up + row_overlap) - row_up
|
|
396
|
+
)
|
|
397
|
+
out_overlap[i, j, 2] = col_left - max(
|
|
398
|
+
col_min, col_left - col_overlap
|
|
399
|
+
)
|
|
400
|
+
out_overlap[i, j, 3] = (
|
|
401
|
+
min(col_right, col_right + col_overlap) - col_right
|
|
402
|
+
)
|
|
403
|
+
|
|
404
|
+
self.tiling_grid = out_grid
|
|
405
|
+
self.overlaps = out_overlap
|
|
406
|
+
|
|
407
|
+
def generate_none_tiles(self):
|
|
408
|
+
"""
|
|
409
|
+
Generate the structure of data tiles, with Nones, according
|
|
410
|
+
to grid shape.
|
|
411
|
+
|
|
412
|
+
"""
|
|
413
|
+
|
|
414
|
+
self.tiles = create_none(
|
|
415
|
+
self.tiling_grid.shape[0], self.tiling_grid.shape[1]
|
|
416
|
+
)
|
|
417
|
+
|
|
418
|
+
def create_empty_copy(self, cars_ds):
|
|
419
|
+
"""
|
|
420
|
+
Copy attributes, grid, overlaps, and create Nones.
|
|
421
|
+
|
|
422
|
+
:param cars_ds: CarsDataset to copy
|
|
423
|
+
:type cars_ds: CarsDataset
|
|
424
|
+
|
|
425
|
+
"""
|
|
426
|
+
|
|
427
|
+
self.tiles_info = copy.deepcopy(cars_ds.tiles_info)
|
|
428
|
+
self.tiling_grid = copy.deepcopy(cars_ds.tiling_grid)
|
|
429
|
+
self.overlaps = copy.deepcopy(cars_ds.overlaps)
|
|
430
|
+
|
|
431
|
+
self.tiles = []
|
|
432
|
+
for _ in range(cars_ds.overlaps.shape[0]):
|
|
433
|
+
tiles_row = []
|
|
434
|
+
for _ in range(cars_ds.overlaps.shape[1]):
|
|
435
|
+
tiles_row.append(None)
|
|
436
|
+
self.tiles.append(tiles_row)
|
|
437
|
+
|
|
438
|
+
def generate_descriptor( # pylint: disable=too-many-positional-arguments
|
|
439
|
+
self, future_result, file_name, tag=None, dtype=None, nodata=None
|
|
440
|
+
):
|
|
441
|
+
"""
|
|
442
|
+
Generate de rasterio descriptor for the given future result
|
|
443
|
+
|
|
444
|
+
Only works with pixelic tiling grid
|
|
445
|
+
|
|
446
|
+
:param future_result: Future result
|
|
447
|
+
:type future_result: xr.Dataset
|
|
448
|
+
:param file_name: file name to save futures to
|
|
449
|
+
:type file_name: str
|
|
450
|
+
:param tag: tag to save
|
|
451
|
+
:type tag: str
|
|
452
|
+
:param dtype: dtype
|
|
453
|
+
:type dtype: str
|
|
454
|
+
:param nodata: no data value
|
|
455
|
+
:type nodata: float
|
|
456
|
+
"""
|
|
457
|
+
|
|
458
|
+
# Get profile from 1st finished future
|
|
459
|
+
new_profile = get_profile_for_tag_dataset(future_result, tag)
|
|
460
|
+
|
|
461
|
+
if "width" not in new_profile or "height" not in new_profile:
|
|
462
|
+
logging.debug(
|
|
463
|
+
"CarsDataset doesn't have a profile, default is given"
|
|
464
|
+
)
|
|
465
|
+
new_profile = DefaultGTiffProfile(count=new_profile["count"])
|
|
466
|
+
new_profile["height"] = np.max(self.tiling_grid[:, :, 1])
|
|
467
|
+
new_profile["width"] = np.max(self.tiling_grid[:, :, 3])
|
|
468
|
+
|
|
469
|
+
# Change dtype
|
|
470
|
+
new_profile["dtype"] = dtype
|
|
471
|
+
if nodata is not None:
|
|
472
|
+
new_profile["nodata"] = nodata
|
|
473
|
+
|
|
474
|
+
descriptor = rio.open(
|
|
475
|
+
file_name, "w+", **new_profile, BIGTIFF="IF_SAFER"
|
|
476
|
+
)
|
|
477
|
+
|
|
478
|
+
return descriptor
|
|
479
|
+
|
|
480
|
+
def save_cars_dataset(self, directory):
|
|
481
|
+
"""
|
|
482
|
+
Save whole CarsDataset to given directory, including tiling grids,
|
|
483
|
+
attributes, overlaps, and all the xr.Dataset or pd.DataFrames.
|
|
484
|
+
|
|
485
|
+
:param directory: Path where to save self CarsDataset
|
|
486
|
+
:type directory: str
|
|
487
|
+
|
|
488
|
+
"""
|
|
489
|
+
|
|
490
|
+
# Create CarsDataset folder
|
|
491
|
+
safe_makedirs(directory)
|
|
492
|
+
|
|
493
|
+
if self.tiles is None:
|
|
494
|
+
logging.error("No tiles managed by CarsDatasets")
|
|
495
|
+
raise RuntimeError("No tiles managed by CarsDatasets")
|
|
496
|
+
|
|
497
|
+
# save tiles info
|
|
498
|
+
tiles_info_file = os.path.join(directory, TILES_INFO_FILE)
|
|
499
|
+
save_dict(self.tiles_info, tiles_info_file)
|
|
500
|
+
|
|
501
|
+
# save grid
|
|
502
|
+
grid_file = os.path.join(directory, GRID_FILE)
|
|
503
|
+
save_numpy_array(self.tiling_grid, grid_file)
|
|
504
|
+
|
|
505
|
+
# save overlap
|
|
506
|
+
overlap_file = os.path.join(directory, OVERLAP_FILE)
|
|
507
|
+
save_numpy_array(self.overlaps, overlap_file)
|
|
508
|
+
|
|
509
|
+
nb_rows, nb_cols = self.tiling_grid.shape[0], self.tiling_grid.shape[1]
|
|
510
|
+
|
|
511
|
+
# save each tile
|
|
512
|
+
for col in range(nb_cols):
|
|
513
|
+
for row in range(nb_rows):
|
|
514
|
+
# Get name
|
|
515
|
+
current_tile_path_name = create_tile_path(col, row, directory)
|
|
516
|
+
|
|
517
|
+
# save tile
|
|
518
|
+
self.save_single_tile(
|
|
519
|
+
self.tiles[row][col], current_tile_path_name
|
|
520
|
+
)
|
|
521
|
+
|
|
522
|
+
def load_cars_dataset_from_disk(self, directory):
|
|
523
|
+
"""
|
|
524
|
+
Load whole CarsDataset from given directory
|
|
525
|
+
|
|
526
|
+
:param directory: Path where is saved CarsDataset to load
|
|
527
|
+
:type directory: str
|
|
528
|
+
|
|
529
|
+
"""
|
|
530
|
+
|
|
531
|
+
# get tiles info
|
|
532
|
+
tiles_info_file = os.path.join(directory, TILES_INFO_FILE)
|
|
533
|
+
self.tiles_info = load_dict(tiles_info_file)
|
|
534
|
+
|
|
535
|
+
# load grid
|
|
536
|
+
grid_file = os.path.join(directory, GRID_FILE)
|
|
537
|
+
self.tiling_grid = load_numpy_array(grid_file)
|
|
538
|
+
|
|
539
|
+
nb_rows, nb_cols = self.tiling_grid.shape[0], self.tiling_grid.shape[1]
|
|
540
|
+
|
|
541
|
+
# load overlap
|
|
542
|
+
overlap_file = os.path.join(directory, OVERLAP_FILE)
|
|
543
|
+
self.overlaps = load_numpy_array(overlap_file)
|
|
544
|
+
|
|
545
|
+
# load each tile
|
|
546
|
+
self.tiles = []
|
|
547
|
+
for row in range(nb_rows):
|
|
548
|
+
tiles_row = []
|
|
549
|
+
for col in range(nb_cols):
|
|
550
|
+
# Get name
|
|
551
|
+
current_tile_path_name = create_tile_path(col, row, directory)
|
|
552
|
+
|
|
553
|
+
# load tile
|
|
554
|
+
tiles_row.append(self.load_single_tile(current_tile_path_name))
|
|
555
|
+
|
|
556
|
+
self.tiles.append(tiles_row)
|
|
557
|
+
|
|
558
|
+
|
|
559
|
+
def run_save_arrays(future_result, file_name, tag=None, descriptor=None):
|
|
560
|
+
"""
|
|
561
|
+
Save future when arrived
|
|
562
|
+
|
|
563
|
+
:param future_result: xarray.Dataset received
|
|
564
|
+
:type future_result: xarray.Dataset
|
|
565
|
+
:param file_name: filename to save data to
|
|
566
|
+
:type file_name: str
|
|
567
|
+
:param tag: dataset tag to rasterize
|
|
568
|
+
:type tag: str
|
|
569
|
+
:param descriptor: rasterio descriptor
|
|
570
|
+
"""
|
|
571
|
+
# write future result using saved window and overlaps
|
|
572
|
+
|
|
573
|
+
save_dataset(
|
|
574
|
+
future_result,
|
|
575
|
+
file_name,
|
|
576
|
+
tag,
|
|
577
|
+
use_windows_and_overlaps=True,
|
|
578
|
+
descriptor=descriptor,
|
|
579
|
+
)
|
|
580
|
+
|
|
581
|
+
|
|
582
|
+
def run_save_points( # pylint: disable=too-many-positional-arguments
|
|
583
|
+
future_result,
|
|
584
|
+
file_name,
|
|
585
|
+
overwrite=False,
|
|
586
|
+
save_by_pair=False,
|
|
587
|
+
point_cloud_format="csv",
|
|
588
|
+
overwrite_file_name=True,
|
|
589
|
+
):
|
|
590
|
+
"""
|
|
591
|
+
Save future result when arrived
|
|
592
|
+
|
|
593
|
+
:param future_result: pandas Dataframe received
|
|
594
|
+
:type future_result: pandas Dataframe
|
|
595
|
+
:param file_name: filename to save data to
|
|
596
|
+
:type file_name: str
|
|
597
|
+
:param overwrite: overwrite file
|
|
598
|
+
:type overwrite: bool
|
|
599
|
+
:param point_cloud_format: output point cloud format
|
|
600
|
+
:type point_cloud_format: str
|
|
601
|
+
:param overwrite_file_name: generate a new filename from input filename
|
|
602
|
+
matching input dataframe attributes
|
|
603
|
+
:type overwrite_file_name: bool
|
|
604
|
+
"""
|
|
605
|
+
|
|
606
|
+
# Save
|
|
607
|
+
save_all_dataframe(
|
|
608
|
+
future_result,
|
|
609
|
+
file_name,
|
|
610
|
+
save_by_pair=save_by_pair,
|
|
611
|
+
overwrite=overwrite,
|
|
612
|
+
point_cloud_format=point_cloud_format,
|
|
613
|
+
overwrite_file_name=overwrite_file_name,
|
|
614
|
+
)
|
|
615
|
+
|
|
616
|
+
|
|
617
|
+
def load_single_tile_array(tile_path_name: str) -> xr.Dataset:
|
|
618
|
+
"""
|
|
619
|
+
Load a xarray tile
|
|
620
|
+
|
|
621
|
+
:param tile_path_name: Path of tile to load
|
|
622
|
+
:type tile_path_name: str
|
|
623
|
+
|
|
624
|
+
:return: tile dataset
|
|
625
|
+
:rtype: xr.Dataset
|
|
626
|
+
|
|
627
|
+
"""
|
|
628
|
+
|
|
629
|
+
# get dataset
|
|
630
|
+
dataset_file_name = os.path.join(tile_path_name, DATASET_FILE)
|
|
631
|
+
if not os.path.exists(dataset_file_name):
|
|
632
|
+
logging.error("Tile {} does not exists".format(dataset_file_name))
|
|
633
|
+
return None
|
|
634
|
+
with open(dataset_file_name, "rb") as handle:
|
|
635
|
+
dataset = pickle.load(handle)
|
|
636
|
+
|
|
637
|
+
# get attributes
|
|
638
|
+
attributes_file_name = os.path.join(tile_path_name, ATTRIBUTE_FILE)
|
|
639
|
+
attributes = load_dict(attributes_file_name)
|
|
640
|
+
|
|
641
|
+
# Format transformation
|
|
642
|
+
if PROFILE in attributes:
|
|
643
|
+
attributes[PROFILE] = dict_profile_to_rio_profile(attributes[PROFILE])
|
|
644
|
+
|
|
645
|
+
# add to dataset
|
|
646
|
+
dataset.attrs.update(attributes)
|
|
647
|
+
|
|
648
|
+
return dataset
|
|
649
|
+
|
|
650
|
+
|
|
651
|
+
def load_single_tile_points(tile_path_name: str):
|
|
652
|
+
"""
|
|
653
|
+
Load a panda dataframe
|
|
654
|
+
|
|
655
|
+
:param tile_path_name: Path of tile to load
|
|
656
|
+
:type tile_path_name: str
|
|
657
|
+
|
|
658
|
+
:return: Tile dataframe
|
|
659
|
+
:rtype: Panda dataframe
|
|
660
|
+
|
|
661
|
+
"""
|
|
662
|
+
|
|
663
|
+
# get dataframe
|
|
664
|
+
dataframe_file_name = os.path.join(tile_path_name, DATAFRAME_FILE)
|
|
665
|
+
|
|
666
|
+
if not os.path.exists(dataframe_file_name):
|
|
667
|
+
logging.error("Tile {} does not exists".format(dataframe_file_name))
|
|
668
|
+
return None
|
|
669
|
+
|
|
670
|
+
with open(dataframe_file_name, "rb") as handle:
|
|
671
|
+
dataframe = pickle.load(handle)
|
|
672
|
+
|
|
673
|
+
# get attributes
|
|
674
|
+
attributes_file_name = os.path.join(tile_path_name, ATTRIBUTE_FILE)
|
|
675
|
+
attributes = load_dict(attributes_file_name)
|
|
676
|
+
|
|
677
|
+
# Format transformation
|
|
678
|
+
|
|
679
|
+
# add to dataframe
|
|
680
|
+
dataframe.attrs.update(attributes)
|
|
681
|
+
|
|
682
|
+
return dataframe
|
|
683
|
+
|
|
684
|
+
|
|
685
|
+
def load_single_tile_dict(tile_path_name: str):
|
|
686
|
+
"""
|
|
687
|
+
Load a CarsDict
|
|
688
|
+
|
|
689
|
+
:param tile_path_name: Path of tile to load
|
|
690
|
+
:type tile_path_name: str
|
|
691
|
+
|
|
692
|
+
:return: Tile dataframe
|
|
693
|
+
:rtype: Panda dataframe
|
|
694
|
+
|
|
695
|
+
"""
|
|
696
|
+
|
|
697
|
+
# get dataframe
|
|
698
|
+
dict_file_name = os.path.join(tile_path_name, CARSDICT_FILE)
|
|
699
|
+
|
|
700
|
+
if not os.path.exists(dict_file_name):
|
|
701
|
+
logging.error("Tile {} does not exists".format(dict_file_name))
|
|
702
|
+
return None
|
|
703
|
+
|
|
704
|
+
with open(dict_file_name, "rb") as handle:
|
|
705
|
+
dict_cars = pickle.load(handle)
|
|
706
|
+
|
|
707
|
+
# get attributes
|
|
708
|
+
attributes_file_name = os.path.join(tile_path_name, ATTRIBUTE_FILE)
|
|
709
|
+
attributes = load_dict(attributes_file_name)
|
|
710
|
+
|
|
711
|
+
# Format transformation
|
|
712
|
+
|
|
713
|
+
# add to dataframe
|
|
714
|
+
dict_cars.attrs.update(attributes)
|
|
715
|
+
|
|
716
|
+
return dict_cars
|
|
717
|
+
|
|
718
|
+
|
|
719
|
+
def save_single_tile_array(dataset: xr.Dataset, tile_path_name: str):
|
|
720
|
+
"""
|
|
721
|
+
Save xarray to directory, saving the data in a different file that
|
|
722
|
+
the attributes (saved in a .json next to it).
|
|
723
|
+
|
|
724
|
+
:param dataset: dataset to save
|
|
725
|
+
:type dataset: xr.Dataset
|
|
726
|
+
:param tile_path_name: Path of file to save in
|
|
727
|
+
:type tile_path_name: str
|
|
728
|
+
"""
|
|
729
|
+
|
|
730
|
+
if dataset is None:
|
|
731
|
+
logging.debug("Tile is None: not saved")
|
|
732
|
+
return
|
|
733
|
+
|
|
734
|
+
# Create tile folder
|
|
735
|
+
safe_makedirs(tile_path_name)
|
|
736
|
+
|
|
737
|
+
# save attributes
|
|
738
|
+
saved_dataset_attrs = copy.copy(dataset.attrs)
|
|
739
|
+
attributes_file_name = os.path.join(tile_path_name, ATTRIBUTE_FILE)
|
|
740
|
+
if dataset.attrs is None:
|
|
741
|
+
attributes = {}
|
|
742
|
+
else:
|
|
743
|
+
attributes = dataset.attrs
|
|
744
|
+
|
|
745
|
+
# Format transformation
|
|
746
|
+
if PROFILE in attributes:
|
|
747
|
+
attributes[PROFILE] = rio_profile_to_dict_profile(attributes[PROFILE])
|
|
748
|
+
|
|
749
|
+
# dump
|
|
750
|
+
# separate attributes
|
|
751
|
+
dataset.attrs, custom_attributes = separate_dicts(
|
|
752
|
+
attributes, [PROFILE, WINDOW, OVERLAPS, SAVING_INFO, ATTRIBUTES]
|
|
753
|
+
)
|
|
754
|
+
# save
|
|
755
|
+
save_dict(custom_attributes, attributes_file_name)
|
|
756
|
+
dataset_file_name = os.path.join(tile_path_name, DATASET_FILE)
|
|
757
|
+
with open(dataset_file_name, "wb") as handle:
|
|
758
|
+
pickle.dump(dataset, handle, protocol=pickle.HIGHEST_PROTOCOL)
|
|
759
|
+
|
|
760
|
+
# Retrieve attrs
|
|
761
|
+
dataset.attrs = saved_dataset_attrs
|
|
762
|
+
|
|
763
|
+
|
|
764
|
+
def save_single_tile_points(dataframe, tile_path_name: str):
|
|
765
|
+
"""
|
|
766
|
+
Save dataFrame to directory, saving the data in a different file that
|
|
767
|
+
the attributes (saved in a .json next to it).
|
|
768
|
+
|
|
769
|
+
:param dataframe: dataframe to save
|
|
770
|
+
:type dataframe: pd.DataFrame
|
|
771
|
+
:param tile_path_name: Path of file to save in
|
|
772
|
+
:type tile_path_name: str
|
|
773
|
+
"""
|
|
774
|
+
if dataframe is None:
|
|
775
|
+
logging.debug("Tile is None: not saved")
|
|
776
|
+
return
|
|
777
|
+
# Create tile folder
|
|
778
|
+
safe_makedirs(tile_path_name)
|
|
779
|
+
|
|
780
|
+
# save attributes
|
|
781
|
+
saved_dataframe_attrs = copy.copy(dataframe.attrs)
|
|
782
|
+
attributes_file_name = os.path.join(tile_path_name, ATTRIBUTE_FILE)
|
|
783
|
+
if dataframe.attrs is None:
|
|
784
|
+
attributes = {}
|
|
785
|
+
else:
|
|
786
|
+
attributes = dataframe.attrs
|
|
787
|
+
|
|
788
|
+
# Format transformation
|
|
789
|
+
|
|
790
|
+
# dump
|
|
791
|
+
# separate attributes
|
|
792
|
+
dataframe.attrs, custom_attributes = separate_dicts(
|
|
793
|
+
attributes, [SAVING_INFO, ATTRIBUTES]
|
|
794
|
+
)
|
|
795
|
+
# save
|
|
796
|
+
save_dict(custom_attributes, attributes_file_name)
|
|
797
|
+
dataframe_file_name = os.path.join(tile_path_name, DATAFRAME_FILE)
|
|
798
|
+
with open(dataframe_file_name, "wb") as handle:
|
|
799
|
+
pickle.dump(dataframe, handle, protocol=pickle.HIGHEST_PROTOCOL)
|
|
800
|
+
|
|
801
|
+
# Retrieve attrs
|
|
802
|
+
dataframe.attrs = saved_dataframe_attrs
|
|
803
|
+
|
|
804
|
+
|
|
805
|
+
def save_single_tile_dict(dict_cars, tile_path_name: str):
|
|
806
|
+
"""
|
|
807
|
+
Save cars_dict to directory, saving the data in a different file that
|
|
808
|
+
the attributes (saved in a .json next to it).
|
|
809
|
+
|
|
810
|
+
:param dict_cars: dataframe to save
|
|
811
|
+
:type dict_cars: pd.DataFrame
|
|
812
|
+
:param tile_path_name: Path of file to save in
|
|
813
|
+
:type tile_path_name: str
|
|
814
|
+
"""
|
|
815
|
+
# Create tile folder
|
|
816
|
+
safe_makedirs(tile_path_name)
|
|
817
|
+
|
|
818
|
+
# save attributes
|
|
819
|
+
saved_dict_cars_attrs = copy.copy(dict_cars.attrs)
|
|
820
|
+
attributes_file_name = os.path.join(tile_path_name, ATTRIBUTE_FILE)
|
|
821
|
+
if dict_cars.attrs is None:
|
|
822
|
+
attributes = {}
|
|
823
|
+
else:
|
|
824
|
+
attributes = dict_cars.attrs
|
|
825
|
+
|
|
826
|
+
# Format transformation
|
|
827
|
+
|
|
828
|
+
# dump
|
|
829
|
+
# separate attributes
|
|
830
|
+
dict_cars.attrs, custom_attributes = separate_dicts(
|
|
831
|
+
attributes, [SAVING_INFO, ATTRIBUTES]
|
|
832
|
+
)
|
|
833
|
+
# save
|
|
834
|
+
save_dict(custom_attributes, attributes_file_name)
|
|
835
|
+
dict_cars_file_name = os.path.join(tile_path_name, CARSDICT_FILE)
|
|
836
|
+
with open(dict_cars_file_name, "wb") as handle:
|
|
837
|
+
pickle.dump(dict_cars, handle, protocol=pickle.HIGHEST_PROTOCOL)
|
|
838
|
+
|
|
839
|
+
# Retrieve attrs
|
|
840
|
+
dict_cars.attrs = saved_dict_cars_attrs
|
|
841
|
+
|
|
842
|
+
|
|
843
|
+
def fill_dataset( # pylint: disable=too-many-positional-arguments
|
|
844
|
+
dataset,
|
|
845
|
+
saving_info=None,
|
|
846
|
+
window=None,
|
|
847
|
+
profile=None,
|
|
848
|
+
attributes=None,
|
|
849
|
+
overlaps=None,
|
|
850
|
+
):
|
|
851
|
+
"""
|
|
852
|
+
From a full xarray dataset, fill info properly.
|
|
853
|
+
User can fill with saving information (containing CarsDataset id),
|
|
854
|
+
window of current tile and its overlaps,
|
|
855
|
+
rasterio profile of full data, and attributes associated to data
|
|
856
|
+
|
|
857
|
+
:param dataset: dataset to fill
|
|
858
|
+
:type dataset: xarray_dataset
|
|
859
|
+
:param saving_info: created by Orchestrator.get_saving_infos
|
|
860
|
+
:type saving_info: dict
|
|
861
|
+
:param window:
|
|
862
|
+
:type window: dict
|
|
863
|
+
:param profile:
|
|
864
|
+
:type profile: dict
|
|
865
|
+
:param attributes:
|
|
866
|
+
:type attributes: dict
|
|
867
|
+
|
|
868
|
+
"""
|
|
869
|
+
|
|
870
|
+
if attributes is not None:
|
|
871
|
+
dataset.attrs[ATTRIBUTES] = attributes
|
|
872
|
+
|
|
873
|
+
if saving_info is not None:
|
|
874
|
+
dataset.attrs[SAVING_INFO] = saving_info
|
|
875
|
+
|
|
876
|
+
if window is not None:
|
|
877
|
+
dataset.attrs[WINDOW] = window
|
|
878
|
+
|
|
879
|
+
if overlaps is not None:
|
|
880
|
+
dataset.attrs[OVERLAPS] = overlaps
|
|
881
|
+
|
|
882
|
+
if profile is not None:
|
|
883
|
+
dataset.attrs[PROFILE] = profile
|
|
884
|
+
|
|
885
|
+
|
|
886
|
+
def fill_dataframe(dataframe, saving_info=None, attributes=None):
|
|
887
|
+
"""
|
|
888
|
+
From a full pandas dataframe, fill info properly.
|
|
889
|
+
User can fill with saving information (containing CarsDataset id),
|
|
890
|
+
and attributes associated to data
|
|
891
|
+
|
|
892
|
+
|
|
893
|
+
:param dataframe: dataframe to fill
|
|
894
|
+
:type dataframe: pandas dataframe
|
|
895
|
+
:param saving_info: created by Orchestrator.get_saving_infos
|
|
896
|
+
:type saving_info: dict
|
|
897
|
+
:param attributes:
|
|
898
|
+
:type attributes: dict
|
|
899
|
+
|
|
900
|
+
"""
|
|
901
|
+
|
|
902
|
+
if attributes is not None:
|
|
903
|
+
dataframe.attrs[ATTRIBUTES] = attributes
|
|
904
|
+
|
|
905
|
+
if saving_info is not None:
|
|
906
|
+
dataframe.attrs[SAVING_INFO] = saving_info
|
|
907
|
+
|
|
908
|
+
|
|
909
|
+
def fill_dict(data_dict, saving_info=None, attributes=None):
|
|
910
|
+
"""
|
|
911
|
+
From a fulldict, fill info properly.
|
|
912
|
+
User can fill with saving information (containing CarsDataset id),
|
|
913
|
+
and attributes associated to data
|
|
914
|
+
|
|
915
|
+
|
|
916
|
+
:param data_dict: dictionnary to fill
|
|
917
|
+
:type data_dict: Dict
|
|
918
|
+
:param saving_info: created by Orchestrator.get_saving_infos
|
|
919
|
+
:type saving_info: dict
|
|
920
|
+
:param attributes: attributes associated to data
|
|
921
|
+
:type attributes: dict
|
|
922
|
+
|
|
923
|
+
"""
|
|
924
|
+
|
|
925
|
+
# TODO only use CarsDict
|
|
926
|
+
|
|
927
|
+
if isinstance(data_dict, dict):
|
|
928
|
+
if attributes is not None:
|
|
929
|
+
data_dict[ATTRIBUTES] = attributes
|
|
930
|
+
|
|
931
|
+
if saving_info is not None:
|
|
932
|
+
data_dict[SAVING_INFO] = saving_info
|
|
933
|
+
|
|
934
|
+
elif isinstance(data_dict, cars_dict.CarsDict):
|
|
935
|
+
if attributes is not None:
|
|
936
|
+
data_dict.attrs[ATTRIBUTES] = attributes
|
|
937
|
+
|
|
938
|
+
if saving_info is not None:
|
|
939
|
+
data_dict.attrs[SAVING_INFO] = saving_info
|
|
940
|
+
|
|
941
|
+
|
|
942
|
+
def save_all_dataframe( # pylint: disable=too-many-positional-arguments
|
|
943
|
+
dataframe,
|
|
944
|
+
file_name,
|
|
945
|
+
save_by_pair=False,
|
|
946
|
+
overwrite=True,
|
|
947
|
+
point_cloud_format="csv",
|
|
948
|
+
overwrite_file_name=True,
|
|
949
|
+
):
|
|
950
|
+
"""
|
|
951
|
+
Save DataFrame to csv and laz format. The content of dataframe is merged to
|
|
952
|
+
the content of existing saved Dataframe, if overwrite==False
|
|
953
|
+
The option save_by_pair separate the dataframe by pair (one folder by pair)
|
|
954
|
+
|
|
955
|
+
:param file_name: file name to save data to
|
|
956
|
+
:type file_name: str
|
|
957
|
+
:param overwrite: overwrite file if exists
|
|
958
|
+
:type overwrite: bool
|
|
959
|
+
:param point_cloud_format: point cloud format (csv or laz)
|
|
960
|
+
:type point_cloud_format: str
|
|
961
|
+
:param overwrite_file_name: generate a new filename from input filename
|
|
962
|
+
matching input dataframe attributes, using only directory from input
|
|
963
|
+
filename
|
|
964
|
+
:type overwrite_file_name: bool
|
|
965
|
+
"""
|
|
966
|
+
|
|
967
|
+
# generate filename if attributes have xstart and ystart settings
|
|
968
|
+
if overwrite_file_name:
|
|
969
|
+
if (
|
|
970
|
+
"attributes" in dataframe.attrs
|
|
971
|
+
and "xmin" in dataframe.attrs["attributes"]
|
|
972
|
+
):
|
|
973
|
+
file_name = os.path.dirname(file_name)
|
|
974
|
+
file_name = os.path.join(
|
|
975
|
+
file_name,
|
|
976
|
+
(
|
|
977
|
+
str(dataframe.attrs["attributes"]["xmin"])
|
|
978
|
+
+ "_"
|
|
979
|
+
+ str(dataframe.attrs["attributes"]["ymax"])
|
|
980
|
+
),
|
|
981
|
+
)
|
|
982
|
+
elif "saving_info" in dataframe.attrs:
|
|
983
|
+
file_name = os.path.dirname(file_name)
|
|
984
|
+
file_name = os.path.join(
|
|
985
|
+
file_name,
|
|
986
|
+
(
|
|
987
|
+
str(dataframe.attrs["saving_info"]["cars_ds_col"])
|
|
988
|
+
+ "_"
|
|
989
|
+
+ str(dataframe.attrs["saving_info"]["cars_ds_row"])
|
|
990
|
+
),
|
|
991
|
+
)
|
|
992
|
+
if not save_by_pair:
|
|
993
|
+
save_dataframe(dataframe, file_name, overwrite, point_cloud_format)
|
|
994
|
+
else:
|
|
995
|
+
pairing_indexes = set(np.array(dataframe["global_id"]).flat)
|
|
996
|
+
source_pc_names = dataframe.attrs["attributes"]["source_pc_names"]
|
|
997
|
+
for pair_index in pairing_indexes:
|
|
998
|
+
dir_name = os.path.join(
|
|
999
|
+
os.path.dirname(file_name), source_pc_names[int(pair_index)]
|
|
1000
|
+
)
|
|
1001
|
+
safe_makedirs(dir_name)
|
|
1002
|
+
base_name = os.path.basename(file_name)
|
|
1003
|
+
points_indexes = dataframe["global_id"] == pair_index
|
|
1004
|
+
file_name_by_pair = os.path.join(dir_name, base_name)
|
|
1005
|
+
save_dataframe(
|
|
1006
|
+
dataframe.loc[points_indexes],
|
|
1007
|
+
file_name_by_pair,
|
|
1008
|
+
overwrite,
|
|
1009
|
+
point_cloud_format,
|
|
1010
|
+
)
|
|
1011
|
+
|
|
1012
|
+
|
|
1013
|
+
def save_dataframe(
|
|
1014
|
+
dataframe, file_name, overwrite=True, point_cloud_format="csv"
|
|
1015
|
+
):
|
|
1016
|
+
"""
|
|
1017
|
+
Save dataframe (csv, laz, attr file)
|
|
1018
|
+
"""
|
|
1019
|
+
# Save attributes
|
|
1020
|
+
attributes_file_name = file_name + "_attrs.yaml"
|
|
1021
|
+
save_dict(dataframe.attrs, attributes_file_name)
|
|
1022
|
+
|
|
1023
|
+
# Save point cloud to laz format
|
|
1024
|
+
|
|
1025
|
+
if point_cloud_format == "laz":
|
|
1026
|
+
_, extension = os.path.splitext(file_name)
|
|
1027
|
+
if "laz" not in extension:
|
|
1028
|
+
file_name = file_name + ".laz"
|
|
1029
|
+
dataframe_converter.convert_pcl_to_laz(dataframe, file_name)
|
|
1030
|
+
elif point_cloud_format == "csv":
|
|
1031
|
+
_, extension = os.path.splitext(file_name)
|
|
1032
|
+
if "csv" not in extension:
|
|
1033
|
+
file_name = file_name + ".csv"
|
|
1034
|
+
if overwrite and os.path.exists(file_name):
|
|
1035
|
+
dataframe.to_csv(file_name, index=False)
|
|
1036
|
+
else:
|
|
1037
|
+
if os.path.exists(file_name):
|
|
1038
|
+
# merge files
|
|
1039
|
+
existing_dataframe = pandas.read_csv(file_name)
|
|
1040
|
+
merged_dataframe = pandas.concat(
|
|
1041
|
+
[existing_dataframe, dataframe],
|
|
1042
|
+
ignore_index=True,
|
|
1043
|
+
sort=False,
|
|
1044
|
+
)
|
|
1045
|
+
merged_dataframe.to_csv(file_name, index=False)
|
|
1046
|
+
else:
|
|
1047
|
+
dataframe.to_csv(file_name, index=False)
|
|
1048
|
+
else:
|
|
1049
|
+
raise RuntimeError(
|
|
1050
|
+
"Invalid point cloud format {0}".format(point_cloud_format)
|
|
1051
|
+
)
|
|
1052
|
+
|
|
1053
|
+
|
|
1054
|
+
def save_dataset(
|
|
1055
|
+
dataset, file_name, tag, use_windows_and_overlaps=False, descriptor=None
|
|
1056
|
+
):
|
|
1057
|
+
"""
|
|
1058
|
+
Reconstruct and save data.
|
|
1059
|
+
In order to save properly the dataset to corresponding tiff file,
|
|
1060
|
+
dataset must have been filled with saving info, profile, window,
|
|
1061
|
+
overlaps (if not 0), and rasterio descriptor if already created.
|
|
1062
|
+
See fill_dataset.
|
|
1063
|
+
|
|
1064
|
+
:param dataset: dataset to save
|
|
1065
|
+
:type dataset: xr.Dataset
|
|
1066
|
+
:param file_name: file name to save data to
|
|
1067
|
+
:type file_name: str
|
|
1068
|
+
:param tag: tag to reconstruct
|
|
1069
|
+
:type tag: str
|
|
1070
|
+
:param use_windows_and_overlaps: use saved window and overlaps
|
|
1071
|
+
:type use_windows_and_overlaps: bool
|
|
1072
|
+
:param descriptor: descriptor to use with rasterio
|
|
1073
|
+
:type descriptor: rasterio dataset
|
|
1074
|
+
|
|
1075
|
+
"""
|
|
1076
|
+
if dataset is None:
|
|
1077
|
+
logging.error("Tile is None: not saved ")
|
|
1078
|
+
return
|
|
1079
|
+
|
|
1080
|
+
overlaps = get_overlaps_dataset(dataset)
|
|
1081
|
+
window = get_window_dataset(dataset)
|
|
1082
|
+
|
|
1083
|
+
rio_window = None
|
|
1084
|
+
overlap = [0, 0, 0, 0]
|
|
1085
|
+
if use_windows_and_overlaps:
|
|
1086
|
+
if window is None:
|
|
1087
|
+
logging.debug("User wants to use window but none was set")
|
|
1088
|
+
|
|
1089
|
+
else:
|
|
1090
|
+
rio_window = generate_rasterio_window(window)
|
|
1091
|
+
|
|
1092
|
+
if overlaps is not None:
|
|
1093
|
+
overlap = [
|
|
1094
|
+
overlaps["up"],
|
|
1095
|
+
overlaps["down"],
|
|
1096
|
+
overlaps["left"],
|
|
1097
|
+
overlaps["right"],
|
|
1098
|
+
]
|
|
1099
|
+
if len(dataset[tag].values.shape) > 2:
|
|
1100
|
+
nb_rows, nb_cols = (
|
|
1101
|
+
dataset[tag].values.shape[1],
|
|
1102
|
+
dataset[tag].values.shape[2],
|
|
1103
|
+
)
|
|
1104
|
+
|
|
1105
|
+
data = dataset[tag].values[
|
|
1106
|
+
:,
|
|
1107
|
+
overlap[0] : nb_rows - overlap[1],
|
|
1108
|
+
overlap[2] : nb_cols - overlap[3],
|
|
1109
|
+
]
|
|
1110
|
+
else:
|
|
1111
|
+
nb_rows, nb_cols = (
|
|
1112
|
+
dataset[tag].values.shape[0],
|
|
1113
|
+
dataset[tag].values.shape[1],
|
|
1114
|
+
)
|
|
1115
|
+
|
|
1116
|
+
data = dataset[tag].values[
|
|
1117
|
+
overlap[0] : nb_rows - overlap[1],
|
|
1118
|
+
overlap[2] : nb_cols - overlap[3],
|
|
1119
|
+
]
|
|
1120
|
+
|
|
1121
|
+
if tag == cst.EPI_TEXTURE and "int" in descriptor.dtypes[0]:
|
|
1122
|
+
# Prepare color data for cast
|
|
1123
|
+
data = np.nan_to_num(data, nan=descriptor.nodata)
|
|
1124
|
+
data = np.round(data)
|
|
1125
|
+
|
|
1126
|
+
profile = get_profile_for_tag_dataset(dataset, tag)
|
|
1127
|
+
|
|
1128
|
+
new_profile = profile
|
|
1129
|
+
if "width" not in new_profile or "height" not in new_profile:
|
|
1130
|
+
logging.debug("CarsDataset doesn't have a profile, default is given")
|
|
1131
|
+
new_profile = DefaultGTiffProfile(count=new_profile["count"])
|
|
1132
|
+
new_profile["height"] = data.shape[0]
|
|
1133
|
+
new_profile["width"] = data.shape[1]
|
|
1134
|
+
new_profile["dtype"] = "float32"
|
|
1135
|
+
|
|
1136
|
+
bands_description = None
|
|
1137
|
+
if tag in (cst.EPI_CLASSIFICATION, cst.RASTER_CLASSIF, cst.DSM_CLASSIF):
|
|
1138
|
+
bands_description = dataset.coords[cst.BAND_CLASSIF].values
|
|
1139
|
+
if tag in (cst.EPI_TEXTURE, cst.POINT_CLOUD_CLR_KEY_ROOT, cst.DSM_COLOR):
|
|
1140
|
+
bands_description = dataset.coords[cst.BAND_IM].values
|
|
1141
|
+
if tag in (cst.RASTER_SOURCE_PC, cst.DSM_SOURCE_PC):
|
|
1142
|
+
bands_description = dataset.coords[cst.BAND_SOURCE_PC].values
|
|
1143
|
+
if tag in (cst.EPI_FILLING, cst.RASTER_FILLING, cst.DSM_FILLING):
|
|
1144
|
+
bands_description = dataset.coords[cst.BAND_FILLING].values
|
|
1145
|
+
if tag in (
|
|
1146
|
+
cst.RASTER_PERFORMANCE_MAP,
|
|
1147
|
+
cst.RASTER_PERFORMANCE_MAP_RAW,
|
|
1148
|
+
cst.DSM_PERFORMANCE_MAP,
|
|
1149
|
+
):
|
|
1150
|
+
if cst.BAND_PERFORMANCE_MAP in dataset.coords:
|
|
1151
|
+
bands_description = dataset.coords[cst.BAND_PERFORMANCE_MAP].values
|
|
1152
|
+
|
|
1153
|
+
classes_info_tag = None
|
|
1154
|
+
if tag == cst.RASTER_PERFORMANCE_MAP:
|
|
1155
|
+
classes_info_tag = dataset.attrs.get(
|
|
1156
|
+
cst.RIO_TAG_PERFORMANCE_MAP_CLASSES, None
|
|
1157
|
+
)
|
|
1158
|
+
|
|
1159
|
+
outputs.rasterio_write_georaster(
|
|
1160
|
+
file_name,
|
|
1161
|
+
data,
|
|
1162
|
+
new_profile,
|
|
1163
|
+
window=rio_window,
|
|
1164
|
+
descriptor=descriptor,
|
|
1165
|
+
bands_description=bands_description,
|
|
1166
|
+
classes_info_tag=classes_info_tag,
|
|
1167
|
+
)
|
|
1168
|
+
|
|
1169
|
+
|
|
1170
|
+
def create_tile_path(col: int, row: int, directory: str) -> str:
|
|
1171
|
+
"""
|
|
1172
|
+
Create path of tile, according to its position in CarsDataset grid
|
|
1173
|
+
|
|
1174
|
+
:param col: numero of column
|
|
1175
|
+
:type col: int
|
|
1176
|
+
:param row: numero of row
|
|
1177
|
+
:type row: int
|
|
1178
|
+
:param directory: path where to save tile
|
|
1179
|
+
:type directory: str
|
|
1180
|
+
|
|
1181
|
+
:return: full path
|
|
1182
|
+
:rtype: str
|
|
1183
|
+
|
|
1184
|
+
"""
|
|
1185
|
+
|
|
1186
|
+
tail = "col_" + repr(col) + "_row_" + repr(row)
|
|
1187
|
+
name = os.path.join(directory, tail)
|
|
1188
|
+
|
|
1189
|
+
return name
|
|
1190
|
+
|
|
1191
|
+
|
|
1192
|
+
def save_numpy_array(array: np.ndarray, file_name: str):
|
|
1193
|
+
"""
|
|
1194
|
+
Save numpy array to file
|
|
1195
|
+
|
|
1196
|
+
:param array: array to save
|
|
1197
|
+
:type array: np.ndarray
|
|
1198
|
+
:param file_name: numero of row
|
|
1199
|
+
:type file_name: str
|
|
1200
|
+
|
|
1201
|
+
"""
|
|
1202
|
+
|
|
1203
|
+
with open(file_name, "wb") as descriptor:
|
|
1204
|
+
np.save(descriptor, array)
|
|
1205
|
+
|
|
1206
|
+
|
|
1207
|
+
def load_numpy_array(file_name: str) -> np.ndarray:
|
|
1208
|
+
"""
|
|
1209
|
+
Load numpy array from file
|
|
1210
|
+
|
|
1211
|
+
:param file_name: numero of row
|
|
1212
|
+
:type file_name: str
|
|
1213
|
+
|
|
1214
|
+
:return: array
|
|
1215
|
+
:rtype: np.ndarray
|
|
1216
|
+
|
|
1217
|
+
"""
|
|
1218
|
+
with open(file_name, "rb") as descriptor:
|
|
1219
|
+
return np.load(descriptor)
|
|
1220
|
+
|
|
1221
|
+
|
|
1222
|
+
def create_none(nb_row: int, nb_col: int):
|
|
1223
|
+
"""
|
|
1224
|
+
Create a grid filled with None. The created grid is a 2D list :
|
|
1225
|
+
ex: [[None, None], [None, None]]
|
|
1226
|
+
|
|
1227
|
+
:param nb_row: number of rows
|
|
1228
|
+
:param nb_col: number of cols
|
|
1229
|
+
:return: Grid filled with None
|
|
1230
|
+
:rtype: list of list
|
|
1231
|
+
"""
|
|
1232
|
+
grid = []
|
|
1233
|
+
for _ in range(nb_row):
|
|
1234
|
+
tmp = []
|
|
1235
|
+
for _ in range(nb_col):
|
|
1236
|
+
tmp.append(None)
|
|
1237
|
+
grid.append(tmp)
|
|
1238
|
+
return grid
|
|
1239
|
+
|
|
1240
|
+
|
|
1241
|
+
def overlap_array_to_dict(overlap):
    """
    Convert an overlap array to the dict format used in CarsDatasets.
    Input is: [o_up, o_down, o_left, o_right].
    Output is: {"up": o_up, "down": o_down, "left": o_left, "right": o_right}

    :param overlap: overlaps
    :type overlap: List

    :return: New overlaps
    :rtype: Dict

    """
    new_overlap = {
        "up": int(overlap[0]),
        "down": int(overlap[1]),
        "left": int(overlap[2]),
        "right": int(overlap[3]),
    }
    return new_overlap


def window_array_to_dict(window, overlap=None):
    """
    Convert a window array to the dict format used in CarsDatasets.
    Use overlap if you want to get the window with overlaps.
    Inputs are:

    - window : [row_min, row_max, col_min, col_max], with pixel format
    - overlap (optional): [o_row_min, o_row_max, o_col_min, o_col_max]

    Output is:
    {
        "row_min" : row_min - o_row_min,
        "row_max" : row_max + o_row_max,
        "col_min" : col_min - o_col_min,
        "col_max" : col_max + o_col_max,
    }

    :param window: window
    :type window: List
    :param overlap: overlaps
    :type overlap: List

    :return: New window
    :rtype: Dict

    """

    new_window = {
        "row_min": int(window[0]),
        "row_max": int(window[1]),
        "col_min": int(window[2]),
        "col_max": int(window[3]),
    }

    if overlap is not None:
        new_window["row_min"] -= int(overlap[0])
        new_window["row_max"] += int(overlap[1])
        new_window["col_min"] -= int(overlap[2])
        new_window["col_max"] += int(overlap[3])

    return new_window


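# Illustrative usage of the two conversion helpers above (editorial example,
# not part of the packaged module); values are arbitrary:
# >>> overlap_array_to_dict([5, 5, 10, 10])
# {'up': 5, 'down': 5, 'left': 10, 'right': 10}
# >>> window_array_to_dict([0, 100, 0, 200], overlap=[5, 5, 10, 10])
# {'row_min': -5, 'row_max': 105, 'col_min': -10, 'col_max': 210}
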
def dict_profile_to_rio_profile(dict_profile: Dict) -> Dict:
    """
    Transform a serializable dict (a rasterio profile previously converted
    into a dict) back into a rasterio profile.

    :param dict_profile: rasterio profile transformed into a serializable Dict
    :type dict_profile: Dict

    :return: Profile
    :rtype: Rasterio Profile

    """

    rio_profile = copy.copy(dict_profile)

    transform = None
    if "transform" in dict_profile:
        if dict_profile["transform"] is not None:
            transform = rio.Affine(
                *dict_profile["transform"][0:6],
            )
    crs = None
    if "crs" in dict_profile:
        if dict_profile["crs"] is not None:
            if (
                isinstance(dict_profile["crs"], str)
                and "EPSG:" in dict_profile["crs"]
            ):
                crs = pyproj.CRS(dict_profile["crs"].replace("EPSG:", ""))
            else:
                crs = pyproj.CRS(dict_profile["crs"])

    rio_profile["crs"] = crs
    rio_profile["transform"] = transform

    return rio_profile


def rio_profile_to_dict_profile(in_profile: Dict) -> Dict:
    """
    Transform a rasterio profile into a serializable Dict.

    :param in_profile: rasterio profile
    :type in_profile: Dict

    :return: Profile
    :rtype: Dict

    """

    profile = copy.copy(in_profile)

    profile = {**profile}
    crs = None
    if "crs" in profile:
        if profile["crs"] is not None:
            if isinstance(profile["crs"], str):
                crs = profile["crs"]
            else:
                crs = profile["crs"].to_epsg()

    transform = None
    if "transform" in profile:
        if profile["transform"] is not None:
            transform = list(profile["transform"])[:6]

    profile.update(crs=crs, transform=transform)

    return profile


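# Illustrative round trip between the two profile converters above (editorial
# example, not part of the packaged module); assumes the module's "rio" and
# "pyproj" imports are available:
# >>> prof = {"driver": "GTiff", "crs": pyproj.CRS.from_epsg(4326),
# ...         "transform": rio.Affine.identity()}
# >>> rio_profile_to_dict_profile(prof)
# {'driver': 'GTiff', 'crs': 4326, 'transform': [1.0, 0.0, 0.0, 0.0, 1.0, 0.0]}
# >>> dict_profile_to_rio_profile(rio_profile_to_dict_profile(prof))["crs"].to_epsg()
# 4326
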
def save_dict(dictionary, file_path: str):
    """
    Save dict to a YAML file

    :param dictionary: dictionary to save
    :type dictionary: Dict
    :param file_path: file path to use
    :type file_path: str
    """

    dictionary_yaml = make_yaml_safe(dictionary)

    with open(file_path, "w", encoding="utf8") as fstream:
        yaml.safe_dump(
            dictionary_yaml,
            fstream,
            allow_unicode=True,
            sort_keys=False,
        )


def make_yaml_safe(obj):  # pylint: disable=too-many-return-statements
    """
    Recursively convert an object into YAML-serializable builtin types
    (dicts, lists, tuples and Python scalars).
    """
    if isinstance(obj, dict):
        return {make_yaml_safe(k): make_yaml_safe(v) for k, v in obj.items()}
    if isinstance(obj, list):
        return [make_yaml_safe(v) for v in obj]
    if isinstance(obj, tuple):
        return tuple(make_yaml_safe(v) for v in obj)
    if isinstance(obj, (np.generic, np.number)):
        return obj.item()

    return obj


def load_dict(file_path: str) -> Dict:
    """
    Load dict from a YAML file

    :param file_path: file path to use
    :type file_path: str

    """

    with open(file_path, "r", encoding="utf8") as fstream:
        dictionary = yaml.safe_load(fstream)

    return dictionary


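# Illustrative usage of save_dict / load_dict (editorial example, not part of
# the packaged module); "metadata.yaml" is a hypothetical path, numpy scalars
# are converted by make_yaml_safe and tuples come back as YAML lists:
# >>> save_dict({"epsg": np.int32(32631), "res": (0.5, 0.5)}, "metadata.yaml")
# >>> load_dict("metadata.yaml")
# {'epsg': 32631, 'res': [0.5, 0.5]}
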
def separate_dicts(dictionary, list_tags):
    """
    Separate a dict into two, the second one containing the given tags.

    For example, {key1: val1, key2: val2, key3: val3}
    with list_tags = [key2] will be split into:
    {key1: val1, key3: val3} and {key2: val2}

    """

    dict1 = {}
    dict2 = {}

    for key in dictionary:
        if key in list_tags:
            dict2[key] = dictionary[key]
        else:
            dict1[key] = dictionary[key]

    return dict1, dict2


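# Illustrative usage of separate_dicts (editorial example, not part of the
# packaged module):
# >>> separate_dicts({"a": 1, "b": 2, "c": 3}, ["b"])
# ({'a': 1, 'c': 3}, {'b': 2})
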
def get_attributes(obj):
    """
    Get attributes in .attrs of dataset or dataframe

    :param obj: dataset or dataframe
    :type obj: xr.Dataset or pandas.DataFrame
    """

    return obj.attrs.get(ATTRIBUTES, None)


def get_window_dataset(dataset):
    """
    Get window in dataset

    :param dataset: dataset
    :type dataset: xr.Dataset
    """

    return dataset.attrs.get(WINDOW, None)


def get_overlaps_dataset(dataset):
    """
    Get overlaps in dataset

    :param dataset: dataset
    :type dataset: xr.Dataset
    """

    return dataset.attrs.get(OVERLAPS, None)


def get_profile_rasterio(dataset):
    """
    Get profile in dataset

    :param dataset: dataset
    :type dataset: xr.Dataset
    """

    return dataset.attrs.get(PROFILE, None)


def get_profile_for_tag_dataset(dataset, tag: str) -> Dict:
    """
    Get profile according to the layer to save.
    This function modifies the current rasterio profile to match the number
    of bands of the data associated with the given tag.

    :param dataset: dataset
    :type dataset: xr.Dataset
    :param tag: tag to use
    :type tag: str

    :return: Profile
    :rtype: Rasterio Profile

    """

    new_profile = get_profile_rasterio(dataset)
    if new_profile is None:
        new_profile = {}

    new_profile["count"] = 1
    if len(dataset[tag].values.shape) > 2:
        new_profile["count"] = dataset[tag].values.shape[0]

    return new_profile


def generate_rasterio_window(window: Dict) -> rio.windows.Window:
    """
    Generate rasterio window to use.

    :param window: window to convert, containing 'row_min',
        'row_max', 'col_min', 'col_max'
    :type window: dict

    :return: rasterio window
    :rtype: rio.windows.Window

    """
    returned_window = None

    if window is not None:
        return Window.from_slices(
            (window["row_min"], window["row_max"]),
            (window["col_min"], window["col_max"]),
        )

    return returned_window
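# Illustrative usage of generate_rasterio_window (editorial example, not part
# of the packaged module); the returned object is a rasterio Window built with
# Window.from_slices, covering rows [0, 100) and columns [10, 60):
# >>> generate_rasterio_window(
# ...     {"row_min": 0, "row_max": 100, "col_min": 10, "col_max": 60}
# ... )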