ign_borea-0.1.5-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- borea/__init__.py +0 -0
- borea/datastruct/__init__.py +0 -0
- borea/datastruct/camera.py +25 -0
- borea/datastruct/dtm.py +119 -0
- borea/datastruct/gcp.py +22 -0
- borea/datastruct/shot.py +222 -0
- borea/datastruct/workdata.py +220 -0
- borea/format/__init__.py +0 -0
- borea/format/conl.py +143 -0
- borea/format/rpc.py +244 -0
- borea/geodesy/__init__.py +0 -0
- borea/geodesy/approx_euclidean_proj.py +91 -0
- borea/geodesy/euclidean_proj.py +25 -0
- borea/geodesy/local_euclidean_proj.py +127 -0
- borea/geodesy/proj_engine.py +70 -0
- borea/geodesy/projectionlist/__init__.py +0 -0
- borea/geodesy/projectionlist/search_proj.py +60 -0
- borea/geodesy/transform_geodesy.py +114 -0
- borea/process/__init__.py +0 -0
- borea/process/p_add_data/__init__.py +0 -0
- borea/process/p_add_data/p_add_shot.py +63 -0
- borea/process/p_add_data/p_file_gcp2d.py +55 -0
- borea/process/p_add_data/p_file_gcp3d.py +53 -0
- borea/process/p_add_data/p_gen_param.py +76 -0
- borea/process/p_add_data/p_pt2d.py +48 -0
- borea/process/p_add_data/p_pt3d.py +48 -0
- borea/process/p_add_data/p_unit_shot.py +48 -0
- borea/process/p_add_data/p_write.py +23 -0
- borea/process/p_format/__init__.py +0 -0
- borea/process/p_format/p_read_opk.py +78 -0
- borea/process/p_format/p_write_con.py +36 -0
- borea/process/p_format/p_write_opk.py +64 -0
- borea/process/p_format/p_write_rpc.py +48 -0
- borea/process/p_func/__init__.py +0 -0
- borea/process/p_func/p_control.py +67 -0
- borea/process/p_func/p_image_world.py +48 -0
- borea/process/p_func/p_spaceresection.py +51 -0
- borea/process/p_func/p_world_image.py +49 -0
- borea/reader/__init__.py +0 -0
- borea/reader/orientation/__init__.py +0 -0
- borea/reader/orientation/manage_reader.py +33 -0
- borea/reader/orientation/reader_opk.py +58 -0
- borea/reader/reader_camera.py +52 -0
- borea/reader/reader_point.py +113 -0
- borea/stat/__init__.py +0 -0
- borea/stat/statistics.py +215 -0
- borea/transform_world_image/__init__.py +0 -0
- borea/transform_world_image/transform_dtm/__init__.py +0 -0
- borea/transform_world_image/transform_dtm/world_image_dtm.py +47 -0
- borea/transform_world_image/transform_shot/__init__.py +0 -0
- borea/transform_world_image/transform_shot/conversion_coor_shot.py +58 -0
- borea/transform_world_image/transform_shot/image_world_shot.py +153 -0
- borea/transform_world_image/transform_shot/world_image_shot.py +117 -0
- borea/transform_world_image/transform_worksite/__init__.py +0 -0
- borea/transform_world_image/transform_worksite/image_world_intersection.py +154 -0
- borea/transform_world_image/transform_worksite/image_world_least_square.py +184 -0
- borea/transform_world_image/transform_worksite/image_world_work.py +49 -0
- borea/transform_world_image/transform_worksite/space_resection.py +343 -0
- borea/transform_world_image/transform_worksite/world_image_work.py +43 -0
- borea/utils/__init__.py +0 -0
- borea/utils/check/__init__.py +0 -0
- borea/utils/check/check_args_opk.py +59 -0
- borea/utils/check/check_args_reader_pt.py +44 -0
- borea/utils/check/check_array.py +56 -0
- borea/utils/check/check_header.py +90 -0
- borea/utils/check/check_order_axe.py +50 -0
- borea/utils/miscellaneous/__init__.py +0 -0
- borea/utils/miscellaneous/miscellaneous.py +83 -0
- borea/utils/miscellaneous/param_bundle.py +36 -0
- borea/utils/miscellaneous/sparse.py +31 -0
- borea/utils/singleton/__init__.py +0 -0
- borea/utils/singleton/singleton.py +23 -0
- borea/utils/xml/__init__.py +0 -0
- borea/utils/xml/xml.py +63 -0
- borea/worksite/__init__.py +0 -0
- borea/worksite/worksite.py +240 -0
- borea/writer/__init__.py +0 -0
- borea/writer/manage_writer.py +23 -0
- borea/writer/writer_con.py +29 -0
- borea/writer/writer_df_to_txt.py +32 -0
- borea/writer/writer_opk.py +70 -0
- borea/writer/writer_rpc.py +55 -0
- borea_tools/__init__.py +0 -0
- borea_tools/opk_control.py +33 -0
- borea_tools/opk_to_conl.py +33 -0
- borea_tools/opk_to_opk.py +33 -0
- borea_tools/opk_to_rpc.py +33 -0
- borea_tools/pt_image_to_world.py +32 -0
- borea_tools/pt_world_to_image.py +32 -0
- borea_tools/ptfile_image_to_world.py +32 -0
- borea_tools/ptfile_world_to_image.py +32 -0
- borea_tools/spaceresection_opk.py +34 -0
- ign_borea-0.1.5.dist-info/LICENSE +21 -0
- ign_borea-0.1.5.dist-info/METADATA +274 -0
- ign_borea-0.1.5.dist-info/RECORD +98 -0
- ign_borea-0.1.5.dist-info/WHEEL +5 -0
- ign_borea-0.1.5.dist-info/entry_points.txt +10 -0
- ign_borea-0.1.5.dist-info/top_level.txt +2 -0
borea/format/conl.py
ADDED
@@ -0,0 +1,143 @@
"""
Class to write shot in conical IGN format
"""
from datetime import datetime
from pathlib import Path
import xml.etree.ElementTree as ET
from dataclasses import dataclass

import numpy as np
from borea.datastruct.camera import Camera
from borea.datastruct.shot import Shot
from borea.utils.xml.xml import format_xml, indent, add_elem

B_TIME = ["year", "month", "day", "hour", "minute", "second", "time_system"]
B_PT3D = ["x", "y", "z"]
B_ORI = ["easting", "northing", "altitude"]
B_TCAM = ["width", "height"]
B_CAM = ["c", "l", "focale"]


@dataclass
class Conl:
    """
    Class for light conical shot.
    File reading by software Geoview IGN on MacOs.

    Args:
        shot (Shot): Shot to convert in conical file.
        cam (Camera): Camera of the shot.
        proj (str): Projection of the shot for GEOVIEW.
    """
    shot: Shot
    cam: Camera
    proj: str

    def save_conl(self, path_conical: Path, linalt: bool = True) -> None:
        """
        Save the shot as light conical file.

        Args:
            path_conical (Path): path to the light conical file.
            linalt (bool): If you want z shot corrected by linear alteration.
        """
        date_now = datetime.now()

        # Scale factor correction
        self.shot.set_linear_alteration(linalt)

        # creation XML
        ori = ET.Element("orientation", {})
        ET.SubElement(ori, "lastmodificationbylibori",
                      {'date': date_now.strftime("%Y-%m-%d"),
                       "time": date_now.strftime("%H h %M min %S sec")})
        ET.SubElement(ori, "version").text = format_xml("1.0")

        auxiliary_data = ET.SubElement(ori, "auxiliarydata")
        ET.SubElement(auxiliary_data, "image_name").text = format_xml(self.shot.name_shot)

        image_date = ET.SubElement(auxiliary_data, "image_date")
        add_elem(image_date, B_TIME, [0, 0, 0, 0, 0, 0, None])

        ET.SubElement(auxiliary_data, "samples")

        self.set_geometry_xml(ori)

        tree = ET.ElementTree(ori)
        indent(ori)
        tree.write(path_conical, encoding='utf-8', xml_declaration=True)

    def set_geometry_xml(self, ori: ET) -> None:
        """
        Setup balise geometry of xml.

        Args:
            ori (ET): The xml.
        """
        pos_shot = np.round(np.copy(self.shot.pos_shot), 5)

        geometry = ET.SubElement(ori, "geometry", {'type': "physique"})

        extrinseque = ET.SubElement(geometry, "extrinseque")

        systeme = ET.SubElement(extrinseque, "systeme")

        euclidien = ET.SubElement(systeme, "euclidien", {'type': "MATISRTL"})
        add_elem(euclidien, B_PT3D[:2], pos_shot[:2])

        ET.SubElement(systeme, "geodesique").text = format_xml(self.proj).upper()

        ET.SubElement(extrinseque, "grid_alti").text = format_xml("UNKNOWN")

        sommet = ET.SubElement(extrinseque, "sommet")
        add_elem(sommet, B_ORI, [0, 0, pos_shot[2]])

        rotation = ET.SubElement(extrinseque, "rotation")
        ET.SubElement(rotation, "Image2Ground").text = format_xml('false')

        mat3d = ET.SubElement(rotation, "mat3d")

        l1_pt3d = ET.SubElement(ET.SubElement(mat3d, "l1"), "pt3d")
        add_elem(l1_pt3d, B_PT3D, [self.shot.mat_rot[0][0],
                                   self.shot.mat_rot[0][1],
                                   self.shot.mat_rot[0][2]], ".16f")

        l2_pt3d = ET.SubElement(ET.SubElement(mat3d, "l2"), "pt3d")
        add_elem(l2_pt3d, B_PT3D, [self.shot.mat_rot[1][0],
                                   self.shot.mat_rot[1][1],
                                   self.shot.mat_rot[1][2]], ".16f")

        l3_pt3d = ET.SubElement(ET.SubElement(mat3d, "l3"), "pt3d")
        add_elem(l3_pt3d, B_PT3D, [self.shot.mat_rot[2][0],
                                   self.shot.mat_rot[2][1],
                                   self.shot.mat_rot[2][2]], ".16f")

        self.set_camera_xml(geometry)

    def set_camera_xml(self, geometry: ET) -> None:
        """
        Setup balise camera of xml.

        Args:
            geometry (ET): The xml.
        """
        intrinseque = ET.SubElement(geometry, "intrinseque")

        sensor = ET.SubElement(intrinseque, "sensor")
        ET.SubElement(sensor, "name").text = self.cam.name_camera

        calibration_date = ET.SubElement(sensor, "calibration_date")
        add_elem(calibration_date, B_TIME, [0, 0, 0, 0, 0, 0, None])

        ET.SubElement(sensor, "serial_number").text = format_xml("UNKNOWN")

        image_size = ET.SubElement(sensor, "image_size")
        add_elem(image_size, B_TCAM, [self.cam.width, self.cam.height])

        sensor_size = ET.SubElement(sensor, "sensor_size")
        add_elem(sensor_size, B_TCAM, [self.cam.width, self.cam.height])

        ppa = ET.SubElement(sensor, "ppa")
        add_elem(ppa, B_CAM, [int(self.cam.ppax), int(self.cam.ppay), int(self.cam.focal)])

        ET.SubElement(sensor, "pixel_size").text = str(0.000004)
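Usage note (not part of the diff): a minimal sketch of driving Conl, assuming the shot and cam objects were already produced by the package's readers (reader_opk.py, reader_camera.py); the helper write_conical and its output naming are illustrative only.

from pathlib import Path

from borea.format.conl import Conl


def write_conical(shot, cam, proj: str, out_dir: Path) -> Path:
    # shot/cam are assumed to come from the package readers; only the attributes
    # Conl uses above (name_shot, pos_shot, mat_rot, width, height, ...) matter here.
    out_path = out_dir / f"{shot.name_shot}.XML"
    Conl(shot=shot, cam=cam, proj=proj).save_conl(out_path, linalt=True)
    return out_path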
borea/format/rpc.py
ADDED
@@ -0,0 +1,244 @@
"""
Class module for Rpc
"""
import numpy as np
from borea.datastruct.shot import Shot
from borea.datastruct.camera import Camera
from borea.geodesy.proj_engine import ProjEngine
from borea.datastruct.dtm import Dtm
from borea.transform_world_image.transform_shot.image_world_shot import ImageWorldShot
from borea.transform_world_image.transform_shot.world_image_shot import WorldImageShot
from borea.utils.miscellaneous.miscellaneous import normalize


class Rpc:
    """
    A class for computing RPC geometries.

    .. note::
        A RPC is always describing a transformation from
        geographical coordinates to images coordinates.
    """
    def __init__(self) -> None:
        self.param_rpc = {}
        self.fact_rpc = None

    @classmethod
    def from_shot(cls, shot: Shot, cam: Camera, param_rpc: dict, unit_data: dict) -> None:
        """
        Calculate RPC of the shot.

        Args:
            shot (Shot): Shot to calculate rpc.
            cam (Camera): Camera of the shot.
            param_rpc (dict): Dictionary of parameters for rpc calculation.
                key;
                "size_grid"; size of the grip to calcule rpc.
                "order"; order of the polynome of the rpc.
                "fact_rpc"; rpc factor for world coordinate when src is not WGS84.
            unit_data (dict): Dictionary of data unity.
                key;
                "unit_z_data"; Unity of z data.
                "unit_z_shot"; Unity of z shot.

        Returns:
            Rpc: The object rpc.
        """
        obj = cls()
        obj.fact_rpc = param_rpc["fact_rpc"]
        obj.param_rpc["ERR_BIAS"] = -1
        obj.param_rpc["ERR_RAND"] = -1

        grid_img, grid_world = obj.create_grid_rpc(shot, cam, unit_data, param_rpc["size_grid"])

        img_norm, world_norm = obj.normalize_data(grid_img, grid_world)

        coeffx = obj.least_square_rpc(img_norm[0], world_norm, param_rpc["order"])
        coeffy = obj.least_square_rpc(img_norm[1], world_norm, param_rpc["order"])

        # Save param
        obj.param_rpc["SAMP_NUM_COEFF"] = coeffx[0:20]  # X numerator
        obj.param_rpc["SAMP_DEN_COEFF"] = coeffx[20:]  # X denominator
        obj.param_rpc["LINE_NUM_COEFF"] = coeffy[0:20]  # Y numerator
        obj.param_rpc["LINE_DEN_COEFF"] = coeffy[20:]  # Y denominator
        return obj

    def create_grid_rpc(self, shot: Shot, cam: Camera,
                        unit_data: dict, size_grid: int = 100) -> tuple:
        """
        Creation of a grid on a size_grid buffer around the acquisition.

        Args:
            shot (Shot): The shot to create the grid.
            cam (Camera): The camera of the shot.
            unit_data (dict): Dictionary of data unity.
                key;
                "unit_z_data"; Unity of z data.
                "unit_z_shot"; Unity of z shot.
            size_grid (int): The size of the grid to add around the shot.

        Returns:
            tuple: Image coordinates [col, line] and world coordinates [X, Y, Z] of the grid.
        """
        corner_img = np.array([[0, cam.width, 0, cam.width], [0, cam.height, cam.height, 0]])
        pt_w = ImageWorldShot(shot, cam).image_to_world(corner_img,
                                                        unit_data["unit_z_data"],
                                                        unit_data["unit_z_shot"])
        pt_min = np.min(pt_w, axis=1)[0:2] - size_grid - 1
        pt_max = np.max(pt_w, axis=1)[0:2] + size_grid + 1
        x, y = np.mgrid[int(pt_min[0]):int(pt_max[0]) + size_grid:size_grid,
                        int(pt_min[1]):int(pt_max[1]) + size_grid:size_grid]
        x, y = x.ravel(), y.ravel()
        z = np.squeeze(Dtm().get_z_world(np.array([x, y])))
        pt_img = WorldImageShot(shot, cam).world_to_image(np.array([x, y, z]),
                                                          unit_data["unit_z_data"],
                                                          unit_data["unit_z_shot"])
        return pt_img, np.array([x, y, z])

    def normalize_data(self, grid_img: np.ndarray, grid_world: np.ndarray) -> tuple:
        """
        Normalize data grid to calculate Rpc.

        Args:
            grid_img (np.array): Image coordinates [c, l] of the grid.
            grid_world (np.array): World coordinates [x, y, z] of the grid.

        Returns:
            tuple: grid image normalize and grid world normalize.
        """
        if self.fact_rpc is None:
            x_geog, y_geog, z_geog = ProjEngine().carto_to_geog(grid_world[0],
                                                                grid_world[1],
                                                                grid_world[2])
        else:
            x_geog = grid_world[0]*self.fact_rpc
            y_geog = grid_world[1]*self.fact_rpc
            z_geog = grid_world[2]

        # Normalization of image coordinates
        col_n_o_s = normalize(grid_img[0])
        lin_n_o_s = normalize(grid_img[1])

        # Normalization of world coordinates
        x_n_o_s = normalize(x_geog)
        y_n_o_s = normalize(y_geog)
        z_n_o_s = normalize(z_geog)

        self.param_rpc["LINE_OFF"] = lin_n_o_s[1]
        self.param_rpc["SAMP_OFF"] = col_n_o_s[1]
        self.param_rpc["LAT_OFF"] = y_n_o_s[1]
        self.param_rpc["LONG_OFF"] = x_n_o_s[1]
        self.param_rpc["HEIGHT_OFF"] = z_n_o_s[1]
        self.param_rpc["LINE_SCALE"] = lin_n_o_s[2]
        self.param_rpc["SAMP_SCALE"] = col_n_o_s[2]
        self.param_rpc["LAT_SCALE"] = y_n_o_s[2]
        self.param_rpc["LONG_SCALE"] = x_n_o_s[2]
        self.param_rpc["HEIGHT_SCALE"] = z_n_o_s[2]

        coor_img = np.array([col_n_o_s[0], lin_n_o_s[0]])
        coor_world = np.array([x_n_o_s[0], y_n_o_s[0], z_n_o_s[0]])

        return coor_img, coor_world

    def least_square_rpc(self, img_norm: np.ndarray, world_norm: np.ndarray,
                         polynomial_degree: int) -> np.ndarray:
        """
        Calcule Rpc of the shot by least square methode for one image coordinate.

        Args:
            img_norm (np.array): Normalize coordinates column or line of point in image.
            world_norm (np.array): Normalize coordinate of world grid.
            polynomial_degree (int): Degree of the polynomial of the rpc (1, 2, 3).

        Returns:
            np.array: Rpc coefficients.
        """
        mat_obs = self.setup_matrix_obs_rpc(img_norm, world_norm, polynomial_degree)
        x = np.linalg.lstsq(mat_obs, img_norm, rcond=None)[0]

        coef_rpc = np.zeros(40)
        if polynomial_degree == 1:
            coef_rpc[0:4] = x[0:4]
            coef_rpc[20] = 1
            coef_rpc[21:24] = x[4:7]

        elif polynomial_degree == 2:
            coef_rpc[0:10] = x[0:10]
            coef_rpc[20] = 1
            coef_rpc[21:30] = x[10:19]

        elif polynomial_degree == 3:
            coef_rpc[0:20] = x[0:20]
            coef_rpc[20] = 1
            coef_rpc[21:40] = x[20:39]

        return coef_rpc

    def setup_matrix_obs_rpc(self, img_norm: np.ndarray, world_norm: np.ndarray,
                             polynomial_degree: int) -> np.ndarray:
        """
        Setup observation matrix of the rpc for least square methode.

        Args:
            img_norm (np.array): Normalize coordinates column or line of point in image.
            world_norm (np.array): Normalize coordinate of world grid.
            polynomial_degree (int): Degree of the polynomial of the rpc (1, 2, 3).

        Returns:
            np.array: Observation matrix of the Rpc.
        """
        mat_a1 = []
        mat_a2 = []
        ligne_a = np.ones((len(img_norm)))
        mat_a1.append(ligne_a)

        if polynomial_degree in [1, 2, 3]:
            mat_a1.append(ligne_a * world_norm[0])
            mat_a1.append(ligne_a * world_norm[1])
            mat_a1.append(ligne_a * world_norm[2])

            mat_a2.append(ligne_a * -img_norm * world_norm[0])
            mat_a2.append(ligne_a * -img_norm * world_norm[1])
            mat_a2.append(ligne_a * -img_norm * world_norm[2])
        else:
            raise ValueError("The degree of the polynomial must be 1, 2 or 3")

        if polynomial_degree in [2, 3]:
            mat_a1.append(ligne_a * world_norm[0] * world_norm[1])
            mat_a1.append(ligne_a * world_norm[0] * world_norm[2])
            mat_a1.append(ligne_a * world_norm[1] * world_norm[2])
            mat_a1.append(ligne_a * world_norm[0] * world_norm[0])
            mat_a1.append(ligne_a * world_norm[1] * world_norm[1])
            mat_a1.append(ligne_a * world_norm[2] * world_norm[2])

            mat_a2.append(ligne_a * -img_norm * world_norm[0] * world_norm[1])
            mat_a2.append(ligne_a * -img_norm * world_norm[0] * world_norm[2])
            mat_a2.append(ligne_a * -img_norm * world_norm[1] * world_norm[2])
            mat_a2.append(ligne_a * -img_norm * world_norm[0] * world_norm[0])
            mat_a2.append(ligne_a * -img_norm * world_norm[1] * world_norm[1])
            mat_a2.append(ligne_a * -img_norm * world_norm[2] * world_norm[2])

        if polynomial_degree == 3:
            mat_a1.append(ligne_a * world_norm[0] * world_norm[1] * world_norm[2])
            mat_a1.append(ligne_a * world_norm[0] * world_norm[0] * world_norm[0])
            mat_a1.append(ligne_a * world_norm[0] * world_norm[1] * world_norm[1])
            mat_a1.append(ligne_a * world_norm[0] * world_norm[2] * world_norm[2])
            mat_a1.append(ligne_a * world_norm[0] * world_norm[0] * world_norm[1])
            mat_a1.append(ligne_a * world_norm[1] * world_norm[1] * world_norm[1])
            mat_a1.append(ligne_a * world_norm[1] * world_norm[2] * world_norm[2])
            mat_a1.append(ligne_a * world_norm[0] * world_norm[0] * world_norm[2])
            mat_a1.append(ligne_a * world_norm[1] * world_norm[1] * world_norm[2])
            mat_a1.append(ligne_a * world_norm[2] * world_norm[2] * world_norm[2])

            mat_a2.append(ligne_a * -img_norm * world_norm[0] * world_norm[1] * world_norm[2])
            mat_a2.append(ligne_a * -img_norm * world_norm[0] * world_norm[0] * world_norm[0])
            mat_a2.append(ligne_a * -img_norm * world_norm[0] * world_norm[1] * world_norm[1])
            mat_a2.append(ligne_a * -img_norm * world_norm[0] * world_norm[2] * world_norm[2])
            mat_a2.append(ligne_a * -img_norm * world_norm[0] * world_norm[0] * world_norm[1])
            mat_a2.append(ligne_a * -img_norm * world_norm[1] * world_norm[1] * world_norm[1])
            mat_a2.append(ligne_a * -img_norm * world_norm[1] * world_norm[2] * world_norm[2])
            mat_a2.append(ligne_a * -img_norm * world_norm[0] * world_norm[0] * world_norm[2])
            mat_a2.append(ligne_a * -img_norm * world_norm[1] * world_norm[1] * world_norm[2])
            mat_a2.append(ligne_a * -img_norm * world_norm[2] * world_norm[2] * world_norm[2])

        return np.concatenate((np.array(mat_a1), np.array(mat_a2)), axis=0).T
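Usage note (not part of the diff): a hedged sketch of calling Rpc.from_shot with the two parameter dictionaries documented above. The z-unit labels and the helper compute_rpc are assumptions for illustration only; Dtm and ProjEngine are singletons that must have been initialised elsewhere, since create_grid_rpc and normalize_data call them.

from borea.format.rpc import Rpc


def compute_rpc(shot, cam, order: int = 3) -> Rpc:
    # "fact_rpc" = None means world coordinates go through ProjEngine().carto_to_geog();
    # a float value would instead scale x/y directly (see normalize_data above).
    param_rpc = {"size_grid": 100, "order": order, "fact_rpc": None}
    # The z-unit labels below are placeholders; the accepted strings are defined by the
    # shot/DTM unit handling, which is not part of this file.
    unit_data = {"unit_z_data": "altitude", "unit_z_shot": "altitude"}
    return Rpc.from_shot(shot, cam, param_rpc, unit_data)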
borea/geodesy/__init__.py
File without changes
borea/geodesy/approx_euclidean_proj.py
ADDED
@@ -0,0 +1,91 @@
"""
A module for manipulating an approximate euclidean projection.
"""
import numpy as np
from borea.geodesy.euclidean_proj import EuclideanProj


class ApproxEuclideanProj(EuclideanProj):
    """
    This class represents an approximate Euclidean projection system.

    Args:
        x_central (float): x coordinate of the central point of the Euclidean system.
        y_central (float): y coordinate of the central point of the Euclidean system.

    .. note::
        A Euclidean system is a local Euclidean reference system in which
        the collinear equation is valid.
    """
    def __init__(self, x_central: float, y_central: float) -> None:
        super().__init__(x_central, y_central)
        self.earth_raduis = 6366000

    def mat_to_mat_eucli(self, x: float, y: float, mat: np.ndarray) -> np.ndarray:
        """
        Transform the rotation matrix (World system) into rotation matrix (Euclidian systeme).

        Args:
            x (float): x coordinate of the point.
            y (float): y coordinate of the point.
            mat (np.array): Rotation matrix (World system).

        Returns:
            np.array: Euclidean rotation matrix.
        """
        _, _ = x, y
        return mat

    def mat_eucli_to_mat(self, x: float, y: float, mat_eucli: np.ndarray) -> np.ndarray:
        """
        Transform the rotation matrix (Euclidean system) into rotation matrix (World system).

        Args:
            x (float): x coordinate of the point.
            y (float): y coordinate of the point.
            mat_eucli (np.array): Rotation matrix (Euclidean system).

        Returns:
            np.array: Rotation matrix (World system).
        """
        _, _ = x, y
        return mat_eucli

    def world_to_eucli(self, coor_world: np.ndarray) -> np.ndarray:
        """
        Transform a point from the world coordinate reference system into
        the Euclidean coordinate reference system.

        Args:
            coor_world (np.array): World coordinate of the point [X, Y, Z].

        Returns:
            np.array: Point in the Euclidean coordinate reference system.
        """
        dx = coor_world[0] - self.pt_central[0]
        dy = coor_world[1] - self.pt_central[1]
        tt = (dx ** 2 + dy ** 2) / (4 * self.earth_raduis)
        cc = (self.earth_raduis + coor_world[2]) / (tt + self.earth_raduis)
        xe = dx * cc
        ye = dy * cc
        ze = ((self.earth_raduis - tt) * cc) - self.earth_raduis
        return np.array([xe, ye, ze])

    def eucli_to_world(self, coor_eucli: np.ndarray) -> np.ndarray:
        """
        Transform a point from the Euclidean coordinate reference system into
        the world coordinate reference system.

        Args:
            coor_eucli: Euclidean coordinate of the point [X, Y, Z].

        Returns:
            np.array: Point in the World coordinate reference system.
        """
        rz = self.earth_raduis + coor_eucli[2]
        rh = (rz**2 + coor_eucli[0]**2 + coor_eucli[1]**2)**0.5
        rz = (2*self.earth_raduis)/(rz + rh)
        x = self.pt_central[0] + (coor_eucli[0] * rz)
        y = self.pt_central[1] + (coor_eucli[1] * rz)
        z = rh - self.earth_raduis
        return np.array([x, y, z])
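Usage note (not part of the diff): a small, self-contained round-trip check of the approximate projection above; the central point and test coordinates are arbitrary sample values.

import numpy as np

from borea.geodesy.approx_euclidean_proj import ApproxEuclideanProj

proj = ApproxEuclideanProj(x_central=657000.0, y_central=6860000.0)  # arbitrary centre
pt_world = np.array([657100.0, 6860200.0, 180.0])                    # arbitrary point

pt_eucli = proj.world_to_eucli(pt_world)
pt_back = proj.eucli_to_world(pt_eucli)
print(np.allclose(pt_back, pt_world))  # True: the two transforms invert each other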
borea/geodesy/euclidean_proj.py
ADDED
@@ -0,0 +1,25 @@
"""
A module for manipulating an euclidean projection.
"""
from dataclasses import dataclass
import numpy as np


@dataclass
class EuclideanProj:
    """
    This class represents a Euclidean projection system.

    Args:
        x_central (float): x coordinate of the central point of the Euclidean system.
        y_central (float): y coordinate of the central point of the Euclidean system.

    .. note::
        A Euclidean system is a local Euclidean reference system in which
        the collinear equation is valid.
    """
    x_central: float
    y_central: float

    def __post_init__(self) -> None:
        self.pt_central = np.array([self.x_central, self.y_central, 0])
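Usage note (not part of the diff): the base class is a plain dataclass whose __post_init__ exposes the central point used by its subclasses; a one-line sketch with arbitrary coordinates.

from borea.geodesy.euclidean_proj import EuclideanProj

base = EuclideanProj(657000.0, 6860000.0)  # arbitrary centre point
print(base.pt_central)                     # central point as [x_central, y_central, 0]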
borea/geodesy/local_euclidean_proj.py
ADDED
@@ -0,0 +1,127 @@
"""
A module for manipulating a local euclidean projection.
"""
import numpy as np
from scipy.spatial.transform import Rotation as R
from borea.geodesy.proj_engine import ProjEngine
from borea.geodesy.euclidean_proj import EuclideanProj
from borea.utils.check.check_array import check_array_transfo


class LocalEuclideanProj(EuclideanProj):
    """
    This class represents a Euclidean projection system.

    Args:
        x_central (float): x coordinate of the central point of the Euclidean system.
        y_central (float): y coordinate of the central point of the Euclidean system.

    .. note::
        A Euclidean system is a local Euclidean reference system in which
        the collinear equation is valid.
        The cartographic system parameters must be known.
    """
    def __init__(self, x_central: float, y_central: float) -> None:
        super().__init__(x_central, y_central)
        self.rot_to_euclidean_local = self.mat_rot_euclidean_local(x_central, y_central)

    def mat_rot_euclidean_local(self, x: float, y: float) -> np.ndarray:
        """
        Compute the transition matrix between the world system and
        the Euclidean system centred on a point.

        Args:
            x (float): x coordinate of the central point of the Euclidean system.
            y (float): y coordinate of the central point of the Euclidean system.

        Returns:
            np.array: Transition matrix.
        """
        lon, lat = ProjEngine().carto_to_geog(x, y)
        gamma = ProjEngine().get_meridian_convergence(x, y)

        # Matrix for switching to local cartesian coordinates
        rot_ecef_to_eucli = R.from_euler("ZYZ", np.array([lon, 90 - lat, 90 + gamma]),
                                         degrees=True).inv()
        return rot_ecef_to_eucli.as_matrix()

    def mat_to_mat_eucli(self, x: float, y: float, mat: np.ndarray) -> np.ndarray:
        """
        Transform the rotation matrix (World system) into rotation matrix (Euclidian systeme).

        Args:
            x (float): x coordinate of the point.
            y (float): y coordinate of the point.
            mat (np.array): Rotation matrix (World system).

        Returns:
            np.array: Euclidean rotation matrix.
        """
        # We are in the projection system, we pass into the local tangeant system
        matecef_to_rtl = self.mat_rot_euclidean_local(x, y)
        mat_eucli = mat @ matecef_to_rtl @ self.rot_to_euclidean_local.T
        return mat_eucli

    def mat_eucli_to_mat(self, x: float, y: float, mat_eucli: np.ndarray) -> np.ndarray:
        """
        Transform the rotation matrix (Euclidean system) into rotation matrix (World system).

        Args:
            x (float): x coordinate of the point.
            y (float): y coordinate of the point.
            mat_eucli (np.array): Rotation matrix (Euclidean system).

        Returns:
            np.array: Rotation matrix (World system).
        """
        # We're in the local tangeant frame, now we're in the projection frame
        matecef_to_rtl = self.mat_rot_euclidean_local(x, y)
        mat = mat_eucli @ self.rot_to_euclidean_local @ matecef_to_rtl.T
        return mat

    def world_to_eucli(self, coor: np.ndarray) -> np.ndarray:
        """
        Transform a point from the world coordinate reference system into
        the Euclidean coordinate reference system.

        Args:
            coor (np.ndarray): Coordinate [X, Y, Z].

        Returns:
            np.array: x, y, z in the Euclidean coordinate reference system.
        """
        coor = check_array_transfo(coor[0], coor[1], coor[2])

        coor_geoc = np.array(ProjEngine().carto_to_geoc(coor[0], coor[1], coor[2]))
        central_geoc = np.array(ProjEngine().carto_to_geoc(self.pt_central[0],
                                                           self.pt_central[1],
                                                           self.pt_central[2]))
        dr = np.vstack([coor_geoc[0] - central_geoc[0],
                        coor_geoc[1] - central_geoc[1],
                        coor_geoc[2] - central_geoc[2]])
        point_eucli = np.squeeze((self.rot_to_euclidean_local @ dr) + np.array([self.pt_central]).T)
        return point_eucli

    def eucli_to_world(self, coor: np.ndarray) -> np.ndarray:
        """
        Transform a point from the Euclidean coordinate reference system into
        the world coordinate reference system.

        Args:
            coor (np.ndarray): Coordinate [X, Y, Z].

        Returns:
            np.array: x, y, z in the world coordinate reference system.
        """
        coor = np.squeeze(coor)

        central_geoc = np.array(ProjEngine().carto_to_geoc(self.pt_central[0],
                                                           self.pt_central[1],
                                                           self.pt_central[2]))
        dr = np.vstack([coor[0] - self.pt_central[0],
                        coor[1] - self.pt_central[1],
                        coor[2] - self.pt_central[2]])
        point_geoc = np.squeeze((self.rot_to_euclidean_local.T @ dr) + np.array([central_geoc]).T)
        x_gc, y_gc, z_gc = check_array_transfo(point_geoc[0], point_geoc[1], point_geoc[2])
        tup = ProjEngine().geoc_to_carto(x_gc, y_gc, z_gc)
        return np.array([tup[0], tup[1], tup[2]])
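Usage note (not part of the diff): a hedged sketch of expressing a shot's pose in the local tangent frame with LocalEuclideanProj. The helper shot_pose_to_local_frame is illustrative only; it assumes the ProjEngine singleton was configured beforehand with the worksite projection, since every method above calls it for the geographic and geocentric conversions.

import numpy as np

from borea.geodesy.local_euclidean_proj import LocalEuclideanProj


def shot_pose_to_local_frame(pos_shot: np.ndarray, mat_rot: np.ndarray) -> tuple:
    # Centre the local Euclidean frame on the shot position, then convert both
    # the position and the world rotation matrix into that frame.
    local = LocalEuclideanProj(pos_shot[0], pos_shot[1])
    pos_eucli = local.world_to_eucli(pos_shot)
    mat_eucli = local.mat_to_mat_eucli(pos_shot[0], pos_shot[1], mat_rot)
    return pos_eucli, mat_eucli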