ign_borea-0.1.5-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- borea/__init__.py +0 -0
- borea/datastruct/__init__.py +0 -0
- borea/datastruct/camera.py +25 -0
- borea/datastruct/dtm.py +119 -0
- borea/datastruct/gcp.py +22 -0
- borea/datastruct/shot.py +222 -0
- borea/datastruct/workdata.py +220 -0
- borea/format/__init__.py +0 -0
- borea/format/conl.py +143 -0
- borea/format/rpc.py +244 -0
- borea/geodesy/__init__.py +0 -0
- borea/geodesy/approx_euclidean_proj.py +91 -0
- borea/geodesy/euclidean_proj.py +25 -0
- borea/geodesy/local_euclidean_proj.py +127 -0
- borea/geodesy/proj_engine.py +70 -0
- borea/geodesy/projectionlist/__init__.py +0 -0
- borea/geodesy/projectionlist/search_proj.py +60 -0
- borea/geodesy/transform_geodesy.py +114 -0
- borea/process/__init__.py +0 -0
- borea/process/p_add_data/__init__.py +0 -0
- borea/process/p_add_data/p_add_shot.py +63 -0
- borea/process/p_add_data/p_file_gcp2d.py +55 -0
- borea/process/p_add_data/p_file_gcp3d.py +53 -0
- borea/process/p_add_data/p_gen_param.py +76 -0
- borea/process/p_add_data/p_pt2d.py +48 -0
- borea/process/p_add_data/p_pt3d.py +48 -0
- borea/process/p_add_data/p_unit_shot.py +48 -0
- borea/process/p_add_data/p_write.py +23 -0
- borea/process/p_format/__init__.py +0 -0
- borea/process/p_format/p_read_opk.py +78 -0
- borea/process/p_format/p_write_con.py +36 -0
- borea/process/p_format/p_write_opk.py +64 -0
- borea/process/p_format/p_write_rpc.py +48 -0
- borea/process/p_func/__init__.py +0 -0
- borea/process/p_func/p_control.py +67 -0
- borea/process/p_func/p_image_world.py +48 -0
- borea/process/p_func/p_spaceresection.py +51 -0
- borea/process/p_func/p_world_image.py +49 -0
- borea/reader/__init__.py +0 -0
- borea/reader/orientation/__init__.py +0 -0
- borea/reader/orientation/manage_reader.py +33 -0
- borea/reader/orientation/reader_opk.py +58 -0
- borea/reader/reader_camera.py +52 -0
- borea/reader/reader_point.py +113 -0
- borea/stat/__init__.py +0 -0
- borea/stat/statistics.py +215 -0
- borea/transform_world_image/__init__.py +0 -0
- borea/transform_world_image/transform_dtm/__init__.py +0 -0
- borea/transform_world_image/transform_dtm/world_image_dtm.py +47 -0
- borea/transform_world_image/transform_shot/__init__.py +0 -0
- borea/transform_world_image/transform_shot/conversion_coor_shot.py +58 -0
- borea/transform_world_image/transform_shot/image_world_shot.py +153 -0
- borea/transform_world_image/transform_shot/world_image_shot.py +117 -0
- borea/transform_world_image/transform_worksite/__init__.py +0 -0
- borea/transform_world_image/transform_worksite/image_world_intersection.py +154 -0
- borea/transform_world_image/transform_worksite/image_world_least_square.py +184 -0
- borea/transform_world_image/transform_worksite/image_world_work.py +49 -0
- borea/transform_world_image/transform_worksite/space_resection.py +343 -0
- borea/transform_world_image/transform_worksite/world_image_work.py +43 -0
- borea/utils/__init__.py +0 -0
- borea/utils/check/__init__.py +0 -0
- borea/utils/check/check_args_opk.py +59 -0
- borea/utils/check/check_args_reader_pt.py +44 -0
- borea/utils/check/check_array.py +56 -0
- borea/utils/check/check_header.py +90 -0
- borea/utils/check/check_order_axe.py +50 -0
- borea/utils/miscellaneous/__init__.py +0 -0
- borea/utils/miscellaneous/miscellaneous.py +83 -0
- borea/utils/miscellaneous/param_bundle.py +36 -0
- borea/utils/miscellaneous/sparse.py +31 -0
- borea/utils/singleton/__init__.py +0 -0
- borea/utils/singleton/singleton.py +23 -0
- borea/utils/xml/__init__.py +0 -0
- borea/utils/xml/xml.py +63 -0
- borea/worksite/__init__.py +0 -0
- borea/worksite/worksite.py +240 -0
- borea/writer/__init__.py +0 -0
- borea/writer/manage_writer.py +23 -0
- borea/writer/writer_con.py +29 -0
- borea/writer/writer_df_to_txt.py +32 -0
- borea/writer/writer_opk.py +70 -0
- borea/writer/writer_rpc.py +55 -0
- borea_tools/__init__.py +0 -0
- borea_tools/opk_control.py +33 -0
- borea_tools/opk_to_conl.py +33 -0
- borea_tools/opk_to_opk.py +33 -0
- borea_tools/opk_to_rpc.py +33 -0
- borea_tools/pt_image_to_world.py +32 -0
- borea_tools/pt_world_to_image.py +32 -0
- borea_tools/ptfile_image_to_world.py +32 -0
- borea_tools/ptfile_world_to_image.py +32 -0
- borea_tools/spaceresection_opk.py +34 -0
- ign_borea-0.1.5.dist-info/LICENSE +21 -0
- ign_borea-0.1.5.dist-info/METADATA +274 -0
- ign_borea-0.1.5.dist-info/RECORD +98 -0
- ign_borea-0.1.5.dist-info/WHEEL +5 -0
- ign_borea-0.1.5.dist-info/entry_points.txt +10 -0
- ign_borea-0.1.5.dist-info/top_level.txt +2 -0

borea/transform_world_image/transform_shot/world_image_shot.py
@@ -0,0 +1,117 @@
+"""
+World image transformation module for Shot
+"""
+import numpy as np
+from borea.datastruct.shot import Shot
+from borea.datastruct.camera import Camera
+from borea.datastruct.dtm import Dtm
+from borea.geodesy.proj_engine import ProjEngine
+from borea.transform_world_image.transform_shot.conversion_coor_shot import conv_z_shot_to_z_data
+
+
+class WorldImageShot():
+    """
+    World-to-image transformation for a shot.
+
+    Args:
+        shot (Shot): The shot whose coordinates are converted.
+        cam (Camera): The camera of the shot.
+    """
+    def __init__(self, shot: Shot, cam: Camera) -> None:
+        self.shot = shot
+        self.cam = cam
+
+    def world_to_image(self, coor_world: np.ndarray,
+                       type_z_data: str, type_z_shot: str) -> np.ndarray:
+        """
+        Calculates the c, l coordinates of a terrain point in an image.
+
+        Args:
+            coor_world (np.array): The coordinate [x, y, z] of the ground point.
+            type_z_data (str): Type of z of the data, "height" or "altitude".
+            type_z_shot (str): Type of z of the worksite, "height" or "altitude".
+
+        Returns:
+            np.array: The image coordinate [c, l].
+        """
+        if type_z_data != type_z_shot and not ProjEngine().geog_to_geoid:
+            raise ValueError("Missing geoid")
+
+        if self.shot.linear_alteration and not Dtm().path_dtm and not self.shot.approxeucli:
+            raise ValueError("Missing dtm")
+
+        if self.shot.approxeucli and not self.shot.linear_alteration:
+            raise ValueError("The data are not corrected by linear alteration "
+                             "and you are using an approximate system.")
+
+        p_eucli = self.shot.projeucli.world_to_eucli(coor_world)
+
+        # Convert coordinate in euclidean system to image system
+        coor_image = self.eucli_to_image(p_eucli, type_z_data, type_z_shot)
+
+        return coor_image
+
+    def eucli_to_image(self, p_eucli: np.ndarray,
+                       type_z_data: str, type_z_shot: str) -> np.ndarray:
+        """
+        Convert euclidean coordinate to image coordinate.
+
+        Args:
+            p_eucli (np.array): The euclidean coordinate [x, y, z] of the ground point.
+            type_z_data (str): Type of z of the data, "height" or "altitude".
+            type_z_shot (str): Type of z of the worksite, "height" or "altitude".
+
+        Returns:
+            np.array: Image coordinate [c, l].
+        """
+        # Convert euclidean coordinate system to bundle system
+        p_bundle = self.eucli_to_bundle(p_eucli, type_z_data, type_z_shot)
+
+        # Convert coordinate in bundle system to image system
+        x_col, y_line = self.bundle_to_image(p_bundle)
+
+        return np.array([x_col, y_line])
+
+    def eucli_to_bundle(self, p_eucli: np.ndarray,
+                        type_z_data: str, type_z_shot: str) -> np.ndarray:
+        """
+        Convert euclidean coordinate to bundle coordinate.
+
+        Args:
+            p_eucli (np.array): The euclidean coordinate [x, y, z] of the ground point.
+            type_z_data (str): Type of z of the data, "height" or "altitude".
+            type_z_shot (str): Type of z of the worksite, "height" or "altitude".
+
+        Returns:
+            np.array: Bundle coordinate [x, y, z].
+        """
+        pos_shot_new_z = conv_z_shot_to_z_data(self.shot, type_z_shot, type_z_data,
+                                               approx=self.shot.approxeucli)
+
+        # Convert coordinate in world system to euclidean system
+        pos_eucli = self.shot.projeucli.world_to_eucli(pos_shot_new_z)
+
+        # Convert coordinate in euclidean system to bundle system
+        p_bundle = np.squeeze((self.shot.mat_rot_eucli @ np.vstack([p_eucli[0] - pos_eucli[0],
+                                                                    p_eucli[1] - pos_eucli[1],
+                                                                    p_eucli[2] - pos_eucli[2]])))
+        return p_bundle
+
+    def bundle_to_image(self, p_bundle: np.ndarray) -> np.ndarray:
+        """
+        Convert bundle coordinates to image coordinates (column, line).
+
+        Args:
+            p_bundle (np.array): [X, Y, Z] coordinates in the bundle system.
+
+        Returns:
+            np.array: Image coordinate x_col, y_line.
+        """
+        x_shot = p_bundle[0] * self.cam.focal / p_bundle[2]
+        y_shot = p_bundle[1] * self.cam.focal / p_bundle[2]
+        z_shot = p_bundle[2]
+        x_shot, y_shot, z_shot = self.shot.f_sys(x_shot, y_shot, z_shot)
+        x_col = self.cam.ppax + x_shot
+        y_line = self.cam.ppay + y_shot
+
+        return np.array([x_col, y_line])
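For orientation, a minimal usage sketch of WorldImageShot (not taken from the package documentation; it assumes `shot` and `cam` are Shot and Camera objects already built elsewhere, e.g. by the reader modules listed above, that the shot's linear-alteration/DTM requirements are satisfied, and it passes "altitude" for both z types so the geoid conversion checked in world_to_image is not needed):

    import numpy as np
    from borea.transform_world_image.transform_shot.world_image_shot import WorldImageShot

    # Hypothetical helper: project one ground point [x, y, z] into an image.
    def project_point(shot, cam, point_xyz):
        coor_world = np.array(point_xyz, dtype=float)
        # Returns np.array([column, line]) per the docstring of world_to_image.
        return WorldImageShot(shot, cam).world_to_image(coor_world, "altitude", "altitude")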
File without changes

borea/transform_world_image/transform_worksite/image_world_intersection.py
@@ -0,0 +1,154 @@
+"""
+Module to calculate world coordinates by intersection.
+"""
+from dataclasses import dataclass
+import numpy as np
+from borea.worksite.worksite import Worksite
+from borea.datastruct.shot import Shot
+from borea.geodesy.local_euclidean_proj import LocalEuclideanProj
+from borea.geodesy.approx_euclidean_proj import ApproxEuclideanProj
+from borea.transform_world_image.transform_shot.conversion_coor_shot import conv_z_shot_to_z_data
+from borea.transform_world_image.transform_shot.image_world_shot import ImageWorldShot
+
+
+@dataclass
+class WorldIntersection:
+    """
+    Class to convert image coordinates to world coordinates in a worksite by intersecting bundles.
+
+    Args:
+        work (Worksite): The worksite to process.
+    """
+    work: Worksite
+
+    def calculate_image_world_by_intersection(self, type_point: str,
+                                              control_type: list = None) -> None:
+        """
+        Calculates the ground position of a connecting point by intersecting the
+        two most distant shots in which the point is visible.
+
+        Args:
+            type_point (str): "co_points" or "gcp2d",
+                              depending on what you want to calculate.
+            control_type (list): Control type for GCPs.
+        """
+
+        out_pt, control_type = self.work.get_attr_transfo_pt(type_point, control_type)
+
+        for name_pt, list_shot in getattr(self.work, type_point).items():  # Loop on points
+            try:
+                if control_type != [] and self.work.gcp3d[name_pt].code not in control_type:
+                    continue
+            except KeyError:
+                continue
+            if len(list_shot) == 1:
+                continue
+
+            coor = self.comput_inter_in_2_more_distant_shot(name_pt, list_shot)
+
+            getattr(self.work, out_pt)[name_pt] = coor
+
+    def comput_inter_in_2_more_distant_shot(self, name_pt: str, list_shot: list) -> np.ndarray:
+        """
+        Search for the two most distant images where the point is visible,
+        to calculate the point's position.
+
+        Args:
+            name_pt (str): Name of the point.
+            list_shot (list): List of shot names where the point is visible.
+
+        Returns:
+            np.ndarray: World coordinate of the point.
+        """
+        shot1 = ""
+        shot2 = ""
+        dist = 0
+        list_shot1 = list_shot.copy()
+        list_shot2 = list_shot1.copy()
+        _ = list_shot1.pop(-1)
+        for name_shot1 in list_shot1:  # Double loop on the shots that see the point
+            _ = list_shot2.pop(0)
+            for name_shot2 in list_shot2:
+                pos_shot1 = self.work.shots[name_shot1].pos_shot
+                pos_shot2 = self.work.shots[name_shot2].pos_shot
+                new_dist = np.sqrt(np.sum((pos_shot1 - pos_shot2)**2))
+                if new_dist > dist:
+                    dist = new_dist
+                    shot1 = name_shot1
+                    shot2 = name_shot2
+        return self.intersection_pt_in_2shot(name_pt,
+                                             self.work.shots[shot1],
+                                             self.work.shots[shot2])
+
+    def intersection_pt_in_2shot(self, name_point: str, shot1: Shot, shot2: Shot) -> np.ndarray:
+        """
+        Calculates the position of a point from two shots.
+
+        Args:
+            name_point (str): Name of the point whose coordinates are calculated.
+            shot1 (Shot): First shot.
+            shot2 (Shot): Second shot.
+
+        Returns:
+            np.array: World coordinate of the point.
+        """
+        # Retrieve coordinates of points in the image.
+        if name_point in list(shot1.co_points):
+            p_img1 = shot1.co_points[name_point]
+            p_img2 = shot2.co_points[name_point]
+        else:
+            p_img1 = shot1.gcp2d[name_point]
+            p_img2 = shot2.gcp2d[name_point]
+
+        # Setting up a Euclidean projection centered on the two images.
+        bary = (shot1.pos_shot + shot2.pos_shot)/2
+        if shot1.approxeucli:
+            projeucli = ApproxEuclideanProj(bary[0], bary[1])
+        else:
+            projeucli = LocalEuclideanProj(bary[0], bary[1])
+
+        # Calculates data specific to the Euclidean projection.
+        mat_eucli1 = projeucli.mat_to_mat_eucli(shot1.pos_shot[0], shot1.pos_shot[1], shot1.mat_rot)
+        mat_eucli2 = projeucli.mat_to_mat_eucli(shot2.pos_shot[0], shot2.pos_shot[1], shot2.mat_rot)
+        pos_eucli1 = conv_z_shot_to_z_data(shot1, self.work.type_z_shot, self.work.type_z_data)
+        pos_eucli2 = conv_z_shot_to_z_data(shot2, self.work.type_z_shot, self.work.type_z_data)
+        pos_eucli1 = projeucli.world_to_eucli(pos_eucli1)
+        pos_eucli2 = projeucli.world_to_eucli(pos_eucli2)
+
+        # Calculates the direction vectors of the point's bundles in the Euclidean reference system.
+        vect1 = mat_eucli1.T @ ImageWorldShot(shot1, self.work.cameras[shot1.name_cam]
+                                              ).image_to_bundle(p_img1)
+        vect2 = mat_eucli2.T @ ImageWorldShot(shot2, self.work.cameras[shot2.name_cam]
+                                              ).image_to_bundle(p_img2)
+
+        # Calculating the intersection of the two lines
+        pt_inter = self.intersection_line_3d(vect1, pos_eucli1, vect2, pos_eucli2)
+
+        # Converting the point to the world system.
+        pt_inter = projeucli.eucli_to_world(pt_inter)
+        return pt_inter
+
+    def intersection_line_3d(self, vect1: np.ndarray, point1: np.ndarray,
+                             vect2: np.ndarray, point2: np.ndarray) -> np.ndarray:
+        """
+        Calculation of the intersection point between two lines in a 3D system.
+
+        Args:
+            vect1 (np.array): Direction vector of the first line.
+            point1 (np.array): A point on the first line.
+            vect2 (np.array): Direction vector of the second line.
+            point2 (np.array): A point on the second line.
+
+        Returns:
+            np.array: The intersection point of the two lines.
+        """
+        base = point2 - point1
+        norme_v1 = vect1 @ vect1
+        norme_v2 = vect2 @ vect2
+        v1_v2 = vect1 @ vect2
+        b_v1 = base @ vect1
+        b_v2 = base @ vect2
+        denum = v1_v2**2 - norme_v1*norme_v2
+        p1_eucli = point1 + ((b_v2*v1_v2 - b_v1*norme_v2)/(denum))*vect1
+        p2_eucli = point2 + ((b_v2*norme_v1 - b_v1*v1_v2)/(denum))*vect2
+        return (p1_eucli + p2_eucli) / 2
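As a standalone sanity check of the closed form used by intersection_line_3d above, the same algebra can be exercised on two lines with a known crossing point (the helper name below is illustrative, not part of the package):

    import numpy as np

    def closest_point_between_lines(p1, v1, p2, v2):
        # Same formulas as intersection_line_3d: the two points realising the
        # shortest distance between the lines are computed, then averaged.
        base = p2 - p1
        n1, n2, v12 = v1 @ v1, v2 @ v2, v1 @ v2
        b1, b2 = base @ v1, base @ v2
        den = v12 ** 2 - n1 * n2
        q1 = p1 + ((b2 * v12 - b1 * n2) / den) * v1
        q2 = p2 + ((b2 * n1 - b1 * v12) / den) * v2
        return (q1 + q2) / 2

    # Two coplanar lines crossing at (0.5, 0.5, 0): the function returns exactly that point.
    print(closest_point_between_lines(np.array([0., 0., 0.]), np.array([1., 1., 0.]),
                                      np.array([1., 0., 0.]), np.array([-1., 1., 0.])))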

borea/transform_world_image/transform_worksite/image_world_least_square.py
@@ -0,0 +1,184 @@
+"""
+Module to calculate world coordinates by the least squares method.
+"""
+from dataclasses import dataclass
+import numpy as np
+import pandas as pd
+from scipy.sparse import coo_matrix
+from borea.worksite.worksite import Worksite
+from borea.geodesy.local_euclidean_proj import LocalEuclideanProj
+from borea.geodesy.approx_euclidean_proj import ApproxEuclideanProj
+from borea.datastruct.dtm import Dtm
+from borea.transform_world_image.transform_shot.image_world_shot import ImageWorldShot
+from borea.transform_world_image.transform_shot.world_image_shot import WorldImageShot
+from borea.utils.miscellaneous.sparse import invert_diag_sparse_matrix_3_3
+from borea.utils.miscellaneous.param_bundle import set_param_bundle_diff
+
+
+@dataclass
+class WorldLeastSquare:
+    """
+    Class to convert image coordinates to world coordinates in a worksite by least squares.
+
+    Args:
+        work (Worksite): The worksite to process.
+    """
+    work: Worksite
+
+    def compute_image_world_least_square(self, type_point: str, control_type: list) -> None:
+        """
+        Calculates the x, y, z position of each point from all the shots where it is visible.
+
+        Args:
+            type_point (str): "co_points" or "gcp2d",
+                              depending on what you want to calculate.
+            control_type (list): List of GCP codes to take.
+                                 To take all points or co_points, use control_type = [].
+        """
+        # Creation of the worksite's barycentre.
+        bary = self.work.calculate_barycentre()
+        if self.work.approxeucli:
+            eucliproj = ApproxEuclideanProj(bary[0], bary[1])
+        else:
+            eucliproj = LocalEuclideanProj(bary[0], bary[1])
+
+        # Retrieving point data from images.
+        pd_mes = self.work.get_point_image_dataframe(type_point, control_type)
+        pd_mes = pd_mes[pd_mes.duplicated(subset=['id_pt'], keep=False)]
+
+        # Initialization of euclidean points.
+        pd_pnt = self.init_eucli_points(pd_mes)
+
+        # Run the least squares method.
+        pd_pnt = self.least_square_intersect(pd_mes, pd_pnt)
+
+        # Transform euclidean points to world points.
+        xw, yw, zw = eucliproj.eucli_to_world(np.array([pd_pnt["x"].to_numpy(),
+                                                        pd_pnt["y"].to_numpy(),
+                                                        pd_pnt["z"].to_numpy()]))
+        pd_pnt["x"] = xw
+        pd_pnt["y"] = yw
+        pd_pnt["z"] = zw
+        self.work.set_point_world_dataframe(pd_pnt, type_point)
+
+    def init_eucli_points(self, pd_mes: pd.DataFrame) -> pd.DataFrame:
+        """
+        Initialization of ground points.
+
+        Args:
+            pd_mes (pd.Dataframe): Dataframe of image data: id_pt, id_img, column, line.
+
+        Returns:
+            pd.Dataframe: Dataframe of euclidean data: id_pt, x, y, z.
+        """
+        pb_pnt_unique = pd_mes[~pd_mes['id_pt'].duplicated(keep='first')]
+        group = pb_pnt_unique.groupby('id_img')
+        frames = []
+
+        for _, (id_shot, pd_mes_pnt) in enumerate(group):
+            shot = self.work.shots[id_shot]
+            cam = self.work.cameras[shot.name_cam]
+            coor_img = np.array([pd_mes_pnt["column"].to_numpy(), pd_mes_pnt["line"].to_numpy()])
+            z_world = np.squeeze(np.full(pd_mes_pnt.shape[0], Dtm().get_z_world(shot.pos_shot[:2])))
+
+            # Calculation of world coordinates for all points in this shot
+            coor_world = ImageWorldShot(shot, cam).image_z_to_world(coor_img,
+                                                                    self.work.type_z_shot,
+                                                                    z_world)
+
+            # Transform of world coordinates to euclidean coordinates
+            coor_eucli = shot.projeucli.world_to_eucli(coor_world)
+            frames += [pd.DataFrame({"id_pt": pd_mes_pnt["id_pt"],
+                                     "x": coor_eucli[0],
+                                     "y": coor_eucli[1],
+                                     "z": coor_eucli[2]})]
+
+        pd_pnt = pd.concat(frames, ignore_index=True)
+        pd_pnt["index_pnt"] = pd_pnt.index
+
+        return pd_pnt
+
+    def least_square_intersect(self, pd_mes: pd.DataFrame, pd_pnt: pd.DataFrame) -> pd.DataFrame:
+        """
+        Least squares method to calculate the world coordinates of points.
+
+        Args:
+            pd_mes (pd.Dataframe): Dataframe of image data: id_pt, id_img, column, line.
+            pd_pnt (pd.Dataframe): Dataframe of world data: id_pt, x, y, z.
+
+        Returns:
+            pd.Dataframe: Converged point data.
+        """
+        # Initialization of the numbers of observations and unknowns
+        nbr_obs = pd_mes.shape[0] * 2
+        nbr_inc = pd_mes.nunique(axis=0)["id_pt"] * 3
+
+        dx = 1.01
+        nbr_iter = 0
+        while not np.all(abs(dx) < 0.01) and nbr_iter < 5:
+            # Join data
+            pd_mes_temp = pd_mes.join(pd_pnt.set_index('id_pt'), on="id_pt")
+            pd_mes_temp = pd_mes_temp.sort_values(by=["index_pnt"]).reset_index(drop=True)
+            pd_mes_temp["index_mes"] = pd_mes_temp.index
+
+            mat_a, v_res = self.create_mat_a_and_vect_residu(pd_mes_temp, nbr_obs, nbr_inc)
+
+            # Solving the system by inverting the block-diagonal matrix
+            dx = invert_diag_sparse_matrix_3_3(mat_a.T @ mat_a) @ mat_a.T @ v_res
+
+            # Update world coordinates
+            pd_pnt["x"] += dx[0::3]
+            pd_pnt["y"] += dx[1::3]
+            pd_pnt["z"] += dx[2::3]
+
+            nbr_iter += 1
+
+        return pd_pnt
+
+    def create_mat_a_and_vect_residu(self, pd_mes_temp: pd.DataFrame,
+                                     nbr_obs: int, nbr_inc: int) -> tuple:
+        """
+        Creation of the matrix A and the residual vector for the least squares method.
+
+        Args:
+            pd_mes_temp (pd.Dataframe): Dataframe of data.
+            nbr_obs (int): Number of image observations.
+            nbr_inc (int): Number of unknowns (3 per point).
+
+        Returns:
+            tuple: Matrix A and residual vector.
+        """
+        v_res = np.zeros(nbr_obs)
+        coord_i, coord_j, data = [], [], []
+        for _, (id_shot, pd_data) in enumerate(pd_mes_temp.groupby('id_img')):
+            shot = self.work.shots[id_shot]
+            cam = self.work.cameras[shot.name_cam]
+
+            pti = WorldImageShot(shot, cam).eucli_to_image(np.array([pd_data["x"].to_numpy(),
+                                                                     pd_data["y"].to_numpy(),
+                                                                     pd_data["z"].to_numpy()]),
+                                                           self.work.type_z_data,
+                                                           self.work.type_z_shot)
+
+            _, vect_u, mat_v = set_param_bundle_diff(shot,
+                                                     np.array([pd_data["x"].to_numpy(),
+                                                               pd_data["y"].to_numpy(),
+                                                               pd_data["z"].to_numpy()]))
+
+            coord_i += [np.repeat(2 * pd_data['index_mes'].to_numpy(), 6) +
+                        np.tile([0, 0, 0, 1, 1, 1], len(pd_data["x"].to_numpy()))]
+            coord_j += [np.repeat(3 * pd_data['index_pnt'].to_numpy(), 6) +
+                        np.tile([0, 1, 2, 0, 1, 2], len(pd_data["y"].to_numpy()))]
+
+            data += [(np.tile(np.repeat(cam.focal / vect_u[2]**2, 2), (3, 1)).T *
+                      mat_v @ shot.mat_rot_eucli).flatten()]
+
+            v_res[2 * pd_data['index_mes'].to_numpy()] = pd_data["column"].to_numpy() - pti[0]
+            v_res[2 * pd_data['index_mes'].to_numpy() + 1] = pd_data["line"].to_numpy() - pti[1]
+
+        # Create the sparse matrix
+        coord_i = np.concatenate(coord_i)
+        coord_j = np.concatenate(coord_j)
+        data = np.concatenate(data)
+
+        return coo_matrix((data, (coord_i, coord_j)), shape=(nbr_obs, nbr_inc)).tocsr(), v_res
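One detail worth noting in least_square_intersect: because every image observation involves the (x, y, z) of a single point, the normal matrix mat_a.T @ mat_a is block diagonal with independent 3x3 blocks, which is why a dedicated block inverse (invert_diag_sparse_matrix_3_3) is enough instead of a general sparse solve. A small sketch of that idea using standard SciPy tools (an assumed equivalent, not the package's own implementation):

    import numpy as np
    from scipy.sparse import block_diag, csr_matrix

    def invert_3x3_blocks(n_mat: csr_matrix) -> csr_matrix:
        # Invert each 3x3 diagonal block of a block-diagonal normal matrix.
        blocks = [np.linalg.inv(n_mat[i:i + 3, i:i + 3].toarray())
                  for i in range(0, n_mat.shape[0], 3)]
        return block_diag(blocks, format="csr")

    # dx = invert_3x3_blocks((mat_a.T @ mat_a).tocsr()) @ mat_a.T @ v_res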

borea/transform_world_image/transform_worksite/image_world_work.py
@@ -0,0 +1,49 @@
+"""
+Image world transformation module for worksite
+"""
+from dataclasses import dataclass
+from borea.worksite.worksite import Worksite
+# pylint: disable-next=line-too-long
+from borea.transform_world_image.transform_worksite.image_world_intersection import WorldIntersection  # noqa: E501
+from borea.transform_world_image.transform_worksite.image_world_least_square import WorldLeastSquare
+
+
+@dataclass
+class ImageWorldWork:
+    """
+    Class to convert image coordinates to world coordinates in a worksite.
+
+    Args:
+        work (Worksite): The worksite to process.
+    """
+    work: Worksite
+
+    def manage_image_world(self, type_point: str = "co_points", type_process: str = "inter",
+                           control_type: list = None) -> None:
+        """
+        Process to convert image coordinates to world coordinates.
+
+        Args:
+            type_point (str): "co_points" or "gcp2d",
+                              depending on what you want to calculate.
+            type_process (str): Type of process you want to use.
+                                * "inter": intersect the bundles of the point from each shot.
+                                * "square": take all points and apply the least squares method.
+            control_type (list): Control type for GCPs.
+        """
+        if type_point not in ["co_points", "gcp2d"]:
+            raise ValueError(f"type_point {type_point} is incorrect,['co_points','gcp2d']")
+
+        if type_process not in ["inter", "square"]:
+            raise ValueError(f"type_process {type_process} is incorrect, "
+                             "['inter','square']")
+
+        if control_type is None or type_point == "co_points":
+            control_type = []
+
+        if type_process == "inter":
+            WorldIntersection(self.work).calculate_image_world_by_intersection(type_point,
+                                                                               control_type)
+
+        if type_process == "square":
+            WorldLeastSquare(self.work).compute_image_world_least_square(type_point, control_type)
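A minimal usage sketch of the dispatcher above (an assumed workflow, not from the package documentation; `work` stands for a Worksite already populated with shots, cameras and point measurements, e.g. by the reader modules listed at the top of this diff, and the GCP code value is hypothetical):

    from borea.transform_world_image.transform_worksite.image_world_work import ImageWorldWork

    # Tie points ("co_points") by bundle intersection:
    ImageWorldWork(work).manage_image_world(type_point="co_points", type_process="inter")

    # 2D GCP measurements by least squares, restricted to a hypothetical GCP code 13:
    ImageWorldWork(work).manage_image_world(type_point="gcp2d", type_process="square",
                                            control_type=[13])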