pyvale 2025.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of pyvale might be problematic. Click here for more details.
- pyvale/__init__.py +75 -0
- pyvale/core/__init__.py +7 -0
- pyvale/core/analyticmeshgen.py +59 -0
- pyvale/core/analyticsimdatafactory.py +63 -0
- pyvale/core/analyticsimdatagenerator.py +160 -0
- pyvale/core/camera.py +146 -0
- pyvale/core/cameradata.py +64 -0
- pyvale/core/cameradata2d.py +82 -0
- pyvale/core/cameratools.py +328 -0
- pyvale/core/cython/rastercyth.c +32267 -0
- pyvale/core/cython/rastercyth.py +636 -0
- pyvale/core/dataset.py +250 -0
- pyvale/core/errorcalculator.py +112 -0
- pyvale/core/errordriftcalc.py +146 -0
- pyvale/core/errorintegrator.py +339 -0
- pyvale/core/errorrand.py +614 -0
- pyvale/core/errorsysdep.py +331 -0
- pyvale/core/errorsysfield.py +407 -0
- pyvale/core/errorsysindep.py +905 -0
- pyvale/core/experimentsimulator.py +99 -0
- pyvale/core/field.py +136 -0
- pyvale/core/fieldconverter.py +154 -0
- pyvale/core/fieldsampler.py +112 -0
- pyvale/core/fieldscalar.py +167 -0
- pyvale/core/fieldtensor.py +221 -0
- pyvale/core/fieldtransform.py +384 -0
- pyvale/core/fieldvector.py +215 -0
- pyvale/core/generatorsrandom.py +528 -0
- pyvale/core/imagedef2d.py +566 -0
- pyvale/core/integratorfactory.py +241 -0
- pyvale/core/integratorquadrature.py +192 -0
- pyvale/core/integratorrectangle.py +88 -0
- pyvale/core/integratorspatial.py +90 -0
- pyvale/core/integratortype.py +44 -0
- pyvale/core/optimcheckfuncs.py +153 -0
- pyvale/core/raster.py +31 -0
- pyvale/core/rastercy.py +76 -0
- pyvale/core/rasternp.py +604 -0
- pyvale/core/rendermesh.py +156 -0
- pyvale/core/sensorarray.py +179 -0
- pyvale/core/sensorarrayfactory.py +210 -0
- pyvale/core/sensorarraypoint.py +280 -0
- pyvale/core/sensordata.py +72 -0
- pyvale/core/sensordescriptor.py +101 -0
- pyvale/core/sensortools.py +143 -0
- pyvale/core/visualexpplotter.py +151 -0
- pyvale/core/visualimagedef.py +71 -0
- pyvale/core/visualimages.py +75 -0
- pyvale/core/visualopts.py +180 -0
- pyvale/core/visualsimanimator.py +83 -0
- pyvale/core/visualsimplotter.py +182 -0
- pyvale/core/visualtools.py +81 -0
- pyvale/core/visualtraceplotter.py +256 -0
- pyvale/data/__init__.py +7 -0
- pyvale/data/case13_out.e +0 -0
- pyvale/data/case16_out.e +0 -0
- pyvale/data/case17_out.e +0 -0
- pyvale/data/case18_1_out.e +0 -0
- pyvale/data/case18_2_out.e +0 -0
- pyvale/data/case18_3_out.e +0 -0
- pyvale/data/case25_out.e +0 -0
- pyvale/data/case26_out.e +0 -0
- pyvale/data/optspeckle_2464x2056px_spec5px_8bit_gblur1px.tiff +0 -0
- pyvale/examples/__init__.py +7 -0
- pyvale/examples/analyticdatagen/__init__.py +7 -0
- pyvale/examples/analyticdatagen/ex1_1_scalarvisualisation.py +38 -0
- pyvale/examples/analyticdatagen/ex1_2_scalarcasebuild.py +46 -0
- pyvale/examples/analyticdatagen/ex2_1_analyticsensors.py +83 -0
- pyvale/examples/ex1_1_thermal2d.py +89 -0
- pyvale/examples/ex1_2_thermal2d.py +111 -0
- pyvale/examples/ex1_3_thermal2d.py +113 -0
- pyvale/examples/ex1_4_thermal2d.py +89 -0
- pyvale/examples/ex1_5_thermal2d.py +105 -0
- pyvale/examples/ex2_1_thermal3d .py +87 -0
- pyvale/examples/ex2_2_thermal3d.py +51 -0
- pyvale/examples/ex2_3_thermal3d.py +109 -0
- pyvale/examples/ex3_1_displacement2d.py +47 -0
- pyvale/examples/ex3_2_displacement2d.py +79 -0
- pyvale/examples/ex3_3_displacement2d.py +104 -0
- pyvale/examples/ex3_4_displacement2d.py +105 -0
- pyvale/examples/ex4_1_strain2d.py +57 -0
- pyvale/examples/ex4_2_strain2d.py +79 -0
- pyvale/examples/ex4_3_strain2d.py +100 -0
- pyvale/examples/ex5_1_multiphysics2d.py +78 -0
- pyvale/examples/ex6_1_multiphysics2d_expsim.py +118 -0
- pyvale/examples/ex6_2_multiphysics3d_expsim.py +158 -0
- pyvale/examples/features/__init__.py +7 -0
- pyvale/examples/features/ex_animation_tools_3dmonoblock.py +83 -0
- pyvale/examples/features/ex_area_avg.py +89 -0
- pyvale/examples/features/ex_calibration_error.py +108 -0
- pyvale/examples/features/ex_chain_field_errs.py +141 -0
- pyvale/examples/features/ex_field_errs.py +78 -0
- pyvale/examples/features/ex_sensor_single_angle_batch.py +110 -0
- pyvale/examples/imagedef2d/ex_imagedef2d_todisk.py +86 -0
- pyvale/examples/rasterisation/ex_rastenp.py +154 -0
- pyvale/examples/rasterisation/ex_rastercyth_oneframe.py +220 -0
- pyvale/examples/rasterisation/ex_rastercyth_static_cypara.py +194 -0
- pyvale/examples/rasterisation/ex_rastercyth_static_pypara.py +193 -0
- pyvale/simcases/case00_HEX20.i +242 -0
- pyvale/simcases/case00_HEX27.i +242 -0
- pyvale/simcases/case00_TET10.i +242 -0
- pyvale/simcases/case00_TET14.i +242 -0
- pyvale/simcases/case01.i +101 -0
- pyvale/simcases/case02.i +156 -0
- pyvale/simcases/case03.i +136 -0
- pyvale/simcases/case04.i +181 -0
- pyvale/simcases/case05.i +234 -0
- pyvale/simcases/case06.i +305 -0
- pyvale/simcases/case07.geo +135 -0
- pyvale/simcases/case07.i +87 -0
- pyvale/simcases/case08.geo +144 -0
- pyvale/simcases/case08.i +153 -0
- pyvale/simcases/case09.geo +204 -0
- pyvale/simcases/case09.i +87 -0
- pyvale/simcases/case10.geo +204 -0
- pyvale/simcases/case10.i +257 -0
- pyvale/simcases/case11.geo +337 -0
- pyvale/simcases/case11.i +147 -0
- pyvale/simcases/case12.geo +388 -0
- pyvale/simcases/case12.i +329 -0
- pyvale/simcases/case13.i +140 -0
- pyvale/simcases/case14.i +159 -0
- pyvale/simcases/case15.geo +337 -0
- pyvale/simcases/case15.i +150 -0
- pyvale/simcases/case16.geo +391 -0
- pyvale/simcases/case16.i +357 -0
- pyvale/simcases/case17.geo +135 -0
- pyvale/simcases/case17.i +144 -0
- pyvale/simcases/case18.i +254 -0
- pyvale/simcases/case18_1.i +254 -0
- pyvale/simcases/case18_2.i +254 -0
- pyvale/simcases/case18_3.i +254 -0
- pyvale/simcases/case19.geo +252 -0
- pyvale/simcases/case19.i +99 -0
- pyvale/simcases/case20.geo +252 -0
- pyvale/simcases/case20.i +250 -0
- pyvale/simcases/case21.geo +74 -0
- pyvale/simcases/case21.i +155 -0
- pyvale/simcases/case22.geo +82 -0
- pyvale/simcases/case22.i +140 -0
- pyvale/simcases/case23.geo +164 -0
- pyvale/simcases/case23.i +140 -0
- pyvale/simcases/case24.geo +79 -0
- pyvale/simcases/case24.i +123 -0
- pyvale/simcases/case25.geo +82 -0
- pyvale/simcases/case25.i +140 -0
- pyvale/simcases/case26.geo +166 -0
- pyvale/simcases/case26.i +140 -0
- pyvale/simcases/run_1case.py +61 -0
- pyvale/simcases/run_all_cases.py +69 -0
- pyvale/simcases/run_build_case.py +64 -0
- pyvale/simcases/run_example_cases.py +69 -0
- pyvale-2025.4.0.dist-info/METADATA +140 -0
- pyvale-2025.4.0.dist-info/RECORD +157 -0
- pyvale-2025.4.0.dist-info/WHEEL +5 -0
- pyvale-2025.4.0.dist-info/licenses/LICENSE +21 -0
- pyvale-2025.4.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,328 @@
|
|
|
1
|
+
"""
|
|
2
|
+
================================================================================
|
|
3
|
+
pyvale: the python validation engine
|
|
4
|
+
License: MIT
|
|
5
|
+
Copyright (C) 2025 The Computer Aided Validation Team
|
|
6
|
+
================================================================================
|
|
7
|
+
"""
|
|
8
|
+
import warnings
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
import numpy as np
|
|
11
|
+
from scipy.signal import convolve2d
|
|
12
|
+
from scipy.spatial.transform import Rotation
|
|
13
|
+
import matplotlib.image as mplim
|
|
14
|
+
from PIL import Image
|
|
15
|
+
from pyvale.core.cameradata2d import CameraData2D
|
|
16
|
+
from pyvale.core.sensordata import SensorData
|
|
17
|
+
|
|
18
|
+
# NOTE: This module is a feature under development.
|
|
19
|
+
|
|
20
|
+
class CameraTools:
    """Utility routines for simulated cameras.

    Provides image load/save helpers, pixel and sub-pixel coordinate
    vectors/grids (in pixel units and in physical length units),
    resolution and region-of-interest calculations from simulation
    coordinates, rectangular cropping, sub-pixel averaging, and 3D
    camera placement helpers that account for camera rotation.

    All methods are static; the class is used purely as a namespace.
    NOTE: this module is a feature under development.
    """
    #-------------------------------------------------------------------------------
    @staticmethod
    def load_image(im_path: Path) -> np.ndarray:
        """Load an image from disk as a float64 array.

        Parameters
        ----------
        im_path : Path
            Path to the image file readable by matplotlib.

        Returns
        -------
        np.ndarray
            2D float64 image. Multi-channel (e.g. RGB) input is collapsed
            by keeping only channel 0.
        """
        input_im = mplim.imread(im_path).astype(np.float64)
        # If we have RGB then get rid of it
        # TODO: make sure this is collapsing RGB to grey scale correctly
        if input_im.ndim > 2:
            input_im = input_im[:,:,0]

        return input_im

    @staticmethod
    def save_image(save_file: Path,
                   image: np.ndarray,
                   n_bits: int = 16) -> None:
        """Save an image to disk as unsigned integer data.

        The image is flipped vertically before saving so the stored
        origin is top-left with Y pointing down.

        Parameters
        ----------
        save_file : Path
            Destination file path (format inferred from the extension).
        image : np.ndarray
            2D image array; values are cast to uint16 (n_bits > 8) or uint8.
        n_bits : int
            Bit depth selector, defaults to 16.
        """
        # Need to flip image so coords are top left with Y down
        # TODO check this
        image = image[::-1,:]

        if n_bits > 8:
            im = Image.fromarray(image.astype(np.uint16))
        else:
            im = Image.fromarray(image.astype(np.uint8))

        im.save(save_file)

    @staticmethod
    def image_num_str(im_num: int, width: int , cam_num: int = -1) -> str:
        """Return `im_num` zero-padded to `width` digits.

        When `cam_num` is non-negative, `_<cam_num>` is appended,
        e.g. (7, 4, 2) -> "0007_2".
        """
        num_str = str(im_num)
        num_str = num_str.zfill(width)

        if cam_num >= 0:
            num_str = num_str+'_'+str(cam_num)

        return num_str

    #-------------------------------------------------------------------------------
    @staticmethod
    def pixel_vec_px(pixels_count: np.ndarray) -> tuple[np.ndarray,np.ndarray]:
        """Integer pixel index vectors (x then y), lengths pixels_count[0/1]."""
        px_vec_x = np.arange(0,pixels_count[0],1)
        px_vec_y = np.arange(0,pixels_count[1],1)
        return (px_vec_x,px_vec_y)

    @staticmethod
    def pixel_grid_px(pixels_count: np.ndarray
                      ) -> tuple[np.ndarray,np.ndarray]:
        """2D meshgrid of integer pixel indices (x grid, y grid)."""
        (px_vec_x,px_vec_y) = CameraTools.pixel_vec_px(pixels_count)
        return np.meshgrid(px_vec_x,px_vec_y)

    @staticmethod
    def vectorise_pixel_grid_px(pixels_count: np.ndarray) -> tuple[np.ndarray,np.ndarray]:
        """Flattened 1D versions of the pixel index meshgrid."""
        (px_grid_x,px_grid_y) = CameraTools.pixel_grid_px(pixels_count)
        return (px_grid_x.flatten(),px_grid_y.flatten())

    #-------------------------------------------------------------------------------
    @staticmethod
    def subpixel_vec_px(pixels_count: np.ndarray,
                        subsample: int = 2) -> tuple[np.ndarray,np.ndarray]:
        """Sub-pixel centre coordinate vectors in pixel units.

        Each pixel is divided into `subsample` steps per axis; coordinates
        start at the first sub-pixel centre, 1/(2*subsample).
        """
        px_vec_x = np.arange(1/(2*subsample),pixels_count[0],1/subsample)
        px_vec_y = np.arange(1/(2*subsample),pixels_count[1],1/subsample)
        return (px_vec_x,px_vec_y)

    @staticmethod
    def subpixel_grid_px(pixels_count: np.ndarray,
                         subsample: int = 2) -> tuple[np.ndarray,np.ndarray]:
        """2D meshgrid of sub-pixel centre coordinates in pixel units."""
        (px_vec_x,px_vec_y) = CameraTools.subpixel_vec_px(pixels_count,subsample)
        return np.meshgrid(px_vec_x,px_vec_y)

    @staticmethod
    def vectorise_subpixel_grid_px(pixels_count: np.ndarray,
                                   subsample: int = 2) -> tuple[np.ndarray,np.ndarray]:
        """Flattened 1D versions of the sub-pixel meshgrid in pixel units."""
        (px_grid_x,px_grid_y) = CameraTools.subpixel_grid_px(pixels_count,subsample)
        return (px_grid_x.flatten(),px_grid_y.flatten())

    #-------------------------------------------------------------------------------
    @staticmethod
    def pixel_vec_leng(field_of_view: np.ndarray,
                       leng_per_px: float) -> tuple[np.ndarray,np.ndarray]:
        """Pixel centre coordinate vectors in physical length units.

        Coordinates start at half a pixel (leng_per_px/2) and step by
        one pixel length up to the field of view extent on each axis.
        """
        px_vec_x = np.arange(leng_per_px/2,
                             field_of_view[0],
                             leng_per_px)
        px_vec_y = np.arange(leng_per_px/2,
                             field_of_view[1],
                             leng_per_px)
        return (px_vec_x,px_vec_y)

    @staticmethod
    def pixel_grid_leng(field_of_view: np.ndarray,
                        leng_per_px: float) -> tuple[np.ndarray,np.ndarray]:
        """2D meshgrid of pixel centre coordinates in length units."""
        (px_vec_x,px_vec_y) = CameraTools.pixel_vec_leng(field_of_view,leng_per_px)
        return np.meshgrid(px_vec_x,px_vec_y)

    @staticmethod
    def vectorise_pixel_grid_leng(field_of_view: np.ndarray,
                                  leng_per_px: float) -> tuple[np.ndarray,np.ndarray]:
        """Flattened 1D versions of the length-unit pixel meshgrid."""
        (px_grid_x,px_grid_y) = CameraTools.pixel_grid_leng(field_of_view,leng_per_px)
        return (px_grid_x.flatten(),px_grid_y.flatten())

    #-------------------------------------------------------------------------------
    @staticmethod
    def subpixel_vec_leng(field_of_view: np.ndarray,
                          leng_per_px: float,
                          subsample: int = 2) -> tuple[np.ndarray,np.ndarray]:
        """Sub-pixel centre coordinate vectors in physical length units."""
        px_vec_x = np.arange(leng_per_px/(2*subsample),
                             field_of_view[0],
                             leng_per_px/subsample)
        px_vec_y = np.arange(leng_per_px/(2*subsample),
                             field_of_view[1],
                             leng_per_px/subsample)
        return (px_vec_x,px_vec_y)

    @staticmethod
    def subpixel_grid_leng(field_of_view: np.ndarray,
                           leng_per_px: float,
                           subsample: int = 2) -> tuple[np.ndarray,np.ndarray]:
        """2D meshgrid of sub-pixel centre coordinates in length units."""
        (px_vec_x,px_vec_y) = CameraTools.subpixel_vec_leng(
            field_of_view,
            leng_per_px,
            subsample)
        return np.meshgrid(px_vec_x,px_vec_y)

    @staticmethod
    def vectorise_subpixel_grid_leng(field_of_view: np.ndarray,
                                     leng_per_px: float,
                                     subsample: int = 2) -> tuple[np.ndarray,np.ndarray]:
        """Flattened 1D versions of the length-unit sub-pixel meshgrid."""
        (px_grid_x,px_grid_y) = CameraTools.subpixel_grid_leng(
            field_of_view,
            leng_per_px,
            subsample)
        return (px_grid_x.flatten(),px_grid_y.flatten())

    #-------------------------------------------------------------------------------
    @staticmethod
    def calc_resolution_from_sim_2d(pixels_count: np.ndarray,
                                    coords: np.ndarray,
                                    pixels_border: int,
                                    view_plane_axes: tuple[int,int] = (0,1),
                                    ) -> float:
        """Worst-case physical length per pixel so the simulation fits the ROI.

        Parameters
        ----------
        pixels_count : np.ndarray
            Camera pixel counts (x, y).
        coords : np.ndarray
            Node coordinates, shape (n_nodes, n_dims); the bounding box
            extent per axis gives the field of view.
        pixels_border : int
            Border (in pixels) excluded on every side of the image.
        view_plane_axes : tuple[int, int]
            Which coordinate axes map to the image (x, y) axes.

        Returns
        -------
        float
            The larger of the two per-axis length-per-pixel values.
        """
        coords_min = np.min(coords, axis=0)
        coords_max = np.max(coords, axis=0)
        field_of_view = np.abs(coords_max - coords_min)
        roi_px = np.array(pixels_count - 2*pixels_border,dtype=np.float64)

        resolution = np.zeros_like(view_plane_axes,dtype=np.float64)
        # BUGFIX: iterate positions with enumerate. The original looped over
        # the *values* of view_plane_axes as indices, which indexed out of
        # bounds for any axes other than (0,1), e.g. (0,2).
        for ii,ax in enumerate(view_plane_axes):
            resolution[ii] = field_of_view[ax] / roi_px[ii]

        return np.max(resolution)

    @staticmethod
    def calc_roi_cent_from_sim_2d(coords: np.ndarray,) -> np.ndarray:
        """Centroid of the simulation coordinates (mean over nodes)."""
        return np.mean(coords,axis=0)

    @staticmethod
    def crop_image_rectangle(image: np.ndarray,
                             pixels_count: np.ndarray,
                             corner: tuple[int,int] = (0,0)
                             ) -> np.ndarray:
        """Crop a rectangle of size `pixels_count` starting at `corner`.

        `corner` is (x, y) in image coordinates (x = column, y = row).
        Edges falling outside the image are clamped to the image border
        and a warning is raised.

        Returns
        -------
        np.ndarray
            A view of the cropped region `image[y0:y1, x0:x1]`.
        """
        # BUGFIX: the crop end is corner + size (the original used the size
        # alone, truncating the crop for any non-zero corner).
        crop_x = np.array((corner[0],corner[0]+pixels_count[0]),dtype=np.int32)
        crop_y = np.array((corner[1],corner[1]+pixels_count[1]),dtype=np.int32)

        if corner[0] < 0:
            crop_x[0] = 0
            warnings.warn("Crop edge outside image, setting to image edge.")

        if corner[1] < 0:
            crop_y[0] = 0
            warnings.warn("Crop edge outside image, setting to image edge.")

        # BUGFIX: x indexes columns (shape[1]) and y indexes rows (shape[0]);
        # the original clamps used the axes swapped.
        if (corner[0]+pixels_count[0]) > image.shape[1]:
            crop_x[1] = image.shape[1]
            warnings.warn("Crop edge outside image, setting to image edge.")

        if (corner[1]+pixels_count[1]) > image.shape[0]:
            crop_y[1] = image.shape[0]
            warnings.warn("Crop edge outside image, setting to image edge.")

        return image[crop_y[0]:crop_y[1],crop_x[0]:crop_x[1]]

    @staticmethod
    def average_subpixel_image(subpx_image: np.ndarray,
                               subsample: int) -> np.ndarray:
        """Downsample a sub-sampled image by block-averaging.

        Averages each subsample x subsample block via a box-filter
        convolution then strided slicing. Returns the input unchanged
        when subsample <= 1.
        """
        if subsample <= 1:
            return subpx_image

        conv_mask = np.ones((subsample,subsample))/(subsample**2)
        subpx_image_conv = convolve2d(subpx_image,conv_mask,mode='same')
        # Pick the convolution output at each block centre.
        avg_image = subpx_image_conv[round(subsample/2)-1::subsample,
                                     round(subsample/2)-1::subsample]
        return avg_image

    #---------------------------------------------------------------------------
    @staticmethod
    def build_sensor_data_from_camera_2d(cam_data: "CameraData2D") -> "SensorData":
        """Build a SensorData object with one sensor per camera pixel.

        Pixel centres (in length units) are shifted into world coordinates
        via `cam_data.roi_shift_world` and placed on the camera view axes;
        the remaining world axis stays zero.
        """
        pixels_vectorised = CameraTools.vectorise_pixel_grid_leng(cam_data.field_of_view,
                                                                  cam_data.leng_per_px)

        positions = np.zeros((pixels_vectorised[0].shape[0],3))
        for ii,vv in enumerate(cam_data.view_axes):
            positions[:,vv] = pixels_vectorised[ii] + cam_data.roi_shift_world[ii]

        if cam_data.angle is None:
            angle = None
        else:
            angle = (cam_data.angle,)

        sens_data = SensorData(positions=positions,
                               sample_times=cam_data.sample_times,
                               angles=angle)

        return sens_data

    #-------------------------------------------------------------------------------
    # NOTE: keep these functions!
    # These functions work for 3D cameras calculating imaging dist and fov taking
    # account of camera rotation by rotating the bounding box of the sim into cam
    # coords

    @staticmethod
    def fov_from_cam_rot_3d(cam_rot: Rotation,
                            coords_world: np.ndarray) -> np.ndarray:
        """Field of view (x, y) lengths in camera coordinates.

        Rotates the 8 corners of the world-axis-aligned bounding box of
        `coords_world` into camera coordinates and measures their extent.
        """
        (xx,yy,zz) = (0,1,2)

        cam_to_world_mat = cam_rot.as_matrix()
        # A rotation matrix is orthonormal, so its inverse is its transpose
        # (exact, and cheaper than np.linalg.inv).
        world_to_cam_mat = cam_to_world_mat.T

        bb_min = np.min(coords_world,axis=0)
        bb_max = np.max(coords_world,axis=0)

        # Eight corners of the bounding box: top face then bottom face.
        bound_box_world_vecs = np.array([[bb_min[xx],bb_min[yy],bb_max[zz]],
                                         [bb_max[xx],bb_min[yy],bb_max[zz]],
                                         [bb_max[xx],bb_max[yy],bb_max[zz]],
                                         [bb_min[xx],bb_max[yy],bb_max[zz]],

                                         [bb_min[xx],bb_min[yy],bb_min[zz]],
                                         [bb_max[xx],bb_min[yy],bb_min[zz]],
                                         [bb_max[xx],bb_max[yy],bb_min[zz]],
                                         [bb_min[xx],bb_max[yy],bb_min[zz]],])

        bound_box_cam_vecs = np.matmul(world_to_cam_mat,bound_box_world_vecs.T)
        boundbox_cam_leng = (np.max(bound_box_cam_vecs,axis=1)
                             - np.min(bound_box_cam_vecs,axis=1))

        # Debug output (module is a feature under development).
        print(80*"-")
        print(f"{bb_min=}")
        print(f"{bb_max=}")
        print()
        print("Cam to world mat:")
        print(cam_to_world_mat)
        print()
        print("World to cam mat:")
        print(world_to_cam_mat)
        print(80*"-")

        return np.array((boundbox_cam_leng[xx],boundbox_cam_leng[yy]))

    @staticmethod
    def image_dist_from_fov_3d(pixel_num: np.ndarray,
                               pixel_size: np.ndarray,
                               focal_leng: float,
                               fov_leng: np.ndarray) -> np.ndarray:
        """Imaging distance needed for the sensor to see `fov_leng`.

        Uses the pinhole model: the angular field of view follows from the
        sensor dimensions and focal length, then the distance follows from
        the required physical field of view.
        """
        sensor_dims = pixel_num * pixel_size
        fov_angle = 2*np.arctan(sensor_dims/(2*focal_leng))
        image_dist = fov_leng/(2*np.tan(fov_angle/2))
        return image_dist

    @staticmethod
    def pos_fill_frame_from_rotation(coords_world: np.ndarray,
                                     pixel_num: np.ndarray,
                                     pixel_size: np.ndarray,
                                     focal_leng: float,
                                     cam_rot: Rotation,
                                     frame_fill: float = 1.0,
                                     ) -> tuple[np.ndarray,
                                                np.ndarray]:
        """Camera and ROI world positions so the mesh fills the frame.

        Parameters
        ----------
        coords_world : np.ndarray
            Node coordinates in world space, shape (n_nodes, 3).
        pixel_num, pixel_size : np.ndarray
            Sensor pixel counts and physical pixel sizes.
        focal_leng : float
            Lens focal length.
        cam_rot : Rotation
            Camera orientation (camera-to-world).
        frame_fill : float
            FOV scale factor; > 1.0 zooms out to guarantee the whole mesh
            is inside the image.

        Returns
        -------
        tuple[np.ndarray, np.ndarray]
            (roi_pos_world, cam_pos_world).
        """
        fov_leng = CameraTools.fov_from_cam_rot_3d(
            cam_rot=cam_rot,
            coords_world=coords_world,
        )

        # Scales the FOV by the given factor, greater than 1.0 will zoom out
        # making sure the mesh is wholly within the image
        fov_leng = frame_fill*fov_leng

        image_dist = CameraTools.image_dist_from_fov_3d(
            pixel_num=pixel_num,
            pixel_size=pixel_size,
            focal_leng=focal_leng,
            fov_leng=fov_leng,
        )

        # ROI centre is the midpoint of the in-plane bounding box; the
        # camera sits along its own +z axis at the larger imaging distance.
        roi_pos_world = (np.max(coords_world[:,:-1],axis=0)
                         + np.min(coords_world[:,:-1],axis=0))/2.0
        cam_z_dir_world = cam_rot.as_matrix()[:,-1]
        cam_pos_world = (roi_pos_world + np.max(image_dist)*cam_z_dir_world)

        # Debug output (module is a feature under development).
        print(80*"-")
        print(f"{fov_leng=}")
        print(f"{image_dist=}")
        print(80*"-")

        return (roi_pos_world,cam_pos_world)
|
|
326
|
+
|
|
327
|
+
|
|
328
|
+
#-------------------------------------------------------------------------------
|