pyvale 2025.4.1-py3-none-any.whl → 2025.5.2-py3-none-any.whl
This diff reflects the changes between publicly released versions of the package as they appear in their respective public registries and is provided for informational purposes only.
- pyvale/__init__.py +18 -3
- pyvale/analyticmeshgen.py +1 -0
- pyvale/analyticsimdatafactory.py +18 -13
- pyvale/analyticsimdatagenerator.py +105 -72
- pyvale/blendercalibrationdata.py +15 -0
- pyvale/blenderlightdata.py +26 -0
- pyvale/blendermaterialdata.py +15 -0
- pyvale/blenderrenderdata.py +30 -0
- pyvale/blenderscene.py +488 -0
- pyvale/blendertools.py +420 -0
- pyvale/camera.py +6 -5
- pyvale/cameradata.py +25 -7
- pyvale/cameradata2d.py +6 -4
- pyvale/camerastereo.py +217 -0
- pyvale/cameratools.py +206 -11
- pyvale/cython/rastercyth.py +6 -2
- pyvale/data/cal_target.tiff +0 -0
- pyvale/dataset.py +73 -14
- pyvale/errorcalculator.py +8 -10
- pyvale/errordriftcalc.py +10 -9
- pyvale/errorintegrator.py +19 -21
- pyvale/errorrand.py +33 -39
- pyvale/errorsyscalib.py +134 -0
- pyvale/errorsysdep.py +19 -22
- pyvale/errorsysfield.py +49 -41
- pyvale/errorsysindep.py +79 -175
- pyvale/examples/basics/ex1_1_basicscalars_therm2d.py +131 -0
- pyvale/examples/basics/ex1_2_sensormodel_therm2d.py +158 -0
- pyvale/examples/basics/ex1_3_customsens_therm3d.py +216 -0
- pyvale/examples/basics/ex1_4_basicerrors_therm3d.py +153 -0
- pyvale/examples/basics/ex1_5_fielderrs_therm3d.py +168 -0
- pyvale/examples/basics/ex1_6_caliberrs_therm2d.py +133 -0
- pyvale/examples/basics/ex1_7_spatavg_therm2d.py +123 -0
- pyvale/examples/basics/ex2_1_basicvectors_disp2d.py +112 -0
- pyvale/examples/basics/ex2_2_vectorsens_disp2d.py +111 -0
- pyvale/examples/basics/ex2_3_sensangle_disp2d.py +139 -0
- pyvale/examples/basics/ex2_4_chainfielderrs_disp2d.py +196 -0
- pyvale/examples/basics/ex2_5_vectorfields3d_disp3d.py +109 -0
- pyvale/examples/basics/ex3_1_basictensors_strain2d.py +114 -0
- pyvale/examples/basics/ex3_2_tensorsens2d_strain2d.py +111 -0
- pyvale/examples/basics/ex3_3_tensorsens3d_strain3d.py +182 -0
- pyvale/examples/basics/ex4_1_expsim2d_thermmech2d.py +171 -0
- pyvale/examples/basics/ex4_2_expsim3d_thermmech3d.py +252 -0
- pyvale/examples/{analyticdatagen → genanalyticdata}/ex1_1_scalarvisualisation.py +6 -9
- pyvale/examples/{analyticdatagen → genanalyticdata}/ex1_2_scalarcasebuild.py +8 -11
- pyvale/examples/{analyticdatagen → genanalyticdata}/ex2_1_analyticsensors.py +9 -12
- pyvale/examples/imagedef2d/ex_imagedef2d_todisk.py +8 -15
- pyvale/examples/renderblender/ex1_1_blenderscene.py +121 -0
- pyvale/examples/renderblender/ex1_2_blenderdeformed.py +119 -0
- pyvale/examples/renderblender/ex2_1_stereoscene.py +128 -0
- pyvale/examples/renderblender/ex2_2_stereodeformed.py +131 -0
- pyvale/examples/renderblender/ex3_1_blendercalibration.py +120 -0
- pyvale/examples/{rasterisation → renderrasterisation}/ex_rastenp.py +3 -2
- pyvale/examples/{rasterisation → renderrasterisation}/ex_rastercyth_oneframe.py +2 -2
- pyvale/examples/{rasterisation → renderrasterisation}/ex_rastercyth_static_cypara.py +3 -8
- pyvale/examples/{rasterisation → renderrasterisation}/ex_rastercyth_static_pypara.py +6 -7
- pyvale/examples/{ex1_4_thermal2d.py → visualisation/ex1_1_plot_traces.py} +32 -16
- pyvale/examples/{features/ex_animation_tools_3dmonoblock.py → visualisation/ex2_1_animate_sim.py} +37 -31
- pyvale/experimentsimulator.py +107 -30
- pyvale/field.py +2 -9
- pyvale/fieldconverter.py +98 -22
- pyvale/fieldsampler.py +2 -2
- pyvale/fieldscalar.py +10 -10
- pyvale/fieldtensor.py +15 -17
- pyvale/fieldtransform.py +7 -2
- pyvale/fieldvector.py +6 -7
- pyvale/generatorsrandom.py +25 -47
- pyvale/imagedef2d.py +6 -2
- pyvale/integratorfactory.py +2 -2
- pyvale/integratorquadrature.py +50 -24
- pyvale/integratorrectangle.py +85 -7
- pyvale/integratorspatial.py +4 -4
- pyvale/integratortype.py +3 -3
- pyvale/output.py +17 -0
- pyvale/pyvaleexceptions.py +11 -0
- pyvale/raster.py +6 -5
- pyvale/rastercy.py +6 -4
- pyvale/rasternp.py +6 -4
- pyvale/rendermesh.py +6 -2
- pyvale/sensorarray.py +2 -2
- pyvale/sensorarrayfactory.py +52 -65
- pyvale/sensorarraypoint.py +29 -30
- pyvale/sensordata.py +2 -2
- pyvale/sensordescriptor.py +138 -25
- pyvale/sensortools.py +3 -3
- pyvale/simtools.py +67 -0
- pyvale/visualexpplotter.py +99 -57
- pyvale/visualimagedef.py +11 -7
- pyvale/visualimages.py +6 -4
- pyvale/visualopts.py +372 -58
- pyvale/visualsimanimator.py +42 -13
- pyvale/visualsimsensors.py +318 -0
- pyvale/visualtools.py +69 -13
- pyvale/visualtraceplotter.py +52 -165
- {pyvale-2025.4.1.dist-info → pyvale-2025.5.2.dist-info}/METADATA +17 -14
- pyvale-2025.5.2.dist-info/RECORD +172 -0
- {pyvale-2025.4.1.dist-info → pyvale-2025.5.2.dist-info}/WHEEL +1 -1
- pyvale/examples/analyticdatagen/__init__.py +0 -5
- pyvale/examples/ex1_1_thermal2d.py +0 -86
- pyvale/examples/ex1_2_thermal2d.py +0 -108
- pyvale/examples/ex1_3_thermal2d.py +0 -110
- pyvale/examples/ex1_5_thermal2d.py +0 -102
- pyvale/examples/ex2_1_thermal3d .py +0 -84
- pyvale/examples/ex2_2_thermal3d.py +0 -51
- pyvale/examples/ex2_3_thermal3d.py +0 -106
- pyvale/examples/ex3_1_displacement2d.py +0 -44
- pyvale/examples/ex3_2_displacement2d.py +0 -76
- pyvale/examples/ex3_3_displacement2d.py +0 -101
- pyvale/examples/ex3_4_displacement2d.py +0 -102
- pyvale/examples/ex4_1_strain2d.py +0 -54
- pyvale/examples/ex4_2_strain2d.py +0 -76
- pyvale/examples/ex4_3_strain2d.py +0 -97
- pyvale/examples/ex5_1_multiphysics2d.py +0 -75
- pyvale/examples/ex6_1_multiphysics2d_expsim.py +0 -115
- pyvale/examples/ex6_2_multiphysics3d_expsim.py +0 -160
- pyvale/examples/features/__init__.py +0 -5
- pyvale/examples/features/ex_area_avg.py +0 -89
- pyvale/examples/features/ex_calibration_error.py +0 -108
- pyvale/examples/features/ex_chain_field_errs.py +0 -141
- pyvale/examples/features/ex_field_errs.py +0 -78
- pyvale/examples/features/ex_sensor_single_angle_batch.py +0 -110
- pyvale/optimcheckfuncs.py +0 -153
- pyvale/visualsimplotter.py +0 -182
- pyvale-2025.4.1.dist-info/RECORD +0 -163
- {pyvale-2025.4.1.dist-info → pyvale-2025.5.2.dist-info}/licenses/LICENSE +0 -0
- {pyvale-2025.4.1.dist-info → pyvale-2025.5.2.dist-info}/top_level.txt +0 -0
pyvale/blendertools.py
ADDED
@@ -0,0 +1,420 @@
+# ==============================================================================
+# pyvale: the python validation engine
+# License: MIT
+# Copyright (C) 2025 The Computer Aided Validation Team
+# ==============================================================================
+import numpy as np
+from pathlib import Path
+from scipy.spatial.transform import Rotation
+from PIL import Image
+import bpy
+from pyvale.cameratools import CameraTools
+from pyvale.blendermaterialdata import BlenderMaterialData
+from pyvale.blenderrenderdata import RenderData, RenderEngine
+from pyvale.blendercalibrationdata import CalibrationData
+from pyvale.output import Outputs
+from pyvale.pyvaleexceptions import BlenderError
+
+class BlenderTools:
+    """Namespace for tools used within the pyvale Blender module.
+    """
+
+    @staticmethod
+    def save_blender_file(base_dir: Path | None = Outputs.base_dir,
+                          override: bool = False) -> None:
+        """A method to save the current Blender scene to a Blender .blend filepath
+
+        Parameters
+        ----------
+        base_dir : Path
+            The base directory to which the Blender file will be saved to. The
+            file will be saved in a subfolder of this directory named blenderfiles.
+        override : bool, optional
+            A flag which can be set to True or False. If set to True, if the
+            specified filepath already exists, this file will be automatically
+            overwritten. If set to False and the specified filepath already exists
+            an error will be thrown. If the specified filepath does not exist,
+            the file will be saved normally, by default False
+
+        Raises
+        ------
+        BlenderError
+            "The specified save directory does not exist".
+        BlenderError
+            "A file already exists with this filepath". This error is thrown
+            when override is set to False, and the specified filepath already
+            exists.
+
+        """
+        if not base_dir.is_dir():
+            raise BlenderError("The specified save directory does not exist")
+
+        save_dir = base_dir / "blenderfiles"
+        if not save_dir.is_dir():
+            print("Yes")
+            save_dir.mkdir(parents=True, exist_ok=True)
+
+        filename = save_dir / "projectfile.blend"
+
+        if filename.exists():
+            if override:
+                filename.unlink()
+            else:
+                raise BlenderError("A file already exists with this filepath")
+
+
+        bpy.ops.wm.save_as_mainfile(filepath=str(filename))
+
+
+        print()
+        print(80*"-")
+        print("Save directory of the project file:", filename)
+        print(80*"-")
+        print()
+
+    @staticmethod
+    def move_blender_obj(pos_world: np.ndarray, part: bpy.data.objects) -> None:
+        """A method to move an object within Blender.
+
+        Parameters
+        ----------
+        pos_world : np.ndarray
+            A array describing the vector position to which the part should be
+            moved to.
+        part : bpy.data.objects
+            The Blender part object to be moved.
+        """
+        z_location = int(part.dimensions[2])
+        part.location = (pos_world[0], pos_world[1], (pos_world[2] - z_location))
+
+    @staticmethod
+    def rotate_blender_obj(rot_world: Rotation, part: bpy.data.objects) -> None:
+        """A method to rotate an object within Blender.
+
+        Parameters
+        ----------
+        rot_world : Rotation
+            The rotation that is to be applied to the part object.
+        part : bpy.data.objects
+            The Blender part object to be rotated.
+        """
+        part.rotation_mode = "XYZ"
+        part_rotation = rot_world.as_euler("xyz", degrees=False)
+        part.rotation_euler = part_rotation
+
+    @staticmethod
+    def set_new_frame(part: bpy.data.objects) -> None:
+        """A method to set a new frame within Blender (needed to differenciate
+        the timesteps).
+
+        Parameters
+        ----------
+        part : bpy.data.objects
+            The Blender part object, normally the sample object. This is passed
+            in to ensure it is the active object within the scene.
+        """
+        frame_incr = 20
+        ob = bpy.context.view_layer.objects.active
+        if ob is None:
+            bpy.context.objects.active = part
+
+        current_frame = bpy.context.scene.frame_current
+        current_frame += frame_incr
+        bpy.context.scene.frame_set(current_frame)
+
+        bpy.data.shape_keys["Key"].eval_time = current_frame
+        part.data.shape_keys.keyframe_insert("eval_time", frame=current_frame)
+        bpy.context.scene.frame_end = current_frame
+
+    @staticmethod
+    def deform_single_timestep(part: bpy.data.objects,
+                               deformed_nodes: np.ndarray) -> bpy.data.objects:
+        """A method to deform the part for a single timestep, given the node
+        positions the nodes will move to.
+
+        Parameters
+        ----------
+        part : bpy.data.objects
+            The Blender part object to be deformed, normally the sample object.
+        deformed_nodes : np.ndarray
+            An array of the deformed positions of each node in the surface mesh.
+
+        Returns
+        -------
+        bpy.data.objects
+            The deformed Blender part object.
+        """
+        if part.data.shape_keys is None:
+            part.shape_key_add()
+        BlenderTools.set_new_frame(part)
+        shape_key = part.shape_key_add()
+        part.data.shape_keys.use_relative = False
+
+        n_nodes_layer = int(len(part.data.vertices))
+        for i in range(len(part.data.vertices)):
+            if i < n_nodes_layer:
+                shape_key.data[i].co = deformed_nodes[i]
+        return part
+
+    @staticmethod
+    def clear_material_nodes(part: bpy.data.objects) -> None:
+        """A method to clear any existing material nodes from the specified
+        Blender object.
+
+        Parameters
+        ----------
+        part : bpy.data.objects
+            The Blender part object to which a material will be applied.
+        """
+        part.select_set(True)
+        mat = bpy.data.materials.new(name="Material")
+        mat.use_nodes = True
+        part.active_material = mat
+        tree = mat.node_tree
+        nodes = tree.nodes
+        nodes.clear()
+
+    @staticmethod
+    def uv_unwrap_part(part: bpy.data.objects,
+                       resolution: float,
+                       cal: bool = False) -> None:
+        """A method to UV unwrap the Blender object, in order to apply a speckle
+        image texture.
+
+        Parameters
+        ----------
+        part : bpy.data.objects
+            The Blender part object to be unwrapped, normally the sample object.
+        resolution : float
+            The mm/px resolution of the rendered image, used to size the UV unwrapping.
+        cal : bool, optional
+            A flag that can be set when UV unwrapping a calibration target as the
+            sizing differs, by default False
+        """
+        part.select_set(True)
+        bpy.context.view_layer.objects.active = part
+        bpy.ops.object.mode_set(mode="EDIT")
+        bpy.ops.mesh.select_all(action="SELECT")
+        cube_size = resolution * 1500
+        # TODO: Add capability here to uv unwrap non-rectangular objects
+        if cal is not True:
+            bpy.ops.uv.cube_project(scale_to_bounds = False,
+                                    correct_aspect=True,
+                                    cube_size = cube_size)
+        else:
+            bpy.ops.uv.cube_project(scale_to_bounds=True)
+        bpy.ops.object.mode_set(mode="OBJECT")
+        part.select_set(False)
+
+    @staticmethod
+    def add_image_texture(mat_data: BlenderMaterialData,
+                          image_path: Path | None = None,
+                          image_array: np.ndarray | None = None) -> None:
+        """A method to add an image texture to a Blender object, this will
+        primarily be used for applying a speckle pattern to a sample object.
+
+        Parameters
+        ----------
+        mat_data : BlenderMaterialData
+            A dataclass containing the material parameters, including roughness
+        image_path : Path | None, optional
+            The filepath for the speckle image file. If provided, that image will
+            be used, by default None
+        image_array : np.ndarray | None, optional
+            An 2D array of a speckle image. If provided, this image will be used,
+            by default None
+
+        Raises
+        ------
+        BlenderError
+            "Image texture filepath does not exist". This error is thrown when
+            neither a filepath nor an image array have been provided
+        """
+        mat_nodes = bpy.data.materials["Material"].node_tree.nodes
+        bsdf = mat_nodes.new(type="ShaderNodeBsdfPrincipled")
+        bsdf.location = (0, 0)
+        bsdf.inputs["Roughness"].default_value = mat_data.roughness
+        bsdf.inputs["Metallic"].default_value = mat_data.metallic
+
+        node_tree = bpy.data.materials["Material"].node_tree
+        tex_image = node_tree.nodes.new(type="ShaderNodeTexImage")
+        tex_image.location = (0, 0)
+
+        if image_array is None:
+            if image_path.exists:
+                tex_image.image = bpy.data.images.load(str(image_path))
+            else:
+                raise BlenderError("Image texture filepath does not exist")
+
+        if image_array is not None:
+            size = image_array.shape
+            image = Image.fromarray(image_array).convert("RGBA")
+            new_image_array = np.array(image)
+            blender_image = bpy.data.images.new("Speckle",
+                                                width=size[0],
+                                                height=size[1])
+            pixels = new_image_array.flatten()
+            blender_image.pixels = pixels
+            blender_image.update()
+            tex_image.image = blender_image
+
+
+        tex_image.interpolation = mat_data.interpolant
+
+        output = node_tree.nodes.new(type="ShaderNodeOutputMaterial")
+        output.location = (0, 0)
+
+        node_tree.links.new(tex_image.outputs["Color"], bsdf.inputs["Base Color"])
+        node_tree.links.new(bsdf.outputs["BSDF"], output.inputs["Surface"])
+
+        obj = bpy.data.objects.get("Part")
+        if obj:
+            obj.active_material = bpy.data.materials["Material"]
+
+    @staticmethod
+    def save_render_as_array(filepath: Path) -> np.ndarray:
+        """Method to save a rendered image as an array. This method write the
+        image to the disk and then extracts it
+
+        Parameters
+        ----------
+        filepath : Path
+            The filepath to which the image is saved
+
+        Returns
+        -------
+        np.ndarray
+            The rendered image as an array with the following dimensions:
+            shape=(pixels_num_y, pixels_num_x)
+        """
+        image = Image.open(filepath)
+        image_array = np.asarray(image)
+        filepath.unlink()
+        return image_array
+
+    @staticmethod
+    def number_calibration_images(calibration_data: CalibrationData) -> int:
+        """A function to calculate the number of calibration images that will
+        be rendered, given the calibration target's movement limits.
+
+        Parameters
+        ----------
+        calibration_data : CalibrationData
+            A dataclass detailing the movement the calibration target will have
+            throughout the calibration
+
+        Returns
+        -------
+        int
+            The number of calibration images that will be rendered with the
+            given settings
+        """
+        number_plunge_steps = (((calibration_data.plunge_lims[1] -
+                                 calibration_data.plunge_lims[0]) /
+                                calibration_data.plunge_step) + 1)
+        number_angle_steps = (((calibration_data.angle_lims[1] -
+                                calibration_data.angle_lims[0]) /
+                               calibration_data.angle_step) + 1)
+
+        number_cal_images = int(number_angle_steps * number_angle_steps * number_plunge_steps * 9)
+        return number_cal_images
+
+
+    def render_calibration_images(render_data: RenderData,
+                                  calibration_data: CalibrationData,
+                                  part: bpy.data.objects) -> int:
+        """A method to render a set of calibration images, which can be used to
+        calculate the intrinsic and extrinsic parameters.
+
+        Parameters
+        ----------
+        render_data : RenderData
+            A dataclass containing the parameters needed to render the images
+        calibration_data : CalibrationData
+            A dataclass containing the parameters by which to move the calibration
+            target. These inclcude the plungle depth and rotation angle.
+        part : bpy.data.objects
+            The Blender part object, in this instance the calibration target.
+
+        Returns
+        -------
+        int
+            The number of calibration images that will be rendered. This is
+            dependant on the values set within the CalibrationData dataclass.
+        """
+        # Render parameters
+        bpy.context.scene.render.engine = render_data.engine.value
+        bpy.context.scene.render.image_settings.color_mode = "BW"
+        bpy.context.scene.render.image_settings.color_depth = str(render_data.bit_size)
+        bpy.context.scene.render.threads_mode = "FIXED"
+        bpy.context.scene.render.threads = render_data.threads
+        bpy.context.scene.render.image_settings.file_format = "TIFF"
+
+        if render_data.engine == RenderEngine.CYCLES:
+            bpy.context.scene.cycles.samples = render_data.samples
+            bpy.context.scene.cycles.max_bounces = render_data.max_bounces
+        elif render_data.engine == RenderEngine.EEVEE:
+            bpy.context.scene.eevee.taa_render_samples = render_data.samples
+
+        if not render_data.base_dir.is_dir():
+            raise BlenderError("The specified save directory does not exist")
+
+        save_dir = render_data.base_dir / "calimages"
+        if not save_dir.is_dir():
+            save_dir.mkdir(parents=True, exist_ok=True)
+
+        render_counter = 0
+        plunge_steps = int(((calibration_data.plunge_lims[1] -
+                             calibration_data.plunge_lims[0]) /
+                            calibration_data.plunge_step) + 1)
+        for ii in range(plunge_steps):
+            plunge = calibration_data.plunge_lims[0] + calibration_data.plunge_step * ii
+            # Plunge
+            (FOV_x, FOV_y) = CameraTools.blender_FOV(render_data.cam_data[0])
+            x_limit = int(round((FOV_x / 2) - (part.dimensions[0] / 2)))
+
+            y_limit = int(round((FOV_y / 2) - (part.dimensions[1] / 2)))
+
+            for x in np.arange(-1, 2):
+                x *= x_limit
+                # Move in x-dir
+                for y in np.arange(-1, 2):
+                    y *= y_limit
+                    # Move in y-dir
+                    part.location = ((x, y, plunge))
+                    part.location[2] = plunge
+                    angle_steps = int(((calibration_data.angle_lims[1] -
+                                        calibration_data.angle_lims[0]) /
+                                       calibration_data.angle_step) + 1)
+                    for jj in range(angle_steps):
+                        angle = calibration_data.angle_lims[0] + calibration_data.angle_step * jj

+                        # Rotate around x-axis
+                        rotation = (np.radians(angle), 0, 0)
+                        part.rotation_mode = 'XYZ'
+                        part.rotation_euler = rotation
+                        for kk in range(angle_steps):
+                            angle = calibration_data.angle_lims[0] + calibration_data.angle_step * kk
+                            # Rotate around y-axis
+                            rotation = (0, np.radians(angle), 0)
+                            part.rotation_mode = 'XYZ'
+                            part.rotation_euler = rotation
+
+                            if isinstance(render_data.cam_data, tuple):
+                                cam_count = 0
+                                for cam in [obj for obj in bpy.data.objects if obj.type == "CAMERA"]:
+                                    bpy.context.scene.camera = cam
+                                    cam_data_render = render_data.cam_data[cam_count]
+                                    bpy.context.scene.render.resolution_x = cam_data_render.pixels_num[0]
+                                    bpy.context.scene.render.resolution_y = cam_data_render.pixels_num[1]
+                                    filename = "blendercal_" + str(render_counter) + "_" + str(cam_count) + ".tiff"
+                                    bpy.context.scene.render.filepath = str(save_dir / filename)
+                                    bpy.ops.render.render(write_still=True)
+                                    cam_count += 1
+                            render_counter += 1
+        print('Total number of calibration images = ' + str(render_counter))
+        return render_counter
+
+
+
+
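The new `BlenderTools` namespace above is driven entirely through static methods. As a rough orientation, the sketch below is not taken from the package's examples: the cube object, positions and rotation are illustrative, and it assumes pyvale 2025.5.2 plus the `bpy` module are importable (i.e. it runs inside Blender's Python or with the bpy wheel installed).

```python
# Hypothetical usage sketch for the new BlenderTools helpers -- not from the
# pyvale examples. Assumes Blender's Python (or the bpy wheel) and pyvale
# 2025.5.2 are available on the path.
import numpy as np
from scipy.spatial.transform import Rotation
import bpy
from pyvale.blendertools import BlenderTools

# Treat the default cube of a fresh Blender scene as the "part" (illustrative).
part = bpy.data.objects["Cube"]

# Translate the part in world coordinates and rotate it 30 degrees about z.
BlenderTools.move_blender_obj(np.array([10.0, 0.0, 0.0]), part)
BlenderTools.rotate_blender_obj(Rotation.from_euler("z", 30, degrees=True), part)

# Writes <base_dir>/blenderfiles/projectfile.blend; with override=False a
# pre-existing file raises BlenderError instead of being overwritten.
BlenderTools.save_blender_file(override=True)
```

The renderblender examples added in this release (ex1_1_blenderscene.py through ex3_1_blendercalibration.py) are the authoritative reference for how these helpers are meant to be combined with `BlenderScene`.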
pyvale/camera.py
CHANGED
@@ -1,8 +1,12 @@
-#
+# ==============================================================================
 # pyvale: the python validation engine
 # License: MIT
 # Copyright (C) 2025 The Computer Aided Validation Team
-#
+# ==============================================================================
+
+"""
+NOTE: This module is a feature under developement.
+"""
 
 import numpy as np
 from pyvale.field import IField
@@ -14,9 +18,6 @@ from pyvale.cameradata2d import CameraData2D
 from pyvale.cameratools import CameraTools
 
 
-# NOTE: This module is a feature under developement.
-
-
 class CameraBasic2D(ISensorArray):
     __slots__ = ("_cam_data","_field","_error_integrator","_descriptor",
                  "_sensor_data","_truth","_measurements")
pyvale/cameradata.py
CHANGED
@@ -1,17 +1,18 @@
-#
+# ==============================================================================
 # pyvale: the python validation engine
 # License: MIT
 # Copyright (C) 2025 The Computer Aided Validation Team
-#
+# ==============================================================================
+
+"""
+NOTE: This module is a feature under developement.
+"""
 
 from dataclasses import dataclass, field
 import numpy as np
 from scipy.spatial.transform import Rotation
 
 
-# NOTE: This module is a feature under developement.
-
-
 @dataclass(slots=True)
 class CameraData:
     pixels_num: np.ndarray
@@ -21,19 +22,31 @@ class CameraData:
     rot_world: Rotation
     roi_cent_world: np.ndarray
 
-    focal_length: float = 50.0
+    focal_length: float | None = 50.0
     sub_samp: int = 2
 
     back_face_removal: bool = True
 
+    k1: float = 0.0
+    k2: float = 0.0
+    k3: float = 0.0
+    p1: float = 0.0
+    p2: float = 0.0
+    c0: float | None = None
+    c1: float | None = None
+
+    fstop: float | None = None
+
     sensor_size: np.ndarray = field(init=False)
     image_dims: np.ndarray = field(init=False)
     image_dist: float = field(init=False)
     cam_to_world_mat: np.ndarray = field(init=False)
     world_to_cam_mat: np.ndarray = field(init=False)
 
+
     def __post_init__(self) -> None:
-
+        relative_pos = np.subtract(self.pos_world, self.roi_cent_world)
+        self.image_dist = np.linalg.norm(relative_pos)
         self.sensor_size = self.pixels_num*self.pixels_size
         self.image_dims = (self.image_dist
                            *self.sensor_size/self.focal_length)
@@ -44,6 +57,11 @@ class CameraData:
         self.cam_to_world_mat[0:3,-1] = self.pos_world
         self.world_to_cam_mat = np.linalg.inv(self.cam_to_world_mat)
 
+        if self.c0 is None:
+            self.c0 = self.pixels_num[0] / 2
+        if self.c1 is None:
+            self.c1 = self.pixels_num[1] / 2
+
 
 
 
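The `CameraData` change replaces the user-supplied image distance with one derived from the camera and ROI positions, and adds what appear to be lens-distortion coefficients (k1, k2, k3, p1, p2) plus a principal point (c0, c1) that defaults to the image centre. The plain-NumPy sketch below mirrors only the `__post_init__` logic visible in the hunks above; the pixel counts and positions are placeholder values, not package defaults.

```python
# Standalone sketch of the new CameraData.__post_init__ behaviour shown above,
# written in plain NumPy so it runs without pyvale or Blender installed.
# The numeric values are placeholders, not defaults from the package.
import numpy as np

pixels_num = np.array([2464, 2056])          # sensor pixel counts (x, y)
pos_world = np.array([0.0, 0.0, 400.0])      # camera position in world coords
roi_cent_world = np.array([0.0, 0.0, 0.0])   # centre of the region of interest

# image_dist is now derived from the camera-to-ROI vector instead of being
# supplied by the user.
image_dist = np.linalg.norm(np.subtract(pos_world, roi_cent_world))

# The principal point (c0, c1) defaults to the image centre when left as None.
c0, c1 = None, None
if c0 is None:
    c0 = pixels_num[0] / 2
if c1 is None:
    c1 = pixels_num[1] / 2

print(image_dist, c0, c1)  # 400.0 1232.0 1028.0
```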
pyvale/cameradata2d.py
CHANGED
@@ -1,15 +1,17 @@
-#
+# ==============================================================================
 # pyvale: the python validation engine
 # License: MIT
 # Copyright (C) 2025 The Computer Aided Validation Team
-#
+# ==============================================================================
+
+"""
+NOTE: This module is a feature under developement.
+"""
 
 from dataclasses import dataclass, field
 import numpy as np
 from scipy.spatial.transform import Rotation
 
-# NOTE: This module is a feature under developement.
-
 @dataclass(slots=True)
 class CameraData2D:
     pixels_count: np.ndarray | None = None