pyvale 2025.5.3 (cp311-cp311-musllinux_1_2_i686.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of pyvale might be problematic.

Files changed (174)
  1. pyvale/__init__.py +89 -0
  2. pyvale/analyticmeshgen.py +102 -0
  3. pyvale/analyticsimdatafactory.py +91 -0
  4. pyvale/analyticsimdatagenerator.py +323 -0
  5. pyvale/blendercalibrationdata.py +15 -0
  6. pyvale/blenderlightdata.py +26 -0
  7. pyvale/blendermaterialdata.py +15 -0
  8. pyvale/blenderrenderdata.py +30 -0
  9. pyvale/blenderscene.py +488 -0
  10. pyvale/blendertools.py +420 -0
  11. pyvale/camera.py +146 -0
  12. pyvale/cameradata.py +69 -0
  13. pyvale/cameradata2d.py +84 -0
  14. pyvale/camerastereo.py +217 -0
  15. pyvale/cameratools.py +522 -0
  16. pyvale/cython/rastercyth.c +32211 -0
  17. pyvale/cython/rastercyth.cpython-311-i386-linux-musl.so +0 -0
  18. pyvale/cython/rastercyth.py +640 -0
  19. pyvale/data/__init__.py +5 -0
  20. pyvale/data/cal_target.tiff +0 -0
  21. pyvale/data/case00_HEX20_out.e +0 -0
  22. pyvale/data/case00_HEX27_out.e +0 -0
  23. pyvale/data/case00_HEX8_out.e +0 -0
  24. pyvale/data/case00_TET10_out.e +0 -0
  25. pyvale/data/case00_TET14_out.e +0 -0
  26. pyvale/data/case00_TET4_out.e +0 -0
  27. pyvale/data/case13_out.e +0 -0
  28. pyvale/data/case16_out.e +0 -0
  29. pyvale/data/case17_out.e +0 -0
  30. pyvale/data/case18_1_out.e +0 -0
  31. pyvale/data/case18_2_out.e +0 -0
  32. pyvale/data/case18_3_out.e +0 -0
  33. pyvale/data/case25_out.e +0 -0
  34. pyvale/data/case26_out.e +0 -0
  35. pyvale/data/optspeckle_2464x2056px_spec5px_8bit_gblur1px.tiff +0 -0
  36. pyvale/dataset.py +325 -0
  37. pyvale/errorcalculator.py +109 -0
  38. pyvale/errordriftcalc.py +146 -0
  39. pyvale/errorintegrator.py +336 -0
  40. pyvale/errorrand.py +607 -0
  41. pyvale/errorsyscalib.py +134 -0
  42. pyvale/errorsysdep.py +327 -0
  43. pyvale/errorsysfield.py +414 -0
  44. pyvale/errorsysindep.py +808 -0
  45. pyvale/examples/__init__.py +5 -0
  46. pyvale/examples/basics/ex1_1_basicscalars_therm2d.py +131 -0
  47. pyvale/examples/basics/ex1_2_sensormodel_therm2d.py +158 -0
  48. pyvale/examples/basics/ex1_3_customsens_therm3d.py +216 -0
  49. pyvale/examples/basics/ex1_4_basicerrors_therm3d.py +153 -0
  50. pyvale/examples/basics/ex1_5_fielderrs_therm3d.py +168 -0
  51. pyvale/examples/basics/ex1_6_caliberrs_therm2d.py +133 -0
  52. pyvale/examples/basics/ex1_7_spatavg_therm2d.py +123 -0
  53. pyvale/examples/basics/ex2_1_basicvectors_disp2d.py +112 -0
  54. pyvale/examples/basics/ex2_2_vectorsens_disp2d.py +111 -0
  55. pyvale/examples/basics/ex2_3_sensangle_disp2d.py +139 -0
  56. pyvale/examples/basics/ex2_4_chainfielderrs_disp2d.py +196 -0
  57. pyvale/examples/basics/ex2_5_vectorfields3d_disp3d.py +109 -0
  58. pyvale/examples/basics/ex3_1_basictensors_strain2d.py +114 -0
  59. pyvale/examples/basics/ex3_2_tensorsens2d_strain2d.py +111 -0
  60. pyvale/examples/basics/ex3_3_tensorsens3d_strain3d.py +182 -0
  61. pyvale/examples/basics/ex4_1_expsim2d_thermmech2d.py +171 -0
  62. pyvale/examples/basics/ex4_2_expsim3d_thermmech3d.py +252 -0
  63. pyvale/examples/genanalyticdata/ex1_1_scalarvisualisation.py +35 -0
  64. pyvale/examples/genanalyticdata/ex1_2_scalarcasebuild.py +43 -0
  65. pyvale/examples/genanalyticdata/ex2_1_analyticsensors.py +80 -0
  66. pyvale/examples/imagedef2d/ex_imagedef2d_todisk.py +79 -0
  67. pyvale/examples/renderblender/ex1_1_blenderscene.py +121 -0
  68. pyvale/examples/renderblender/ex1_2_blenderdeformed.py +119 -0
  69. pyvale/examples/renderblender/ex2_1_stereoscene.py +128 -0
  70. pyvale/examples/renderblender/ex2_2_stereodeformed.py +131 -0
  71. pyvale/examples/renderblender/ex3_1_blendercalibration.py +120 -0
  72. pyvale/examples/renderrasterisation/ex_rastenp.py +153 -0
  73. pyvale/examples/renderrasterisation/ex_rastercyth_oneframe.py +218 -0
  74. pyvale/examples/renderrasterisation/ex_rastercyth_static_cypara.py +187 -0
  75. pyvale/examples/renderrasterisation/ex_rastercyth_static_pypara.py +190 -0
  76. pyvale/examples/visualisation/ex1_1_plot_traces.py +102 -0
  77. pyvale/examples/visualisation/ex2_1_animate_sim.py +89 -0
  78. pyvale/experimentsimulator.py +175 -0
  79. pyvale/field.py +128 -0
  80. pyvale/fieldconverter.py +351 -0
  81. pyvale/fieldsampler.py +111 -0
  82. pyvale/fieldscalar.py +166 -0
  83. pyvale/fieldtensor.py +218 -0
  84. pyvale/fieldtransform.py +388 -0
  85. pyvale/fieldvector.py +213 -0
  86. pyvale/generatorsrandom.py +505 -0
  87. pyvale/imagedef2d.py +569 -0
  88. pyvale/integratorfactory.py +240 -0
  89. pyvale/integratorquadrature.py +217 -0
  90. pyvale/integratorrectangle.py +165 -0
  91. pyvale/integratorspatial.py +89 -0
  92. pyvale/integratortype.py +43 -0
  93. pyvale/output.py +17 -0
  94. pyvale/pyvaleexceptions.py +11 -0
  95. pyvale/raster.py +31 -0
  96. pyvale/rastercy.py +77 -0
  97. pyvale/rasternp.py +603 -0
  98. pyvale/rendermesh.py +147 -0
  99. pyvale/sensorarray.py +178 -0
  100. pyvale/sensorarrayfactory.py +196 -0
  101. pyvale/sensorarraypoint.py +278 -0
  102. pyvale/sensordata.py +71 -0
  103. pyvale/sensordescriptor.py +213 -0
  104. pyvale/sensortools.py +142 -0
  105. pyvale/simcases/case00_HEX20.i +242 -0
  106. pyvale/simcases/case00_HEX27.i +242 -0
  107. pyvale/simcases/case00_HEX8.i +242 -0
  108. pyvale/simcases/case00_TET10.i +242 -0
  109. pyvale/simcases/case00_TET14.i +242 -0
  110. pyvale/simcases/case00_TET4.i +242 -0
  111. pyvale/simcases/case01.i +101 -0
  112. pyvale/simcases/case02.i +156 -0
  113. pyvale/simcases/case03.i +136 -0
  114. pyvale/simcases/case04.i +181 -0
  115. pyvale/simcases/case05.i +234 -0
  116. pyvale/simcases/case06.i +305 -0
  117. pyvale/simcases/case07.geo +135 -0
  118. pyvale/simcases/case07.i +87 -0
  119. pyvale/simcases/case08.geo +144 -0
  120. pyvale/simcases/case08.i +153 -0
  121. pyvale/simcases/case09.geo +204 -0
  122. pyvale/simcases/case09.i +87 -0
  123. pyvale/simcases/case10.geo +204 -0
  124. pyvale/simcases/case10.i +257 -0
  125. pyvale/simcases/case11.geo +337 -0
  126. pyvale/simcases/case11.i +147 -0
  127. pyvale/simcases/case12.geo +388 -0
  128. pyvale/simcases/case12.i +329 -0
  129. pyvale/simcases/case13.i +140 -0
  130. pyvale/simcases/case14.i +159 -0
  131. pyvale/simcases/case15.geo +337 -0
  132. pyvale/simcases/case15.i +150 -0
  133. pyvale/simcases/case16.geo +391 -0
  134. pyvale/simcases/case16.i +357 -0
  135. pyvale/simcases/case17.geo +135 -0
  136. pyvale/simcases/case17.i +144 -0
  137. pyvale/simcases/case18.i +254 -0
  138. pyvale/simcases/case18_1.i +254 -0
  139. pyvale/simcases/case18_2.i +254 -0
  140. pyvale/simcases/case18_3.i +254 -0
  141. pyvale/simcases/case19.geo +252 -0
  142. pyvale/simcases/case19.i +99 -0
  143. pyvale/simcases/case20.geo +252 -0
  144. pyvale/simcases/case20.i +250 -0
  145. pyvale/simcases/case21.geo +74 -0
  146. pyvale/simcases/case21.i +155 -0
  147. pyvale/simcases/case22.geo +82 -0
  148. pyvale/simcases/case22.i +140 -0
  149. pyvale/simcases/case23.geo +164 -0
  150. pyvale/simcases/case23.i +140 -0
  151. pyvale/simcases/case24.geo +79 -0
  152. pyvale/simcases/case24.i +123 -0
  153. pyvale/simcases/case25.geo +82 -0
  154. pyvale/simcases/case25.i +140 -0
  155. pyvale/simcases/case26.geo +166 -0
  156. pyvale/simcases/case26.i +140 -0
  157. pyvale/simcases/run_1case.py +61 -0
  158. pyvale/simcases/run_all_cases.py +69 -0
  159. pyvale/simcases/run_build_case.py +64 -0
  160. pyvale/simcases/run_example_cases.py +69 -0
  161. pyvale/simtools.py +67 -0
  162. pyvale/visualexpplotter.py +191 -0
  163. pyvale/visualimagedef.py +74 -0
  164. pyvale/visualimages.py +76 -0
  165. pyvale/visualopts.py +493 -0
  166. pyvale/visualsimanimator.py +111 -0
  167. pyvale/visualsimsensors.py +318 -0
  168. pyvale/visualtools.py +136 -0
  169. pyvale/visualtraceplotter.py +142 -0
  170. pyvale-2025.5.3.dist-info/METADATA +144 -0
  171. pyvale-2025.5.3.dist-info/RECORD +174 -0
  172. pyvale-2025.5.3.dist-info/WHEEL +5 -0
  173. pyvale-2025.5.3.dist-info/licenses/LICENSE +21 -0
  174. pyvale-2025.5.3.dist-info/top_level.txt +1 -0
pyvale/blendertools.py ADDED
@@ -0,0 +1,420 @@
+ # ==============================================================================
+ # pyvale: the python validation engine
+ # License: MIT
+ # Copyright (C) 2025 The Computer Aided Validation Team
+ # ==============================================================================
+ import numpy as np
+ from pathlib import Path
+ from scipy.spatial.transform import Rotation
+ from PIL import Image
+ import bpy
+ from pyvale.cameratools import CameraTools
+ from pyvale.blendermaterialdata import BlenderMaterialData
+ from pyvale.blenderrenderdata import RenderData, RenderEngine
+ from pyvale.blendercalibrationdata import CalibrationData
+ from pyvale.output import Outputs
+ from pyvale.pyvaleexceptions import BlenderError
+
+ class BlenderTools:
+     """Namespace for tools used within the pyvale Blender module.
+     """
+
+     @staticmethod
+     def save_blender_file(base_dir: Path | None = Outputs.base_dir,
+                           override: bool = False) -> None:
+         """A method to save the current Blender scene to a .blend file.
+
+         Parameters
+         ----------
+         base_dir : Path
+             The base directory to which the Blender file will be saved. The
+             file is saved in a subfolder of this directory named blenderfiles.
+         override : bool, optional
+             If True and the specified filepath already exists, the existing
+             file is overwritten. If False and the filepath already exists, an
+             error is raised. If the filepath does not exist, the file is
+             saved normally, by default False.
+
+         Raises
+         ------
+         BlenderError
+             "The specified save directory does not exist".
+         BlenderError
+             "A file already exists with this filepath". Raised when override
+             is False and the specified filepath already exists.
+         """
+         if not base_dir.is_dir():
+             raise BlenderError("The specified save directory does not exist")
+
+         save_dir = base_dir / "blenderfiles"
+         if not save_dir.is_dir():
+             save_dir.mkdir(parents=True, exist_ok=True)
+
+         filename = save_dir / "projectfile.blend"
+
+         if filename.exists():
+             if override:
+                 filename.unlink()
+             else:
+                 raise BlenderError("A file already exists with this filepath")
+
+         bpy.ops.wm.save_as_mainfile(filepath=str(filename))
+
+         print()
+         print(80*"-")
+         print("Save directory of the project file:", filename)
+         print(80*"-")
+         print()
+
+     @staticmethod
+     def move_blender_obj(pos_world: np.ndarray, part: bpy.data.objects) -> None:
+         """A method to move an object within Blender.
+
+         Parameters
+         ----------
+         pos_world : np.ndarray
+             An array describing the world position to which the part should
+             be moved.
+         part : bpy.data.objects
+             The Blender part object to be moved.
+         """
+         z_location = int(part.dimensions[2])
+         part.location = (pos_world[0], pos_world[1], (pos_world[2] - z_location))
+
+     @staticmethod
+     def rotate_blender_obj(rot_world: Rotation, part: bpy.data.objects) -> None:
+         """A method to rotate an object within Blender.
+
+         Parameters
+         ----------
+         rot_world : Rotation
+             The rotation that is to be applied to the part object.
+         part : bpy.data.objects
+             The Blender part object to be rotated.
+         """
+         part.rotation_mode = "XYZ"
+         part_rotation = rot_world.as_euler("xyz", degrees=False)
+         part.rotation_euler = part_rotation
+
+     @staticmethod
+     def set_new_frame(part: bpy.data.objects) -> None:
+         """A method to set a new frame within Blender (needed to differentiate
+         the timesteps).
+
+         Parameters
+         ----------
+         part : bpy.data.objects
+             The Blender part object, normally the sample object. This is passed
+             in to ensure it is the active object within the scene.
+         """
+         frame_incr = 20
+         ob = bpy.context.view_layer.objects.active
+         if ob is None:
+             bpy.context.view_layer.objects.active = part
+
+         current_frame = bpy.context.scene.frame_current
+         current_frame += frame_incr
+         bpy.context.scene.frame_set(current_frame)
+
+         bpy.data.shape_keys["Key"].eval_time = current_frame
+         part.data.shape_keys.keyframe_insert("eval_time", frame=current_frame)
+         bpy.context.scene.frame_end = current_frame
+
+     @staticmethod
+     def deform_single_timestep(part: bpy.data.objects,
+                                deformed_nodes: np.ndarray) -> bpy.data.objects:
+         """A method to deform the part for a single timestep, given the
+         positions the nodes will move to.
+
+         Parameters
+         ----------
+         part : bpy.data.objects
+             The Blender part object to be deformed, normally the sample object.
+         deformed_nodes : np.ndarray
+             An array of the deformed positions of each node in the surface mesh.
+
+         Returns
+         -------
+         bpy.data.objects
+             The deformed Blender part object.
+         """
+         if part.data.shape_keys is None:
+             part.shape_key_add()
+         BlenderTools.set_new_frame(part)
+         shape_key = part.shape_key_add()
+         part.data.shape_keys.use_relative = False
+
+         n_nodes_layer = int(len(part.data.vertices))
+         for i in range(len(part.data.vertices)):
+             if i < n_nodes_layer:
+                 shape_key.data[i].co = deformed_nodes[i]
+         return part
+
+     @staticmethod
+     def clear_material_nodes(part: bpy.data.objects) -> None:
+         """A method to clear any existing material nodes from the specified
+         Blender object.
+
+         Parameters
+         ----------
+         part : bpy.data.objects
+             The Blender part object to which a material will be applied.
+         """
+         part.select_set(True)
+         mat = bpy.data.materials.new(name="Material")
+         mat.use_nodes = True
+         part.active_material = mat
+         tree = mat.node_tree
+         nodes = tree.nodes
+         nodes.clear()
+
+     @staticmethod
+     def uv_unwrap_part(part: bpy.data.objects,
+                        resolution: float,
+                        cal: bool = False) -> None:
+         """A method to UV unwrap the Blender object, in order to apply a
+         speckle image texture.
+
+         Parameters
+         ----------
+         part : bpy.data.objects
+             The Blender part object to be unwrapped, normally the sample object.
+         resolution : float
+             The mm/px resolution of the rendered image, used to size the UV
+             unwrapping.
+         cal : bool, optional
+             A flag that can be set when UV unwrapping a calibration target as
+             the sizing differs, by default False.
+         """
+         part.select_set(True)
+         bpy.context.view_layer.objects.active = part
+         bpy.ops.object.mode_set(mode="EDIT")
+         bpy.ops.mesh.select_all(action="SELECT")
+         cube_size = resolution * 1500
+         # TODO: Add capability here to uv unwrap non-rectangular objects
+         if cal is not True:
+             bpy.ops.uv.cube_project(scale_to_bounds=False,
+                                     correct_aspect=True,
+                                     cube_size=cube_size)
+         else:
+             bpy.ops.uv.cube_project(scale_to_bounds=True)
+         bpy.ops.object.mode_set(mode="OBJECT")
+         part.select_set(False)
+
+     @staticmethod
+     def add_image_texture(mat_data: BlenderMaterialData,
+                           image_path: Path | None = None,
+                           image_array: np.ndarray | None = None) -> None:
+         """A method to add an image texture to a Blender object. This is
+         primarily used for applying a speckle pattern to a sample object.
+
+         Parameters
+         ----------
+         mat_data : BlenderMaterialData
+             A dataclass containing the material parameters, including roughness.
+         image_path : Path | None, optional
+             The filepath for the speckle image file. If provided, that image
+             will be used, by default None.
+         image_array : np.ndarray | None, optional
+             A 2D array of a speckle image. If provided, this image will be
+             used, by default None.
+
+         Raises
+         ------
+         BlenderError
+             "Image texture filepath does not exist". Raised when no image
+             array is provided and the given filepath does not exist.
+         """
+         mat_nodes = bpy.data.materials["Material"].node_tree.nodes
+         bsdf = mat_nodes.new(type="ShaderNodeBsdfPrincipled")
+         bsdf.location = (0, 0)
+         bsdf.inputs["Roughness"].default_value = mat_data.roughness
+         bsdf.inputs["Metallic"].default_value = mat_data.metallic
+
+         node_tree = bpy.data.materials["Material"].node_tree
+         tex_image = node_tree.nodes.new(type="ShaderNodeTexImage")
+         tex_image.location = (0, 0)
+
+         if image_array is None:
+             if image_path is not None and image_path.exists():
+                 tex_image.image = bpy.data.images.load(str(image_path))
+             else:
+                 raise BlenderError("Image texture filepath does not exist")
+
+         if image_array is not None:
+             size = image_array.shape
+             image = Image.fromarray(image_array).convert("RGBA")
+             new_image_array = np.array(image)
+             blender_image = bpy.data.images.new("Speckle",
+                                                 width=size[0],
+                                                 height=size[1])
+             pixels = new_image_array.flatten()
+             blender_image.pixels = pixels
+             blender_image.update()
+             tex_image.image = blender_image
+
+         tex_image.interpolation = mat_data.interpolant
+
+         output = node_tree.nodes.new(type="ShaderNodeOutputMaterial")
+         output.location = (0, 0)
+
+         node_tree.links.new(tex_image.outputs["Color"], bsdf.inputs["Base Color"])
+         node_tree.links.new(bsdf.outputs["BSDF"], output.inputs["Surface"])
+
+         obj = bpy.data.objects.get("Part")
+         if obj:
+             obj.active_material = bpy.data.materials["Material"]
+
+     @staticmethod
+     def save_render_as_array(filepath: Path) -> np.ndarray:
+         """Method to retrieve a rendered image as an array. The rendered
+         image is read back from disk, converted to an array and the file is
+         then deleted.
+
+         Parameters
+         ----------
+         filepath : Path
+             The filepath to which the image was saved.
+
+         Returns
+         -------
+         np.ndarray
+             The rendered image as an array with the following dimensions:
+             shape=(pixels_num_y, pixels_num_x)
+         """
+         image = Image.open(filepath)
+         image_array = np.asarray(image)
+         filepath.unlink()
+         return image_array
+
+     @staticmethod
+     def number_calibration_images(calibration_data: CalibrationData) -> int:
+         """A function to calculate the number of calibration images that will
+         be rendered, given the calibration target's movement limits.
+
+         Parameters
+         ----------
+         calibration_data : CalibrationData
+             A dataclass detailing the movement the calibration target will
+             have throughout the calibration.
+
+         Returns
+         -------
+         int
+             The number of calibration images that will be rendered with the
+             given settings.
+         """
+         number_plunge_steps = (((calibration_data.plunge_lims[1] -
+                                  calibration_data.plunge_lims[0]) /
+                                 calibration_data.plunge_step) + 1)
+         number_angle_steps = (((calibration_data.angle_lims[1] -
+                                 calibration_data.angle_lims[0]) /
+                                calibration_data.angle_step) + 1)
+
+         # The target is rotated about both the x and y axes and translated to
+         # a 3x3 grid of in-plane positions, hence the factor of 9.
+         number_cal_images = int(number_angle_steps * number_angle_steps *
+                                 number_plunge_steps * 9)
+         return number_cal_images
+
+     @staticmethod
+     def render_calibration_images(render_data: RenderData,
+                                   calibration_data: CalibrationData,
+                                   part: bpy.data.objects) -> int:
+         """A method to render a set of calibration images, which can be used
+         to calculate the intrinsic and extrinsic parameters.
+
+         Parameters
+         ----------
+         render_data : RenderData
+             A dataclass containing the parameters needed to render the images.
+         calibration_data : CalibrationData
+             A dataclass containing the parameters by which to move the
+             calibration target. These include the plunge depth and rotation
+             angle.
+         part : bpy.data.objects
+             The Blender part object, in this instance the calibration target.
+
+         Returns
+         -------
+         int
+             The number of calibration images rendered. This is dependent on
+             the values set within the CalibrationData dataclass.
+         """
+         # Render parameters
+         bpy.context.scene.render.engine = render_data.engine.value
+         bpy.context.scene.render.image_settings.color_mode = "BW"
+         bpy.context.scene.render.image_settings.color_depth = str(render_data.bit_size)
+         bpy.context.scene.render.threads_mode = "FIXED"
+         bpy.context.scene.render.threads = render_data.threads
+         bpy.context.scene.render.image_settings.file_format = "TIFF"
+
+         if render_data.engine == RenderEngine.CYCLES:
+             bpy.context.scene.cycles.samples = render_data.samples
+             bpy.context.scene.cycles.max_bounces = render_data.max_bounces
+         elif render_data.engine == RenderEngine.EEVEE:
+             bpy.context.scene.eevee.taa_render_samples = render_data.samples
+
+         if not render_data.base_dir.is_dir():
+             raise BlenderError("The specified save directory does not exist")
+
+         save_dir = render_data.base_dir / "calimages"
+         if not save_dir.is_dir():
+             save_dir.mkdir(parents=True, exist_ok=True)
+
+         render_counter = 0
+         plunge_steps = int(((calibration_data.plunge_lims[1] -
+                              calibration_data.plunge_lims[0]) /
+                             calibration_data.plunge_step) + 1)
+         for ii in range(plunge_steps):
+             # Plunge
+             plunge = calibration_data.plunge_lims[0] + calibration_data.plunge_step * ii
+             (FOV_x, FOV_y) = CameraTools.blender_FOV(render_data.cam_data[0])
+             x_limit = int(round((FOV_x / 2) - (part.dimensions[0] / 2)))
+             y_limit = int(round((FOV_y / 2) - (part.dimensions[1] / 2)))
+
+             for x in np.arange(-1, 2):
+                 # Move in x-dir
+                 x *= x_limit
+                 for y in np.arange(-1, 2):
+                     # Move in y-dir
+                     y *= y_limit
+                     part.location = ((x, y, plunge))
+                     part.location[2] = plunge
+                     angle_steps = int(((calibration_data.angle_lims[1] -
+                                         calibration_data.angle_lims[0]) /
+                                        calibration_data.angle_step) + 1)
+                     for jj in range(angle_steps):
+                         # Rotate around x-axis
+                         angle = calibration_data.angle_lims[0] + calibration_data.angle_step * jj
+                         rotation = (np.radians(angle), 0, 0)
+                         part.rotation_mode = 'XYZ'
+                         part.rotation_euler = rotation
+                         for kk in range(angle_steps):
+                             # Rotate around y-axis
+                             angle = calibration_data.angle_lims[0] + calibration_data.angle_step * kk
+                             rotation = (0, np.radians(angle), 0)
+                             part.rotation_mode = 'XYZ'
+                             part.rotation_euler = rotation
+
+                             if isinstance(render_data.cam_data, tuple):
+                                 cam_count = 0
+                                 for cam in [obj for obj in bpy.data.objects if obj.type == "CAMERA"]:
+                                     bpy.context.scene.camera = cam
+                                     cam_data_render = render_data.cam_data[cam_count]
+                                     bpy.context.scene.render.resolution_x = cam_data_render.pixels_num[0]
+                                     bpy.context.scene.render.resolution_y = cam_data_render.pixels_num[1]
+                                     filename = "blendercal_" + str(render_counter) + "_" + str(cam_count) + ".tiff"
+                                     bpy.context.scene.render.filepath = str(save_dir / filename)
+                                     bpy.ops.render.render(write_still=True)
+                                     cam_count += 1
+                             render_counter += 1
+
+         print('Total number of calibration images = ' + str(render_counter))
+         return render_counter
+
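Note: the image count produced by render_calibration_images follows directly from number_calibration_images above: one pose per plunge step, per position in a 3x3 in-plane grid, per rotation step about the x axis, per rotation step about the y axis. Below is a minimal standalone sketch of that arithmetic in plain Python (no Blender required); the helper name and the limit/step values are illustrative only, not pyvale defaults.

    def expected_cal_images(plunge_lims, plunge_step, angle_lims, angle_step):
        # Mirrors BlenderTools.number_calibration_images: plunge steps x 3x3
        # grid of in-plane positions x angle steps about x x angle steps about y.
        plunge_steps = (plunge_lims[1] - plunge_lims[0]) / plunge_step + 1
        angle_steps = (angle_lims[1] - angle_lims[0]) / angle_step + 1
        return int(plunge_steps * 9 * angle_steps * angle_steps)

    # Illustrative values: 3 plunge depths and 5 angle steps per axis gives
    # 3 * 9 * 5 * 5 = 675 calibration poses.
    print(expected_cal_images((-10.0, 10.0), 10.0, (-10.0, 10.0), 5.0))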
pyvale/camera.py ADDED
@@ -0,0 +1,146 @@
+ # ==============================================================================
+ # pyvale: the python validation engine
+ # License: MIT
+ # Copyright (C) 2025 The Computer Aided Validation Team
+ # ==============================================================================
+
+ """
+ NOTE: This module is a feature under development.
+ """
+
+ import numpy as np
+ from pyvale.field import IField
+ from pyvale.sensorarray import ISensorArray
+ from pyvale.errorintegrator import ErrIntegrator
+ from pyvale.sensordescriptor import SensorDescriptor
+ from pyvale.fieldsampler import sample_field_with_sensor_data
+ from pyvale.cameradata2d import CameraData2D
+ from pyvale.cameratools import CameraTools
+
+
+ class CameraBasic2D(ISensorArray):
+     __slots__ = ("_cam_data","_field","_error_integrator","_descriptor",
+                  "_sensor_data","_truth","_measurements")
+
+     def __init__(self,
+                  cam_data: CameraData2D,
+                  field: IField,
+                  descriptor: SensorDescriptor | None = None,
+                  ) -> None:
+
+         self._cam_data = cam_data
+         self._field = field
+         self._error_integrator = None
+
+         self._descriptor = SensorDescriptor()
+         if descriptor is not None:
+             self._descriptor = descriptor
+
+         self._sensor_data = CameraTools.build_sensor_data_from_camera_2d(self._cam_data)
+
+         self._truth = None
+         self._measurements = None
+
+     #---------------------------------------------------------------------------
+     # Accessors
+     def get_sample_times(self) -> np.ndarray:
+         if self._sensor_data.sample_times is None:
+             #shape=(n_time_steps,)
+             return self._field.get_time_steps()
+
+         #shape=(n_time_steps,)
+         return self._sensor_data.sample_times
+
+     def get_measurement_shape(self) -> tuple[int,int,int]:
+         return (self._sensor_data.positions.shape[0],
+                 len(self._field.get_all_components()),
+                 self.get_sample_times().shape[0])
+
+     def get_image_measurements_shape(self) -> tuple[int,int,int,int]:
+         return (self._cam_data.num_pixels[1],
+                 self._cam_data.num_pixels[0],
+                 len(self._field.get_all_components()),
+                 self.get_sample_times().shape[0])
+
+     def get_field(self) -> IField:
+         return self._field
+
+     def get_descriptor(self) -> SensorDescriptor:
+         return self._descriptor
+
+     #---------------------------------------------------------------------------
+     # Truth calculation from simulation
+     def calc_truth_values(self) -> np.ndarray:
+         self._truth = sample_field_with_sensor_data(self._field,
+                                                     self._sensor_data)
+         #shape=(n_pixels,n_field_comps,n_time_steps)
+         return self._truth
+
+     def get_truth(self) -> np.ndarray:
+         if self._truth is None:
+             self._truth = self.calc_truth_values()
+         #shape=(n_pixels,n_field_comps,n_time_steps)
+         return self._truth
+
+     #---------------------------------------------------------------------------
+     # Errors
+     def set_error_integrator(self, err_int: ErrIntegrator) -> None:
+         self._error_integrator = err_int
+
+     def get_errors_systematic(self) -> np.ndarray | None:
+         if self._error_integrator is None:
+             return None
+
+         #shape=(n_pixels,n_field_comps,n_time_steps)
+         return self._error_integrator.get_errs_systematic()
+
+     def get_errors_random(self) -> np.ndarray | None:
+         if self._error_integrator is None:
+             return None
+
+         #shape=(n_pixels,n_field_comps,n_time_steps)
+         return self._error_integrator.get_errs_random()
+
+     def get_errors_total(self) -> np.ndarray | None:
+         if self._error_integrator is None:
+             return None
+
+         #shape=(n_pixels,n_field_comps,n_time_steps)
+         return self._error_integrator.get_errs_total()
+
+     #---------------------------------------------------------------------------
+     # Measurements
+     def calc_measurements(self) -> np.ndarray:
+         if self._error_integrator is None:
+             self._measurements = self.get_truth()
+         else:
+             self._measurements = self.get_truth() + \
+                 self._error_integrator.calc_errors_from_chain(self.get_truth())
+
+         #shape=(n_pixels,n_field_comps,n_time_steps)
+         return self._measurements
+
+     def get_measurements(self) -> np.ndarray:
+         if self._measurements is None:
+             self._measurements = self.calc_measurements()
+
+         #shape=(n_pixels,n_field_comps,n_time_steps)
+         return self._measurements
+
+     #---------------------------------------------------------------------------
+     # Images
+     def calc_measurement_images(self) -> np.ndarray:
+         #shape=(n_pixels,n_field_comps,n_time_steps)
+         self._measurements = self.calc_measurements()
+         image_shape = self.get_image_measurements_shape()
+         #shape=(n_pixels_y,n_pixels_x,n_field_comps,n_time_steps)
+         return np.reshape(self._measurements,image_shape)
+
+     def get_measurement_images(self) -> np.ndarray:
+         self._measurements = self.get_measurements()
+         image_shape = self.get_image_measurements_shape()
+         #shape=(n_pixels_y,n_pixels_x,n_field_comps,n_time_steps)
+         return np.reshape(self._measurements,image_shape)
+
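Note: CameraBasic2D stores measurements as a flat per-pixel array and the image accessors simply reshape it, as the shape comments above indicate. Below is a numpy-only illustration of that reshape convention; the array sizes are arbitrary examples and are not tied to any pyvale camera preset.

    import numpy as np

    n_pix_x, n_pix_y = 4, 3      # camera resolution in x and y
    n_comps, n_steps = 1, 2      # field components and time steps

    # Flat measurements: shape=(n_pixels, n_field_comps, n_time_steps)
    flat = np.arange(n_pix_x * n_pix_y * n_comps * n_steps, dtype=float)
    measurements = flat.reshape(n_pix_x * n_pix_y, n_comps, n_steps)

    # Image form, matching get_measurement_images:
    # shape=(n_pixels_y, n_pixels_x, n_field_comps, n_time_steps)
    images = np.reshape(measurements, (n_pix_y, n_pix_x, n_comps, n_steps))
    print(images.shape)  # (3, 4, 1, 2)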
pyvale/cameradata.py ADDED
@@ -0,0 +1,69 @@
+ # ==============================================================================
+ # pyvale: the python validation engine
+ # License: MIT
+ # Copyright (C) 2025 The Computer Aided Validation Team
+ # ==============================================================================
+
+ """
+ NOTE: This module is a feature under development.
+ """
+
+ from dataclasses import dataclass, field
+ import numpy as np
+ from scipy.spatial.transform import Rotation
+
+
+ @dataclass(slots=True)
+ class CameraData:
+     pixels_num: np.ndarray
+     pixels_size: np.ndarray
+
+     pos_world: np.ndarray
+     rot_world: Rotation
+     roi_cent_world: np.ndarray
+
+     focal_length: float | None = 50.0
+     sub_samp: int = 2
+
+     back_face_removal: bool = True
+
+     k1: float = 0.0
+     k2: float = 0.0
+     k3: float = 0.0
+     p1: float = 0.0
+     p2: float = 0.0
+     c0: float | None = None
+     c1: float | None = None
+
+     fstop: float | None = None
+
+     sensor_size: np.ndarray = field(init=False)
+     image_dims: np.ndarray = field(init=False)
+     image_dist: float = field(init=False)
+     cam_to_world_mat: np.ndarray = field(init=False)
+     world_to_cam_mat: np.ndarray = field(init=False)
+
+     def __post_init__(self) -> None:
+         relative_pos = np.subtract(self.pos_world, self.roi_cent_world)
+         self.image_dist = np.linalg.norm(relative_pos)
+         self.sensor_size = self.pixels_num*self.pixels_size
+         self.image_dims = (self.image_dist
+                            *self.sensor_size/self.focal_length)
+
+         self.cam_to_world_mat = np.zeros((4,4))
+         self.cam_to_world_mat[0:3,0:3] = self.rot_world.as_matrix()
+         self.cam_to_world_mat[-1,-1] = 1.0
+         self.cam_to_world_mat[0:3,-1] = self.pos_world
+         self.world_to_cam_mat = np.linalg.inv(self.cam_to_world_mat)
+
+         if self.c0 is None:
+             self.c0 = self.pixels_num[0] / 2
+         if self.c1 is None:
+             self.c1 = self.pixels_num[1] / 2
+
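Note: the derived fields computed in CameraData.__post_init__ follow the usual pinhole relations: working distance from the camera position to the region of interest centre, physical sensor size from pixel count and pixel pitch, and the imaged field of view scaled by distance over focal length. Below is a small numpy-only walk-through of the same arithmetic; the values (lengths in mm) are hypothetical examples, not pyvale defaults.

    import numpy as np

    pixels_num = np.array([2464, 2056])          # sensor resolution (x, y)
    pixels_size = np.array([0.00345, 0.00345])   # pixel pitch in mm (3.45 um)
    focal_length = 50.0                          # lens focal length in mm
    pos_world = np.array([0.0, 0.0, 500.0])      # camera position
    roi_cent_world = np.array([0.0, 0.0, 0.0])   # region of interest centre

    # Same arithmetic as CameraData.__post_init__
    image_dist = np.linalg.norm(pos_world - roi_cent_world)  # 500.0 mm
    sensor_size = pixels_num * pixels_size                   # [8.5008, 7.0932] mm
    image_dims = image_dist * sensor_size / focal_length     # [85.008, 70.932] mm imaged
    c0, c1 = pixels_num[0] / 2, pixels_num[1] / 2            # principal point defaults
    print(image_dist, sensor_size, image_dims, c0, c1)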
+