pyvale 2025.7.2-cp311-cp311-win32.whl → 2025.8.1-cp311-cp311-win32.whl
- pyvale/__init__.py +12 -92
- pyvale/blender/__init__.py +23 -0
- pyvale/{pyvaleexceptions.py → blender/blenderexceptions.py} +0 -3
- pyvale/{blenderlightdata.py → blender/blenderlightdata.py} +3 -3
- pyvale/{blendermaterialdata.py → blender/blendermaterialdata.py} +1 -1
- pyvale/{blenderrenderdata.py → blender/blenderrenderdata.py} +5 -3
- pyvale/{blenderscene.py → blender/blenderscene.py} +33 -30
- pyvale/{blendertools.py → blender/blendertools.py} +14 -10
- pyvale/dataset/__init__.py +7 -0
- pyvale/dataset/dataset.py +443 -0
- pyvale/dic/__init__.py +20 -0
- pyvale/{dic2d.py → dic/dic2d.py} +31 -36
- pyvale/dic/dic2dconv.py +6 -0
- pyvale/{dic2dcpp.cp311-win32.pyd → dic/dic2dcpp.cp311-win32.pyd} +0 -0
- pyvale/{dicdataimport.py → dic/dicdataimport.py} +8 -8
- pyvale/{dicregionofinterest.py → dic/dicregionofinterest.py} +1 -1
- pyvale/{dicresults.py → dic/dicresults.py} +1 -1
- pyvale/{dicstrain.py → dic/dicstrain.py} +9 -9
- pyvale/examples/basics/{ex1_1_basicscalars_therm2d.py → ex1a_basicscalars_therm2d.py} +12 -9
- pyvale/examples/basics/{ex1_2_sensormodel_therm2d.py → ex1b_sensormodel_therm2d.py} +17 -14
- pyvale/examples/basics/{ex1_3_customsens_therm3d.py → ex1c_customsens_therm3d.py} +27 -24
- pyvale/examples/basics/{ex1_4_basicerrors_therm3d.py → ex1d_basicerrors_therm3d.py} +32 -29
- pyvale/examples/basics/{ex1_5_fielderrs_therm3d.py → ex1e_fielderrs_therm3d.py} +19 -15
- pyvale/examples/basics/{ex1_6_caliberrs_therm2d.py → ex1f_caliberrs_therm2d.py} +20 -16
- pyvale/examples/basics/{ex1_7_spatavg_therm2d.py → ex1g_spatavg_therm2d.py} +19 -16
- pyvale/examples/basics/{ex2_1_basicvectors_disp2d.py → ex2a_basicvectors_disp2d.py} +13 -10
- pyvale/examples/basics/{ex2_2_vectorsens_disp2d.py → ex2b_vectorsens_disp2d.py} +19 -15
- pyvale/examples/basics/{ex2_3_sensangle_disp2d.py → ex2c_sensangle_disp2d.py} +21 -18
- pyvale/examples/basics/{ex2_4_chainfielderrs_disp2d.py → ex2d_chainfielderrs_disp2d.py} +31 -29
- pyvale/examples/basics/{ex2_5_vectorfields3d_disp3d.py → ex2e_vectorfields3d_disp3d.py} +21 -18
- pyvale/examples/basics/{ex3_1_basictensors_strain2d.py → ex3a_basictensors_strain2d.py} +16 -14
- pyvale/examples/basics/{ex3_2_tensorsens2d_strain2d.py → ex3b_tensorsens2d_strain2d.py} +17 -14
- pyvale/examples/basics/{ex3_3_tensorsens3d_strain3d.py → ex3c_tensorsens3d_strain3d.py} +25 -22
- pyvale/examples/basics/{ex4_1_expsim2d_thermmech2d.py → ex4a_expsim2d_thermmech2d.py} +17 -14
- pyvale/examples/basics/{ex4_2_expsim3d_thermmech3d.py → ex4b_expsim3d_thermmech3d.py} +37 -34
- pyvale/examples/basics/ex5_nomesh.py +24 -0
- pyvale/examples/dic/ex1_2_blenderdeformed.py +174 -0
- pyvale/examples/dic/ex1_region_of_interest.py +6 -3
- pyvale/examples/dic/ex2_plate_with_hole.py +21 -18
- pyvale/examples/dic/ex3_plate_with_hole_strain.py +8 -6
- pyvale/examples/dic/ex4_dic_blender.py +17 -15
- pyvale/examples/dic/ex5_dic_challenge.py +19 -14
- pyvale/examples/genanalyticdata/ex1_1_scalarvisualisation.py +16 -10
- pyvale/examples/genanalyticdata/ex1_2_scalarcasebuild.py +3 -3
- pyvale/examples/genanalyticdata/ex2_1_analyticsensors.py +29 -23
- pyvale/examples/genanalyticdata/ex2_2_analyticsensors_nomesh.py +67 -0
- pyvale/examples/imagedef2d/ex_imagedef2d_todisk.py +12 -9
- pyvale/examples/mooseherder/ex0_create_moose_config.py +65 -0
- pyvale/examples/mooseherder/ex1a_modify_moose_input.py +71 -0
- pyvale/examples/mooseherder/ex1b_modify_gmsh_input.py +69 -0
- pyvale/examples/mooseherder/ex2a_run_moose_once.py +80 -0
- pyvale/examples/mooseherder/ex2b_run_gmsh_once.py +64 -0
- pyvale/examples/mooseherder/ex2c_run_both_once.py +114 -0
- pyvale/examples/mooseherder/ex3_run_moose_seq_para.py +157 -0
- pyvale/examples/mooseherder/ex4_run_gmsh-moose_seq_para.py +176 -0
- pyvale/examples/mooseherder/ex5_run_moose_paramulti.py +136 -0
- pyvale/examples/mooseherder/ex6_read_moose_exodus.py +163 -0
- pyvale/examples/mooseherder/ex7a_read_moose_herd_results.py +153 -0
- pyvale/examples/mooseherder/ex7b_read_multi_herd_results.py +116 -0
- pyvale/examples/mooseherder/ex7c_read_multi_gmshmoose_results.py +127 -0
- pyvale/examples/mooseherder/ex7d_readconfig_multi_gmshmoose_results.py +143 -0
- pyvale/examples/mooseherder/ex8_read_existing_sweep_output.py +72 -0
- pyvale/examples/renderblender/ex1_1_blenderscene.py +24 -20
- pyvale/examples/renderblender/ex1_2_blenderdeformed.py +22 -18
- pyvale/examples/renderblender/ex2_1_stereoscene.py +36 -29
- pyvale/examples/renderblender/ex2_2_stereodeformed.py +26 -20
- pyvale/examples/renderblender/ex3_1_blendercalibration.py +24 -17
- pyvale/examples/renderrasterisation/ex_rastenp.py +14 -12
- pyvale/examples/renderrasterisation/ex_rastercyth_oneframe.py +14 -15
- pyvale/examples/renderrasterisation/ex_rastercyth_static_cypara.py +13 -11
- pyvale/examples/renderrasterisation/ex_rastercyth_static_pypara.py +13 -11
- pyvale/mooseherder/__init__.py +32 -0
- pyvale/mooseherder/directorymanager.py +416 -0
- pyvale/mooseherder/exodusreader.py +763 -0
- pyvale/mooseherder/gmshrunner.py +163 -0
- pyvale/mooseherder/inputmodifier.py +236 -0
- pyvale/mooseherder/mooseconfig.py +226 -0
- pyvale/mooseherder/mooseherd.py +527 -0
- pyvale/mooseherder/mooserunner.py +303 -0
- pyvale/mooseherder/outputreader.py +22 -0
- pyvale/mooseherder/simdata.py +92 -0
- pyvale/mooseherder/simrunner.py +31 -0
- pyvale/mooseherder/sweepreader.py +356 -0
- pyvale/mooseherder/sweeptools.py +76 -0
- pyvale/sensorsim/__init__.py +82 -0
- pyvale/{camera.py → sensorsim/camera.py} +7 -7
- pyvale/{camerasensor.py → sensorsim/camerasensor.py} +7 -7
- pyvale/{camerastereo.py → sensorsim/camerastereo.py} +2 -2
- pyvale/{cameratools.py → sensorsim/cameratools.py} +4 -4
- pyvale/{cython → sensorsim/cython}/rastercyth.c +596 -596
- pyvale/{cython → sensorsim/cython}/rastercyth.cp311-win32.pyd +0 -0
- pyvale/{cython → sensorsim/cython}/rastercyth.py +16 -17
- pyvale/{errorcalculator.py → sensorsim/errorcalculator.py} +1 -1
- pyvale/{errorintegrator.py → sensorsim/errorintegrator.py} +2 -2
- pyvale/{errorrand.py → sensorsim/errorrand.py} +4 -4
- pyvale/{errorsyscalib.py → sensorsim/errorsyscalib.py} +2 -2
- pyvale/{errorsysdep.py → sensorsim/errorsysdep.py} +2 -2
- pyvale/{errorsysfield.py → sensorsim/errorsysfield.py} +8 -8
- pyvale/{errorsysindep.py → sensorsim/errorsysindep.py} +3 -3
- pyvale/sensorsim/exceptions.py +8 -0
- pyvale/{experimentsimulator.py → sensorsim/experimentsimulator.py} +23 -3
- pyvale/{field.py → sensorsim/field.py} +1 -1
- pyvale/{fieldconverter.py → sensorsim/fieldconverter.py} +72 -19
- pyvale/sensorsim/fieldinterp.py +37 -0
- pyvale/sensorsim/fieldinterpmesh.py +124 -0
- pyvale/sensorsim/fieldinterppoints.py +55 -0
- pyvale/{fieldsampler.py → sensorsim/fieldsampler.py} +4 -4
- pyvale/{fieldscalar.py → sensorsim/fieldscalar.py} +28 -24
- pyvale/{fieldtensor.py → sensorsim/fieldtensor.py} +33 -31
- pyvale/{fieldvector.py → sensorsim/fieldvector.py} +33 -31
- pyvale/{imagedef2d.py → sensorsim/imagedef2d.py} +9 -5
- pyvale/{integratorfactory.py → sensorsim/integratorfactory.py} +6 -6
- pyvale/{integratorquadrature.py → sensorsim/integratorquadrature.py} +3 -3
- pyvale/{integratorrectangle.py → sensorsim/integratorrectangle.py} +3 -3
- pyvale/{integratorspatial.py → sensorsim/integratorspatial.py} +1 -1
- pyvale/{rastercy.py → sensorsim/rastercy.py} +5 -5
- pyvale/{rasternp.py → sensorsim/rasternp.py} +9 -9
- pyvale/{rasteropts.py → sensorsim/rasteropts.py} +1 -1
- pyvale/{renderer.py → sensorsim/renderer.py} +1 -1
- pyvale/{rendermesh.py → sensorsim/rendermesh.py} +5 -5
- pyvale/{renderscene.py → sensorsim/renderscene.py} +2 -2
- pyvale/{sensorarray.py → sensorsim/sensorarray.py} +1 -1
- pyvale/{sensorarrayfactory.py → sensorsim/sensorarrayfactory.py} +12 -12
- pyvale/{sensorarraypoint.py → sensorsim/sensorarraypoint.py} +10 -8
- pyvale/{sensordata.py → sensorsim/sensordata.py} +1 -1
- pyvale/{sensortools.py → sensorsim/sensortools.py} +2 -20
- pyvale/sensorsim/simtools.py +174 -0
- pyvale/{visualexpplotter.py → sensorsim/visualexpplotter.py} +3 -3
- pyvale/{visualimages.py → sensorsim/visualimages.py} +2 -2
- pyvale/{visualsimanimator.py → sensorsim/visualsimanimator.py} +4 -4
- pyvale/{visualsimplotter.py → sensorsim/visualsimplotter.py} +5 -5
- pyvale/{visualsimsensors.py → sensorsim/visualsimsensors.py} +12 -12
- pyvale/{visualtools.py → sensorsim/visualtools.py} +1 -1
- pyvale/{visualtraceplotter.py → sensorsim/visualtraceplotter.py} +2 -2
- pyvale/simcases/case17.geo +3 -0
- pyvale/simcases/case17.i +4 -4
- pyvale/simcases/run_1case.py +1 -9
- pyvale/simcases/run_all_cases.py +1 -1
- pyvale/simcases/run_build_case.py +1 -1
- pyvale/simcases/run_example_cases.py +1 -1
- pyvale/verif/__init__.py +12 -0
- pyvale/{analyticsimdatafactory.py → verif/analyticsimdatafactory.py} +2 -2
- pyvale/{analyticsimdatagenerator.py → verif/analyticsimdatagenerator.py} +2 -2
- pyvale/verif/psens.py +125 -0
- pyvale/verif/psensconst.py +18 -0
- pyvale/verif/psensmech.py +227 -0
- pyvale/verif/psensmultiphys.py +187 -0
- pyvale/verif/psensscalar.py +347 -0
- pyvale/verif/psenstensor.py +123 -0
- pyvale/verif/psensvector.py +116 -0
- {pyvale-2025.7.2.dist-info → pyvale-2025.8.1.dist-info}/METADATA +6 -7
- pyvale-2025.8.1.dist-info/RECORD +260 -0
- pyvale/dataset.py +0 -415
- pyvale/simtools.py +0 -67
- pyvale-2025.7.2.dist-info/RECORD +0 -212
- /pyvale/{blendercalibrationdata.py → blender/blendercalibrationdata.py} +0 -0
- /pyvale/{dicchecks.py → dic/dicchecks.py} +0 -0
- /pyvale/{dicspecklegenerator.py → dic/dicspecklegenerator.py} +0 -0
- /pyvale/{dicspecklequality.py → dic/dicspecklequality.py} +0 -0
- /pyvale/{dicstrainresults.py → dic/dicstrainresults.py} +0 -0
- /pyvale/{cameradata.py → sensorsim/cameradata.py} +0 -0
- /pyvale/{cameradata2d.py → sensorsim/cameradata2d.py} +0 -0
- /pyvale/{errordriftcalc.py → sensorsim/errordriftcalc.py} +0 -0
- /pyvale/{fieldtransform.py → sensorsim/fieldtransform.py} +0 -0
- /pyvale/{generatorsrandom.py → sensorsim/generatorsrandom.py} +0 -0
- /pyvale/{imagetools.py → sensorsim/imagetools.py} +0 -0
- /pyvale/{integratortype.py → sensorsim/integratortype.py} +0 -0
- /pyvale/{output.py → sensorsim/output.py} +0 -0
- /pyvale/{raster.py → sensorsim/raster.py} +0 -0
- /pyvale/{sensordescriptor.py → sensorsim/sensordescriptor.py} +0 -0
- /pyvale/{visualimagedef.py → sensorsim/visualimagedef.py} +0 -0
- /pyvale/{visualopts.py → sensorsim/visualopts.py} +0 -0
- /pyvale/{analyticmeshgen.py → verif/analyticmeshgen.py} +0 -0
- {pyvale-2025.7.2.dist-info → pyvale-2025.8.1.dist-info}/WHEEL +0 -0
- {pyvale-2025.7.2.dist-info → pyvale-2025.8.1.dist-info}/licenses/LICENSE +0 -0
- {pyvale-2025.7.2.dist-info → pyvale-2025.8.1.dist-info}/top_level.txt +0 -0
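The substance of this release is a package-level restructure: the flat `pyvale` namespace is split into subpackages (`pyvale.sensorsim`, `pyvale.blender`, `pyvale.dataset`, `pyvale.dic`, `pyvale.verif`), the mooseherder tooling now ships inside the wheel as `pyvale.mooseherder`, and every example script is updated to the new import style. Below is a minimal migration sketch assembled from the added lines and unchanged hunk context in the example diffs that follow; the old-to-new name mapping is inferred from those diffs, not taken from an official migration guide.

# Migration sketch, 2025.7.2 flat names -> 2025.8.1 subpackages (inferred).
import pyvale.sensorsim as sens    # cameras, fields, sensors, rasterisation
import pyvale.dataset as dataset   # bundled datasets and path helpers
import pyvale.blender as blender   # Blender scene building and rendering
import pyvale.mooseherder as mh    # MOOSE/Gmsh herding and exodus reading

# Class prefixes drop away inside the subpackages, per the hunks below:
#   pyvale.BlenderScene()            -> blender.Scene()
#   pyvale.BlenderTools.*            -> blender.Tools.*
#   pyvale.CameraData / CameraTools  -> sens.CameraData / sens.CameraTools
#   pyvale.DataSet.*_path()          -> dataset.*_path()

# The load/scale/mesh pipeline shared by the updated examples:
data_path = dataset.render_mechanical_3d_path()
sim_data = mh.ExodusReader(data_path).read_all_sim_data()

disp_comps = ("disp_x", "disp_y", "disp_z")
sim_data = sens.scale_length_units(scale=1000.0,      # m -> mm
                                   sim_data=sim_data,
                                   disp_comps=disp_comps)
render_mesh = sens.create_render_mesh(sim_data,
                                      ("disp_y", "disp_x"),
                                      sim_spat_dim=3,
                                      field_disp_keys=disp_comps)
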
pyvale/examples/renderblender/ex1_2_blenderdeformed.py

@@ -5,7 +5,7 @@
 # ==============================================================================
 
 """
-
+Deforming a sample with 2D DIC
 ===============================================
 
 This example follows a similar workflow to the previous Blender example.
@@ -18,15 +18,19 @@ Test case: mechanical analysis of a plate with a hole loaded in tension.
 import numpy as np
 from scipy.spatial.transform import Rotation
 from pathlib import Path
-
-
+
+# Pyvale imports
+import pyvale.sensorsim as sens
+import pyvale.dataset as dataset
+import pyvale.blender as blender
+import pyvale.mooseherder as mh
 
 # %%
 # The simulation results are loaded in here in the same way as the previous
 # example. As mentioned this `data_path` can be replaced with your own MOOSE
 # simulation output in exodus format (*.e).
 
-data_path =
+data_path = dataset.render_mechanical_3d_path()
 sim_data = mh.ExodusReader(data_path).read_all_sim_data()
 
 # %%
@@ -37,11 +41,11 @@ sim_data = mh.ExodusReader(data_path).read_all_sim_data()
 # 3D deformation test case, displacement is expected in the x, y and z directions.
 
 disp_comps = ("disp_x","disp_y", "disp_z")
-sim_data =
+sim_data = sens.scale_length_units(scale=1000.0,
                                    sim_data=sim_data,
                                    disp_comps=disp_comps)
 
-render_mesh =
+render_mesh = sens.create_render_mesh(sim_data,
                                       ("disp_y","disp_x"),
                                       sim_spat_dim=3,
                                       field_disp_keys=disp_comps)
@@ -62,7 +66,7 @@ base_dir = Path.cwd()
 # A scene is initialised using the `BlenderScene` class. All the subsequent
 # objects and actions necessary are then methods of this class.
 
-scene =
+scene = blender.Scene()
 
 # %%
 # The next thing that can be added to the scene is a sample.
@@ -75,9 +79,9 @@ scene = pyvale.BlenderScene()
 part = scene.add_part(render_mesh, sim_spat_dim=3)
 # Set the part location
 part_location = np.array([0, 0, 0])
-
+blender.Tools.move_blender_obj(part=part, pos_world=part_location)
 part_rotation = Rotation.from_euler("xyz", [0, 0, 0], degrees=True)
-
+blender.Tools.rotate_blender_obj(part=part, rot_world=part_rotation)
 
 # %%
 # A camera can then be added to the scene.
@@ -86,7 +90,7 @@ pyvale.BlenderTools.rotate_blender_obj(part=part, rot_world=part_rotation)
 # This camera can then be added to the Blender scene.
 # The camera can also be moved and rotated.
 
-cam_data =
+cam_data = sens.CameraData(pixels_num=np.array([1540, 1040]),
                            pixels_size=np.array([0.00345, 0.00345]),
                            pos_world=(0, 0, 400),
                            rot_world=Rotation.from_euler("xyz", [0, 0, 0]),
@@ -101,7 +105,7 @@ camera.rotation_euler = (0, 0, 0) # NOTE: The default is an XYZ Euler angle
 # Blender offers different light types: Point, Sun, Spot and Area.
 # The light can also be moved and rotated like the camera.
 
-light_data =
+light_data = blender.LightData(type=blender.LightType.POINT,
                                pos_world=(0, 0, 400),
                                rot_world=Rotation.from_euler("xyz",
                                                              [0, 0, 0]),
@@ -121,9 +125,9 @@ light.rotation_euler = (0, 0, 0)
 # It should be noted that for a bigger camera or sample you may need to generate
 # a larger speckle pattern.
 
-material_data =
-speckle_path =
-mm_px_resolution =
+material_data = blender.MaterialData()
+speckle_path = dataset.dic_pattern_5mpx_path()
+mm_px_resolution = sens.CameraTools.calculate_mm_px_resolution(cam_data)
 scene.add_speckle(part=part,
                   speckle_path=speckle_path,
                   mat_data=material_data,
@@ -137,9 +141,9 @@ scene.add_speckle(part=part,
 # Firstly, all the rendering parameters must be set, including parameters such as
 # the number of threads to use.
 
-render_data =
-
-
+render_data = blender.RenderData(cam_data=cam_data,
+                                 base_dir=base_dir,
+                                 threads=8)
 
 # %%
 # A series of deformed images can then be rendered.
@@ -167,4 +171,4 @@ print("Save directory of the image:", (render_data.base_dir / "blenderimages"))
 # There is also the option to save the scene as a Blender project file.
 # This file can be opened with the Blender GUI to view the scene.
 
-
+blender.Tools.save_blender_file(base_dir)
pyvale/examples/renderblender/ex2_1_stereoscene.py

@@ -5,7 +5,7 @@
 # ==============================================================================
 
 """
-
+Creating a scene with stereo DIC
 -------------------------------------------------
 
 This example takes you through creating a scene and adding all the necessary
@@ -18,15 +18,19 @@ Test case: mechanical analysis of a plate with a hole loaded in tension.
 import numpy as np
 from scipy.spatial.transform import Rotation
 from pathlib import Path
-
-
+
+# Pyvale imports
+import pyvale.sensorsim as sens
+import pyvale.dataset as dataset
+import pyvale.blender as blender
+import pyvale.mooseherder as mh
 
 # %%
 # The simulation results are loaded in here in the same way as the previous
 # example. As mentioned this `data_path` can be replaced with your own MOOSE
 # simulation output in exodus format (*.e).
 
-data_path =
+data_path = dataset.render_mechanical_3d_path()
 sim_data = mh.ExodusReader(data_path).read_all_sim_data()
 
 # %%
@@ -37,11 +41,11 @@ sim_data = mh.ExodusReader(data_path).read_all_sim_data()
 # 3D deformation test case, displacement is expected in the x, y and z directions.
 
 disp_comps = ("disp_x","disp_y", "disp_z")
-sim_data =
-
-
+sim_data = sens.scale_length_units(scale=1000.0,
+                                   sim_data=sim_data,
+                                   disp_comps=disp_comps)
 
-render_mesh =
+render_mesh = sens.create_render_mesh(sim_data,
                                       ("disp_y","disp_x"),
                                       sim_spat_dim=3,
                                       field_disp_keys=disp_comps)
@@ -59,9 +63,9 @@ base_dir = Path.cwd()
 # Creating the scene
 # ^^^^^^^^^^^^^^^^^^
 # In order to create a DIC setup in Blender, first a scene must be created.
-# A scene is initialised using the `
+# A scene is initialised using the `blender.Scene` class. All the subsequent
 # objects and actions necessary are then methods of this class.
-scene =
+scene = blender.Scene()
 
 # %%
 # The next thing that can be added to the scene is a sample.
@@ -73,10 +77,10 @@ scene = pyvale.BlenderScene()
 part = scene.add_part(render_mesh, sim_spat_dim=3)
 # Set the part location
 part_location = np.array([0, 0, 0])
-
+blender.Tools.move_blender_obj(part=part, pos_world=part_location)
 # Set part rotation
 part_rotation = Rotation.from_euler("xyz", [0, 0, 0], degrees=True)
-
+blender.Tools.rotate_blender_obj(part=part, rot_world=part_rotation)
 
 # %%
 # The cameras can then be initialised. A stereo camera system is defined by a
@@ -95,23 +99,26 @@ pyvale.BlenderTools.rotate_blender_obj(part=part, rot_world=part_rotation)
 # between the two. The cameras can then be added to the Blender scene using the
 # `add_stereo_system` method.
 
-cam_data_0 =
-
-
-
-
-
+cam_data_0 = sens.CameraData(pixels_num=np.array([1540, 1040]),
+                             pixels_size=np.array([0.00345, 0.00345]),
+                             pos_world=np.array([0, 0, 400]),
+                             rot_world=Rotation.from_euler("xyz", [0, 0, 0]),
+                             roi_cent_world=(0, 0, 0),
+                             focal_length=15.0)
 # Set this to "symmetric" to get a symmetric stereo system or set this to
 # "faceon" to get a face-on stereo system
 stereo_setup = "faceon"
 if stereo_setup == "symmetric":
-    stereo_system =
+    stereo_system = sens.CameraTools.symmetric_stereo_cameras(
         cam_data_0=cam_data_0,
         stereo_angle=15.0)
-
-    stereo_system =
+elif stereo_setup == "faceon":
+    stereo_system = sens.CameraTools.faceon_stereo_cameras(
         cam_data_0=cam_data_0,
         stereo_angle=15.0)
+else:
+    raise ValueError(f"Unknown stereo_setup: {stereo_setup}")
+
 cam0, cam1 = scene.add_stereo_system(stereo_system)
 
 # %%
@@ -130,7 +137,7 @@ stereo_system.save_calibration(base_dir)
 # Blender offers different light types: Point, Sun, Spot and Area.
 # The light can also be moved and rotated like the camera.
 
-light_data =
+light_data = blender.LightData(type=blender.LightType.POINT,
                                pos_world=(0, 0, 400),
                                rot_world=Rotation.from_euler("xyz",
                                                              [0, 0, 0]),
@@ -150,10 +157,10 @@ light.rotation_euler = (0, 0, 0) # NOTE: The default is an XYZ Euler angle
 # It should be noted that for a bigger camera or sample you may need to generate
 # a larger speckle pattern.
 
-material_data =
-speckle_path =
+material_data = blender.MaterialData()
+speckle_path = dataset.dic_pattern_5mpx_path()
 
-mm_px_resolution =
+mm_px_resolution = sens.CameraTools.calculate_mm_px_resolution(cam_data_0)
 scene.add_speckle(part=part,
                   speckle_path=speckle_path,
                   mat_data=material_data,
@@ -168,10 +175,10 @@ scene.add_speckle(part=part,
 # Differently to a 2D DIC system, both cameras' parameters must be specified in
 # the `RenderData` object.
 
-render_data =
+render_data = blender.RenderData(cam_data=(stereo_system.cam_data_0,
                                            stereo_system.cam_data_1),
-
-
+                                 base_dir=base_dir,
+                                 threads=8)
 
 # %%
 # A single set of images of the scene can then be rendered.
@@ -192,4 +199,4 @@ print("Save directory of the image:", (render_data.base_dir / "blenderimages"))
 # There is also the option to save the scene as a Blender project file.
 # This file can be opened with the Blender GUI to view the scene.
 
-
+blender.Tools.save_blender_file(base_dir)
pyvale/examples/renderblender/ex2_2_stereodeformed.py

@@ -5,7 +5,7 @@
 # ==============================================================================
 
 """
-
+Deforming a sample with stereo DIC
 ===================================================
 
 This example takes you through creating stereo DIC scene, applying deformation
@@ -17,15 +17,19 @@ Test case: mechanical analysis of a plate with a hole loaded in tension.
 import numpy as np
 from scipy.spatial.transform import Rotation
 from pathlib import Path
-
-
+
+# Pyvale imports
+import pyvale.sensorsim as sens
+import pyvale.dataset as dataset
+import pyvale.blender as blender
+import pyvale.mooseherder as mh
 
 # %%
 # The simulation results are loaded in here in the same way as the previous
 # example. As mentioned this `data_path` can be replaced with your own MOOSE
 # simulation output in exodus format (*.e).
 
-data_path =
+data_path = dataset.render_mechanical_3d_path()
 sim_data = mh.ExodusReader(data_path).read_all_sim_data()
 
 # %%
@@ -36,11 +40,11 @@ sim_data = mh.ExodusReader(data_path).read_all_sim_data()
 # 3D deformation test case, displacement is expected in the x, y and z directions.
 
 disp_comps = ("disp_x","disp_y", "disp_z")
-sim_data =
+sim_data = sens.scale_length_units(scale=1000.0,
                                    sim_data=sim_data,
                                    disp_comps=disp_comps)
 
-render_mesh =
+render_mesh = sens.create_render_mesh(sim_data,
                                       ("disp_y","disp_x"),
                                       sim_spat_dim=3,
                                       field_disp_keys=disp_comps)
@@ -60,7 +64,7 @@ base_dir = Path.cwd()
 # In order to create a DIC setup in Blender, first a scene must be created.
 # A scene is initialised using the `BlenderScene` class. All the subsequent
 # objects and actions necessary are then methods of this class.
-scene =
+scene = blender.Scene()
 
 # %%
 # The next thing that can be added to the scene is a sample.
@@ -72,10 +76,10 @@ scene = pyvale.BlenderScene()
 part = scene.add_part(render_mesh, sim_spat_dim=3)
 # Set the part location
 part_location = np.array([0, 0, 0])
-
+blender.Tools.move_blender_obj(part=part, pos_world=part_location)
 # Set part rotation
 part_rotation = Rotation.from_euler("xyz", [0, 0, 0], degrees=True)
-
+blender.Tools.rotate_blender_obj(part=part, rot_world=part_rotation)
 
 # %%
 # The cameras can then be initialised. A stereo camera system is defined by a
@@ -94,7 +98,7 @@ pyvale.BlenderTools.rotate_blender_obj(part=part, rot_world=part_rotation)
 # between the two. The cameras can then be added to the Blender scene using the
 # `add_stereo_system` method.
 
-cam_data_0 =
+cam_data_0 = sens.CameraData(pixels_num=np.array([1540, 1040]),
                              pixels_size=np.array([0.00345, 0.00345]),
                              pos_world=np.array([0, 0, 400]),
                              rot_world=Rotation.from_euler("xyz", [0, 0, 0]),
@@ -104,13 +108,16 @@ cam_data_0 = pyvale.CameraData(pixels_num=np.array([1540, 1040]),
 # "faceon" to get a face-on stereo system
 stereo_setup = "faceon"
 if stereo_setup == "symmetric":
-    stereo_system =
+    stereo_system = sens.CameraTools.symmetric_stereo_cameras(
         cam_data_0=cam_data_0,
         stereo_angle=15.0)
-
-    stereo_system =
+elif stereo_setup == "faceon":
+    stereo_system = sens.CameraTools.faceon_stereo_cameras(
         cam_data_0=cam_data_0,
         stereo_angle=15.0)
+else:
+    raise ValueError(f"Unknown stereo_setup: {stereo_setup}")
+
 cam0, cam1 = scene.add_stereo_system(stereo_system)
 
 # %%
@@ -128,7 +135,7 @@ stereo_system.save_calibration(base_dir)
 # A light can the be added to the scene.
 # Blender offers different light types: Point, Sun, Spot and Area.
 # The light can also be moved and rotated like the camera.
-light_data =
+light_data = blender.LightData(type=blender.LightType.POINT,
                                pos_world=(0, 0, 400),
                                rot_world=Rotation.from_euler("xyz",
                                                              [0, 0, 0]),
@@ -138,8 +145,8 @@ light.location = (0, 0, 410)
 light.rotation_euler = (0, 0, 0) # NOTE: The default is an XYZ Euler angle
 
 # Apply the speckle pattern
-material_data =
-speckle_path =
+material_data = blender.MaterialData()
+speckle_path = dataset.dic_pattern_5mpx_path()
 # NOTE: If you wish to use a bigger camera, you will need to generate a
 # bigger speckle pattern generator
 
@@ -154,7 +161,7 @@ speckle_path = pyvale.DataSet.dic_pattern_5mpx_path()
 # It should be noted that for a bigger camera or sample you may need to generate
 # a larger speckle pattern.
 
-mm_px_resolution =
+mm_px_resolution = sens.CameraTools.calculate_mm_px_resolution(cam_data_0)
 scene.add_speckle(part=part,
                   speckle_path=speckle_path,
                   mat_data=material_data,
@@ -169,7 +176,7 @@ scene.add_speckle(part=part,
 # the number of threads to use.
 # Differently to a 2D DIC system, both cameras' parameters must be specified in
 # the `RenderData` object.
-render_data =
+render_data = blender.RenderData(cam_data=(stereo_system.cam_data_0,
                                            stereo_system.cam_data_1),
                                  base_dir=base_dir,
                                  threads=8)
@@ -200,5 +207,4 @@ print("Save directory of the image:", (render_data.base_dir / "blenderimages"))
 # There is also the option to save the scene as a Blender project file.
 # This file can be opened with the Blender GUI to view the scene.
 
-
-
+blender.Tools.save_blender_file(base_dir)
pyvale/examples/renderblender/ex3_1_blendercalibration.py

@@ -5,7 +5,7 @@
 # ==============================================================================
 
 """
-
+Rendering calibration images
 ---------------------------------------------
 
 This example takes you through how to render calibration images for a given DIC
@@ -14,7 +14,11 @@ setup.
 import numpy as np
 from scipy.spatial.transform import Rotation
 from pathlib import Path
-
+
+# Pyvale imports
+import pyvale.sensorsim as sens
+import pyvale.blender as blender
+import pyvale.dataset as dataset
 
 # %%
 # Firstly, a save path must be set.
@@ -31,7 +35,7 @@ base_dir = Path.cwd()
 # In order to create a DIC setup in Blender, first a scene must be created.
 # A scene is initialised using the `BlenderScene` class. All the subsequent
 # objects and actions necessary are then methods of this class.
-scene =
+scene = blender.Scene()
 
 # %%
 # The next thing to add to the scene is the calibration target.
@@ -57,7 +61,7 @@ target = scene.add_cal_target(target_size=np.array([150, 100, 10]))
 # are the camera parameters for the first camera, and the desired stereo angle
 # between the two. The cameras can then be added to the Blender scene using the
 # `add_stereo_system` method.
-cam_data_0 =
+cam_data_0 = sens.CameraData(pixels_num=np.array([1540, 1040]),
                              pixels_size=np.array([0.00345, 0.00345]),
                              pos_world=np.array([0, 0, 400]),
                              rot_world=Rotation.from_euler("xyz", [0, 0, 0]),
@@ -67,13 +71,16 @@ cam_data_0 = pyvale.CameraData(pixels_num=np.array([1540, 1040]),
 # "faceon" to get a face-on stereo system
 stereo_setup = "faceon"
 if stereo_setup == "symmetric":
-    stereo_system =
+    stereo_system = sens.CameraTools.symmetric_stereo_cameras(
         cam_data_0=cam_data_0,
         stereo_angle=15.0)
-
-    stereo_system =
+elif stereo_setup == "faceon":
+    stereo_system = sens.CameraTools.faceon_stereo_cameras(
         cam_data_0=cam_data_0,
         stereo_angle=15.0)
+else:
+    raise ValueError(f"Unknown stereo_setup: {stereo_setup}")
+
 scene.add_stereo_system(stereo_system)
 
 # %%
@@ -95,7 +102,7 @@ stereo_system.save_calibration(base_dir)
 # Blender offers different light types: Point, Sun, Spot and Area.
 # The light can also be moved and rotated like the camera.
 
-light_data =
+light_data = blender.LightData(type=blender.LightType.POINT,
                                pos_world=(0, 0, 200),
                                rot_world=Rotation.from_euler("xyz",
                                                              [0, 0, 0]),
@@ -112,11 +119,11 @@ light.rotation_euler = (0, 0, 0) # NOTE: The default is an XYZ Euler angle
 # calibration target pattern will not be scaled in the same way as a speckle
 # pattern.
 
-material_data =
-
-mm_px_resolution =
+material_data = blender.MaterialData()
+cal_target = dataset.cal_target()
+mm_px_resolution = sens.CameraTools.calculate_mm_px_resolution(cam_data_0)
 scene.add_speckle(part=target,
-                  speckle_path=
+                  speckle_path=cal_target,
                   mat_data=material_data,
                   mm_px_resolution=mm_px_resolution,
                   cal=True)
@@ -128,7 +135,7 @@ scene.add_speckle(part=target,
 # rendered.Firstly, all the rendering parameters must be set, including
 # parameters such as the number of threads to use.
 
-render_data =
+render_data = blender.RenderData(cam_data=(stereo_system.cam_data_0,
                                            stereo_system.cam_data_1),
                                  base_dir=base_dir)
 
@@ -141,7 +148,7 @@ render_data = pyvale.RenderData(cam_data=(stereo_system.cam_data_0,
 # passed in they will be initialised from the FOV to cover the whole FOV of the
 # cameras.
 
-calibration_data =
+calibration_data = blender.CalibrationData(angle_lims=(-10, 10),
                                            angle_step=5,
                                            plunge_lims=(-5, 5),
                                            plunge_step=5)
@@ -151,7 +158,7 @@ calibration_data = pyvale.CalibrationData(angle_lims=(-10, 10),
 # rendered before rendering them. The only input that is needed is the
 # `calibration_data` specified above.
 
-number_calibration_images =
+number_calibration_images = blender.Tools.number_calibration_images(calibration_data)
 print("Number of calibration images to be rendered:", number_calibration_images)
 
 # %%
@@ -159,7 +166,7 @@ print("Number of calibration images to be rendered:", number_calibration_images)
 # calibration target according to movement limits set above, and will also move
 # the target rigidly across the FOV of the camera, in order to characterise the
 # entire FOV of the cameras.
-
+blender.Tools.render_calibration_images(render_data,
                                         calibration_data,
                                         target)
 
@@ -172,4 +179,4 @@ print("Save directory of the images:", (render_data.base_dir / "calimages"))
 # There is also the option to save the scene as a Blender project file.
 # This file can be opened with the Blender GUI to view the scene.
 
-
+blender.Tools.save_blender_file(base_dir)
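One change recurs across the three stereo examples above: selection of the stereo rig is now an explicit if/elif/else that raises on unrecognised values (the old control flow is only partially captured here, so the previous behaviour is not restated). The sketch below repeats that pattern verbatim from the added lines, with the cam_data_0 construction copied from the same hunks so it is self-contained:

import numpy as np
from scipy.spatial.transform import Rotation
import pyvale.sensorsim as sens

# First camera of the stereo pair, as built in the examples above.
cam_data_0 = sens.CameraData(pixels_num=np.array([1540, 1040]),
                             pixels_size=np.array([0.00345, 0.00345]),
                             pos_world=np.array([0, 0, 400]),
                             rot_world=Rotation.from_euler("xyz", [0, 0, 0]),
                             roi_cent_world=(0, 0, 0),
                             focal_length=15.0)

# Explicit rig selection with a guard against unrecognised values.
stereo_setup = "faceon"
if stereo_setup == "symmetric":
    stereo_system = sens.CameraTools.symmetric_stereo_cameras(
        cam_data_0=cam_data_0,
        stereo_angle=15.0)
elif stereo_setup == "faceon":
    stereo_system = sens.CameraTools.faceon_stereo_cameras(
        cam_data_0=cam_data_0,
        stereo_angle=15.0)
else:
    raise ValueError(f"Unknown stereo_setup: {stereo_setup}")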
pyvale/examples/renderrasterisation/ex_rastenp.py

@@ -10,8 +10,10 @@ import time
 import numpy as np
 from scipy.spatial.transform import Rotation
 import matplotlib.pyplot as plt
-
-
+
+# Pyvale imports
+import pyvale.sensorsim as sens
+import pyvale.mooseherder as mh
 
 # TODO
 # - Fix the image averaging function to use cython
@@ -34,17 +36,17 @@ def main() -> None:
     # This a path to an exodus *.e output file from MOOSE, this can be
     # replaced with a path to your own simulation file
     sim_path = Path.home()/"pyvale"/"src"/"pyvale"/"simcases"/"case21_out.e"
-    #sim_path =
+    #sim_path = sens.DataSet.render_mechanical_3d_path()
 
     disp_comps = ("disp_x","disp_y","disp_z")
 
     sim_data = mh.ExodusReader(sim_path).read_all_sim_data()
 
     # Scale m -> mm
-    sim_data =
+    sim_data = sens.scale_length_units(1000.0,sim_data,disp_comps)
 
     # Extracts the surface mesh from a full 3d simulation for rendering
-    render_mesh =
+    render_mesh = sens.create_render_mesh(sim_data,
                                           ("disp_y","disp_x"),
                                           sim_spat_dim=3,
                                           field_disp_keys=disp_comps)
@@ -59,7 +61,7 @@ def main() -> None:
     meshes[1].set_pos(np.array((0.0,12.5,0.0)))
     meshes[1].set_rot(Rotation.from_euler("zyx",(0.0, 0.0, 0.0),degrees=True))
     meshes[1].fields_disp = None
-    coords_all =
+    coords_all = sens.get_all_coords_world(meshes)
 
     print()
     print(80*"-")
@@ -85,7 +87,7 @@ def main() -> None:
     fov_scale_factor: float = 1.0
 
     (roi_pos_world,
-     cam_pos_world) =
+     cam_pos_world) = sens.CameraTools.pos_fill_frame(
         coords_world=coords_all,
         pixel_num=pixel_num,
         pixel_size=pixel_size,
@@ -94,7 +96,7 @@ def main() -> None:
         frame_fill=fov_scale_factor,
     )
 
-    cam_data =
+    cam_data = sens.CameraData(
         pixels_num=pixel_num,
         pixels_size=pixel_size,
         pos_world=cam_pos_world,
@@ -115,7 +117,7 @@ def main() -> None:
     print(cam_data.world_to_cam_mat)
     print(80*"-")
 
-    scene =
+    scene = sens.RenderScene([cam_data,cam_data],meshes)
 
     frames_per_camera = (scene.meshes[0].fields_render.shape[1]
                          *scene.meshes[0].fields_render.shape[2])
@@ -139,8 +141,8 @@ def main() -> None:
    print(80*"=")
     print("IN MEM: Raster Loop start")
 
-    raster_opts =
-    renderer =
+    raster_opts = sens.RasterOpts(parallel=8)
+    renderer = sens.RasterNumpy(raster_opts)
 
     time_start_loop = time.perf_counter()
 
@@ -183,7 +185,7 @@ def main() -> None:
 
     plot_on = True
     if plot_on:
-        (fig,ax) =
+        (fig,ax) = sens.plot_field_image(images[1][:,:,-1,0],
                                          title_str="Disp. y, [mm]")
 
         plt.show()