pyvale 2025.7.1__cp311-cp311-musllinux_1_2_aarch64.whl → 2025.8.1__cp311-cp311-musllinux_1_2_aarch64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of pyvale might be problematic.
- pyvale/__init__.py +12 -92
- pyvale/blender/__init__.py +23 -0
- pyvale/{pyvaleexceptions.py → blender/blenderexceptions.py} +0 -3
- pyvale/{blenderlightdata.py → blender/blenderlightdata.py} +3 -3
- pyvale/{blendermaterialdata.py → blender/blendermaterialdata.py} +1 -1
- pyvale/{blenderrenderdata.py → blender/blenderrenderdata.py} +5 -3
- pyvale/{blenderscene.py → blender/blenderscene.py} +33 -30
- pyvale/{blendertools.py → blender/blendertools.py} +14 -10
- pyvale/dataset/__init__.py +7 -0
- pyvale/dataset/dataset.py +443 -0
- pyvale/dic/__init__.py +20 -0
- pyvale/dic/cpp/dicfourier.cpp +36 -4
- pyvale/dic/cpp/dicinterpolator.cpp +56 -1
- pyvale/dic/cpp/dicmain.cpp +24 -19
- pyvale/dic/cpp/dicoptimizer.cpp +6 -1
- pyvale/dic/cpp/dicscanmethod.cpp +32 -32
- pyvale/dic/cpp/dicsignalhandler.cpp +16 -0
- pyvale/dic/cpp/dicstrain.cpp +7 -3
- pyvale/dic/cpp/dicutil.cpp +79 -23
- pyvale/{dic2d.py → dic/dic2d.py} +51 -29
- pyvale/dic/dic2dconv.py +6 -0
- pyvale/{dic2dcpp.cpython-311-aarch64-linux-musl.so → dic/dic2dcpp.cpython-311-aarch64-linux-musl.so} +0 -0
- pyvale/{dicchecks.py → dic/dicchecks.py} +28 -16
- pyvale/dic/dicdataimport.py +370 -0
- pyvale/{dicregionofinterest.py → dic/dicregionofinterest.py} +169 -12
- pyvale/{dicresults.py → dic/dicresults.py} +4 -1
- pyvale/{dicstrain.py → dic/dicstrain.py} +9 -9
- pyvale/examples/basics/{ex1_1_basicscalars_therm2d.py → ex1a_basicscalars_therm2d.py} +12 -9
- pyvale/examples/basics/{ex1_2_sensormodel_therm2d.py → ex1b_sensormodel_therm2d.py} +17 -14
- pyvale/examples/basics/{ex1_3_customsens_therm3d.py → ex1c_customsens_therm3d.py} +27 -24
- pyvale/examples/basics/{ex1_4_basicerrors_therm3d.py → ex1d_basicerrors_therm3d.py} +32 -29
- pyvale/examples/basics/{ex1_5_fielderrs_therm3d.py → ex1e_fielderrs_therm3d.py} +19 -15
- pyvale/examples/basics/{ex1_6_caliberrs_therm2d.py → ex1f_caliberrs_therm2d.py} +20 -16
- pyvale/examples/basics/{ex1_7_spatavg_therm2d.py → ex1g_spatavg_therm2d.py} +19 -16
- pyvale/examples/basics/{ex2_1_basicvectors_disp2d.py → ex2a_basicvectors_disp2d.py} +13 -10
- pyvale/examples/basics/{ex2_2_vectorsens_disp2d.py → ex2b_vectorsens_disp2d.py} +19 -15
- pyvale/examples/basics/{ex2_3_sensangle_disp2d.py → ex2c_sensangle_disp2d.py} +21 -18
- pyvale/examples/basics/{ex2_4_chainfielderrs_disp2d.py → ex2d_chainfielderrs_disp2d.py} +31 -29
- pyvale/examples/basics/{ex2_5_vectorfields3d_disp3d.py → ex2e_vectorfields3d_disp3d.py} +21 -18
- pyvale/examples/basics/{ex3_1_basictensors_strain2d.py → ex3a_basictensors_strain2d.py} +16 -14
- pyvale/examples/basics/{ex3_2_tensorsens2d_strain2d.py → ex3b_tensorsens2d_strain2d.py} +17 -14
- pyvale/examples/basics/{ex3_3_tensorsens3d_strain3d.py → ex3c_tensorsens3d_strain3d.py} +25 -22
- pyvale/examples/basics/{ex4_1_expsim2d_thermmech2d.py → ex4a_expsim2d_thermmech2d.py} +17 -14
- pyvale/examples/basics/{ex4_2_expsim3d_thermmech3d.py → ex4b_expsim3d_thermmech3d.py} +37 -34
- pyvale/examples/basics/ex5_nomesh.py +24 -0
- pyvale/examples/dic/ex1_2_blenderdeformed.py +174 -0
- pyvale/examples/dic/ex1_region_of_interest.py +6 -3
- pyvale/examples/dic/ex2_plate_with_hole.py +21 -18
- pyvale/examples/dic/ex3_plate_with_hole_strain.py +8 -6
- pyvale/examples/dic/ex4_dic_blender.py +17 -15
- pyvale/examples/dic/ex5_dic_challenge.py +19 -14
- pyvale/examples/genanalyticdata/ex1_1_scalarvisualisation.py +16 -10
- pyvale/examples/genanalyticdata/ex1_2_scalarcasebuild.py +3 -3
- pyvale/examples/genanalyticdata/ex2_1_analyticsensors.py +29 -23
- pyvale/examples/genanalyticdata/ex2_2_analyticsensors_nomesh.py +67 -0
- pyvale/examples/imagedef2d/ex_imagedef2d_todisk.py +12 -9
- pyvale/examples/mooseherder/ex0_create_moose_config.py +65 -0
- pyvale/examples/mooseherder/ex1a_modify_moose_input.py +71 -0
- pyvale/examples/mooseherder/ex1b_modify_gmsh_input.py +69 -0
- pyvale/examples/mooseherder/ex2a_run_moose_once.py +80 -0
- pyvale/examples/mooseherder/ex2b_run_gmsh_once.py +64 -0
- pyvale/examples/mooseherder/ex2c_run_both_once.py +114 -0
- pyvale/examples/mooseherder/ex3_run_moose_seq_para.py +157 -0
- pyvale/examples/mooseherder/ex4_run_gmsh-moose_seq_para.py +176 -0
- pyvale/examples/mooseherder/ex5_run_moose_paramulti.py +136 -0
- pyvale/examples/mooseherder/ex6_read_moose_exodus.py +163 -0
- pyvale/examples/mooseherder/ex7a_read_moose_herd_results.py +153 -0
- pyvale/examples/mooseherder/ex7b_read_multi_herd_results.py +116 -0
- pyvale/examples/mooseherder/ex7c_read_multi_gmshmoose_results.py +127 -0
- pyvale/examples/mooseherder/ex7d_readconfig_multi_gmshmoose_results.py +143 -0
- pyvale/examples/mooseherder/ex8_read_existing_sweep_output.py +72 -0
- pyvale/examples/renderblender/ex1_1_blenderscene.py +24 -20
- pyvale/examples/renderblender/ex1_2_blenderdeformed.py +22 -18
- pyvale/examples/renderblender/ex2_1_stereoscene.py +36 -29
- pyvale/examples/renderblender/ex2_2_stereodeformed.py +26 -20
- pyvale/examples/renderblender/ex3_1_blendercalibration.py +24 -17
- pyvale/examples/renderrasterisation/ex_rastenp.py +14 -12
- pyvale/examples/renderrasterisation/ex_rastercyth_oneframe.py +14 -15
- pyvale/examples/renderrasterisation/ex_rastercyth_static_cypara.py +13 -11
- pyvale/examples/renderrasterisation/ex_rastercyth_static_pypara.py +13 -11
- pyvale/mooseherder/__init__.py +32 -0
- pyvale/mooseherder/directorymanager.py +416 -0
- pyvale/mooseherder/exodusreader.py +763 -0
- pyvale/mooseherder/gmshrunner.py +163 -0
- pyvale/mooseherder/inputmodifier.py +236 -0
- pyvale/mooseherder/mooseconfig.py +226 -0
- pyvale/mooseherder/mooseherd.py +527 -0
- pyvale/mooseherder/mooserunner.py +303 -0
- pyvale/mooseherder/outputreader.py +22 -0
- pyvale/mooseherder/simdata.py +92 -0
- pyvale/mooseherder/simrunner.py +31 -0
- pyvale/mooseherder/sweepreader.py +356 -0
- pyvale/mooseherder/sweeptools.py +76 -0
- pyvale/sensorsim/__init__.py +82 -0
- pyvale/{camera.py → sensorsim/camera.py} +7 -7
- pyvale/{camerasensor.py → sensorsim/camerasensor.py} +7 -7
- pyvale/{camerastereo.py → sensorsim/camerastereo.py} +2 -2
- pyvale/{cameratools.py → sensorsim/cameratools.py} +4 -4
- pyvale/{cython → sensorsim/cython}/rastercyth.c +596 -596
- pyvale/sensorsim/cython/rastercyth.cpython-311-aarch64-linux-musl.so +0 -0
- pyvale/{cython → sensorsim/cython}/rastercyth.py +16 -17
- pyvale/{errorcalculator.py → sensorsim/errorcalculator.py} +1 -1
- pyvale/{errorintegrator.py → sensorsim/errorintegrator.py} +2 -2
- pyvale/{errorrand.py → sensorsim/errorrand.py} +4 -4
- pyvale/{errorsyscalib.py → sensorsim/errorsyscalib.py} +2 -2
- pyvale/{errorsysdep.py → sensorsim/errorsysdep.py} +2 -2
- pyvale/{errorsysfield.py → sensorsim/errorsysfield.py} +8 -8
- pyvale/{errorsysindep.py → sensorsim/errorsysindep.py} +3 -3
- pyvale/sensorsim/exceptions.py +8 -0
- pyvale/{experimentsimulator.py → sensorsim/experimentsimulator.py} +23 -3
- pyvale/{field.py → sensorsim/field.py} +1 -1
- pyvale/{fieldconverter.py → sensorsim/fieldconverter.py} +72 -19
- pyvale/sensorsim/fieldinterp.py +37 -0
- pyvale/sensorsim/fieldinterpmesh.py +124 -0
- pyvale/sensorsim/fieldinterppoints.py +55 -0
- pyvale/{fieldsampler.py → sensorsim/fieldsampler.py} +4 -4
- pyvale/{fieldscalar.py → sensorsim/fieldscalar.py} +28 -24
- pyvale/{fieldtensor.py → sensorsim/fieldtensor.py} +33 -31
- pyvale/{fieldvector.py → sensorsim/fieldvector.py} +33 -31
- pyvale/{imagedef2d.py → sensorsim/imagedef2d.py} +9 -5
- pyvale/{integratorfactory.py → sensorsim/integratorfactory.py} +6 -6
- pyvale/{integratorquadrature.py → sensorsim/integratorquadrature.py} +3 -3
- pyvale/{integratorrectangle.py → sensorsim/integratorrectangle.py} +3 -3
- pyvale/{integratorspatial.py → sensorsim/integratorspatial.py} +1 -1
- pyvale/{rastercy.py → sensorsim/rastercy.py} +5 -5
- pyvale/{rasternp.py → sensorsim/rasternp.py} +9 -9
- pyvale/{rasteropts.py → sensorsim/rasteropts.py} +1 -1
- pyvale/{renderer.py → sensorsim/renderer.py} +1 -1
- pyvale/{rendermesh.py → sensorsim/rendermesh.py} +5 -5
- pyvale/{renderscene.py → sensorsim/renderscene.py} +2 -2
- pyvale/{sensorarray.py → sensorsim/sensorarray.py} +1 -1
- pyvale/{sensorarrayfactory.py → sensorsim/sensorarrayfactory.py} +12 -12
- pyvale/{sensorarraypoint.py → sensorsim/sensorarraypoint.py} +10 -8
- pyvale/{sensordata.py → sensorsim/sensordata.py} +1 -1
- pyvale/{sensortools.py → sensorsim/sensortools.py} +2 -20
- pyvale/sensorsim/simtools.py +174 -0
- pyvale/{visualexpplotter.py → sensorsim/visualexpplotter.py} +3 -3
- pyvale/{visualimages.py → sensorsim/visualimages.py} +2 -2
- pyvale/{visualsimanimator.py → sensorsim/visualsimanimator.py} +4 -4
- pyvale/{visualsimplotter.py → sensorsim/visualsimplotter.py} +5 -5
- pyvale/{visualsimsensors.py → sensorsim/visualsimsensors.py} +12 -12
- pyvale/{visualtools.py → sensorsim/visualtools.py} +1 -1
- pyvale/{visualtraceplotter.py → sensorsim/visualtraceplotter.py} +2 -2
- pyvale/simcases/case17.geo +3 -0
- pyvale/simcases/case17.i +4 -4
- pyvale/simcases/run_1case.py +1 -9
- pyvale/simcases/run_all_cases.py +1 -1
- pyvale/simcases/run_build_case.py +1 -1
- pyvale/simcases/run_example_cases.py +1 -1
- pyvale/verif/__init__.py +12 -0
- pyvale/{analyticsimdatafactory.py → verif/analyticsimdatafactory.py} +2 -2
- pyvale/{analyticsimdatagenerator.py → verif/analyticsimdatagenerator.py} +2 -2
- pyvale/verif/psens.py +125 -0
- pyvale/verif/psensconst.py +18 -0
- pyvale/verif/psensmech.py +227 -0
- pyvale/verif/psensmultiphys.py +187 -0
- pyvale/verif/psensscalar.py +347 -0
- pyvale/verif/psenstensor.py +123 -0
- pyvale/verif/psensvector.py +116 -0
- {pyvale-2025.7.1.dist-info → pyvale-2025.8.1.dist-info}/METADATA +6 -7
- pyvale-2025.8.1.dist-info/RECORD +263 -0
- pyvale/cython/rastercyth.cpython-311-aarch64-linux-musl.so +0 -0
- pyvale/dataset.py +0 -415
- pyvale/dicdataimport.py +0 -247
- pyvale/simtools.py +0 -67
- pyvale-2025.7.1.dist-info/RECORD +0 -214
- /pyvale/{blendercalibrationdata.py → blender/blendercalibrationdata.py} +0 -0
- /pyvale/{dicspecklegenerator.py → dic/dicspecklegenerator.py} +0 -0
- /pyvale/{dicspecklequality.py → dic/dicspecklequality.py} +0 -0
- /pyvale/{dicstrainresults.py → dic/dicstrainresults.py} +0 -0
- /pyvale/{cameradata.py → sensorsim/cameradata.py} +0 -0
- /pyvale/{cameradata2d.py → sensorsim/cameradata2d.py} +0 -0
- /pyvale/{errordriftcalc.py → sensorsim/errordriftcalc.py} +0 -0
- /pyvale/{fieldtransform.py → sensorsim/fieldtransform.py} +0 -0
- /pyvale/{generatorsrandom.py → sensorsim/generatorsrandom.py} +0 -0
- /pyvale/{imagetools.py → sensorsim/imagetools.py} +0 -0
- /pyvale/{integratortype.py → sensorsim/integratortype.py} +0 -0
- /pyvale/{output.py → sensorsim/output.py} +0 -0
- /pyvale/{raster.py → sensorsim/raster.py} +0 -0
- /pyvale/{sensordescriptor.py → sensorsim/sensordescriptor.py} +0 -0
- /pyvale/{visualimagedef.py → sensorsim/visualimagedef.py} +0 -0
- /pyvale/{visualopts.py → sensorsim/visualopts.py} +0 -0
- /pyvale/{analyticmeshgen.py → verif/analyticmeshgen.py} +0 -0
- {pyvale-2025.7.1.dist-info → pyvale-2025.8.1.dist-info}/WHEEL +0 -0
- {pyvale-2025.7.1.dist-info → pyvale-2025.8.1.dist-info}/licenses/LICENSE +0 -0
- {pyvale-2025.7.1.dist-info → pyvale-2025.8.1.dist-info}/top_level.txt +0 -0
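The listing shows the 2025.8.1 release splitting the previously flat `pyvale` namespace into subpackages: `pyvale/__init__.py` shrinks by roughly 80 lines while `blender/`, `dataset/`, `dic/`, `mooseherder/`, `sensorsim/` and `verif/` each gain their own `__init__.py`, and the `mooseherder` workflow manager is now shipped inside the wheel. A minimal sketch of the import style the updated example scripts use under the new layout; the first four aliases appear verbatim in the diffs below, while the `dic` and `verif` aliases are illustrative assumptions.

```python
# Import style used by the updated example scripts in this release.
import pyvale.sensorsim as sens      # sensor/camera simulation (files moved from the package root)
import pyvale.dataset as dataset     # bundled datasets and path helpers
import pyvale.blender as blender     # Blender scene, light, material and render helpers
import pyvale.mooseherder as mh      # MOOSE/Gmsh workflow manager, now vendored inside pyvale

# Assumed aliases for the remaining new subpackages (not shown in the diffs below):
import pyvale.dic as dic             # 2D DIC engine, moved under pyvale.dic
import pyvale.verif as verif         # analytic verification generators
```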
pyvale/examples/mooseherder/ex7d_readconfig_multi_gmshmoose_results.py

@@ -0,0 +1,143 @@
+# ==============================================================================
+# pyvale: the python validation engine
+# License: MIT
+# Copyright (C) 2025 The Computer Aided Validation Team
+# ==============================================================================
+
+"""
+Using read config to extract specific variables from a sweep
+================================================================================
+
+In this example we are going to use a read configuration object to control what
+variables are read from our simulation output. This is useful for when we might
+only want to read a subset of the simulation data for post-processing. For
+example a post processor like 'max_temp' from 'glob_vars' without reading in the
+full temperature field. This can help save memory when reading in data from
+large sweeps.
+
+**Installing moose**: To run this example you will need to have installed moose
+on your system. As moose supports unix operating systems windows users will need
+to use windows subsystem for linux (WSL). We use the proteus moose build which
+can be found here: https://github.com/aurora-multiphysics/proteus. Build scripts
+for common linux distributions can be found in the 'scripts' directory of the
+repo. You can also create your own moose build using instructions here:
+https://mooseframework.inl.gov/.
+
+We start by importing what we need for this example.
+"""
+
+import time
+from pathlib import Path
+import numpy as np
+
+#pyvale imports
+import pyvale.dataset as dataset
+from pyvale.mooseherder import (MooseHerd,
+                                MooseRunner,
+                                MooseConfig,
+                                InputModifier,
+                                DirectoryManager,
+                                ExodusReader,
+                                SweepReader,
+                                sweep_param_grid)
+
+#%%
+# The first part of this example is the same as our previous example called:
+# 'Using multiple calls to run parallel sweeps'. For a detailed explanation of
+# the code below head to that example. For now we use this to generate multiple
+# sets of outputs and then use a sweep reader to read this all in below.
+
+moose_input = dataset.element_case_input_path(dataset.EElemTest.HEX20)
+moose_modifier = InputModifier(moose_input,'#','')
+
+config = {'main_path': Path.home()/ 'moose',
+          'app_path': Path.home() / 'proteus',
+          'app_name': 'proteus-opt'}
+moose_config = MooseConfig(config)
+
+moose_runner = MooseRunner(moose_config)
+moose_runner.set_run_opts(n_tasks = 1,
+                          n_threads = 2,
+                          redirect_out = True)
+
+num_para_sims: int = 4
+dir_manager = DirectoryManager(n_dirs=num_para_sims)
+herd = MooseHerd([moose_runner],[moose_modifier],dir_manager)
+herd.set_num_para_sims(n_para=num_para_sims)
+
+output_path = Path.cwd() / "pyvale-output"
+if not output_path.is_dir():
+    output_path.mkdir(parents=True, exist_ok=True)
+
+dir_manager.set_base_dir(output_path)
+dir_manager.reset_dirs()
+
+moose_params = {"nElemX": (2,3),
+                "lengX": np.array([10e-3,15e-3]),
+                "PRatio":(0.3,)}
+params = [moose_params,]
+sweep_params = sweep_param_grid(params)
+
+print("\nParameter sweep variables by simulation:")
+for ii,pp in enumerate(sweep_params):
+    print(f"Sim: {ii}, Params [moose,]: {pp}")
+
+num_para_runs: int = 3
+if __name__ == '__main__':
+    sweep_times = np.zeros((num_para_runs,),dtype=np.float64)
+    for rr in range(num_para_runs):
+        herd.run_para(sweep_params)
+        sweep_times[rr] = herd.get_sweep_time()
+
+    print()
+    for ii,ss in enumerate(sweep_times):
+        print(f"Sweep {ii} took: {ss:.3f}seconds")
+    print()
+
+
+#%%
+# Now we create a sweep reader as we have done before and we will use this to
+# read the json keys containing the paths to all the output files. The output
+# file paths have the same list of lists structure we get when we read the
+# sweep data where the outer list corresponds to the unique simulation chain and
+# the inner list corresponds to position of the specific simulation tool in the
+# chain.
+#
+
+sweep_reader = SweepReader(dir_manager,num_para_read=4)
+output_file_paths = sweep_reader.read_all_output_file_keys()
+
+
+#%%
+# When we used a read configuration with our exodus reader previously we saw
+# that it can be simpler to get the read configuration that covers everything
+# in our file and then edit it to extract what we want. Here we are use our
+# exodus reader to extract the read configuration fo the first simulation.
+# We also get the original sim data so we can compare to the case when we use
+# a read configuration to control what we read.
+#
+# Now we want to edit the read configuration so that we only read every second
+# time step from our simulation output files. We do this by getting the
+# original time steps with our exodus reader and then using this to change the
+# ``time_inds`` field of our read configuration to extract every second step.
+exodus_reader = ExodusReader(output_file_paths[0][0])
+sim_data_orig = exodus_reader.read_all_sim_data()
+
+read_config = exodus_reader.get_read_config()
+sim_time = exodus_reader.get_time()
+read_config.time_inds = np.arange(0,sim_time.shape[0],2)
+
+#%%
+# Now we read the sweep results using our read configuration and then check
+# extracted simulation data object to see that we have every second time step.
+sweep_results_seq = sweep_reader.read_sequential(read_config=read_config)
+
+print("Comparison of time steps extracted:")
+print()
+print(f"{sim_data_orig.time.shape=}")
+print(f"{sweep_results_seq[0][0].time.shape=}")
+print()
+print(f"{sim_data_orig.node_vars['disp_x'].shape=}")
+print(f"{sweep_results_seq[0][0].node_vars['disp_x'].shape=}")
+print()
+
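The example above prints array shapes so the effect of the edited read configuration can be checked by eye. A short follow-on check, not part of the packaged script, that makes the same comparison programmatically using the names defined in the example:

```python
import numpy as np

# The sequential sweep read returns a list of lists: [simulation chain][tool].
filtered = sweep_results_seq[0][0]

# The edited read config asked for every second index of the original time
# vector, so the filtered data should contain exactly that many steps.
expected_steps = np.arange(0, sim_data_orig.time.shape[0], 2).shape[0]
assert filtered.time.shape[0] == expected_steps, "read config was not applied"
```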
pyvale/examples/mooseherder/ex8_read_existing_sweep_output.py

@@ -0,0 +1,72 @@
+# ==============================================================================
+# pyvale: the python validation engine
+# License: MIT
+# Copyright (C) 2025 The Computer Aided Validation Team
+# ==============================================================================
+
+"""
+Reading results from a pre-run parameter sweep
+================================================================================
+
+In this example we will read in the output of a parameter sweep performed by the
+'herd' workflow manager specifically showing how we can read a sweep if we don't
+have the original directory manager.
+
+**NOTE**: this example assumes you have run one of the previous examples using
+the herd workflow manager and that there is a series of output simulation
+working directories in the pyvale-output directory. If not please run the
+example called 'Running a parameter sweep of a MOOSE simulation' first.
+
+We start by importing what we need for this example.
+"""
+import time
+from pprint import pprint
+from pathlib import Path
+from pyvale.mooseherder import DirectoryManager
+from pyvale.mooseherder import SweepReader
+
+#%%
+# First we create a directory manager and pass it our standard pyvale output
+# directory where our simulation sweep output is. The number of directories here
+# is not critical as long as it is equal to or larger than the number of working
+# directories you used when you ran your sweep it will find all simulation
+# outputs that exist in the working directories.
+output_base_path = Path.cwd() / "pyvale-output"
+dir_manager = DirectoryManager(n_dirs=4)
+dir_manager.set_base_dir(output_base_path)
+
+#%%
+# We now pass the directory manager into a sweep reader setting the option to
+# read our sweep output in parallel. Note that for small output files reading
+# the output in parallel will probably be slower than reading sequentially so
+# make sure you test the reader for your particular case.
+#
+# We then use the reader extract the list of list of output file paths and the
+# combinations of variables that were used for this parameter sweep.
+sweep_reader = SweepReader(dir_manager,num_para_read=4)
+
+output_files = sweep_reader.read_all_output_file_keys()
+sweep_variables = sweep_reader.read_all_sweep_var_files()
+
+print('Output files in json keys:')
+pprint(sweep_reader.read_all_output_file_keys())
+print()
+
+print('Parameter sweep variables found:')
+pprint(sweep_reader.read_all_sweep_var_files())
+print()
+
+#%%
+# Now we use our sweep reader to read in the existing sweep results. We can also
+# use a read configuration here if we want to filter out specific parts of the
+# simulation output but for now we will read all of the data.
+
+if __name__ == '__main__':
+    start_time = time.perf_counter()
+    sweep_data = sweep_reader.read_results_para()
+    read_time_para = time.perf_counter() - start_time
+
+    print("-"*80)
+    print(f'Number of simulations outputs read: {len(sweep_data):d}')
+    print(f'Read time parallel = {read_time_para:.6f} seconds')
+    print("-"*80)
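The comment block in this example notes that a read configuration can also be applied when reading a pre-run sweep but does not show it. A sketch combining this example with the previous one; it reuses the sequential reader because that is the call whose `read_config` keyword the previous example demonstrates, and whether `read_results_para` accepts the same keyword is not shown in this diff:

```python
import numpy as np
from pyvale.mooseherder import ExodusReader

# Build a read configuration from the first output file of the existing sweep.
exodus_reader = ExodusReader(output_files[0][0])
read_config = exodus_reader.get_read_config()

# Keep only every second time step, as in the previous example.
read_config.time_inds = np.arange(0, exodus_reader.get_time().shape[0], 2)

# Read the pre-run sweep with the filter applied.
sweep_data_filtered = sweep_reader.read_sequential(read_config=read_config)
print(f"Simulation chains read with time-step filter: {len(sweep_data_filtered):d}")
```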
pyvale/examples/renderblender/ex1_1_blenderscene.py

@@ -5,7 +5,7 @@
 # ==============================================================================
 
 """
-
+Creating a scene with 2D DIC
 ---------------------------------------------
 
 This example takes you through creating a scene and adding all the necessary
@@ -18,8 +18,12 @@ Test case: mechanical analysis of a plate with a hole loaded in tension.
 import numpy as np
 from scipy.spatial.transform import Rotation
 from pathlib import Path
-
-
+
+#pyvale modules
+import pyvale.sensorsim as sens
+import pyvale.dataset as dataset
+import pyvale.blender as blender
+import pyvale.mooseherder as mh
 
 # %%
 # Here we load in a pre-generated MOOSE finite element simulation dataset that
@@ -29,7 +33,7 @@ import mooseherder as mh
 # format (*.e). `mooseherder` is then used to convert the simulation output
 # into a `SimData` object.
 
-data_path =
+data_path = dataset.render_mechanical_3d_path()
 sim_data = mh.ExodusReader(data_path).read_all_sim_data()
 
 # %%
@@ -40,11 +44,11 @@ sim_data = mh.ExodusReader(data_path).read_all_sim_data()
 # The `disp_comps` are the expected direction of displacement. Since this is a
 # 3D deformation test case, displacement is expected in the x, y and z directions.
 disp_comps = ("disp_x","disp_y", "disp_z")
-sim_data =
+sim_data = sens.scale_length_units(scale=1000.0,
                                    sim_data=sim_data,
                                    disp_comps=disp_comps)
 
-render_mesh =
+render_mesh = sens.create_render_mesh(sim_data,
                                       ("disp_y","disp_x"),
                                       sim_spat_dim=3,
                                       field_disp_keys=disp_comps)
@@ -64,10 +68,10 @@ base_dir = Path.cwd()
 # In order to create a DIC setup in Blender, first a scene must be created.
 # A scene is a holding space for all of your objects (e.g. camera(s), light(s)
 # and sample(s)).
-# A scene is initialised using the `
+# A scene is initialised using the `blender.Scene` class. All the subsequent
 # objects and actions necessary are then methods of this class.
 
-scene =
+scene = blender.Scene()
 
 # %%
 # The next thing that can be added to the scene is a sample.
@@ -79,10 +83,10 @@ scene = pyvale.BlenderScene()
 part = scene.add_part(render_mesh, sim_spat_dim=3)
 # Set the part location
 part_location = np.array([0, 0, 0])
-
+blender.Tools.move_blender_obj(part=part, pos_world=part_location)
 # Set part rotation
 part_rotation = Rotation.from_euler("xyz", [0, 0, 0], degrees=True)
-
+blender.Tools.rotate_blender_obj(part=part, rot_world=part_rotation)
 
 # %%
 # A camera can then be added to the scene.
@@ -91,7 +95,7 @@ pyvale.BlenderTools.rotate_blender_obj(part=part, rot_world=part_rotation)
 # This camera can then be added to the Blender scene.
 # The camera can also be moved and rotated.
 
-cam_data =
+cam_data = sens.CameraData(pixels_num=np.array([1540, 1040]),
                            pixels_size=np.array([0.00345, 0.00345]),
                            pos_world=(0, 0, 400),
                            rot_world=Rotation.from_euler("xyz", [0, 0, 0]),
@@ -106,10 +110,10 @@ camera.rotation_euler = (0, 0, 0) # NOTE: The default is an XYZ Euler angle
 # Blender offers different light types: Point, Sun, Spot and Area.
 # The light can also be moved and rotated like the camera.
 
-light_data =
-
-
-
+light_data = blender.LightData(type=blender.LightType.POINT,
+                               pos_world=(0, 0, 400),
+                               rot_world=Rotation.from_euler("xyz",
+                                                             [0, 0, 0]),
                                energy=1)
 light = scene.add_light(light_data)
 light.location = (0, 0, 410)
@@ -126,10 +130,10 @@ light.rotation_euler = (0, 0, 0)
 # It should be noted that for a bigger camera or sample you may need to generate
 # a larger speckle pattern.
 
-material_data =
-speckle_path =
+material_data = blender.MaterialData()
+speckle_path = dataset.dic_pattern_5mpx_path()
 
-mm_px_resolution =
+mm_px_resolution = sens.CameraTools.calculate_mm_px_resolution(cam_data)
 scene.add_speckle(part=part,
                   speckle_path=speckle_path,
                   mat_data=material_data,
@@ -142,7 +146,7 @@ scene.add_speckle(part=part,
 # Firstly, all the rendering parameters must be set, including parameters such as
 # the number of threads to use.
 
-render_data =
+render_data = blender.RenderData(cam_data=cam_data,
                                  base_dir=base_dir,
                                  threads=8)
 
@@ -164,5 +168,5 @@ print("Save directory of the image:", (render_data.base_dir / "blenderimages"))
 # There is also the option to save the scene as a Blender project file.
 # This file can be opened with the Blender GUI to view the scene.
 
-
+blender.Tools.save_blender_file(base_dir)
 
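The hunk context above still shows the old flat-namespace calls (`pyvale.BlenderScene()`, `pyvale.BlenderTools.rotate_blender_obj`), while the added lines use the new subpackages. A reference sketch of the mapping; only those two old names are legible in this diff view, so the remaining entries record the new-side names only.

```python
import pyvale.sensorsim as sens
import pyvale.dataset as dataset
import pyvale.blender as blender

# Old flat namespace              -> new subpackage (from the hunks above)
# pyvale.BlenderScene()           -> blender.Scene()
# pyvale.BlenderTools.*           -> blender.Tools.move_blender_obj / rotate_blender_obj
#                                    / save_blender_file
#
# New-side names used by the updated example (old names not legible here):
#   lights:    blender.LightData with blender.LightType.POINT
#   material:  blender.MaterialData
#   rendering: blender.RenderData
#   cameras:   sens.CameraData, sens.CameraTools.calculate_mm_px_resolution
#   datasets:  dataset.render_mechanical_3d_path(), dataset.dic_pattern_5mpx_path()
```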
pyvale/examples/renderblender/ex1_2_blenderdeformed.py

@@ -5,7 +5,7 @@
 # ==============================================================================
 
 """
-
+Deforming a sample with 2D DIC
 ===============================================
 
 This example follows a similar workflow to the previous Blender example.
@@ -18,15 +18,19 @@ Test case: mechanical analysis of a plate with a hole loaded in tension.
 import numpy as np
 from scipy.spatial.transform import Rotation
 from pathlib import Path
-
-
+
+# Pyvale imports
+import pyvale.sensorsim as sens
+import pyvale.dataset as dataset
+import pyvale.blender as blender
+import pyvale.mooseherder as mh
 
 # %%
 # The simulation results are loaded in here in the same way as the previous
 # example. As mentioned this `data_path` can be replaced with your own MOOSE
 # simulation output in exodus format (*.e).
 
-data_path =
+data_path = dataset.render_mechanical_3d_path()
 sim_data = mh.ExodusReader(data_path).read_all_sim_data()
 
 # %%
@@ -37,11 +41,11 @@ sim_data = mh.ExodusReader(data_path).read_all_sim_data()
 # 3D deformation test case, displacement is expected in the x, y and z directions.
 
 disp_comps = ("disp_x","disp_y", "disp_z")
-sim_data =
+sim_data = sens.scale_length_units(scale=1000.0,
                                    sim_data=sim_data,
                                    disp_comps=disp_comps)
 
-render_mesh =
+render_mesh = sens.create_render_mesh(sim_data,
                                       ("disp_y","disp_x"),
                                       sim_spat_dim=3,
                                       field_disp_keys=disp_comps)
@@ -62,7 +66,7 @@ base_dir = Path.cwd()
 # A scene is initialised using the `BlenderScene` class. All the subsequent
 # objects and actions necessary are then methods of this class.
 
-scene =
+scene = blender.Scene()
 
 # %%
 # The next thing that can be added to the scene is a sample.
@@ -75,9 +79,9 @@ scene = pyvale.BlenderScene()
 part = scene.add_part(render_mesh, sim_spat_dim=3)
 # Set the part location
 part_location = np.array([0, 0, 0])
-
+blender.Tools.move_blender_obj(part=part, pos_world=part_location)
 part_rotation = Rotation.from_euler("xyz", [0, 0, 0], degrees=True)
-
+blender.Tools.rotate_blender_obj(part=part, rot_world=part_rotation)
 
 # %%
 # A camera can then be added to the scene.
@@ -86,7 +90,7 @@ pyvale.BlenderTools.rotate_blender_obj(part=part, rot_world=part_rotation)
 # This camera can then be added to the Blender scene.
 # The camera can also be moved and rotated.
 
-cam_data =
+cam_data = sens.CameraData(pixels_num=np.array([1540, 1040]),
                            pixels_size=np.array([0.00345, 0.00345]),
                            pos_world=(0, 0, 400),
                            rot_world=Rotation.from_euler("xyz", [0, 0, 0]),
@@ -101,7 +105,7 @@ camera.rotation_euler = (0, 0, 0) # NOTE: The default is an XYZ Euler angle
 # Blender offers different light types: Point, Sun, Spot and Area.
 # The light can also be moved and rotated like the camera.
 
-light_data =
+light_data = blender.LightData(type=blender.LightType.POINT,
                                pos_world=(0, 0, 400),
                                rot_world=Rotation.from_euler("xyz",
                                                              [0, 0, 0]),
@@ -121,9 +125,9 @@ light.rotation_euler = (0, 0, 0)
 # It should be noted that for a bigger camera or sample you may need to generate
 # a larger speckle pattern.
 
-material_data =
-speckle_path =
-mm_px_resolution =
+material_data = blender.MaterialData()
+speckle_path = dataset.dic_pattern_5mpx_path()
+mm_px_resolution = sens.CameraTools.calculate_mm_px_resolution(cam_data)
 scene.add_speckle(part=part,
                   speckle_path=speckle_path,
                   mat_data=material_data,
@@ -137,9 +141,9 @@ scene.add_speckle(part=part,
 # Firstly, all the rendering parameters must be set, including parameters such as
 # the number of threads to use.
 
-render_data =
-
-
+render_data = blender.RenderData(cam_data=cam_data,
+                                 base_dir=base_dir,
+                                 threads=8)
 
 # %%
 # A series of deformed images can then be rendered.
@@ -167,4 +171,4 @@ print("Save directory of the image:", (render_data.base_dir / "blenderimages"))
 # There is also the option to save the scene as a Blender project file.
 # This file can be opened with the Blender GUI to view the scene.
 
-
+blender.Tools.save_blender_file(base_dir)
pyvale/examples/renderblender/ex2_1_stereoscene.py

@@ -5,7 +5,7 @@
 # ==============================================================================
 
 """
-
+Creating a scene with stereo DIC
 -------------------------------------------------
 
 This example takes you through creating a scene and adding all the necessary
@@ -18,15 +18,19 @@ Test case: mechanical analysis of a plate with a hole loaded in tension.
 import numpy as np
 from scipy.spatial.transform import Rotation
 from pathlib import Path
-
-
+
+# Pyvale imports
+import pyvale.sensorsim as sens
+import pyvale.dataset as dataset
+import pyvale.blender as blender
+import pyvale.mooseherder as mh
 
 # %%
 # The simulation results are loaded in here in the same way as the previous
 # example. As mentioned this `data_path` can be replaced with your own MOOSE
 # simulation output in exodus format (*.e).
 
-data_path =
+data_path = dataset.render_mechanical_3d_path()
 sim_data = mh.ExodusReader(data_path).read_all_sim_data()
 
 # %%
@@ -37,11 +41,11 @@ sim_data = mh.ExodusReader(data_path).read_all_sim_data()
 # 3D deformation test case, displacement is expected in the x, y and z directions.
 
 disp_comps = ("disp_x","disp_y", "disp_z")
-sim_data =
-
-
+sim_data = sens.scale_length_units(scale=1000.0,
+                                   sim_data=sim_data,
+                                   disp_comps=disp_comps)
 
-render_mesh =
+render_mesh = sens.create_render_mesh(sim_data,
                                       ("disp_y","disp_x"),
                                       sim_spat_dim=3,
                                       field_disp_keys=disp_comps)
@@ -59,9 +63,9 @@ base_dir = Path.cwd()
 # Creating the scene
 # ^^^^^^^^^^^^^^^^^^
 # In order to create a DIC setup in Blender, first a scene must be created.
-# A scene is initialised using the `
+# A scene is initialised using the `blender.Scene` class. All the subsequent
 # objects and actions necessary are then methods of this class.
-scene =
+scene = blender.Scene()
 
 # %%
 # The next thing that can be added to the scene is a sample.
@@ -73,10 +77,10 @@ scene = pyvale.BlenderScene()
 part = scene.add_part(render_mesh, sim_spat_dim=3)
 # Set the part location
 part_location = np.array([0, 0, 0])
-
+blender.Tools.move_blender_obj(part=part, pos_world=part_location)
 # Set part rotation
 part_rotation = Rotation.from_euler("xyz", [0, 0, 0], degrees=True)
-
+blender.Tools.rotate_blender_obj(part=part, rot_world=part_rotation)
 
 # %%
 # The cameras can then be initialised. A stereo camera system is defined by a
@@ -95,23 +99,26 @@ pyvale.BlenderTools.rotate_blender_obj(part=part, rot_world=part_rotation)
 # between the two. The cameras can then be added to the Blender scene using the
 # `add_stereo_system` method.
 
-cam_data_0 =
-
-
-
-
-
+cam_data_0 = sens.CameraData(pixels_num=np.array([1540, 1040]),
+                             pixels_size=np.array([0.00345, 0.00345]),
+                             pos_world=np.array([0, 0, 400]),
+                             rot_world=Rotation.from_euler("xyz", [0, 0, 0]),
+                             roi_cent_world=(0, 0, 0),
+                             focal_length=15.0)
 # Set this to "symmetric" to get a symmetric stereo system or set this to
 # "faceon" to get a face-on stereo system
 stereo_setup = "faceon"
 if stereo_setup == "symmetric":
-stereo_system =
+    stereo_system = sens.CameraTools.symmetric_stereo_cameras(
         cam_data_0=cam_data_0,
         stereo_angle=15.0)
-
-stereo_system =
+elif stereo_setup == "faceon":
+    stereo_system = sens.CameraTools.faceon_stereo_cameras(
         cam_data_0=cam_data_0,
         stereo_angle=15.0)
+else:
+    raise ValueError(f"Unknown stereo_setup: {stereo_setup}")
+
 cam0, cam1 = scene.add_stereo_system(stereo_system)
 
 # %%
@@ -130,7 +137,7 @@ stereo_system.save_calibration(base_dir)
 # Blender offers different light types: Point, Sun, Spot and Area.
 # The light can also be moved and rotated like the camera.
 
-light_data =
+light_data = blender.LightData(type=blender.LightType.POINT,
                                pos_world=(0, 0, 400),
                                rot_world=Rotation.from_euler("xyz",
                                                              [0, 0, 0]),
@@ -150,10 +157,10 @@ light.rotation_euler = (0, 0, 0) # NOTE: The default is an XYZ Euler angle
 # It should be noted that for a bigger camera or sample you may need to generate
 # a larger speckle pattern.
 
-material_data =
-speckle_path =
+material_data = blender.MaterialData()
+speckle_path = dataset.dic_pattern_5mpx_path()
 
-mm_px_resolution =
+mm_px_resolution = sens.CameraTools.calculate_mm_px_resolution(cam_data_0)
 scene.add_speckle(part=part,
                   speckle_path=speckle_path,
                   mat_data=material_data,
@@ -168,10 +175,10 @@ scene.add_speckle(part=part,
 # Differently to a 2D DIC system, both cameras' parameters must be specified in
 # the `RenderData` object.
 
-render_data =
+render_data = blender.RenderData(cam_data=(stereo_system.cam_data_0,
                                            stereo_system.cam_data_1),
-
-
+                                 base_dir=base_dir,
+                                 threads=8)
 
 # %%
 # A single set of images of the scene can then be rendered.
@@ -192,4 +199,4 @@ print("Save directory of the image:", (render_data.base_dir / "blenderimages"))
 # There is also the option to save the scene as a Blender project file.
 # This file can be opened with the Blender GUI to view the scene.
 
-
+blender.Tools.save_blender_file(base_dir)