pyvale 2025.7.2-cp311-cp311-musllinux_1_2_aarch64.whl → 2025.8.1-cp311-cp311-musllinux_1_2_aarch64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pyvale/__init__.py +12 -92
- pyvale/blender/__init__.py +23 -0
- pyvale/{pyvaleexceptions.py → blender/blenderexceptions.py} +0 -3
- pyvale/{blenderlightdata.py → blender/blenderlightdata.py} +3 -3
- pyvale/{blendermaterialdata.py → blender/blendermaterialdata.py} +1 -1
- pyvale/{blenderrenderdata.py → blender/blenderrenderdata.py} +5 -3
- pyvale/{blenderscene.py → blender/blenderscene.py} +33 -30
- pyvale/{blendertools.py → blender/blendertools.py} +14 -10
- pyvale/dataset/__init__.py +7 -0
- pyvale/dataset/dataset.py +443 -0
- pyvale/dic/__init__.py +20 -0
- pyvale/{dic2d.py → dic/dic2d.py} +31 -36
- pyvale/dic/dic2dconv.py +6 -0
- pyvale/{dic2dcpp.cpython-311-aarch64-linux-musl.so → dic/dic2dcpp.cpython-311-aarch64-linux-musl.so} +0 -0
- pyvale/{dicdataimport.py → dic/dicdataimport.py} +8 -8
- pyvale/{dicregionofinterest.py → dic/dicregionofinterest.py} +1 -1
- pyvale/{dicresults.py → dic/dicresults.py} +1 -1
- pyvale/{dicstrain.py → dic/dicstrain.py} +9 -9
- pyvale/examples/basics/{ex1_1_basicscalars_therm2d.py → ex1a_basicscalars_therm2d.py} +12 -9
- pyvale/examples/basics/{ex1_2_sensormodel_therm2d.py → ex1b_sensormodel_therm2d.py} +17 -14
- pyvale/examples/basics/{ex1_3_customsens_therm3d.py → ex1c_customsens_therm3d.py} +27 -24
- pyvale/examples/basics/{ex1_4_basicerrors_therm3d.py → ex1d_basicerrors_therm3d.py} +32 -29
- pyvale/examples/basics/{ex1_5_fielderrs_therm3d.py → ex1e_fielderrs_therm3d.py} +19 -15
- pyvale/examples/basics/{ex1_6_caliberrs_therm2d.py → ex1f_caliberrs_therm2d.py} +20 -16
- pyvale/examples/basics/{ex1_7_spatavg_therm2d.py → ex1g_spatavg_therm2d.py} +19 -16
- pyvale/examples/basics/{ex2_1_basicvectors_disp2d.py → ex2a_basicvectors_disp2d.py} +13 -10
- pyvale/examples/basics/{ex2_2_vectorsens_disp2d.py → ex2b_vectorsens_disp2d.py} +19 -15
- pyvale/examples/basics/{ex2_3_sensangle_disp2d.py → ex2c_sensangle_disp2d.py} +21 -18
- pyvale/examples/basics/{ex2_4_chainfielderrs_disp2d.py → ex2d_chainfielderrs_disp2d.py} +31 -29
- pyvale/examples/basics/{ex2_5_vectorfields3d_disp3d.py → ex2e_vectorfields3d_disp3d.py} +21 -18
- pyvale/examples/basics/{ex3_1_basictensors_strain2d.py → ex3a_basictensors_strain2d.py} +16 -14
- pyvale/examples/basics/{ex3_2_tensorsens2d_strain2d.py → ex3b_tensorsens2d_strain2d.py} +17 -14
- pyvale/examples/basics/{ex3_3_tensorsens3d_strain3d.py → ex3c_tensorsens3d_strain3d.py} +25 -22
- pyvale/examples/basics/{ex4_1_expsim2d_thermmech2d.py → ex4a_expsim2d_thermmech2d.py} +17 -14
- pyvale/examples/basics/{ex4_2_expsim3d_thermmech3d.py → ex4b_expsim3d_thermmech3d.py} +37 -34
- pyvale/examples/basics/ex5_nomesh.py +24 -0
- pyvale/examples/dic/ex1_2_blenderdeformed.py +174 -0
- pyvale/examples/dic/ex1_region_of_interest.py +6 -3
- pyvale/examples/dic/ex2_plate_with_hole.py +21 -18
- pyvale/examples/dic/ex3_plate_with_hole_strain.py +8 -6
- pyvale/examples/dic/ex4_dic_blender.py +17 -15
- pyvale/examples/dic/ex5_dic_challenge.py +19 -14
- pyvale/examples/genanalyticdata/ex1_1_scalarvisualisation.py +16 -10
- pyvale/examples/genanalyticdata/ex1_2_scalarcasebuild.py +3 -3
- pyvale/examples/genanalyticdata/ex2_1_analyticsensors.py +29 -23
- pyvale/examples/genanalyticdata/ex2_2_analyticsensors_nomesh.py +67 -0
- pyvale/examples/imagedef2d/ex_imagedef2d_todisk.py +12 -9
- pyvale/examples/mooseherder/ex0_create_moose_config.py +65 -0
- pyvale/examples/mooseherder/ex1a_modify_moose_input.py +71 -0
- pyvale/examples/mooseherder/ex1b_modify_gmsh_input.py +69 -0
- pyvale/examples/mooseherder/ex2a_run_moose_once.py +80 -0
- pyvale/examples/mooseherder/ex2b_run_gmsh_once.py +64 -0
- pyvale/examples/mooseherder/ex2c_run_both_once.py +114 -0
- pyvale/examples/mooseherder/ex3_run_moose_seq_para.py +157 -0
- pyvale/examples/mooseherder/ex4_run_gmsh-moose_seq_para.py +176 -0
- pyvale/examples/mooseherder/ex5_run_moose_paramulti.py +136 -0
- pyvale/examples/mooseherder/ex6_read_moose_exodus.py +163 -0
- pyvale/examples/mooseherder/ex7a_read_moose_herd_results.py +153 -0
- pyvale/examples/mooseherder/ex7b_read_multi_herd_results.py +116 -0
- pyvale/examples/mooseherder/ex7c_read_multi_gmshmoose_results.py +127 -0
- pyvale/examples/mooseherder/ex7d_readconfig_multi_gmshmoose_results.py +143 -0
- pyvale/examples/mooseherder/ex8_read_existing_sweep_output.py +72 -0
- pyvale/examples/renderblender/ex1_1_blenderscene.py +24 -20
- pyvale/examples/renderblender/ex1_2_blenderdeformed.py +22 -18
- pyvale/examples/renderblender/ex2_1_stereoscene.py +36 -29
- pyvale/examples/renderblender/ex2_2_stereodeformed.py +26 -20
- pyvale/examples/renderblender/ex3_1_blendercalibration.py +24 -17
- pyvale/examples/renderrasterisation/ex_rastenp.py +14 -12
- pyvale/examples/renderrasterisation/ex_rastercyth_oneframe.py +14 -15
- pyvale/examples/renderrasterisation/ex_rastercyth_static_cypara.py +13 -11
- pyvale/examples/renderrasterisation/ex_rastercyth_static_pypara.py +13 -11
- pyvale/mooseherder/__init__.py +32 -0
- pyvale/mooseherder/directorymanager.py +416 -0
- pyvale/mooseherder/exodusreader.py +763 -0
- pyvale/mooseherder/gmshrunner.py +163 -0
- pyvale/mooseherder/inputmodifier.py +236 -0
- pyvale/mooseherder/mooseconfig.py +226 -0
- pyvale/mooseherder/mooseherd.py +527 -0
- pyvale/mooseherder/mooserunner.py +303 -0
- pyvale/mooseherder/outputreader.py +22 -0
- pyvale/mooseherder/simdata.py +92 -0
- pyvale/mooseherder/simrunner.py +31 -0
- pyvale/mooseherder/sweepreader.py +356 -0
- pyvale/mooseherder/sweeptools.py +76 -0
- pyvale/sensorsim/__init__.py +82 -0
- pyvale/{camera.py → sensorsim/camera.py} +7 -7
- pyvale/{camerasensor.py → sensorsim/camerasensor.py} +7 -7
- pyvale/{camerastereo.py → sensorsim/camerastereo.py} +2 -2
- pyvale/{cameratools.py → sensorsim/cameratools.py} +4 -4
- pyvale/{cython → sensorsim/cython}/rastercyth.c +596 -596
- pyvale/sensorsim/cython/rastercyth.cpython-311-aarch64-linux-musl.so +0 -0
- pyvale/{cython → sensorsim/cython}/rastercyth.py +16 -17
- pyvale/{errorcalculator.py → sensorsim/errorcalculator.py} +1 -1
- pyvale/{errorintegrator.py → sensorsim/errorintegrator.py} +2 -2
- pyvale/{errorrand.py → sensorsim/errorrand.py} +4 -4
- pyvale/{errorsyscalib.py → sensorsim/errorsyscalib.py} +2 -2
- pyvale/{errorsysdep.py → sensorsim/errorsysdep.py} +2 -2
- pyvale/{errorsysfield.py → sensorsim/errorsysfield.py} +8 -8
- pyvale/{errorsysindep.py → sensorsim/errorsysindep.py} +3 -3
- pyvale/sensorsim/exceptions.py +8 -0
- pyvale/{experimentsimulator.py → sensorsim/experimentsimulator.py} +23 -3
- pyvale/{field.py → sensorsim/field.py} +1 -1
- pyvale/{fieldconverter.py → sensorsim/fieldconverter.py} +72 -19
- pyvale/sensorsim/fieldinterp.py +37 -0
- pyvale/sensorsim/fieldinterpmesh.py +124 -0
- pyvale/sensorsim/fieldinterppoints.py +55 -0
- pyvale/{fieldsampler.py → sensorsim/fieldsampler.py} +4 -4
- pyvale/{fieldscalar.py → sensorsim/fieldscalar.py} +28 -24
- pyvale/{fieldtensor.py → sensorsim/fieldtensor.py} +33 -31
- pyvale/{fieldvector.py → sensorsim/fieldvector.py} +33 -31
- pyvale/{imagedef2d.py → sensorsim/imagedef2d.py} +9 -5
- pyvale/{integratorfactory.py → sensorsim/integratorfactory.py} +6 -6
- pyvale/{integratorquadrature.py → sensorsim/integratorquadrature.py} +3 -3
- pyvale/{integratorrectangle.py → sensorsim/integratorrectangle.py} +3 -3
- pyvale/{integratorspatial.py → sensorsim/integratorspatial.py} +1 -1
- pyvale/{rastercy.py → sensorsim/rastercy.py} +5 -5
- pyvale/{rasternp.py → sensorsim/rasternp.py} +9 -9
- pyvale/{rasteropts.py → sensorsim/rasteropts.py} +1 -1
- pyvale/{renderer.py → sensorsim/renderer.py} +1 -1
- pyvale/{rendermesh.py → sensorsim/rendermesh.py} +5 -5
- pyvale/{renderscene.py → sensorsim/renderscene.py} +2 -2
- pyvale/{sensorarray.py → sensorsim/sensorarray.py} +1 -1
- pyvale/{sensorarrayfactory.py → sensorsim/sensorarrayfactory.py} +12 -12
- pyvale/{sensorarraypoint.py → sensorsim/sensorarraypoint.py} +10 -8
- pyvale/{sensordata.py → sensorsim/sensordata.py} +1 -1
- pyvale/{sensortools.py → sensorsim/sensortools.py} +2 -20
- pyvale/sensorsim/simtools.py +174 -0
- pyvale/{visualexpplotter.py → sensorsim/visualexpplotter.py} +3 -3
- pyvale/{visualimages.py → sensorsim/visualimages.py} +2 -2
- pyvale/{visualsimanimator.py → sensorsim/visualsimanimator.py} +4 -4
- pyvale/{visualsimplotter.py → sensorsim/visualsimplotter.py} +5 -5
- pyvale/{visualsimsensors.py → sensorsim/visualsimsensors.py} +12 -12
- pyvale/{visualtools.py → sensorsim/visualtools.py} +1 -1
- pyvale/{visualtraceplotter.py → sensorsim/visualtraceplotter.py} +2 -2
- pyvale/simcases/case17.geo +3 -0
- pyvale/simcases/case17.i +4 -4
- pyvale/simcases/run_1case.py +1 -9
- pyvale/simcases/run_all_cases.py +1 -1
- pyvale/simcases/run_build_case.py +1 -1
- pyvale/simcases/run_example_cases.py +1 -1
- pyvale/verif/__init__.py +12 -0
- pyvale/{analyticsimdatafactory.py → verif/analyticsimdatafactory.py} +2 -2
- pyvale/{analyticsimdatagenerator.py → verif/analyticsimdatagenerator.py} +2 -2
- pyvale/verif/psens.py +125 -0
- pyvale/verif/psensconst.py +18 -0
- pyvale/verif/psensmech.py +227 -0
- pyvale/verif/psensmultiphys.py +187 -0
- pyvale/verif/psensscalar.py +347 -0
- pyvale/verif/psenstensor.py +123 -0
- pyvale/verif/psensvector.py +116 -0
- {pyvale-2025.7.2.dist-info → pyvale-2025.8.1.dist-info}/METADATA +6 -7
- pyvale-2025.8.1.dist-info/RECORD +263 -0
- pyvale/cython/rastercyth.cpython-311-aarch64-linux-musl.so +0 -0
- pyvale/dataset.py +0 -415
- pyvale/simtools.py +0 -67
- pyvale-2025.7.2.dist-info/RECORD +0 -215
- /pyvale/{blendercalibrationdata.py → blender/blendercalibrationdata.py} +0 -0
- /pyvale/{dicchecks.py → dic/dicchecks.py} +0 -0
- /pyvale/{dicspecklegenerator.py → dic/dicspecklegenerator.py} +0 -0
- /pyvale/{dicspecklequality.py → dic/dicspecklequality.py} +0 -0
- /pyvale/{dicstrainresults.py → dic/dicstrainresults.py} +0 -0
- /pyvale/{cameradata.py → sensorsim/cameradata.py} +0 -0
- /pyvale/{cameradata2d.py → sensorsim/cameradata2d.py} +0 -0
- /pyvale/{errordriftcalc.py → sensorsim/errordriftcalc.py} +0 -0
- /pyvale/{fieldtransform.py → sensorsim/fieldtransform.py} +0 -0
- /pyvale/{generatorsrandom.py → sensorsim/generatorsrandom.py} +0 -0
- /pyvale/{imagetools.py → sensorsim/imagetools.py} +0 -0
- /pyvale/{integratortype.py → sensorsim/integratortype.py} +0 -0
- /pyvale/{output.py → sensorsim/output.py} +0 -0
- /pyvale/{raster.py → sensorsim/raster.py} +0 -0
- /pyvale/{sensordescriptor.py → sensorsim/sensordescriptor.py} +0 -0
- /pyvale/{visualimagedef.py → sensorsim/visualimagedef.py} +0 -0
- /pyvale/{visualopts.py → sensorsim/visualopts.py} +0 -0
- /pyvale/{analyticmeshgen.py → verif/analyticmeshgen.py} +0 -0
- {pyvale-2025.7.2.dist-info → pyvale-2025.8.1.dist-info}/WHEEL +0 -0
- {pyvale-2025.7.2.dist-info → pyvale-2025.8.1.dist-info}/licenses/LICENSE +0 -0
- {pyvale-2025.7.2.dist-info → pyvale-2025.8.1.dist-info}/top_level.txt +0 -0
pyvale/examples/dic/ex1_2_blenderdeformed.py
@@ -0,0 +1,174 @@
+# ==============================================================================
+# pyvale: the python validation engine
+# License: MIT
+# Copyright (C) 2025 The Computer Aided Validation Team
+# ==============================================================================
+
+"""
+Deforming a sample with 2D DIC
+===============================================
+
+This example follows a similar workflow to the previous Blender example.
+In this example, deformation is applied to the sample, and images are rendered
+at each timestep.
+
+Test case: mechanical analysis of a plate with a hole loaded in tension.
+"""
+
+import numpy as np
+from scipy.spatial.transform import Rotation
+from pathlib import Path
+
+# Pyvale imports
+import pyvale.sensorsim as sens
+import pyvale.dataset as dataset
+import pyvale.blender as blender
+import pyvale.mooseherder as mh
+
+# %%
+# The simulation results are loaded here in the same way as in the previous
+# example. As mentioned, this `data_path` can be replaced with your own MOOSE
+# simulation output in exodus format (*.e).
+
+data_path = dataset.render_mechanical_3d_path()
+sim_data = mh.ExodusReader(data_path).read_all_sim_data()
+
+# %%
+# This is then scaled to mm, as all lengths in Blender are to be set in mm.
+# The `SimData` object is then converted into a `RenderMeshData` object, as
+# this skins the mesh ready to be imported into Blender.
+# The `disp_comps` are the expected directions of displacement. Since this is a
+# 3D deformation test case, displacement is expected in the x, y and z directions.
+
+disp_comps = ("disp_x", "disp_y", "disp_z")
+sim_data = sens.scale_length_units(scale=1000.0,
+                                   sim_data=sim_data,
+                                   disp_comps=disp_comps)
+
+render_mesh = sens.create_render_mesh(sim_data,
+                                      ("disp_y", "disp_x"),
+                                      sim_spat_dim=3,
+                                      field_disp_keys=disp_comps)
+
+# %%
+# Firstly, a save path must be set.
+# To do this, a base path must be set; all the generated files will then be
+# saved to a subfolder within this specified base directory
+# (e.g. blenderimages).
+# If no base directory is specified, it will be set to your home directory.
+
+base_dir = Path.cwd()
+
+# %%
+# Creating the scene
+# ^^^^^^^^^^^^^^^^^^
+# In order to create a DIC setup in Blender, first a scene must be created.
+# A scene is initialised using the `BlenderScene` class. All the subsequent
+# objects and actions necessary are then methods of this class.
+
+scene = blender.Scene()
+
+# %%
+# The next thing that can be added to the scene is a sample.
+# This is done by passing in the `RenderMeshData` object.
+# It should be noted that the mesh will be centred on the origin to allow for
+# the cameras to be centred on the mesh.
+# Once the part is added to the Blender scene, it can be both moved and rotated.
+
+
+part = scene.add_part(render_mesh, sim_spat_dim=3)
+# Set the part location
+part_location = np.array([0, 0, 0])
+blender.Tools.move_blender_obj(part=part, pos_world=part_location)
+part_rotation = Rotation.from_euler("xyz", [0, 0, 0], degrees=True)
+blender.Tools.rotate_blender_obj(part=part, rot_world=part_rotation)
+
+# %%
+# A camera can then be added to the scene.
+# To initialise a camera, the camera parameters must be specified using the
+# `CameraData` dataclass. Note that all lengths / distances inputted are in mm.
+# This camera can then be added to the Blender scene.
+# The camera can also be moved and rotated.
+
+cam_data = sens.CameraData(pixels_num=np.array([1540, 1040]),
+                           pixels_size=np.array([0.00345, 0.00345]),
+                           pos_world=(0, 0, 400),
+                           rot_world=Rotation.from_euler("xyz", [0, 0, 0]),
+                           roi_cent_world=(0, 0, 0),
+                           focal_length=15.0)
+camera = scene.add_camera(cam_data)
+camera.location = (0, 0, 410)
+camera.rotation_euler = (0, 0, 0) # NOTE: The default is an XYZ Euler angle
+
+# %%
+# A light can then be added to the scene.
+# Blender offers different light types: Point, Sun, Spot and Area.
+# The light can also be moved and rotated like the camera.
+
+light_data = blender.LightData(type=blender.LightType.POINT,
+                               pos_world=(0, 0, 400),
+                               rot_world=Rotation.from_euler("xyz",
+                                                             [0, 0, 0]),
+                               energy=1)
+light = scene.add_light(light_data)
+light.location = (0, 0, 410)
+light.rotation_euler = (0, 0, 0)
+
+# %%
+# A speckle pattern can then be applied to the sample.
+# Firstly, the material properties of the sample must be specified, but these
+# will all be defaulted if no inputs are provided.
+# The speckle pattern can then be specified by providing a path to an image file
+# with the pattern.
+# The mm/px resolution of the camera must also be specified in order to
+# correctly scale the speckle pattern.
+# It should be noted that for a bigger camera or sample you may need to generate
+# a larger speckle pattern.
+
+material_data = blender.MaterialData()
+speckle_path = dataset.dic_pattern_5mpx_path()
+mm_px_resolution = sens.CameraTools.calculate_mm_px_resolution(cam_data)
+scene.add_speckle(part=part,
+                  speckle_path=speckle_path,
+                  mat_data=material_data,
+                  mm_px_resolution=mm_px_resolution)
+
+# %%
+# Deforming the sample and rendering images
+# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+# Once all the objects have been added to the scene, the sample can be deformed
+# and images can be rendered.
+# Firstly, all the rendering parameters must be set, including parameters such
+# as the number of threads to use.
+
+render_data = blender.RenderData(cam_data=cam_data,
+                                 base_dir=base_dir,
+                                 threads=8)
+
+# %%
+# A series of deformed images can then be rendered.
+# This is done by passing in the rendering parameters, as well as the
+# `RenderMeshData` object, the part (sample) and the spatial dimension of the
+# simulation.
+# This will automatically deform the sample and render subsequent images at
+# each deformation timestep.
+# If `stage_image` is set to True, the image will be saved to disk, converted
+# to an array and deleted, and the image array will be returned. This is because
+# an image cannot be saved directly as an array through Blender.
+
+scene.render_deformed_images(render_mesh,
+                             sim_spat_dim=3,
+                             render_data=render_data,
+                             part=part,
+                             stage_image=False)
+
+# %%
+# The rendered images will be saved to this directory:
+
+print("Save directory of the image:", (render_data.base_dir / "blenderimages"))
+
+# %%
+# There is also the option to save the scene as a Blender project file.
+# This file can be opened with the Blender GUI to view the scene.
+
+blender.Tools.save_blender_file(base_dir)
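
The comments in this new example note that `data_path` can be replaced with your own MOOSE simulation output. A minimal sketch of doing so, assuming a hypothetical exodus file `my_sim_out.e`; the reader and scaling calls are the ones used in the hunk above:

    from pathlib import Path
    import pyvale.sensorsim as sens
    import pyvale.mooseherder as mh

    # Hypothetical path to your own MOOSE exodus output (*.e)
    data_path = Path("my_sim_out.e")
    sim_data = mh.ExodusReader(data_path).read_all_sim_data()

    # Scale lengths to mm for Blender, as in the example above
    disp_comps = ("disp_x", "disp_y", "disp_z")
    sim_data = sens.scale_length_units(scale=1000.0,
                                       sim_data=sim_data,
                                       disp_comps=disp_comps)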
pyvale/examples/dic/ex1_region_of_interest.py
@@ -15,14 +15,17 @@ This example looks at the current core functionality of the Region of Interest
 """
 
 from pathlib import Path
-
+
+# pyvale modules
+import pyvale.dataset as dataset
+import pyvale.dic as dic
 
 # %%
 # We'll begin by selecting our Region of Interest (ROI) using the interactive selection tool.
 # First, we create an instance of the ROI class. We pass a reference image to it, which is
 # displayed as the underlay during ROI selection.
-ref_img =
-roi =
+ref_img = dataset.dic_plate_with_hole_ref()
+roi = dic.RegionOfInterest(ref_image=ref_img)
 roi.interactive_selection(subset_size=31)
 
 # create a directory for the different outputs
pyvale/examples/dic/ex2_plate_with_hole.py
@@ -16,7 +16,10 @@ allowing for comparison to analytically known values.
 
 import matplotlib.pyplot as plt
 from pathlib import Path
-
+
+# pyvale modules
+import pyvale.dataset as dataset
+import pyvale.dic as dic
 
 # %%
 # We'll start by defining some variables that will be reused throughout the example:
@@ -29,8 +32,8 @@ import pyvale
 # The images used here are included in the `data <https://github.com/Computer-Aided-Validation-Laboratory/pyvale/tree/main/src/pyvale/data>`_ folder.
 # We've provided helper functions to load them regardless of your installation path.
 subset_size = 31
-ref_img =
-def_img =
+ref_img = dataset.dic_plate_with_hole_ref()
+def_img = dataset.dic_plate_with_hole_def()
 
 # create a directory for the different outputs
 output_path = Path.cwd() / "pyvale-output"
@@ -42,7 +45,7 @@ if not output_path.is_dir():
 # Create an instance of the ROI class and pass the reference image
 # as input. This image will be shown as the underlay during any ROI selection or
 # visualization.
-roi =
+roi = dic.RegionOfInterest(ref_img)
 roi.interactive_selection(subset_size)
 
 # %%
@@ -77,22 +80,22 @@ roi.read_array(filename=roi_file, binary=False)
 # At present, the DIC engine doesn't return any results to the user; instead the results are saved to disk.
 # You can customize the filename, location, format, and delimiter using
 # the options `output_basepath`, `output_prefix`, `output_delimiter`, and `output_binary`.
-# More info on these options can be found in the documentation for :func:`
+# More info on these options can be found in the documentation for :func:`dic.two_dimensional`.
 # By default, the results will be saved with the prefix `dic_results_` followed
 # by the original filename. The file extension will be replaced with either ".csv" or "dic2d"
 # depending on whether the results are being saved in human-readable or binary format.
-
-
-
-
-
-
-
-
-
-
-
-
+dic.two_dimensional(reference=ref_img,
+                    deformed=def_img,
+                    roi_mask=roi.mask,
+                    seed=roi.seed,
+                    subset_size=subset_size,
+                    subset_step=10,
+                    shape_function="AFFINE",
+                    max_displacement=10,
+                    correlation_criteria="ZNSSD",
+                    output_basepath=output_path,
+                    output_delimiter=",",
+                    output_prefix="dic_results_")
 
 # %%
 # If you saved the results in a human-readable format, you can use any tool
@@ -104,7 +107,7 @@ pyvale.dic_2d(reference=ref_img,
 # The returned object is an instance of :class:`pyvale.DICResults`. If the results
 # were saved in binary format or with a custom delimiter, be sure to specify those parameters.
 dic_files = output_path / "dic_results_*.csv"
-dicdata =
+dicdata = dic.data_import(data=dic_files, delimiter=",", binary=False)
 
 # %%
 # As an example, here's a simple visualization of the displacement (u, v) and
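
The comments above also name an `output_binary` option and a binary "dic2d" extension. A minimal sketch of the binary round trip, assuming `output_binary` and the import's `binary` flag behave as those comments describe:

    # Save results in binary rather than human-readable CSV
    # (output_binary is an assumed flag, named in the comments above)
    dic.two_dimensional(reference=ref_img,
                        deformed=def_img,
                        roi_mask=roi.mask,
                        seed=roi.seed,
                        subset_size=subset_size,
                        subset_step=10,
                        output_basepath=output_path,
                        output_binary=True)

    # Import with binary=True and the matching extension
    dic_files = output_path / "dic_results_*.dic2d"
    dicdata = dic.data_import(data=dic_files, binary=True)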
pyvale/examples/dic/ex3_plate_with_hole_strain.py
@@ -17,7 +17,9 @@ be used for strain calculations.
 
 import matplotlib.pyplot as plt
 from pathlib import Path
-
+
+# pyvale modules
+import pyvale.dic as dic
 
 # %%
 # We'll start by importing the DIC data from the previous example.
@@ -46,8 +48,8 @@ input_data = output_path / "dic_results_*.csv"
 # The output will always include the window coordinates and the full deformation
 # gradient tensor. If you also specify a `strain_formulation`, the corresponding
 # 2D strain tensor will be included in the output.
-
-
+dic.strain_two_dimensional(data=input_data, window_size=5, window_element=4,
+                           output_basepath=output_path)
 
 # %%
 # Once the strain calculation is complete, you can import the results using
@@ -55,9 +57,9 @@ pyvale.strain_2d(data=input_data, window_size=5, window_element=4,
 #
 # Be sure to specify the delimiter, format (binary or not), and layout.
 strain_output = output_path / "strain_dic_results_*.csv"
-straindata =
-
-
+straindata = dic.strain_data_import(data=strain_output,
+                                    binary=False, delimiter=",",
+                                    layout="matrix")
 
 # %%
 # Here's a simple example of how to visualize the deformation gradient components
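
The comments above state that passing a `strain_formulation` adds the corresponding 2D strain tensor to the output. The accepted strings are not shown in this diff, so the value below is a placeholder; a minimal sketch:

    # "small" is a placeholder; see the :func:`dic.strain_two_dimensional`
    # documentation for the accepted strain_formulation values
    dic.strain_two_dimensional(data=input_data,
                               window_size=5,
                               window_element=4,
                               strain_formulation="small",
                               output_basepath=output_path)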
pyvale/examples/dic/ex4_dic_blender.py
@@ -8,7 +8,7 @@
 
 """
 DIC with images generated from a virtual blender experiment
-
+------------------------------------------------------------
 
 This example looks at taking the virtual experiments conducted using the blender
 module and taking it one step further and performing a DIC calculation on the
@@ -24,9 +24,11 @@ changed.**
 import matplotlib.pyplot as plt
 from pathlib import Path
 import numpy as np
-import pyvale
 import os
 
+# Pyvale imports
+import pyvale.dic as dic
+
 #subset size
 subset_size = 21
 
@@ -40,7 +42,7 @@ ref_img = "./blenderimages/reference.tiff"
 def_img = "./blenderimages/blenderimage_*.tiff"
 
 # Interactive ROI selection
-roi =
+roi = dic.RegionOfInterest(ref_img)
 roi.interactive_selection(subset_size)
 
 #output_path
@@ -49,21 +51,21 @@ if not output_path.is_dir():
 output_path.mkdir(parents=True, exist_ok=True)
 
 # DIC Calculation
-
-
-
-
-
-
-
-
-
-
-
+dic.two_dimensional(reference=ref_img,
+                    deformed=def_img,
+                    roi_mask=roi.mask,
+                    seed=roi.seed,
+                    subset_size=subset_size,
+                    subset_step=10,
+                    shape_function="AFFINE",
+                    max_displacement=20,
+                    correlation_criteria="ZNSSD",
+                    output_basepath=output_path,
+                    output_prefix="blender_dic_")
 
 # Import the Results
 data_path = output_path / "blender_dic_*.csv"
-dicdata =
+dicdata = dic.data_import(data=data_path, delimiter=",",
                           layout='matrix', binary=False)
 
 # %%
pyvale/examples/dic/ex5_dic_challenge.py
@@ -25,13 +25,18 @@ As always, we'll start with importing the required libraries:
 
 import matplotlib.pyplot as plt
 from pathlib import Path
-
+
+# Pyvale imports
+import pyvale.dataset as dataset
+import pyvale.dic as dic
+
 
 
 # %%
-# There's a pair of DIC challenge images that come as part of the Pyvale install.
-
-
+# There's a pair of DIC challenge images that come as part of the Pyvale install.
+# We can preload them with:
+ref_pattern = dataset.dic_challenge_ref()
+def_pattern = dataset.dic_challenge_def()
 subset_size = 17
 
 
@@ -41,7 +46,7 @@ subset_size = 17
 # We can use :func:`roi.rect_boundary` to exclude a large border region so we
 # only correlate along the horizontal at the midpoint for our selected subset
 # size
-roi =
+roi = dic.RegionOfInterest(ref_image=ref_pattern)
 subset_radius = subset_size // 2
 roi.rect_boundary(left=50,right=50,top=250-subset_radius,bottom=250-subset_radius)
 roi.show_image()
@@ -66,19 +71,19 @@ if not output_path.is_dir():
 # the current working directory with a filename prefix of subset_size_19_*.txt
 # If you are feeling adventurous you could investigate the effect of varying the
 # subset size by placing the above and below sections in a loop.
-
-
-
-
-
-
-
-
+dic.two_dimensional(reference=ref_pattern,
+                    deformed=def_pattern,
+                    roi_mask=roi.mask,
+                    subset_size=subset_size,
+                    subset_step=1,
+                    seed=[3500,250],
+                    max_displacement=10,
+                    output_basepath=output_path)
 
 # %%
 # We can import the results in the standard way
 data_path = output_path / "dic_results_DIC_Challenge*.csv"
-dicdata =
+dicdata = dic.data_import(data=data_path, layout='column',
                           binary=False, delimiter=",")
 
 # %%
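
The comment above suggests looping the correlation over subset sizes. A minimal sketch using only the parameters shown in this example, with a per-size `output_prefix` so the result files don't collide:

    for subset_size in (17, 19, 21):
        # Rebuild the ROI since the excluded border depends on the subset size
        roi = dic.RegionOfInterest(ref_image=ref_pattern)
        subset_radius = subset_size // 2
        roi.rect_boundary(left=50, right=50,
                          top=250-subset_radius, bottom=250-subset_radius)
        dic.two_dimensional(reference=ref_pattern,
                            deformed=def_pattern,
                            roi_mask=roi.mask,
                            subset_size=subset_size,
                            subset_step=1,
                            seed=[3500,250],
                            max_displacement=10,
                            output_basepath=output_path,
                            output_prefix=f"subset_size_{subset_size}_")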
pyvale/examples/genanalyticdata/ex1_1_scalarvisualisation.py
@@ -3,32 +3,38 @@
 # License: MIT
 # Copyright (C) 2025 The Computer Aided Validation Team
 # ==============================================================================
-
+import numpy as np
 import matplotlib.pyplot as plt
-import pyvale
+import pyvale.verif as va
 
 def main() -> None:
 
-    (sim_data,data_gen) =
+    (sim_data,data_gen) = va.AnalyticCaseFactory.scalar_linear_2d()
 
     (grid_x,grid_y,grid_field) = data_gen.get_visualisation_grid()
 
+    print()
+    print(f"{np.min(grid_field.flatten())=}")
+    print(f"{np.max(grid_field.flatten())=}")
+    print()
+
     fig, ax = plt.subplots()
     cs = ax.contourf(grid_x,grid_y,grid_field)
     cbar = fig.colorbar(cs)
     plt.axis('scaled')
 
 
-    (sim_data,data_gen) =
+    # (sim_data,data_gen) = va.AnalyticCaseFactory.scalar_quadratic_2d()
 
-    (grid_x,grid_y,grid_field) = data_gen.get_visualisation_grid()
+    # (grid_x,grid_y,grid_field) = data_gen.get_visualisation_grid()
 
-    fig, ax = plt.subplots()
-    cs = ax.contourf(grid_x,grid_y,grid_field)
-    cbar = fig.colorbar(cs)
-    plt.axis('scaled')
+    # fig, ax = plt.subplots()
+    # cs = ax.contourf(grid_x,grid_y,grid_field)
+    # cbar = fig.colorbar(cs)
+    # plt.axis('scaled')
+
+    # plt.show()
 
-    plt.show()
 
 
 if __name__ == '__main__':
pyvale/examples/genanalyticdata/ex1_2_scalarcasebuild.py
@@ -7,11 +7,11 @@
 import numpy as np
 import matplotlib.pyplot as plt
 import sympy
-import pyvale
+import pyvale.verif as va
 
 def main() -> None:
 
-    case_data =
+    case_data = va.AnalyticData2D()
     case_data.length_x = 10.0
     case_data.length_y = 7.5
     n_elem_mult = 10
@@ -27,7 +27,7 @@ def main() -> None:
     case_data.offsets_time = (0.0,)
 
 
-    data_gen =
+    data_gen = va.AnalyticSimDataGen(case_data)
     sim_data = data_gen.generate_sim_data()
 
     (grid_x,grid_y,grid_field) = data_gen.get_visualisation_grid()
pyvale/examples/genanalyticdata/ex2_1_analyticsensors.py
@@ -6,23 +6,29 @@
 
 import matplotlib.pyplot as plt
 import numpy as np
-import pyvale
+import pyvale as pyv
+import pyvale.verif as va
 
 def main() -> None:
-
+    # 10x7.5 plate with bi-directional field gradient
+    # 40x30 elements [x,y]
+    # Field slope of 20/lengX in X
+    # Field slope of 10/lengY in Y
+    # Field max in top corner of 220, field min in bottom corner 20
+    (sim_data,_) = va.AnalyticCaseFactory.scalar_linear_2d()
 
-    descriptor =
+    descriptor = pyv.SensorDescriptorFactory.temperature_descriptor()
 
     field_key = 'scalar'
-    t_field =
-
-
+    t_field = pyv.FieldScalar(sim_data,
+                              field_key=field_key,
+                              elem_dims=2)
 
     n_sens = (4,1,1)
     x_lims = (0.0,10.0)
     y_lims = (0.0,7.5)
     z_lims = (0.0,0.0)
-    sens_pos =
+    sens_pos = pyv.create_sensor_pos_array(n_sens,x_lims,y_lims,z_lims)
 
     use_sim_time = False
     if use_sim_time:
@@ -30,10 +36,10 @@ def main() -> None:
     else:
         sample_times = np.linspace(0.0,np.max(sim_data.time),50)
 
-    sensor_data =
+    sensor_data = pyv.SensorData(positions=sens_pos,
                                  sample_times=sample_times)
 
-    tc_array =
+    tc_array = pyv.SensorArrayPoint(sensor_data,
                                     t_field,
                                     descriptor)
 
@@ -43,37 +49,37 @@ def main() -> None:
 
     error_chain = []
     if errors_on['indep_sys']:
-        error_chain.append(
-        error_chain.append(
+        error_chain.append(pyv.ErrSysOffset(offset=-5.0))
+        error_chain.append(pyv.ErrSysUnif(low=-5.0,
                                           high=5.0))
-    gen_norm =
+    gen_norm = pyv.GenNormal(std=1.0)
 
     if errors_on['rand']:
-        error_chain.append(
-        error_chain.append(
+        error_chain.append(pyv.ErrRandNormPercent(std_percent=1.0))
+        error_chain.append(pyv.ErrRandUnifPercent(low_percent=-1.0,
                                                   high_percent=1.0))
 
     if errors_on['dep_sys']:
-        error_chain.append(
-        error_chain.append(
+        error_chain.append(pyv.ErrSysDigitisation(bits_per_unit=2**8/100))
+        error_chain.append(pyv.ErrSysSaturation(meas_min=0.0,meas_max=300.0))
 
     if len(error_chain) > 0:
-        error_integrator =
+        error_integrator = pyv.ErrIntegrator(error_chain,
                                              sensor_data,
                                              tc_array.get_measurement_shape())
         tc_array.set_error_integrator(error_integrator)
 
     measurements = tc_array.get_measurements()
 
-
-    (0,1), # Sensor 1
-    (0,1), # Component 1: scalar field = 1 component
-    (measurements.shape[2]-5,measurements.shape[2]))
+    pyv.print_measurements(tc_array,
+                           slice(0,1), # Sensor 1
+                           slice(0,1), # Component 1: scalar field = 1 component
+                           slice(measurements.shape[2]-5,measurements.shape[2]))
 
-    (fig,_) =
+    (fig,_) = pyv.plot_time_traces(tc_array,field_key)
     plt.show()
 
-    pv_plot =
+    pv_plot = pyv.plot_point_sensors_on_sim(tc_array,field_key)
     pv_plot.show(cpos="xy")
 
 if __name__ == '__main__':
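
The hunk above tests `errors_on['indep_sys']`, `errors_on['rand']` and `errors_on['dep_sys']`, but the dictionary itself sits outside the changed lines. Assuming it is a plain dict keyed as used above, enabling only the random errors would look like:

    # The dict shape is an assumption; only the keys appear in this diff
    errors_on = {'indep_sys': False,
                 'rand': True,
                 'dep_sys': False}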
pyvale/examples/genanalyticdata/ex2_2_analyticsensors_nomesh.py
@@ -0,0 +1,67 @@
+# ==============================================================================
+# pyvale: the python validation engine
+# License: MIT
+# Copyright (C) 2025 The Computer Aided Validation Team
+# ==============================================================================
+import copy
+import matplotlib.pyplot as plt
+import numpy as np
+import pyvale.sensorsim as sens
+import pyvale.verif as va
+
+def main() -> None:
+    # 10x7.5 plate with bi-directional field gradient
+    # 40x30 elements [x,y]
+    # Field slope of 20/lengX in X
+    # Field slope of 10/lengY in Y
+    # Field max in top corner of 220, field min in bottom corner 20
+    (sim_data,_) = va.AnalyticCaseFactory.scalar_linear_2d()
+    sim_data_nomesh = copy.deepcopy(sim_data)
+    sim_data_nomesh.connect = None
+
+    descriptor = sens.SensorDescriptorFactory.temperature_descriptor()
+
+    field_key = 'scalar'
+    scal_field = sens.FieldScalar(sim_data,
+                                  field_key=field_key,
+                                  elem_dims=2)
+
+    # scal_field_nm = sens.FieldScalar(sim_data_nomesh,
+    #                                  field_key=field_key,
+    #                                  elem_dims=2)
+
+
+    n_sens = (4,1,1)
+    x_lims = (0.0,10.0)
+    y_lims = (0.0,7.5)
+    z_lims = (0.0,0.0)
+    sens_pos = sens.create_sensor_pos_array(n_sens,x_lims,y_lims,z_lims)
+
+    use_sim_time = False
+    if use_sim_time:
+        sample_times = None
+    else:
+        sample_times = np.linspace(0.0,np.max(sim_data.time),50)
+
+    sensor_data = sens.SensorData(positions=sens_pos,
+                                  sample_times=sample_times)
+
+    tc_array = sens.SensorArrayPoint(sensor_data,
+                                     scal_field,
+                                     descriptor)
+
+    measurements = tc_array.get_measurements()
+
+    sens.print_measurements(tc_array,
+                            slice(0,1), # Sensor 1
+                            slice(0,1), # Component 1: scalar field = 1 component
+                            slice(measurements.shape[2]-5,measurements.shape[2]))
+
+    # (fig,_) = sens.plot_time_traces(tc_array,field_key)
+    # plt.show()
+
+    # pv_plot = sens.plot_point_sensors_on_sim(tc_array,field_key)
+    # pv_plot.show(cpos="xy")
+
+if __name__ == '__main__':
+    main()