pyvale 2025.4.1-py3-none-any.whl → 2025.5.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pyvale/__init__.py +18 -3
- pyvale/analyticmeshgen.py +1 -0
- pyvale/analyticsimdatafactory.py +18 -13
- pyvale/analyticsimdatagenerator.py +105 -72
- pyvale/blendercalibrationdata.py +15 -0
- pyvale/blenderlightdata.py +26 -0
- pyvale/blendermaterialdata.py +15 -0
- pyvale/blenderrenderdata.py +30 -0
- pyvale/blenderscene.py +488 -0
- pyvale/blendertools.py +420 -0
- pyvale/camera.py +6 -5
- pyvale/cameradata.py +25 -7
- pyvale/cameradata2d.py +6 -4
- pyvale/camerastereo.py +217 -0
- pyvale/cameratools.py +206 -11
- pyvale/cython/rastercyth.py +6 -2
- pyvale/data/cal_target.tiff +0 -0
- pyvale/dataset.py +73 -14
- pyvale/errorcalculator.py +8 -10
- pyvale/errordriftcalc.py +10 -9
- pyvale/errorintegrator.py +19 -21
- pyvale/errorrand.py +33 -39
- pyvale/errorsyscalib.py +134 -0
- pyvale/errorsysdep.py +19 -22
- pyvale/errorsysfield.py +49 -41
- pyvale/errorsysindep.py +79 -175
- pyvale/examples/basics/ex1_1_basicscalars_therm2d.py +131 -0
- pyvale/examples/basics/ex1_2_sensormodel_therm2d.py +158 -0
- pyvale/examples/basics/ex1_3_customsens_therm3d.py +216 -0
- pyvale/examples/basics/ex1_4_basicerrors_therm3d.py +153 -0
- pyvale/examples/basics/ex1_5_fielderrs_therm3d.py +168 -0
- pyvale/examples/basics/ex1_6_caliberrs_therm2d.py +133 -0
- pyvale/examples/basics/ex1_7_spatavg_therm2d.py +123 -0
- pyvale/examples/basics/ex2_1_basicvectors_disp2d.py +112 -0
- pyvale/examples/basics/ex2_2_vectorsens_disp2d.py +111 -0
- pyvale/examples/basics/ex2_3_sensangle_disp2d.py +139 -0
- pyvale/examples/basics/ex2_4_chainfielderrs_disp2d.py +196 -0
- pyvale/examples/basics/ex2_5_vectorfields3d_disp3d.py +109 -0
- pyvale/examples/basics/ex3_1_basictensors_strain2d.py +114 -0
- pyvale/examples/basics/ex3_2_tensorsens2d_strain2d.py +111 -0
- pyvale/examples/basics/ex3_3_tensorsens3d_strain3d.py +182 -0
- pyvale/examples/basics/ex4_1_expsim2d_thermmech2d.py +171 -0
- pyvale/examples/basics/ex4_2_expsim3d_thermmech3d.py +252 -0
- pyvale/examples/{analyticdatagen → genanalyticdata}/ex1_1_scalarvisualisation.py +6 -9
- pyvale/examples/{analyticdatagen → genanalyticdata}/ex1_2_scalarcasebuild.py +8 -11
- pyvale/examples/{analyticdatagen → genanalyticdata}/ex2_1_analyticsensors.py +9 -12
- pyvale/examples/imagedef2d/ex_imagedef2d_todisk.py +8 -15
- pyvale/examples/renderblender/ex1_1_blenderscene.py +121 -0
- pyvale/examples/renderblender/ex1_2_blenderdeformed.py +119 -0
- pyvale/examples/renderblender/ex2_1_stereoscene.py +128 -0
- pyvale/examples/renderblender/ex2_2_stereodeformed.py +131 -0
- pyvale/examples/renderblender/ex3_1_blendercalibration.py +120 -0
- pyvale/examples/{rasterisation → renderrasterisation}/ex_rastenp.py +3 -2
- pyvale/examples/{rasterisation → renderrasterisation}/ex_rastercyth_oneframe.py +2 -2
- pyvale/examples/{rasterisation → renderrasterisation}/ex_rastercyth_static_cypara.py +3 -8
- pyvale/examples/{rasterisation → renderrasterisation}/ex_rastercyth_static_pypara.py +6 -7
- pyvale/examples/{ex1_4_thermal2d.py → visualisation/ex1_1_plot_traces.py} +32 -16
- pyvale/examples/{features/ex_animation_tools_3dmonoblock.py → visualisation/ex2_1_animate_sim.py} +37 -31
- pyvale/experimentsimulator.py +107 -30
- pyvale/field.py +2 -9
- pyvale/fieldconverter.py +98 -22
- pyvale/fieldsampler.py +2 -2
- pyvale/fieldscalar.py +10 -10
- pyvale/fieldtensor.py +15 -17
- pyvale/fieldtransform.py +7 -2
- pyvale/fieldvector.py +6 -7
- pyvale/generatorsrandom.py +25 -47
- pyvale/imagedef2d.py +6 -2
- pyvale/integratorfactory.py +2 -2
- pyvale/integratorquadrature.py +50 -24
- pyvale/integratorrectangle.py +85 -7
- pyvale/integratorspatial.py +4 -4
- pyvale/integratortype.py +3 -3
- pyvale/output.py +17 -0
- pyvale/pyvaleexceptions.py +11 -0
- pyvale/raster.py +6 -5
- pyvale/rastercy.py +6 -4
- pyvale/rasternp.py +6 -4
- pyvale/rendermesh.py +6 -2
- pyvale/sensorarray.py +2 -2
- pyvale/sensorarrayfactory.py +52 -65
- pyvale/sensorarraypoint.py +29 -30
- pyvale/sensordata.py +2 -2
- pyvale/sensordescriptor.py +138 -25
- pyvale/sensortools.py +3 -3
- pyvale/simtools.py +67 -0
- pyvale/visualexpplotter.py +99 -57
- pyvale/visualimagedef.py +11 -7
- pyvale/visualimages.py +6 -4
- pyvale/visualopts.py +372 -58
- pyvale/visualsimanimator.py +42 -13
- pyvale/visualsimsensors.py +318 -0
- pyvale/visualtools.py +69 -13
- pyvale/visualtraceplotter.py +52 -165
- {pyvale-2025.4.1.dist-info → pyvale-2025.5.2.dist-info}/METADATA +17 -14
- pyvale-2025.5.2.dist-info/RECORD +172 -0
- {pyvale-2025.4.1.dist-info → pyvale-2025.5.2.dist-info}/WHEEL +1 -1
- pyvale/examples/analyticdatagen/__init__.py +0 -5
- pyvale/examples/ex1_1_thermal2d.py +0 -86
- pyvale/examples/ex1_2_thermal2d.py +0 -108
- pyvale/examples/ex1_3_thermal2d.py +0 -110
- pyvale/examples/ex1_5_thermal2d.py +0 -102
- pyvale/examples/ex2_1_thermal3d .py +0 -84
- pyvale/examples/ex2_2_thermal3d.py +0 -51
- pyvale/examples/ex2_3_thermal3d.py +0 -106
- pyvale/examples/ex3_1_displacement2d.py +0 -44
- pyvale/examples/ex3_2_displacement2d.py +0 -76
- pyvale/examples/ex3_3_displacement2d.py +0 -101
- pyvale/examples/ex3_4_displacement2d.py +0 -102
- pyvale/examples/ex4_1_strain2d.py +0 -54
- pyvale/examples/ex4_2_strain2d.py +0 -76
- pyvale/examples/ex4_3_strain2d.py +0 -97
- pyvale/examples/ex5_1_multiphysics2d.py +0 -75
- pyvale/examples/ex6_1_multiphysics2d_expsim.py +0 -115
- pyvale/examples/ex6_2_multiphysics3d_expsim.py +0 -160
- pyvale/examples/features/__init__.py +0 -5
- pyvale/examples/features/ex_area_avg.py +0 -89
- pyvale/examples/features/ex_calibration_error.py +0 -108
- pyvale/examples/features/ex_chain_field_errs.py +0 -141
- pyvale/examples/features/ex_field_errs.py +0 -78
- pyvale/examples/features/ex_sensor_single_angle_batch.py +0 -110
- pyvale/optimcheckfuncs.py +0 -153
- pyvale/visualsimplotter.py +0 -182
- pyvale-2025.4.1.dist-info/RECORD +0 -163
- {pyvale-2025.4.1.dist-info → pyvale-2025.5.2.dist-info}/licenses/LICENSE +0 -0
- {pyvale-2025.4.1.dist-info → pyvale-2025.5.2.dist-info}/top_level.txt +0 -0
pyvale/examples/renderblender/ex3_1_blendercalibration.py
ADDED

@@ -0,0 +1,120 @@
+# ==============================================================================
+# pyvale: the python validation engine
+# License: MIT
+# Copyright (C) 2025 The Computer Aided Validation Team
+# ==============================================================================
+
+import numpy as np
+from scipy.spatial.transform import Rotation
+from pathlib import Path
+import pyvale
+
+def main() -> None:
+    #NOTE: All lengths are to be specified in mm
+
+    # Set the save path
+    # --------------------------------------------------------------------------
+    # All the files saved will be saved to a subfolder within this specified
+    # base directory.
+    # This base directory can be specified by:
+    base_dir = Path("./")
+    # If no base directory is specified, it will be set as your home directory
+
+    # Creating the scene
+    # --------------------------------------------------------------------------
+    scene = pyvale.BlenderScene()
+
+    # Add the calibration target
+    # A rectangular calibration target of the specified size is added to the scene
+    target = scene.add_cal_target(target_size=np.array([150, 100, 10]))
+
+    # Add the camera
+    cam_data_0 = pyvale.CameraData(pixels_num=np.array([1540, 1040]),
+                                   pixels_size=np.array([0.00345, 0.00345]),
+                                   pos_world=np.array([0, 0, 400]),
+                                   rot_world=Rotation.from_euler("xyz", [0, 0, 0]),
+                                   roi_cent_world=(0, 0, 0),
+                                   focal_length=15.0)
+    # Set this to "symmetric" to get a symmetric stereo system or set this to
+    # "faceon" to get a face-on stereo system
+    stereo_setup = "faceon"
+    if stereo_setup == "symmetric":
+        stereo_system = pyvale.CameraTools.symmetric_stereo_cameras(
+            cam_data_0=cam_data_0,
+            stereo_angle=15.0)
+    if stereo_setup == "faceon":
+        stereo_system = pyvale.CameraTools.faceon_stereo_cameras(
+            cam_data_0=cam_data_0,
+            stereo_angle=15.0)
+
+    scene.add_stereo_system(stereo_system)
+
+    # Generate calibration file
+    stereo_system.save_calibration(base_dir)
+
+    # Add the light
+    light_data = pyvale.BlenderLightData(type=pyvale.BlenderLightType.POINT,
+                                         pos_world=(0, 0, 200),
+                                         rot_world=Rotation.from_euler("xyz",
+                                                                       [0, 0, 0]),
+                                         energy=1)
+    light = scene.add_light(light_data)
+    # The light can be moved and rotated:
+    light.location = (0, 0, 210)
+    light.rotation_euler = (0, 0, 0) # NOTE: The default is an XYZ Euler angle
+
+    # Apply the calibration target pattern
+    material_data = pyvale.BlenderMaterialData()
+    speckle_path = Path.cwd() / "src/pyvale/data/cal_target.tiff"
+    mm_px_resolution = pyvale.CameraTools.calculate_mm_px_resolution(cam_data_0)
+    scene.add_speckle(part=target,
+                      speckle_path=speckle_path,
+                      mat_data=material_data,
+                      mm_px_resolution=mm_px_resolution,
+                      cal=True)
+    # NOTE: The `cal` flag has to be set to True in order to scale the
+    # calibration target pattern correctly
+
+    # Rendering calibration images
+    # --------------------------------------------------------------------------
+    save_dir = Path.cwd() / "blenderimages"
+    save_name = "cal"
+    render_data = pyvale.RenderData(cam_data=(stereo_system.cam_data_0,
+                                              stereo_system.cam_data_1),
+                                    base_dir=base_dir)
+    # NOTE: The number of threads used to render the images is set within
+    # RenderData, it is defaulted to 4 threads
+
+    # The desired limits for the calibration target movement are to be set within
+    # the CalibrationData dataclass
+    calibration_data = pyvale.CalibrationData(angle_lims=(-10, 10),
+                                              angle_step=5,
+                                              plunge_lims=(-5, 5),
+                                              plunge_step=5)
+
+    # The number of calibration images that will be rendered can be calculated
+    number_calibration_images = pyvale.BlenderTools.number_calibration_images(calibration_data)
+    print()
+    print(80*"-")
+    print("Number of calibration images to be rendered:", number_calibration_images)
+    print(80*"-")
+
+    # The calibration images can then be rendered
+    pyvale.BlenderTools.render_calibration_images(render_data,
+                                                  calibration_data,
+                                                  target)
+
+    print()
+    print(80*"-")
+    print("Save directory of the images:", (render_data.base_dir / "calimages"))
+    print(80*"-")
+    print()
+
+    # Save Blender file
+    # --------------------------------------------------------------------------
+    # The file that will be saved is a Blender project file. This can be opened
+    # with the Blender GUI to view the scene.
+    pyvale.BlenderTools.save_blender_file(base_dir)
+
+if __name__ == "__main__":
+    main()
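One small hazard in the example above: stereo_system is only assigned inside two independent if blocks, so a typo in stereo_setup would leave it unbound. Below is a minimal sketch of the same selection written defensively; the two CameraTools constructors are the ones shown in the diff above, while the wrapper function and the error handling are illustrative assumptions, not part of pyvale's API.

import pyvale

def build_stereo_system(stereo_setup: str, cam_data_0: "pyvale.CameraData"):
    # Dispatch on the setup name and fail loudly on an unknown value rather
    # than silently leaving the stereo system unbound (illustrative sketch only).
    if stereo_setup == "symmetric":
        return pyvale.CameraTools.symmetric_stereo_cameras(
            cam_data_0=cam_data_0, stereo_angle=15.0)
    if stereo_setup == "faceon":
        return pyvale.CameraTools.faceon_stereo_cameras(
            cam_data_0=cam_data_0, stereo_angle=15.0)
    raise ValueError(f"unknown stereo setup: {stereo_setup!r}")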
pyvale/examples/{rasterisation → renderrasterisation}/ex_rastenp.py
RENAMED

@@ -1,8 +1,9 @@
-
+# ==============================================================================
 # pyvale: the python validation engine
 # License: MIT
 # Copyright (C) 2025 The Computer Aided Validation Team
-
+# ==============================================================================
+
 from pathlib import Path
 import time
 import numpy as np
pyvale/examples/{rasterisation → renderrasterisation}/ex_rastercyth_oneframe.py
RENAMED

@@ -1,8 +1,8 @@
-
+# ==============================================================================
 # pyvale: the python validation engine
 # License: MIT
 # Copyright (C) 2025 The Computer Aided Validation Team
-
+# ==============================================================================
 
 import time
 import numpy as np
pyvale/examples/{rasterisation → renderrasterisation}/ex_rastercyth_static_cypara.py
RENAMED

@@ -1,8 +1,9 @@
-
+# ==============================================================================
 # pyvale: the python validation engine
 # License: MIT
 # Copyright (C) 2025 The Computer Aided Validation Team
-
+# ==============================================================================
+
 from pathlib import Path
 import time
 import numpy as np

@@ -12,10 +13,6 @@ import mooseherder as mh
 import pyvale as pyv
 
 def main() -> None:
-    """pyvale example: rasterisation field renderer
-    ----------------------------------------------------------------------------
-    - TODO
-    """
     print()
     print(80*"=")
     print("RASTER CYTHON FILE (should be *.so on Linux):")

@@ -23,8 +20,6 @@ def main() -> None:
     print(80*"=")
     print()
 
-    # This a path to an exodus *.e output file from MOOSE, this can be
-    # replaced with a path to your own simulation file
     sim_path = pyv.DataSet.render_mechanical_3d_path()
     #sim_path = pyv.DataSet.render_simple_block_path()
     #sim_path = Path.home()/"pyvale"/"src"/"pyvale"/"simcases"/"case26_out.e"
pyvale/examples/{rasterisation → renderrasterisation}/ex_rastercyth_static_pypara.py
RENAMED

@@ -1,9 +1,11 @@
-
+# ==============================================================================
 # pyvale: the python validation engine
 # License: MIT
 # Copyright (C) 2025 The Computer Aided Validation Team
-
-
+# ==============================================================================
+
+
+
 import time
 import numpy as np
 from scipy.spatial.transform import Rotation

@@ -11,11 +13,8 @@ import matplotlib.pyplot as plt
 import mooseherder as mh
 import pyvale as pyv
 
+
 def main() -> None:
-    """pyvale example: rasterisation field renderer
-    ----------------------------------------------------------------------------
-    - TODO
-    """
     print()
     print(80*"=")
     print("RASTER CYTHON FILE (should be *.so on Linux):")
pyvale/examples/{ex1_4_thermal2d.py → visualisation/ex1_1_plot_traces.py}
RENAMED

@@ -1,26 +1,31 @@
-#
+# ==============================================================================
 # pyvale: the python validation engine
 # License: MIT
 # Copyright (C) 2025 The Computer Aided Validation Team
-#
+# ==============================================================================
+
+"""
+Pyvale example: TODO
+--------------------------------------------------------------------------------
+TODO
+
+Test case: TODO
+"""
 
 import numpy as np
 import matplotlib.pyplot as plt
 import mooseherder as mh
 import pyvale as pyv
 
+# TODO: comments and full description for this example like the basics examples
 
 def main() -> None:
-
-    ----------------------------------------------------------------------------
-    - Demonstrates options for controlling plots of points sensor traces using
-    matplotlib
-    """
+
     data_path = pyv.DataSet.thermal_2d_path()
     sim_data = mh.ExodusReader(data_path).read_all_sim_data()
-
-
-
+    sim_data = pyv.scale_length_units(scale=1000.0,
+                                      sim_data=sim_data,
+                                      disp_comps=None)
 
     n_sens = (4,1,1)
     x_lims = (0.0,100.0)

@@ -33,11 +38,12 @@ def main() -> None:
     sens_data = pyv.SensorData(positions=sens_pos,
                                sample_times=sample_times)
 
+    field_key = "temperature"
     tc_array = pyv.SensorArrayFactory \
         .thermocouples_basic_errs(sim_data,
                                   sens_data,
                                   field_key,
-
+                                  elem_dims=2)
 
     err_int = pyv.ErrIntegrator([pyv.ErrSysOffset(offset=-5.0)],
                                 sens_data,

@@ -46,14 +52,24 @@ def main() -> None:
 
     measurements = tc_array.get_measurements()
 
+
     print(80*"-")
-
-
-
-
-
+
+    sens_print: int = 0
+    time_print: int = 5
+    comp_print: int = 0
+
+    print(f"These are the last {time_print} virtual measurements of sensor "
+          + f"{sens_print}:")
+
+    pyv.print_measurements(sens_array=tc_array,
+                           sensors=(sens_print,sens_print+1),
+                           components=(comp_print,comp_print+1),
+                           time_steps=(measurements.shape[2]-time_print,
+                                       measurements.shape[2]))
     print(80*"-")
 
+
     trace_props = pyv.TraceOptsSensor()
 
     trace_props.truth_line = None
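The pyv.print_measurements call added above takes half-open (start, stop) index ranges for sensors, field components and time steps, with measurements.shape[2] as the time axis. Below is a minimal NumPy sketch of the equivalent slicing, assuming (as that indexing implies) that get_measurements() returns an array shaped (n_sensors, n_field_components, n_time_steps); the random array is only a stand-in for real measurement data.

import numpy as np

# Hypothetical stand-in for tc_array.get_measurements(): 4 sensors, 1 field
# component, 20 time steps.
measurements = np.random.default_rng(0).normal(size=(4, 1, 20))

sens_print, comp_print, time_print = 0, 0, 5

# Same selection as sensors=(0,1), components=(0,1),
# time_steps=(n_steps - 5, n_steps) in the example above.
last_steps = measurements[sens_print:sens_print + 1,
                          comp_print:comp_print + 1,
                          measurements.shape[2] - time_print:measurements.shape[2]]
print(last_steps.shape)  # (1, 1, 5): one sensor, one component, last 5 steps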
pyvale/examples/{features/ex_animation_tools_3dmonoblock.py → visualisation/ex2_1_animate_sim.py}
RENAMED
@@ -1,48 +1,54 @@
-
-
-
-
-
-
-
-
-
+# ==============================================================================
+# pyvale: the python validation engine
+# License: MIT
+# Copyright (C) 2025 The Computer Aided Validation Team
+# ==============================================================================
+
+"""
+Pyvale example: TODO
+--------------------------------------------------------------------------------
+TODO
+
+Test case: TODO
+"""
+
 from pathlib import Path
 import numpy as np
 import mooseherder as mh
-import pyvale
+import pyvale as pyv
 
 
 def main() -> None:
-
-
-    # Use mooseherder to read the exodus and get a SimData object
-    data_path = pyvale.DataSet.thermal_3d_path()
+
+    data_path = pyv.DataSet.thermal_3d_path()
     sim_data = mh.ExodusReader(data_path).read_all_sim_data()
-
-
+
+    sim_data = pyv.scale_length_units(scale=1000.0,
+                                      sim_data=sim_data,
+                                      disp_comps=None)
     sim_data.coords = sim_data.coords*1000.0 # type: ignore
 
-
+    pyv.print_dimensions(sim_data)
 
     n_sens = (1,4,1)
     x_lims = (12.5,12.5)
     y_lims = (0,33.0)
     z_lims = (0.0,12.0)
-    sens_pos =
+    sens_pos = pyv.create_sensor_pos_array(n_sens,x_lims,y_lims,z_lims)
 
-    sens_data =
+    sens_data = pyv.SensorData(positions=sens_pos)
 
-
+    field_key = 'temperature'
+    tc_array = pyv.SensorArrayFactory() \
         .thermocouples_basic_errs(sim_data,
                                   sens_data,
-
-
+                                  field_key,
+                                  elem_dims=3)
 
     measurements = tc_array.get_measurements()
     print(f'\nMeasurements for sensor at top of block:\n{measurements[-1,0,:]}\n')
 
-    vis_opts =
+    vis_opts = pyv.VisOptsSimSensors()
     vis_opts.window_size_px = (1200,800)
     vis_opts.camera_position = np.array([(59.354, 43.428, 69.946),
                                          (-2.858, 13.189, 4.523),

@@ -54,25 +60,25 @@ def main() -> None:
         save_dir.mkdir()
 
     if vis_mode == "animate":
-        anim_opts =
+        anim_opts = pyv.VisOptsAnimation()
 
         anim_opts.save_path = save_dir / "test_animation"
-        anim_opts.save_animation =
+        anim_opts.save_animation = pyv.EAnimationType.MP4
 
-        pv_anim =
-
+        pv_anim = pyv.animate_sim_with_sensors(tc_array,
+                                               field_key,
                                                time_steps=None,
                                                vis_opts=vis_opts,
                                                anim_opts=anim_opts)
 
     else:
-        image_save_opts =
+        image_save_opts = pyv.VisOptsImageSave()
 
         image_save_opts.path = save_dir / "test_vector_graphics"
-        image_save_opts.image_type =
+        image_save_opts.image_type = pyv.EImageType.SVG
 
-        pv_plot =
-
+        pv_plot = pyv.plot_point_sensors_on_sim(tc_array,
+                                                field_key,
                                                 time_step=-1,
                                                 vis_opts=vis_opts,
                                                 image_save_opts=image_save_opts)
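In the renamed animation example above, pyv.create_sensor_pos_array(n_sens, x_lims, y_lims, z_lims) with n_sens=(1,4,1) places four thermocouples in a line through the block. The sketch below builds the same kind of grid directly with NumPy purely to illustrate the expected layout; it is an assumption about the sensor grid, not pyvale's implementation.

import numpy as np

# Grid sizes and limits copied from the example above.
n_sens = (1, 4, 1)
x_lims, y_lims, z_lims = (12.5, 12.5), (0.0, 33.0), (0.0, 12.0)

# One linspace per axis, then every combination of the three.
xx = np.linspace(*x_lims, n_sens[0])
yy = np.linspace(*y_lims, n_sens[1])
zz = np.linspace(*z_lims, n_sens[2])
gx, gy, gz = np.meshgrid(xx, yy, zz, indexing="ij")
sens_pos = np.column_stack((gx.ravel(), gy.ravel(), gz.ravel()))

print(sens_pos.shape)  # (4, 3): four sensor positions along y at x = 12.5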
pyvale/experimentsimulator.py
CHANGED
@@ -1,30 +1,74 @@
-#
+# ==============================================================================
 # pyvale: the python validation engine
 # License: MIT
 # Copyright (C) 2025 The Computer Aided Validation Team
-#
+# ==============================================================================
+
+"""
+This module is used for performing Monte-Carlo virtual experiments over a series
+of input simulation cases and sensor arrays.
+"""
 
 from dataclasses import dataclass
 import numpy as np
-from pyvale.sensorarray import ISensorArray
 import mooseherder as mh
+from pyvale.sensorarray import ISensorArray
 
-# NOTE: This module is a feature under developement.
 
 @dataclass(slots=True)
 class ExperimentStats:
     """Dataclass holding summary statistics for a series of simulated
-    experiments.
+    experiments produced using the experiment simulator. All summary statistics
+    are calculated over the 'experiments' dimension of the measurements array so
+    the arrays of statistics have the shape=(n_sims,n_sensors,n_field_comps,
+    n_time_steps). Note that the n_sims dimension refers to the number of input
+    multi-physics simulations (i.e. SimData objects) that the virtual
+    experiments were performed over.
     """
+
     mean: np.ndarray | None = None
+    """Mean of each sensors measurement for the given field component and time
+    step as an array with shape=(n_sims,n_sensors,n_field_comps,n_time_steps).
+    """
+
     std: np.ndarray | None = None
-
+    """Standard deviation of the sensor measurements for the given field
+    component and time step as an array with shape=(n_sims,n_sensors,
+    n_field_comps, n_time_steps)
+    """
+
     max: np.ndarray | None = None
+    """Maximum of the sensor measurements for the given field component and time
+    step as an array with shape=(n_sims,n_sensors,n_field_comps,n_time_steps)
+    """
+
     min: np.ndarray | None = None
+    """Minmum of the sensor measurements for the given field component and time
+    step as an array with shape=(n_sims,n_sensors,n_field_comps,n_time_steps)
+    """
+
     med: np.ndarray | None = None
+    """Median of the sensor measurements for the given field component and time
+    step as an array with shape=(n_sims,n_sensors,n_field_comps,n_time_steps)
+    """
+
     q25: np.ndarray | None = None
+    """Lower 25% quantile of the sensor measurements for the given field
+    component and time step as an array with shape=(n_sims,n_sensors,
+    n_field_comps, n_time_steps)
+    """
+
     q75: np.ndarray | None = None
+    """Upper 75% quantile of the sensor measurements for the given field
+    component and time step as an array with shape=(n_sims,n_sensors,
+    _field_comps, n_time_steps)
+    """
+
     mad: np.ndarray | None = None
+    """Median absolute deviation of the sensor measurements for the given field
+    component and time step as an array with shape=(n_sims,n_sensors,
+    n_field_comps, n_time_steps)
+    """
 
 
 class ExperimentSimulator:

@@ -33,7 +77,7 @@ class ExperimentSimulator:
     defined experiments. Calculates summary statistics for each sensor array
     applied to each simulation.
     """
-    __slots__ = ("
+    __slots__ = ("_sim_list","_sensor_arrays","_num_exp_per_sim","_exp_data",
                  "_exp_stats")
 
     def __init__(self,

@@ -41,43 +85,75 @@ class ExperimentSimulator:
                  sensor_arrays: list[ISensorArray],
                  num_exp_per_sim: int
                  ) -> None:
-
-
-
-
-
-
-
-
-
-
-
-
+        """
+        Parameters
+        ----------
+        sim_list : list[mh.SimData]
+            List of simulation data objects over which the virtual experiments
+            will be performed.
+        sensor_arrays : list[ISensorArray]
+            The sensor arrays that will be applied to each simulation to
+            generate the virtual experiment data.
+        num_exp_per_sim : int
+            Number of virtual experiments to perform for each simulation and
+            sensor array.
+        """
+        self._sim_list = sim_list
+        self._sensor_arrays = sensor_arrays
+        self._num_exp_per_sim = num_exp_per_sim
+        self._exp_data = None
+        self._exp_stats = None
 
     def run_experiments(self) -> list[np.ndarray]:
-
-
-
-
-
-
+        """Runs the specified number of virtual experiments over the number of
+        input simulation cases and virtual sensor arrays.
+
+        Returns
+        -------
+        list[np.ndarray]
+            List of virtual experimental data arrays where the list index
+            corresponds to the virtual sensor array and the data is an array
+            with shape=(n_sims,n_exps,n_sens,n_comps,n_time_steps).
+        """
+
+        n_sims = len(self._sim_list)
+        # shape=list[n_sens_arrays](n_sims,n_exps,n_sens,n_comps,n_time_steps)
+        self._exp_data = [None]*len(self._sensor_arrays)
+
+        for ii,aa in enumerate(self._sensor_arrays):
+            meas_array = np.zeros((n_sims,self._num_exp_per_sim)+
                                   aa.get_measurement_shape())
 
-            for jj,ss in enumerate(self.
+            for jj,ss in enumerate(self._sim_list):
                 aa.get_field().set_sim_data(ss)
 
-                for ee in range(self.
+                for ee in range(self._num_exp_per_sim):
                     meas_array[jj,ee,:,:,:] = aa.calc_measurements()
 
             self._exp_data[ii] = meas_array
 
+        # shape=list[n_sens_arrays](n_sims,n_exps,n_sens,n_comps,n_time_steps)
        return self._exp_data
 
 
     def calc_stats(self) -> list[ExperimentStats]:
-
-
-
+        """Calculates summary statistics over the number of virtual experiments
+        specified. If `run_experiments()` has not been called then it is called
+        to generate the virtual experimental data to perform the statistical
+        calculations.
+
+        Returns
+        -------
+        list[ExperimentStats]
+            List of summary statistics data classes for the virtual experiments.
+            The list index correponds to the virtual sensor array.
+        """
+        if self._exp_data is None:
+            self._exp_data = self.run_experiments()
+
+        # shape=list[n_sens_arrays](n_sims,n_sens,n_comps,n_time_steps)
+        self._exp_stats = [None]*len(self._sensor_arrays)
+        for ii,_ in enumerate(self._sensor_arrays):
            array_stats = ExperimentStats()
            array_stats.max = np.max(self._exp_data[ii],axis=1)
            array_stats.min = np.min(self._exp_data[ii],axis=1)

@@ -90,6 +166,7 @@ class ExperimentSimulator:
                np.median(self._exp_data[ii],axis=1,keepdims=True)),axis=1)
            self._exp_stats[ii] = array_stats
 
+        # shape=list[n_sens_arrays](n_sims,n_sens,n_comps,n_time_steps)
        return self._exp_stats
 
 
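The docstrings added above describe the full ExperimentSimulator workflow: construct it with a list of SimData objects, a list of sensor arrays and a number of experiments per simulation, then call run_experiments() and calc_stats(). Below is a minimal usage sketch based only on those docstrings; sim_data and tc_array are assumed to exist already (for example built as in the thermocouple examples above).

from pyvale.experimentsimulator import ExperimentSimulator

# sim_data: a mooseherder SimData object; tc_array: an ISensorArray.
# Both are assumed to have been created as in the examples above.
exp_sim = ExperimentSimulator(sim_list=[sim_data],
                              sensor_arrays=[tc_array],
                              num_exp_per_sim=100)

# One array per sensor array with
# shape=(n_sims, n_exps, n_sens, n_comps, n_time_steps).
exp_data = exp_sim.run_experiments()

# One ExperimentStats per sensor array; each statistic collapses the
# experiments axis, giving shape=(n_sims, n_sens, n_comps, n_time_steps).
exp_stats = exp_sim.calc_stats()
print(exp_data[0].shape, exp_stats[0].max.shape)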
pyvale/field.py
CHANGED
@@ -1,8 +1,8 @@
-#
+# ==============================================================================
 # pyvale: the python validation engine
 # License: MIT
 # Copyright (C) 2025 The Computer Aided Validation Team
-#
+# ==============================================================================
 
 from abc import ABC, abstractmethod
 import numpy as np

@@ -29,7 +29,6 @@ class IField(ABC):
             Mooseherder SimData object. Contains a mesh and a simulated
             physical field.
         """
-        pass
 
     @abstractmethod
     def get_sim_data(self) -> mh.SimData:

@@ -43,7 +42,6 @@ class IField(ABC):
             Mooseherder SimData object. Contains a mesh and a simulated
             physical field.
         """
-        pass
 
     @abstractmethod
     def get_time_steps(self) -> np.ndarray:

@@ -55,7 +53,6 @@ class IField(ABC):
         np.ndarray
             1D array of simulation time steps. shape=(num_time_steps,)
         """
-        pass
 
     @abstractmethod
     def get_visualiser(self) -> pv.UnstructuredGrid:

@@ -68,7 +65,6 @@ class IField(ABC):
             Pyvista unstructured grid object containing only a mesh without any
             physical field data attached.
         """
-        pass
 
     @abstractmethod
     def get_all_components(self) -> tuple[str,...]:

@@ -82,7 +78,6 @@ class IField(ABC):
             Tuple containing the string keys for all components of the physical
             field.
         """
-        pass
 
     @abstractmethod
     def get_component_index(self,component: str) -> int:

@@ -99,7 +94,6 @@ class IField(ABC):
         int
             Index for the selected field component
         """
-        pass
 
     @abstractmethod
     def sample_field(self,

@@ -132,4 +126,3 @@ class IField(ABC):
             An array of sampled (interpolated) values with the following
             dimensions: shape=(num_points,num_components,num_time_steps).
         """
-        pass