pyvale 2025.5.3 (cp311-cp311-macosx_13_0_x86_64.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
- pyvale/.dylibs/libomp.dylib +0 -0
- pyvale/__init__.py +89 -0
- pyvale/analyticmeshgen.py +102 -0
- pyvale/analyticsimdatafactory.py +91 -0
- pyvale/analyticsimdatagenerator.py +323 -0
- pyvale/blendercalibrationdata.py +15 -0
- pyvale/blenderlightdata.py +26 -0
- pyvale/blendermaterialdata.py +15 -0
- pyvale/blenderrenderdata.py +30 -0
- pyvale/blenderscene.py +488 -0
- pyvale/blendertools.py +420 -0
- pyvale/camera.py +146 -0
- pyvale/cameradata.py +69 -0
- pyvale/cameradata2d.py +84 -0
- pyvale/camerastereo.py +217 -0
- pyvale/cameratools.py +522 -0
- pyvale/cython/rastercyth.c +32211 -0
- pyvale/cython/rastercyth.cpython-311-darwin.so +0 -0
- pyvale/cython/rastercyth.py +640 -0
- pyvale/data/__init__.py +5 -0
- pyvale/data/cal_target.tiff +0 -0
- pyvale/data/case00_HEX20_out.e +0 -0
- pyvale/data/case00_HEX27_out.e +0 -0
- pyvale/data/case00_HEX8_out.e +0 -0
- pyvale/data/case00_TET10_out.e +0 -0
- pyvale/data/case00_TET14_out.e +0 -0
- pyvale/data/case00_TET4_out.e +0 -0
- pyvale/data/case13_out.e +0 -0
- pyvale/data/case16_out.e +0 -0
- pyvale/data/case17_out.e +0 -0
- pyvale/data/case18_1_out.e +0 -0
- pyvale/data/case18_2_out.e +0 -0
- pyvale/data/case18_3_out.e +0 -0
- pyvale/data/case25_out.e +0 -0
- pyvale/data/case26_out.e +0 -0
- pyvale/data/optspeckle_2464x2056px_spec5px_8bit_gblur1px.tiff +0 -0
- pyvale/dataset.py +325 -0
- pyvale/errorcalculator.py +109 -0
- pyvale/errordriftcalc.py +146 -0
- pyvale/errorintegrator.py +336 -0
- pyvale/errorrand.py +607 -0
- pyvale/errorsyscalib.py +134 -0
- pyvale/errorsysdep.py +327 -0
- pyvale/errorsysfield.py +414 -0
- pyvale/errorsysindep.py +808 -0
- pyvale/examples/__init__.py +5 -0
- pyvale/examples/basics/ex1_1_basicscalars_therm2d.py +131 -0
- pyvale/examples/basics/ex1_2_sensormodel_therm2d.py +158 -0
- pyvale/examples/basics/ex1_3_customsens_therm3d.py +216 -0
- pyvale/examples/basics/ex1_4_basicerrors_therm3d.py +153 -0
- pyvale/examples/basics/ex1_5_fielderrs_therm3d.py +168 -0
- pyvale/examples/basics/ex1_6_caliberrs_therm2d.py +133 -0
- pyvale/examples/basics/ex1_7_spatavg_therm2d.py +123 -0
- pyvale/examples/basics/ex2_1_basicvectors_disp2d.py +112 -0
- pyvale/examples/basics/ex2_2_vectorsens_disp2d.py +111 -0
- pyvale/examples/basics/ex2_3_sensangle_disp2d.py +139 -0
- pyvale/examples/basics/ex2_4_chainfielderrs_disp2d.py +196 -0
- pyvale/examples/basics/ex2_5_vectorfields3d_disp3d.py +109 -0
- pyvale/examples/basics/ex3_1_basictensors_strain2d.py +114 -0
- pyvale/examples/basics/ex3_2_tensorsens2d_strain2d.py +111 -0
- pyvale/examples/basics/ex3_3_tensorsens3d_strain3d.py +182 -0
- pyvale/examples/basics/ex4_1_expsim2d_thermmech2d.py +171 -0
- pyvale/examples/basics/ex4_2_expsim3d_thermmech3d.py +252 -0
- pyvale/examples/genanalyticdata/ex1_1_scalarvisualisation.py +35 -0
- pyvale/examples/genanalyticdata/ex1_2_scalarcasebuild.py +43 -0
- pyvale/examples/genanalyticdata/ex2_1_analyticsensors.py +80 -0
- pyvale/examples/imagedef2d/ex_imagedef2d_todisk.py +79 -0
- pyvale/examples/renderblender/ex1_1_blenderscene.py +121 -0
- pyvale/examples/renderblender/ex1_2_blenderdeformed.py +119 -0
- pyvale/examples/renderblender/ex2_1_stereoscene.py +128 -0
- pyvale/examples/renderblender/ex2_2_stereodeformed.py +131 -0
- pyvale/examples/renderblender/ex3_1_blendercalibration.py +120 -0
- pyvale/examples/renderrasterisation/ex_rastenp.py +153 -0
- pyvale/examples/renderrasterisation/ex_rastercyth_oneframe.py +218 -0
- pyvale/examples/renderrasterisation/ex_rastercyth_static_cypara.py +187 -0
- pyvale/examples/renderrasterisation/ex_rastercyth_static_pypara.py +190 -0
- pyvale/examples/visualisation/ex1_1_plot_traces.py +102 -0
- pyvale/examples/visualisation/ex2_1_animate_sim.py +89 -0
- pyvale/experimentsimulator.py +175 -0
- pyvale/field.py +128 -0
- pyvale/fieldconverter.py +351 -0
- pyvale/fieldsampler.py +111 -0
- pyvale/fieldscalar.py +166 -0
- pyvale/fieldtensor.py +218 -0
- pyvale/fieldtransform.py +388 -0
- pyvale/fieldvector.py +213 -0
- pyvale/generatorsrandom.py +505 -0
- pyvale/imagedef2d.py +569 -0
- pyvale/integratorfactory.py +240 -0
- pyvale/integratorquadrature.py +217 -0
- pyvale/integratorrectangle.py +165 -0
- pyvale/integratorspatial.py +89 -0
- pyvale/integratortype.py +43 -0
- pyvale/output.py +17 -0
- pyvale/pyvaleexceptions.py +11 -0
- pyvale/raster.py +31 -0
- pyvale/rastercy.py +77 -0
- pyvale/rasternp.py +603 -0
- pyvale/rendermesh.py +147 -0
- pyvale/sensorarray.py +178 -0
- pyvale/sensorarrayfactory.py +196 -0
- pyvale/sensorarraypoint.py +278 -0
- pyvale/sensordata.py +71 -0
- pyvale/sensordescriptor.py +213 -0
- pyvale/sensortools.py +142 -0
- pyvale/simcases/case00_HEX20.i +242 -0
- pyvale/simcases/case00_HEX27.i +242 -0
- pyvale/simcases/case00_HEX8.i +242 -0
- pyvale/simcases/case00_TET10.i +242 -0
- pyvale/simcases/case00_TET14.i +242 -0
- pyvale/simcases/case00_TET4.i +242 -0
- pyvale/simcases/case01.i +101 -0
- pyvale/simcases/case02.i +156 -0
- pyvale/simcases/case03.i +136 -0
- pyvale/simcases/case04.i +181 -0
- pyvale/simcases/case05.i +234 -0
- pyvale/simcases/case06.i +305 -0
- pyvale/simcases/case07.geo +135 -0
- pyvale/simcases/case07.i +87 -0
- pyvale/simcases/case08.geo +144 -0
- pyvale/simcases/case08.i +153 -0
- pyvale/simcases/case09.geo +204 -0
- pyvale/simcases/case09.i +87 -0
- pyvale/simcases/case10.geo +204 -0
- pyvale/simcases/case10.i +257 -0
- pyvale/simcases/case11.geo +337 -0
- pyvale/simcases/case11.i +147 -0
- pyvale/simcases/case12.geo +388 -0
- pyvale/simcases/case12.i +329 -0
- pyvale/simcases/case13.i +140 -0
- pyvale/simcases/case14.i +159 -0
- pyvale/simcases/case15.geo +337 -0
- pyvale/simcases/case15.i +150 -0
- pyvale/simcases/case16.geo +391 -0
- pyvale/simcases/case16.i +357 -0
- pyvale/simcases/case17.geo +135 -0
- pyvale/simcases/case17.i +144 -0
- pyvale/simcases/case18.i +254 -0
- pyvale/simcases/case18_1.i +254 -0
- pyvale/simcases/case18_2.i +254 -0
- pyvale/simcases/case18_3.i +254 -0
- pyvale/simcases/case19.geo +252 -0
- pyvale/simcases/case19.i +99 -0
- pyvale/simcases/case20.geo +252 -0
- pyvale/simcases/case20.i +250 -0
- pyvale/simcases/case21.geo +74 -0
- pyvale/simcases/case21.i +155 -0
- pyvale/simcases/case22.geo +82 -0
- pyvale/simcases/case22.i +140 -0
- pyvale/simcases/case23.geo +164 -0
- pyvale/simcases/case23.i +140 -0
- pyvale/simcases/case24.geo +79 -0
- pyvale/simcases/case24.i +123 -0
- pyvale/simcases/case25.geo +82 -0
- pyvale/simcases/case25.i +140 -0
- pyvale/simcases/case26.geo +166 -0
- pyvale/simcases/case26.i +140 -0
- pyvale/simcases/run_1case.py +61 -0
- pyvale/simcases/run_all_cases.py +69 -0
- pyvale/simcases/run_build_case.py +64 -0
- pyvale/simcases/run_example_cases.py +69 -0
- pyvale/simtools.py +67 -0
- pyvale/visualexpplotter.py +191 -0
- pyvale/visualimagedef.py +74 -0
- pyvale/visualimages.py +76 -0
- pyvale/visualopts.py +493 -0
- pyvale/visualsimanimator.py +111 -0
- pyvale/visualsimsensors.py +318 -0
- pyvale/visualtools.py +136 -0
- pyvale/visualtraceplotter.py +142 -0
- pyvale-2025.5.3.dist-info/METADATA +144 -0
- pyvale-2025.5.3.dist-info/RECORD +175 -0
- pyvale-2025.5.3.dist-info/WHEEL +6 -0
- pyvale-2025.5.3.dist-info/licenses/LICENSE +21 -0
- pyvale-2025.5.3.dist-info/top_level.txt +1 -0
pyvale/examples/renderrasterisation/ex_rastercyth_static_pypara.py
ADDED

@@ -0,0 +1,190 @@
# ==============================================================================
# pyvale: the python validation engine
# License: MIT
# Copyright (C) 2025 The Computer Aided Validation Team
# ==============================================================================

import time
import numpy as np
from scipy.spatial.transform import Rotation
import matplotlib.pyplot as plt
import mooseherder as mh
import pyvale as pyv


def main() -> None:
    print()
    print(80*"=")
    print("RASTER CYTHON FILE (should be *.so on Linux):")
    print(pyv.rastercyth.__file__)
    print(80*"=")
    print()

    # This is a path to an exodus *.e output file from MOOSE; this can be
    # replaced with a path to your own simulation file
    sim_path = pyv.DataSet.render_mechanical_3d_path()
    #sim_path = pyv.DataSet.render_simple_block_path()
    #sim_path = Path.home()/"pyvale"/"src"/"pyvale"/"simcases"/"case26_out.e"
    sim_data = mh.ExodusReader(sim_path).read_all_sim_data()

    disp_comps = ("disp_x","disp_y","disp_z")

    # Scale m -> mm
    sim_data = pyv.scale_length_units(sim_data,disp_comps,1000.0)

    print()
    print(f"{np.max(np.abs(sim_data.node_vars['disp_x']))=}")
    print(f"{np.max(np.abs(sim_data.node_vars['disp_y']))=}")
    print(f"{np.max(np.abs(sim_data.node_vars['disp_z']))=}")
    print()

    # Extracts the surface mesh from a full 3d simulation for rendering
    render_mesh = pyv.create_render_mesh(sim_data,
                                         ("disp_y","disp_x"),
                                         sim_spat_dim=3,
                                         field_disp_keys=disp_comps)

    print()
    print(80*"-")
    print("MESH DATA:")
    print(80*"-")
    print("connectivity.shape=(num_elems,num_nodes_per_elem)")
    print(f"{render_mesh.connectivity.shape=}")
    print()
    print("coords.shape=(num_nodes,coord[x,y,z])")
    print(f"{render_mesh.coords.shape=}")
    print()
    print("fields.shape=(num_coords,num_time_steps,num_components)")
    print(f"{render_mesh.fields_render.shape=}")
    if render_mesh.fields_disp is not None:
        print(f"{render_mesh.fields_disp.shape=}")
    print(80*"-")
    print()

    pixel_num = np.array((960,1280),dtype=np.int32)
    pixel_size = np.array((5.3e-3,5.3e-3),dtype=np.float64)
    focal_leng: float = 50.0
    cam_rot = Rotation.from_euler("zyx",(0.0,-30.0,-10.0),degrees=True)
    fov_scale_factor: float = 1.1

    (roi_pos_world,
     cam_pos_world) = pyv.CameraTools.pos_fill_frame_from_rotation(
        coords_world=render_mesh.coords,
        pixel_num=pixel_num,
        pixel_size=pixel_size,
        focal_leng=focal_leng,
        cam_rot=cam_rot,
        frame_fill=fov_scale_factor,
    )

    cam_data = pyv.CameraData(
        pixels_num=pixel_num,
        pixels_size=pixel_size,
        pos_world=cam_pos_world,
        rot_world=cam_rot,
        roi_cent_world=roi_pos_world,
        focal_length=focal_leng,
        sub_samp=2,
        back_face_removal=True,
    )

    print(80*"-")
    print("CAMERA DATA:")
    print(80*"-")
    print(f"{cam_data.image_dist=}")
    print(f"{cam_data.roi_cent_world=}")
    print(f"{cam_data.pos_world=}")
    print()
    print("World to camera matrix:")
    print(cam_data.world_to_cam_mat)
    print(80*"-")
    print()

    print(80*"-")
    total_frames = render_mesh.fields_render.shape[1]*render_mesh.fields_render.shape[2]
    print(f"Time steps to render: {render_mesh.fields_render.shape[1]}")
    print(f"Fields to render: {render_mesh.fields_render.shape[2]}")
    print(f"Total frames to render: {total_frames}")
    print(80*"-")

    print(80*"=")
    print("RASTER ELEMENT LOOP START")
    print(80*"=")

    num_loops = 1
    loop_times = np.zeros((num_loops,),dtype=np.float64)

    print()
    print("Running raster loop.")
    for nn in range(num_loops):
        print(f"Running loop {nn}")
        loop_start = time.perf_counter()

        (image_buffer,
         depth_buffer,
         elems_in_images) = pyv.RasterCY.raster_static_mesh(cam_data,
                                                            render_mesh,
                                                            16)

        loop_times[nn] = time.perf_counter() - loop_start

    print()
    print(80*"=")
    print("PERFORMANCE TIMERS")
    print(f"Elements in image = {elems_in_images}")
    print(f"Image buffer shape = {image_buffer.shape}")
    print(f"Avg. total render time = {np.mean(loop_times):.4f} seconds")
    print(f"Avg. render time per frame = {(np.mean(loop_times)/total_frames):.4f} seconds")
    print(80*"=")

    #===========================================================================
    # PLOTTING
    plot_on = False
    plot_frames = (-1,) #range(3)
    plot_field = 0

    # depth_to_plot = np.copy(np.asarray(depth_buffer[:,:,plot_frame]))
    # depth_to_plot[depth_buffer[:,:,plot_frame] > 10*cam_data.image_dist] = np.nan
    # image_to_plot = np.copy(np.asarray(image_buffer[:,:,plot_frame,plot_field]))
    # image_to_plot[depth_buffer[:,:,plot_frame] > 10*cam_data.image_dist] = np.nan

    if plot_on:
        plot_opts = pyv.PlotOptsGeneral()

        for ff in plot_frames:
            (fig, ax) = plt.subplots(figsize=plot_opts.single_fig_size_square,
                                     layout='constrained')
            fig.set_dpi(plot_opts.resolution)
            cset = plt.imshow(depth_buffer[:,:,ff],
                              cmap=plt.get_cmap(plot_opts.cmap_seq))
                              #origin='lower')
            ax.set_aspect('equal','box')
            fig.colorbar(cset)
            ax.set_title(f"Depth buffer: {ff}",fontsize=plot_opts.font_head_size)
            ax.set_xlabel(r"x ($px$)",
                          fontsize=plot_opts.font_ax_size, fontname=plot_opts.font_name)
            ax.set_ylabel(r"y ($px$)",
                          fontsize=plot_opts.font_ax_size, fontname=plot_opts.font_name)

            (fig, ax) = plt.subplots(figsize=plot_opts.single_fig_size_square,
                                     layout='constrained')
            fig.set_dpi(plot_opts.resolution)
            cset = plt.imshow(image_buffer[:,:,ff,plot_field],
                              cmap=plt.get_cmap(plot_opts.cmap_seq))
                              #origin='lower')
            ax.set_aspect('equal','box')
            fig.colorbar(cset)
            ax.set_title(f"Field Image: {ff}",fontsize=plot_opts.font_head_size)
            ax.set_xlabel(r"x ($px$)",
                          fontsize=plot_opts.font_ax_size, fontname=plot_opts.font_name)
            ax.set_ylabel(r"y ($px$)",
                          fontsize=plot_opts.font_ax_size, fontname=plot_opts.font_name)

        plt.show()


if __name__ == "__main__":
    main()

pyvale/examples/visualisation/ex1_1_plot_traces.py
ADDED

@@ -0,0 +1,102 @@
# ==============================================================================
# pyvale: the python validation engine
# License: MIT
# Copyright (C) 2025 The Computer Aided Validation Team
# ==============================================================================

"""
Pyvale example: TODO
--------------------------------------------------------------------------------
TODO

Test case: TODO
"""

import numpy as np
import matplotlib.pyplot as plt
import mooseherder as mh
import pyvale as pyv

# TODO: comments and full description for this example like the basics examples

def main() -> None:

    data_path = pyv.DataSet.thermal_2d_path()
    sim_data = mh.ExodusReader(data_path).read_all_sim_data()
    sim_data = pyv.scale_length_units(scale=1000.0,
                                      sim_data=sim_data,
                                      disp_comps=None)

    n_sens = (4,1,1)
    x_lims = (0.0,100.0)
    y_lims = (0.0,50.0)
    z_lims = (0.0,0.0)
    sens_pos = pyv.create_sensor_pos_array(n_sens,x_lims,y_lims,z_lims)

    sample_times = np.linspace(0.0,np.max(sim_data.time),12)

    sens_data = pyv.SensorData(positions=sens_pos,
                               sample_times=sample_times)

    field_key = "temperature"
    tc_array = pyv.SensorArrayFactory \
        .thermocouples_basic_errs(sim_data,
                                  sens_data,
                                  field_key,
                                  elem_dims=2)

    err_int = pyv.ErrIntegrator([pyv.ErrSysOffset(offset=-5.0)],
                                sens_data,
                                tc_array.get_measurement_shape())
    tc_array.set_error_integrator(err_int)

    measurements = tc_array.get_measurements()

    print(80*"-")

    sens_print: int = 0
    time_print: int = 5
    comp_print: int = 0

    print(f"These are the last {time_print} virtual measurements of sensor "
          + f"{sens_print}:")

    pyv.print_measurements(sens_array=tc_array,
                           sensors=(sens_print,sens_print+1),
                           components=(comp_print,comp_print+1),
                           time_steps=(measurements.shape[2]-time_print,
                                       measurements.shape[2]))
    print(80*"-")

    trace_props = pyv.TraceOptsSensor()

    trace_props.truth_line = None
    trace_props.sim_line = None
    pyv.plot_time_traces(tc_array,field_key,trace_props)

    trace_props.meas_line = "--o"
    trace_props.truth_line = "-x"
    trace_props.sim_line = ":+"
    pyv.plot_time_traces(tc_array,field_key,trace_props)

    trace_props.sensors_to_plot = np.arange(measurements.shape[0]-2
                                            ,measurements.shape[0])
    pyv.plot_time_traces(tc_array,field_key,trace_props)

    trace_props.sensors_to_plot = None
    trace_props.time_min_max = (0.0,100.0)
    pyv.plot_time_traces(tc_array,field_key,trace_props)

    plt.show()

    pv_plot = pyv.plot_point_sensors_on_sim(tc_array,field_key)
    pv_plot.camera_position = [(-7.547, 59.753, 134.52),
                               (41.916, 25.303, 9.297),
                               (0.0810, 0.969, -0.234)]
    pv_plot.show()


if __name__ == "__main__":
    main()

pyvale/examples/visualisation/ex2_1_animate_sim.py
ADDED

@@ -0,0 +1,89 @@
# ==============================================================================
# pyvale: the python validation engine
# License: MIT
# Copyright (C) 2025 The Computer Aided Validation Team
# ==============================================================================

"""
Pyvale example: TODO
--------------------------------------------------------------------------------
TODO

Test case: TODO
"""

from pathlib import Path
import numpy as np
import mooseherder as mh
import pyvale as pyv


def main() -> None:

    data_path = pyv.DataSet.thermal_3d_path()
    sim_data = mh.ExodusReader(data_path).read_all_sim_data()

    sim_data = pyv.scale_length_units(scale=1000.0,
                                      sim_data=sim_data,
                                      disp_comps=None)
    sim_data.coords = sim_data.coords*1000.0 # type: ignore

    pyv.print_dimensions(sim_data)

    n_sens = (1,4,1)
    x_lims = (12.5,12.5)
    y_lims = (0,33.0)
    z_lims = (0.0,12.0)
    sens_pos = pyv.create_sensor_pos_array(n_sens,x_lims,y_lims,z_lims)

    sens_data = pyv.SensorData(positions=sens_pos)

    field_key = 'temperature'
    tc_array = pyv.SensorArrayFactory() \
        .thermocouples_basic_errs(sim_data,
                                  sens_data,
                                  field_key,
                                  elem_dims=3)

    measurements = tc_array.get_measurements()
    print(f'\nMeasurements for sensor at top of block:\n{measurements[-1,0,:]}\n')

    vis_opts = pyv.VisOptsSimSensors()
    vis_opts.window_size_px = (1200,800)
    vis_opts.camera_position = np.array([(59.354, 43.428, 69.946),
                                         (-2.858, 13.189, 4.523),
                                         (-0.215, 0.948, -0.233)])

    vis_mode = "vector"
    save_dir = Path.cwd() / "exampleoutput"
    if not save_dir.is_dir():
        save_dir.mkdir()

    if vis_mode == "animate":
        anim_opts = pyv.VisOptsAnimation()

        anim_opts.save_path = save_dir / "test_animation"
        anim_opts.save_animation = pyv.EAnimationType.MP4

        pv_anim = pyv.animate_sim_with_sensors(tc_array,
                                               field_key,
                                               time_steps=None,
                                               vis_opts=vis_opts,
                                               anim_opts=anim_opts)

    else:
        image_save_opts = pyv.VisOptsImageSave()

        image_save_opts.path = save_dir / "test_vector_graphics"
        image_save_opts.image_type = pyv.EImageType.SVG

        pv_plot = pyv.plot_point_sensors_on_sim(tc_array,
                                                field_key,
                                                time_step=-1,
                                                vis_opts=vis_opts,
                                                image_save_opts=image_save_opts)
        pv_plot.show()


if __name__ == '__main__':
    main()

pyvale/experimentsimulator.py
ADDED

@@ -0,0 +1,175 @@
# ==============================================================================
# pyvale: the python validation engine
# License: MIT
# Copyright (C) 2025 The Computer Aided Validation Team
# ==============================================================================

"""
This module is used for performing Monte-Carlo virtual experiments over a series
of input simulation cases and sensor arrays.
"""

from dataclasses import dataclass
import numpy as np
import mooseherder as mh
from pyvale.sensorarray import ISensorArray


@dataclass(slots=True)
class ExperimentStats:
    """Dataclass holding summary statistics for a series of simulated
    experiments produced using the experiment simulator. All summary statistics
    are calculated over the 'experiments' dimension of the measurements array so
    the arrays of statistics have the shape=(n_sims,n_sensors,n_field_comps,
    n_time_steps). Note that the n_sims dimension refers to the number of input
    multi-physics simulations (i.e. SimData objects) that the virtual
    experiments were performed over.
    """

    mean: np.ndarray | None = None
    """Mean of each sensor's measurements for the given field component and time
    step as an array with shape=(n_sims,n_sensors,n_field_comps,n_time_steps).
    """

    std: np.ndarray | None = None
    """Standard deviation of the sensor measurements for the given field
    component and time step as an array with shape=(n_sims,n_sensors,
    n_field_comps,n_time_steps).
    """

    max: np.ndarray | None = None
    """Maximum of the sensor measurements for the given field component and time
    step as an array with shape=(n_sims,n_sensors,n_field_comps,n_time_steps).
    """

    min: np.ndarray | None = None
    """Minimum of the sensor measurements for the given field component and time
    step as an array with shape=(n_sims,n_sensors,n_field_comps,n_time_steps).
    """

    med: np.ndarray | None = None
    """Median of the sensor measurements for the given field component and time
    step as an array with shape=(n_sims,n_sensors,n_field_comps,n_time_steps).
    """

    q25: np.ndarray | None = None
    """Lower 25% quantile of the sensor measurements for the given field
    component and time step as an array with shape=(n_sims,n_sensors,
    n_field_comps,n_time_steps).
    """

    q75: np.ndarray | None = None
    """Upper 75% quantile of the sensor measurements for the given field
    component and time step as an array with shape=(n_sims,n_sensors,
    n_field_comps,n_time_steps).
    """

    mad: np.ndarray | None = None
    """Median absolute deviation of the sensor measurements for the given field
    component and time step as an array with shape=(n_sims,n_sensors,
    n_field_comps,n_time_steps).
    """


class ExperimentSimulator:
    """An experiment simulator for running Monte-Carlo analysis by applying a
    list of sensor arrays to a list of simulations over a given number of user
    defined experiments. Calculates summary statistics for each sensor array
    applied to each simulation.
    """
    __slots__ = ("_sim_list","_sensor_arrays","_num_exp_per_sim","_exp_data",
                 "_exp_stats")

    def __init__(self,
                 sim_list: list[mh.SimData],
                 sensor_arrays: list[ISensorArray],
                 num_exp_per_sim: int
                 ) -> None:
        """
        Parameters
        ----------
        sim_list : list[mh.SimData]
            List of simulation data objects over which the virtual experiments
            will be performed.
        sensor_arrays : list[ISensorArray]
            The sensor arrays that will be applied to each simulation to
            generate the virtual experiment data.
        num_exp_per_sim : int
            Number of virtual experiments to perform for each simulation and
            sensor array.
        """
        self._sim_list = sim_list
        self._sensor_arrays = sensor_arrays
        self._num_exp_per_sim = num_exp_per_sim
        self._exp_data = None
        self._exp_stats = None

    def run_experiments(self) -> list[np.ndarray]:
        """Runs the specified number of virtual experiments over the number of
        input simulation cases and virtual sensor arrays.

        Returns
        -------
        list[np.ndarray]
            List of virtual experimental data arrays where the list index
            corresponds to the virtual sensor array and the data is an array
            with shape=(n_sims,n_exps,n_sens,n_comps,n_time_steps).
        """

        n_sims = len(self._sim_list)
        # shape=list[n_sens_arrays](n_sims,n_exps,n_sens,n_comps,n_time_steps)
        self._exp_data = [None]*len(self._sensor_arrays)

        for ii,aa in enumerate(self._sensor_arrays):
            meas_array = np.zeros((n_sims,self._num_exp_per_sim)+
                                  aa.get_measurement_shape())

            for jj,ss in enumerate(self._sim_list):
                aa.get_field().set_sim_data(ss)

                for ee in range(self._num_exp_per_sim):
                    meas_array[jj,ee,:,:,:] = aa.calc_measurements()

            self._exp_data[ii] = meas_array

        # shape=list[n_sens_arrays](n_sims,n_exps,n_sens,n_comps,n_time_steps)
        return self._exp_data


    def calc_stats(self) -> list[ExperimentStats]:
        """Calculates summary statistics over the number of virtual experiments
        specified. If `run_experiments()` has not been called then it is called
        to generate the virtual experimental data to perform the statistical
        calculations.

        Returns
        -------
        list[ExperimentStats]
            List of summary statistics data classes for the virtual experiments.
            The list index corresponds to the virtual sensor array.
        """
        if self._exp_data is None:
            self._exp_data = self.run_experiments()

        # shape=list[n_sens_arrays](n_sims,n_sens,n_comps,n_time_steps)
        self._exp_stats = [None]*len(self._sensor_arrays)
        for ii,_ in enumerate(self._sensor_arrays):
            array_stats = ExperimentStats()
            array_stats.max = np.max(self._exp_data[ii],axis=1)
            array_stats.min = np.min(self._exp_data[ii],axis=1)
            array_stats.mean = np.mean(self._exp_data[ii],axis=1)
            array_stats.std = np.std(self._exp_data[ii],axis=1)
            array_stats.med = np.median(self._exp_data[ii],axis=1)
            array_stats.q25 = np.quantile(self._exp_data[ii],0.25,axis=1)
            array_stats.q75 = np.quantile(self._exp_data[ii],0.75,axis=1)
            array_stats.mad = np.median(np.abs(self._exp_data[ii] -
                np.median(self._exp_data[ii],axis=1,keepdims=True)),axis=1)
            self._exp_stats[ii] = array_stats

        # shape=list[n_sens_arrays](n_sims,n_sens,n_comps,n_time_steps)
        return self._exp_stats
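
To make the measurement and statistics shapes above concrete, the following is a minimal usage sketch (it is not part of the wheel). It reuses the dataset helper and thermocouple factory shown in the example scripts earlier in this diff; the sensor grid, the choice of 100 experiments per simulation, and the direct import from pyvale.experimentsimulator are illustrative assumptions (the class may also be re-exported at the pyvale package level).

# Hedged usage sketch for ExperimentSimulator: the dataset helper, sensor grid
# and experiment count below are illustrative assumptions, not values from the wheel.
import mooseherder as mh
import pyvale as pyv
from pyvale.experimentsimulator import ExperimentSimulator

# Load the bundled 2D thermal case and scale coordinates from m to mm,
# mirroring the plot-traces example above
data_path = pyv.DataSet.thermal_2d_path()
sim_data = mh.ExodusReader(data_path).read_all_sim_data()
sim_data = pyv.scale_length_units(scale=1000.0,sim_data=sim_data,disp_comps=None)

# Build a basic thermocouple array on a 4x1x1 grid over the plate
sens_pos = pyv.create_sensor_pos_array((4,1,1),(0.0,100.0),(0.0,50.0),(0.0,0.0))
sens_data = pyv.SensorData(positions=sens_pos)
tc_array = pyv.SensorArrayFactory \
    .thermocouples_basic_errs(sim_data,sens_data,"temperature",elem_dims=2)

# One simulation case, one sensor array, 100 repeated virtual experiments
exp_sim = ExperimentSimulator(sim_list=[sim_data],
                              sensor_arrays=[tc_array],
                              num_exp_per_sim=100)

exp_data = exp_sim.run_experiments()   # list indexed by sensor array
exp_stats = exp_sim.calc_stats()       # list[ExperimentStats], same indexing

# exp_data[0].shape = (n_sims,n_exps,n_sens,n_comps,n_time_steps)
print(exp_data[0].shape)
# Statistics collapse the experiments axis: (n_sims,n_sens,n_comps,n_time_steps)
print(exp_stats[0].mean.shape)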

pyvale/field.py
ADDED

@@ -0,0 +1,128 @@
# ==============================================================================
# pyvale: the python validation engine
# License: MIT
# Copyright (C) 2025 The Computer Aided Validation Team
# ==============================================================================

from abc import ABC, abstractmethod
import numpy as np
from scipy.spatial.transform import Rotation
import pyvista as pv
import mooseherder as mh


class IField(ABC):
    """Interface (abstract base class) for sampling (interpolating) physical
    fields from simulations to provide sensor values at specified locations and
    times.
    """

    @abstractmethod
    def set_sim_data(self,sim_data: mh.SimData) -> None:
        """Abstract method. Sets the SimData object that will be interpolated to
        obtain sensor values. The purpose of this is to be able to apply the
        same sensor array to an array of different simulations.

        Parameters
        ----------
        sim_data : mh.SimData
            Mooseherder SimData object. Contains a mesh and a simulated
            physical field.
        """

    @abstractmethod
    def get_sim_data(self) -> mh.SimData:
        """Abstract method. Gets the simulation data object associated with this
        field. Used by pyvale visualisation tools to display simulation data
        with simulated sensor values.

        Returns
        -------
        mh.SimData
            Mooseherder SimData object. Contains a mesh and a simulated
            physical field.
        """

    @abstractmethod
    def get_time_steps(self) -> np.ndarray:
        """Abstract method. Gets a 1D array of time steps from the simulation
        data.

        Returns
        -------
        np.ndarray
            1D array of simulation time steps. shape=(num_time_steps,)
        """

    @abstractmethod
    def get_visualiser(self) -> pv.UnstructuredGrid:
        """Abstract method. Gets a pyvista unstructured grid object for
        visualisation purposes.

        Returns
        -------
        pv.UnstructuredGrid
            Pyvista unstructured grid object containing only a mesh without any
            physical field data attached.
        """

    @abstractmethod
    def get_all_components(self) -> tuple[str,...]:
        """Gets the string keys for the components of the physical field. For
        example: a scalar field might just have ('temperature',) whereas a
        vector field might have ('disp_x','disp_y','disp_z').

        Returns
        -------
        tuple[str,...]
            Tuple containing the string keys for all components of the physical
            field.
        """

    @abstractmethod
    def get_component_index(self,component: str) -> int:
        """Gets the index for a component of the physical field. Used for
        getting the index of a component in the sensor measurement array.

        Parameters
        ----------
        component : str
            String key for the field component (e.g. 'temperature' or 'disp_x').

        Returns
        -------
        int
            Index for the selected field component.
        """

    @abstractmethod
    def sample_field(self,
                     points: np.ndarray,
                     times: np.ndarray | None = None,
                     angles: tuple[Rotation,...] | None = None,
                     ) -> np.ndarray:
        """Samples (interpolates) the simulation field at the specified
        positions, times, and angles.

        Parameters
        ----------
        points : np.ndarray
            Spatial points to be sampled with the rows indicating the point
            number and the columns indicating the X, Y and Z coordinates.
        times : np.ndarray | None, optional
            Times to sample the underlying simulation. If None then the
            simulation time steps are used and no temporal interpolation is
            performed, by default None.
        angles : tuple[Rotation,...] | None, optional
            Angles to rotate the sampled values into with rotations specified
            with respect to the simulation world coordinates. If a single
            rotation is specified then all points are assumed to have the same
            angle and are batch processed for speed. If None then no rotation is
            performed, by default None.

        Returns
        -------
        np.ndarray
            An array of sampled (interpolated) values with the following
            dimensions: shape=(num_points,num_components,num_time_steps).
        """