pyvale-2025.5.3-cp311-cp311-macosx_13_0_x86_64.whl
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of pyvale might be problematic.
- pyvale/.dylibs/libomp.dylib +0 -0
- pyvale/__init__.py +89 -0
- pyvale/analyticmeshgen.py +102 -0
- pyvale/analyticsimdatafactory.py +91 -0
- pyvale/analyticsimdatagenerator.py +323 -0
- pyvale/blendercalibrationdata.py +15 -0
- pyvale/blenderlightdata.py +26 -0
- pyvale/blendermaterialdata.py +15 -0
- pyvale/blenderrenderdata.py +30 -0
- pyvale/blenderscene.py +488 -0
- pyvale/blendertools.py +420 -0
- pyvale/camera.py +146 -0
- pyvale/cameradata.py +69 -0
- pyvale/cameradata2d.py +84 -0
- pyvale/camerastereo.py +217 -0
- pyvale/cameratools.py +522 -0
- pyvale/cython/rastercyth.c +32211 -0
- pyvale/cython/rastercyth.cpython-311-darwin.so +0 -0
- pyvale/cython/rastercyth.py +640 -0
- pyvale/data/__init__.py +5 -0
- pyvale/data/cal_target.tiff +0 -0
- pyvale/data/case00_HEX20_out.e +0 -0
- pyvale/data/case00_HEX27_out.e +0 -0
- pyvale/data/case00_HEX8_out.e +0 -0
- pyvale/data/case00_TET10_out.e +0 -0
- pyvale/data/case00_TET14_out.e +0 -0
- pyvale/data/case00_TET4_out.e +0 -0
- pyvale/data/case13_out.e +0 -0
- pyvale/data/case16_out.e +0 -0
- pyvale/data/case17_out.e +0 -0
- pyvale/data/case18_1_out.e +0 -0
- pyvale/data/case18_2_out.e +0 -0
- pyvale/data/case18_3_out.e +0 -0
- pyvale/data/case25_out.e +0 -0
- pyvale/data/case26_out.e +0 -0
- pyvale/data/optspeckle_2464x2056px_spec5px_8bit_gblur1px.tiff +0 -0
- pyvale/dataset.py +325 -0
- pyvale/errorcalculator.py +109 -0
- pyvale/errordriftcalc.py +146 -0
- pyvale/errorintegrator.py +336 -0
- pyvale/errorrand.py +607 -0
- pyvale/errorsyscalib.py +134 -0
- pyvale/errorsysdep.py +327 -0
- pyvale/errorsysfield.py +414 -0
- pyvale/errorsysindep.py +808 -0
- pyvale/examples/__init__.py +5 -0
- pyvale/examples/basics/ex1_1_basicscalars_therm2d.py +131 -0
- pyvale/examples/basics/ex1_2_sensormodel_therm2d.py +158 -0
- pyvale/examples/basics/ex1_3_customsens_therm3d.py +216 -0
- pyvale/examples/basics/ex1_4_basicerrors_therm3d.py +153 -0
- pyvale/examples/basics/ex1_5_fielderrs_therm3d.py +168 -0
- pyvale/examples/basics/ex1_6_caliberrs_therm2d.py +133 -0
- pyvale/examples/basics/ex1_7_spatavg_therm2d.py +123 -0
- pyvale/examples/basics/ex2_1_basicvectors_disp2d.py +112 -0
- pyvale/examples/basics/ex2_2_vectorsens_disp2d.py +111 -0
- pyvale/examples/basics/ex2_3_sensangle_disp2d.py +139 -0
- pyvale/examples/basics/ex2_4_chainfielderrs_disp2d.py +196 -0
- pyvale/examples/basics/ex2_5_vectorfields3d_disp3d.py +109 -0
- pyvale/examples/basics/ex3_1_basictensors_strain2d.py +114 -0
- pyvale/examples/basics/ex3_2_tensorsens2d_strain2d.py +111 -0
- pyvale/examples/basics/ex3_3_tensorsens3d_strain3d.py +182 -0
- pyvale/examples/basics/ex4_1_expsim2d_thermmech2d.py +171 -0
- pyvale/examples/basics/ex4_2_expsim3d_thermmech3d.py +252 -0
- pyvale/examples/genanalyticdata/ex1_1_scalarvisualisation.py +35 -0
- pyvale/examples/genanalyticdata/ex1_2_scalarcasebuild.py +43 -0
- pyvale/examples/genanalyticdata/ex2_1_analyticsensors.py +80 -0
- pyvale/examples/imagedef2d/ex_imagedef2d_todisk.py +79 -0
- pyvale/examples/renderblender/ex1_1_blenderscene.py +121 -0
- pyvale/examples/renderblender/ex1_2_blenderdeformed.py +119 -0
- pyvale/examples/renderblender/ex2_1_stereoscene.py +128 -0
- pyvale/examples/renderblender/ex2_2_stereodeformed.py +131 -0
- pyvale/examples/renderblender/ex3_1_blendercalibration.py +120 -0
- pyvale/examples/renderrasterisation/ex_rastenp.py +153 -0
- pyvale/examples/renderrasterisation/ex_rastercyth_oneframe.py +218 -0
- pyvale/examples/renderrasterisation/ex_rastercyth_static_cypara.py +187 -0
- pyvale/examples/renderrasterisation/ex_rastercyth_static_pypara.py +190 -0
- pyvale/examples/visualisation/ex1_1_plot_traces.py +102 -0
- pyvale/examples/visualisation/ex2_1_animate_sim.py +89 -0
- pyvale/experimentsimulator.py +175 -0
- pyvale/field.py +128 -0
- pyvale/fieldconverter.py +351 -0
- pyvale/fieldsampler.py +111 -0
- pyvale/fieldscalar.py +166 -0
- pyvale/fieldtensor.py +218 -0
- pyvale/fieldtransform.py +388 -0
- pyvale/fieldvector.py +213 -0
- pyvale/generatorsrandom.py +505 -0
- pyvale/imagedef2d.py +569 -0
- pyvale/integratorfactory.py +240 -0
- pyvale/integratorquadrature.py +217 -0
- pyvale/integratorrectangle.py +165 -0
- pyvale/integratorspatial.py +89 -0
- pyvale/integratortype.py +43 -0
- pyvale/output.py +17 -0
- pyvale/pyvaleexceptions.py +11 -0
- pyvale/raster.py +31 -0
- pyvale/rastercy.py +77 -0
- pyvale/rasternp.py +603 -0
- pyvale/rendermesh.py +147 -0
- pyvale/sensorarray.py +178 -0
- pyvale/sensorarrayfactory.py +196 -0
- pyvale/sensorarraypoint.py +278 -0
- pyvale/sensordata.py +71 -0
- pyvale/sensordescriptor.py +213 -0
- pyvale/sensortools.py +142 -0
- pyvale/simcases/case00_HEX20.i +242 -0
- pyvale/simcases/case00_HEX27.i +242 -0
- pyvale/simcases/case00_HEX8.i +242 -0
- pyvale/simcases/case00_TET10.i +242 -0
- pyvale/simcases/case00_TET14.i +242 -0
- pyvale/simcases/case00_TET4.i +242 -0
- pyvale/simcases/case01.i +101 -0
- pyvale/simcases/case02.i +156 -0
- pyvale/simcases/case03.i +136 -0
- pyvale/simcases/case04.i +181 -0
- pyvale/simcases/case05.i +234 -0
- pyvale/simcases/case06.i +305 -0
- pyvale/simcases/case07.geo +135 -0
- pyvale/simcases/case07.i +87 -0
- pyvale/simcases/case08.geo +144 -0
- pyvale/simcases/case08.i +153 -0
- pyvale/simcases/case09.geo +204 -0
- pyvale/simcases/case09.i +87 -0
- pyvale/simcases/case10.geo +204 -0
- pyvale/simcases/case10.i +257 -0
- pyvale/simcases/case11.geo +337 -0
- pyvale/simcases/case11.i +147 -0
- pyvale/simcases/case12.geo +388 -0
- pyvale/simcases/case12.i +329 -0
- pyvale/simcases/case13.i +140 -0
- pyvale/simcases/case14.i +159 -0
- pyvale/simcases/case15.geo +337 -0
- pyvale/simcases/case15.i +150 -0
- pyvale/simcases/case16.geo +391 -0
- pyvale/simcases/case16.i +357 -0
- pyvale/simcases/case17.geo +135 -0
- pyvale/simcases/case17.i +144 -0
- pyvale/simcases/case18.i +254 -0
- pyvale/simcases/case18_1.i +254 -0
- pyvale/simcases/case18_2.i +254 -0
- pyvale/simcases/case18_3.i +254 -0
- pyvale/simcases/case19.geo +252 -0
- pyvale/simcases/case19.i +99 -0
- pyvale/simcases/case20.geo +252 -0
- pyvale/simcases/case20.i +250 -0
- pyvale/simcases/case21.geo +74 -0
- pyvale/simcases/case21.i +155 -0
- pyvale/simcases/case22.geo +82 -0
- pyvale/simcases/case22.i +140 -0
- pyvale/simcases/case23.geo +164 -0
- pyvale/simcases/case23.i +140 -0
- pyvale/simcases/case24.geo +79 -0
- pyvale/simcases/case24.i +123 -0
- pyvale/simcases/case25.geo +82 -0
- pyvale/simcases/case25.i +140 -0
- pyvale/simcases/case26.geo +166 -0
- pyvale/simcases/case26.i +140 -0
- pyvale/simcases/run_1case.py +61 -0
- pyvale/simcases/run_all_cases.py +69 -0
- pyvale/simcases/run_build_case.py +64 -0
- pyvale/simcases/run_example_cases.py +69 -0
- pyvale/simtools.py +67 -0
- pyvale/visualexpplotter.py +191 -0
- pyvale/visualimagedef.py +74 -0
- pyvale/visualimages.py +76 -0
- pyvale/visualopts.py +493 -0
- pyvale/visualsimanimator.py +111 -0
- pyvale/visualsimsensors.py +318 -0
- pyvale/visualtools.py +136 -0
- pyvale/visualtraceplotter.py +142 -0
- pyvale-2025.5.3.dist-info/METADATA +144 -0
- pyvale-2025.5.3.dist-info/RECORD +175 -0
- pyvale-2025.5.3.dist-info/WHEEL +6 -0
- pyvale-2025.5.3.dist-info/licenses/LICENSE +21 -0
- pyvale-2025.5.3.dist-info/top_level.txt +1 -0
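
The wheel bundles its example data (the Exodus *.e files and TIFF images under pyvale/data/) inside the package, and the example scripts shown in the diffs below load that data through pyv.DataSet and mooseherder. The following is a minimal sketch of that loading step, assuming pyvale and mooseherder are installed, and using only calls that appear verbatim in the bundled examples further down:

import mooseherder as mh
import pyvale as pyv

# Path to one of the Exodus (*.e) results files packaged under pyvale/data/
sim_path = pyv.DataSet.render_mechanical_3d_path()

# Read the simulation output and scale displacements from m to mm, as the
# rasterisation examples below do before building a render mesh.
disp_comps = ("disp_x", "disp_y", "disp_z")
sim_data = mh.ExodusReader(sim_path).read_all_sim_data()
sim_data = pyv.scale_length_units(sim_data, disp_comps, 1000.0)

print(f"{sim_path=}")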

pyvale/examples/renderblender/ex3_1_blendercalibration.py

@@ -0,0 +1,120 @@
# ==============================================================================
# pyvale: the python validation engine
# License: MIT
# Copyright (C) 2025 The Computer Aided Validation Team
# ==============================================================================

import numpy as np
from scipy.spatial.transform import Rotation
from pathlib import Path
import pyvale

def main() -> None:
    #NOTE: All lengths are to be specified in mm

    # Set the save path
    # --------------------------------------------------------------------------
    # All the files saved will be saved to a subfolder within this specified
    # base directory.
    # This base directory can be specified by:
    base_dir = Path("./")
    # If no base directory is specified, it will be set as your home directory

    # Creating the scene
    # --------------------------------------------------------------------------
    scene = pyvale.BlenderScene()

    # Add the calibration target
    # A rectangular calibration target of the specified size is added to the scene
    target = scene.add_cal_target(target_size=np.array([150, 100, 10]))

    # Add the camera
    cam_data_0 = pyvale.CameraData(pixels_num=np.array([1540, 1040]),
                                   pixels_size=np.array([0.00345, 0.00345]),
                                   pos_world=np.array([0, 0, 400]),
                                   rot_world=Rotation.from_euler("xyz", [0, 0, 0]),
                                   roi_cent_world=(0, 0, 0),
                                   focal_length=15.0)
    # Set this to "symmetric" to get a symmetric stereo system or set this to
    # "faceon" to get a face-on stereo system
    stereo_setup = "faceon"
    if stereo_setup == "symmetric":
        stereo_system = pyvale.CameraTools.symmetric_stereo_cameras(
            cam_data_0=cam_data_0,
            stereo_angle=15.0)
    if stereo_setup == "faceon":
        stereo_system = pyvale.CameraTools.faceon_stereo_cameras(
            cam_data_0=cam_data_0,
            stereo_angle=15.0)

    scene.add_stereo_system(stereo_system)

    # Generate calibration file
    stereo_system.save_calibration(base_dir)

    # Add the light
    light_data = pyvale.BlenderLightData(type=pyvale.BlenderLightType.POINT,
                                         pos_world=(0, 0, 200),
                                         rot_world=Rotation.from_euler("xyz",
                                                                       [0, 0, 0]),
                                         energy=1)
    light = scene.add_light(light_data)
    # The light can be moved and rotated:
    light.location = (0, 0, 210)
    light.rotation_euler = (0, 0, 0) # NOTE: The default is an XYZ Euler angle

    # Apply the calibration target pattern
    material_data = pyvale.BlenderMaterialData()
    speckle_path = Path.cwd() / "src/pyvale/data/cal_target.tiff"
    mm_px_resolution = pyvale.CameraTools.calculate_mm_px_resolution(cam_data_0)
    scene.add_speckle(part=target,
                      speckle_path=speckle_path,
                      mat_data=material_data,
                      mm_px_resolution=mm_px_resolution,
                      cal=True)
    # NOTE: The `cal` flag has to be set to True in order to scale the
    # calibration target pattern correctly

    # Rendering calibration images
    # --------------------------------------------------------------------------
    save_dir = Path.cwd() / "blenderimages"
    save_name = "cal"
    render_data = pyvale.RenderData(cam_data=(stereo_system.cam_data_0,
                                              stereo_system.cam_data_1),
                                    base_dir=base_dir)
    # NOTE: The number of threads used to render the images is set within
    # RenderData, it is defaulted to 4 threads

    # The desired limits for the calibration target movement are to be set within
    # the CalibrationData dataclass
    calibration_data = pyvale.CalibrationData(angle_lims=(-10, 10),
                                              angle_step=5,
                                              plunge_lims=(-5, 5),
                                              plunge_step=5)

    # The number of calibration images that will be rendered can be calculated
    number_calibration_images = pyvale.BlenderTools.number_calibration_images(calibration_data)
    print()
    print(80*"-")
    print("Number of calibration images to be rendered:", number_calibration_images)
    print(80*"-")

    # The calibration images can then be rendered
    pyvale.BlenderTools.render_calibration_images(render_data,
                                                  calibration_data,
                                                  target)

    print()
    print(80*"-")
    print("Save directory of the images:", (render_data.base_dir / "calimages"))
    print(80*"-")
    print()

    # Save Blender file
    # --------------------------------------------------------------------------
    # The file that will be saved is a Blender project file. This can be opened
    # with the Blender GUI to view the scene.
    pyvale.BlenderTools.save_blender_file(base_dir)

if __name__ == "__main__":
    main()

pyvale/examples/renderrasterisation/ex_rastenp.py

@@ -0,0 +1,153 @@
# ==============================================================================
# pyvale: the python validation engine
# License: MIT
# Copyright (C) 2025 The Computer Aided Validation Team
# ==============================================================================

from pathlib import Path
import time
import numpy as np
from scipy.spatial.transform import Rotation
import matplotlib.pyplot as plt
import mooseherder as mh
import pyvale as pyv

# TODO
# - Fix the image averaging function to use cython
# - Saving of the rendered images for post processing or analysis
# - Collapse image display functions into visual to simplify code
#
# CAMERA:
# - Need option to work camera rotation based on a given position
# - The z axis is easy as we can just do roi-cam_pos but what about x and y
#
# SCENE OBJECT:
# - Allow multiple objects in the scene with their own transformations
# - Allow multiple cameras in the scene


def main() -> None:
    """pyvale example: rasterisation field renderer
    ----------------------------------------------------------------------------
    - TODO
    """
    # This a path to an exodus *.e output file from MOOSE, this can be
    # replaced with a path to your own simulation file
    #sim_path = Path.home()/"pyvale"/"src"/"pyvale"/"simcases"/"case21_out.e"
    sim_path = pyv.DataSet.render_mechanical_3d_path()

    disp_comps = ("disp_x","disp_y","disp_z")

    sim_data = mh.ExodusReader(sim_path).read_all_sim_data()

    # Scale m -> mm
    sim_data = pyv.scale_length_units(sim_data,disp_comps,1000.0)

    # Extracts the surface mesh from a full 3d simulation for rendering
    render_mesh = pyv.create_render_mesh(sim_data,
                                         ("disp_y","disp_x"),
                                         sim_spat_dim=3,
                                         field_disp_keys=disp_comps)

    print()
    print(80*"-")
    print("MESH DATA:")
    print(80*"-")
    print("connectivity.shape=(num_elems,num_nodes_per_elem)")
    print(f"{render_mesh.connectivity.shape=}")
    print()
    print("coords.shape=(num_nodes,coord[x,y,z])")
    print(f"{render_mesh.coords.shape=}")
    print()
    print("fields.shape=(num_coords,num_time_steps,num_components)")
    print(f"{render_mesh.fields_render.shape=}")
    if render_mesh.fields_disp is not None:
        print(f"{render_mesh.fields_disp.shape=}")
    print(80*"-")
    print()

    pixel_num = np.array((960,1280))
    pixel_size = np.array((5.3e-3,5.3e-3))
    focal_leng: float = 50
    cam_rot = Rotation.from_euler("zyx",(0.0,-30.0,-10.0),degrees=True)
    fov_scale_factor: float = 1.1

    (roi_pos_world,
     cam_pos_world) = pyv.CameraTools.pos_fill_frame_from_rotation(
        coords_world=render_mesh.coords,
        pixel_num=pixel_num,
        pixel_size=pixel_size,
        focal_leng=focal_leng,
        cam_rot=cam_rot,
        frame_fill=fov_scale_factor,
    )

    cam_data = pyv.CameraData(
        pixels_num=pixel_num,
        pixels_size=pixel_size,
        pos_world=cam_pos_world,
        rot_world=cam_rot,
        roi_cent_world=roi_pos_world,
        focal_length=focal_leng,
        sub_samp=2,
        back_face_removal=True,
    )

    print(80*"-")
    print("CAMERA DATA:")
    print(80*"-")
    print(f"{roi_pos_world=}")
    print(f"{cam_pos_world=}")
    print()
    print("World to camera matrix:")
    print(cam_data.world_to_cam_mat)
    print(80*"-")
    print()

    print(80*"-")
    total_frames = render_mesh.fields_render.shape[1]*render_mesh.fields_render.shape[2]
    print(f"Time steps to render: {render_mesh.fields_render.shape[1]}")
    print(f"Fields to render: {render_mesh.fields_render.shape[2]}")
    print(f"Total frames to render: {total_frames}")
    print(80*"-")

    print()
    print(80*"=")
    print("RASTER LOOP START")

    #save_path = Path.cwd()/"example_output"
    save_path = None
    static_mesh = False

    time_start_loop = time.perf_counter()
    if static_mesh:
        images = pyv.RasterNP.raster_static_mesh(
            cam_data,render_mesh,save_path,threads_num=8
        )
    else:
        time_start_loop = time.perf_counter()
        images = pyv.RasterNP.raster_deformed_mesh(
            cam_data,render_mesh,save_path,parallel=8
        )

    time_end_loop = time.perf_counter()
    render_time = time_end_loop - time_start_loop

    print("RASTER LOOP END")
    print(80*"=")
    print("PERFORMANCE")
    print(f"Total frames = {total_frames}")
    print(f"Total render time = {render_time:.4f} seconds")
    print(f"Time per frame = {(render_time/total_frames):.4f} seconds")
    print(80*"=")

    plot_on = True
    if plot_on:
        (fig,ax) = pyv.plot_field_image(images[:,:,-1,0],
                                        title_str="Disp. y, [mm]")
        plt.show()

if __name__ == "__main__":
    main()
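
The TODO list at the top of ex_rastenp.py notes that saving the rendered images for post-processing is still open, and the script itself sets save_path = None. The following is a plain-numpy sketch of persisting the returned image stack, indexed as images[row, col, time_step, field] as implied by the images[:,:,-1,0] plot call above; it is not part of pyvale's API, just ordinary np.save/np.load usage:

from pathlib import Path
import numpy as np

def save_image_stack(images: np.ndarray, out_dir: Path, tag: str = "rasternp") -> Path:
    # Persist the full (rows, cols, time_steps, fields) stack as a single .npy file.
    out_dir.mkdir(parents=True, exist_ok=True)
    out_file = out_dir / f"{tag}_images.npy"
    np.save(out_file, images)
    return out_file

# Usage after the raster loop, assuming `images` as returned above:
# saved = save_image_stack(images, Path.cwd() / "example_output")
# images_reloaded = np.load(saved)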

pyvale/examples/renderrasterisation/ex_rastercyth_oneframe.py

@@ -0,0 +1,218 @@
# ==============================================================================
# pyvale: the python validation engine
# License: MIT
# Copyright (C) 2025 The Computer Aided Validation Team
# ==============================================================================

import time
import numpy as np
from scipy.spatial.transform import Rotation
import matplotlib.pyplot as plt
import mooseherder as mh
import pyvale as pyv
import imagebenchmarks as ib

def main() -> None:
    """pyvale example: rasterisation field renderer
    ----------------------------------------------------------------------------
    - TODO
    """
    print()
    print(80*"=")
    print("RASTER CYTHON FILE (should be *.so on Linux):")
    print(pyv.rastercyth.__file__)
    print(80*"=")
    print()

    return

    benchmark = True
    if not benchmark:

        # This a path to an exodus *.e output file from MOOSE, this can be
        # replaced with a path to your own simulation file
        #sim_path = Path.home()/"pyvale"/"src"/"pyvale"/"simcases"/"case26_out.e"

        sim_path = pyv.DataSet.render_simple_block_path()
        sim_path = pyv.DataSet.render_mechanical_3d_path()
        sim_data = mh.ExodusReader(sim_path).read_all_sim_data()

        disp_comps = ("disp_x","disp_y","disp_z")

        # Scale m -> mm
        sim_data = pyv.scale_length_units(sim_data,disp_comps,1000.0)

        print()
        print(f"{np.max(np.abs(sim_data.node_vars['disp_x']))=}")
        print(f"{np.max(np.abs(sim_data.node_vars['disp_y']))=}")
        print(f"{np.max(np.abs(sim_data.node_vars['disp_z']))=}")
        print()

        # Extracts the surface mesh from a full 3d simulation for rendering
        render_mesh = pyv.create_render_mesh(sim_data,
                                             ("disp_y","disp_x","disp_z"),
                                             sim_spat_dim=3,
                                             field_disp_keys=disp_comps)

        pixel_num = np.array((960,1280),dtype=np.int32)
        pixel_size = np.array((5.3e-3,5.3e-3),dtype=np.float64)
        focal_leng: float = 50.0
        cam_rot = Rotation.from_euler("ZYX",(0.0,-30.0,-10.0),degrees=True)
        fov_scale_factor: float = 1.1

        (roi_pos_world,
         cam_pos_world) = pyv.CameraTools.pos_fill_frame_from_rotation(
            coords_world=render_mesh.coords,
            pixel_num=pixel_num,
            pixel_size=pixel_size,
            focal_leng=focal_leng,
            cam_rot=cam_rot,
            frame_fill=fov_scale_factor,
        )

        cam_data = pyv.CameraData(
            pixels_num=pixel_num,
            pixels_size=pixel_size,
            pos_world=cam_pos_world,
            rot_world=cam_rot,
            roi_cent_world=roi_pos_world,
            focal_length=focal_leng,
            sub_samp=2,
            back_face_removal=True,
        )

    else:
        case_ind = 0
        (case_ident,render_mesh,cam_data) = ib.load_benchmark_by_index(case_ind)
        print(80*"=")
        print("BENCHMARK IDENTIFIER:")
        print(f"{case_ident}")
        print(80*"=")

    print()
    print(80*"-")
    print("MESH DATA:")
    print(80*"-")
    print("connectivity.shape=(num_elems,num_nodes_per_elem)")
    print(f"{render_mesh.connectivity.shape=}")
    print()
    print("coords.shape=(num_nodes,coord[x,y,z])")
    print(f"{render_mesh.coords.shape=}")
    print()
    print("fields.shape=(num_coords,num_time_steps,num_components)")
    print(f"{render_mesh.fields_render.shape=}")
    if render_mesh.fields_disp is not None:
        print(f"{render_mesh.fields_disp.shape=}")
    print(80*"-")
    print()

    print(80*"-")
    print("CAMERA DATA:")
    print(80*"-")
    print(f"{cam_data.image_dist=}")
    print(f"{cam_data.roi_cent_world=}")
    print(f"{cam_data.pos_world=}")
    print()
    print("World to camera matrix:")
    print(cam_data.world_to_cam_mat)
    print(80*"-")
    print()

    print(80*"-")
    total_frames = render_mesh.fields_render.shape[1]*render_mesh.fields_render.shape[2]
    print(f"Time steps to render: {render_mesh.fields_render.shape[1]}")
    print(f"Fields to render: {render_mesh.fields_render.shape[2]}")
    print(f"Total frames to render: {total_frames}")
    print(80*"-")

    print(80*"=")
    print("RASTER ELEMENT LOOP START")
    print(80*"=")

    num_loops = 1

    loop_times = np.zeros((num_loops,),dtype=np.float64)

    frame = -1
    fields_render = render_mesh.fields_render[:,frame,:]
    total_frames = fields_render.shape[1]

    print()
    print("Running raster loop.")
    for nn in range(num_loops):
        print(f"Running loop {nn}")
        loop_start = time.perf_counter()

        (image_buffer,
         depth_buffer,
         elems_in_image) = pyv.rastercyth.raster_frame(
            render_mesh.coords,
            render_mesh.connectivity,
            fields_render,
            cam_data)

        loop_times[nn] = time.perf_counter() - loop_start

    print()
    print(80*"=")
    print("PERFORMANCE TIMERS")
    print(f"Elements in image = {elems_in_image}")
    print(f"Image buffer shape = {image_buffer.shape}")
    print(f"Avg. total render time = {np.mean(loop_times):.4f} seconds")
    print(f"Avg. render time per frame = {(np.mean(loop_times)/total_frames):.4f} seconds")
    print(80*"=")

    #===========================================================================
    # PLOTTING
    plot_on = True
    plot_field = 0

    # depth_to_plot = np.copy(np.asarray(depth_buffer[:,:,plot_frame]))
    # depth_to_plot[depth_buffer[:,:,plot_frame] > 10*cam_data.image_dist] = np.nan
    # image_to_plot = np.copy(np.asarray(image_buffer[:,:,plot_frame,plot_field]))
    # image_to_plot[depth_buffer[:,:,plot_frame] > 10*cam_data.image_dist] = np.nan

    if plot_on:
        plot_opts = pyv.PlotOptsGeneral()

        (fig, ax) = plt.subplots(figsize=plot_opts.single_fig_size_square,
                                 layout='constrained')
        fig.set_dpi(plot_opts.resolution)
        cset = plt.imshow(depth_buffer[:,:],
                          cmap=plt.get_cmap(plot_opts.cmap_seq))
                          #origin='lower')
        ax.set_aspect('equal','box')
        fig.colorbar(cset)
        ax.set_title(f"Depth buffer",fontsize=plot_opts.font_head_size)
        ax.set_xlabel(r"x ($px$)",
                      fontsize=plot_opts.font_ax_size, fontname=plot_opts.font_name)
        ax.set_ylabel(r"y ($px$)",
                      fontsize=plot_opts.font_ax_size, fontname=plot_opts.font_name)

        (fig, ax) = plt.subplots(figsize=plot_opts.single_fig_size_square,
                                 layout='constrained')
        fig.set_dpi(plot_opts.resolution)
        cset = plt.imshow(image_buffer[:,:,plot_field],
                          cmap=plt.get_cmap(plot_opts.cmap_seq))
                          #origin='lower')
        ax.set_aspect('equal','box')
        fig.colorbar(cset)
        ax.set_title(f"Field Image",fontsize=plot_opts.font_head_size)
        ax.set_xlabel(r"x ($px$)",
                      fontsize=plot_opts.font_ax_size, fontname=plot_opts.font_name)
        ax.set_ylabel(r"y ($px$)",
                      fontsize=plot_opts.font_ax_size, fontname=plot_opts.font_name)

        plt.show()

if __name__ == "__main__":
    main()

pyvale/examples/renderrasterisation/ex_rastercyth_static_cypara.py

@@ -0,0 +1,187 @@
# ==============================================================================
# pyvale: the python validation engine
# License: MIT
# Copyright (C) 2025 The Computer Aided Validation Team
# ==============================================================================

from pathlib import Path
import time
import numpy as np
from scipy.spatial.transform import Rotation
import matplotlib.pyplot as plt
import mooseherder as mh
import pyvale as pyv

def main() -> None:
    print()
    print(80*"=")
    print("RASTER CYTHON FILE (should be *.so on Linux):")
    print(pyv.rastercyth.__file__)
    print(80*"=")
    print()

    sim_path = pyv.DataSet.render_mechanical_3d_path()
    #sim_path = pyv.DataSet.render_simple_block_path()
    #sim_path = Path.home()/"pyvale"/"src"/"pyvale"/"simcases"/"case26_out.e"
    sim_data = mh.ExodusReader(sim_path).read_all_sim_data()

    disp_comps = ("disp_x","disp_y","disp_z")

    # Scale m -> mm
    sim_data = pyv.scale_length_units(sim_data,disp_comps,1000.0)

    print()
    print(f"{np.max(np.abs(sim_data.node_vars['disp_x']))=}")
    print(f"{np.max(np.abs(sim_data.node_vars['disp_y']))=}")
    print(f"{np.max(np.abs(sim_data.node_vars['disp_z']))=}")
    print()

    # Extracts the surface mesh from a full 3d simulation for rendering
    render_mesh = pyv.create_render_mesh(sim_data,
                                         ("disp_y","disp_x"),
                                         sim_spat_dim=3,
                                         field_disp_keys=disp_comps)

    print()
    print(80*"-")
    print("MESH DATA:")
    print(80*"-")
    print("connectivity.shape=(num_elems,num_nodes_per_elem)")
    print(f"{render_mesh.connectivity.shape=}")
    print()
    print("coords.shape=(num_nodes,coord[x,y,z])")
    print(f"{render_mesh.coords.shape=}")
    print()
    print("fields.shape=(num_coords,num_time_steps,num_components)")
    print(f"{render_mesh.fields_render.shape=}")
    if render_mesh.fields_disp is not None:
        print(f"{render_mesh.fields_disp.shape=}")
    print(80*"-")
    print()

    pixel_num = np.array((960,1280),dtype=np.int32)
    pixel_size = np.array((5.3e-3,5.3e-3),dtype=np.float64)
    focal_leng: float = 50.0
    cam_rot = Rotation.from_euler("zyx",(0.0,-30.0,-10.0),degrees=True)
    fov_scale_factor: float = 1.1

    (roi_pos_world,
     cam_pos_world) = pyv.CameraTools.pos_fill_frame_from_rotation(
        coords_world=render_mesh.coords,
        pixel_num=pixel_num,
        pixel_size=pixel_size,
        focal_leng=focal_leng,
        cam_rot=cam_rot,
        frame_fill=fov_scale_factor,
    )

    cam_data = pyv.CameraData(
        pixels_num=pixel_num,
        pixels_size=pixel_size,
        pos_world=cam_pos_world,
        rot_world=cam_rot,
        roi_cent_world=roi_pos_world,
        focal_length=focal_leng,
        sub_samp=2,
        back_face_removal=True,
    )

    print(80*"-")
    print("CAMERA DATA:")
    print(80*"-")
    print(f"{cam_data.image_dist=}")
    print(f"{cam_data.roi_cent_world=}")
    print(f"{cam_data.pos_world=}")
    print()
    print("World to camera matrix:")
    print(cam_data.world_to_cam_mat)
    print(80*"-")
    print()

    print(80*"-")
    total_frames = render_mesh.fields_render.shape[1]*render_mesh.fields_render.shape[2]
    print(f"Time steps to render: {render_mesh.fields_render.shape[1]}")
    print(f"Fields to render: {render_mesh.fields_render.shape[2]}")
    print(f"Total frames to render: {total_frames}")
    print(80*"-")

    print(80*"=")
    print("RASTER ELEMENT LOOP START")
    print(80*"=")

    num_loops = 1
    loop_times = np.zeros((num_loops,),dtype=np.float64)

    print()
    print("Running raster loop.")
    for nn in range(num_loops):
        print(f"Running loop {nn}")
        loop_start = time.perf_counter()

        (image_buffer,
         depth_buffer,
         elems_in_image) = pyv.rastercyth.raster_static_mesh(
            render_mesh,
            cam_data,
            0)

        loop_times[nn] = time.perf_counter() - loop_start

    print()
    print(80*"=")
    print("PERFORMANCE TIMERS")
    print(f"Elements in image = {elems_in_image}")
    print(f"Image buffer shape = {image_buffer.shape}")
    print(f"Avg. total render time = {np.mean(loop_times):.4f} seconds")
    print(f"Avg. render time per frame = {(np.mean(loop_times)/total_frames):.4f} seconds")
    print(80*"=")

    #===========================================================================
    # PLOTTING
    plot_on = True
    plot_frames = (1,)#range(3)
    plot_field = 0

    # depth_to_plot = np.copy(np.asarray(depth_buffer[:,:,plot_frame]))
    # depth_to_plot[depth_buffer[:,:,plot_frame] > 10*cam_data.image_dist] = np.nan
    # image_to_plot = np.copy(np.asarray(image_buffer[:,:,plot_frame,plot_field]))
    # image_to_plot[depth_buffer[:,:,plot_frame] > 10*cam_data.image_dist] = np.nan

    if plot_on:
        plot_opts = pyv.PlotOptsGeneral()

        for ff in plot_frames:
            (fig, ax) = plt.subplots(figsize=plot_opts.single_fig_size_square,
                                     layout='constrained')
            fig.set_dpi(plot_opts.resolution)
            cset = plt.imshow(depth_buffer[:,:,ff],
                              cmap=plt.get_cmap(plot_opts.cmap_seq))
                              #origin='lower')
            ax.set_aspect('equal','box')
            fig.colorbar(cset)
            ax.set_title(f"Depth buffer: {ff}",fontsize=plot_opts.font_head_size)
            ax.set_xlabel(r"x ($px$)",
                          fontsize=plot_opts.font_ax_size, fontname=plot_opts.font_name)
            ax.set_ylabel(r"y ($px$)",
                          fontsize=plot_opts.font_ax_size, fontname=plot_opts.font_name)

            (fig, ax) = plt.subplots(figsize=plot_opts.single_fig_size_square,
                                     layout='constrained')
            fig.set_dpi(plot_opts.resolution)
            cset = plt.imshow(image_buffer[:,:,ff,plot_field],
                              cmap=plt.get_cmap(plot_opts.cmap_seq))
                              #origin='lower')
            ax.set_aspect('equal','box')
            fig.colorbar(cset)
            ax.set_title(f"Field Image: {ff}",fontsize=plot_opts.font_head_size)
            ax.set_xlabel(r"x ($px$)",
                          fontsize=plot_opts.font_ax_size, fontname=plot_opts.font_name)
            ax.set_ylabel(r"y ($px$)",
                          fontsize=plot_opts.font_ax_size, fontname=plot_opts.font_name)

        plt.show()

if __name__ == "__main__":
    main()
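
Both rastercyth examples above carry commented-out lines that mask background pixels using the depth buffer before plotting. The following is a minimal sketch of that masking as a standalone helper, assuming the (rows, cols, frames) buffer shapes used in the static example and treating the 10*cam_data.image_dist threshold from those comments as a heuristic cut-off; it is not part of pyvale's API:

import numpy as np

def mask_background(image_buffer: np.ndarray,
                    depth_buffer: np.ndarray,
                    image_dist: float,
                    frame: int,
                    field: int) -> tuple[np.ndarray, np.ndarray]:
    # Copy one frame of the depth and field buffers and blank out pixels that
    # the rasteriser left at far depth (no element covers them), so matplotlib
    # renders them as empty rather than as extreme colour values.
    depth = np.copy(np.asarray(depth_buffer[:, :, frame]))
    field_img = np.copy(np.asarray(image_buffer[:, :, frame, field]))
    background = depth_buffer[:, :, frame] > 10.0 * image_dist
    depth[background] = np.nan
    field_img[background] = np.nan
    return field_img, depth

# e.g. field_img, depth = mask_background(image_buffer, depth_buffer,
#                                         cam_data.image_dist, frame=1, field=0)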