pyvale-2025.5.3-cp311-cp311-musllinux_1_2_i686.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of pyvale might be problematic.
- pyvale/__init__.py +89 -0
- pyvale/analyticmeshgen.py +102 -0
- pyvale/analyticsimdatafactory.py +91 -0
- pyvale/analyticsimdatagenerator.py +323 -0
- pyvale/blendercalibrationdata.py +15 -0
- pyvale/blenderlightdata.py +26 -0
- pyvale/blendermaterialdata.py +15 -0
- pyvale/blenderrenderdata.py +30 -0
- pyvale/blenderscene.py +488 -0
- pyvale/blendertools.py +420 -0
- pyvale/camera.py +146 -0
- pyvale/cameradata.py +69 -0
- pyvale/cameradata2d.py +84 -0
- pyvale/camerastereo.py +217 -0
- pyvale/cameratools.py +522 -0
- pyvale/cython/rastercyth.c +32211 -0
- pyvale/cython/rastercyth.cpython-311-i386-linux-musl.so +0 -0
- pyvale/cython/rastercyth.py +640 -0
- pyvale/data/__init__.py +5 -0
- pyvale/data/cal_target.tiff +0 -0
- pyvale/data/case00_HEX20_out.e +0 -0
- pyvale/data/case00_HEX27_out.e +0 -0
- pyvale/data/case00_HEX8_out.e +0 -0
- pyvale/data/case00_TET10_out.e +0 -0
- pyvale/data/case00_TET14_out.e +0 -0
- pyvale/data/case00_TET4_out.e +0 -0
- pyvale/data/case13_out.e +0 -0
- pyvale/data/case16_out.e +0 -0
- pyvale/data/case17_out.e +0 -0
- pyvale/data/case18_1_out.e +0 -0
- pyvale/data/case18_2_out.e +0 -0
- pyvale/data/case18_3_out.e +0 -0
- pyvale/data/case25_out.e +0 -0
- pyvale/data/case26_out.e +0 -0
- pyvale/data/optspeckle_2464x2056px_spec5px_8bit_gblur1px.tiff +0 -0
- pyvale/dataset.py +325 -0
- pyvale/errorcalculator.py +109 -0
- pyvale/errordriftcalc.py +146 -0
- pyvale/errorintegrator.py +336 -0
- pyvale/errorrand.py +607 -0
- pyvale/errorsyscalib.py +134 -0
- pyvale/errorsysdep.py +327 -0
- pyvale/errorsysfield.py +414 -0
- pyvale/errorsysindep.py +808 -0
- pyvale/examples/__init__.py +5 -0
- pyvale/examples/basics/ex1_1_basicscalars_therm2d.py +131 -0
- pyvale/examples/basics/ex1_2_sensormodel_therm2d.py +158 -0
- pyvale/examples/basics/ex1_3_customsens_therm3d.py +216 -0
- pyvale/examples/basics/ex1_4_basicerrors_therm3d.py +153 -0
- pyvale/examples/basics/ex1_5_fielderrs_therm3d.py +168 -0
- pyvale/examples/basics/ex1_6_caliberrs_therm2d.py +133 -0
- pyvale/examples/basics/ex1_7_spatavg_therm2d.py +123 -0
- pyvale/examples/basics/ex2_1_basicvectors_disp2d.py +112 -0
- pyvale/examples/basics/ex2_2_vectorsens_disp2d.py +111 -0
- pyvale/examples/basics/ex2_3_sensangle_disp2d.py +139 -0
- pyvale/examples/basics/ex2_4_chainfielderrs_disp2d.py +196 -0
- pyvale/examples/basics/ex2_5_vectorfields3d_disp3d.py +109 -0
- pyvale/examples/basics/ex3_1_basictensors_strain2d.py +114 -0
- pyvale/examples/basics/ex3_2_tensorsens2d_strain2d.py +111 -0
- pyvale/examples/basics/ex3_3_tensorsens3d_strain3d.py +182 -0
- pyvale/examples/basics/ex4_1_expsim2d_thermmech2d.py +171 -0
- pyvale/examples/basics/ex4_2_expsim3d_thermmech3d.py +252 -0
- pyvale/examples/genanalyticdata/ex1_1_scalarvisualisation.py +35 -0
- pyvale/examples/genanalyticdata/ex1_2_scalarcasebuild.py +43 -0
- pyvale/examples/genanalyticdata/ex2_1_analyticsensors.py +80 -0
- pyvale/examples/imagedef2d/ex_imagedef2d_todisk.py +79 -0
- pyvale/examples/renderblender/ex1_1_blenderscene.py +121 -0
- pyvale/examples/renderblender/ex1_2_blenderdeformed.py +119 -0
- pyvale/examples/renderblender/ex2_1_stereoscene.py +128 -0
- pyvale/examples/renderblender/ex2_2_stereodeformed.py +131 -0
- pyvale/examples/renderblender/ex3_1_blendercalibration.py +120 -0
- pyvale/examples/renderrasterisation/ex_rastenp.py +153 -0
- pyvale/examples/renderrasterisation/ex_rastercyth_oneframe.py +218 -0
- pyvale/examples/renderrasterisation/ex_rastercyth_static_cypara.py +187 -0
- pyvale/examples/renderrasterisation/ex_rastercyth_static_pypara.py +190 -0
- pyvale/examples/visualisation/ex1_1_plot_traces.py +102 -0
- pyvale/examples/visualisation/ex2_1_animate_sim.py +89 -0
- pyvale/experimentsimulator.py +175 -0
- pyvale/field.py +128 -0
- pyvale/fieldconverter.py +351 -0
- pyvale/fieldsampler.py +111 -0
- pyvale/fieldscalar.py +166 -0
- pyvale/fieldtensor.py +218 -0
- pyvale/fieldtransform.py +388 -0
- pyvale/fieldvector.py +213 -0
- pyvale/generatorsrandom.py +505 -0
- pyvale/imagedef2d.py +569 -0
- pyvale/integratorfactory.py +240 -0
- pyvale/integratorquadrature.py +217 -0
- pyvale/integratorrectangle.py +165 -0
- pyvale/integratorspatial.py +89 -0
- pyvale/integratortype.py +43 -0
- pyvale/output.py +17 -0
- pyvale/pyvaleexceptions.py +11 -0
- pyvale/raster.py +31 -0
- pyvale/rastercy.py +77 -0
- pyvale/rasternp.py +603 -0
- pyvale/rendermesh.py +147 -0
- pyvale/sensorarray.py +178 -0
- pyvale/sensorarrayfactory.py +196 -0
- pyvale/sensorarraypoint.py +278 -0
- pyvale/sensordata.py +71 -0
- pyvale/sensordescriptor.py +213 -0
- pyvale/sensortools.py +142 -0
- pyvale/simcases/case00_HEX20.i +242 -0
- pyvale/simcases/case00_HEX27.i +242 -0
- pyvale/simcases/case00_HEX8.i +242 -0
- pyvale/simcases/case00_TET10.i +242 -0
- pyvale/simcases/case00_TET14.i +242 -0
- pyvale/simcases/case00_TET4.i +242 -0
- pyvale/simcases/case01.i +101 -0
- pyvale/simcases/case02.i +156 -0
- pyvale/simcases/case03.i +136 -0
- pyvale/simcases/case04.i +181 -0
- pyvale/simcases/case05.i +234 -0
- pyvale/simcases/case06.i +305 -0
- pyvale/simcases/case07.geo +135 -0
- pyvale/simcases/case07.i +87 -0
- pyvale/simcases/case08.geo +144 -0
- pyvale/simcases/case08.i +153 -0
- pyvale/simcases/case09.geo +204 -0
- pyvale/simcases/case09.i +87 -0
- pyvale/simcases/case10.geo +204 -0
- pyvale/simcases/case10.i +257 -0
- pyvale/simcases/case11.geo +337 -0
- pyvale/simcases/case11.i +147 -0
- pyvale/simcases/case12.geo +388 -0
- pyvale/simcases/case12.i +329 -0
- pyvale/simcases/case13.i +140 -0
- pyvale/simcases/case14.i +159 -0
- pyvale/simcases/case15.geo +337 -0
- pyvale/simcases/case15.i +150 -0
- pyvale/simcases/case16.geo +391 -0
- pyvale/simcases/case16.i +357 -0
- pyvale/simcases/case17.geo +135 -0
- pyvale/simcases/case17.i +144 -0
- pyvale/simcases/case18.i +254 -0
- pyvale/simcases/case18_1.i +254 -0
- pyvale/simcases/case18_2.i +254 -0
- pyvale/simcases/case18_3.i +254 -0
- pyvale/simcases/case19.geo +252 -0
- pyvale/simcases/case19.i +99 -0
- pyvale/simcases/case20.geo +252 -0
- pyvale/simcases/case20.i +250 -0
- pyvale/simcases/case21.geo +74 -0
- pyvale/simcases/case21.i +155 -0
- pyvale/simcases/case22.geo +82 -0
- pyvale/simcases/case22.i +140 -0
- pyvale/simcases/case23.geo +164 -0
- pyvale/simcases/case23.i +140 -0
- pyvale/simcases/case24.geo +79 -0
- pyvale/simcases/case24.i +123 -0
- pyvale/simcases/case25.geo +82 -0
- pyvale/simcases/case25.i +140 -0
- pyvale/simcases/case26.geo +166 -0
- pyvale/simcases/case26.i +140 -0
- pyvale/simcases/run_1case.py +61 -0
- pyvale/simcases/run_all_cases.py +69 -0
- pyvale/simcases/run_build_case.py +64 -0
- pyvale/simcases/run_example_cases.py +69 -0
- pyvale/simtools.py +67 -0
- pyvale/visualexpplotter.py +191 -0
- pyvale/visualimagedef.py +74 -0
- pyvale/visualimages.py +76 -0
- pyvale/visualopts.py +493 -0
- pyvale/visualsimanimator.py +111 -0
- pyvale/visualsimsensors.py +318 -0
- pyvale/visualtools.py +136 -0
- pyvale/visualtraceplotter.py +142 -0
- pyvale-2025.5.3.dist-info/METADATA +144 -0
- pyvale-2025.5.3.dist-info/RECORD +174 -0
- pyvale-2025.5.3.dist-info/WHEEL +5 -0
- pyvale-2025.5.3.dist-info/licenses/LICENSE +21 -0
- pyvale-2025.5.3.dist-info/top_level.txt +1 -0
pyvale/integratorspatial.py
ADDED
@@ -0,0 +1,89 @@
+# ==============================================================================
+# pyvale: the python validation engine
+# License: MIT
+# Copyright (C) 2025 The Computer Aided Validation Team
+# ==============================================================================
+
+from abc import ABC, abstractmethod
+import numpy as np
+from pyvale.sensordata import SensorData
+
+
+def create_int_pt_array(sens_data: SensorData,
+                        int_pt_offsets: np.ndarray,
+                        ) -> np.ndarray:
+    """Creates the integration point locations in world (simulation) coordinates
+    based on the specified offsets from each sensor's local origin.
+
+    Parameters
+    ----------
+    sens_data : SensorData
+        Contains the parameters of the sensor array including: positions, sample
+        times and orientations. If specified, the sensor orientations are used
+        to rotate the positions of the integration points.
+    int_pt_offsets : np.ndarray
+        Offsets of the integration points in non-rotated local coordinates.
+
+    Returns
+    -------
+    np.ndarray
+        The integration point locations in world (simulation) coordinates. The
+        rows of the array are all the integration points for all sensors and the
+        columns are the X,Y,Z coordinates. shape=(num_sensors*num_int_points,3).
+    """
+    n_sens = sens_data.positions.shape[0]
+    n_int_pts = int_pt_offsets.shape[0]
+
+    # shape=(n_sens*n_int_pts,n_dims)
+    offset_array = np.tile(int_pt_offsets,(n_sens,1))
+
+    if sens_data.angles is not None:
+        for ii,rr in enumerate(sens_data.angles):
+            offset_array[ii*n_int_pts:(ii+1)*n_int_pts,:] = \
+                np.matmul(rr.as_matrix(),int_pt_offsets.T).T
+
+    # shape=(n_sens*n_int_pts,n_dims)
+    int_pt_array = np.repeat(sens_data.positions,n_int_pts,axis=0)
+
+    return int_pt_array + offset_array
+
+
+class IIntegratorSpatial(ABC):
+    """Interface (abstract base class) for spatial integrators. Used for
+    averaging sensor values over a given space.
+    """
+
+    @abstractmethod
+    def calc_averages(self, sens_data: SensorData) -> np.ndarray:
+        """Abstract method. Calculates the spatial average for each sensor using
+        the specified sensor dimensions and integration method. This is done by
+        interpolating the sensor values at each sensor's integration points.
+
+        Parameters
+        ----------
+        sens_data : SensorData
+            Contains the parameters of the sensor array including: positions,
+            sample times, spatial averaging and orientations.
+
+        Returns
+        -------
+        np.ndarray
+            Array of simulated sensor measurements. shape=(num_sensors,
+            num_field_components,num_time_steps).
+        """
+
+    @abstractmethod
+    def get_averages(self) -> np.ndarray:
+        """Abstract method. Returns the previously calculated spatial averages
+        for each sensor. If these have not been calculated then `calc_averages`
+        is called and the result is returned.
+
+        Returns
+        -------
+        np.ndarray
+            Array of simulated sensor measurements. shape=(num_sensors,
+            num_field_components,num_time_steps).
+        """
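For context, below is a minimal standalone sketch of the tiling-and-rotation logic in create_int_pt_array above. The SensorDataStandIn dataclass and int_pt_array_sketch function are hypothetical illustrations (only the .positions and .angles attributes used by the function are modelled, not pyvale's own SensorData from pyvale/sensordata.py), and scipy.spatial.transform.Rotation is assumed as the source of the .as_matrix() call on each entry of angles.

# Standalone sketch of the offset/rotation logic in create_int_pt_array above.
# SensorDataStandIn is a hypothetical stand-in, not pyvale's SensorData.
from dataclasses import dataclass
import numpy as np
from scipy.spatial.transform import Rotation  # assumed source of .as_matrix()

@dataclass
class SensorDataStandIn:
    positions: np.ndarray           # shape=(num_sensors,3), world coordinates
    angles: tuple | None = None     # one Rotation per sensor, or None

def int_pt_array_sketch(sens: SensorDataStandIn,
                        int_pt_offsets: np.ndarray) -> np.ndarray:
    n_sens = sens.positions.shape[0]
    n_int_pts = int_pt_offsets.shape[0]

    # Tile the local offsets once per sensor: shape=(n_sens*n_int_pts,3)
    offset_array = np.tile(int_pt_offsets, (n_sens, 1))

    # Rotate each sensor's block of offsets into that sensor's orientation
    if sens.angles is not None:
        for ii, rr in enumerate(sens.angles):
            offset_array[ii*n_int_pts:(ii+1)*n_int_pts, :] = \
                (rr.as_matrix() @ int_pt_offsets.T).T

    # Repeat each sensor position once per integration point and add the offsets
    return np.repeat(sens.positions, n_int_pts, axis=0) + offset_array

# Two sensors with four integration points each; the second sensor is rotated
# 90 degrees about Z, so its offsets are rotated before being added.
sensors = SensorDataStandIn(
    positions=np.array([[0.0, 0.0, 0.0], [10.0, 0.0, 0.0]]),
    angles=(Rotation.identity(), Rotation.from_euler("z", 90, degrees=True)),
)
offsets = np.array([[-1.0, -1.0, 0.0], [1.0, -1.0, 0.0],
                    [-1.0, 1.0, 0.0], [1.0, 1.0, 0.0]])
print(int_pt_array_sketch(sensors, offsets))  # shape=(8,3)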
pyvale/integratortype.py
ADDED
@@ -0,0 +1,43 @@
+# ==============================================================================
+# pyvale: the python validation engine
+# License: MIT
+# Copyright (C) 2025 The Computer Aided Validation Team
+# ==============================================================================
+
+import enum
+
+
+class EIntSpatialType(enum.Enum):
+    """Enumeration specifying the type of spatial integrator to build. Used for
+    spatial averaging for sensors.
+
+    RECT1PT
+        Rectangular 2D integrator splitting the area into 1 part.
+
+    RECT4PT
+        Rectangular 2D integrator splitting the area into 4 equal parts.
+
+    RECT9PT
+        Rectangular 2D integrator splitting the area into 9 equal parts.
+
+    QUAD4PT
+        Gaussian quadrature 2D integrator over 4 points.
+
+    QUAD9PT
+        Gaussian quadrature 2D integrator over 9 points.
+    """
+
+    RECT1PT = enum.auto()
+    """Rectangular 2D integrator splitting the area into 1 part."""
+
+    RECT4PT = enum.auto()
+    """Rectangular 2D integrator splitting the area into 4 equal parts."""
+
+    RECT9PT = enum.auto()
+    """Rectangular 2D integrator splitting the area into 9 equal parts."""
+
+    QUAD4PT = enum.auto()
+    """Gaussian quadrature 2D integrator over 4 points."""
+
+    QUAD9PT = enum.auto()
+    """Gaussian quadrature 2D integrator over 9 points."""
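As a numerical aside, the QUAD4PT option above corresponds to the textbook 2x2 Gauss-Legendre rule. The sketch below only illustrates the standard quadrature points (plus/minus 1/sqrt(3) on the reference square) mapped onto a rectangular sensor face; pyvale's actual integrator lives in pyvale/integratorquadrature.py, which is not shown in this diff, so the quad4pt_offsets name and signature here are illustrative only.

# Standard 2x2 Gauss-Legendre rule corresponding to EIntSpatialType.QUAD4PT.
# quad4pt_offsets is a hypothetical helper, not part of the pyvale API.
import numpy as np

def quad4pt_offsets(side_x: float, side_y: float) -> tuple[np.ndarray, np.ndarray]:
    """Return (offsets, weights) for 2x2 Gauss quadrature over a rectangular
    sensor face of dimensions side_x by side_y centred on the sensor origin."""
    gauss = 1.0 / np.sqrt(3.0)  # +/- 1/sqrt(3) on the reference square [-1,1]^2
    pts = np.array([[-gauss, -gauss], [gauss, -gauss],
                    [-gauss,  gauss], [gauss,  gauss]])
    # Map from the reference square to the physical sensor area (z offset = 0)
    offsets = np.column_stack((0.5*side_x*pts[:, 0],
                               0.5*side_y*pts[:, 1],
                               np.zeros(4)))
    # Equal weights: each point carries a quarter of the sensor area
    weights = np.full(4, 0.25*side_x*side_y)
    return offsets, weights

offsets, weights = quad4pt_offsets(2.0, 2.0)
# The area-weighted average of a field f sampled at these offsets approximates
# the mean of f over the sensor face: sum(w_i*f_i)/sum(w_i).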
pyvale/output.py
ADDED
@@ -0,0 +1,17 @@
+# ==============================================================================
+# pyvale: the python validation engine
+# License: MIT
+# Copyright (C) 2025 The Computer Aided Validation Team
+# ==============================================================================
+from dataclasses import dataclass
+from pathlib import Path
+
+"""
+NOTE: this module is a feature under development.
+"""
+
+# TODO: remove this? but check blender tests
+
+@dataclass(slots=True)
+class Outputs():
+    base_dir = Path.home()
pyvale/pyvaleexceptions.py
ADDED
@@ -0,0 +1,11 @@
+# ==============================================================================
+# pyvale: the python validation engine
+# License: MIT
+# Copyright (C) 2025 The Computer Aided Validation Team
+# ==============================================================================
+
+class BlenderError(Exception):
+    pass
+
+class VisError(Exception):
+    pass
pyvale/raster.py
ADDED
@@ -0,0 +1,31 @@
+# ==============================================================================
+# pyvale: the python validation engine
+# License: MIT
+# Copyright (C) 2025 The Computer Aided Validation Team
+# ==============================================================================
+
+"""
+NOTE: this module is a feature under development.
"""
+
+from abc import ABC, abstractmethod
+import numpy as np
+
+# NOTE:
+# - Need to render a single frame static/deformed
+# - Need to render all frames static/deformed
+class IRaster(ABC):
+    @abstractmethod
+    def render_static_frame(self) -> None:
+        pass
+
+    @abstractmethod
+    def render_deformed_frame(self) -> None:
+        pass
+
+
+# NOTE:
+# - Manages parallelisation/saving with different rendering backends
+# - Uses multi-processing
+class Raster:
+    pass
pyvale/rastercy.py
ADDED
@@ -0,0 +1,77 @@
+# ==============================================================================
+# pyvale: the python validation engine
+# License: MIT
+# Copyright (C) 2025 The Computer Aided Validation Team
+# ==============================================================================
+
+"""
+NOTE: this module is a feature under development.
+"""
+
+from pathlib import Path
+from multiprocessing.pool import Pool
+import numpy as np
+from pyvale.cameradata import CameraData
+from pyvale.rendermesh import RenderMeshData
+import pyvale.cython.rastercyth as rastercyth
+
+class RasterCY:
+    @staticmethod
+    def raster_static_mesh(cam_data: CameraData,
+                           render_mesh: RenderMeshData,
+                           threads_num: int | None = None,
+                           ) -> tuple[np.ndarray,np.ndarray,np.ndarray] | None:
+
+        frames_num = render_mesh.fields_render.shape[1]
+        field_num = render_mesh.fields_render.shape[2]
+
+        images_out = np.empty((cam_data.pixels_num[1],
+                               cam_data.pixels_num[0],
+                               frames_num,
+                               field_num))
+        depths_out = np.empty((cam_data.pixels_num[1],
+                               cam_data.pixels_num[0],
+                               frames_num))
+        elems_out = np.empty((frames_num,))
+
+        if threads_num is None:
+            for tt in range(frames_num):
+                (image_buffer,
+                 depth_buffer,
+                 elems_in_image) = rastercyth.raster_frame(
+                     render_mesh.coords,
+                     render_mesh.connectivity,
+                     render_mesh.fields_render[:,tt,:],
+                     cam_data)
+
+                images_out[:,:,tt,:] = image_buffer
+                depths_out[:,:,tt] = depth_buffer
+                elems_out[tt] = elems_in_image
+
+        else:
+            assert threads_num > 0, "Number of threads must be greater than 0."
+
+            with Pool(threads_num) as pool:
+                processes_with_id = []
+
+                for tt in range(frames_num):
+                    args = (render_mesh.coords,
+                            render_mesh.connectivity,
+                            render_mesh.fields_render[:,tt,:],
+                            cam_data)

+                    process = pool.apply_async(rastercyth.raster_frame, args=args)
+                    processes_with_id.append({"process": process,
+                                              "frame": tt})
+
+                for pp in processes_with_id:
+                    (image,depth,elems) = pp["process"].get()
+                    images_out[:,:,pp["frame"],:] = image
+                    depths_out[:,:,pp["frame"]] = depth
+                    elems_out[pp["frame"]] = elems
+
+        return (images_out,depths_out,elems_out)
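The parallel branch of raster_static_mesh above dispatches one frame per task with Pool.apply_async, tags each task with its frame index, and slots the results back into preallocated buffers. The sketch below reproduces that dispatch-and-collect pattern in isolation, assuming a dummy worker (dummy_raster_frame) and a wrapper (raster_all_frames) that are illustrative stand-ins for the compiled rastercyth.raster_frame and the method above, which need real camera and mesh data to run.

# Minimal sketch of the apply_async dispatch/collect pattern used above,
# with a dummy worker standing in for rastercyth.raster_frame.
from multiprocessing.pool import Pool
import numpy as np

def dummy_raster_frame(field_frame: np.ndarray) -> tuple[np.ndarray, float, int]:
    # Stand-in worker: returns an "image", a "depth" value and an element count
    return field_frame * 2.0, float(field_frame.mean()), field_frame.size

def raster_all_frames(fields: np.ndarray, threads_num: int = 4) -> np.ndarray:
    frames_num = fields.shape[1]
    images_out = np.empty_like(fields)

    with Pool(threads_num) as pool:
        # Dispatch one asynchronous task per frame, remembering its index
        tasks = [{"process": pool.apply_async(dummy_raster_frame,
                                              args=(fields[:, tt],)),
                  "frame": tt}
                 for tt in range(frames_num)]

        # Collect results and slot them back into the preallocated buffer
        for task in tasks:
            image, _depth, _elems = task["process"].get()
            images_out[:, task["frame"]] = image

    return images_out

if __name__ == "__main__":
    fields = np.random.default_rng(0).random((8, 5))  # 8 nodes, 5 frames
    print(raster_all_frames(fields).shape)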