pyvale 2025.5.3__cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pyvale might be problematic. Click here for more details.

Files changed (175) hide show
  1. pyvale/__init__.py +89 -0
  2. pyvale/analyticmeshgen.py +102 -0
  3. pyvale/analyticsimdatafactory.py +91 -0
  4. pyvale/analyticsimdatagenerator.py +323 -0
  5. pyvale/blendercalibrationdata.py +15 -0
  6. pyvale/blenderlightdata.py +26 -0
  7. pyvale/blendermaterialdata.py +15 -0
  8. pyvale/blenderrenderdata.py +30 -0
  9. pyvale/blenderscene.py +488 -0
  10. pyvale/blendertools.py +420 -0
  11. pyvale/camera.py +146 -0
  12. pyvale/cameradata.py +69 -0
  13. pyvale/cameradata2d.py +84 -0
  14. pyvale/camerastereo.py +217 -0
  15. pyvale/cameratools.py +522 -0
  16. pyvale/cython/rastercyth.c +32211 -0
  17. pyvale/cython/rastercyth.cpython-311-aarch64-linux-gnu.so +0 -0
  18. pyvale/cython/rastercyth.py +640 -0
  19. pyvale/data/__init__.py +5 -0
  20. pyvale/data/cal_target.tiff +0 -0
  21. pyvale/data/case00_HEX20_out.e +0 -0
  22. pyvale/data/case00_HEX27_out.e +0 -0
  23. pyvale/data/case00_HEX8_out.e +0 -0
  24. pyvale/data/case00_TET10_out.e +0 -0
  25. pyvale/data/case00_TET14_out.e +0 -0
  26. pyvale/data/case00_TET4_out.e +0 -0
  27. pyvale/data/case13_out.e +0 -0
  28. pyvale/data/case16_out.e +0 -0
  29. pyvale/data/case17_out.e +0 -0
  30. pyvale/data/case18_1_out.e +0 -0
  31. pyvale/data/case18_2_out.e +0 -0
  32. pyvale/data/case18_3_out.e +0 -0
  33. pyvale/data/case25_out.e +0 -0
  34. pyvale/data/case26_out.e +0 -0
  35. pyvale/data/optspeckle_2464x2056px_spec5px_8bit_gblur1px.tiff +0 -0
  36. pyvale/dataset.py +325 -0
  37. pyvale/errorcalculator.py +109 -0
  38. pyvale/errordriftcalc.py +146 -0
  39. pyvale/errorintegrator.py +336 -0
  40. pyvale/errorrand.py +607 -0
  41. pyvale/errorsyscalib.py +134 -0
  42. pyvale/errorsysdep.py +327 -0
  43. pyvale/errorsysfield.py +414 -0
  44. pyvale/errorsysindep.py +808 -0
  45. pyvale/examples/__init__.py +5 -0
  46. pyvale/examples/basics/ex1_1_basicscalars_therm2d.py +131 -0
  47. pyvale/examples/basics/ex1_2_sensormodel_therm2d.py +158 -0
  48. pyvale/examples/basics/ex1_3_customsens_therm3d.py +216 -0
  49. pyvale/examples/basics/ex1_4_basicerrors_therm3d.py +153 -0
  50. pyvale/examples/basics/ex1_5_fielderrs_therm3d.py +168 -0
  51. pyvale/examples/basics/ex1_6_caliberrs_therm2d.py +133 -0
  52. pyvale/examples/basics/ex1_7_spatavg_therm2d.py +123 -0
  53. pyvale/examples/basics/ex2_1_basicvectors_disp2d.py +112 -0
  54. pyvale/examples/basics/ex2_2_vectorsens_disp2d.py +111 -0
  55. pyvale/examples/basics/ex2_3_sensangle_disp2d.py +139 -0
  56. pyvale/examples/basics/ex2_4_chainfielderrs_disp2d.py +196 -0
  57. pyvale/examples/basics/ex2_5_vectorfields3d_disp3d.py +109 -0
  58. pyvale/examples/basics/ex3_1_basictensors_strain2d.py +114 -0
  59. pyvale/examples/basics/ex3_2_tensorsens2d_strain2d.py +111 -0
  60. pyvale/examples/basics/ex3_3_tensorsens3d_strain3d.py +182 -0
  61. pyvale/examples/basics/ex4_1_expsim2d_thermmech2d.py +171 -0
  62. pyvale/examples/basics/ex4_2_expsim3d_thermmech3d.py +252 -0
  63. pyvale/examples/genanalyticdata/ex1_1_scalarvisualisation.py +35 -0
  64. pyvale/examples/genanalyticdata/ex1_2_scalarcasebuild.py +43 -0
  65. pyvale/examples/genanalyticdata/ex2_1_analyticsensors.py +80 -0
  66. pyvale/examples/imagedef2d/ex_imagedef2d_todisk.py +79 -0
  67. pyvale/examples/renderblender/ex1_1_blenderscene.py +121 -0
  68. pyvale/examples/renderblender/ex1_2_blenderdeformed.py +119 -0
  69. pyvale/examples/renderblender/ex2_1_stereoscene.py +128 -0
  70. pyvale/examples/renderblender/ex2_2_stereodeformed.py +131 -0
  71. pyvale/examples/renderblender/ex3_1_blendercalibration.py +120 -0
  72. pyvale/examples/renderrasterisation/ex_rastenp.py +153 -0
  73. pyvale/examples/renderrasterisation/ex_rastercyth_oneframe.py +218 -0
  74. pyvale/examples/renderrasterisation/ex_rastercyth_static_cypara.py +187 -0
  75. pyvale/examples/renderrasterisation/ex_rastercyth_static_pypara.py +190 -0
  76. pyvale/examples/visualisation/ex1_1_plot_traces.py +102 -0
  77. pyvale/examples/visualisation/ex2_1_animate_sim.py +89 -0
  78. pyvale/experimentsimulator.py +175 -0
  79. pyvale/field.py +128 -0
  80. pyvale/fieldconverter.py +351 -0
  81. pyvale/fieldsampler.py +111 -0
  82. pyvale/fieldscalar.py +166 -0
  83. pyvale/fieldtensor.py +218 -0
  84. pyvale/fieldtransform.py +388 -0
  85. pyvale/fieldvector.py +213 -0
  86. pyvale/generatorsrandom.py +505 -0
  87. pyvale/imagedef2d.py +569 -0
  88. pyvale/integratorfactory.py +240 -0
  89. pyvale/integratorquadrature.py +217 -0
  90. pyvale/integratorrectangle.py +165 -0
  91. pyvale/integratorspatial.py +89 -0
  92. pyvale/integratortype.py +43 -0
  93. pyvale/output.py +17 -0
  94. pyvale/pyvaleexceptions.py +11 -0
  95. pyvale/raster.py +31 -0
  96. pyvale/rastercy.py +77 -0
  97. pyvale/rasternp.py +603 -0
  98. pyvale/rendermesh.py +147 -0
  99. pyvale/sensorarray.py +178 -0
  100. pyvale/sensorarrayfactory.py +196 -0
  101. pyvale/sensorarraypoint.py +278 -0
  102. pyvale/sensordata.py +71 -0
  103. pyvale/sensordescriptor.py +213 -0
  104. pyvale/sensortools.py +142 -0
  105. pyvale/simcases/case00_HEX20.i +242 -0
  106. pyvale/simcases/case00_HEX27.i +242 -0
  107. pyvale/simcases/case00_HEX8.i +242 -0
  108. pyvale/simcases/case00_TET10.i +242 -0
  109. pyvale/simcases/case00_TET14.i +242 -0
  110. pyvale/simcases/case00_TET4.i +242 -0
  111. pyvale/simcases/case01.i +101 -0
  112. pyvale/simcases/case02.i +156 -0
  113. pyvale/simcases/case03.i +136 -0
  114. pyvale/simcases/case04.i +181 -0
  115. pyvale/simcases/case05.i +234 -0
  116. pyvale/simcases/case06.i +305 -0
  117. pyvale/simcases/case07.geo +135 -0
  118. pyvale/simcases/case07.i +87 -0
  119. pyvale/simcases/case08.geo +144 -0
  120. pyvale/simcases/case08.i +153 -0
  121. pyvale/simcases/case09.geo +204 -0
  122. pyvale/simcases/case09.i +87 -0
  123. pyvale/simcases/case10.geo +204 -0
  124. pyvale/simcases/case10.i +257 -0
  125. pyvale/simcases/case11.geo +337 -0
  126. pyvale/simcases/case11.i +147 -0
  127. pyvale/simcases/case12.geo +388 -0
  128. pyvale/simcases/case12.i +329 -0
  129. pyvale/simcases/case13.i +140 -0
  130. pyvale/simcases/case14.i +159 -0
  131. pyvale/simcases/case15.geo +337 -0
  132. pyvale/simcases/case15.i +150 -0
  133. pyvale/simcases/case16.geo +391 -0
  134. pyvale/simcases/case16.i +357 -0
  135. pyvale/simcases/case17.geo +135 -0
  136. pyvale/simcases/case17.i +144 -0
  137. pyvale/simcases/case18.i +254 -0
  138. pyvale/simcases/case18_1.i +254 -0
  139. pyvale/simcases/case18_2.i +254 -0
  140. pyvale/simcases/case18_3.i +254 -0
  141. pyvale/simcases/case19.geo +252 -0
  142. pyvale/simcases/case19.i +99 -0
  143. pyvale/simcases/case20.geo +252 -0
  144. pyvale/simcases/case20.i +250 -0
  145. pyvale/simcases/case21.geo +74 -0
  146. pyvale/simcases/case21.i +155 -0
  147. pyvale/simcases/case22.geo +82 -0
  148. pyvale/simcases/case22.i +140 -0
  149. pyvale/simcases/case23.geo +164 -0
  150. pyvale/simcases/case23.i +140 -0
  151. pyvale/simcases/case24.geo +79 -0
  152. pyvale/simcases/case24.i +123 -0
  153. pyvale/simcases/case25.geo +82 -0
  154. pyvale/simcases/case25.i +140 -0
  155. pyvale/simcases/case26.geo +166 -0
  156. pyvale/simcases/case26.i +140 -0
  157. pyvale/simcases/run_1case.py +61 -0
  158. pyvale/simcases/run_all_cases.py +69 -0
  159. pyvale/simcases/run_build_case.py +64 -0
  160. pyvale/simcases/run_example_cases.py +69 -0
  161. pyvale/simtools.py +67 -0
  162. pyvale/visualexpplotter.py +191 -0
  163. pyvale/visualimagedef.py +74 -0
  164. pyvale/visualimages.py +76 -0
  165. pyvale/visualopts.py +493 -0
  166. pyvale/visualsimanimator.py +111 -0
  167. pyvale/visualsimsensors.py +318 -0
  168. pyvale/visualtools.py +136 -0
  169. pyvale/visualtraceplotter.py +142 -0
  170. pyvale-2025.5.3.dist-info/METADATA +144 -0
  171. pyvale-2025.5.3.dist-info/RECORD +175 -0
  172. pyvale-2025.5.3.dist-info/WHEEL +6 -0
  173. pyvale-2025.5.3.dist-info/licenses/LICENSE +21 -0
  174. pyvale-2025.5.3.dist-info/top_level.txt +1 -0
  175. pyvale.libs/libgomp-d22c30c5.so.1.0.0 +0 -0
pyvale/fieldtensor.py ADDED
@@ -0,0 +1,218 @@
1
+ # ==============================================================================
2
+ # pyvale: the python validation engine
3
+ # License: MIT
4
+ # Copyright (C) 2025 The Computer Aided Validation Team
5
+ # ==============================================================================
6
+
7
+ import numpy as np
8
+ import pyvista as pv
9
+ from scipy.spatial.transform import Rotation
10
+ import mooseherder as mh
11
+
12
+ from pyvale.field import IField
13
+ from pyvale.fieldconverter import simdata_to_pyvista
14
+ from pyvale.fieldsampler import sample_pyvista_grid
15
+ from pyvale.fieldtransform import (transform_tensor_2d,
16
+ transform_tensor_2d_batch,
17
+ transform_tensor_3d,
18
+ transform_tensor_3d_batch)
19
+
20
+ # TODO:
21
+ # - Checking to ensure normal and dev components are consistent
22
+
23
class FieldTensor(IField):
    """Class for sampling (interpolating) tensor fields from simulations to
    provide sensor values at specified locations and times.

    Implements the `IField` interface.
    """
    # NOTE(review): `_sim_data` and `_pyvista_vis` are assigned in __init__ but
    # were missing from __slots__ — harmless while IField has no __slots__ of
    # its own (instances then still get a __dict__), but a latent crash
    # otherwise. They are now listed. `_time_steps` is kept for backwards
    # compatibility although nothing in this class assigns it.
    __slots__ = ("_field_key","_spat_dims","_time_steps","_pyvista_grid",
                 "_norm_components","_dev_components","_sim_data",
                 "_pyvista_vis")

    def __init__(self,
                 sim_data: mh.SimData,
                 field_name: str,
                 norm_comps: tuple[str,...],
                 dev_comps: tuple[str,...],
                 elem_dims: int) -> None:
        """
        Parameters
        ----------
        sim_data : mh.SimData
            Simulation data object containing the mesh and field to interpolate.
        field_name : str
            String describing the tensor field. For example: 'strain'.
        norm_comps : tuple[str,...]
            String keys for the normal components of the field in the `SimData`
            object. For example: ('strain_xx','strain_yy').
        dev_comps : tuple[str,...]
            String keys for the deviatoric (shear) components of the field in
            the `SimData` object. For example: ('strain_xy',).
        elem_dims : int
            Number of spatial dimensions (2 or 3) used for identifying element
            types.
        """
        self._field_key = field_name
        self._norm_components = norm_comps
        self._dev_components = dev_comps
        self._spat_dims = elem_dims
        self._store_sim_data(sim_data)

    def _store_sim_data(self, sim_data: mh.SimData) -> None:
        # Stores the simulation data and (re)builds the pyvista sampling and
        # visualisation grids. Shared by __init__ and set_sim_data so the two
        # paths cannot drift apart.
        self._sim_data = sim_data
        (self._pyvista_grid,self._pyvista_vis) = simdata_to_pyvista(
            sim_data,
            self._norm_components+self._dev_components,
            self._spat_dims
        )

    def set_sim_data(self, sim_data: mh.SimData) -> None:
        """Sets the `SimData` object that will be interpolated to obtain sensor
        values. The purpose of this is to be able to apply the same sensor array
        to an array of different simulations by setting a different `SimData`.

        Parameters
        ----------
        sim_data : mh.SimData
            Mooseherder SimData object. Contains a mesh and a simulated
            physical field.
        """
        self._store_sim_data(sim_data)

    def get_sim_data(self) -> mh.SimData:
        """Gets the simulation data object associated with this field. Used by
        pyvale visualisation tools to display simulation data with simulated
        sensor values.

        Returns
        -------
        mh.SimData
            Mooseherder SimData object. Contains a mesh and a simulated
            physical field.
        """
        return self._sim_data

    def get_time_steps(self) -> np.ndarray:
        """Gets a 1D array of time steps from the simulation data.

        Returns
        -------
        np.ndarray
            1D array of simulation time steps. shape=(num_time_steps,)
        """
        return self._sim_data.time

    def get_visualiser(self) -> pv.UnstructuredGrid:
        """Gets a pyvista unstructured grid object for visualisation purposes.

        Returns
        -------
        pv.UnstructuredGrid
            Pyvista unstructured grid object containing only a mesh without any
            physical field data attached.
        """
        return self._pyvista_vis

    def get_all_components(self) -> tuple[str, ...]:
        """Gets the string keys for the components of the physical field: the
        normal components followed by the deviatoric (shear) components. For
        example: a tensor field might have ('strain_xx','strain_yy','strain_xy')
        in 2D.

        Returns
        -------
        tuple[str,...]
            Tuple containing the string keys for all components of the physical
            field.
        """
        return self._norm_components + self._dev_components

    def get_component_index(self, comp: str) -> int:
        """Gets the index for a component of the physical field. Used for
        getting the index of a component in the sensor measurement array.

        Parameters
        ----------
        comp : str
            String key for the field component (e.g. 'strain_xx').

        Returns
        -------
        int
            Index for the selected field component.

        Raises
        ------
        ValueError
            If `comp` is not one of this field's component keys.
        """
        return self.get_all_components().index(comp)

    def sample_field(self,
                     points: np.ndarray,
                     times: np.ndarray | None = None,
                     angles: tuple[Rotation,...] | None = None,
                     ) -> np.ndarray:
        """Samples (interpolates) the simulation field at the specified
        positions, times, and angles.

        Parameters
        ----------
        points : np.ndarray
            Spatial points to be sampled with the rows indicating the point
            number and the columns indicating the X, Y and Z coordinates.
        times : np.ndarray | None, optional
            Times to sample the underlying simulation. If None then the
            simulation time steps are used and no temporal interpolation is
            performed, by default None.
        angles : tuple[Rotation,...] | None, optional
            Angles to rotate the sampled values into with rotations specified
            with respect to the simulation world coordinates. If a single
            rotation is specified then all points are assumed to have the same
            angle and are batch processed for speed. If None then no rotation is
            performed, by default None.

        Returns
        -------
        np.ndarray
            An array of sampled (interpolated) values with the following
            dimensions: shape=(num_points,num_components,num_time_steps).
        """
        field_data = sample_pyvista_grid(
            self._norm_components+self._dev_components,
            self._pyvista_grid,
            self._sim_data.time,
            points,
            times)

        if angles is None:
            return field_data

        # NOTE:
        # ROTATION = object rotates with coords fixed.
        #   For Z rotation: sin negative in row 1.
        # TRANSFORMATION = coords rotate with object fixed.
        #   For Z transformation: sin negative in row 2, transpose scipy mat.

        # If we only have one angle we assume all sensors have the same angle
        # and we can batch process the rotations for speed.
        if len(angles) == 1:
            rmat = angles[0].as_matrix().T

            # TODO: assumes 2D tensors lie in the x-y plane
            if self._spat_dims == 2:
                rmat = rmat[:2,:2]
                field_data = transform_tensor_2d_batch(rmat,field_data)
            else:
                field_data = transform_tensor_3d_batch(rmat,field_data)
        else:
            # Each sensor has its own rotation so we must loop and rotate the
            # sampled values individually.
            # TODO: assumes 2D tensors lie in the x-y plane
            if self._spat_dims == 2:
                for ii,rr in enumerate(angles):
                    rmat = rr.as_matrix().T[:2,:2]
                    field_data[ii,:,:] = transform_tensor_2d(
                        rmat,field_data[ii,:,:])
            else:
                for ii,rr in enumerate(angles):
                    rmat = rr.as_matrix().T
                    field_data[ii,:,:] = transform_tensor_3d(
                        rmat,field_data[ii,:,:])

        return field_data
218
+
@@ -0,0 +1,388 @@
1
+ # ==============================================================================
2
+ # pyvale: the python validation engine
3
+ # License: MIT
4
+ # Copyright (C) 2025 The Computer Aided Validation Team
5
+ # ==============================================================================
6
+
7
+ """
8
+ This module contains a set of functions for transforming vector and tensor
9
+ fields based on an input transformation matrix.
10
+ """
11
+
12
+ import numpy as np
13
+
14
def transform_vector_2d(trans_mat: np.ndarray, vector: np.ndarray
                        ) -> np.ndarray:
    """Transforms a 2D vector field based on the input transformation matrix.

    Fix(review): the previous hand-expanded form used ``trans_mat[0,1]`` where
    ``trans_mat[1,0]`` is required for the Y row of the product, which is only
    correct for symmetric matrices. Rotation/transformation matrices are not
    symmetric, so the full matrix product is now applied.

    Parameters
    ----------
    trans_mat : np.ndarray
        Transformation matrix with shape=(2,2).
    vector : np.ndarray
        Vector field with shape=(2,num_points), where the first row are the X
        components of the field and the second row are the Y components.

    Returns
    -------
    np.ndarray
        Transformed vector field with shape=(2,num_points).
    """
    # v' = M @ v applied to every column (point) at once.
    return trans_mat @ vector
39
+
40
+
41
def transform_vector_3d(trans_mat: np.ndarray, vector: np.ndarray
                        ) -> np.ndarray:
    """Transforms a 3D vector field based on the input transformation matrix.

    Fix(review): the previous hand-expanded form used upper-triangle entries
    (``trans_mat[0,1]``, ``trans_mat[0,2]``, ``trans_mat[1,2]``) where the
    lower-triangle entries are required for the Y and Z rows, which is only
    correct for symmetric matrices. The full matrix product is now applied.

    Parameters
    ----------
    trans_mat : np.ndarray
        Transformation matrix with shape=(3,3).
    vector : np.ndarray
        Vector field with shape=(3,num_points), where the rows are the X, Y
        and Z components of the vector field.

    Returns
    -------
    np.ndarray
        Transformed vector field with shape=(3,num_points).
    """
    # v' = M @ v applied to every column (point) at once.
    return trans_mat @ vector
72
+
73
def transform_vector_2d_batch(trans_mat: np.ndarray, vector: np.ndarray
                              ) -> np.ndarray:
    """Performs a batched 2D vector transformation for a series of sensors
    assuming all sensors have the same transformation matrix.

    Fix(review): the previous hand-expanded form used ``trans_mat[0,1]`` where
    ``trans_mat[1,0]`` is required for the Y row, which is only correct for
    symmetric matrices. The full matrix product is now applied.

    Parameters
    ----------
    trans_mat : np.ndarray
        Transformation matrix with shape=(2,2).
    vector : np.ndarray
        Input vector field to transform with shape=(num_sensors,2,num_time_steps
        ) where the second dimension is the X and Y components of the vector
        field.

    Returns
    -------
    np.ndarray
        Transformed vector field with shape=(num_sensors,2,num_time_steps),
        where the second dimension is the X and Y components of the
        transformed vector field.
    """
    # Contract the matrix against the component axis (axis 1) of every sensor.
    return np.einsum("ij,sjt->sit", trans_mat, vector)
103
+
104
+
105
def transform_vector_3d_batch(trans_mat: np.ndarray, vector: np.ndarray
                              ) -> np.ndarray:
    """Performs a batched 3D vector transformation for a series of sensors
    assuming all sensors have the same transformation matrix.

    Fix(review): the previous hand-expanded form used upper-triangle matrix
    entries where lower-triangle entries are required for the Y and Z rows,
    which is only correct for symmetric matrices. The full matrix product is
    now applied.

    Parameters
    ----------
    trans_mat : np.ndarray
        Transformation matrix with shape=(3,3).
    vector : np.ndarray
        Input vector field to transform with shape=(num_sensors,3,num_time_steps
        ) where the second dimension is the X, Y and Z components of the vector
        field.

    Returns
    -------
    np.ndarray
        Transformed vector field with shape=(num_sensors,3,num_time_steps),
        where the second dimension is the X, Y and Z components of the
        transformed vector field.
    """
    # Contract the matrix against the component axis (axis 1) of every sensor.
    return np.einsum("ij,sjt->sit", trans_mat, vector)
140
+
141
def transform_tensor_2d(trans_mat: np.ndarray, tensor: np.ndarray
                        ) -> np.ndarray:
    """Transforms a 2D tensor field assuming the shear terms are symmetric.

    Fix(review): the previous hand-expanded form read ``trans_mat[0,1]`` where
    ``trans_mat[1,0]`` was required, effectively computing ``S.T.S^T`` with
    ``S`` the symmetrized upper triangle of the matrix. For a rotation this
    flips the sign of every sine cross-term. The transform is now the correct
    ``M.T.M^T`` for any input matrix (identical results for symmetric ones).

    Parameters
    ----------
    trans_mat : np.ndarray
        Transformation matrix with shape=(2,2).
    tensor : np.ndarray
        Tensor field with shape=(3,num_points) where the rows are the XX, YY
        and XY components of the tensor field.

    Returns
    -------
    np.ndarray
        Transformed tensor field with shape=(3,num_points) where the rows are
        the XX, YY and XY components of the tensor field.
    """
    (xx,yy,xy) = (0,1,2)

    # Assemble the full symmetric 2x2 tensor at every point: shape=(2,2,n).
    full = tensor[np.array([[xx,xy],
                            [xy,yy]]),:]

    # T' = M.T.M^T at every point (a,b: output indices; c,d: summed).
    rotated = np.einsum("ac,cdp,bd->abp",trans_mat,full,trans_mat)

    tensor_trans = np.zeros_like(tensor)
    tensor_trans[xx,:] = rotated[0,0,:]
    tensor_trans[yy,:] = rotated[1,1,:]
    tensor_trans[xy,:] = rotated[0,1,:]
    return tensor_trans
178
+
179
+
180
def transform_tensor_3d(trans_mat: np.ndarray, tensor: np.ndarray
                        ) -> np.ndarray:
    """Transforms a 3D tensor field assuming all the shear terms are symmetric.

    Fix(review): the previous hand-expanded form read upper-triangle matrix
    entries (``[0,1]``, ``[0,2]``, ``[1,2]``) where the lower-triangle ones
    (``[1,0]``, ``[2,0]``, ``[2,1]``) were required, which is only valid for
    symmetric matrices — not rotations. The transform is now the correct
    ``M.T.M^T`` for any input matrix (identical results for symmetric ones).

    Parameters
    ----------
    trans_mat : np.ndarray
        Transformation matrix with shape=(3,3).
    tensor : np.ndarray
        Tensor field with shape=(6,num_points), where the rows are the XX, YY,
        ZZ, XY, XZ and YZ components of the field.

    Returns
    -------
    np.ndarray
        Transformed tensor field with shape=(6,num_points), where the rows are
        the XX, YY, ZZ, XY, XZ and YZ components of the field.
    """
    (xx,yy,zz,xy,xz,yz) = (0,1,2,3,4,5)

    # Assemble the full symmetric 3x3 tensor at every point: shape=(3,3,n).
    full = tensor[np.array([[xx,xy,xz],
                            [xy,yy,yz],
                            [xz,yz,zz]]),:]

    # T' = M.T.M^T at every point (a,b: output indices; c,d: summed).
    rotated = np.einsum("ac,cdp,bd->abp",trans_mat,full,trans_mat)

    tensor_trans = np.zeros_like(tensor)
    tensor_trans[xx,:] = rotated[0,0,:]
    tensor_trans[yy,:] = rotated[1,1,:]
    tensor_trans[zz,:] = rotated[2,2,:]
    tensor_trans[xy,:] = rotated[0,1,:]
    tensor_trans[xz,:] = rotated[0,2,:]
    tensor_trans[yz,:] = rotated[1,2,:]
    return tensor_trans
262
+
263
+
264
def transform_tensor_2d_batch(trans_mat: np.ndarray, tensor: np.ndarray
                              ) -> np.ndarray:
    """Performs a batched transformation of a 2D tensor field assuming the shear
    terms are symmetric. Assumes the same transformation is applied to all
    sensors in the array so they can be processed together for speed.

    Fix(review): the previous hand-expanded form read ``trans_mat[0,1]`` where
    ``trans_mat[1,0]`` was required, which is only valid for symmetric
    matrices — not rotations. The transform is now the correct ``M.T.M^T``
    for any input matrix (identical results for symmetric ones).

    Parameters
    ----------
    trans_mat : np.ndarray
        Transformation matrix with shape=(2,2).
    tensor : np.ndarray
        Tensor field with shape=(num_sensors,3,num_time_steps) where the second
        dimension holds the XX, YY and XY components of the tensor field.

    Returns
    -------
    np.ndarray
        Transformed tensor field with shape=(num_sensors,3,num_time_steps) where
        the second dimension holds the XX, YY and XY components of the tensor
        field.
    """
    (xx,yy,xy) = (0,1,2)

    # Assemble the full symmetric 2x2 tensor for every sensor and time step:
    # shape=(num_sensors,2,2,num_time_steps).
    full = tensor[:,np.array([[xx,xy],
                              [xy,yy]]),:]

    # T' = M.T.M^T for every sensor/time (a,b: output indices; c,d: summed).
    rotated = np.einsum("ac,scdt,bd->sabt",trans_mat,full,trans_mat)

    tensor_trans = np.zeros_like(tensor)
    tensor_trans[:,xx,:] = rotated[:,0,0,:]
    tensor_trans[:,yy,:] = rotated[:,1,1,:]
    tensor_trans[:,xy,:] = rotated[:,0,1,:]
    return tensor_trans
303
+
304
+
305
def transform_tensor_3d_batch(trans_mat: np.ndarray, tensor: np.ndarray
                              ) -> np.ndarray:
    """Performs a batched transformation of a 3D tensor field assuming all the
    shear terms are symmetric. Assumes all sensors have the same transformation
    applied so they can be processed together for speed.

    Fix(review): the previous hand-expanded form read upper-triangle matrix
    entries where the lower-triangle ones were required, which is only valid
    for symmetric matrices — not rotations. The transform is now the correct
    ``M.T.M^T`` for any input matrix (identical results for symmetric ones).

    Parameters
    ----------
    trans_mat : np.ndarray
        Transformation matrix with shape=(3,3).
    tensor : np.ndarray
        Tensor field with shape=(num_sensors,6,num_time_steps), where the
        second dimension holds the XX, YY, ZZ, XY, XZ and YZ components of the
        field.

    Returns
    -------
    np.ndarray
        Transformed tensor field with shape=(num_sensors,6,num_time_steps),
        where the second dimension holds the XX, YY, ZZ, XY, XZ and YZ
        components of the field.
    """
    (xx,yy,zz,xy,xz,yz) = (0,1,2,3,4,5)

    # Assemble the full symmetric 3x3 tensor for every sensor and time step:
    # shape=(num_sensors,3,3,num_time_steps).
    full = tensor[:,np.array([[xx,xy,xz],
                              [xy,yy,yz],
                              [xz,yz,zz]]),:]

    # T' = M.T.M^T for every sensor/time (a,b: output indices; c,d: summed).
    rotated = np.einsum("ac,scdt,bd->sabt",trans_mat,full,trans_mat)

    tensor_trans = np.zeros_like(tensor)
    tensor_trans[:,xx,:] = rotated[:,0,0,:]
    tensor_trans[:,yy,:] = rotated[:,1,1,:]
    tensor_trans[:,zz,:] = rotated[:,2,2,:]
    tensor_trans[:,xy,:] = rotated[:,0,1,:]
    tensor_trans[:,xz,:] = rotated[:,0,2,:]
    tensor_trans[:,yz,:] = rotated[:,1,2,:]
    return tensor_trans