pyvale 2025.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pyvale might be problematic.

Files changed (157)
  1. pyvale/__init__.py +75 -0
  2. pyvale/core/__init__.py +7 -0
  3. pyvale/core/analyticmeshgen.py +59 -0
  4. pyvale/core/analyticsimdatafactory.py +63 -0
  5. pyvale/core/analyticsimdatagenerator.py +160 -0
  6. pyvale/core/camera.py +146 -0
  7. pyvale/core/cameradata.py +64 -0
  8. pyvale/core/cameradata2d.py +82 -0
  9. pyvale/core/cameratools.py +328 -0
  10. pyvale/core/cython/rastercyth.c +32267 -0
  11. pyvale/core/cython/rastercyth.py +636 -0
  12. pyvale/core/dataset.py +250 -0
  13. pyvale/core/errorcalculator.py +112 -0
  14. pyvale/core/errordriftcalc.py +146 -0
  15. pyvale/core/errorintegrator.py +339 -0
  16. pyvale/core/errorrand.py +614 -0
  17. pyvale/core/errorsysdep.py +331 -0
  18. pyvale/core/errorsysfield.py +407 -0
  19. pyvale/core/errorsysindep.py +905 -0
  20. pyvale/core/experimentsimulator.py +99 -0
  21. pyvale/core/field.py +136 -0
  22. pyvale/core/fieldconverter.py +154 -0
  23. pyvale/core/fieldsampler.py +112 -0
  24. pyvale/core/fieldscalar.py +167 -0
  25. pyvale/core/fieldtensor.py +221 -0
  26. pyvale/core/fieldtransform.py +384 -0
  27. pyvale/core/fieldvector.py +215 -0
  28. pyvale/core/generatorsrandom.py +528 -0
  29. pyvale/core/imagedef2d.py +566 -0
  30. pyvale/core/integratorfactory.py +241 -0
  31. pyvale/core/integratorquadrature.py +192 -0
  32. pyvale/core/integratorrectangle.py +88 -0
  33. pyvale/core/integratorspatial.py +90 -0
  34. pyvale/core/integratortype.py +44 -0
  35. pyvale/core/optimcheckfuncs.py +153 -0
  36. pyvale/core/raster.py +31 -0
  37. pyvale/core/rastercy.py +76 -0
  38. pyvale/core/rasternp.py +604 -0
  39. pyvale/core/rendermesh.py +156 -0
  40. pyvale/core/sensorarray.py +179 -0
  41. pyvale/core/sensorarrayfactory.py +210 -0
  42. pyvale/core/sensorarraypoint.py +280 -0
  43. pyvale/core/sensordata.py +72 -0
  44. pyvale/core/sensordescriptor.py +101 -0
  45. pyvale/core/sensortools.py +143 -0
  46. pyvale/core/visualexpplotter.py +151 -0
  47. pyvale/core/visualimagedef.py +71 -0
  48. pyvale/core/visualimages.py +75 -0
  49. pyvale/core/visualopts.py +180 -0
  50. pyvale/core/visualsimanimator.py +83 -0
  51. pyvale/core/visualsimplotter.py +182 -0
  52. pyvale/core/visualtools.py +81 -0
  53. pyvale/core/visualtraceplotter.py +256 -0
  54. pyvale/data/__init__.py +7 -0
  55. pyvale/data/case13_out.e +0 -0
  56. pyvale/data/case16_out.e +0 -0
  57. pyvale/data/case17_out.e +0 -0
  58. pyvale/data/case18_1_out.e +0 -0
  59. pyvale/data/case18_2_out.e +0 -0
  60. pyvale/data/case18_3_out.e +0 -0
  61. pyvale/data/case25_out.e +0 -0
  62. pyvale/data/case26_out.e +0 -0
  63. pyvale/data/optspeckle_2464x2056px_spec5px_8bit_gblur1px.tiff +0 -0
  64. pyvale/examples/__init__.py +7 -0
  65. pyvale/examples/analyticdatagen/__init__.py +7 -0
  66. pyvale/examples/analyticdatagen/ex1_1_scalarvisualisation.py +38 -0
  67. pyvale/examples/analyticdatagen/ex1_2_scalarcasebuild.py +46 -0
  68. pyvale/examples/analyticdatagen/ex2_1_analyticsensors.py +83 -0
  69. pyvale/examples/ex1_1_thermal2d.py +89 -0
  70. pyvale/examples/ex1_2_thermal2d.py +111 -0
  71. pyvale/examples/ex1_3_thermal2d.py +113 -0
  72. pyvale/examples/ex1_4_thermal2d.py +89 -0
  73. pyvale/examples/ex1_5_thermal2d.py +105 -0
  74. pyvale/examples/ex2_1_thermal3d .py +87 -0
  75. pyvale/examples/ex2_2_thermal3d.py +51 -0
  76. pyvale/examples/ex2_3_thermal3d.py +109 -0
  77. pyvale/examples/ex3_1_displacement2d.py +47 -0
  78. pyvale/examples/ex3_2_displacement2d.py +79 -0
  79. pyvale/examples/ex3_3_displacement2d.py +104 -0
  80. pyvale/examples/ex3_4_displacement2d.py +105 -0
  81. pyvale/examples/ex4_1_strain2d.py +57 -0
  82. pyvale/examples/ex4_2_strain2d.py +79 -0
  83. pyvale/examples/ex4_3_strain2d.py +100 -0
  84. pyvale/examples/ex5_1_multiphysics2d.py +78 -0
  85. pyvale/examples/ex6_1_multiphysics2d_expsim.py +118 -0
  86. pyvale/examples/ex6_2_multiphysics3d_expsim.py +158 -0
  87. pyvale/examples/features/__init__.py +7 -0
  88. pyvale/examples/features/ex_animation_tools_3dmonoblock.py +83 -0
  89. pyvale/examples/features/ex_area_avg.py +89 -0
  90. pyvale/examples/features/ex_calibration_error.py +108 -0
  91. pyvale/examples/features/ex_chain_field_errs.py +141 -0
  92. pyvale/examples/features/ex_field_errs.py +78 -0
  93. pyvale/examples/features/ex_sensor_single_angle_batch.py +110 -0
  94. pyvale/examples/imagedef2d/ex_imagedef2d_todisk.py +86 -0
  95. pyvale/examples/rasterisation/ex_rastenp.py +154 -0
  96. pyvale/examples/rasterisation/ex_rastercyth_oneframe.py +220 -0
  97. pyvale/examples/rasterisation/ex_rastercyth_static_cypara.py +194 -0
  98. pyvale/examples/rasterisation/ex_rastercyth_static_pypara.py +193 -0
  99. pyvale/simcases/case00_HEX20.i +242 -0
  100. pyvale/simcases/case00_HEX27.i +242 -0
  101. pyvale/simcases/case00_TET10.i +242 -0
  102. pyvale/simcases/case00_TET14.i +242 -0
  103. pyvale/simcases/case01.i +101 -0
  104. pyvale/simcases/case02.i +156 -0
  105. pyvale/simcases/case03.i +136 -0
  106. pyvale/simcases/case04.i +181 -0
  107. pyvale/simcases/case05.i +234 -0
  108. pyvale/simcases/case06.i +305 -0
  109. pyvale/simcases/case07.geo +135 -0
  110. pyvale/simcases/case07.i +87 -0
  111. pyvale/simcases/case08.geo +144 -0
  112. pyvale/simcases/case08.i +153 -0
  113. pyvale/simcases/case09.geo +204 -0
  114. pyvale/simcases/case09.i +87 -0
  115. pyvale/simcases/case10.geo +204 -0
  116. pyvale/simcases/case10.i +257 -0
  117. pyvale/simcases/case11.geo +337 -0
  118. pyvale/simcases/case11.i +147 -0
  119. pyvale/simcases/case12.geo +388 -0
  120. pyvale/simcases/case12.i +329 -0
  121. pyvale/simcases/case13.i +140 -0
  122. pyvale/simcases/case14.i +159 -0
  123. pyvale/simcases/case15.geo +337 -0
  124. pyvale/simcases/case15.i +150 -0
  125. pyvale/simcases/case16.geo +391 -0
  126. pyvale/simcases/case16.i +357 -0
  127. pyvale/simcases/case17.geo +135 -0
  128. pyvale/simcases/case17.i +144 -0
  129. pyvale/simcases/case18.i +254 -0
  130. pyvale/simcases/case18_1.i +254 -0
  131. pyvale/simcases/case18_2.i +254 -0
  132. pyvale/simcases/case18_3.i +254 -0
  133. pyvale/simcases/case19.geo +252 -0
  134. pyvale/simcases/case19.i +99 -0
  135. pyvale/simcases/case20.geo +252 -0
  136. pyvale/simcases/case20.i +250 -0
  137. pyvale/simcases/case21.geo +74 -0
  138. pyvale/simcases/case21.i +155 -0
  139. pyvale/simcases/case22.geo +82 -0
  140. pyvale/simcases/case22.i +140 -0
  141. pyvale/simcases/case23.geo +164 -0
  142. pyvale/simcases/case23.i +140 -0
  143. pyvale/simcases/case24.geo +79 -0
  144. pyvale/simcases/case24.i +123 -0
  145. pyvale/simcases/case25.geo +82 -0
  146. pyvale/simcases/case25.i +140 -0
  147. pyvale/simcases/case26.geo +166 -0
  148. pyvale/simcases/case26.i +140 -0
  149. pyvale/simcases/run_1case.py +61 -0
  150. pyvale/simcases/run_all_cases.py +69 -0
  151. pyvale/simcases/run_build_case.py +64 -0
  152. pyvale/simcases/run_example_cases.py +69 -0
  153. pyvale-2025.4.0.dist-info/METADATA +140 -0
  154. pyvale-2025.4.0.dist-info/RECORD +157 -0
  155. pyvale-2025.4.0.dist-info/WHEEL +5 -0
  156. pyvale-2025.4.0.dist-info/licenses/LICENSE +21 -0
  157. pyvale-2025.4.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,221 @@
+ """
+ ================================================================================
+ pyvale: the python validation engine
+ License: MIT
+ Copyright (C) 2025 The Computer Aided Validation Team
+ ================================================================================
+ """
+ import numpy as np
+ import pyvista as pv
+ from scipy.spatial.transform import Rotation
+ import mooseherder as mh
+
+ from pyvale.core.field import IField
+ from pyvale.core.fieldconverter import simdata_to_pyvista
+ from pyvale.core.fieldsampler import sample_pyvista_grid
+ from pyvale.core.fieldtransform import (transform_tensor_2d,
+                                         transform_tensor_2d_batch,
+                                         transform_tensor_3d,
+                                         transform_tensor_3d_batch)
+
+
+ class FieldTensor(IField):
+     """Class for sampling (interpolating) tensor fields from simulations to
+     provide sensor values at specified locations and times.
+
+     Implements the `IField` interface.
+     """
+     __slots__ = ("_field_key","_spat_dims","_sim_data","_pyvista_grid",
+                  "_pyvista_vis","_norm_components","_dev_components")
+
+     def __init__(self,
+                  sim_data: mh.SimData,
+                  field_key: str,
+                  norm_components: tuple[str,...],
+                  dev_components: tuple[str,...],
+                  spat_dims: int) -> None:
+         """Initialiser for the `FieldTensor` class.
+
+         Parameters
+         ----------
+         sim_data : mh.SimData
+             Simulation data object containing the mesh and field to interpolate.
+         field_key : str
+             String describing the tensor field. For example: 'strain'.
+         norm_components : tuple[str,...]
+             String keys to the normal components of the field in the `SimData`
+             object. For example: ('strain_xx','strain_yy').
+         dev_components : tuple[str,...]
+             String keys to the shear components of the field in the `SimData`
+             object. For example: ('strain_xy',).
+         spat_dims : int
+             Number of spatial dimensions (2 or 3) used for identifying element
+             types.
+         """
+         self._field_key = field_key
+         self._norm_components = norm_components
+         self._dev_components = dev_components
+         self._spat_dims = spat_dims
+
+         #TODO: do some checking to make sure norm/dev components are consistent
+         # based on the spatial dimensions
+
+         self._sim_data = sim_data
+         (self._pyvista_grid,self._pyvista_vis) = simdata_to_pyvista(
+             self._sim_data,
+             self._norm_components+self._dev_components,
+             self._spat_dims
+         )
+
+     def set_sim_data(self, sim_data: mh.SimData) -> None:
+         """Sets the `SimData` object that will be interpolated to obtain sensor
+         values. The purpose of this is to be able to apply the same sensor array
+         to an array of different simulations by setting a different `SimData`.
+
+         Parameters
+         ----------
+         sim_data : mh.SimData
+             Mooseherder SimData object. Contains a mesh and a simulated
+             physical field.
+         """
+         self._sim_data = sim_data
+         (self._pyvista_grid,self._pyvista_vis) = simdata_to_pyvista(
+             sim_data,
+             self._norm_components+self._dev_components,
+             self._spat_dims
+         )
+
+     def get_sim_data(self) -> mh.SimData:
+         """Gets the simulation data object associated with this field. Used by
+         pyvale visualisation tools to display simulation data with simulated
+         sensor values.
+
+         Returns
+         -------
+         mh.SimData
+             Mooseherder SimData object. Contains a mesh and a simulated
+             physical field.
+         """
+         return self._sim_data
+
+     def get_time_steps(self) -> np.ndarray:
+         """Gets a 1D array of time steps from the simulation data.
+
+         Returns
+         -------
+         np.ndarray
+             1D array of simulation time steps. shape=(num_time_steps,)
+         """
+         return self._sim_data.time
+
+     def get_visualiser(self) -> pv.UnstructuredGrid:
+         """Gets a pyvista unstructured grid object for visualisation purposes.
+
+         Returns
+         -------
+         pv.UnstructuredGrid
+             Pyvista unstructured grid object containing only a mesh without any
+             physical field data attached.
+         """
+         return self._pyvista_vis
+
+     def get_all_components(self) -> tuple[str, ...]:
+         """Gets the string keys for the components of the physical field. For
+         example: a tensor field might have ('strain_xx','strain_yy','strain_xy')
+         in 2D.
+
+         Returns
+         -------
+         tuple[str,...]
+             Tuple containing the string keys for all components of the physical
+             field.
+         """
+         return self._norm_components + self._dev_components
+
+     def get_component_index(self, comp: str) -> int:
+         """Gets the index for a component of the physical field. Used for
+         getting the index of a component in the sensor measurement array.
+
+         Parameters
+         ----------
+         comp : str
+             String key for the field component (e.g. 'temperature' or 'disp_x').
+
+         Returns
+         -------
+         int
+             Index for the selected field component.
+         """
+         return self.get_all_components().index(comp)
+
+     def sample_field(self,
+                      points: np.ndarray,
+                      times: np.ndarray | None = None,
+                      angles: tuple[Rotation,...] | None = None,
+                      ) -> np.ndarray:
+         """Samples (interpolates) the simulation field at the specified
+         positions, times, and angles.
+
+         Parameters
+         ----------
+         points : np.ndarray
+             Spatial points to be sampled with the rows indicating the point
+             number and the columns indicating the X, Y and Z coordinates.
+         times : np.ndarray | None, optional
+             Times to sample the underlying simulation. If None then the
+             simulation time steps are used and no temporal interpolation is
+             performed, by default None.
+         angles : tuple[Rotation,...] | None, optional
+             Angles to rotate the sampled values into with rotations specified
+             with respect to the simulation world coordinates. If a single
+             rotation is specified then all points are assumed to have the same
+             angle and are batch processed for speed. If None then no rotation is
+             performed, by default None.
+
+         Returns
+         -------
+         np.ndarray
+             An array of sampled (interpolated) values with the following
+             dimensions: shape=(num_points,num_components,num_time_steps).
+         """
+         field_data = sample_pyvista_grid(self._norm_components+self._dev_components,
+                                          self._pyvista_grid,
+                                          self._sim_data.time,
+                                          points,
+                                          times)
+
+         if angles is None:
+             return field_data
+
+         # NOTE:
+         # ROTATION= object rotates with coords fixed
+         # For Z rotation: sin negative in row 1.
+         # TRANSFORMATION= coords rotate with object fixed
+         # For Z transformation: sin negative in row 2, transpose scipy mat.
+
+
+         # If we only have one angle we assume all sensors have the same angle
+         # and we can batch process the rotations
+         if len(angles) == 1:
+             rmat = angles[0].as_matrix().T
+
+             #TODO: assumes 2D in the x-y plane
+             if self._spat_dims == 2:
+                 rmat = rmat[:2,:2]
+                 field_data = transform_tensor_2d_batch(rmat,field_data)
+             else:
+                 field_data = transform_tensor_3d_batch(rmat,field_data)
+
+         else: # Need to rotate each sensor using individual rotation = loop :(
+             #TODO: assumes 2D in the x-y plane
+             if self._spat_dims == 2:
+                 for ii,rr in enumerate(angles):
+                     rmat = rr.as_matrix().T
+                     rmat = rmat[:2,:2]
+                     field_data[ii,:,:] = transform_tensor_2d(rmat,field_data[ii,:,:])
+
+             else:
+                 for ii,rr in enumerate(angles):
+                     rmat = rr.as_matrix().T
+                     field_data[ii,:,:] = transform_tensor_3d(rmat,field_data[ii,:,:])
+
+
+         return field_data
+
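For orientation, the snippet below is a minimal usage sketch of the FieldTensor class shown above; it is not code shipped in the wheel. The exodus file path is a placeholder, and the mooseherder ExodusReader call and the 'strain_*' component keys are assumptions about the surrounding API and simulation output.

import numpy as np
from pathlib import Path
from scipy.spatial.transform import Rotation
import mooseherder as mh
from pyvale.core.fieldtensor import FieldTensor

# Load a MOOSE exodus output into a SimData object.
# NOTE: the reader call and the file path here are assumptions for this sketch.
sim_data = mh.ExodusReader(Path("sim_output.e")).read_all_sim_data()

# Build a 2D tensor field sampler. The component keys are assumed to match the
# variable names stored in the simulation output.
strain_field = FieldTensor(sim_data,
                           field_key="strain",
                           norm_components=("strain_xx", "strain_yy"),
                           dev_components=("strain_xy",),
                           spat_dims=2)

# Two sample locations (X, Y, Z) and a single shared in-plane rotation, so
# sample_field takes the batched transform branch.
points = np.array([[5.0, 5.0, 0.0],
                   [10.0, 5.0, 0.0]])
angles = (Rotation.from_euler("z", 45, degrees=True),)

samples = strain_field.sample_field(points, times=None, angles=angles)
# samples.shape == (num_points, num_components, num_time_steps)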
@@ -0,0 +1,384 @@
+ """
+ ================================================================================
+ pyvale: the python validation engine
+ License: MIT
+ Copyright (C) 2025 The Computer Aided Validation Team
+ ================================================================================
+ """
+ import numpy as np
+
+ def transform_vector_2d(trans_mat: np.ndarray, vector: np.ndarray
+                         ) -> np.ndarray:
+     """Transforms a 2D vector field based on the input transformation matrix.
+
+     Parameters
+     ----------
+     trans_mat : np.ndarray
+         Transformation matrix with shape=(2,2)
+     vector : np.ndarray
+         Vector field with shape=(2,num_points), where the first row holds the X
+         components of the field and the second row holds the Y components.
+
+     Returns
+     -------
+     np.ndarray
+         Transformed vector field with shape=(2,num_points).
+     """
+     vector_trans = np.zeros_like(vector)
+     (xx,yy) = (0,1)
+
+     vector_trans[xx,:] = (trans_mat[0,0]*vector[xx,:]
+                           + trans_mat[0,1]*vector[yy,:])
+     vector_trans[yy,:] = (trans_mat[0,1]*vector[xx,:]
+                           + trans_mat[1,1]*vector[yy,:])
+     return vector_trans
+
+
+ def transform_vector_3d(trans_mat: np.ndarray, vector: np.ndarray
+                         ) -> np.ndarray:
+     """Transforms a 3D vector field based on the input transformation matrix.
+
+     Parameters
+     ----------
+     trans_mat : np.ndarray
+         Transformation matrix with shape=(3,3).
+     vector : np.ndarray
+         Vector field with shape=(3,num_points), where the rows are the X, Y
+         and Z components of the vector field.
+
+     Returns
+     -------
+     np.ndarray
+         Transformed vector field with shape=(3,num_points).
+     """
+     vector_trans = np.zeros_like(vector)
+     (xx,yy,zz) = (0,1,2)
+
+     vector_trans[xx,:] = (trans_mat[0,0]*vector[xx,:]
+                           + trans_mat[0,1]*vector[yy,:]
+                           + trans_mat[0,2]*vector[zz,:])
+     vector_trans[yy,:] = (trans_mat[0,1]*vector[xx,:]
+                           + trans_mat[1,1]*vector[yy,:]
+                           + trans_mat[1,2]*vector[zz,:])
+     vector_trans[zz,:] = (trans_mat[0,2]*vector[xx,:]
+                           + trans_mat[1,2]*vector[yy,:]
+                           + trans_mat[2,2]*vector[zz,:])
+
+     return vector_trans
+
+ def transform_vector_2d_batch(trans_mat: np.ndarray, vector: np.ndarray
+                               ) -> np.ndarray:
+     """Performs a batched 2D vector transformation for a series of sensors
+     assuming all sensors have the same transformation matrix.
+
+     Parameters
+     ----------
+     trans_mat : np.ndarray
+         Transformation matrix with shape=(2,2).
+     vector : np.ndarray
+         Input vector field to transform with shape=(num_sensors,2,num_time_steps),
+         where the second dimension is the X and Y components of the vector
+         field.
+
+     Returns
+     -------
+     np.ndarray
+         Transformed vector field with shape=(num_sensors,2,num_time_steps),
+         where the second dimension is the X and Y components of the
+         transformed vector field.
+     """
+     vector_trans = np.zeros_like(vector)
+     (xx,yy) = (0,1)
+
+     vector_trans[:,xx,:] = (trans_mat[0,0]*vector[:,xx,:]
+                             + trans_mat[0,1]*vector[:,yy,:])
+     vector_trans[:,yy,:] = (trans_mat[0,1]*vector[:,xx,:]
+                             + trans_mat[1,1]*vector[:,yy,:])
+
+     return vector_trans
+
+
+ def transform_vector_3d_batch(trans_mat: np.ndarray, vector: np.ndarray
+                               ) -> np.ndarray:
+     """Performs a batched 3D vector transformation for a series of sensors
+     assuming all sensors have the same transformation matrix.
+
+     Parameters
+     ----------
+     trans_mat : np.ndarray
+         Transformation matrix with shape=(3,3).
+     vector : np.ndarray
+         Input vector field to transform with shape=(num_sensors,3,num_time_steps),
+         where the second dimension is the X, Y and Z components of the vector
+         field.
+
+     Returns
+     -------
+     np.ndarray
+         Transformed vector field with shape=(num_sensors,3,num_time_steps),
+         where the second dimension is the X, Y and Z components of the
+         transformed vector field.
+     """
+     vector_trans = np.zeros_like(vector)
+     (xx,yy,zz) = (0,1,2)
+
+     vector_trans[:,xx,:] = (trans_mat[0,0]*vector[:,xx,:]
+                             + trans_mat[0,1]*vector[:,yy,:]
+                             + trans_mat[0,2]*vector[:,zz,:])
+     vector_trans[:,yy,:] = (trans_mat[0,1]*vector[:,xx,:]
+                             + trans_mat[1,1]*vector[:,yy,:]
+                             + trans_mat[1,2]*vector[:,zz,:])
+     vector_trans[:,zz,:] = (trans_mat[0,2]*vector[:,xx,:]
+                             + trans_mat[1,2]*vector[:,yy,:]
+                             + trans_mat[2,2]*vector[:,zz,:])
+
+     return vector_trans
+
+ def transform_tensor_2d(trans_mat: np.ndarray, tensor: np.ndarray
+                         ) -> np.ndarray:
+     """Transforms a 2D tensor field assuming the shear terms are symmetric.
+
+     Parameters
+     ----------
+     trans_mat : np.ndarray
+         Transformation matrix with shape=(2,2)
+     tensor : np.ndarray
+         Tensor field with shape=(3,num_points) where the rows are the XX, YY
+         and XY components of the tensor field.
+
+     Returns
+     -------
+     np.ndarray
+         Transformed tensor field with shape=(3,num_points) where the rows are
+         the XX, YY and XY components of the tensor field.
+     """
+     tensor_trans = np.zeros_like(tensor)
+     (xx,yy,xy) = (0,1,2)
+
+     tensor_trans[xx,:] = (trans_mat[0,0]*(trans_mat[0,0]*tensor[xx,:]
+                                           + trans_mat[0,1]*tensor[xy,:])
+                           + trans_mat[0,1]*(trans_mat[0,0]*tensor[xy,:]
+                                             + trans_mat[0,1]*tensor[yy,:]))
+
+     tensor_trans[yy,:] = (trans_mat[0,1]*(trans_mat[0,1]*tensor[xx,:]
+                                           + trans_mat[1,1]*tensor[xy,:])
+                           + trans_mat[1,1]*(trans_mat[0,1]*tensor[xy,:]
+                                             + trans_mat[1,1]*tensor[yy,:]))
+
+     tensor_trans[xy,:] = (trans_mat[0,1]*(trans_mat[0,0]*tensor[xx,:]
+                                           + trans_mat[0,1]*tensor[xy,:])
+                           + trans_mat[1,1]*(trans_mat[0,0]*tensor[xy,:]
+                                             + trans_mat[0,1]*tensor[yy,:]))
+
+     return tensor_trans
+
+
+ def transform_tensor_3d(trans_mat: np.ndarray, tensor: np.ndarray
+                         ) -> np.ndarray:
+     """Transforms a 3D tensor field assuming all the shear terms are symmetric.
+
+     Parameters
+     ----------
+     trans_mat : np.ndarray
+         Transformation matrix with shape=(3,3).
+     tensor : np.ndarray
+         Tensor field with shape=(6,num_points), where the rows are the XX, YY,
+         ZZ, XY, XZ and YZ components of the field.
+
+     Returns
+     -------
+     np.ndarray
+         Transformed tensor field with shape=(6,num_points), where the rows are
+         the XX, YY, ZZ, XY, XZ and YZ components of the field.
+     """
+     tensor_trans = np.zeros_like(tensor)
+     (xx,yy,zz,xy,xz,yz) = (0,1,2,3,4,5)
+
+     tensor_trans[xx,:] = (trans_mat[0,0]*(trans_mat[0,0]*tensor[xx,:]
+                                           + trans_mat[0,1]*tensor[xy,:]
+                                           + trans_mat[0,2]*tensor[xz,:])
+                           + trans_mat[0,1]*(trans_mat[0,0]*tensor[xy,:]
+                                             + trans_mat[0,1]*tensor[yy,:]
+                                             + trans_mat[0,2]*tensor[yz,:])
+                           + trans_mat[0,2]*(trans_mat[0,0]*tensor[xz,:]
+                                             + trans_mat[0,1]*tensor[yz,:]
+                                             + trans_mat[0,2]*tensor[zz,:]))
+
+     tensor_trans[yy,:] = (trans_mat[0,1]*(trans_mat[0,1]*tensor[xx,:]
+                                           + trans_mat[1,1]*tensor[xy,:]
+                                           + trans_mat[1,2]*tensor[xz,:])
+                           + trans_mat[1,1]*(trans_mat[0,1]*tensor[xy,:]
+                                             + trans_mat[1,1]*tensor[yy,:]
+                                             + trans_mat[1,2]*tensor[yz,:])
+                           + trans_mat[1,2]*(trans_mat[0,1]*tensor[xz,:]
+                                             + trans_mat[1,1]*tensor[yz,:]
+                                             + trans_mat[1,2]*tensor[zz,:]))
+
+     tensor_trans[zz,:] = (trans_mat[0,2]*(trans_mat[0,2]*tensor[xx,:]
+                                           + trans_mat[1,2]*tensor[xy,:]
+                                           + trans_mat[2,2]*tensor[xz,:])
+                           + trans_mat[1,2]*(trans_mat[0,2]*tensor[xy,:]
+                                             + trans_mat[1,2]*tensor[yy,:]
+                                             + trans_mat[2,2]*tensor[yz,:])
+                           + trans_mat[2,2]*(trans_mat[0,2]*tensor[xz,:]
+                                             + trans_mat[1,2]*tensor[yz,:]
+                                             + trans_mat[2,2]*tensor[zz,:]))
+
+     tensor_trans[xy,:] = (trans_mat[0,1]*(trans_mat[0,0]*tensor[xx,:]
+                                           + trans_mat[0,1]*tensor[xy,:]
+                                           + trans_mat[0,2]*tensor[xz,:])
+                           + trans_mat[1,1]*(trans_mat[0,0]*tensor[xy,:]
+                                             + trans_mat[0,1]*tensor[yy,:]
+                                             + trans_mat[0,2]*tensor[yz,:])
+                           + trans_mat[1,2]*(trans_mat[0,0]*tensor[xz,:]
+                                             + trans_mat[0,1]*tensor[yz,:]
+                                             + trans_mat[0,2]*tensor[zz,:]))
+
+     tensor_trans[xz,:] = (trans_mat[0,2]*(trans_mat[0,0]*tensor[xx,:]
+                                           + trans_mat[0,1]*tensor[xy,:]
+                                           + trans_mat[0,2]*tensor[xz,:])
+                           + trans_mat[1,2]*(trans_mat[0,0]*tensor[xy,:]
+                                             + trans_mat[0,1]*tensor[yy,:]
+                                             + trans_mat[0,2]*tensor[yz,:])
+                           + trans_mat[2,2]*(trans_mat[0,0]*tensor[xz,:]
+                                             + trans_mat[0,1]*tensor[yz,:]
+                                             + trans_mat[0,2]*tensor[zz,:]))
+
+     tensor_trans[yz,:] = (trans_mat[0,2]*(trans_mat[0,1]*tensor[xx,:]
+                                           + trans_mat[1,1]*tensor[xy,:]
+                                           + trans_mat[1,2]*tensor[xz,:])
+                           + trans_mat[1,2]*(trans_mat[0,1]*tensor[xy,:]
+                                             + trans_mat[1,1]*tensor[yy,:]
+                                             + trans_mat[1,2]*tensor[yz,:])
+                           + trans_mat[2,2]*(trans_mat[0,1]*tensor[xz,:]
+                                             + trans_mat[1,1]*tensor[yz,:]
+                                             + trans_mat[1,2]*tensor[zz,:]))
+
+     return tensor_trans
+
+
+ def transform_tensor_2d_batch(trans_mat: np.ndarray, tensor: np.ndarray
+                               ) -> np.ndarray:
+     """Performs a batched transformation of a 2D tensor field assuming the
+     shear terms are symmetric. Assumes the same transformation is applied to
+     all sensors in the array so they can be processed together for speed.
+
+     Parameters
+     ----------
+     trans_mat : np.ndarray
+         Transformation matrix with shape=(2,2)
+     tensor : np.ndarray
+         Tensor field with shape=(num_sensors,3,num_time_steps) where the second
+         dimension is the XX, YY and XY components of the tensor field.
+
+     Returns
+     -------
+     np.ndarray
+         Transformed tensor field with shape=(num_sensors,3,num_time_steps)
+         where the second dimension is the XX, YY and XY components of the
+         tensor field.
+     """
+     tensor_trans = np.zeros_like(tensor)
+     (xx,yy,xy) = (0,1,2)
+
+     tensor_trans[:,xx,:] = (trans_mat[0,0]*(trans_mat[0,0]*tensor[:,xx,:]
+                                             + trans_mat[0,1]*tensor[:,xy,:])
+                             + trans_mat[0,1]*(trans_mat[0,0]*tensor[:,xy,:]
+                                               + trans_mat[0,1]*tensor[:,yy,:]))
+
+     tensor_trans[:,yy,:] = (trans_mat[0,1]*(trans_mat[0,1]*tensor[:,xx,:]
+                                             + trans_mat[1,1]*tensor[:,xy,:])
+                             + trans_mat[1,1]*(trans_mat[0,1]*tensor[:,xy,:]
+                                               + trans_mat[1,1]*tensor[:,yy,:]))
+
+     tensor_trans[:,xy,:] = (trans_mat[0,1]*(trans_mat[0,0]*tensor[:,xx,:]
+                                             + trans_mat[0,1]*tensor[:,xy,:])
+                             + trans_mat[1,1]*(trans_mat[0,0]*tensor[:,xy,:]
+                                               + trans_mat[0,1]*tensor[:,yy,:]))
+
+     return tensor_trans
+
+
+ def transform_tensor_3d_batch(trans_mat: np.ndarray, tensor: np.ndarray
+                               ) -> np.ndarray:
+     """Performs a batched transformation of a 3D tensor field assuming all the
+     shear terms are symmetric. Assumes all sensors have the same transformation
+     applied so they can be processed together for speed.
+
+     Parameters
+     ----------
+     trans_mat : np.ndarray
+         Transformation matrix with shape=(3,3).
+     tensor : np.ndarray
+         Tensor field with shape=(num_sensors,6,num_time_steps), where the
+         second dimension is the XX, YY, ZZ, XY, XZ and YZ components of the
+         field.
+
+     Returns
+     -------
+     np.ndarray
+         Transformed tensor field with shape=(num_sensors,6,num_time_steps),
+         where the second dimension is the XX, YY, ZZ, XY, XZ and YZ components
+         of the field.
+     """
+     tensor_trans = np.zeros_like(tensor)
+     (xx,yy,zz,xy,xz,yz) = (0,1,2,3,4,5)
+
+     tensor_trans[:,xx,:] = (trans_mat[0,0]*(trans_mat[0,0]*tensor[:,xx,:]
+                                             + trans_mat[0,1]*tensor[:,xy,:]
+                                             + trans_mat[0,2]*tensor[:,xz,:])
+                             + trans_mat[0,1]*(trans_mat[0,0]*tensor[:,xy,:]
+                                               + trans_mat[0,1]*tensor[:,yy,:]
+                                               + trans_mat[0,2]*tensor[:,yz,:])
+                             + trans_mat[0,2]*(trans_mat[0,0]*tensor[:,xz,:]
+                                               + trans_mat[0,1]*tensor[:,yz,:]
+                                               + trans_mat[0,2]*tensor[:,zz,:]))
+
+     tensor_trans[:,yy,:] = (trans_mat[0,1]*(trans_mat[0,1]*tensor[:,xx,:]
+                                             + trans_mat[1,1]*tensor[:,xy,:]
+                                             + trans_mat[1,2]*tensor[:,xz,:])
+                             + trans_mat[1,1]*(trans_mat[0,1]*tensor[:,xy,:]
+                                               + trans_mat[1,1]*tensor[:,yy,:]
+                                               + trans_mat[1,2]*tensor[:,yz,:])
+                             + trans_mat[1,2]*(trans_mat[0,1]*tensor[:,xz,:]
+                                               + trans_mat[1,1]*tensor[:,yz,:]
+                                               + trans_mat[1,2]*tensor[:,zz,:]))
+
+     tensor_trans[:,zz,:] = (trans_mat[0,2]*(trans_mat[0,2]*tensor[:,xx,:]
+                                             + trans_mat[1,2]*tensor[:,xy,:]
+                                             + trans_mat[2,2]*tensor[:,xz,:])
+                             + trans_mat[1,2]*(trans_mat[0,2]*tensor[:,xy,:]
+                                               + trans_mat[1,2]*tensor[:,yy,:]
+                                               + trans_mat[2,2]*tensor[:,yz,:])
+                             + trans_mat[2,2]*(trans_mat[0,2]*tensor[:,xz,:]
+                                               + trans_mat[1,2]*tensor[:,yz,:]
+                                               + trans_mat[2,2]*tensor[:,zz,:]))
+
+     tensor_trans[:,xy,:] = (trans_mat[0,1]*(trans_mat[0,0]*tensor[:,xx,:]
+                                             + trans_mat[0,1]*tensor[:,xy,:]
+                                             + trans_mat[0,2]*tensor[:,xz,:])
+                             + trans_mat[1,1]*(trans_mat[0,0]*tensor[:,xy,:]
+                                               + trans_mat[0,1]*tensor[:,yy,:]
+                                               + trans_mat[0,2]*tensor[:,yz,:])
+                             + trans_mat[1,2]*(trans_mat[0,0]*tensor[:,xz,:]
+                                               + trans_mat[0,1]*tensor[:,yz,:]
+                                               + trans_mat[0,2]*tensor[:,zz,:]))
+
+     tensor_trans[:,xz,:] = (trans_mat[0,2]*(trans_mat[0,0]*tensor[:,xx,:]
+                                             + trans_mat[0,1]*tensor[:,xy,:]
+                                             + trans_mat[0,2]*tensor[:,xz,:])
+                             + trans_mat[1,2]*(trans_mat[0,0]*tensor[:,xy,:]
+                                               + trans_mat[0,1]*tensor[:,yy,:]
+                                               + trans_mat[0,2]*tensor[:,yz,:])
+                             + trans_mat[2,2]*(trans_mat[0,0]*tensor[:,xz,:]
+                                               + trans_mat[0,1]*tensor[:,yz,:]
+                                               + trans_mat[0,2]*tensor[:,zz,:]))
+
+     tensor_trans[:,yz,:] = (trans_mat[0,2]*(trans_mat[0,1]*tensor[:,xx,:]
+                                             + trans_mat[1,1]*tensor[:,xy,:]
+                                             + trans_mat[1,2]*tensor[:,xz,:])
+                             + trans_mat[1,2]*(trans_mat[0,1]*tensor[:,xy,:]
+                                               + trans_mat[1,1]*tensor[:,yy,:]
+                                               + trans_mat[1,2]*tensor[:,yz,:])
+                             + trans_mat[2,2]*(trans_mat[0,1]*tensor[:,xz,:]
+                                               + trans_mat[1,1]*tensor[:,yz,:]
+                                               + trans_mat[1,2]*tensor[:,zz,:]))
+
+     return tensor_trans
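The transform functions above can also be exercised stand-alone. The sketch below uses illustrative values only (not package data): it pushes a purely XX tensor field through transform_tensor_2d with a hard-coded 90 degree in-plane transformation matrix, which maps the XX component onto YY.

import numpy as np
from pyvale.core.fieldtransform import transform_tensor_2d

# 90 degree in-plane transformation matrix (illustrative value).
trans_mat = np.array([[0.0, -1.0],
                      [1.0,  0.0]])

# Tensor field at four points: rows are the XX, YY and XY components.
tensor = np.zeros((3, 4))
tensor[0, :] = 1.0  # unit XX component everywhere, YY and XY zero

tensor_trans = transform_tensor_2d(trans_mat, tensor)
# The XX component is mapped onto YY:
# tensor_trans[0, :] == 0.0, tensor_trans[1, :] == 1.0, tensor_trans[2, :] == 0.0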