pyvale 2025.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pyvale might be problematic. Click here for more details.

Files changed (157) hide show
  1. pyvale/__init__.py +75 -0
  2. pyvale/core/__init__.py +7 -0
  3. pyvale/core/analyticmeshgen.py +59 -0
  4. pyvale/core/analyticsimdatafactory.py +63 -0
  5. pyvale/core/analyticsimdatagenerator.py +160 -0
  6. pyvale/core/camera.py +146 -0
  7. pyvale/core/cameradata.py +64 -0
  8. pyvale/core/cameradata2d.py +82 -0
  9. pyvale/core/cameratools.py +328 -0
  10. pyvale/core/cython/rastercyth.c +32267 -0
  11. pyvale/core/cython/rastercyth.py +636 -0
  12. pyvale/core/dataset.py +250 -0
  13. pyvale/core/errorcalculator.py +112 -0
  14. pyvale/core/errordriftcalc.py +146 -0
  15. pyvale/core/errorintegrator.py +339 -0
  16. pyvale/core/errorrand.py +614 -0
  17. pyvale/core/errorsysdep.py +331 -0
  18. pyvale/core/errorsysfield.py +407 -0
  19. pyvale/core/errorsysindep.py +905 -0
  20. pyvale/core/experimentsimulator.py +99 -0
  21. pyvale/core/field.py +136 -0
  22. pyvale/core/fieldconverter.py +154 -0
  23. pyvale/core/fieldsampler.py +112 -0
  24. pyvale/core/fieldscalar.py +167 -0
  25. pyvale/core/fieldtensor.py +221 -0
  26. pyvale/core/fieldtransform.py +384 -0
  27. pyvale/core/fieldvector.py +215 -0
  28. pyvale/core/generatorsrandom.py +528 -0
  29. pyvale/core/imagedef2d.py +566 -0
  30. pyvale/core/integratorfactory.py +241 -0
  31. pyvale/core/integratorquadrature.py +192 -0
  32. pyvale/core/integratorrectangle.py +88 -0
  33. pyvale/core/integratorspatial.py +90 -0
  34. pyvale/core/integratortype.py +44 -0
  35. pyvale/core/optimcheckfuncs.py +153 -0
  36. pyvale/core/raster.py +31 -0
  37. pyvale/core/rastercy.py +76 -0
  38. pyvale/core/rasternp.py +604 -0
  39. pyvale/core/rendermesh.py +156 -0
  40. pyvale/core/sensorarray.py +179 -0
  41. pyvale/core/sensorarrayfactory.py +210 -0
  42. pyvale/core/sensorarraypoint.py +280 -0
  43. pyvale/core/sensordata.py +72 -0
  44. pyvale/core/sensordescriptor.py +101 -0
  45. pyvale/core/sensortools.py +143 -0
  46. pyvale/core/visualexpplotter.py +151 -0
  47. pyvale/core/visualimagedef.py +71 -0
  48. pyvale/core/visualimages.py +75 -0
  49. pyvale/core/visualopts.py +180 -0
  50. pyvale/core/visualsimanimator.py +83 -0
  51. pyvale/core/visualsimplotter.py +182 -0
  52. pyvale/core/visualtools.py +81 -0
  53. pyvale/core/visualtraceplotter.py +256 -0
  54. pyvale/data/__init__.py +7 -0
  55. pyvale/data/case13_out.e +0 -0
  56. pyvale/data/case16_out.e +0 -0
  57. pyvale/data/case17_out.e +0 -0
  58. pyvale/data/case18_1_out.e +0 -0
  59. pyvale/data/case18_2_out.e +0 -0
  60. pyvale/data/case18_3_out.e +0 -0
  61. pyvale/data/case25_out.e +0 -0
  62. pyvale/data/case26_out.e +0 -0
  63. pyvale/data/optspeckle_2464x2056px_spec5px_8bit_gblur1px.tiff +0 -0
  64. pyvale/examples/__init__.py +7 -0
  65. pyvale/examples/analyticdatagen/__init__.py +7 -0
  66. pyvale/examples/analyticdatagen/ex1_1_scalarvisualisation.py +38 -0
  67. pyvale/examples/analyticdatagen/ex1_2_scalarcasebuild.py +46 -0
  68. pyvale/examples/analyticdatagen/ex2_1_analyticsensors.py +83 -0
  69. pyvale/examples/ex1_1_thermal2d.py +89 -0
  70. pyvale/examples/ex1_2_thermal2d.py +111 -0
  71. pyvale/examples/ex1_3_thermal2d.py +113 -0
  72. pyvale/examples/ex1_4_thermal2d.py +89 -0
  73. pyvale/examples/ex1_5_thermal2d.py +105 -0
  74. pyvale/examples/ex2_1_thermal3d .py +87 -0
  75. pyvale/examples/ex2_2_thermal3d.py +51 -0
  76. pyvale/examples/ex2_3_thermal3d.py +109 -0
  77. pyvale/examples/ex3_1_displacement2d.py +47 -0
  78. pyvale/examples/ex3_2_displacement2d.py +79 -0
  79. pyvale/examples/ex3_3_displacement2d.py +104 -0
  80. pyvale/examples/ex3_4_displacement2d.py +105 -0
  81. pyvale/examples/ex4_1_strain2d.py +57 -0
  82. pyvale/examples/ex4_2_strain2d.py +79 -0
  83. pyvale/examples/ex4_3_strain2d.py +100 -0
  84. pyvale/examples/ex5_1_multiphysics2d.py +78 -0
  85. pyvale/examples/ex6_1_multiphysics2d_expsim.py +118 -0
  86. pyvale/examples/ex6_2_multiphysics3d_expsim.py +158 -0
  87. pyvale/examples/features/__init__.py +7 -0
  88. pyvale/examples/features/ex_animation_tools_3dmonoblock.py +83 -0
  89. pyvale/examples/features/ex_area_avg.py +89 -0
  90. pyvale/examples/features/ex_calibration_error.py +108 -0
  91. pyvale/examples/features/ex_chain_field_errs.py +141 -0
  92. pyvale/examples/features/ex_field_errs.py +78 -0
  93. pyvale/examples/features/ex_sensor_single_angle_batch.py +110 -0
  94. pyvale/examples/imagedef2d/ex_imagedef2d_todisk.py +86 -0
  95. pyvale/examples/rasterisation/ex_rastenp.py +154 -0
  96. pyvale/examples/rasterisation/ex_rastercyth_oneframe.py +220 -0
  97. pyvale/examples/rasterisation/ex_rastercyth_static_cypara.py +194 -0
  98. pyvale/examples/rasterisation/ex_rastercyth_static_pypara.py +193 -0
  99. pyvale/simcases/case00_HEX20.i +242 -0
  100. pyvale/simcases/case00_HEX27.i +242 -0
  101. pyvale/simcases/case00_TET10.i +242 -0
  102. pyvale/simcases/case00_TET14.i +242 -0
  103. pyvale/simcases/case01.i +101 -0
  104. pyvale/simcases/case02.i +156 -0
  105. pyvale/simcases/case03.i +136 -0
  106. pyvale/simcases/case04.i +181 -0
  107. pyvale/simcases/case05.i +234 -0
  108. pyvale/simcases/case06.i +305 -0
  109. pyvale/simcases/case07.geo +135 -0
  110. pyvale/simcases/case07.i +87 -0
  111. pyvale/simcases/case08.geo +144 -0
  112. pyvale/simcases/case08.i +153 -0
  113. pyvale/simcases/case09.geo +204 -0
  114. pyvale/simcases/case09.i +87 -0
  115. pyvale/simcases/case10.geo +204 -0
  116. pyvale/simcases/case10.i +257 -0
  117. pyvale/simcases/case11.geo +337 -0
  118. pyvale/simcases/case11.i +147 -0
  119. pyvale/simcases/case12.geo +388 -0
  120. pyvale/simcases/case12.i +329 -0
  121. pyvale/simcases/case13.i +140 -0
  122. pyvale/simcases/case14.i +159 -0
  123. pyvale/simcases/case15.geo +337 -0
  124. pyvale/simcases/case15.i +150 -0
  125. pyvale/simcases/case16.geo +391 -0
  126. pyvale/simcases/case16.i +357 -0
  127. pyvale/simcases/case17.geo +135 -0
  128. pyvale/simcases/case17.i +144 -0
  129. pyvale/simcases/case18.i +254 -0
  130. pyvale/simcases/case18_1.i +254 -0
  131. pyvale/simcases/case18_2.i +254 -0
  132. pyvale/simcases/case18_3.i +254 -0
  133. pyvale/simcases/case19.geo +252 -0
  134. pyvale/simcases/case19.i +99 -0
  135. pyvale/simcases/case20.geo +252 -0
  136. pyvale/simcases/case20.i +250 -0
  137. pyvale/simcases/case21.geo +74 -0
  138. pyvale/simcases/case21.i +155 -0
  139. pyvale/simcases/case22.geo +82 -0
  140. pyvale/simcases/case22.i +140 -0
  141. pyvale/simcases/case23.geo +164 -0
  142. pyvale/simcases/case23.i +140 -0
  143. pyvale/simcases/case24.geo +79 -0
  144. pyvale/simcases/case24.i +123 -0
  145. pyvale/simcases/case25.geo +82 -0
  146. pyvale/simcases/case25.i +140 -0
  147. pyvale/simcases/case26.geo +166 -0
  148. pyvale/simcases/case26.i +140 -0
  149. pyvale/simcases/run_1case.py +61 -0
  150. pyvale/simcases/run_all_cases.py +69 -0
  151. pyvale/simcases/run_build_case.py +64 -0
  152. pyvale/simcases/run_example_cases.py +69 -0
  153. pyvale-2025.4.0.dist-info/METADATA +140 -0
  154. pyvale-2025.4.0.dist-info/RECORD +157 -0
  155. pyvale-2025.4.0.dist-info/WHEEL +5 -0
  156. pyvale-2025.4.0.dist-info/licenses/LICENSE +21 -0
  157. pyvale-2025.4.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,604 @@
1
+ """
2
+ ================================================================================
3
+ pyvale: the python validation engine
4
+ License: MIT
5
+ Copyright (C) 2025 The Computer Aided Validation Team
6
+ ================================================================================
7
+ """
8
+ from pathlib import Path
9
+ from multiprocessing.pool import Pool
10
+ import numpy as np
11
+ import numba
12
+ from pyvale.core.cameradata import CameraData
13
+ from pyvale.core.cameratools import CameraTools
14
+ from pyvale.core.rendermesh import RenderMeshData
15
+ import pyvale.core.cython.rastercyth as rastercyth
16
+
17
+
18
+ # NOTE: This module is a feature under development.
19
+
20
+
21
class RasterNP:
    """Pure-NumPy rasteriser for rendering nodal fields of a mesh to images."""

    @staticmethod
    def world_to_raster_coords(cam_data: CameraData,
                               coords_world: np.ndarray) -> np.ndarray:
        """Project homogeneous world coordinates into raster (pixel) space.

        coords_world.shape=(num_nodes,coord[X,Y,Z,W]); the returned array has
        the same shape with x,y in pixel units and z negated (depth positive).
        """
        # Project onto camera coords via the world-to-camera matrix.
        raster = np.matmul(coords_world, cam_data.world_to_cam_mat.T)

        # NOTE: w is only 1 for an affine transformation; for a perspective
        # projection it is not, so x,y,z are divided through by w here.
        raster[:, :3] = raster[:, :3] / raster[:, 3:4]

        # Image plane coords: perspective divide by the (negated) depth.
        raster[:, :2] = cam_data.image_dist * raster[:, :2] / -raster[:, 2:3]

        # Normalised device coords in the range [-1,1].
        raster[:, 0] = 2 * raster[:, 0] / cam_data.image_dims[0]
        raster[:, 1] = 2 * raster[:, 1] / cam_data.image_dims[1]

        # Raster coords: scale to pixels, flip the y axis, make depth positive.
        raster[:, 0] = (raster[:, 0] + 1) / 2 * cam_data.pixels_num[0]
        raster[:, 1] = (1 - raster[:, 1]) / 2 * cam_data.pixels_num[1]
        raster[:, 2] = -raster[:, 2]

        return raster
54
+
55
+
56
+ @staticmethod
57
+ def back_face_removal_mask(cam_data: CameraData,
58
+ coords_world: np.ndarray,
59
+ connect: np.ndarray
60
+ ) -> np.ndarray:
61
+ coords_cam = np.matmul(coords_world,cam_data.world_to_cam_mat.T)
62
+
63
+ # shape=(num_elems,nodes_per_elem,coord[x,y,z,w])
64
+ elem_cam_coords = coords_cam[connect,:]
65
+
66
+ # Calculate the normal vectors for all of the elements, remove the w coord
67
+ # shape=(num_elems,coord[x,y,z])
68
+ elem_cam_edge0 = elem_cam_coords[:,1,:-1] - elem_cam_coords[:,0,:-1]
69
+ elem_cam_edge1 = elem_cam_coords[:,2,:-1] - elem_cam_coords[:,0,:-1]
70
+ elem_cam_normals = np.cross(elem_cam_edge0,elem_cam_edge1,
71
+ axisa=1,axisb=1).T
72
+ elem_cam_normals = elem_cam_normals / np.linalg.norm(elem_cam_normals,axis=0)
73
+
74
+ cam_normal = np.array([0,0,1])
75
+ # shape=(num_elems,)
76
+ proj_elem_to_cam = np.dot(cam_normal,elem_cam_normals)
77
+
78
+ # NOTE this should be a numerical precision tolerance (epsilon)
79
+ back_face_mask = proj_elem_to_cam > 1e-6
80
+
81
+ return back_face_mask
82
+
83
+ @staticmethod
84
+ def crop_and_bound_by_connect(cam_data: CameraData,
85
+ coords_raster: np.ndarray,
86
+ connectivity: np.ndarray,
87
+ ) -> tuple[np.ndarray,np.ndarray]:
88
+
89
+ #shape=(num_elems,coord[x,y,z,w])
90
+ if coords_raster.ndim == 2:
91
+ coords_by_elem = coords_raster[connectivity,:]
92
+ else:
93
+ coords_by_elem = coords_raster[connectivity,:,:]
94
+
95
+ elem_raster_coord_min = np.min(coords_by_elem,axis=1)
96
+ elem_raster_coord_max = np.max(coords_by_elem,axis=1)
97
+
98
+ # Check that min/max nodes are within the 4 edges of the camera image
99
+ #shape=(4_edges_to_check,num_elems)
100
+ crop_mask = np.zeros([connectivity.shape[0],4],dtype=np.int8)
101
+ crop_mask[elem_raster_coord_min[:,0] <= (cam_data.pixels_num[0]-1), 0] = 1
102
+ crop_mask[elem_raster_coord_min[:,1] <= (cam_data.pixels_num[1]-1), 1] = 1
103
+ crop_mask[elem_raster_coord_max[:,0] >= 0, 2] = 1
104
+ crop_mask[elem_raster_coord_max[:,1] >= 0, 3] = 1
105
+ crop_mask = np.sum(crop_mask,axis=1) == 4
106
+
107
+ # Get only the elements that are within the FOV
108
+ # Mask the elem coords and the max and min elem coords for processing
109
+ elem_raster_coord_min = elem_raster_coord_min[crop_mask,:]
110
+ elem_raster_coord_max = elem_raster_coord_max[crop_mask,:]
111
+ num_elems_in_image = elem_raster_coord_min.shape[0]
112
+
113
+
114
+ # Find the indices of the bounding box that each element lies within on
115
+ # the image, bounded by the upper and lower edges of the image
116
+ elem_bound_boxes_inds = np.zeros([num_elems_in_image,4],dtype=np.int32)
117
+ elem_bound_boxes_inds[:,0] = RasterNP.elem_bound_box_low(
118
+ elem_raster_coord_min[:,0])
119
+ elem_bound_boxes_inds[:,1] = RasterNP.elem_bound_box_high(
120
+ elem_raster_coord_max[:,0],
121
+ cam_data.pixels_num[0]-1)
122
+ elem_bound_boxes_inds[:,2] = RasterNP.elem_bound_box_low(
123
+ elem_raster_coord_min[:,1])
124
+ elem_bound_boxes_inds[:,3] = RasterNP.elem_bound_box_high(
125
+ elem_raster_coord_max[:,1],
126
+ cam_data.pixels_num[1]-1)
127
+
128
+ return (crop_mask,elem_bound_boxes_inds)
129
+
130
+
131
+ @staticmethod
132
+ def elem_bound_box_low(coord_min: np.ndarray) -> np.ndarray:
133
+ bound_elem = np.floor(coord_min).astype(np.int32)
134
+ bound_low = np.zeros_like(coord_min,dtype=np.int32)
135
+ bound_mat = np.vstack((bound_elem,bound_low))
136
+ return np.max(bound_mat,axis=0)
137
+
138
+
139
+ @staticmethod
140
+ def elem_bound_box_high(coord_max: np.ndarray,image_px: int) -> np.ndarray:
141
+ bound_elem = np.ceil(coord_max).astype(np.int32)
142
+ bound_high = image_px*np.ones_like(coord_max,dtype=np.int32)
143
+ bound_mat = np.vstack((bound_elem,bound_high))
144
+ bound = np.min(bound_mat,axis=0)
145
+ return bound
146
+
147
+
148
+ @staticmethod
149
+ def setup_frame(cam_data: CameraData,
150
+ coords_world: np.ndarray,
151
+ connectivity: np.ndarray,
152
+ disp_field_frame: np.ndarray | None = None,
153
+ ) -> tuple[np.ndarray,np.ndarray,np.ndarray]:
154
+
155
+ connect_in_frame = np.copy(connectivity)
156
+ coords_deform = np.copy(coords_world)
157
+
158
+ #-----------------------------------------------------------------------
159
+ # DEFORM MESH WITH DISPLACEMENT
160
+ if disp_field_frame is not None:
161
+ # Exclude w coord from mesh deformation
162
+ coords_deform[:,:-1] = coords_deform[:,:-1] + disp_field_frame
163
+
164
+ #-----------------------------------------------------------------------
165
+ # Convert world coords of all elements in the scene
166
+ # shape=(num_nodes,coord[x,y,z,w])
167
+ coords_raster = RasterNP.world_to_raster_coords(cam_data,
168
+ coords_deform)
169
+
170
+ # Convert to perspective correct hyperbolic interpolation for z interp
171
+ # shape=(num_nodes,coord[x,y,z,w])
172
+ coords_raster[:,2] = 1/coords_raster[:,2]
173
+ # Remove w coord
174
+ coords_raster = coords_raster[:,:-1]
175
+
176
+ #-----------------------------------------------------------------------
177
+ # BACKFACE REMOVAL
178
+ # shape=(num_elems,)
179
+ back_face_mask = RasterNP.back_face_removal_mask(cam_data,
180
+ coords_deform,
181
+ connect_in_frame)
182
+ connect_in_frame = connect_in_frame[back_face_mask,:]
183
+
184
+ #-----------------------------------------------------------------------
185
+ # CROPPING & BOUNDING BOX OPERATIONS
186
+ (crop_mask,
187
+ elem_bound_box_inds) = RasterNP.crop_and_bound_by_connect(
188
+ cam_data,
189
+ coords_raster,
190
+ connect_in_frame,
191
+ )
192
+ connect_in_frame = connect_in_frame[crop_mask,:]
193
+
194
+ #-----------------------------------------------------------------------
195
+ # ELEMENT AREAS FOR INTERPOLATION
196
+ elem_raster_coords = coords_raster[connect_in_frame,:]
197
+ elem_areas = edge_function_slice(elem_raster_coords[:,0,:],
198
+ elem_raster_coords[:,1,:],
199
+ elem_raster_coords[:,2,:])
200
+
201
+ return (coords_raster,connect_in_frame,elem_bound_box_inds,elem_areas)
202
+
203
+
204
+ @staticmethod
205
+ def raster_elem(cam_data: CameraData,
206
+ elem_raster_coords: np.ndarray,
207
+ elem_bound_box_inds: np.ndarray,
208
+ elem_area: float,
209
+ field_divide_z: np.ndarray
210
+ ) -> tuple[np.ndarray,np.ndarray,np.ndarray,np.ndarray]:
211
+
212
+ # Create the subpixel coords inside the bounding box to test with the
213
+ # edge function. Use the pixel indices of the bounding box.
214
+ bound_subpx_x = np.arange(elem_bound_box_inds[0],
215
+ elem_bound_box_inds[1],
216
+ 1/cam_data.sub_samp) + 1/(2*cam_data.sub_samp)
217
+ bound_subpx_y = np.arange(elem_bound_box_inds[2],
218
+ elem_bound_box_inds[3],
219
+ 1/cam_data.sub_samp) + 1/(2*cam_data.sub_samp)
220
+ (bound_subpx_grid_x,bound_subpx_grid_y) = np.meshgrid(bound_subpx_x,
221
+ bound_subpx_y)
222
+ bound_coords_grid_shape = bound_subpx_grid_x.shape
223
+ # shape=(coord[x,y],num_subpx_in_box)
224
+ bound_subpx_coords_flat = np.vstack((bound_subpx_grid_x.flatten(),
225
+ bound_subpx_grid_y.flatten()))
226
+
227
+ # Create the subpixel indices for buffer slicing later
228
+ subpx_inds_x = np.arange(cam_data.sub_samp*elem_bound_box_inds[0],
229
+ cam_data.sub_samp*elem_bound_box_inds[1])
230
+ subpx_inds_y = np.arange(cam_data.sub_samp*elem_bound_box_inds[2],
231
+ cam_data.sub_samp*elem_bound_box_inds[3])
232
+ (subpx_inds_grid_x,subpx_inds_grid_y) = np.meshgrid(subpx_inds_x,
233
+ subpx_inds_y)
234
+
235
+
236
+ # We compute the edge function for all pixels in the box to determine if the
237
+ # pixel is inside the element or not
238
+ # NOTE: first axis of element_raster_coords is the node/vertex num.
239
+ # shape=(num_elems_in_bound,nodes_per_elem)
240
+ edge = np.zeros((3,bound_subpx_coords_flat.shape[1]),dtype=np.float64)
241
+ edge[0,:] = edge_function(elem_raster_coords[1,:],
242
+ elem_raster_coords[2,:],
243
+ bound_subpx_coords_flat)
244
+ edge[1,:] = edge_function(elem_raster_coords[2,:],
245
+ elem_raster_coords[0,:],
246
+ bound_subpx_coords_flat)
247
+ edge[2,:] = edge_function(elem_raster_coords[0,:],
248
+ elem_raster_coords[1,:],
249
+ bound_subpx_coords_flat)
250
+
251
+ # Now we check where the edge function is above zero for all edges
252
+ edge_check = np.zeros_like(edge,dtype=np.int8)
253
+ edge_check[edge >= 0.0] = 1
254
+ edge_check = np.sum(edge_check, axis=0)
255
+ # Create a mask with the check, TODO check the 3 here for non triangles
256
+ edge_mask_flat = edge_check == 3
257
+ edge_mask_grid = np.reshape(edge_mask_flat,bound_coords_grid_shape)
258
+
259
+ # Calculate the weights for the masked pixels
260
+ edge_masked = edge[:,edge_mask_flat]
261
+ interp_weights = edge_masked / elem_area
262
+
263
+ # Compute the depth of all pixels using hyperbolic interp
264
+ # NOTE: second index on raster coords is Z
265
+ px_coord_z = 1/(elem_raster_coords[0,2] * interp_weights[0,:]
266
+ + elem_raster_coords[1,2] * interp_weights[1,:]
267
+ + elem_raster_coords[2,2] * interp_weights[2,:])
268
+
269
+ field_interp = ((field_divide_z[0] * interp_weights[0,:]
270
+ + field_divide_z[1] * interp_weights[1,:]
271
+ + field_divide_z[2] * interp_weights[2,:])
272
+ * px_coord_z)
273
+
274
+ return (px_coord_z,
275
+ field_interp,
276
+ subpx_inds_grid_x[edge_mask_grid],
277
+ subpx_inds_grid_y[edge_mask_grid])
278
+
279
+ @staticmethod
280
+ def raster_frame(cam_data: CameraData,
281
+ connect_in_frame: np.ndarray,
282
+ coords_raster: np.ndarray,
283
+ elem_bound_box_inds: np.ndarray,
284
+ elem_areas: np.ndarray,
285
+ field_frame_div_z: np.ndarray
286
+ ) -> tuple[np.ndarray,np.ndarray,int]:
287
+ #connect_in_frame.shape=(num_elems,nodes_per_elem)
288
+ #coords_raster.shape=(num_coords,coord[x,y,z,w])
289
+ #elem_bound_box_inds.shape=(num_elems,[min_x,max_x,min_y,max_y])
290
+ #elem_areas.shape=(num_elems,)
291
+ #field_frame_divide_z=(num_coords,)
292
+
293
+ depth_buffer = 1e5*cam_data.image_dist*np.ones(
294
+ cam_data.sub_samp*cam_data.pixels_num).T
295
+ image_buffer = np.full(cam_data.sub_samp*cam_data.pixels_num,0.0).T
296
+
297
+ # elem_raster_coords.shape=(num_elems,nodes_per_elem,coord[x,y,z,w])
298
+ # field_divide_z.shape=(num_elems,nodes_per_elem,num_time_steps)
299
+ # elem_raster_coords.shape=(nodes_per_elem,coord[x,y,z,w])
300
+
301
+ for ee in range(connect_in_frame.shape[0]):
302
+ cc = connect_in_frame[ee,:]
303
+
304
+ (px_coord_z,
305
+ field_interp,
306
+ subpx_inds_x_in,
307
+ subpx_inds_y_in) = RasterNP.raster_elem(
308
+ cam_data,
309
+ coords_raster[cc,:],
310
+ elem_bound_box_inds[ee,:],
311
+ elem_areas[ee],
312
+ field_frame_div_z[cc])
313
+
314
+
315
+ # Build a mask to replace the depth information if there is already an
316
+ # element in front of the one we are rendering
317
+ px_coord_z_depth_mask = (px_coord_z
318
+ < depth_buffer[subpx_inds_y_in,subpx_inds_x_in])
319
+
320
+ # Initialise the z coord to the value in the depth buffer
321
+ px_coord_z_masked = depth_buffer[subpx_inds_y_in,subpx_inds_x_in]
322
+ # Use the depth mask to overwrite the depth buffer values if points are in
323
+ # front of the values in the depth buffer
324
+ px_coord_z_masked[px_coord_z_depth_mask] = px_coord_z[px_coord_z_depth_mask]
325
+
326
+ # Push the masked values into the depth buffer
327
+ depth_buffer[subpx_inds_y_in,subpx_inds_x_in] = px_coord_z_masked
328
+
329
+ # Mask the image buffer using the depth mask
330
+ image_buffer_depth_masked = image_buffer[subpx_inds_y_in,subpx_inds_x_in]
331
+ image_buffer_depth_masked[px_coord_z_depth_mask] = field_interp[px_coord_z_depth_mask]
332
+
333
+ # Push the masked values into the image buffer
334
+ image_buffer[subpx_inds_y_in,subpx_inds_x_in] = image_buffer_depth_masked
335
+
336
+ #---------------------------------------------------------------------------
337
+ # END RASTER LOOP
338
+ # TODO: fix this for windows
339
+ if Path(rastercyth.__file__).suffix == ".so":
340
+ depth_buff = np.empty((cam_data.pixels_num[1],cam_data.pixels_num[0]),dtype=np.float64)
341
+ depth_buff = np.array(rastercyth.average_image(depth_buffer,cam_data.sub_samp))
342
+ image_buff = np.empty((cam_data.pixels_num[1],cam_data.pixels_num[0]),dtype=np.float64)
343
+ image_buff = np.array(rastercyth.average_image(image_buffer,cam_data.sub_samp))
344
+ else:
345
+ depth_buff = CameraTools.average_subpixel_image(depth_buffer,cam_data.sub_samp)
346
+ image_buff = CameraTools.average_subpixel_image(image_buffer,cam_data.sub_samp)
347
+
348
+ return (image_buff,depth_buff)
349
+
350
+
351
+ @staticmethod
352
+ def raster_static_mesh(cam_data: CameraData,
353
+ render_mesh: RenderMeshData,
354
+ save_path: Path | None = None,
355
+ threads_num: int | None = None,
356
+ ) -> np.ndarray | None:
357
+
358
+ frames_num = render_mesh.fields_render.shape[1]
359
+ field_num = render_mesh.fields_render.shape[2]
360
+ (frames,fields) = np.meshgrid(np.arange(0,frames_num),
361
+ np.arange(0,field_num))
362
+ frames = frames.flatten()
363
+ fields = fields.flatten()
364
+
365
+ if save_path is None:
366
+ images = np.empty((cam_data.pixels_num[1],
367
+ cam_data.pixels_num[0],
368
+ frames_num,
369
+ field_num))
370
+ else:
371
+ images = None
372
+ if not save_path.is_dir():
373
+ save_path.mkdir()
374
+
375
+ # DO THIS ONCE: for non deforming meshes
376
+ # coords_raster.shape=(num_coords,coord[x,y,z,w])
377
+ # connect_in_frame.shape=(num_elems_in_scene,nodes_per_elem)
378
+ # elem_bound_box_inds.shape=(num_elems_in_scene,4[x_min,x_max,y_min,y_max])
379
+ # elem_areas.shape=(num_elems,)
380
+ (coords_raster,
381
+ connect_in_frame,
382
+ elem_bound_box_inds,
383
+ elem_areas) = RasterNP.setup_frame(
384
+ cam_data,
385
+ render_mesh.coords,
386
+ render_mesh.connectivity,
387
+ )
388
+
389
+ if threads_num is None:
390
+ for ff in range(0,frames.shape[0]):
391
+ image = RasterNP._static_mesh_frame_loop(
392
+ frames[ff],
393
+ fields[ff],
394
+ cam_data,
395
+ coords_raster,
396
+ connect_in_frame,
397
+ elem_bound_box_inds,
398
+ elem_areas,
399
+ render_mesh.fields_render[:,frames[ff],fields[ff]],
400
+ save_path
401
+ )
402
+
403
+ if images is not None:
404
+ images[:,:,frames[ff],fields[ff]] = image
405
+ else:
406
+ with Pool(threads_num) as pool:
407
+ processes_with_id = []
408
+
409
+ for ff in range(0,frames.shape[0]):
410
+ args = (frames[ff],
411
+ fields[ff],
412
+ cam_data,
413
+ coords_raster,
414
+ connect_in_frame,
415
+ elem_bound_box_inds,
416
+ elem_areas,
417
+ render_mesh.fields_render[:,frames[ff],fields[ff]],
418
+ save_path)
419
+
420
+ process = pool.apply_async(
421
+ RasterNP._static_mesh_frame_loop, args=args
422
+ )
423
+ processes_with_id.append({"process": process,
424
+ "frame": frames[ff],
425
+ "field": fields[ff]})
426
+
427
+ for pp in processes_with_id:
428
+ image = pp["process"].get()
429
+ images[:,:,pp["frame"],pp["field"]] = image
430
+
431
+ if images is not None:
432
+ return images
433
+
434
+ return None
435
+
436
+
437
+
438
+
439
+ @staticmethod
440
+ def raster_deformed_mesh(cam_data: CameraData,
441
+ render_mesh: RenderMeshData,
442
+ save_path: Path | None = None,
443
+ parallel: int | None = None
444
+ ) -> np.ndarray | None:
445
+
446
+ frames_num = render_mesh.fields_render.shape[1]
447
+ field_num = render_mesh.fields_render.shape[2]
448
+ (frames,fields) = np.meshgrid(np.arange(0,frames_num),
449
+ np.arange(0,field_num))
450
+ frames = frames.flatten()
451
+ fields = fields.flatten()
452
+
453
+
454
+ if save_path is None:
455
+ images = np.empty((cam_data.pixels_num[1],
456
+ cam_data.pixels_num[0],
457
+ frames_num,
458
+ field_num))
459
+ else:
460
+ images = None
461
+ if not save_path.is_dir():
462
+ save_path.mkdir()
463
+
464
+
465
+ if parallel is None:
466
+ for ff in range(0,frames.shape[0]):
467
+ image = RasterNP._deformed_mesh_frame_loop(
468
+ frames[ff],
469
+ fields[ff],
470
+ cam_data,
471
+ render_mesh,
472
+ save_path,
473
+ )
474
+
475
+ if images is not None:
476
+ images[:,:,frames[ff],fields[ff]] = image
477
+ else:
478
+ with Pool(parallel) as pool:
479
+ processes_with_id = []
480
+
481
+ for ff in range(0,frames.shape[0]):
482
+ args = (frames[ff],
483
+ fields[ff],
484
+ cam_data,
485
+ render_mesh,
486
+ save_path)
487
+
488
+ process = pool.apply_async(
489
+ RasterNP._deformed_mesh_frame_loop, args=args
490
+ )
491
+ processes_with_id.append({"process": process,
492
+ "frame": frames[ff],
493
+ "field": fields[ff]})
494
+
495
+ for pp in processes_with_id:
496
+ image = pp["process"].get()
497
+ images[:,:,pp["frame"],pp["field"]] = image
498
+
499
+ if images is not None:
500
+ return images
501
+
502
+ return None
503
+
504
+
505
+ @staticmethod
506
+ def _static_mesh_frame_loop(frame_ind: int,
507
+ field_ind: int,
508
+ cam_data: CameraData,
509
+ coords_raster: np.ndarray,
510
+ connect_in_frame: np.ndarray,
511
+ elem_bound_box_inds: np.ndarray,
512
+ elem_areas: np.ndarray,
513
+ field_to_render: np.ndarray,
514
+ save_path: Path | None,
515
+ ) -> np.ndarray | None:
516
+
517
+ # NOTE: the z coord has already been inverted in setup so we multiply here
518
+ render_field_div_z = field_to_render*coords_raster[:,2]
519
+
520
+ (image_buffer,
521
+ depth_buffer) = RasterNP.raster_frame(
522
+ cam_data,
523
+ connect_in_frame,
524
+ coords_raster,
525
+ elem_bound_box_inds,
526
+ elem_areas,
527
+ render_field_div_z)
528
+
529
+ # TODO: make this configurable
530
+ image_buffer[depth_buffer > 1000*cam_data.image_dist] = np.nan
531
+
532
+ if save_path is None:
533
+ return image_buffer
534
+
535
+ image_file = save_path/f"image_frame{frame_ind}_field{field_ind}"
536
+ np.save(image_file,image_buffer)
537
+ return None
538
+
539
+
540
+ @staticmethod
541
+ def _deformed_mesh_frame_loop(frame_ind: int,
542
+ field_ind: int,
543
+ cam_data: CameraData,
544
+ render_mesh: RenderMeshData,
545
+ save_path: Path | None
546
+ ) -> np.ndarray | None:
547
+ # coords_raster.shape=(num_coords,coord[x,y,z,w])
548
+ # connect_in_frame.shape=(num_elems_in_scene,nodes_per_elem)
549
+ # elem_bound_box_inds.shape=(num_elems_in_scene,4[x_min,x_max,y_min,y_max])
550
+ # elem_areas.shape=(num_elems,)
551
+ (coords_raster,
552
+ connect_in_frame,
553
+ elem_bound_box_inds,
554
+ elem_areas) = RasterNP.setup_frame(
555
+ cam_data,
556
+ render_mesh.coords,
557
+ render_mesh.connectivity,
558
+ render_mesh.fields_disp[:,frame_ind,:],
559
+ )
560
+
561
+ # NOTE: the z coord has already been inverted in setup so we multiply here
562
+ render_field_div_z = (render_mesh.fields_render[:,frame_ind,field_ind]
563
+ *coords_raster[:,2])
564
+
565
+ # image_buffer.shape=(num_px_y,num_px_x)
566
+ # depth_buffer.shape=(num_px_y,num_px_x)
567
+ (image_buffer,
568
+ depth_buffer) = RasterNP.raster_frame(
569
+ cam_data,
570
+ connect_in_frame,
571
+ coords_raster,
572
+ elem_bound_box_inds,
573
+ elem_areas,
574
+ render_field_div_z)
575
+
576
+ # TODO: make this configurable
577
+ image_buffer[depth_buffer > 1000*cam_data.image_dist] = np.nan
578
+
579
+ if save_path is None:
580
+ return image_buffer
581
+
582
+ image_file = save_path/f"image_frame{frame_ind}_field{field_ind}"
583
+ np.save(image_file.with_suffix(".npy"),image_buffer)
584
+ return None
585
+
586
+
587
+ #-------------------------------------------------------------------------------
588
@numba.jit(nopython=True)
def edge_function(vert_a: np.ndarray,
                  vert_b: np.ndarray,
                  vert_c: np.ndarray) -> np.ndarray:
    """Signed area (cross product z) of edge a->b against point(s) c.

    Non-negative when c lies to the left of (or on) the directed edge a->b.
    """
    dx_ab = vert_b[0] - vert_a[0]
    dy_ab = vert_b[1] - vert_a[1]
    dx_ac = vert_c[0] - vert_a[0]
    dy_ac = vert_c[1] - vert_a[1]
    return dx_ac*dy_ab - dy_ac*dx_ab
595
+
596
@numba.jit(nopython=True)
def edge_function_slice(vert_a: np.ndarray,
                        vert_b: np.ndarray,
                        vert_c: np.ndarray) -> np.ndarray:
    """Vectorised edge function over rows of vertices, shape=(n,coord[x,y,...])."""
    dx_ab = vert_b[:,0] - vert_a[:,0]
    dy_ab = vert_b[:,1] - vert_a[:,1]
    dx_ac = vert_c[:,0] - vert_a[:,0]
    dy_ac = vert_c[:,1] - vert_a[:,1]
    return dx_ac*dy_ab - dy_ac*dx_ab
603
+
604
+