pyvale-2025.5.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
- pyvale/__init__.py +89 -0
- pyvale/analyticmeshgen.py +102 -0
- pyvale/analyticsimdatafactory.py +91 -0
- pyvale/analyticsimdatagenerator.py +323 -0
- pyvale/blendercalibrationdata.py +15 -0
- pyvale/blenderlightdata.py +26 -0
- pyvale/blendermaterialdata.py +15 -0
- pyvale/blenderrenderdata.py +30 -0
- pyvale/blenderscene.py +488 -0
- pyvale/blendertools.py +420 -0
- pyvale/camera.py +146 -0
- pyvale/cameradata.py +69 -0
- pyvale/cameradata2d.py +84 -0
- pyvale/camerastereo.py +217 -0
- pyvale/cameratools.py +522 -0
- pyvale/cython/rastercyth.c +32211 -0
- pyvale/cython/rastercyth.cpython-311-x86_64-linux-gnu.so +0 -0
- pyvale/cython/rastercyth.py +640 -0
- pyvale/data/__init__.py +5 -0
- pyvale/data/cal_target.tiff +0 -0
- pyvale/data/case00_HEX20_out.e +0 -0
- pyvale/data/case00_HEX27_out.e +0 -0
- pyvale/data/case00_HEX8_out.e +0 -0
- pyvale/data/case00_TET10_out.e +0 -0
- pyvale/data/case00_TET14_out.e +0 -0
- pyvale/data/case00_TET4_out.e +0 -0
- pyvale/data/case13_out.e +0 -0
- pyvale/data/case16_out.e +0 -0
- pyvale/data/case17_out.e +0 -0
- pyvale/data/case18_1_out.e +0 -0
- pyvale/data/case18_2_out.e +0 -0
- pyvale/data/case18_3_out.e +0 -0
- pyvale/data/case25_out.e +0 -0
- pyvale/data/case26_out.e +0 -0
- pyvale/data/optspeckle_2464x2056px_spec5px_8bit_gblur1px.tiff +0 -0
- pyvale/dataset.py +325 -0
- pyvale/errorcalculator.py +109 -0
- pyvale/errordriftcalc.py +146 -0
- pyvale/errorintegrator.py +336 -0
- pyvale/errorrand.py +607 -0
- pyvale/errorsyscalib.py +134 -0
- pyvale/errorsysdep.py +327 -0
- pyvale/errorsysfield.py +414 -0
- pyvale/errorsysindep.py +808 -0
- pyvale/examples/__init__.py +5 -0
- pyvale/examples/basics/ex1_1_basicscalars_therm2d.py +131 -0
- pyvale/examples/basics/ex1_2_sensormodel_therm2d.py +158 -0
- pyvale/examples/basics/ex1_3_customsens_therm3d.py +216 -0
- pyvale/examples/basics/ex1_4_basicerrors_therm3d.py +153 -0
- pyvale/examples/basics/ex1_5_fielderrs_therm3d.py +168 -0
- pyvale/examples/basics/ex1_6_caliberrs_therm2d.py +133 -0
- pyvale/examples/basics/ex1_7_spatavg_therm2d.py +123 -0
- pyvale/examples/basics/ex2_1_basicvectors_disp2d.py +112 -0
- pyvale/examples/basics/ex2_2_vectorsens_disp2d.py +111 -0
- pyvale/examples/basics/ex2_3_sensangle_disp2d.py +139 -0
- pyvale/examples/basics/ex2_4_chainfielderrs_disp2d.py +196 -0
- pyvale/examples/basics/ex2_5_vectorfields3d_disp3d.py +109 -0
- pyvale/examples/basics/ex3_1_basictensors_strain2d.py +114 -0
- pyvale/examples/basics/ex3_2_tensorsens2d_strain2d.py +111 -0
- pyvale/examples/basics/ex3_3_tensorsens3d_strain3d.py +182 -0
- pyvale/examples/basics/ex4_1_expsim2d_thermmech2d.py +171 -0
- pyvale/examples/basics/ex4_2_expsim3d_thermmech3d.py +252 -0
- pyvale/examples/genanalyticdata/ex1_1_scalarvisualisation.py +35 -0
- pyvale/examples/genanalyticdata/ex1_2_scalarcasebuild.py +43 -0
- pyvale/examples/genanalyticdata/ex2_1_analyticsensors.py +80 -0
- pyvale/examples/imagedef2d/ex_imagedef2d_todisk.py +79 -0
- pyvale/examples/renderblender/ex1_1_blenderscene.py +121 -0
- pyvale/examples/renderblender/ex1_2_blenderdeformed.py +119 -0
- pyvale/examples/renderblender/ex2_1_stereoscene.py +128 -0
- pyvale/examples/renderblender/ex2_2_stereodeformed.py +131 -0
- pyvale/examples/renderblender/ex3_1_blendercalibration.py +120 -0
- pyvale/examples/renderrasterisation/ex_rastenp.py +153 -0
- pyvale/examples/renderrasterisation/ex_rastercyth_oneframe.py +218 -0
- pyvale/examples/renderrasterisation/ex_rastercyth_static_cypara.py +187 -0
- pyvale/examples/renderrasterisation/ex_rastercyth_static_pypara.py +190 -0
- pyvale/examples/visualisation/ex1_1_plot_traces.py +102 -0
- pyvale/examples/visualisation/ex2_1_animate_sim.py +89 -0
- pyvale/experimentsimulator.py +175 -0
- pyvale/field.py +128 -0
- pyvale/fieldconverter.py +351 -0
- pyvale/fieldsampler.py +111 -0
- pyvale/fieldscalar.py +166 -0
- pyvale/fieldtensor.py +218 -0
- pyvale/fieldtransform.py +388 -0
- pyvale/fieldvector.py +213 -0
- pyvale/generatorsrandom.py +505 -0
- pyvale/imagedef2d.py +569 -0
- pyvale/integratorfactory.py +240 -0
- pyvale/integratorquadrature.py +217 -0
- pyvale/integratorrectangle.py +165 -0
- pyvale/integratorspatial.py +89 -0
- pyvale/integratortype.py +43 -0
- pyvale/output.py +17 -0
- pyvale/pyvaleexceptions.py +11 -0
- pyvale/raster.py +31 -0
- pyvale/rastercy.py +77 -0
- pyvale/rasternp.py +603 -0
- pyvale/rendermesh.py +147 -0
- pyvale/sensorarray.py +178 -0
- pyvale/sensorarrayfactory.py +196 -0
- pyvale/sensorarraypoint.py +278 -0
- pyvale/sensordata.py +71 -0
- pyvale/sensordescriptor.py +213 -0
- pyvale/sensortools.py +142 -0
- pyvale/simcases/case00_HEX20.i +242 -0
- pyvale/simcases/case00_HEX27.i +242 -0
- pyvale/simcases/case00_HEX8.i +242 -0
- pyvale/simcases/case00_TET10.i +242 -0
- pyvale/simcases/case00_TET14.i +242 -0
- pyvale/simcases/case00_TET4.i +242 -0
- pyvale/simcases/case01.i +101 -0
- pyvale/simcases/case02.i +156 -0
- pyvale/simcases/case03.i +136 -0
- pyvale/simcases/case04.i +181 -0
- pyvale/simcases/case05.i +234 -0
- pyvale/simcases/case06.i +305 -0
- pyvale/simcases/case07.geo +135 -0
- pyvale/simcases/case07.i +87 -0
- pyvale/simcases/case08.geo +144 -0
- pyvale/simcases/case08.i +153 -0
- pyvale/simcases/case09.geo +204 -0
- pyvale/simcases/case09.i +87 -0
- pyvale/simcases/case10.geo +204 -0
- pyvale/simcases/case10.i +257 -0
- pyvale/simcases/case11.geo +337 -0
- pyvale/simcases/case11.i +147 -0
- pyvale/simcases/case12.geo +388 -0
- pyvale/simcases/case12.i +329 -0
- pyvale/simcases/case13.i +140 -0
- pyvale/simcases/case14.i +159 -0
- pyvale/simcases/case15.geo +337 -0
- pyvale/simcases/case15.i +150 -0
- pyvale/simcases/case16.geo +391 -0
- pyvale/simcases/case16.i +357 -0
- pyvale/simcases/case17.geo +135 -0
- pyvale/simcases/case17.i +144 -0
- pyvale/simcases/case18.i +254 -0
- pyvale/simcases/case18_1.i +254 -0
- pyvale/simcases/case18_2.i +254 -0
- pyvale/simcases/case18_3.i +254 -0
- pyvale/simcases/case19.geo +252 -0
- pyvale/simcases/case19.i +99 -0
- pyvale/simcases/case20.geo +252 -0
- pyvale/simcases/case20.i +250 -0
- pyvale/simcases/case21.geo +74 -0
- pyvale/simcases/case21.i +155 -0
- pyvale/simcases/case22.geo +82 -0
- pyvale/simcases/case22.i +140 -0
- pyvale/simcases/case23.geo +164 -0
- pyvale/simcases/case23.i +140 -0
- pyvale/simcases/case24.geo +79 -0
- pyvale/simcases/case24.i +123 -0
- pyvale/simcases/case25.geo +82 -0
- pyvale/simcases/case25.i +140 -0
- pyvale/simcases/case26.geo +166 -0
- pyvale/simcases/case26.i +140 -0
- pyvale/simcases/run_1case.py +61 -0
- pyvale/simcases/run_all_cases.py +69 -0
- pyvale/simcases/run_build_case.py +64 -0
- pyvale/simcases/run_example_cases.py +69 -0
- pyvale/simtools.py +67 -0
- pyvale/visualexpplotter.py +191 -0
- pyvale/visualimagedef.py +74 -0
- pyvale/visualimages.py +76 -0
- pyvale/visualopts.py +493 -0
- pyvale/visualsimanimator.py +111 -0
- pyvale/visualsimsensors.py +318 -0
- pyvale/visualtools.py +136 -0
- pyvale/visualtraceplotter.py +142 -0
- pyvale-2025.5.3.dist-info/METADATA +144 -0
- pyvale-2025.5.3.dist-info/RECORD +175 -0
- pyvale-2025.5.3.dist-info/WHEEL +6 -0
- pyvale-2025.5.3.dist-info/licenses/LICENSE +21 -0
- pyvale-2025.5.3.dist-info/top_level.txt +1 -0
- pyvale.libs/libgomp-a34b3233.so.1.0.0 +0 -0
pyvale/rasternp.py
ADDED
@@ -0,0 +1,603 @@
# ==============================================================================
# pyvale: the python validation engine
# License: MIT
# Copyright (C) 2025 The Computer Aided Validation Team
# ==============================================================================

"""
NOTE: this module is a feature under development.
"""

from pathlib import Path
from multiprocessing.pool import Pool
import numpy as np
import numba
from pyvale.cameradata import CameraData
from pyvale.cameratools import CameraTools
from pyvale.rendermesh import RenderMeshData
import pyvale.cython.rastercyth as rastercyth

class RasterNP:
    @staticmethod
    def world_to_raster_coords(cam_data: CameraData,
                               coords_world: np.ndarray) -> np.ndarray:
        # coords_world.shape=(num_nodes,coord[X,Y,Z,W])

        # Project onto camera coords
        # coords_raster.shape=(num_nodes,coord[X,Y,Z,W])
        coords_raster = np.matmul(coords_world,cam_data.world_to_cam_mat.T)

        # NOTE: w is not 1 when the matrix is a perspective projection! It is
        # only 1 here because we have an affine transformation.
        coords_raster[:,0] = coords_raster[:,0] / coords_raster[:,3]
        coords_raster[:,1] = coords_raster[:,1] / coords_raster[:,3]
        coords_raster[:,2] = coords_raster[:,2] / coords_raster[:,3]

        # Coords Image: perspective divide
        coords_raster[:,0] = (cam_data.image_dist * coords_raster[:,0]
                              / -coords_raster[:,2])
        coords_raster[:,1] = (cam_data.image_dist * coords_raster[:,1]
                              / -coords_raster[:,2])

        # Coords NDC: convert to normalised device coords in the range [-1,1]
        coords_raster[:,0] = 2*coords_raster[:,0] / cam_data.image_dims[0]
        coords_raster[:,1] = 2*coords_raster[:,1] / cam_data.image_dims[1]

        # Coords Raster: convert to pixel (raster) coords
        # shape=(num_nodes,coord[X,Y,Z])
        coords_raster[:,0] = (coords_raster[:,0] + 1)/2 * cam_data.pixels_num[0]
        coords_raster[:,1] = (1-coords_raster[:,1])/2 * cam_data.pixels_num[1]
        coords_raster[:,2] = -coords_raster[:,2]

        return coords_raster

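    # Worked example for world_to_raster_coords above (illustrative values,
    # not pyvale defaults): with image_dims=(10.0,8.0) mm, image_dist=50.0 mm
    # and pixels_num=(2464,2056), a camera-space point (x,y,z)=(1.0,1.0,-500.0)
    # gives image coords (50*1/500, 50*1/500)=(0.1,0.1) mm, NDC
    # (2*0.1/10, 2*0.1/8)=(0.02,0.025) and raster coords
    # ((0.02+1)/2*2464, (1-0.025)/2*2056)=(1256.6,1002.3) px, with the
    # returned z flipped positive for depth buffering.
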
    @staticmethod
    def back_face_removal_mask(cam_data: CameraData,
                               coords_world: np.ndarray,
                               connect: np.ndarray
                               ) -> np.ndarray:
        coords_cam = np.matmul(coords_world,cam_data.world_to_cam_mat.T)

        # shape=(num_elems,nodes_per_elem,coord[x,y,z,w])
        elem_cam_coords = coords_cam[connect,:]

        # Calculate the normal vectors for all of the elements, remove the w
        # coord. shape=(num_elems,coord[x,y,z])
        elem_cam_edge0 = elem_cam_coords[:,1,:-1] - elem_cam_coords[:,0,:-1]
        elem_cam_edge1 = elem_cam_coords[:,2,:-1] - elem_cam_coords[:,0,:-1]
        elem_cam_normals = np.cross(elem_cam_edge0,elem_cam_edge1,
                                    axisa=1,axisb=1).T
        elem_cam_normals = elem_cam_normals / np.linalg.norm(elem_cam_normals,axis=0)

        cam_normal = np.array([0,0,1])
        # shape=(num_elems,)
        proj_elem_to_cam = np.dot(cam_normal,elem_cam_normals)

        # NOTE: this should be a numerical precision tolerance (epsilon).
        # The mask is True for front-facing elements, i.e. the ones to keep.
        back_face_mask = proj_elem_to_cam > 1e-6

        return back_face_mask

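    # A note on the test above: the element normal is built from the first
    # three nodes of each element, so front faces only give a positive
    # projection onto the camera axis [0,0,1] when the connectivity winding
    # is counter-clockwise as seen from the camera.
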
    @staticmethod
    def crop_and_bound_by_connect(cam_data: CameraData,
                                  coords_raster: np.ndarray,
                                  connectivity: np.ndarray,
                                  ) -> tuple[np.ndarray,np.ndarray]:

        # shape=(num_elems,nodes_per_elem,coord[x,y,z])
        if coords_raster.ndim == 2:
            coords_by_elem = coords_raster[connectivity,:]
        else:
            coords_by_elem = coords_raster[connectivity,:,:]

        elem_raster_coord_min = np.min(coords_by_elem,axis=1)
        elem_raster_coord_max = np.max(coords_by_elem,axis=1)

        # Check that min/max nodes are within the 4 edges of the camera image
        # shape=(num_elems,4_edges_to_check)
        crop_mask = np.zeros([connectivity.shape[0],4],dtype=np.int8)
        crop_mask[elem_raster_coord_min[:,0] <= (cam_data.pixels_num[0]-1), 0] = 1
        crop_mask[elem_raster_coord_min[:,1] <= (cam_data.pixels_num[1]-1), 1] = 1
        crop_mask[elem_raster_coord_max[:,0] >= 0, 2] = 1
        crop_mask[elem_raster_coord_max[:,1] >= 0, 3] = 1
        crop_mask = np.sum(crop_mask,axis=1) == 4

        # Get only the elements that are within the FOV
        # Mask the elem coords and the max and min elem coords for processing
        elem_raster_coord_min = elem_raster_coord_min[crop_mask,:]
        elem_raster_coord_max = elem_raster_coord_max[crop_mask,:]
        num_elems_in_image = elem_raster_coord_min.shape[0]

        # Find the indices of the bounding box that each element lies within on
        # the image, bounded by the upper and lower edges of the image
        elem_bound_boxes_inds = np.zeros([num_elems_in_image,4],dtype=np.int32)
        elem_bound_boxes_inds[:,0] = RasterNP.elem_bound_box_low(
                                        elem_raster_coord_min[:,0])
        elem_bound_boxes_inds[:,1] = RasterNP.elem_bound_box_high(
                                        elem_raster_coord_max[:,0],
                                        cam_data.pixels_num[0]-1)
        elem_bound_boxes_inds[:,2] = RasterNP.elem_bound_box_low(
                                        elem_raster_coord_min[:,1])
        elem_bound_boxes_inds[:,3] = RasterNP.elem_bound_box_high(
                                        elem_raster_coord_max[:,1],
                                        cam_data.pixels_num[1]-1)

        return (crop_mask,elem_bound_boxes_inds)

    @staticmethod
    def elem_bound_box_low(coord_min: np.ndarray) -> np.ndarray:
        # Floor to the pixel index below, clamped at the image edge (index 0)
        bound_elem = np.floor(coord_min).astype(np.int32)
        bound_low = np.zeros_like(coord_min,dtype=np.int32)
        bound_mat = np.vstack((bound_elem,bound_low))
        return np.max(bound_mat,axis=0)

    @staticmethod
    def elem_bound_box_high(coord_max: np.ndarray,image_px: int) -> np.ndarray:
        # Ceil to the pixel index above, clamped at the last pixel index
        bound_elem = np.ceil(coord_max).astype(np.int32)
        bound_high = image_px*np.ones_like(coord_max,dtype=np.int32)
        bound_mat = np.vstack((bound_elem,bound_high))
        bound = np.min(bound_mat,axis=0)
        return bound

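    # For example (illustrative values): a raster x range of [3.2, 7.9] on a
    # 10 pixel wide image gives elem_bound_box_low -> 3 and
    # elem_bound_box_high -> 8, while a range of [-2.4, 12.6] is clamped to
    # the image edges [0, 9].
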
    @staticmethod
    def setup_frame(cam_data: CameraData,
                    coords_world: np.ndarray,
                    connectivity: np.ndarray,
                    disp_field_frame: np.ndarray | None = None,
                    ) -> tuple[np.ndarray,np.ndarray,np.ndarray,np.ndarray]:

        connect_in_frame = np.copy(connectivity)
        coords_deform = np.copy(coords_world)

        #-----------------------------------------------------------------------
        # DEFORM MESH WITH DISPLACEMENT
        if disp_field_frame is not None:
            # Exclude w coord from mesh deformation
            coords_deform[:,:-1] = coords_deform[:,:-1] + disp_field_frame

        #-----------------------------------------------------------------------
        # Convert world coords of all elements in the scene
        # shape=(num_nodes,coord[x,y,z,w])
        coords_raster = RasterNP.world_to_raster_coords(cam_data,
                                                        coords_deform)

        # Convert to perspective correct hyperbolic interpolation for z interp
        # shape=(num_nodes,coord[x,y,z,w])
        coords_raster[:,2] = 1/coords_raster[:,2]
        # Remove w coord
        coords_raster = coords_raster[:,:-1]

        #-----------------------------------------------------------------------
        # BACKFACE REMOVAL
        # shape=(num_elems,)
        back_face_mask = RasterNP.back_face_removal_mask(cam_data,
                                                         coords_deform,
                                                         connect_in_frame)
        connect_in_frame = connect_in_frame[back_face_mask,:]

        #-----------------------------------------------------------------------
        # CROPPING & BOUNDING BOX OPERATIONS
        (crop_mask,
         elem_bound_box_inds) = RasterNP.crop_and_bound_by_connect(
            cam_data,
            coords_raster,
            connect_in_frame,
        )
        connect_in_frame = connect_in_frame[crop_mask,:]

        #-----------------------------------------------------------------------
        # ELEMENT AREAS FOR INTERPOLATION
        elem_raster_coords = coords_raster[connect_in_frame,:]
        elem_areas = edge_function_slice(elem_raster_coords[:,0,:],
                                         elem_raster_coords[:,1,:],
                                         elem_raster_coords[:,2,:])

        return (coords_raster,connect_in_frame,elem_bound_box_inds,elem_areas)

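    # A note on the 1/z trick in setup_frame above: storing the reciprocal
    # depth lets raster_elem recover perspective-correct values, because after
    # projection it is 1/z and f/z (not z and f) that vary linearly across the
    # screen and can be interpolated with barycentric weights.
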
    @staticmethod
    def raster_elem(cam_data: CameraData,
                    elem_raster_coords: np.ndarray,
                    elem_bound_box_inds: np.ndarray,
                    elem_area: float,
                    field_divide_z: np.ndarray
                    ) -> tuple[np.ndarray,np.ndarray,np.ndarray,np.ndarray]:

        # Create the subpixel coords inside the bounding box to test with the
        # edge function. Use the pixel indices of the bounding box.
        bound_subpx_x = np.arange(elem_bound_box_inds[0],
                                  elem_bound_box_inds[1],
                                  1/cam_data.sub_samp) + 1/(2*cam_data.sub_samp)
        bound_subpx_y = np.arange(elem_bound_box_inds[2],
                                  elem_bound_box_inds[3],
                                  1/cam_data.sub_samp) + 1/(2*cam_data.sub_samp)
        (bound_subpx_grid_x,bound_subpx_grid_y) = np.meshgrid(bound_subpx_x,
                                                              bound_subpx_y)
        bound_coords_grid_shape = bound_subpx_grid_x.shape
        # shape=(coord[x,y],num_subpx_in_box)
        bound_subpx_coords_flat = np.vstack((bound_subpx_grid_x.flatten(),
                                             bound_subpx_grid_y.flatten()))

        # Create the subpixel indices for buffer slicing later
        subpx_inds_x = np.arange(cam_data.sub_samp*elem_bound_box_inds[0],
                                 cam_data.sub_samp*elem_bound_box_inds[1])
        subpx_inds_y = np.arange(cam_data.sub_samp*elem_bound_box_inds[2],
                                 cam_data.sub_samp*elem_bound_box_inds[3])
        (subpx_inds_grid_x,subpx_inds_grid_y) = np.meshgrid(subpx_inds_x,
                                                            subpx_inds_y)

        # We compute the edge function for all pixels in the box to determine
        # if the pixel is inside the element or not.
        # NOTE: first axis of elem_raster_coords is the node/vertex num.
        # shape=(3_edges,num_subpx_in_box)
        edge = np.zeros((3,bound_subpx_coords_flat.shape[1]),dtype=np.float64)
        edge[0,:] = edge_function(elem_raster_coords[1,:],
                                  elem_raster_coords[2,:],
                                  bound_subpx_coords_flat)
        edge[1,:] = edge_function(elem_raster_coords[2,:],
                                  elem_raster_coords[0,:],
                                  bound_subpx_coords_flat)
        edge[2,:] = edge_function(elem_raster_coords[0,:],
                                  elem_raster_coords[1,:],
                                  bound_subpx_coords_flat)

        # Now we check where the edge function is above zero for all edges
        edge_check = np.zeros_like(edge,dtype=np.int8)
        edge_check[edge >= 0.0] = 1
        edge_check = np.sum(edge_check, axis=0)
        # Create a mask with the check. TODO: check the 3 here for
        # non-triangles
        edge_mask_flat = edge_check == 3
        edge_mask_grid = np.reshape(edge_mask_flat,bound_coords_grid_shape)

        # Calculate the weights for the masked pixels
        edge_masked = edge[:,edge_mask_flat]
        interp_weights = edge_masked / elem_area

        # Compute the depth of all pixels using hyperbolic interp
        # NOTE: second index on raster coords is Z
        px_coord_z = 1/(elem_raster_coords[0,2] * interp_weights[0,:]
                        + elem_raster_coords[1,2] * interp_weights[1,:]
                        + elem_raster_coords[2,2] * interp_weights[2,:])

        field_interp = ((field_divide_z[0] * interp_weights[0,:]
                         + field_divide_z[1] * interp_weights[1,:]
                         + field_divide_z[2] * interp_weights[2,:])
                        * px_coord_z)

        return (px_coord_z,
                field_interp,
                subpx_inds_grid_x[edge_mask_grid],
                subpx_inds_grid_y[edge_mask_grid])

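    # The interpolation above implements, per sub-pixel with barycentric
    # weights (w0,w1,w2):
    #   1/z = w0*(1/z0) + w1*(1/z1) + w2*(1/z2)
    #   f   = (w0*f0/z0 + w1*f1/z1 + w2*f2/z2) * z
    # i.e. standard perspective-correct (hyperbolic) interpolation; note that
    # elem_raster_coords[:,2] and field_divide_z already hold 1/z and f/z
    # respectively, prepared in setup_frame and the frame loops.
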
    @staticmethod
    def raster_frame(cam_data: CameraData,
                     connect_in_frame: np.ndarray,
                     coords_raster: np.ndarray,
                     elem_bound_box_inds: np.ndarray,
                     elem_areas: np.ndarray,
                     field_frame_div_z: np.ndarray
                     ) -> tuple[np.ndarray,np.ndarray]:
        # connect_in_frame.shape=(num_elems,nodes_per_elem)
        # coords_raster.shape=(num_coords,coord[x,y,z])
        # elem_bound_box_inds.shape=(num_elems,[min_x,max_x,min_y,max_y])
        # elem_areas.shape=(num_elems,)
        # field_frame_div_z.shape=(num_coords,)

        depth_buffer = 1e5*cam_data.image_dist*np.ones(
            cam_data.sub_samp*cam_data.pixels_num).T
        image_buffer = np.full(cam_data.sub_samp*cam_data.pixels_num,0.0).T

        # Per-element slices passed to raster_elem:
        # coords_raster[cc,:].shape=(nodes_per_elem,coord[x,y,z])
        # field_frame_div_z[cc].shape=(nodes_per_elem,)
        for ee in range(connect_in_frame.shape[0]):
            cc = connect_in_frame[ee,:]

            (px_coord_z,
             field_interp,
             subpx_inds_x_in,
             subpx_inds_y_in) = RasterNP.raster_elem(
                cam_data,
                coords_raster[cc,:],
                elem_bound_box_inds[ee,:],
                elem_areas[ee],
                field_frame_div_z[cc])

            # Build a mask to replace the depth information if there is already
            # an element in front of the one we are rendering
            px_coord_z_depth_mask = (px_coord_z
                < depth_buffer[subpx_inds_y_in,subpx_inds_x_in])

            # Initialise the z coord to the value in the depth buffer
            px_coord_z_masked = depth_buffer[subpx_inds_y_in,subpx_inds_x_in]
            # Use the depth mask to overwrite the depth buffer values if points
            # are in front of the values in the depth buffer
            px_coord_z_masked[px_coord_z_depth_mask] = px_coord_z[px_coord_z_depth_mask]

            # Push the masked values into the depth buffer
            depth_buffer[subpx_inds_y_in,subpx_inds_x_in] = px_coord_z_masked

            # Mask the image buffer using the depth mask
            image_buffer_depth_masked = image_buffer[subpx_inds_y_in,subpx_inds_x_in]
            image_buffer_depth_masked[px_coord_z_depth_mask] = field_interp[px_coord_z_depth_mask]

            # Push the masked values into the image buffer
            image_buffer[subpx_inds_y_in,subpx_inds_x_in] = image_buffer_depth_masked

        #---------------------------------------------------------------------------
        # END RASTER LOOP
        # Average the sub-sampled buffers down to the camera resolution, using
        # the compiled cython implementation if the extension is present.
        # TODO: fix this check for windows (compiled extensions use .pyd)
        if Path(rastercyth.__file__).suffix == ".so":
            depth_buff = np.array(rastercyth.average_image(depth_buffer,cam_data.sub_samp))
            image_buff = np.array(rastercyth.average_image(image_buffer,cam_data.sub_samp))
        else:
            depth_buff = CameraTools.average_subpixel_image(depth_buffer,cam_data.sub_samp)
            image_buff = CameraTools.average_subpixel_image(image_buffer,cam_data.sub_samp)

        return (image_buff,depth_buff)

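    # A note on the loop above: this is a classic z-buffer. Each element
    # scatters only the sub-pixels it covers, and a candidate value replaces
    # the stored one only where its depth is smaller, so the nearest surface
    # wins regardless of the order elements are drawn in.
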
    @staticmethod
    def raster_static_mesh(cam_data: CameraData,
                           render_mesh: RenderMeshData,
                           save_path: Path | None = None,
                           threads_num: int | None = None,
                           ) -> np.ndarray | None:

        frames_num = render_mesh.fields_render.shape[1]
        field_num = render_mesh.fields_render.shape[2]
        (frames,fields) = np.meshgrid(np.arange(0,frames_num),
                                      np.arange(0,field_num))
        frames = frames.flatten()
        fields = fields.flatten()

        if save_path is None:
            images = np.empty((cam_data.pixels_num[1],
                               cam_data.pixels_num[0],
                               frames_num,
                               field_num))
        else:
            images = None
            if not save_path.is_dir():
                save_path.mkdir()

        # DO THIS ONCE: for non-deforming meshes the projection, back face
        # culling, cropping and element areas are the same for every frame.
        # coords_raster.shape=(num_coords,coord[x,y,z])
        # connect_in_frame.shape=(num_elems_in_scene,nodes_per_elem)
        # elem_bound_box_inds.shape=(num_elems_in_scene,4[x_min,x_max,y_min,y_max])
        # elem_areas.shape=(num_elems,)
        (coords_raster,
         connect_in_frame,
         elem_bound_box_inds,
         elem_areas) = RasterNP.setup_frame(
            cam_data,
            render_mesh.coords,
            render_mesh.connectivity,
        )

        if threads_num is None:
            for ff in range(0,frames.shape[0]):
                image = RasterNP._static_mesh_frame_loop(
                    frames[ff],
                    fields[ff],
                    cam_data,
                    coords_raster,
                    connect_in_frame,
                    elem_bound_box_inds,
                    elem_areas,
                    render_mesh.fields_render[:,frames[ff],fields[ff]],
                    save_path
                )

                if images is not None:
                    images[:,:,frames[ff],fields[ff]] = image
        else:
            with Pool(threads_num) as pool:
                processes_with_id = []

                for ff in range(0,frames.shape[0]):
                    args = (frames[ff],
                            fields[ff],
                            cam_data,
                            coords_raster,
                            connect_in_frame,
                            elem_bound_box_inds,
                            elem_areas,
                            render_mesh.fields_render[:,frames[ff],fields[ff]],
                            save_path)

                    process = pool.apply_async(
                        RasterNP._static_mesh_frame_loop, args=args
                    )
                    processes_with_id.append({"process": process,
                                              "frame": frames[ff],
                                              "field": fields[ff]})

                for pp in processes_with_id:
                    image = pp["process"].get()
                    # Guard as in the serial branch: frames are saved to disk,
                    # not returned, when a save_path is given.
                    if images is not None:
                        images[:,:,pp["frame"],pp["field"]] = image

        if images is not None:
            return images

        return None

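    # A minimal usage sketch for the static path (hedged: assumes a CameraData
    # instance `cam` and a RenderMeshData instance `mesh` built elsewhere with
    # the pyvale camera and mesh tooling):
    #
    #   images = RasterNP.raster_static_mesh(cam, mesh, threads_num=4)
    #   # images.shape=(num_px_y, num_px_x, num_frames, num_fields)
    #
    # Passing save_path=Path("out") instead writes one .npy file per frame
    # and field and returns None.
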
    @staticmethod
    def raster_deformed_mesh(cam_data: CameraData,
                             render_mesh: RenderMeshData,
                             save_path: Path | None = None,
                             parallel: int | None = None
                             ) -> np.ndarray | None:

        frames_num = render_mesh.fields_render.shape[1]
        field_num = render_mesh.fields_render.shape[2]
        (frames,fields) = np.meshgrid(np.arange(0,frames_num),
                                      np.arange(0,field_num))
        frames = frames.flatten()
        fields = fields.flatten()

        if save_path is None:
            images = np.empty((cam_data.pixels_num[1],
                               cam_data.pixels_num[0],
                               frames_num,
                               field_num))
        else:
            images = None
            if not save_path.is_dir():
                save_path.mkdir()

        if parallel is None:
            for ff in range(0,frames.shape[0]):
                image = RasterNP._deformed_mesh_frame_loop(
                    frames[ff],
                    fields[ff],
                    cam_data,
                    render_mesh,
                    save_path,
                )

                if images is not None:
                    images[:,:,frames[ff],fields[ff]] = image
        else:
            with Pool(parallel) as pool:
                processes_with_id = []

                for ff in range(0,frames.shape[0]):
                    args = (frames[ff],
                            fields[ff],
                            cam_data,
                            render_mesh,
                            save_path)

                    process = pool.apply_async(
                        RasterNP._deformed_mesh_frame_loop, args=args
                    )
                    processes_with_id.append({"process": process,
                                              "frame": frames[ff],
                                              "field": fields[ff]})

                for pp in processes_with_id:
                    image = pp["process"].get()
                    # Guard as in raster_static_mesh: frames go to disk, not
                    # the images array, when a save_path is given.
                    if images is not None:
                        images[:,:,pp["frame"],pp["field"]] = image

        if images is not None:
            return images

        return None

    @staticmethod
    def _static_mesh_frame_loop(frame_ind: int,
                                field_ind: int,
                                cam_data: CameraData,
                                coords_raster: np.ndarray,
                                connect_in_frame: np.ndarray,
                                elem_bound_box_inds: np.ndarray,
                                elem_areas: np.ndarray,
                                field_to_render: np.ndarray,
                                save_path: Path | None,
                                ) -> np.ndarray | None:

        # NOTE: the z coord has already been inverted in setup so we multiply
        # here to get field/z for perspective correct interpolation
        render_field_div_z = field_to_render*coords_raster[:,2]

        (image_buffer,
         depth_buffer) = RasterNP.raster_frame(
            cam_data,
            connect_in_frame,
            coords_raster,
            elem_bound_box_inds,
            elem_areas,
            render_field_div_z)

        # Mask pixels that did not hit the mesh (depth buffer still near its
        # initialisation value). TODO: make this threshold configurable
        image_buffer[depth_buffer > 1000*cam_data.image_dist] = np.nan

        if save_path is None:
            return image_buffer

        image_file = save_path/f"image_frame{frame_ind}_field{field_ind}"
        np.save(image_file.with_suffix(".npy"),image_buffer)
        return None

    @staticmethod
    def _deformed_mesh_frame_loop(frame_ind: int,
                                  field_ind: int,
                                  cam_data: CameraData,
                                  render_mesh: RenderMeshData,
                                  save_path: Path | None
                                  ) -> np.ndarray | None:
        # coords_raster.shape=(num_coords,coord[x,y,z])
        # connect_in_frame.shape=(num_elems_in_scene,nodes_per_elem)
        # elem_bound_box_inds.shape=(num_elems_in_scene,4[x_min,x_max,y_min,y_max])
        # elem_areas.shape=(num_elems,)
        (coords_raster,
         connect_in_frame,
         elem_bound_box_inds,
         elem_areas) = RasterNP.setup_frame(
            cam_data,
            render_mesh.coords,
            render_mesh.connectivity,
            render_mesh.fields_disp[:,frame_ind,:],
        )

        # NOTE: the z coord has already been inverted in setup so we multiply
        # here to get field/z for perspective correct interpolation
        render_field_div_z = (render_mesh.fields_render[:,frame_ind,field_ind]
                              *coords_raster[:,2])

        # image_buffer.shape=(num_px_y,num_px_x)
        # depth_buffer.shape=(num_px_y,num_px_x)
        (image_buffer,
         depth_buffer) = RasterNP.raster_frame(
            cam_data,
            connect_in_frame,
            coords_raster,
            elem_bound_box_inds,
            elem_areas,
            render_field_div_z)

        # Mask pixels that did not hit the mesh (depth buffer still near its
        # initialisation value). TODO: make this threshold configurable
        image_buffer[depth_buffer > 1000*cam_data.image_dist] = np.nan

        if save_path is None:
            return image_buffer

        image_file = save_path/f"image_frame{frame_ind}_field{field_ind}"
        np.save(image_file.with_suffix(".npy"),image_buffer)
        return None

#-------------------------------------------------------------------------------
@numba.jit(nopython=True)
def edge_function(vert_a: np.ndarray,
                  vert_b: np.ndarray,
                  vert_c: np.ndarray) -> np.ndarray:
    # 2D cross product of (vert_c - vert_a) with (vert_b - vert_a). The sign
    # encodes which side of edge a->b the point vert_c lies on, and the value
    # is twice the signed triangle area when vert_c is the third vertex.
    # vert_c may be a single vertex of shape (2,) or a stack of points of
    # shape (2,n).
    return ((vert_c[0] - vert_a[0]) * (vert_b[1] - vert_a[1])
            - (vert_c[1] - vert_a[1]) * (vert_b[0] - vert_a[0]))


@numba.jit(nopython=True)
def edge_function_slice(vert_a: np.ndarray,
                        vert_b: np.ndarray,
                        vert_c: np.ndarray) -> np.ndarray:
    # Vectorised form of edge_function over the leading (element) axis, used
    # to compute all element areas at once in setup_frame
    return ((vert_c[:,0] - vert_a[:,0]) * (vert_b[:,1] - vert_a[:,1])
            - (vert_c[:,1] - vert_a[:,1]) * (vert_b[:,0] - vert_a[:,0]))

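#-------------------------------------------------------------------------------
# A minimal, self-contained sketch (not part of the pyvale API) showing how the
# edge functions above drive the rasteriser: an inside test over a pixel grid
# and barycentric weights for interpolation. The triangle vertices and grid
# size are arbitrary illustrative values.
if __name__ == "__main__":
    # Triangle vertices in raster coords, wound so interior edge values are
    # positive (matching the edge >= 0 test in RasterNP.raster_elem)
    vert_0 = np.array([2.0, 1.0])
    vert_1 = np.array([5.0, 8.0])
    vert_2 = np.array([9.0, 2.0])
    # Twice the signed triangle area, used to normalise the weights
    area = edge_function(vert_0, vert_1, vert_2)

    # Pixel-centre sample points over an 11x10 pixel grid
    (grid_x, grid_y) = np.meshgrid(np.arange(0, 11) + 0.5,
                                   np.arange(0, 10) + 0.5)
    points = np.vstack((grid_x.flatten(), grid_y.flatten()))

    # One edge function per edge: all non-negative => point inside triangle
    edges = np.vstack((edge_function(vert_1, vert_2, points),
                       edge_function(vert_2, vert_0, points),
                       edge_function(vert_0, vert_1, points)))
    inside = np.sum(edges >= 0.0, axis=0) == 3

    # Barycentric weights at the inside pixels sum to one and would weight
    # nodal field values exactly as in RasterNP.raster_elem
    weights = edges[:, inside] / area
    print(f"{np.count_nonzero(inside)} of {points.shape[1]} pixels inside")
    print(f"weights sum to 1: {np.allclose(np.sum(weights, axis=0), 1.0)}")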