pyvale-2025.4.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of pyvale might be problematic.

Files changed (157)
  1. pyvale/__init__.py +75 -0
  2. pyvale/core/__init__.py +7 -0
  3. pyvale/core/analyticmeshgen.py +59 -0
  4. pyvale/core/analyticsimdatafactory.py +63 -0
  5. pyvale/core/analyticsimdatagenerator.py +160 -0
  6. pyvale/core/camera.py +146 -0
  7. pyvale/core/cameradata.py +64 -0
  8. pyvale/core/cameradata2d.py +82 -0
  9. pyvale/core/cameratools.py +328 -0
  10. pyvale/core/cython/rastercyth.c +32267 -0
  11. pyvale/core/cython/rastercyth.py +636 -0
  12. pyvale/core/dataset.py +250 -0
  13. pyvale/core/errorcalculator.py +112 -0
  14. pyvale/core/errordriftcalc.py +146 -0
  15. pyvale/core/errorintegrator.py +339 -0
  16. pyvale/core/errorrand.py +614 -0
  17. pyvale/core/errorsysdep.py +331 -0
  18. pyvale/core/errorsysfield.py +407 -0
  19. pyvale/core/errorsysindep.py +905 -0
  20. pyvale/core/experimentsimulator.py +99 -0
  21. pyvale/core/field.py +136 -0
  22. pyvale/core/fieldconverter.py +154 -0
  23. pyvale/core/fieldsampler.py +112 -0
  24. pyvale/core/fieldscalar.py +167 -0
  25. pyvale/core/fieldtensor.py +221 -0
  26. pyvale/core/fieldtransform.py +384 -0
  27. pyvale/core/fieldvector.py +215 -0
  28. pyvale/core/generatorsrandom.py +528 -0
  29. pyvale/core/imagedef2d.py +566 -0
  30. pyvale/core/integratorfactory.py +241 -0
  31. pyvale/core/integratorquadrature.py +192 -0
  32. pyvale/core/integratorrectangle.py +88 -0
  33. pyvale/core/integratorspatial.py +90 -0
  34. pyvale/core/integratortype.py +44 -0
  35. pyvale/core/optimcheckfuncs.py +153 -0
  36. pyvale/core/raster.py +31 -0
  37. pyvale/core/rastercy.py +76 -0
  38. pyvale/core/rasternp.py +604 -0
  39. pyvale/core/rendermesh.py +156 -0
  40. pyvale/core/sensorarray.py +179 -0
  41. pyvale/core/sensorarrayfactory.py +210 -0
  42. pyvale/core/sensorarraypoint.py +280 -0
  43. pyvale/core/sensordata.py +72 -0
  44. pyvale/core/sensordescriptor.py +101 -0
  45. pyvale/core/sensortools.py +143 -0
  46. pyvale/core/visualexpplotter.py +151 -0
  47. pyvale/core/visualimagedef.py +71 -0
  48. pyvale/core/visualimages.py +75 -0
  49. pyvale/core/visualopts.py +180 -0
  50. pyvale/core/visualsimanimator.py +83 -0
  51. pyvale/core/visualsimplotter.py +182 -0
  52. pyvale/core/visualtools.py +81 -0
  53. pyvale/core/visualtraceplotter.py +256 -0
  54. pyvale/data/__init__.py +7 -0
  55. pyvale/data/case13_out.e +0 -0
  56. pyvale/data/case16_out.e +0 -0
  57. pyvale/data/case17_out.e +0 -0
  58. pyvale/data/case18_1_out.e +0 -0
  59. pyvale/data/case18_2_out.e +0 -0
  60. pyvale/data/case18_3_out.e +0 -0
  61. pyvale/data/case25_out.e +0 -0
  62. pyvale/data/case26_out.e +0 -0
  63. pyvale/data/optspeckle_2464x2056px_spec5px_8bit_gblur1px.tiff +0 -0
  64. pyvale/examples/__init__.py +7 -0
  65. pyvale/examples/analyticdatagen/__init__.py +7 -0
  66. pyvale/examples/analyticdatagen/ex1_1_scalarvisualisation.py +38 -0
  67. pyvale/examples/analyticdatagen/ex1_2_scalarcasebuild.py +46 -0
  68. pyvale/examples/analyticdatagen/ex2_1_analyticsensors.py +83 -0
  69. pyvale/examples/ex1_1_thermal2d.py +89 -0
  70. pyvale/examples/ex1_2_thermal2d.py +111 -0
  71. pyvale/examples/ex1_3_thermal2d.py +113 -0
  72. pyvale/examples/ex1_4_thermal2d.py +89 -0
  73. pyvale/examples/ex1_5_thermal2d.py +105 -0
  74. pyvale/examples/ex2_1_thermal3d .py +87 -0
  75. pyvale/examples/ex2_2_thermal3d.py +51 -0
  76. pyvale/examples/ex2_3_thermal3d.py +109 -0
  77. pyvale/examples/ex3_1_displacement2d.py +47 -0
  78. pyvale/examples/ex3_2_displacement2d.py +79 -0
  79. pyvale/examples/ex3_3_displacement2d.py +104 -0
  80. pyvale/examples/ex3_4_displacement2d.py +105 -0
  81. pyvale/examples/ex4_1_strain2d.py +57 -0
  82. pyvale/examples/ex4_2_strain2d.py +79 -0
  83. pyvale/examples/ex4_3_strain2d.py +100 -0
  84. pyvale/examples/ex5_1_multiphysics2d.py +78 -0
  85. pyvale/examples/ex6_1_multiphysics2d_expsim.py +118 -0
  86. pyvale/examples/ex6_2_multiphysics3d_expsim.py +158 -0
  87. pyvale/examples/features/__init__.py +7 -0
  88. pyvale/examples/features/ex_animation_tools_3dmonoblock.py +83 -0
  89. pyvale/examples/features/ex_area_avg.py +89 -0
  90. pyvale/examples/features/ex_calibration_error.py +108 -0
  91. pyvale/examples/features/ex_chain_field_errs.py +141 -0
  92. pyvale/examples/features/ex_field_errs.py +78 -0
  93. pyvale/examples/features/ex_sensor_single_angle_batch.py +110 -0
  94. pyvale/examples/imagedef2d/ex_imagedef2d_todisk.py +86 -0
  95. pyvale/examples/rasterisation/ex_rastenp.py +154 -0
  96. pyvale/examples/rasterisation/ex_rastercyth_oneframe.py +220 -0
  97. pyvale/examples/rasterisation/ex_rastercyth_static_cypara.py +194 -0
  98. pyvale/examples/rasterisation/ex_rastercyth_static_pypara.py +193 -0
  99. pyvale/simcases/case00_HEX20.i +242 -0
  100. pyvale/simcases/case00_HEX27.i +242 -0
  101. pyvale/simcases/case00_TET10.i +242 -0
  102. pyvale/simcases/case00_TET14.i +242 -0
  103. pyvale/simcases/case01.i +101 -0
  104. pyvale/simcases/case02.i +156 -0
  105. pyvale/simcases/case03.i +136 -0
  106. pyvale/simcases/case04.i +181 -0
  107. pyvale/simcases/case05.i +234 -0
  108. pyvale/simcases/case06.i +305 -0
  109. pyvale/simcases/case07.geo +135 -0
  110. pyvale/simcases/case07.i +87 -0
  111. pyvale/simcases/case08.geo +144 -0
  112. pyvale/simcases/case08.i +153 -0
  113. pyvale/simcases/case09.geo +204 -0
  114. pyvale/simcases/case09.i +87 -0
  115. pyvale/simcases/case10.geo +204 -0
  116. pyvale/simcases/case10.i +257 -0
  117. pyvale/simcases/case11.geo +337 -0
  118. pyvale/simcases/case11.i +147 -0
  119. pyvale/simcases/case12.geo +388 -0
  120. pyvale/simcases/case12.i +329 -0
  121. pyvale/simcases/case13.i +140 -0
  122. pyvale/simcases/case14.i +159 -0
  123. pyvale/simcases/case15.geo +337 -0
  124. pyvale/simcases/case15.i +150 -0
  125. pyvale/simcases/case16.geo +391 -0
  126. pyvale/simcases/case16.i +357 -0
  127. pyvale/simcases/case17.geo +135 -0
  128. pyvale/simcases/case17.i +144 -0
  129. pyvale/simcases/case18.i +254 -0
  130. pyvale/simcases/case18_1.i +254 -0
  131. pyvale/simcases/case18_2.i +254 -0
  132. pyvale/simcases/case18_3.i +254 -0
  133. pyvale/simcases/case19.geo +252 -0
  134. pyvale/simcases/case19.i +99 -0
  135. pyvale/simcases/case20.geo +252 -0
  136. pyvale/simcases/case20.i +250 -0
  137. pyvale/simcases/case21.geo +74 -0
  138. pyvale/simcases/case21.i +155 -0
  139. pyvale/simcases/case22.geo +82 -0
  140. pyvale/simcases/case22.i +140 -0
  141. pyvale/simcases/case23.geo +164 -0
  142. pyvale/simcases/case23.i +140 -0
  143. pyvale/simcases/case24.geo +79 -0
  144. pyvale/simcases/case24.i +123 -0
  145. pyvale/simcases/case25.geo +82 -0
  146. pyvale/simcases/case25.i +140 -0
  147. pyvale/simcases/case26.geo +166 -0
  148. pyvale/simcases/case26.i +140 -0
  149. pyvale/simcases/run_1case.py +61 -0
  150. pyvale/simcases/run_all_cases.py +69 -0
  151. pyvale/simcases/run_build_case.py +64 -0
  152. pyvale/simcases/run_example_cases.py +69 -0
  153. pyvale-2025.4.0.dist-info/METADATA +140 -0
  154. pyvale-2025.4.0.dist-info/RECORD +157 -0
  155. pyvale-2025.4.0.dist-info/WHEEL +5 -0
  156. pyvale-2025.4.0.dist-info/licenses/LICENSE +21 -0
  157. pyvale-2025.4.0.dist-info/top_level.txt +1 -0
pyvale/__init__.py ADDED
@@ -0,0 +1,75 @@
+ """
+ `pyvale`: the python validation engine. Used to simulate experimental data from
+ an input multi-physics simulation by explicitly modelling sensors with realistic
+ uncertainties. Useful for experimental design, sensor placement optimisation,
+ testing simulation validation metrics and testing digital shadows/twins.
+ """
+
+ """
+ ================================================================================
+ pyvale: the python validation engine
+ License: MIT
+ Copyright (C) 2025 The Computer Aided Validation Team
+ ================================================================================
+ """
+ # NOTE: this simplifies and decouples how the user calls pyvale from the
+ # underlying project structure: the user should be able to use 'pyvale.'
+ # and access everything in one layer without multiple import dots
+
+ from pyvale.core.dataset import *
+
+ from pyvale.core.field import *
+ from pyvale.core.fieldscalar import *
+ from pyvale.core.fieldvector import *
+ from pyvale.core.fieldtensor import *
+ from pyvale.core.fieldconverter import *
+ from pyvale.core.fieldtransform import *
+
+ from pyvale.core.integratorspatial import *
+ from pyvale.core.integratorquadrature import *
+ from pyvale.core.integratorrectangle import *
+ from pyvale.core.integratorfactory import *
+
+ from pyvale.core.sensordescriptor import *
+ from pyvale.core.sensortools import *
+ from pyvale.core.sensorarray import *
+ from pyvale.core.sensorarrayfactory import *
+ from pyvale.core.sensorarraypoint import *
+ from pyvale.core.sensordata import *
+
+ from pyvale.core.camera import *
+ from pyvale.core.cameradata import *
+ from pyvale.core.cameradata2d import *
+ from pyvale.core.cameratools import *
+
+ import pyvale.core.cython.rastercyth as rastercyth
+ from pyvale.core.rastercy import *
+
+ from pyvale.core.rendermesh import *
+ from pyvale.core.rasternp import *
+
+ from pyvale.core.imagedef2d import *
+
+ from pyvale.core.errorintegrator import *
+ from pyvale.core.errorrand import *
+ from pyvale.core.errorsysindep import *
+ from pyvale.core.errorsysdep import *
+ from pyvale.core.errorsysfield import *
+ from pyvale.core.errordriftcalc import *
+
+ from pyvale.core.generatorsrandom import *
+
+ from pyvale.core.visualopts import *
+ from pyvale.core.visualtools import *
+ from pyvale.core.visualsimplotter import *
+ from pyvale.core.visualsimanimator import *
+ from pyvale.core.visualexpplotter import *
+ from pyvale.core.visualtraceplotter import *
+ from pyvale.core.visualimages import *
+ from pyvale.core.visualimagedef import *
+
+ from pyvale.core.analyticmeshgen import *
+ from pyvale.core.analyticsimdatagenerator import *
+ from pyvale.core.analyticsimdatafactory import *
+
+ from pyvale.core.experimentsimulator import *
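The star imports above flatten the public API: as the note in the file says, everything defined in pyvale.core.* is meant to be reachable one level below pyvale. A minimal usage sketch of that flat namespace, assuming the re-exported names shown in this diff and no restrictive __all__ declarations in the core modules:

import pyvale

# core classes are re-exported at the top level, so no deep import paths are needed
(sim_data, data_gen) = pyvale.AnalyticCaseFactory.scalar_linear_2d()
cam_data = pyvale.CameraData2D()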
pyvale/core/__init__.py ADDED
@@ -0,0 +1,7 @@
+ """
+ ================================================================================
+ pyvale: the python validation engine
+ License: MIT
+ Copyright (C) 2025 The Computer Aided Validation Team
+ ================================================================================
+ """
pyvale/core/analyticmeshgen.py ADDED
@@ -0,0 +1,59 @@
+ """
+ ================================================================================
+ pyvale: the python validation engine
+ License: MIT
+ Copyright (C) 2025 The Computer Aided Validation Team
+ ================================================================================
+ """
+ import numpy as np
+
+ # NOTE: This module is a feature under developement.
+
+ def rectangle_mesh_2d(leng_x: float,
+                       leng_y: float,
+                       n_elem_x: int,
+                       n_elem_y: int) -> tuple[np.ndarray,np.ndarray]:
+
+     n_elems = n_elem_x*n_elem_y
+     n_node_x = n_elem_x+1
+     n_node_y = n_elem_y+1
+     nodes_per_elem = 4
+
+     coord_x = np.linspace(0,leng_x,n_node_x)
+     coord_y = np.linspace(0,leng_y,n_node_y)
+     (coord_grid_x,coord_grid_y) = np.meshgrid(coord_x,coord_y)
+
+     coord_x = np.atleast_2d(coord_grid_x.flatten()).T
+     coord_y = np.atleast_2d(coord_grid_y.flatten()).T
+     coord_z = np.zeros_like(coord_x)
+     coords = np.hstack((coord_x,coord_y,coord_z))
+
+     connect = np.zeros((n_elems,nodes_per_elem)).astype(np.int64)
+     row = 1
+     nn = 0
+     for ee in range(n_elems):
+         nn += 1
+         if nn >= row*n_node_x:
+             row += 1
+             nn += 1
+
+         connect[ee,:] = np.array([nn,nn+1,nn+n_node_x+1,nn+n_node_x])
+     connect = connect.T
+
+     return (coords,connect)
+
+
+ def fill_dims(coord_x: np.ndarray,
+               coord_y: np.ndarray,
+               time: np.ndarray) -> tuple[np.ndarray,np.ndarray,np.ndarray]:
+
+     full_x = np.repeat(np.atleast_2d(coord_x).T,
+                        time.shape[0],
+                        axis=1)
+     full_y = np.repeat(np.atleast_2d(coord_y).T,
+                        time.shape[0],
+                        axis=1)
+     full_time = np.repeat(np.atleast_2d(time),
+                           coord_x.shape[0],
+                           axis=0)
+     return (full_x,full_y,full_time)
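A short illustrative sketch of the two helpers above; the shapes follow directly from the code as written:

import numpy as np
from pyvale.core.analyticmeshgen import rectangle_mesh_2d, fill_dims

# 4 x 3 element quad mesh on a 10.0 x 7.5 rectangle
(coords, connect) = rectangle_mesh_2d(leng_x=10.0, leng_y=7.5,
                                      n_elem_x=4, n_elem_y=3)
# coords: (n_nodes, 3) with z = 0; connect: (4, n_elems) with 1-based node numbers

time = np.linspace(0.0, 1.0, 11)
(full_x, full_y, full_t) = fill_dims(coords[:, 0], coords[:, 1], time)
# each output: (n_nodes, n_time_steps), ready for lambdified field evaluation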
pyvale/core/analyticsimdatafactory.py ADDED
@@ -0,0 +1,63 @@
+ """
+ ================================================================================
+ pyvale: the python validation engine
+ License: MIT
+ Copyright (C) 2025 The Computer Aided Validation Team
+ ================================================================================
+ """
+ import numpy as np
+ import sympy
+ import mooseherder as mh
+ from pyvale.core.analyticsimdatagenerator import (AnalyticCaseData2D,
+                                                   AnalyticSimDataGenerator)
+
+ # NOTE: This module is a feature under developement.
+
+ def standard_case_2d() -> AnalyticCaseData2D:
+     case_data = AnalyticCaseData2D()
+     case_data.length_x = 10.0
+     case_data.length_y = 7.5
+     n_elem_mult = 10
+     case_data.num_elem_x = 4*n_elem_mult
+     case_data.num_elem_y = 3*n_elem_mult
+     case_data.time_steps = np.linspace(0.0,1.0,11)
+     return case_data
+
+
+ class AnalyticCaseFactory:
+
+     @staticmethod
+     def scalar_linear_2d() -> tuple[mh.SimData,AnalyticSimDataGenerator]:
+
+         case_data = standard_case_2d()
+         (sym_y,sym_x,sym_t) = sympy.symbols("y,x,t")
+         case_data.funcs_x = (20.0/case_data.length_x * sym_x,)
+         case_data.funcs_y = (10.0/case_data.length_y * sym_y,)
+         case_data.funcs_t = (sym_t,)
+         case_data.offsets_space = (20.0,)
+         case_data.offsets_time = (0.0,)
+
+         data_gen = AnalyticSimDataGenerator(case_data)
+
+         sim_data = data_gen.generate_sim_data()
+
+         return (sim_data,data_gen)
+
+     @staticmethod
+     def scalar_quadratic_2d() -> tuple[mh.SimData,AnalyticSimDataGenerator]:
+
+         case_data = standard_case_2d()
+         (sym_y,sym_x,sym_t) = sympy.symbols("y,x,t")
+         case_data.funcs_x = (sym_x*(sym_x - case_data.length_x),)
+         case_data.funcs_y = (sym_y*(sym_y - case_data.length_y),)
+         case_data.funcs_t = (sym_t,)
+
+         data_gen = AnalyticSimDataGenerator(case_data)
+
+         sim_data = data_gen.generate_sim_data()
+
+         return (sim_data,data_gen)
+
+
+
+
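Illustrative use of the factory above: each case builder returns both the synthetic mooseherder SimData object and the generator used to produce it, so the analytic "truth" field stays available for comparison against simulated sensor readings:

from pyvale.core.analyticsimdatafactory import AnalyticCaseFactory

(sim_data, data_gen) = AnalyticCaseFactory.scalar_linear_2d()
# nodal values of the default 'scalar' field key on a 40 x 30 element mesh
print(sim_data.node_vars["scalar"].shape)   # (n_nodes, n_time_steps)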
pyvale/core/analyticsimdatagenerator.py ADDED
@@ -0,0 +1,160 @@
+ """
+ ================================================================================
+ pyvale: the python validation engine
+ License: MIT
+ Copyright (C) 2025 The Computer Aided Validation Team
+ ================================================================================
+ """
+ from dataclasses import dataclass
+ import numpy as np
+ import sympy
+ import mooseherder as mh
+ from pyvale.core.analyticmeshgen import rectangle_mesh_2d, fill_dims
+
+ # NOTE: This module is a feature under developement.
+
+ @dataclass
+ class AnalyticCaseData2D:
+     length_x: float = 10.0
+     length_y: float = 7.5
+     num_elem_x: int = 4
+     num_elem_y: int = 3
+     time_steps: np.ndarray | None = None
+     field_keys: tuple[str,...] = ('scalar',)
+     funcs_x: tuple[sympy.Expr,...] | None = None
+     funcs_y: tuple[sympy.Expr,...] | None = None
+     funcs_t: tuple[sympy.Expr,...] | None = None
+     symbols: tuple[sympy.Symbol,...] = (sympy.Symbol("y"),
+                                         sympy.Symbol("x"),
+                                         sympy.Symbol("t"))
+     offsets_space: tuple[float,...] = (0.0,)
+     offsets_time: tuple[float,...] = (0.0,)
+     nodes_per_elem: int = 4
+
+
+ class AnalyticSimDataGenerator:
+     def __init__(self, case_data: AnalyticCaseData2D
+                  ) -> None:
+
+         self._case_data = case_data
+         (self._coords,self._connect) = rectangle_mesh_2d(case_data.length_x,
+                                                          case_data.length_y,
+                                                          case_data.num_elem_x,
+                                                          case_data.num_elem_y)
+
+         self._field_sym_funcs = dict()
+         self._field_lam_funcs = dict()
+         for ii,kk in enumerate(case_data.field_keys):
+             self._field_sym_funcs[kk] = ((case_data.funcs_x[ii] *
+                                           case_data.funcs_y[ii] +
+                                           case_data.offsets_space[ii]) *
+                                          (case_data.funcs_t[ii] +
+                                           case_data.offsets_time[ii]))
+
+             self._field_lam_funcs[kk] = sympy.lambdify(case_data.symbols,
+                                                        self._field_sym_funcs[kk],
+                                                        'numpy')
+         self._field_eval = dict()
+
+
+     def evaluate_field_truth(self,
+                              field_key: str,
+                              coords: np.ndarray,
+                              time_steps: np.ndarray | None = None) -> np.ndarray:
+
+         if time_steps is None:
+             time_steps = self._case_data.time_steps
+
+         (x_eval,y_eval,t_eval) = fill_dims(coords[:,0],
+                                            coords[:,1],
+                                            time_steps)
+
+         field_vals = self._field_lam_funcs[field_key](y_eval,
+                                                       x_eval,
+                                                       t_eval)
+         return field_vals
+
+
+     def evaluate_all_fields_truth(self,
+                                   coords: np.ndarray,
+                                   time_steps: np.ndarray | None = None) -> np.ndarray:
+
+         if time_steps is None:
+             time_steps = self._case_data.time_steps
+
+         (x_eval,y_eval,t_eval) = fill_dims(coords[:,0],
+                                            coords[:,1],
+                                            time_steps)
+
+         eval_comps = dict()
+         for kk in self._case_data.field_keys:
+             eval_comps[kk] = self._field_lam_funcs[kk](y_eval,
+                                                        x_eval,
+                                                        t_eval)
+         return eval_comps
+
+
+     def evaluate_field_at_nodes(self, field_key: str) -> np.ndarray:
+         (x_eval,y_eval,t_eval) = fill_dims(self._coords[:,0],
+                                            self._coords[:,1],
+                                            self._case_data.time_steps)
+
+         self._field_eval[field_key] = self._field_lam_funcs[field_key](y_eval,
+                                                                        x_eval,
+                                                                        t_eval)
+         return self._field_eval[field_key]
+
+     def evaluate_all_fields_at_nodes(self) -> dict[str,np.ndarray]:
+         (x_eval,y_eval,t_eval) = fill_dims(self._coords[:,0],
+                                            self._coords[:,1],
+                                            self._case_data.time_steps)
+         eval_comps = dict()
+         for kk in self._case_data.field_keys:
+             eval_comps[kk] = self._field_lam_funcs[kk](y_eval,
+                                                        x_eval,
+                                                        t_eval)
+         self._field_eval = eval_comps
+         return self._field_eval
+
+
+     def generate_sim_data(self) -> mh.SimData:
+
+         sim_data = mh.SimData()
+         sim_data.num_spat_dims = 2
+         sim_data.time = self._case_data.time_steps
+         sim_data.coords = self._coords
+         sim_data.connect = {'connect1': self._connect}
+
+         if not self._field_eval:
+             self.evaluate_all_fields_at_nodes()
+         sim_data.node_vars = self._field_eval
+
+         return sim_data
+
+
+     def get_visualisation_grid(self,
+                                field_key: str | None = None,
+                                time_step: int = -1
+                                ) -> tuple[np.ndarray,np.ndarray,np.ndarray]:
+
+         if field_key is None:
+             field_key = self._case_data.field_keys[0]
+
+         grid_shape = (self._case_data.num_elem_y+1,
+                       self._case_data.num_elem_x+1)
+
+         grid_x = np.atleast_2d(self._coords[:,0]).T.reshape(grid_shape)
+         grid_y = np.atleast_2d(self._coords[:,1]).T.reshape(grid_shape)
+
+         if not self._field_eval:
+             self.evaluate_all_fields_at_nodes()
+
+         scalar_grid = np.reshape(self._field_eval[field_key][:,time_step],grid_shape)
+
+         return (grid_x,grid_y,scalar_grid)
+
+
+
+
+
+
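The generator can also be driven directly rather than through the factory. A hedged sketch using only the attributes defined on AnalyticCaseData2D above (the function choices here are arbitrary, not a case from the package):

import numpy as np
import sympy
from pyvale.core.analyticsimdatagenerator import (AnalyticCaseData2D,
                                                  AnalyticSimDataGenerator)

(sym_y, sym_x, sym_t) = sympy.symbols("y,x,t")
case = AnalyticCaseData2D(num_elem_x=40,
                          num_elem_y=30,
                          time_steps=np.linspace(0.0, 1.0, 11),
                          funcs_x=(2.0*sym_x,),
                          funcs_y=(sym_y,),
                          funcs_t=(sym_t,),
                          offsets_space=(20.0,),
                          offsets_time=(1.0,))

gen = AnalyticSimDataGenerator(case)
sim_data = gen.generate_sim_data()                            # mooseherder SimData
(grid_x, grid_y, scalar_grid) = gen.get_visualisation_grid()  # last time step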
pyvale/core/camera.py ADDED
@@ -0,0 +1,146 @@
+ """
+ ================================================================================
+ pyvale: the python validation engine
+ License: MIT
+ Copyright (C) 2025 The Computer Aided Validation Team
+ ================================================================================
+ """
+ import numpy as np
+ from pyvale.core.field import IField
+ from pyvale.core.sensorarray import ISensorArray
+ from pyvale.core.errorintegrator import ErrIntegrator
+ from pyvale.core.sensordescriptor import SensorDescriptor
+ from pyvale.core.fieldsampler import sample_field_with_sensor_data
+ from pyvale.core.cameradata2d import CameraData2D
+ from pyvale.core.cameratools import CameraTools
+
+
+ # NOTE: This module is a feature under developement.
+
+
+ class CameraBasic2D(ISensorArray):
+     __slots__ = ("_cam_data","_field","_error_integrator","_descriptor",
+                  "_sensor_data","_truth","_measurements")
+
+     def __init__(self,
+                  cam_data: CameraData2D,
+                  field: IField,
+                  descriptor: SensorDescriptor | None = None,
+                  ) -> None:
+
+         self._cam_data = cam_data
+         self._field = field
+         self._error_integrator = None
+
+         self._descriptor = SensorDescriptor()
+         if descriptor is not None:
+             self._descriptor = descriptor
+
+         self._sensor_data = CameraTools.build_sensor_data_from_camera_2d(self._cam_data)
+
+         self._truth = None
+         self._measurements = None
+
+     #---------------------------------------------------------------------------
+     # Accessors
+     def get_sample_times(self) -> np.ndarray:
+         if self._sensor_data.sample_times is None:
+             #shape=(n_time_steps,)
+             return self._field.get_time_steps()
+
+         #shape=(n_time_steps,)
+         return self._sensor_data.sample_times
+
+     def get_measurement_shape(self) -> tuple[int,int,int]:
+         return (self._sensor_data.positions.shape[0],
+                 len(self._field.get_all_components()),
+                 self.get_sample_times().shape[0])
+
+     def get_image_measurements_shape(self) -> tuple[int,int,int,int]:
+         return (self._cam_data.num_pixels[1],
+                 self._cam_data.num_pixels[0],
+                 len(self._field.get_all_components()),
+                 self.get_sample_times().shape[0])
+
+     def get_field(self) -> IField:
+         return self._field
+
+     def get_descriptor(self) -> SensorDescriptor:
+         return self._descriptor
+
+     #---------------------------------------------------------------------------
+     # Truth calculation from simulation
+     def calc_truth_values(self) -> np.ndarray:
+         self._truth = sample_field_with_sensor_data(self._field,
+                                                     self._sensor_data)
+         #shape=(n_pixels,n_field_comps,n_time_steps)
+         return self._truth
+
+     def get_truth(self) -> np.ndarray:
+         if self._truth is None:
+             self._truth = self.calc_truth_values()
+         #shape=(n_pixels,n_field_comps,n_time_steps)
+         return self._truth
+
+     #---------------------------------------------------------------------------
+     # Errors
+     def set_error_integrator(self, err_int: ErrIntegrator) -> None:
+         self._error_integrator = err_int
+
+     def get_errors_systematic(self) -> np.ndarray | None:
+         if self._error_integrator is None:
+             return None
+
+         #shape=(n_pixels,n_field_comps,n_time_steps)
+         return self._error_integrator.get_errs_systematic()
+
+     def get_errors_random(self) -> np.ndarray | None:
+         if self._error_integrator is None:
+             return None
+
+         #shape=(n_pixels,n_field_comps,n_time_steps)
+         return self._error_integrator.get_errs_random()
+
+     def get_errors_total(self) -> np.ndarray | None:
+         if self._error_integrator is None:
+             return None
+
+         #shape=(n_pixels,n_field_comps,n_time_steps)
+         return self._error_integrator.get_errs_total()
+
+     #---------------------------------------------------------------------------
+     # Measurements
+     def calc_measurements(self) -> np.ndarray:
+         if self._error_integrator is None:
+             self._measurements = self.get_truth()
+         else:
+             self._measurements = self.get_truth() + \
+                 self._error_integrator.calc_errors_from_chain(self.get_truth())
+
+         #shape=(n_pixels,n_field_comps,n_time_steps)
+         return self._measurements
+
+     def get_measurements(self) -> np.ndarray:
+         if self._measurements is None:
+             self._measurements = self.calc_measurements()
+
+         #shape=(n_pixels,n_field_comps,n_time_steps)
+         return self._measurements
+
+     #---------------------------------------------------------------------------
+     # Images
+     def calc_measurement_images(self) -> np.ndarray:
+         #shape=(n_pixels,n_field_comps,n_time_steps)
+         self._measurements = self.calc_measurements()
+         image_shape = self.get_image_measurements_shape()
+         #shape=(n_pixels_y,n_pixels_x,n_field_comps,n_time_steps)
+         return np.reshape(self._measurements,image_shape)
+
+     def get_measurement_images(self) -> np.ndarray:
+         self._measurements = self.get_measurements()
+         image_shape = self.get_image_measurements_shape()
+         #shape=(n_pixels_y,n_pixels_x,n_field_comps,n_time_steps)
+         return np.reshape(self._measurements,image_shape)
+
+
+
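A sketch of wiring the camera to a field. Constructing an IField (for example a scalar field built from a SimData object) is handled by other modules in this wheel (fieldscalar.py and friends) and is not shown here, so the field argument below is a stand-in rather than a confirmed constructor call:

import numpy as np
from pyvale.core.cameradata2d import CameraData2D
from pyvale.core.camera import CameraBasic2D

def sample_camera_measurements(scalar_field):
    # scalar_field: any IField implementation (construction not shown in this diff)
    cam_data = CameraData2D(pixels_count=np.array((250, 200), dtype=np.int32),
                            leng_per_px=0.5e-3)
    camera = CameraBasic2D(cam_data=cam_data, field=scalar_field)
    # shape = (n_pixels, n_field_comps, n_time_steps)
    return camera.get_measurements()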
pyvale/core/cameradata.py ADDED
@@ -0,0 +1,64 @@
+ """
+ ================================================================================
+ pyvale: the python validation engine
+ License: MIT
+ Copyright (C) 2025 The Computer Aided Validation Team
+ ================================================================================
+ """
+ from dataclasses import dataclass, field
+ import numpy as np
+ from scipy.spatial.transform import Rotation
+
+
+ # NOTE: This module is a feature under developement.
+ #
+ # - Camera Local Coords: Pixel positions in pixels/meters
+ # - Global Sim Coords: Transform from local pixel positions to sim coords in meters
+ # - For this transformation we need user to specify center of ROI in sim coords
+ # - There are going to be different ways to specify the camera properties
+
+ # For thin lens theory will need to know some combination of:
+ # - The focal length of the lense
+ # - The working distance
+
+ # Will need to create different ways for the user to automatically position the
+ # camera
+
+
+ @dataclass(slots=True)
+ class CameraData:
+     pixels_num: np.ndarray
+     pixels_size: np.ndarray
+
+     pos_world: np.ndarray
+     rot_world: Rotation
+     roi_cent_world: np.ndarray
+
+     focal_length: float = 50.0
+     sub_samp: int = 2
+
+     back_face_removal: bool = True
+
+     sensor_size: np.ndarray = field(init=False)
+     image_dims: np.ndarray = field(init=False)
+     image_dist: float = field(init=False)
+     cam_to_world_mat: np.ndarray = field(init=False)
+     world_to_cam_mat: np.ndarray = field(init=False)
+
+     def __post_init__(self) -> None:
+         self.image_dist = np.linalg.norm(self.pos_world - self.roi_cent_world)
+         self.sensor_size = self.pixels_num*self.pixels_size
+         self.image_dims = (self.image_dist
+                            *self.sensor_size/self.focal_length)
+
+         self.cam_to_world_mat = np.zeros((4,4))
+         self.cam_to_world_mat[0:3,0:3] = self.rot_world.as_matrix()
+         self.cam_to_world_mat[-1,-1] = 1.0
+         self.cam_to_world_mat[0:3,-1] = self.pos_world
+         self.world_to_cam_mat = np.linalg.inv(self.cam_to_world_mat)
+
+
+
+
+
+
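The derived quantities in __post_init__ follow thin-lens scaling of the sensor onto the imaged region. A small illustrative construction (all values below are arbitrary, chosen only to exercise the dataclass):

import numpy as np
from scipy.spatial.transform import Rotation
from pyvale.core.cameradata import CameraData

cam = CameraData(pixels_num=np.array((2464, 2056)),
                 pixels_size=np.array((3.45e-3, 3.45e-3)),
                 pos_world=np.array((0.0, 0.0, 300.0)),
                 rot_world=Rotation.from_euler("zyx", (0.0, 0.0, 0.0)),
                 roi_cent_world=np.array((0.0, 0.0, 0.0)))

# image_dist   = |pos_world - roi_cent_world| = 300.0
# sensor_size  = pixels_num * pixels_size
# image_dims   = image_dist * sensor_size / focal_length (default 50.0)
# world_to_cam_mat is the inverse of the 4x4 camera-to-world transform
print(cam.image_dims)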
pyvale/core/cameradata2d.py ADDED
@@ -0,0 +1,82 @@
+ """
+ ================================================================================
+ pyvale: the python validation engine
+ License: MIT
+ Copyright (C) 2025 The Computer Aided Validation Team
+ ================================================================================
+ """
+ from dataclasses import dataclass, field
+ import numpy as np
+ from scipy.spatial.transform import Rotation
+
+
+ @dataclass(slots=True)
+ class CameraData2D:
+     pixels_count: np.ndarray | None = None
+     leng_per_px: float = 1.0e-3
+     bits: int = 8
+     roi_cent_world: np.ndarray | None = None
+
+     background: float = 0.5
+     sample_times: np.ndarray | None = None
+     angle: Rotation | None = None
+
+     subsample: int = 2
+
+     field_of_view: np.ndarray = field(init=False)
+     dynamic_range: int = field(init=False)
+
+     world_to_cam: np.ndarray = field(init=False)
+     cam_to_world: np.ndarray = field(init=False)
+
+     def __post_init__(self) -> None:
+
+         if self.pixels_count is None:
+             self.pixels_count = np.array((1000,1000),dtype=np.int32)
+
+         if self.roi_cent_world is None:
+             self.roi_cent_world = np.array((0.0,0.0,0.0),dtype=np.float64)
+
+         self.field_of_view = self.leng_per_px*(self.pixels_count.astype(np.float64))
+         self.dynamic_range = 2**self.bits
+         self.background = self.background*float(self.dynamic_range)
+
+         self.world_to_cam = self.field_of_view/2 - self.roi_cent_world[:-1]
+         self.cam_to_world = -self.world_to_cam
+
+
+ #@dataclass(slots=True)
+ #class CameraData2D:
+ #    #shape=(n_px_X,n_px_Y)
+ #    num_pixels: np.ndarray
+
+ #    # Center location of the region of interest in world coords
+ #    #shape=(3,) as (x,y,z)
+ #    roi_center_world: np.ndarray
+
+ #    # Converts pixels to length units to align with global coords
+ #    leng_per_px: float
+
+ #    #shape=(n_time_steps,)
+ #    sample_times: np.ndarray | None = None
+
+ #    #TODO: this only works for flat surfaces aligned with the axis
+ #    view_axes: tuple[int,int] = (0,1)
+
+ #    bits_sensor: int = 16
+ #    bits_file: int = 16
+
+ #    angle: Rotation | None = None
+
+ #    field_of_view_center_local: np.ndarray = field(init=False)
+ #    field_of_view_local: np.ndarray = field(init=False)
+ #    roi_shift_world: np.ndarray = field(init=False)
+
+ #    def __post_init__(self) -> None:
+ #        self.field_of_view_local = self.num_pixels*self.leng_per_px
+ #        self.field_of_view_center_local = self.field_of_view_local/2
+
+ #        self.roi_shift_world = np.zeros_like(self.roi_center_world)
+ #        for ii,vv in enumerate(self.view_axes):
+ #            self.roi_shift_world[vv] = self.roi_center_world[vv] - \
+ #                self.field_of_view_center_local[ii]
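Every field of the active CameraData2D dataclass has a default, so it can be built with no arguments; the derived values then follow from __post_init__ as sketched here:

from pyvale.core.cameradata2d import CameraData2D

cam = CameraData2D()          # 1000 x 1000 px, 1.0e-3 length units per pixel, 8 bit
print(cam.field_of_view)      # leng_per_px * pixels_count = [1.0, 1.0]
print(cam.dynamic_range)      # 2**bits = 256
print(cam.background)         # 0.5 * 256 = 128.0
print(cam.world_to_cam)       # field_of_view/2 - roi_cent_world[:-1] = [0.5, 0.5]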