pyvale 2025.7.1__cp311-cp311-musllinux_1_2_aarch64.whl → 2025.8.1__cp311-cp311-musllinux_1_2_aarch64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pyvale might be problematic. Click here for more details.

Files changed (186) hide show
  1. pyvale/__init__.py +12 -92
  2. pyvale/blender/__init__.py +23 -0
  3. pyvale/{pyvaleexceptions.py → blender/blenderexceptions.py} +0 -3
  4. pyvale/{blenderlightdata.py → blender/blenderlightdata.py} +3 -3
  5. pyvale/{blendermaterialdata.py → blender/blendermaterialdata.py} +1 -1
  6. pyvale/{blenderrenderdata.py → blender/blenderrenderdata.py} +5 -3
  7. pyvale/{blenderscene.py → blender/blenderscene.py} +33 -30
  8. pyvale/{blendertools.py → blender/blendertools.py} +14 -10
  9. pyvale/dataset/__init__.py +7 -0
  10. pyvale/dataset/dataset.py +443 -0
  11. pyvale/dic/__init__.py +20 -0
  12. pyvale/dic/cpp/dicfourier.cpp +36 -4
  13. pyvale/dic/cpp/dicinterpolator.cpp +56 -1
  14. pyvale/dic/cpp/dicmain.cpp +24 -19
  15. pyvale/dic/cpp/dicoptimizer.cpp +6 -1
  16. pyvale/dic/cpp/dicscanmethod.cpp +32 -32
  17. pyvale/dic/cpp/dicsignalhandler.cpp +16 -0
  18. pyvale/dic/cpp/dicstrain.cpp +7 -3
  19. pyvale/dic/cpp/dicutil.cpp +79 -23
  20. pyvale/{dic2d.py → dic/dic2d.py} +51 -29
  21. pyvale/dic/dic2dconv.py +6 -0
  22. pyvale/{dic2dcpp.cpython-311-aarch64-linux-musl.so → dic/dic2dcpp.cpython-311-aarch64-linux-musl.so} +0 -0
  23. pyvale/{dicchecks.py → dic/dicchecks.py} +28 -16
  24. pyvale/dic/dicdataimport.py +370 -0
  25. pyvale/{dicregionofinterest.py → dic/dicregionofinterest.py} +169 -12
  26. pyvale/{dicresults.py → dic/dicresults.py} +4 -1
  27. pyvale/{dicstrain.py → dic/dicstrain.py} +9 -9
  28. pyvale/examples/basics/{ex1_1_basicscalars_therm2d.py → ex1a_basicscalars_therm2d.py} +12 -9
  29. pyvale/examples/basics/{ex1_2_sensormodel_therm2d.py → ex1b_sensormodel_therm2d.py} +17 -14
  30. pyvale/examples/basics/{ex1_3_customsens_therm3d.py → ex1c_customsens_therm3d.py} +27 -24
  31. pyvale/examples/basics/{ex1_4_basicerrors_therm3d.py → ex1d_basicerrors_therm3d.py} +32 -29
  32. pyvale/examples/basics/{ex1_5_fielderrs_therm3d.py → ex1e_fielderrs_therm3d.py} +19 -15
  33. pyvale/examples/basics/{ex1_6_caliberrs_therm2d.py → ex1f_caliberrs_therm2d.py} +20 -16
  34. pyvale/examples/basics/{ex1_7_spatavg_therm2d.py → ex1g_spatavg_therm2d.py} +19 -16
  35. pyvale/examples/basics/{ex2_1_basicvectors_disp2d.py → ex2a_basicvectors_disp2d.py} +13 -10
  36. pyvale/examples/basics/{ex2_2_vectorsens_disp2d.py → ex2b_vectorsens_disp2d.py} +19 -15
  37. pyvale/examples/basics/{ex2_3_sensangle_disp2d.py → ex2c_sensangle_disp2d.py} +21 -18
  38. pyvale/examples/basics/{ex2_4_chainfielderrs_disp2d.py → ex2d_chainfielderrs_disp2d.py} +31 -29
  39. pyvale/examples/basics/{ex2_5_vectorfields3d_disp3d.py → ex2e_vectorfields3d_disp3d.py} +21 -18
  40. pyvale/examples/basics/{ex3_1_basictensors_strain2d.py → ex3a_basictensors_strain2d.py} +16 -14
  41. pyvale/examples/basics/{ex3_2_tensorsens2d_strain2d.py → ex3b_tensorsens2d_strain2d.py} +17 -14
  42. pyvale/examples/basics/{ex3_3_tensorsens3d_strain3d.py → ex3c_tensorsens3d_strain3d.py} +25 -22
  43. pyvale/examples/basics/{ex4_1_expsim2d_thermmech2d.py → ex4a_expsim2d_thermmech2d.py} +17 -14
  44. pyvale/examples/basics/{ex4_2_expsim3d_thermmech3d.py → ex4b_expsim3d_thermmech3d.py} +37 -34
  45. pyvale/examples/basics/ex5_nomesh.py +24 -0
  46. pyvale/examples/dic/ex1_2_blenderdeformed.py +174 -0
  47. pyvale/examples/dic/ex1_region_of_interest.py +6 -3
  48. pyvale/examples/dic/ex2_plate_with_hole.py +21 -18
  49. pyvale/examples/dic/ex3_plate_with_hole_strain.py +8 -6
  50. pyvale/examples/dic/ex4_dic_blender.py +17 -15
  51. pyvale/examples/dic/ex5_dic_challenge.py +19 -14
  52. pyvale/examples/genanalyticdata/ex1_1_scalarvisualisation.py +16 -10
  53. pyvale/examples/genanalyticdata/ex1_2_scalarcasebuild.py +3 -3
  54. pyvale/examples/genanalyticdata/ex2_1_analyticsensors.py +29 -23
  55. pyvale/examples/genanalyticdata/ex2_2_analyticsensors_nomesh.py +67 -0
  56. pyvale/examples/imagedef2d/ex_imagedef2d_todisk.py +12 -9
  57. pyvale/examples/mooseherder/ex0_create_moose_config.py +65 -0
  58. pyvale/examples/mooseherder/ex1a_modify_moose_input.py +71 -0
  59. pyvale/examples/mooseherder/ex1b_modify_gmsh_input.py +69 -0
  60. pyvale/examples/mooseherder/ex2a_run_moose_once.py +80 -0
  61. pyvale/examples/mooseherder/ex2b_run_gmsh_once.py +64 -0
  62. pyvale/examples/mooseherder/ex2c_run_both_once.py +114 -0
  63. pyvale/examples/mooseherder/ex3_run_moose_seq_para.py +157 -0
  64. pyvale/examples/mooseherder/ex4_run_gmsh-moose_seq_para.py +176 -0
  65. pyvale/examples/mooseherder/ex5_run_moose_paramulti.py +136 -0
  66. pyvale/examples/mooseherder/ex6_read_moose_exodus.py +163 -0
  67. pyvale/examples/mooseherder/ex7a_read_moose_herd_results.py +153 -0
  68. pyvale/examples/mooseherder/ex7b_read_multi_herd_results.py +116 -0
  69. pyvale/examples/mooseherder/ex7c_read_multi_gmshmoose_results.py +127 -0
  70. pyvale/examples/mooseherder/ex7d_readconfig_multi_gmshmoose_results.py +143 -0
  71. pyvale/examples/mooseherder/ex8_read_existing_sweep_output.py +72 -0
  72. pyvale/examples/renderblender/ex1_1_blenderscene.py +24 -20
  73. pyvale/examples/renderblender/ex1_2_blenderdeformed.py +22 -18
  74. pyvale/examples/renderblender/ex2_1_stereoscene.py +36 -29
  75. pyvale/examples/renderblender/ex2_2_stereodeformed.py +26 -20
  76. pyvale/examples/renderblender/ex3_1_blendercalibration.py +24 -17
  77. pyvale/examples/renderrasterisation/ex_rastenp.py +14 -12
  78. pyvale/examples/renderrasterisation/ex_rastercyth_oneframe.py +14 -15
  79. pyvale/examples/renderrasterisation/ex_rastercyth_static_cypara.py +13 -11
  80. pyvale/examples/renderrasterisation/ex_rastercyth_static_pypara.py +13 -11
  81. pyvale/mooseherder/__init__.py +32 -0
  82. pyvale/mooseherder/directorymanager.py +416 -0
  83. pyvale/mooseherder/exodusreader.py +763 -0
  84. pyvale/mooseherder/gmshrunner.py +163 -0
  85. pyvale/mooseherder/inputmodifier.py +236 -0
  86. pyvale/mooseherder/mooseconfig.py +226 -0
  87. pyvale/mooseherder/mooseherd.py +527 -0
  88. pyvale/mooseherder/mooserunner.py +303 -0
  89. pyvale/mooseherder/outputreader.py +22 -0
  90. pyvale/mooseherder/simdata.py +92 -0
  91. pyvale/mooseherder/simrunner.py +31 -0
  92. pyvale/mooseherder/sweepreader.py +356 -0
  93. pyvale/mooseherder/sweeptools.py +76 -0
  94. pyvale/sensorsim/__init__.py +82 -0
  95. pyvale/{camera.py → sensorsim/camera.py} +7 -7
  96. pyvale/{camerasensor.py → sensorsim/camerasensor.py} +7 -7
  97. pyvale/{camerastereo.py → sensorsim/camerastereo.py} +2 -2
  98. pyvale/{cameratools.py → sensorsim/cameratools.py} +4 -4
  99. pyvale/{cython → sensorsim/cython}/rastercyth.c +596 -596
  100. pyvale/sensorsim/cython/rastercyth.cpython-311-aarch64-linux-musl.so +0 -0
  101. pyvale/{cython → sensorsim/cython}/rastercyth.py +16 -17
  102. pyvale/{errorcalculator.py → sensorsim/errorcalculator.py} +1 -1
  103. pyvale/{errorintegrator.py → sensorsim/errorintegrator.py} +2 -2
  104. pyvale/{errorrand.py → sensorsim/errorrand.py} +4 -4
  105. pyvale/{errorsyscalib.py → sensorsim/errorsyscalib.py} +2 -2
  106. pyvale/{errorsysdep.py → sensorsim/errorsysdep.py} +2 -2
  107. pyvale/{errorsysfield.py → sensorsim/errorsysfield.py} +8 -8
  108. pyvale/{errorsysindep.py → sensorsim/errorsysindep.py} +3 -3
  109. pyvale/sensorsim/exceptions.py +8 -0
  110. pyvale/{experimentsimulator.py → sensorsim/experimentsimulator.py} +23 -3
  111. pyvale/{field.py → sensorsim/field.py} +1 -1
  112. pyvale/{fieldconverter.py → sensorsim/fieldconverter.py} +72 -19
  113. pyvale/sensorsim/fieldinterp.py +37 -0
  114. pyvale/sensorsim/fieldinterpmesh.py +124 -0
  115. pyvale/sensorsim/fieldinterppoints.py +55 -0
  116. pyvale/{fieldsampler.py → sensorsim/fieldsampler.py} +4 -4
  117. pyvale/{fieldscalar.py → sensorsim/fieldscalar.py} +28 -24
  118. pyvale/{fieldtensor.py → sensorsim/fieldtensor.py} +33 -31
  119. pyvale/{fieldvector.py → sensorsim/fieldvector.py} +33 -31
  120. pyvale/{imagedef2d.py → sensorsim/imagedef2d.py} +9 -5
  121. pyvale/{integratorfactory.py → sensorsim/integratorfactory.py} +6 -6
  122. pyvale/{integratorquadrature.py → sensorsim/integratorquadrature.py} +3 -3
  123. pyvale/{integratorrectangle.py → sensorsim/integratorrectangle.py} +3 -3
  124. pyvale/{integratorspatial.py → sensorsim/integratorspatial.py} +1 -1
  125. pyvale/{rastercy.py → sensorsim/rastercy.py} +5 -5
  126. pyvale/{rasternp.py → sensorsim/rasternp.py} +9 -9
  127. pyvale/{rasteropts.py → sensorsim/rasteropts.py} +1 -1
  128. pyvale/{renderer.py → sensorsim/renderer.py} +1 -1
  129. pyvale/{rendermesh.py → sensorsim/rendermesh.py} +5 -5
  130. pyvale/{renderscene.py → sensorsim/renderscene.py} +2 -2
  131. pyvale/{sensorarray.py → sensorsim/sensorarray.py} +1 -1
  132. pyvale/{sensorarrayfactory.py → sensorsim/sensorarrayfactory.py} +12 -12
  133. pyvale/{sensorarraypoint.py → sensorsim/sensorarraypoint.py} +10 -8
  134. pyvale/{sensordata.py → sensorsim/sensordata.py} +1 -1
  135. pyvale/{sensortools.py → sensorsim/sensortools.py} +2 -20
  136. pyvale/sensorsim/simtools.py +174 -0
  137. pyvale/{visualexpplotter.py → sensorsim/visualexpplotter.py} +3 -3
  138. pyvale/{visualimages.py → sensorsim/visualimages.py} +2 -2
  139. pyvale/{visualsimanimator.py → sensorsim/visualsimanimator.py} +4 -4
  140. pyvale/{visualsimplotter.py → sensorsim/visualsimplotter.py} +5 -5
  141. pyvale/{visualsimsensors.py → sensorsim/visualsimsensors.py} +12 -12
  142. pyvale/{visualtools.py → sensorsim/visualtools.py} +1 -1
  143. pyvale/{visualtraceplotter.py → sensorsim/visualtraceplotter.py} +2 -2
  144. pyvale/simcases/case17.geo +3 -0
  145. pyvale/simcases/case17.i +4 -4
  146. pyvale/simcases/run_1case.py +1 -9
  147. pyvale/simcases/run_all_cases.py +1 -1
  148. pyvale/simcases/run_build_case.py +1 -1
  149. pyvale/simcases/run_example_cases.py +1 -1
  150. pyvale/verif/__init__.py +12 -0
  151. pyvale/{analyticsimdatafactory.py → verif/analyticsimdatafactory.py} +2 -2
  152. pyvale/{analyticsimdatagenerator.py → verif/analyticsimdatagenerator.py} +2 -2
  153. pyvale/verif/psens.py +125 -0
  154. pyvale/verif/psensconst.py +18 -0
  155. pyvale/verif/psensmech.py +227 -0
  156. pyvale/verif/psensmultiphys.py +187 -0
  157. pyvale/verif/psensscalar.py +347 -0
  158. pyvale/verif/psenstensor.py +123 -0
  159. pyvale/verif/psensvector.py +116 -0
  160. {pyvale-2025.7.1.dist-info → pyvale-2025.8.1.dist-info}/METADATA +6 -7
  161. pyvale-2025.8.1.dist-info/RECORD +263 -0
  162. pyvale/cython/rastercyth.cpython-311-aarch64-linux-musl.so +0 -0
  163. pyvale/dataset.py +0 -415
  164. pyvale/dicdataimport.py +0 -247
  165. pyvale/simtools.py +0 -67
  166. pyvale-2025.7.1.dist-info/RECORD +0 -214
  167. /pyvale/{blendercalibrationdata.py → blender/blendercalibrationdata.py} +0 -0
  168. /pyvale/{dicspecklegenerator.py → dic/dicspecklegenerator.py} +0 -0
  169. /pyvale/{dicspecklequality.py → dic/dicspecklequality.py} +0 -0
  170. /pyvale/{dicstrainresults.py → dic/dicstrainresults.py} +0 -0
  171. /pyvale/{cameradata.py → sensorsim/cameradata.py} +0 -0
  172. /pyvale/{cameradata2d.py → sensorsim/cameradata2d.py} +0 -0
  173. /pyvale/{errordriftcalc.py → sensorsim/errordriftcalc.py} +0 -0
  174. /pyvale/{fieldtransform.py → sensorsim/fieldtransform.py} +0 -0
  175. /pyvale/{generatorsrandom.py → sensorsim/generatorsrandom.py} +0 -0
  176. /pyvale/{imagetools.py → sensorsim/imagetools.py} +0 -0
  177. /pyvale/{integratortype.py → sensorsim/integratortype.py} +0 -0
  178. /pyvale/{output.py → sensorsim/output.py} +0 -0
  179. /pyvale/{raster.py → sensorsim/raster.py} +0 -0
  180. /pyvale/{sensordescriptor.py → sensorsim/sensordescriptor.py} +0 -0
  181. /pyvale/{visualimagedef.py → sensorsim/visualimagedef.py} +0 -0
  182. /pyvale/{visualopts.py → sensorsim/visualopts.py} +0 -0
  183. /pyvale/{analyticmeshgen.py → verif/analyticmeshgen.py} +0 -0
  184. {pyvale-2025.7.1.dist-info → pyvale-2025.8.1.dist-info}/WHEEL +0 -0
  185. {pyvale-2025.7.1.dist-info → pyvale-2025.8.1.dist-info}/licenses/LICENSE +0 -0
  186. {pyvale-2025.7.1.dist-info → pyvale-2025.8.1.dist-info}/top_level.txt +0 -0
@@ -5,7 +5,7 @@
5
5
  # ==============================================================================
6
6
 
7
7
  """
8
- Blender example: Deforming a sample with stereo DIC
8
+ Deforming a sample with stereo DIC
9
9
  ===================================================
10
10
 
11
11
  This example takes you through creating stereo DIC scene, applying deformation
@@ -17,15 +17,19 @@ Test case: mechanical analysis of a plate with a hole loaded in tension.
17
17
  import numpy as np
18
18
  from scipy.spatial.transform import Rotation
19
19
  from pathlib import Path
20
- import pyvale
21
- import mooseherder as mh
20
+
21
+ # Pyvale imports
22
+ import pyvale.sensorsim as sens
23
+ import pyvale.dataset as dataset
24
+ import pyvale.blender as blender
25
+ import pyvale.mooseherder as mh
22
26
 
23
27
  # %%
24
28
  # The simulation results are loaded in here in the same way as the previous
25
29
  # example. As mentioned this `data_path` can be replaced with your own MOOSE
26
30
  # simulation output in exodus format (*.e).
27
31
 
28
- data_path = pyvale.DataSet.render_mechanical_3d_path()
32
+ data_path = dataset.render_mechanical_3d_path()
29
33
  sim_data = mh.ExodusReader(data_path).read_all_sim_data()
30
34
 
31
35
  # %%
@@ -36,11 +40,11 @@ sim_data = mh.ExodusReader(data_path).read_all_sim_data()
36
40
  # 3D deformation test case, displacement is expected in the x, y and z directions.
37
41
 
38
42
  disp_comps = ("disp_x","disp_y", "disp_z")
39
- sim_data = pyvale.scale_length_units(scale=1000.0,
43
+ sim_data = sens.scale_length_units(scale=1000.0,
40
44
  sim_data=sim_data,
41
45
  disp_comps=disp_comps)
42
46
 
43
- render_mesh = pyvale.create_render_mesh(sim_data,
47
+ render_mesh = sens.create_render_mesh(sim_data,
44
48
  ("disp_y","disp_x"),
45
49
  sim_spat_dim=3,
46
50
  field_disp_keys=disp_comps)
@@ -60,7 +64,7 @@ base_dir = Path.cwd()
60
64
  # In order to create a DIC setup in Blender, first a scene must be created.
61
65
  # A scene is initialised using the `BlenderScene` class. All the subsequent
62
66
  # objects and actions necessary are then methods of this class.
63
- scene = pyvale.BlenderScene()
67
+ scene = blender.Scene()
64
68
 
65
69
  # %%
66
70
  # The next thing that can be added to the scene is a sample.
@@ -72,10 +76,10 @@ scene = pyvale.BlenderScene()
72
76
  part = scene.add_part(render_mesh, sim_spat_dim=3)
73
77
  # Set the part location
74
78
  part_location = np.array([0, 0, 0])
75
- pyvale.BlenderTools.move_blender_obj(part=part, pos_world=part_location)
79
+ blender.Tools.move_blender_obj(part=part, pos_world=part_location)
76
80
  # Set part rotation
77
81
  part_rotation = Rotation.from_euler("xyz", [0, 0, 0], degrees=True)
78
- pyvale.BlenderTools.rotate_blender_obj(part=part, rot_world=part_rotation)
82
+ blender.Tools.rotate_blender_obj(part=part, rot_world=part_rotation)
79
83
 
80
84
  # %%
81
85
  # The cameras can then be initialised. A stereo camera system is defined by a
@@ -94,7 +98,7 @@ pyvale.BlenderTools.rotate_blender_obj(part=part, rot_world=part_rotation)
94
98
  # between the two. The cameras can then be added to the Blender scene using the
95
99
  # `add_stereo_system` method.
96
100
 
97
- cam_data_0 = pyvale.CameraData(pixels_num=np.array([1540, 1040]),
101
+ cam_data_0 = sens.CameraData(pixels_num=np.array([1540, 1040]),
98
102
  pixels_size=np.array([0.00345, 0.00345]),
99
103
  pos_world=np.array([0, 0, 400]),
100
104
  rot_world=Rotation.from_euler("xyz", [0, 0, 0]),
@@ -104,13 +108,16 @@ cam_data_0 = pyvale.CameraData(pixels_num=np.array([1540, 1040]),
104
108
  # "faceon" to get a face-on stereo system
105
109
  stereo_setup = "faceon"
106
110
  if stereo_setup == "symmetric":
107
- stereo_system = pyvale.CameraTools.symmetric_stereo_cameras(
111
+ stereo_system = sens.CameraTools.symmetric_stereo_cameras(
108
112
  cam_data_0=cam_data_0,
109
113
  stereo_angle=15.0)
110
- if stereo_setup == "faceon":
111
- stereo_system = pyvale.CameraTools.faceon_stereo_cameras(
114
+ elif stereo_setup == "faceon":
115
+ stereo_system = sens.CameraTools.faceon_stereo_cameras(
112
116
  cam_data_0=cam_data_0,
113
117
  stereo_angle=15.0)
118
+ else:
119
+ raise ValueError(f"Unknown stereo_setup: {stereo_setup}")
120
+
114
121
  cam0, cam1 = scene.add_stereo_system(stereo_system)
115
122
 
116
123
  # %%
@@ -128,7 +135,7 @@ stereo_system.save_calibration(base_dir)
128
135
  # A light can the be added to the scene.
129
136
  # Blender offers different light types: Point, Sun, Spot and Area.
130
137
  # The light can also be moved and rotated like the camera.
131
- light_data = pyvale.BlenderLightData(type=pyvale.BlenderLightType.POINT,
138
+ light_data = blender.LightData(type=blender.LightType.POINT,
132
139
  pos_world=(0, 0, 400),
133
140
  rot_world=Rotation.from_euler("xyz",
134
141
  [0, 0, 0]),
@@ -138,8 +145,8 @@ light.location = (0, 0, 410)
138
145
  light.rotation_euler = (0, 0, 0) # NOTE: The default is an XYZ Euler angle
139
146
 
140
147
  # Apply the speckle pattern
141
- material_data = pyvale.BlenderMaterialData()
142
- speckle_path = pyvale.DataSet.dic_pattern_5mpx_path()
148
+ material_data = blender.MaterialData()
149
+ speckle_path = dataset.dic_pattern_5mpx_path()
143
150
  # NOTE: If you wish to use a bigger camera, you will need to generate a
144
151
  # bigger speckle pattern generator
145
152
 
@@ -154,7 +161,7 @@ speckle_path = pyvale.DataSet.dic_pattern_5mpx_path()
154
161
  # It should be noted that for a bigger camera or sample you may need to generate
155
162
  # a larger speckle pattern.
156
163
 
157
- mm_px_resolution = pyvale.CameraTools.calculate_mm_px_resolution(cam_data_0)
164
+ mm_px_resolution = sens.CameraTools.calculate_mm_px_resolution(cam_data_0)
158
165
  scene.add_speckle(part=part,
159
166
  speckle_path=speckle_path,
160
167
  mat_data=material_data,
@@ -169,7 +176,7 @@ scene.add_speckle(part=part,
169
176
  # the number of threads to use.
170
177
  # Differently to a 2D DIC system, both cameras' parameters must be specified in
171
178
  # the `RenderData` object.
172
- render_data = pyvale.RenderData(cam_data=(stereo_system.cam_data_0,
179
+ render_data = blender.RenderData(cam_data=(stereo_system.cam_data_0,
173
180
  stereo_system.cam_data_1),
174
181
  base_dir=base_dir,
175
182
  threads=8)
@@ -200,5 +207,4 @@ print("Save directory of the image:", (render_data.base_dir / "blenderimages"))
200
207
  # There is also the option to save the scene as a Blender project file.
201
208
  # This file can be opened with the Blender GUI to view the scene.
202
209
 
203
- pyvale.BlenderTools.save_blender_file(base_dir)
204
-
210
+ blender.Tools.save_blender_file(base_dir)
@@ -5,7 +5,7 @@
5
5
  # ==============================================================================
6
6
 
7
7
  """
8
- Blender example: Rendering calibration images
8
+ Rendering calibration images
9
9
  ---------------------------------------------
10
10
 
11
11
  This example takes you through how to render calibration images for a given DIC
@@ -14,7 +14,11 @@ setup.
14
14
  import numpy as np
15
15
  from scipy.spatial.transform import Rotation
16
16
  from pathlib import Path
17
- import pyvale
17
+
18
+ # Pyvale imports
19
+ import pyvale.sensorsim as sens
20
+ import pyvale.blender as blender
21
+ import pyvale.dataset as dataset
18
22
 
19
23
  # %%
20
24
  # Firstly, a save path must be set.
@@ -31,7 +35,7 @@ base_dir = Path.cwd()
31
35
  # In order to create a DIC setup in Blender, first a scene must be created.
32
36
  # A scene is initialised using the `BlenderScene` class. All the subsequent
33
37
  # objects and actions necessary are then methods of this class.
34
- scene = pyvale.BlenderScene()
38
+ scene = blender.Scene()
35
39
 
36
40
  # %%
37
41
  # The next thing to add to the scene is the calibration target.
@@ -57,7 +61,7 @@ target = scene.add_cal_target(target_size=np.array([150, 100, 10]))
57
61
  # are the camera parameters for the first camera, and the desired stereo angle
58
62
  # between the two. The cameras can then be added to the Blender scene using the
59
63
  # `add_stereo_system` method.
60
- cam_data_0 = pyvale.CameraData(pixels_num=np.array([1540, 1040]),
64
+ cam_data_0 = sens.CameraData(pixels_num=np.array([1540, 1040]),
61
65
  pixels_size=np.array([0.00345, 0.00345]),
62
66
  pos_world=np.array([0, 0, 400]),
63
67
  rot_world=Rotation.from_euler("xyz", [0, 0, 0]),
@@ -67,13 +71,16 @@ cam_data_0 = pyvale.CameraData(pixels_num=np.array([1540, 1040]),
67
71
  # "faceon" to get a face-on stereo system
68
72
  stereo_setup = "faceon"
69
73
  if stereo_setup == "symmetric":
70
- stereo_system = pyvale.CameraTools.symmetric_stereo_cameras(
74
+ stereo_system = sens.CameraTools.symmetric_stereo_cameras(
71
75
  cam_data_0=cam_data_0,
72
76
  stereo_angle=15.0)
73
- if stereo_setup == "faceon":
74
- stereo_system = pyvale.CameraTools.faceon_stereo_cameras(
77
+ elif stereo_setup == "faceon":
78
+ stereo_system = sens.CameraTools.faceon_stereo_cameras(
75
79
  cam_data_0=cam_data_0,
76
80
  stereo_angle=15.0)
81
+ else:
82
+ raise ValueError(f"Unknown stereo_setup: {stereo_setup}")
83
+
77
84
  scene.add_stereo_system(stereo_system)
78
85
 
79
86
  # %%
@@ -95,7 +102,7 @@ stereo_system.save_calibration(base_dir)
95
102
  # Blender offers different light types: Point, Sun, Spot and Area.
96
103
  # The light can also be moved and rotated like the camera.
97
104
 
98
- light_data = pyvale.BlenderLightData(type=pyvale.BlenderLightType.POINT,
105
+ light_data = blender.LightData(type=blender.LightType.POINT,
99
106
  pos_world=(0, 0, 200),
100
107
  rot_world=Rotation.from_euler("xyz",
101
108
  [0, 0, 0]),
@@ -112,11 +119,11 @@ light.rotation_euler = (0, 0, 0) # NOTE: The default is an XYZ Euler angle
112
119
  # calibration target pattern will not be scaled in the same way as a speckle
113
120
  # pattern.
114
121
 
115
- material_data = pyvale.BlenderMaterialData()
116
- speckle_path = Path.cwd() / "src/pyvale/data/cal_target.tiff"
117
- mm_px_resolution = pyvale.CameraTools.calculate_mm_px_resolution(cam_data_0)
122
+ material_data = blender.MaterialData()
123
+ cal_target = dataset.cal_target()
124
+ mm_px_resolution = sens.CameraTools.calculate_mm_px_resolution(cam_data_0)
118
125
  scene.add_speckle(part=target,
119
- speckle_path=speckle_path,
126
+ speckle_path=cal_target,
120
127
  mat_data=material_data,
121
128
  mm_px_resolution=mm_px_resolution,
122
129
  cal=True)
@@ -128,7 +135,7 @@ scene.add_speckle(part=target,
128
135
  # rendered.Firstly, all the rendering parameters must be set, including
129
136
  # parameters such as the number of threads to use.
130
137
 
131
- render_data = pyvale.RenderData(cam_data=(stereo_system.cam_data_0,
138
+ render_data = blender.RenderData(cam_data=(stereo_system.cam_data_0,
132
139
  stereo_system.cam_data_1),
133
140
  base_dir=base_dir)
134
141
 
@@ -141,7 +148,7 @@ render_data = pyvale.RenderData(cam_data=(stereo_system.cam_data_0,
141
148
  # passed in they will be initialised from the FOV to cover the whole FOV of the
142
149
  # cameras.
143
150
 
144
- calibration_data = pyvale.CalibrationData(angle_lims=(-10, 10),
151
+ calibration_data = blender.CalibrationData(angle_lims=(-10, 10),
145
152
  angle_step=5,
146
153
  plunge_lims=(-5, 5),
147
154
  plunge_step=5)
@@ -151,7 +158,7 @@ calibration_data = pyvale.CalibrationData(angle_lims=(-10, 10),
151
158
  # rendered before rendering them. The only input that is needed is the
152
159
  # `calibration_data` specified above.
153
160
 
154
- number_calibration_images = pyvale.BlenderTools.number_calibration_images(calibration_data)
161
+ number_calibration_images = blender.Tools.number_calibration_images(calibration_data)
155
162
  print("Number of calibration images to be rendered:", number_calibration_images)
156
163
 
157
164
  # %%
@@ -159,7 +166,7 @@ print("Number of calibration images to be rendered:", number_calibration_images)
159
166
  # calibration target according to movement limits set above, and will also move
160
167
  # the target rigidly across the FOV of the camera, in order to characterise the
161
168
  # entire FOV of the cameras.
162
- pyvale.BlenderTools.render_calibration_images(render_data,
169
+ blender.Tools.render_calibration_images(render_data,
163
170
  calibration_data,
164
171
  target)
165
172
 
@@ -172,4 +179,4 @@ print("Save directory of the images:", (render_data.base_dir / "calimages"))
172
179
  # There is also the option to save the scene as a Blender project file.
173
180
  # This file can be opened with the Blender GUI to view the scene.
174
181
 
175
- pyvale.BlenderTools.save_blender_file(base_dir)
182
+ blender.Tools.save_blender_file(base_dir)
@@ -10,8 +10,10 @@ import time
10
10
  import numpy as np
11
11
  from scipy.spatial.transform import Rotation
12
12
  import matplotlib.pyplot as plt
13
- import mooseherder as mh
14
- import pyvale as pyv
13
+
14
+ # Pyvale imports
15
+ import pyvale.sensorsim as sens
16
+ import pyvale.mooseherder as mh
15
17
 
16
18
  # TODO
17
19
  # - Fix the image averaging function to use cython
@@ -34,17 +36,17 @@ def main() -> None:
34
36
  # This a path to an exodus *.e output file from MOOSE, this can be
35
37
  # replaced with a path to your own simulation file
36
38
  sim_path = Path.home()/"pyvale"/"src"/"pyvale"/"simcases"/"case21_out.e"
37
- #sim_path = pyv.DataSet.render_mechanical_3d_path()
39
+ #sim_path = sens.DataSet.render_mechanical_3d_path()
38
40
 
39
41
  disp_comps = ("disp_x","disp_y","disp_z")
40
42
 
41
43
  sim_data = mh.ExodusReader(sim_path).read_all_sim_data()
42
44
 
43
45
  # Scale m -> mm
44
- sim_data = pyv.scale_length_units(1000.0,sim_data,disp_comps)
46
+ sim_data = sens.scale_length_units(1000.0,sim_data,disp_comps)
45
47
 
46
48
  # Extracts the surface mesh from a full 3d simulation for rendering
47
- render_mesh = pyv.create_render_mesh(sim_data,
49
+ render_mesh = sens.create_render_mesh(sim_data,
48
50
  ("disp_y","disp_x"),
49
51
  sim_spat_dim=3,
50
52
  field_disp_keys=disp_comps)
@@ -59,7 +61,7 @@ def main() -> None:
59
61
  meshes[1].set_pos(np.array((0.0,12.5,0.0)))
60
62
  meshes[1].set_rot(Rotation.from_euler("zyx",(0.0, 0.0, 0.0),degrees=True))
61
63
  meshes[1].fields_disp = None
62
- coords_all = pyv.get_all_coords_world(meshes)
64
+ coords_all = sens.get_all_coords_world(meshes)
63
65
 
64
66
  print()
65
67
  print(80*"-")
@@ -85,7 +87,7 @@ def main() -> None:
85
87
  fov_scale_factor: float = 1.0
86
88
 
87
89
  (roi_pos_world,
88
- cam_pos_world) = pyv.CameraTools.pos_fill_frame(
90
+ cam_pos_world) = sens.CameraTools.pos_fill_frame(
89
91
  coords_world=coords_all,
90
92
  pixel_num=pixel_num,
91
93
  pixel_size=pixel_size,
@@ -94,7 +96,7 @@ def main() -> None:
94
96
  frame_fill=fov_scale_factor,
95
97
  )
96
98
 
97
- cam_data = pyv.CameraData(
99
+ cam_data = sens.CameraData(
98
100
  pixels_num=pixel_num,
99
101
  pixels_size=pixel_size,
100
102
  pos_world=cam_pos_world,
@@ -115,7 +117,7 @@ def main() -> None:
115
117
  print(cam_data.world_to_cam_mat)
116
118
  print(80*"-")
117
119
 
118
- scene = pyv.RenderScene([cam_data,cam_data],meshes)
120
+ scene = sens.RenderScene([cam_data,cam_data],meshes)
119
121
 
120
122
  frames_per_camera = (scene.meshes[0].fields_render.shape[1]
121
123
  *scene.meshes[0].fields_render.shape[2])
@@ -139,8 +141,8 @@ def main() -> None:
139
141
  print(80*"=")
140
142
  print("IN MEM: Raster Loop start")
141
143
 
142
- raster_opts = pyv.RasterOpts(parallel=8)
143
- renderer = pyv.RasterNumpy(raster_opts)
144
+ raster_opts = sens.RasterOpts(parallel=8)
145
+ renderer = sens.RasterNumpy(raster_opts)
144
146
 
145
147
  time_start_loop = time.perf_counter()
146
148
 
@@ -183,7 +185,7 @@ def main() -> None:
183
185
 
184
186
  plot_on = True
185
187
  if plot_on:
186
- (fig,ax) = pyv.plot_field_image(images[1][:,:,-1,0],
188
+ (fig,ax) = sens.plot_field_image(images[1][:,:,-1,0],
187
189
  title_str="Disp. y, [mm]")
188
190
 
189
191
  plt.show()
@@ -8,15 +8,17 @@ import time
8
8
  import numpy as np
9
9
  from scipy.spatial.transform import Rotation
10
10
  import matplotlib.pyplot as plt
11
- import mooseherder as mh
12
- import pyvale as pyv
13
11
  import imagebenchmarks as ib
14
12
 
13
+ # Pyvale imports
14
+ import pyvale.sensorsim as sens
15
+ import pyvale.mooseherder as mh
16
+
15
17
  def main() -> None:
16
18
  print()
17
19
  print(80*"=")
18
20
  print("RASTER CYTHON FILE (should be *.so on Linux):")
19
- print(pyv.rastercyth.__file__)
21
+ print(sens.rastercyth.__file__)
20
22
  print(80*"=")
21
23
  print()
22
24
 
@@ -26,14 +28,14 @@ def main() -> None:
26
28
  # replaced with a path to your own simulation file
27
29
  #sim_path = Path.home()/"pyvale"/"src"/"pyvale"/"simcases"/"case26_out.e"
28
30
 
29
- sim_path = pyv.DataSet.render_simple_block_path()
30
- #sim_path = pyv.DataSet.render_mechanical_3d_path()
31
+ sim_path = sens.DataSet.render_simple_block_path()
32
+ #sim_path = sens.DataSet.render_mechanical_3d_path()
31
33
  sim_data = mh.ExodusReader(sim_path).read_all_sim_data()
32
34
 
33
35
  disp_comps = ("disp_x","disp_y","disp_z")
34
36
 
35
37
  # Scale m -> mm
36
- sim_data = pyv.scale_length_units(1000.0,sim_data,disp_comps)
38
+ sim_data = sens.scale_length_units(1000.0,sim_data,disp_comps)
37
39
 
38
40
  print()
39
41
  print(f"{np.max(np.abs(sim_data.node_vars['disp_x']))=}")
@@ -42,15 +44,12 @@ def main() -> None:
42
44
  print()
43
45
 
44
46
  # Extracts the surface mesh from a full 3d simulation for rendering
45
- render_mesh = pyv.create_render_mesh(sim_data,
47
+ render_mesh = sens.create_render_mesh(sim_data,
46
48
  ("disp_y","disp_x","disp_z"),
47
49
  sim_spat_dim=3,
48
50
  field_disp_keys=disp_comps)
49
51
 
50
52
 
51
-
52
-
53
-
54
53
  pixel_num = np.array((960,1280),dtype=np.int32)
55
54
  pixel_size = np.array((5.3e-3,5.3e-3),dtype=np.float64)
56
55
  focal_leng: float = 50.0
@@ -58,7 +57,7 @@ def main() -> None:
58
57
  fov_scale_factor: float = 1.1
59
58
 
60
59
  (roi_pos_world,
61
- cam_pos_world) = pyv.CameraTools.pos_fill_frame(
60
+ cam_pos_world) = sens.CameraTools.pos_fill_frame(
62
61
  coords_world=render_mesh.coords,
63
62
  pixel_num=pixel_num,
64
63
  pixel_size=pixel_size,
@@ -67,7 +66,7 @@ def main() -> None:
67
66
  frame_fill=fov_scale_factor,
68
67
  )
69
68
 
70
- cam_data = pyv.CameraData(
69
+ cam_data = sens.CameraData(
71
70
  pixels_num=pixel_num,
72
71
  pixels_size=pixel_size,
73
72
  pos_world=cam_pos_world,
@@ -144,7 +143,7 @@ def main() -> None:
144
143
 
145
144
  (image_buffer,
146
145
  depth_buffer,
147
- elems_in_image) = pyv.rastercyth.raster_static_frame(
146
+ elems_in_image) = sens.rastercyth.raster_static_frame(
148
147
  render_mesh.coords,
149
148
  render_mesh.connectivity,
150
149
  fields_render,
@@ -164,7 +163,7 @@ def main() -> None:
164
163
 
165
164
  #===========================================================================
166
165
  # PLOTTING
167
- plot_on = False
166
+ plot_on = True
168
167
  plot_field = 0
169
168
 
170
169
  # depth_to_plot = np.copy(np.asarray(depth_buffer[:,:,plot_frame]))
@@ -173,7 +172,7 @@ def main() -> None:
173
172
  # image_to_plot[depth_buffer[:,:,plot_frame] > 10*cam_data.image_dist] = np.nan
174
173
 
175
174
  if plot_on:
176
- plot_opts = pyv.PlotOptsGeneral()
175
+ plot_opts = sens.PlotOptsGeneral()
177
176
 
178
177
 
179
178
  (fig, ax) = plt.subplots(figsize=plot_opts.single_fig_size_square,
@@ -9,26 +9,28 @@ import time
9
9
  import numpy as np
10
10
  from scipy.spatial.transform import Rotation
11
11
  import matplotlib.pyplot as plt
12
- import mooseherder as mh
13
- import pyvale as pyv
12
+
13
+ # Pyvale imports
14
+ import pyvale.sensorsim as sens
15
+ import pyvale.mooseherder as mh
14
16
 
15
17
  def main() -> None:
16
18
  print()
17
19
  print(80*"=")
18
20
  print("RASTER CYTHON FILE (should be *.so on Linux):")
19
- print(pyv.rastercyth.__file__)
21
+ print(sens.rastercyth.__file__)
20
22
  print(80*"=")
21
23
  print()
22
24
 
23
- sim_path = pyv.DataSet.render_mechanical_3d_path()
24
- #sim_path = pyv.DataSet.render_simple_block_path()
25
+ sim_path = sens.DataSet.render_mechanical_3d_path()
26
+ #sim_path = sens.DataSet.render_simple_block_path()
25
27
  #sim_path = Path.home()/"pyvale"/"src"/"pyvale"/"simcases"/"case26_out.e"
26
28
  sim_data = mh.ExodusReader(sim_path).read_all_sim_data()
27
29
 
28
30
  disp_comps = ("disp_x","disp_y","disp_z")
29
31
 
30
32
  # Scale m -> mm
31
- sim_data = pyv.scale_length_units(1000.0,sim_data,disp_comps)
33
+ sim_data = sens.scale_length_units(1000.0,sim_data,disp_comps)
32
34
 
33
35
  print()
34
36
  print(f"{np.max(np.abs(sim_data.node_vars['disp_x']))=}")
@@ -37,7 +39,7 @@ def main() -> None:
37
39
  print()
38
40
 
39
41
  # Extracts the surface mesh from a full 3d simulation for rendering
40
- render_mesh = pyv.create_render_mesh(sim_data,
42
+ render_mesh = sens.create_render_mesh(sim_data,
41
43
  ("disp_y","disp_x"),
42
44
  sim_spat_dim=3,
43
45
  field_disp_keys=disp_comps)
@@ -66,7 +68,7 @@ def main() -> None:
66
68
  fov_scale_factor: float = 1.1
67
69
 
68
70
  (roi_pos_world,
69
- cam_pos_world) = pyv.CameraTools.pos_fill_frame(
71
+ cam_pos_world) = sens.CameraTools.pos_fill_frame(
70
72
  coords_world=render_mesh.coords,
71
73
  pixel_num=pixel_num,
72
74
  pixel_size=pixel_size,
@@ -75,7 +77,7 @@ def main() -> None:
75
77
  frame_fill=fov_scale_factor,
76
78
  )
77
79
 
78
- cam_data = pyv.CameraData(
80
+ cam_data = sens.CameraData(
79
81
  pixels_num=pixel_num,
80
82
  pixels_size=pixel_size,
81
83
  pos_world=cam_pos_world,
@@ -120,7 +122,7 @@ def main() -> None:
120
122
 
121
123
  (image_buffer,
122
124
  depth_buffer,
123
- elems_in_image) = pyv.rastercyth.raster_static_mesh(
125
+ elems_in_image) = sens.rastercyth.raster_static_mesh(
124
126
  render_mesh,
125
127
  cam_data,
126
128
  0)
@@ -149,7 +151,7 @@ def main() -> None:
149
151
  # image_to_plot[depth_buffer[:,:,plot_frame] > 10*cam_data.image_dist] = np.nan
150
152
 
151
153
  if plot_on:
152
- plot_opts = pyv.PlotOptsGeneral()
154
+ plot_opts = sens.PlotOptsGeneral()
153
155
 
154
156
  for ff in plot_frames:
155
157
  (fig, ax) = plt.subplots(figsize=plot_opts.single_fig_size_square,
@@ -8,29 +8,31 @@ import time
8
8
  import numpy as np
9
9
  from scipy.spatial.transform import Rotation
10
10
  import matplotlib.pyplot as plt
11
- import mooseherder as mh
12
- import pyvale as pyv
11
+
12
+ # Pyvale imports
13
+ import pyvale.sensorsim as sens
14
+ import pyvale.mooseherder as mh
13
15
 
14
16
 
15
17
  def main() -> None:
16
18
  print()
17
19
  print(80*"=")
18
20
  print("RASTER CYTHON FILE (should be *.so on Linux):")
19
- print(pyv.rastercyth.__file__)
21
+ print(sens.rastercyth.__file__)
20
22
  print(80*"=")
21
23
  print()
22
24
 
23
25
  # This a path to an exodus *.e output file from MOOSE, this can be
24
26
  # replaced with a path to your own simulation file
25
- sim_path = pyv.DataSet.render_mechanical_3d_path()
26
- #sim_path = pyv.DataSet.render_simple_block_path()
27
+ sim_path = sens.DataSet.render_mechanical_3d_path()
28
+ #sim_path = sens.DataSet.render_simple_block_path()
27
29
  #sim_path = Path.home()/"pyvale"/"src"/"pyvale"/"simcases"/"case26_out.e"
28
30
  sim_data = mh.ExodusReader(sim_path).read_all_sim_data()
29
31
 
30
32
  disp_comps = ("disp_x","disp_y","disp_z")
31
33
 
32
34
  # Scale m -> mm
33
- sim_data = pyv.scale_length_units(1000.0,sim_data,disp_comps)
35
+ sim_data = sens.scale_length_units(1000.0,sim_data,disp_comps)
34
36
 
35
37
  print()
36
38
  print(f"{np.max(np.abs(sim_data.node_vars['disp_x']))=}")
@@ -39,7 +41,7 @@ def main() -> None:
39
41
  print()
40
42
 
41
43
  # Extracts the surface mesh from a full 3d simulation for rendering
42
- render_mesh = pyv.create_render_mesh(sim_data,
44
+ render_mesh = sens.create_render_mesh(sim_data,
43
45
  ("disp_y","disp_x"),
44
46
  sim_spat_dim=3,
45
47
  field_disp_keys=disp_comps)
@@ -68,7 +70,7 @@ def main() -> None:
68
70
  fov_scale_factor: float = 1.1
69
71
 
70
72
  (roi_pos_world,
71
- cam_pos_world) = pyv.CameraTools.pos_fill_frame(
73
+ cam_pos_world) = sens.CameraTools.pos_fill_frame(
72
74
  coords_world=render_mesh.coords,
73
75
  pixel_num=pixel_num,
74
76
  pixel_size=pixel_size,
@@ -77,7 +79,7 @@ def main() -> None:
77
79
  frame_fill=fov_scale_factor,
78
80
  )
79
81
 
80
- cam_data = pyv.CameraData(
82
+ cam_data = sens.CameraData(
81
83
  pixels_num=pixel_num,
82
84
  pixels_size=pixel_size,
83
85
  pos_world=cam_pos_world,
@@ -122,7 +124,7 @@ def main() -> None:
122
124
 
123
125
  (image_buffer,
124
126
  depth_buffer,
125
- elems_in_images) = pyv.RasterCY.raster_static_mesh(cam_data,
127
+ elems_in_images) = sens.RasterCY.raster_static_mesh(cam_data,
126
128
  render_mesh,
127
129
  16)
128
130
 
@@ -150,7 +152,7 @@ def main() -> None:
150
152
  # image_to_plot[depth_buffer[:,:,plot_frame] > 10*cam_data.image_dist] = np.nan
151
153
 
152
154
  if plot_on:
153
- plot_opts = pyv.PlotOptsGeneral()
155
+ plot_opts = sens.PlotOptsGeneral()
154
156
 
155
157
  for ff in plot_frames:
156
158
  (fig, ax) = plt.subplots(figsize=plot_opts.single_fig_size_square,
#===============================================================================
# pyvale: the python validation engine
# License: MIT
# Copyright (C) 2025 The Computer Aided Validation Team
#===============================================================================
"""Public API of the ``mooseherder`` subpackage.

Re-exports the simulation-herding classes (runners, readers, herd/sweep
management and configuration dataclasses) so callers can simply do
``import pyvale.mooseherder as mh`` and access everything from one namespace.
"""

from .inputmodifier import InputModifier
from .simrunner import SimRunner
from .mooserunner import MooseRunner
from .gmshrunner import GmshRunner
from .exodusreader import ExodusReader
from .mooseherd import MooseHerd
from .directorymanager import DirectoryManager
from .sweepreader import SweepReader
from .simdata import SimData
from .simdata import SimReadConfig
from .mooseconfig import MooseConfig
from .sweeptools import sweep_param_grid


# NOTE: every entry must match an imported name above; the original listed
# the submodule name "mooseherd" instead of the class "MooseHerd", which
# made `from pyvale.mooseherder import *` export the wrong object.
__all__ = ["InputModifier",
           "SimRunner",
           "MooseRunner",
           "GmshRunner",
           "ExodusReader",
           "MooseHerd",
           "DirectoryManager",
           "SweepReader",
           "SimData",
           "SimReadConfig",
           "MooseConfig",
           "sweep_param_grid"]