pyvale 2025.7.2__cp311-cp311-win32.whl → 2025.8.1__cp311-cp311-win32.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pyvale might be problematic. Click here for more details.

Files changed (176) hide show
  1. pyvale/__init__.py +12 -92
  2. pyvale/blender/__init__.py +23 -0
  3. pyvale/{pyvaleexceptions.py → blender/blenderexceptions.py} +0 -3
  4. pyvale/{blenderlightdata.py → blender/blenderlightdata.py} +3 -3
  5. pyvale/{blendermaterialdata.py → blender/blendermaterialdata.py} +1 -1
  6. pyvale/{blenderrenderdata.py → blender/blenderrenderdata.py} +5 -3
  7. pyvale/{blenderscene.py → blender/blenderscene.py} +33 -30
  8. pyvale/{blendertools.py → blender/blendertools.py} +14 -10
  9. pyvale/dataset/__init__.py +7 -0
  10. pyvale/dataset/dataset.py +443 -0
  11. pyvale/dic/__init__.py +20 -0
  12. pyvale/{dic2d.py → dic/dic2d.py} +31 -36
  13. pyvale/dic/dic2dconv.py +6 -0
  14. pyvale/{dic2dcpp.cp311-win32.pyd → dic/dic2dcpp.cp311-win32.pyd} +0 -0
  15. pyvale/{dicdataimport.py → dic/dicdataimport.py} +8 -8
  16. pyvale/{dicregionofinterest.py → dic/dicregionofinterest.py} +1 -1
  17. pyvale/{dicresults.py → dic/dicresults.py} +1 -1
  18. pyvale/{dicstrain.py → dic/dicstrain.py} +9 -9
  19. pyvale/examples/basics/{ex1_1_basicscalars_therm2d.py → ex1a_basicscalars_therm2d.py} +12 -9
  20. pyvale/examples/basics/{ex1_2_sensormodel_therm2d.py → ex1b_sensormodel_therm2d.py} +17 -14
  21. pyvale/examples/basics/{ex1_3_customsens_therm3d.py → ex1c_customsens_therm3d.py} +27 -24
  22. pyvale/examples/basics/{ex1_4_basicerrors_therm3d.py → ex1d_basicerrors_therm3d.py} +32 -29
  23. pyvale/examples/basics/{ex1_5_fielderrs_therm3d.py → ex1e_fielderrs_therm3d.py} +19 -15
  24. pyvale/examples/basics/{ex1_6_caliberrs_therm2d.py → ex1f_caliberrs_therm2d.py} +20 -16
  25. pyvale/examples/basics/{ex1_7_spatavg_therm2d.py → ex1g_spatavg_therm2d.py} +19 -16
  26. pyvale/examples/basics/{ex2_1_basicvectors_disp2d.py → ex2a_basicvectors_disp2d.py} +13 -10
  27. pyvale/examples/basics/{ex2_2_vectorsens_disp2d.py → ex2b_vectorsens_disp2d.py} +19 -15
  28. pyvale/examples/basics/{ex2_3_sensangle_disp2d.py → ex2c_sensangle_disp2d.py} +21 -18
  29. pyvale/examples/basics/{ex2_4_chainfielderrs_disp2d.py → ex2d_chainfielderrs_disp2d.py} +31 -29
  30. pyvale/examples/basics/{ex2_5_vectorfields3d_disp3d.py → ex2e_vectorfields3d_disp3d.py} +21 -18
  31. pyvale/examples/basics/{ex3_1_basictensors_strain2d.py → ex3a_basictensors_strain2d.py} +16 -14
  32. pyvale/examples/basics/{ex3_2_tensorsens2d_strain2d.py → ex3b_tensorsens2d_strain2d.py} +17 -14
  33. pyvale/examples/basics/{ex3_3_tensorsens3d_strain3d.py → ex3c_tensorsens3d_strain3d.py} +25 -22
  34. pyvale/examples/basics/{ex4_1_expsim2d_thermmech2d.py → ex4a_expsim2d_thermmech2d.py} +17 -14
  35. pyvale/examples/basics/{ex4_2_expsim3d_thermmech3d.py → ex4b_expsim3d_thermmech3d.py} +37 -34
  36. pyvale/examples/basics/ex5_nomesh.py +24 -0
  37. pyvale/examples/dic/ex1_2_blenderdeformed.py +174 -0
  38. pyvale/examples/dic/ex1_region_of_interest.py +6 -3
  39. pyvale/examples/dic/ex2_plate_with_hole.py +21 -18
  40. pyvale/examples/dic/ex3_plate_with_hole_strain.py +8 -6
  41. pyvale/examples/dic/ex4_dic_blender.py +17 -15
  42. pyvale/examples/dic/ex5_dic_challenge.py +19 -14
  43. pyvale/examples/genanalyticdata/ex1_1_scalarvisualisation.py +16 -10
  44. pyvale/examples/genanalyticdata/ex1_2_scalarcasebuild.py +3 -3
  45. pyvale/examples/genanalyticdata/ex2_1_analyticsensors.py +29 -23
  46. pyvale/examples/genanalyticdata/ex2_2_analyticsensors_nomesh.py +67 -0
  47. pyvale/examples/imagedef2d/ex_imagedef2d_todisk.py +12 -9
  48. pyvale/examples/mooseherder/ex0_create_moose_config.py +65 -0
  49. pyvale/examples/mooseherder/ex1a_modify_moose_input.py +71 -0
  50. pyvale/examples/mooseherder/ex1b_modify_gmsh_input.py +69 -0
  51. pyvale/examples/mooseherder/ex2a_run_moose_once.py +80 -0
  52. pyvale/examples/mooseherder/ex2b_run_gmsh_once.py +64 -0
  53. pyvale/examples/mooseherder/ex2c_run_both_once.py +114 -0
  54. pyvale/examples/mooseherder/ex3_run_moose_seq_para.py +157 -0
  55. pyvale/examples/mooseherder/ex4_run_gmsh-moose_seq_para.py +176 -0
  56. pyvale/examples/mooseherder/ex5_run_moose_paramulti.py +136 -0
  57. pyvale/examples/mooseherder/ex6_read_moose_exodus.py +163 -0
  58. pyvale/examples/mooseherder/ex7a_read_moose_herd_results.py +153 -0
  59. pyvale/examples/mooseherder/ex7b_read_multi_herd_results.py +116 -0
  60. pyvale/examples/mooseherder/ex7c_read_multi_gmshmoose_results.py +127 -0
  61. pyvale/examples/mooseherder/ex7d_readconfig_multi_gmshmoose_results.py +143 -0
  62. pyvale/examples/mooseherder/ex8_read_existing_sweep_output.py +72 -0
  63. pyvale/examples/renderblender/ex1_1_blenderscene.py +24 -20
  64. pyvale/examples/renderblender/ex1_2_blenderdeformed.py +22 -18
  65. pyvale/examples/renderblender/ex2_1_stereoscene.py +36 -29
  66. pyvale/examples/renderblender/ex2_2_stereodeformed.py +26 -20
  67. pyvale/examples/renderblender/ex3_1_blendercalibration.py +24 -17
  68. pyvale/examples/renderrasterisation/ex_rastenp.py +14 -12
  69. pyvale/examples/renderrasterisation/ex_rastercyth_oneframe.py +14 -15
  70. pyvale/examples/renderrasterisation/ex_rastercyth_static_cypara.py +13 -11
  71. pyvale/examples/renderrasterisation/ex_rastercyth_static_pypara.py +13 -11
  72. pyvale/mooseherder/__init__.py +32 -0
  73. pyvale/mooseherder/directorymanager.py +416 -0
  74. pyvale/mooseherder/exodusreader.py +763 -0
  75. pyvale/mooseherder/gmshrunner.py +163 -0
  76. pyvale/mooseherder/inputmodifier.py +236 -0
  77. pyvale/mooseherder/mooseconfig.py +226 -0
  78. pyvale/mooseherder/mooseherd.py +527 -0
  79. pyvale/mooseherder/mooserunner.py +303 -0
  80. pyvale/mooseherder/outputreader.py +22 -0
  81. pyvale/mooseherder/simdata.py +92 -0
  82. pyvale/mooseherder/simrunner.py +31 -0
  83. pyvale/mooseherder/sweepreader.py +356 -0
  84. pyvale/mooseherder/sweeptools.py +76 -0
  85. pyvale/sensorsim/__init__.py +82 -0
  86. pyvale/{camera.py → sensorsim/camera.py} +7 -7
  87. pyvale/{camerasensor.py → sensorsim/camerasensor.py} +7 -7
  88. pyvale/{camerastereo.py → sensorsim/camerastereo.py} +2 -2
  89. pyvale/{cameratools.py → sensorsim/cameratools.py} +4 -4
  90. pyvale/{cython → sensorsim/cython}/rastercyth.c +596 -596
  91. pyvale/{cython → sensorsim/cython}/rastercyth.cp311-win32.pyd +0 -0
  92. pyvale/{cython → sensorsim/cython}/rastercyth.py +16 -17
  93. pyvale/{errorcalculator.py → sensorsim/errorcalculator.py} +1 -1
  94. pyvale/{errorintegrator.py → sensorsim/errorintegrator.py} +2 -2
  95. pyvale/{errorrand.py → sensorsim/errorrand.py} +4 -4
  96. pyvale/{errorsyscalib.py → sensorsim/errorsyscalib.py} +2 -2
  97. pyvale/{errorsysdep.py → sensorsim/errorsysdep.py} +2 -2
  98. pyvale/{errorsysfield.py → sensorsim/errorsysfield.py} +8 -8
  99. pyvale/{errorsysindep.py → sensorsim/errorsysindep.py} +3 -3
  100. pyvale/sensorsim/exceptions.py +8 -0
  101. pyvale/{experimentsimulator.py → sensorsim/experimentsimulator.py} +23 -3
  102. pyvale/{field.py → sensorsim/field.py} +1 -1
  103. pyvale/{fieldconverter.py → sensorsim/fieldconverter.py} +72 -19
  104. pyvale/sensorsim/fieldinterp.py +37 -0
  105. pyvale/sensorsim/fieldinterpmesh.py +124 -0
  106. pyvale/sensorsim/fieldinterppoints.py +55 -0
  107. pyvale/{fieldsampler.py → sensorsim/fieldsampler.py} +4 -4
  108. pyvale/{fieldscalar.py → sensorsim/fieldscalar.py} +28 -24
  109. pyvale/{fieldtensor.py → sensorsim/fieldtensor.py} +33 -31
  110. pyvale/{fieldvector.py → sensorsim/fieldvector.py} +33 -31
  111. pyvale/{imagedef2d.py → sensorsim/imagedef2d.py} +9 -5
  112. pyvale/{integratorfactory.py → sensorsim/integratorfactory.py} +6 -6
  113. pyvale/{integratorquadrature.py → sensorsim/integratorquadrature.py} +3 -3
  114. pyvale/{integratorrectangle.py → sensorsim/integratorrectangle.py} +3 -3
  115. pyvale/{integratorspatial.py → sensorsim/integratorspatial.py} +1 -1
  116. pyvale/{rastercy.py → sensorsim/rastercy.py} +5 -5
  117. pyvale/{rasternp.py → sensorsim/rasternp.py} +9 -9
  118. pyvale/{rasteropts.py → sensorsim/rasteropts.py} +1 -1
  119. pyvale/{renderer.py → sensorsim/renderer.py} +1 -1
  120. pyvale/{rendermesh.py → sensorsim/rendermesh.py} +5 -5
  121. pyvale/{renderscene.py → sensorsim/renderscene.py} +2 -2
  122. pyvale/{sensorarray.py → sensorsim/sensorarray.py} +1 -1
  123. pyvale/{sensorarrayfactory.py → sensorsim/sensorarrayfactory.py} +12 -12
  124. pyvale/{sensorarraypoint.py → sensorsim/sensorarraypoint.py} +10 -8
  125. pyvale/{sensordata.py → sensorsim/sensordata.py} +1 -1
  126. pyvale/{sensortools.py → sensorsim/sensortools.py} +2 -20
  127. pyvale/sensorsim/simtools.py +174 -0
  128. pyvale/{visualexpplotter.py → sensorsim/visualexpplotter.py} +3 -3
  129. pyvale/{visualimages.py → sensorsim/visualimages.py} +2 -2
  130. pyvale/{visualsimanimator.py → sensorsim/visualsimanimator.py} +4 -4
  131. pyvale/{visualsimplotter.py → sensorsim/visualsimplotter.py} +5 -5
  132. pyvale/{visualsimsensors.py → sensorsim/visualsimsensors.py} +12 -12
  133. pyvale/{visualtools.py → sensorsim/visualtools.py} +1 -1
  134. pyvale/{visualtraceplotter.py → sensorsim/visualtraceplotter.py} +2 -2
  135. pyvale/simcases/case17.geo +3 -0
  136. pyvale/simcases/case17.i +4 -4
  137. pyvale/simcases/run_1case.py +1 -9
  138. pyvale/simcases/run_all_cases.py +1 -1
  139. pyvale/simcases/run_build_case.py +1 -1
  140. pyvale/simcases/run_example_cases.py +1 -1
  141. pyvale/verif/__init__.py +12 -0
  142. pyvale/{analyticsimdatafactory.py → verif/analyticsimdatafactory.py} +2 -2
  143. pyvale/{analyticsimdatagenerator.py → verif/analyticsimdatagenerator.py} +2 -2
  144. pyvale/verif/psens.py +125 -0
  145. pyvale/verif/psensconst.py +18 -0
  146. pyvale/verif/psensmech.py +227 -0
  147. pyvale/verif/psensmultiphys.py +187 -0
  148. pyvale/verif/psensscalar.py +347 -0
  149. pyvale/verif/psenstensor.py +123 -0
  150. pyvale/verif/psensvector.py +116 -0
  151. {pyvale-2025.7.2.dist-info → pyvale-2025.8.1.dist-info}/METADATA +6 -7
  152. pyvale-2025.8.1.dist-info/RECORD +260 -0
  153. pyvale/dataset.py +0 -415
  154. pyvale/simtools.py +0 -67
  155. pyvale-2025.7.2.dist-info/RECORD +0 -212
  156. /pyvale/{blendercalibrationdata.py → blender/blendercalibrationdata.py} +0 -0
  157. /pyvale/{dicchecks.py → dic/dicchecks.py} +0 -0
  158. /pyvale/{dicspecklegenerator.py → dic/dicspecklegenerator.py} +0 -0
  159. /pyvale/{dicspecklequality.py → dic/dicspecklequality.py} +0 -0
  160. /pyvale/{dicstrainresults.py → dic/dicstrainresults.py} +0 -0
  161. /pyvale/{cameradata.py → sensorsim/cameradata.py} +0 -0
  162. /pyvale/{cameradata2d.py → sensorsim/cameradata2d.py} +0 -0
  163. /pyvale/{errordriftcalc.py → sensorsim/errordriftcalc.py} +0 -0
  164. /pyvale/{fieldtransform.py → sensorsim/fieldtransform.py} +0 -0
  165. /pyvale/{generatorsrandom.py → sensorsim/generatorsrandom.py} +0 -0
  166. /pyvale/{imagetools.py → sensorsim/imagetools.py} +0 -0
  167. /pyvale/{integratortype.py → sensorsim/integratortype.py} +0 -0
  168. /pyvale/{output.py → sensorsim/output.py} +0 -0
  169. /pyvale/{raster.py → sensorsim/raster.py} +0 -0
  170. /pyvale/{sensordescriptor.py → sensorsim/sensordescriptor.py} +0 -0
  171. /pyvale/{visualimagedef.py → sensorsim/visualimagedef.py} +0 -0
  172. /pyvale/{visualopts.py → sensorsim/visualopts.py} +0 -0
  173. /pyvale/{analyticmeshgen.py → verif/analyticmeshgen.py} +0 -0
  174. {pyvale-2025.7.2.dist-info → pyvale-2025.8.1.dist-info}/WHEEL +0 -0
  175. {pyvale-2025.7.2.dist-info → pyvale-2025.8.1.dist-info}/licenses/LICENSE +0 -0
  176. {pyvale-2025.7.2.dist-info → pyvale-2025.8.1.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,176 @@
1
+ # ==============================================================================
2
+ # pyvale: the python validation engine
3
+ # License: MIT
4
+ # Copyright (C) 2025 The Computer Aided Validation Team
5
+ # ==============================================================================
6
+
7
+ """
8
+ Running a parameter sweep of a Gmsh and MOOSE simulation
9
+ ================================================================================
10
+
11
+ In this example we will perform a parameter sweep of a gmsh-moose simulation
12
+ chain to demonstrate the capability of the 'herder' workflow manager. Here we
13
+ pass the 'herder' a list of simulation tools that we want to modify inputs for
14
+ (i.e. input modifiers) and then run (i.e. with runners). The simulation tools
15
+ are called sequentially in the order they are in the lists so we will need to
16
+ make sure we call gmsh first to generate the mesh and then call moose to use the
17
+ mesh to run the simulation.
18
+
19
+ As in the previous example we will generate a parameter sweep and then run it
20
+ sequentially and in parallel and compare the run times.
21
+
22
+ **Installing moose**: To run this example you will need to have installed moose
23
+ on your system. As moose supports unix operating systems windows users will need
24
+ to use windows subsystem for linux (WSL). We use the proteus moose build which
25
+ can be found here: https://github.com/aurora-multiphysics/proteus. Build scripts
26
+ for common linux distributions can be found in the 'scripts' directory of the
27
+ repo. You can also create your own moose build using instructions here:
28
+ https://mooseframework.inl.gov/.
29
+
30
+ **Installing gmsh**: For this example you will need to have a gmsh executable
31
+ which can be downloaded and installed from here: https://gmsh.info/#Download
32
+
33
+ We start by importing what we need for this example.
34
+ """
35
+
36
+ from pathlib import Path
37
+ import numpy as np
38
+
39
+ #pyvale imports
40
+ import pyvale.dataset as dataset
41
+ from pyvale.mooseherder import (MooseHerd,
42
+ MooseRunner,
43
+ MooseConfig,
44
+ GmshRunner,
45
+ InputModifier,
46
+ DirectoryManager,
47
+ sweep_param_grid)
48
+
49
+ #%%
50
+ # First we setup our input modifier and runner for gmsh using the same 2D plate
51
+ # with a hole simulation test case from the pyvale simulation library. This is
52
+ # the same as we have seen in previous examples for the gmsh input modifier and
53
+ # running gmsh.
54
+ sim_case: int = 17
55
+
56
+ gmsh_input = dataset.sim_case_gmsh_file_path(case_num=sim_case)
57
+ gmsh_modifier = InputModifier(gmsh_input,"//",";")
58
+
59
+ gmsh_path = Path.home() / "gmsh/bin/gmsh"
60
+ gmsh_runner = GmshRunner(gmsh_path)
61
+ gmsh_runner.set_input_file(gmsh_input)
62
+
63
+
64
+ #%%
65
+ # Next we setup our moose input modifier and runner in the same way as we have
66
+ # done in previous examples. We set our parallelisation options for moose here
67
+ # as well as redirecting stdout to file to save our terminal when we run in
68
+ # parallel.
69
+ moose_input = dataset.sim_case_input_file_path(case_num=sim_case)
70
+ moose_modifier = InputModifier(moose_input,"#","")
71
+
72
+ config = {'main_path': Path.home()/ 'moose',
73
+ 'app_path': Path.home() / 'proteus',
74
+ 'app_name': 'proteus-opt'}
75
+ moose_config = MooseConfig(config)
76
+ moose_runner = MooseRunner(moose_config)
77
+ moose_runner.set_run_opts(n_tasks = 1,
78
+ n_threads = 2,
79
+ redirect_out = True)
80
+
81
+ #%%
82
+ # We can now setup our 'herd' workflow manager making sure we place gmsh ahead
83
+ # of moose in the input modifier and runner lists so it is executed first to
84
+ # generate our mesh. We setup our directories and number of simulations to run
85
+ # in parallel as we have done previously.
86
+ num_para_sims: int = 4
87
+
88
+ sim_runners = [gmsh_runner,moose_runner]
89
+ input_modifiers = [gmsh_modifier,moose_modifier]
90
+ dir_manager = DirectoryManager(n_dirs=num_para_sims)
91
+
92
+ herd = MooseHerd(sim_runners,input_modifiers,dir_manager)
93
+ herd.set_num_para_sims(n_para=num_para_sims)
94
+
95
+
96
+ #%%
97
+ # We need somewhere to run our simulations and store the output so we create our
98
+ # standard pyvale output directory and then we set this as the base directory
99
+ # for our directory manager. We clear any old output directories and then create
100
+ # new ones ready to write our simulation output to.
101
+ output_path = Path.cwd() / "pyvale-output"
102
+ if not output_path.is_dir():
103
+ output_path.mkdir(parents=True, exist_ok=True)
104
+
105
+ dir_manager.set_base_dir(output_path)
106
+ dir_manager.clear_dirs()
107
+ dir_manager.create_dirs()
108
+
109
+ #%%
110
+ # We can now setup our grid parameter sweep to run simulations for all
111
+ # combinations of variables we are interested in. For now we will only change
112
+ # the parameters of our gmsh simulation so we set our moose parameters to None.
113
+ # For the gmsh simulation parameters we pass a dictionary keyed by the variable
114
+ # we want to change and then an iterable object (e.g. tuple, list, numpy array)
115
+ # for all values of the variable we want to run. We can also have an iterable of
116
+ # strings which will insert expressions into the input file for us as shown for
117
+ # the plate height below. If we only want to analyse a single value of a
118
+ # parameter we just pass an iterable with a single element. Note that the list
119
+ # of parameters that we pass to the sweep grid function should be in the same
120
+ # order we intend to call our simulation tools - so gmsh first in this case.
121
+ #
122
+ # After running this example replace the moose params with the following:
123
+ # ``moose_params = {"EMod": (70e9,100e9),"PRatio": (0.3,0.35)}``. This should
124
+ # demonstrate how all combinations of parameters between both gmsh and moose are
125
+ # generated using the sweep grid function.
126
+ gmsh_params = {"plate_width": np.array([150e-3,100e-3]),
127
+ "plate_height": ("plate_width + 100e-3",
128
+ "plate_width + 50e-3")}
129
+ moose_params = None
130
+ params = [gmsh_params,moose_params]
131
+ sweep_params = sweep_param_grid(params)
132
+
133
+ print("\nParameter sweep variables by simulation:")
134
+ for ii,pp in enumerate(sweep_params):
135
+ print(f"Sim: {ii}, Params [gmsh,moose]: {pp}")
136
+
137
+ #%%
138
+ # The run once function of the herd allows us to run a particular single
139
+ # simulation chain from anywhere in the sweep. This is useful for debugging when
140
+ # you want to rerun a single case to see the output or what went wrong. The herd
141
+ # also stores the solution time for each single iteration so we will store this
142
+ # to estimate how long the whole sweep should take when solving sequentially.
143
+ herd.run_once(0,sweep_params[0])
144
+ time_run_once = herd.get_iter_time()
145
+
146
+
147
+ #%%
148
+ # We can run the whole parameter sweep sequentially (one by one) using the run
149
+ # sequential function of the herd. We also store the total solution time
150
+ # for all simulation chains so that we can compare to a parallel run later. Note
151
+ # that it can be beneficial to run sequentially if you are using the herd within
152
+ # another loop or if one of the steps in your simulation chain is expensive and
153
+ # that step needs the computational resource.
154
+ herd.run_sequential(sweep_params)
155
+ time_run_seq = herd.get_sweep_time()
156
+
157
+ #%%
158
+ # Finally, we can run our parameter sweep in parallel. We need a main guard here
159
+ # as we use the multi-processing package. We also store the sweep time for this
160
+ # case to compare our sequential to parallel run time.
161
+ if __name__ == "__main__":
162
+ herd.run_para(sweep_params)
163
+ time_run_para = herd.get_sweep_time()
164
+
165
+ #%%
166
+ # Now that we have run all cases we can compare run times for a single
167
+ # simulation multiplied by the total number of simulations against running the
168
+ # sweep in parallel
169
+ print("-"*80)
170
+ print(f'Run time (one iter) = {time_run_once:.3f} seconds')
171
+ print(f'Est. time (one iter x num sims) = {(time_run_once*len(sweep_params)):.3f} seconds')
172
+ print()
173
+ print(f'Run time (seq) = {time_run_seq:.3f} seconds')
174
+ print(f'Run time (para) = {time_run_para:.3f} seconds')
175
+ print("-"*80)
176
+ print()
@@ -0,0 +1,136 @@
1
+ # ==============================================================================
2
+ # pyvale: the python validation engine
3
+ # License: MIT
4
+ # Copyright (C) 2025 The Computer Aided Validation Team
5
+ # ==============================================================================
6
+
7
+ """
8
+ Using multiple calls to run parallel sweeps
9
+ ================================================================================
10
+
11
+ In this example we demonstrate how multiple repeated calls can be made to run
12
+ 'herd' workflow manager where the simulations do not overwrite each other,
13
+ instead they accumulate within the output directories. If you need the
14
+ simulation output to be cleared after each call to run a sweep sequentially or
15
+ in parallel then you will need to call clear using the directory manager.
16
+
17
+ **Installing moose**: To run this example you will need to have installed moose
18
+ on your system. As moose supports unix operating systems windows users will need
19
+ to use windows subsystem for linux (WSL). We use the proteus moose build which
20
+ can be found here: https://github.com/aurora-multiphysics/proteus. Build scripts
21
+ for common linux distributions can be found in the 'scripts' directory of the
22
+ repo. You can also create your own moose build using instructions here:
23
+ https://mooseframework.inl.gov/.
24
+
25
+ We start by importing what we need for this example. For this example the
26
+ everything at the start is similar to previous examples where we have setup
27
+ our herd workflow manager. So, if you feel confident with things so far then
28
+ skip down to the last section.
29
+ """
30
+
31
+ from pathlib import Path
32
+ import numpy as np
33
+
34
+ #pyvale imports
35
+ import pyvale.dataset as dataset
36
+ from pyvale.mooseherder import (MooseHerd,
37
+ MooseRunner,
38
+ MooseConfig,
39
+ InputModifier,
40
+ DirectoryManager,
41
+ sweep_param_grid)
42
+
43
+ #%%
44
+ # First we setup an input modifier and runner for our moose simulation in
45
+ # exactly the same way as we have done in previous examples.
46
+
47
+ moose_input = dataset.element_case_input_path(dataset.EElemTest.HEX20)
48
+ moose_modifier = InputModifier(moose_input,'#','')
49
+
50
+ config = {'main_path': Path.home()/ 'moose',
51
+ 'app_path': Path.home() / 'proteus',
52
+ 'app_name': 'proteus-opt'}
53
+ moose_config = MooseConfig(config)
54
+
55
+ moose_runner = MooseRunner(moose_config)
56
+ moose_runner.set_run_opts(n_tasks = 1,
57
+ n_threads = 2,
58
+ redirect_out = True)
59
+
60
+ #%%
61
+ # We use the moose input modifier and runner to create our herd workflow manager
62
+ # as we have seen in previous examples.
63
+ num_para_sims: int = 4
64
+ dir_manager = DirectoryManager(n_dirs=num_para_sims)
65
+ herd = MooseHerd([moose_runner],[moose_modifier],dir_manager)
66
+ herd.set_num_para_sims(n_para=num_para_sims)
67
+
68
+ #%%
69
+ # We need somewhere to run our simulations and store the output so we create our
70
+ # standard pyvale output directory as we have done in previous examples and then
71
+ # pass this to our directory manager.
72
+ output_path = Path.cwd() / "pyvale-output"
73
+ if not output_path.is_dir():
74
+ output_path.mkdir(parents=True, exist_ok=True)
75
+
76
+ dir_manager.set_base_dir(output_path)
77
+ dir_manager.reset_dirs()
78
+
79
+ #%%
80
+ # We generate a grid sweep of the variables we are interested in analysing as
81
+ # we have done previously and then print this to the console so we can check
82
+ # all combinations of variables that we want are present and that the total
83
+ # number of simulations makes sense.
84
+
85
+ moose_params = {"nElemX": (2,3),
86
+ "lengX": np.array([10e-3,15e-3]),
87
+ "PRatio":(0.3,)}
88
+ params = [moose_params,]
89
+ sweep_params = sweep_param_grid(params)
90
+
91
+ print("\nParameter sweep variables by simulation:")
92
+ for ii,pp in enumerate(sweep_params):
93
+ print(f"Sim: {ii}, Params [moose,]: {pp}")
94
+
95
+ print()
96
+ print(f"Total simulations = {len(sweep_params)}")
97
+ print()
98
+
99
+ #%%
100
+ # Here we are going to run the parameter sweep a certain number of times and
101
+ # while storing the total time to complete the parameter sweep each time. Once
102
+ # we have completed all the parameter sweeps we print the time taken for each
103
+ # sweep and the average sweep time to the console.
104
+ #
105
+ # Now if we inspect the simulation working directories in our pyvale-output
106
+ # directory we will see that all runs have been stored. If we need to clear
107
+ # the directories in between parallel sweeps we can call
108
+ # ``dir_manager.reset_dirs()`` and then we will only be left with one copy of
109
+ # the sweep output. Retaining all simulations is useful if we want to update
110
+ # the parameters we are passing to the ``run_para`` function every time it
111
+ # is called.
112
+
113
+ num_para_runs: int = 3
114
+
115
+ if __name__ == '__main__':
116
+ sweep_times = np.zeros((num_para_runs,),dtype=np.float64)
117
+ for rr in range(num_para_runs):
118
+ herd.run_para(sweep_params)
119
+ sweep_times[rr] = herd.get_sweep_time()
120
+
121
+
122
+ print(80*"-")
123
+ for ii,ss in enumerate(sweep_times):
124
+ print(f"Sweep {ii} took: {ss:.3f} seconds")
125
+
126
+ print(80*"-")
127
+ print(f"Average sweep time: {np.mean(sweep_times):.3f} seconds")
128
+ print(80*"-")
129
+
130
+
131
+
132
+
133
+
134
+
135
+
136
+
@@ -0,0 +1,163 @@
1
+ # ==============================================================================
2
+ # pyvale: the python validation engine
3
+ # License: MIT
4
+ # Copyright (C) 2025 The Computer Aided Validation Team
5
+ # ==============================================================================
6
+
7
+ """
8
+ Reading exodus output from a MOOSE simulation
9
+ ================================================================================
10
+
11
+ In this example we run a MOOSE simulation and then read its exodus output file.
12
+
13
+ **Installing moose**: To run this example you will need to have installed moose
14
+ on your system. As moose supports unix operating systems windows users will need
15
+ to use windows subsystem for linux (WSL). We use the proteus moose build which
16
+ can be found here: https://github.com/aurora-multiphysics/proteus. Build scripts
17
+ for common linux distributions can be found in the "scripts" directory of the
18
+ repo. You can also create your own moose build using instructions here:
19
+ https://mooseframework.inl.gov/.
20
+
21
+ We start by importing what we need for this example.
22
+ """
23
+
24
+ import time
25
+ import shutil
26
+ from pathlib import Path
27
+ from typing import Any
28
+ import dataclasses
29
+ import numpy as np
30
+
31
+ #pyvale imports
32
+ import pyvale.dataset as dataset
33
+ import pyvale.sensorsim as sens
34
+ from pyvale.mooseherder import (MooseRunner,
35
+ MooseConfig,
36
+ ExodusReader)
37
+
38
+ #%%
39
+ # We also define a helper function that will print all attributes of a
41
+ # dataclass so we can see what it contains. This will be useful when we inspect
41
+ # what our ``SimData`` objects contain.
42
+ def print_attrs(in_obj: Any) -> None:
43
+ for field in dataclasses.fields(in_obj):
44
+ if not field.name.startswith('__'):
45
+ print(f" {field.name}: {field.type}")
46
+
47
+ #%%
48
+ # We need to know where our simulation output is so we are going to create our
49
+ # standard pyvale-output directory, grab our simulation input file from the
50
+ # pyvale simulation library and then copy it to this directory to run. This
51
+ # means the output exodus will appear in the same directory as the input file.
52
+
53
+ output_path = Path.cwd() / "pyvale-output"
54
+ if not output_path.is_dir():
55
+ output_path.mkdir(parents=True, exist_ok=True)
56
+
57
+ moose_file = dataset.element_case_input_path(dataset.EElemTest.HEX20)
58
+ moose_input = output_path / moose_file.name
59
+
60
+ shutil.copyfile(moose_file,moose_input)
61
+
62
+
63
+ #%%
64
+ # We now create our moose runner with the same method we have used in previous
65
+ # examples. We run the simulation and time it, printing the solve time to the
66
+ # terminal.
67
+
68
+ config = {"main_path": Path.home()/ "moose",
69
+ "app_path": Path.home() / "proteus",
70
+ "app_name": "proteus-opt"}
71
+ moose_config = MooseConfig(config)
72
+
73
+ moose_runner = MooseRunner(moose_config)
74
+
75
+ moose_runner.set_run_opts(n_tasks=1, n_threads=4, redirect_out=True)
76
+
77
+ moose_runner.set_input_file(moose_input)
78
+
79
+ start_time = time.perf_counter()
80
+ moose_runner.run()
81
+ run_time = time.perf_counter() - start_time
82
+
83
+ print("-"*80)
84
+ print(f"MOOSE run time = {run_time:.3f} seconds")
85
+ print("-"*80)
86
+
87
+ #%%
88
+ # Now we create our exodus reader by giving it the path to the exodus file we
89
+ # want to read. By default moose creates an exodus output with the input file
90
+ # name with "_out.e" appended.
91
+ output_exodus = output_path / (moose_input.stem + "_out.e")
92
+ exodus_reader = ExodusReader(output_exodus)
93
+
94
+ print("\nReading exodus file with ExodusReader:")
95
+ print(output_exodus.resolve())
96
+ print()
97
+
98
+
99
+ #%%
100
+ # We start with the simplest method which is to just read everything in the
101
+ # exodus file and return it as a ``SimData`` object. In some cases we will not
102
+ # want to read everything into memory so we will show how we can control this
103
+ # next.
104
+ #
105
+ # We then use a helper function to print the sim data fields to the terminal so
106
+ # we can see the structure of the dataclass. The documentation for the
107
+ # ``SimData`` class provides descriptions of each of the fields and we
108
+ # recommend you check this out to understand the terminal output.
109
+ all_sim_data = exodus_reader.read_all_sim_data()
110
+ print("SimData from 'read_all':")
111
+ sens.SimTools.print_sim_data(all_sim_data)
112
+
113
+ #%%
114
+ # We are now going to read specific variables from the exodus output using a
115
+ # read configuration object. There are two ways to create this object. A good
116
+ # way to start is to use the exodus reader to return the read config that would
117
+ # extract all variables from the exodus as shown below. This is helpful as it
118
+ # will pre-populate the 'node', 'elem' and 'glob' variables with the appropriate
119
+ # dictionary keys to read based on what is already in the exodus file.
120
+
121
+ read_config = exodus_reader.get_read_config()
122
+ sens.SimTools.print_dataclass_fields(read_config)
123
+
124
+ #%%
125
+ # We set the 'node_vars' field to None to prevent the nodal variables being read
126
+ # from the exodus file. We then use the read function to return a ``SimData``
127
+ # object and we print the 'node_vars' field to verify that it has not been read.
128
+ read_config.node_vars = None
129
+ sim_data = exodus_reader.read_sim_data(read_config)
130
+
131
+ print("Read config without 'node_vars':")
132
+ print(f" {sim_data.node_vars=}")
133
+ print()
134
+
135
+ #%%
136
+ # We can also turn off reading of the simulation time steps, nodal coordinates
137
+ # and the connectivity table by setting these flags to False in our read config.
138
+ read_config.time = False
139
+ read_config.coords = False
140
+ read_config.connect = False
141
+ sim_data = exodus_reader.read_sim_data(read_config)
142
+
143
+ print("Read config without time, coords and connectivity:")
144
+ print(f" {sim_data.time=}")
145
+ print(f" {sim_data.coords=}")
146
+ print(f" {sim_data.connect=}")
147
+ print()
148
+
149
+
150
+ #%%
151
+ # We can also read specific keyed fields from 'node', 'elem' and 'glob'
152
+ # variables. Here we will read just the x displacement from the node variables.
153
+ # Note that for element variables you also need to specify the block number (
154
+ # corresponding to the number X in the key for the connectivity table in the
155
+ # format "connectivityX" in the connectivity dictionary).
156
+
157
+ read_config.node_vars = ("disp_x",)
158
+ sim_data = exodus_reader.read_sim_data(read_config)
159
+ print("Read config only extracting x displacement:")
160
+ print(f" {sim_data.node_vars.keys()=}")
161
+ print(f" {sim_data.node_vars['disp_x'].shape=}")
162
+ print()
163
+
@@ -0,0 +1,153 @@
1
+ # ==============================================================================
2
+ # pyvale: the python validation engine
3
+ # License: MIT
4
+ # Copyright (C) 2025 The Computer Aided Validation Team
5
+ # ==============================================================================
6
+
7
+ """
8
+ Reading exodus output from a parameter sweep
9
+ ================================================================================
10
+
11
+ In this example we run a parallel sweep of a moose simulation and then read the
12
+ results of the whole sweep using the sweep reader class.
13
+
14
+ **Installing moose**: To run this example you will need to have installed moose
15
+ on your system. As moose only supports unix operating systems, windows users will need
16
+ to use windows subsystem for linux (WSL). We use the proteus moose build which
17
+ can be found here: https://github.com/aurora-multiphysics/proteus. Build scripts
18
+ for common linux distributions can be found in the 'scripts' directory of the
19
+ repo. You can also create your own moose build using instructions here:
20
+ https://mooseframework.inl.gov/.
21
+
22
+ We start by importing what we need for this example.
23
+ """
24
+
25
+ import time
26
+ from pathlib import Path
27
+ import numpy as np
28
+
29
+ #pyvale imports
30
+ import pyvale.sensorsim as sens
31
+ import pyvale.dataset as dataset
32
+ from pyvale.mooseherder import (MooseHerd,
33
+ MooseRunner,
34
+ MooseConfig,
35
+ InputModifier,
36
+ DirectoryManager,
37
+ SweepReader,
38
+ sweep_param_grid)
39
+
40
+ #%%
41
+ # In this first section we setup our herd workflow manager to run a parameter
42
+ # sweep of our moose simulation as we have done in previous examples. We run
43
+ # the parameter sweep and print the solve time to the terminal. The sweep
44
+ # output is in the standard pyvale-output directory we have used previously.
45
+ # In the next section we will read the output from the parameter sweep below.
46
+
47
+ moose_input = dataset.element_case_input_path(dataset.EElemTest.HEX20)
48
+ moose_modifier = InputModifier(moose_input,'#','')
49
+
50
+ config = {'main_path': Path.home()/ 'moose',
51
+ 'app_path': Path.home() / 'proteus',
52
+ 'app_name': 'proteus-opt'}
53
+ moose_config = MooseConfig(config)
54
+
55
+ moose_runner = MooseRunner(moose_config)
56
+ moose_runner.set_run_opts(n_tasks = 1,
57
+ n_threads = 2,
58
+ redirect_out = True)
59
+
60
+ num_para_sims: int = 4
61
+ dir_manager = DirectoryManager(n_dirs=num_para_sims)
62
+ herd = MooseHerd([moose_runner],[moose_modifier],dir_manager)
63
+ herd.set_num_para_sims(n_para=num_para_sims)
64
+
65
+ output_path = Path.cwd() / "pyvale-output"
66
+ if not output_path.is_dir():
67
+ output_path.mkdir(parents=True, exist_ok=True)
68
+
69
+ dir_manager.set_base_dir(output_path)
70
+ dir_manager.reset_dirs()
71
+
72
+ moose_params = {"nElemX": (2,3),
73
+ "lengX": np.array([10e-3,15e-3]),
74
+ "PRatio":(0.3,0.35)}
75
+ params = [moose_params,]
76
+ sweep_params = sweep_param_grid(params)
77
+
78
+
79
+ if __name__ == "__main__":
80
+ print('Running simulation parameter sweep in parallel.')
81
+ herd.run_para(sweep_params)
82
+ print(f'Run time (parallel) = {herd.get_sweep_time():.3f} seconds\n')
83
+
84
+
85
+ #%%
86
+ # To read the sweep output files we first create our sweep reader and pass it
87
+ # the same directory manager we used to run the sweep. We also set the number
88
+ # of simulation outputs to read in parallel when we call the read parallel
89
+ # function. We will see below that we can still read sequentially by calling
90
+ # the read sequential function, and if the simulation output files are small it is
91
+ # likely to be faster to read them sequentially.
92
+ #
93
+ # We first use our sweep reader to inspect the output path keys to find the
94
+ # simulation output files that exist in the simulation working directories.
95
+
96
+ sweep_reader = SweepReader(dir_manager,num_para_read=4)
97
+ output_files = sweep_reader.read_all_output_file_keys()
98
+
99
+ print('Sweep output files (from output_keys.json):')
100
+ for ff in output_files:
101
+ print(f" {ff}")
102
+ print()
103
+
104
+ #%%
105
+ # Using the sweep reader we can read the results for a single simulation chain
106
+ # from the sweep. Our simulation chain only has a single moose simulation so
107
+ # the list of ``SimData`` objects we are returned only has a single element.
108
+ # We then use a helper function to print the contents of the ``SimData`` object
109
+ # to the terminal.
110
+ #
111
+ # We suggest you check out the documentation for the ``SimData`` object as it
112
+ # includes a detailed description of each of the relevant fields you might want
113
+ # to use for post-processing.
114
+ sim_data_list = sweep_reader.read_results_once(output_files[0])
115
+ sens.SimTools.print_sim_data(sim_data_list[0])
116
+
117
+ #%%
118
+ # We can use the sweep reader to read results for each simulation chain in the
119
+ # sweep sequentially with the read sequential function. The sweep results we are
120
+ # returned is a list of list of data classes where the outer list corresponds to
121
+ # the unique simulation chain in the sweep and the inner list corresponds to the
122
+ # results for the particular simulation tool in the chain.
123
+ #
124
+ # After reading the sweep results we print the inner and outer list lengths. We
125
+ # have 8 unique simulation chains with a single simulation tool (moose) in the
126
+ # chain.
127
+ start_time = time.perf_counter()
128
+ sweep_results_seq = sweep_reader.read_sequential()
129
+ read_time_seq = time.perf_counter() - start_time
130
+
131
+ print("Outer list = unique simulation chain:")
132
+ print(f" {len(sweep_results_seq)=}")
133
+ print("Inner list = particular simulation tool in the chain:")
134
+ print(f" {len(sweep_results_seq[0])=}")
135
+ print("'SimData' object for the particular simulation tool:")
136
+ print(f" {type(sweep_results_seq[0][0])=}")
137
+ print()
138
+
139
+ #%%
140
+ # Finally, we read the same sweep in parallel making sure we include a main
141
+ # guard as we will be using the multi-processing package to do this. We then
142
+ # print the read time to the console for the sequential and parallel reads.
143
+ if __name__ == '__main__':
144
+ start_time = time.perf_counter()
145
+ sweep_results_para = sweep_reader.read_results_para()
146
+ read_time_para = time.perf_counter() - start_time
147
+
148
+ print()
149
+ print("-"*80)
150
+ print(f'Read time sequential = {read_time_seq:.6f} seconds')
151
+ print(f'Read time parallel = {read_time_para:.6f} seconds')
152
+ print("-"*80)
153
+ print()