pyvale 2025.7.1-cp311-cp311-win_amd64.whl → 2025.8.1-cp311-cp311-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.



Files changed (186)
  1. pyvale/__init__.py +12 -92
  2. pyvale/blender/__init__.py +23 -0
  3. pyvale/{pyvaleexceptions.py → blender/blenderexceptions.py} +0 -3
  4. pyvale/{blenderlightdata.py → blender/blenderlightdata.py} +3 -3
  5. pyvale/{blendermaterialdata.py → blender/blendermaterialdata.py} +1 -1
  6. pyvale/{blenderrenderdata.py → blender/blenderrenderdata.py} +5 -3
  7. pyvale/{blenderscene.py → blender/blenderscene.py} +33 -30
  8. pyvale/{blendertools.py → blender/blendertools.py} +14 -10
  9. pyvale/dataset/__init__.py +7 -0
  10. pyvale/dataset/dataset.py +443 -0
  11. pyvale/dic/__init__.py +20 -0
  12. pyvale/dic/cpp/dicfourier.cpp +36 -4
  13. pyvale/dic/cpp/dicinterpolator.cpp +56 -1
  14. pyvale/dic/cpp/dicmain.cpp +24 -19
  15. pyvale/dic/cpp/dicoptimizer.cpp +6 -1
  16. pyvale/dic/cpp/dicscanmethod.cpp +32 -32
  17. pyvale/dic/cpp/dicsignalhandler.cpp +16 -0
  18. pyvale/dic/cpp/dicstrain.cpp +7 -3
  19. pyvale/dic/cpp/dicutil.cpp +79 -23
  20. pyvale/{dic2d.py → dic/dic2d.py} +51 -29
  21. pyvale/dic/dic2dconv.py +6 -0
  22. pyvale/dic/dic2dcpp.cp311-win_amd64.pyd +0 -0
  23. pyvale/{dicchecks.py → dic/dicchecks.py} +28 -16
  24. pyvale/dic/dicdataimport.py +370 -0
  25. pyvale/{dicregionofinterest.py → dic/dicregionofinterest.py} +169 -12
  26. pyvale/{dicresults.py → dic/dicresults.py} +4 -1
  27. pyvale/{dicstrain.py → dic/dicstrain.py} +9 -9
  28. pyvale/examples/basics/{ex1_1_basicscalars_therm2d.py → ex1a_basicscalars_therm2d.py} +12 -9
  29. pyvale/examples/basics/{ex1_2_sensormodel_therm2d.py → ex1b_sensormodel_therm2d.py} +17 -14
  30. pyvale/examples/basics/{ex1_3_customsens_therm3d.py → ex1c_customsens_therm3d.py} +27 -24
  31. pyvale/examples/basics/{ex1_4_basicerrors_therm3d.py → ex1d_basicerrors_therm3d.py} +32 -29
  32. pyvale/examples/basics/{ex1_5_fielderrs_therm3d.py → ex1e_fielderrs_therm3d.py} +19 -15
  33. pyvale/examples/basics/{ex1_6_caliberrs_therm2d.py → ex1f_caliberrs_therm2d.py} +20 -16
  34. pyvale/examples/basics/{ex1_7_spatavg_therm2d.py → ex1g_spatavg_therm2d.py} +19 -16
  35. pyvale/examples/basics/{ex2_1_basicvectors_disp2d.py → ex2a_basicvectors_disp2d.py} +13 -10
  36. pyvale/examples/basics/{ex2_2_vectorsens_disp2d.py → ex2b_vectorsens_disp2d.py} +19 -15
  37. pyvale/examples/basics/{ex2_3_sensangle_disp2d.py → ex2c_sensangle_disp2d.py} +21 -18
  38. pyvale/examples/basics/{ex2_4_chainfielderrs_disp2d.py → ex2d_chainfielderrs_disp2d.py} +31 -29
  39. pyvale/examples/basics/{ex2_5_vectorfields3d_disp3d.py → ex2e_vectorfields3d_disp3d.py} +21 -18
  40. pyvale/examples/basics/{ex3_1_basictensors_strain2d.py → ex3a_basictensors_strain2d.py} +16 -14
  41. pyvale/examples/basics/{ex3_2_tensorsens2d_strain2d.py → ex3b_tensorsens2d_strain2d.py} +17 -14
  42. pyvale/examples/basics/{ex3_3_tensorsens3d_strain3d.py → ex3c_tensorsens3d_strain3d.py} +25 -22
  43. pyvale/examples/basics/{ex4_1_expsim2d_thermmech2d.py → ex4a_expsim2d_thermmech2d.py} +17 -14
  44. pyvale/examples/basics/{ex4_2_expsim3d_thermmech3d.py → ex4b_expsim3d_thermmech3d.py} +37 -34
  45. pyvale/examples/basics/ex5_nomesh.py +24 -0
  46. pyvale/examples/dic/ex1_2_blenderdeformed.py +174 -0
  47. pyvale/examples/dic/ex1_region_of_interest.py +6 -3
  48. pyvale/examples/dic/ex2_plate_with_hole.py +21 -18
  49. pyvale/examples/dic/ex3_plate_with_hole_strain.py +8 -6
  50. pyvale/examples/dic/ex4_dic_blender.py +17 -15
  51. pyvale/examples/dic/ex5_dic_challenge.py +19 -14
  52. pyvale/examples/genanalyticdata/ex1_1_scalarvisualisation.py +16 -10
  53. pyvale/examples/genanalyticdata/ex1_2_scalarcasebuild.py +3 -3
  54. pyvale/examples/genanalyticdata/ex2_1_analyticsensors.py +29 -23
  55. pyvale/examples/genanalyticdata/ex2_2_analyticsensors_nomesh.py +67 -0
  56. pyvale/examples/imagedef2d/ex_imagedef2d_todisk.py +12 -9
  57. pyvale/examples/mooseherder/ex0_create_moose_config.py +65 -0
  58. pyvale/examples/mooseherder/ex1a_modify_moose_input.py +71 -0
  59. pyvale/examples/mooseherder/ex1b_modify_gmsh_input.py +69 -0
  60. pyvale/examples/mooseherder/ex2a_run_moose_once.py +80 -0
  61. pyvale/examples/mooseherder/ex2b_run_gmsh_once.py +64 -0
  62. pyvale/examples/mooseherder/ex2c_run_both_once.py +114 -0
  63. pyvale/examples/mooseherder/ex3_run_moose_seq_para.py +157 -0
  64. pyvale/examples/mooseherder/ex4_run_gmsh-moose_seq_para.py +176 -0
  65. pyvale/examples/mooseherder/ex5_run_moose_paramulti.py +136 -0
  66. pyvale/examples/mooseherder/ex6_read_moose_exodus.py +163 -0
  67. pyvale/examples/mooseherder/ex7a_read_moose_herd_results.py +153 -0
  68. pyvale/examples/mooseherder/ex7b_read_multi_herd_results.py +116 -0
  69. pyvale/examples/mooseherder/ex7c_read_multi_gmshmoose_results.py +127 -0
  70. pyvale/examples/mooseherder/ex7d_readconfig_multi_gmshmoose_results.py +143 -0
  71. pyvale/examples/mooseherder/ex8_read_existing_sweep_output.py +72 -0
  72. pyvale/examples/renderblender/ex1_1_blenderscene.py +24 -20
  73. pyvale/examples/renderblender/ex1_2_blenderdeformed.py +22 -18
  74. pyvale/examples/renderblender/ex2_1_stereoscene.py +36 -29
  75. pyvale/examples/renderblender/ex2_2_stereodeformed.py +26 -20
  76. pyvale/examples/renderblender/ex3_1_blendercalibration.py +24 -17
  77. pyvale/examples/renderrasterisation/ex_rastenp.py +14 -12
  78. pyvale/examples/renderrasterisation/ex_rastercyth_oneframe.py +14 -15
  79. pyvale/examples/renderrasterisation/ex_rastercyth_static_cypara.py +13 -11
  80. pyvale/examples/renderrasterisation/ex_rastercyth_static_pypara.py +13 -11
  81. pyvale/mooseherder/__init__.py +32 -0
  82. pyvale/mooseherder/directorymanager.py +416 -0
  83. pyvale/mooseherder/exodusreader.py +763 -0
  84. pyvale/mooseherder/gmshrunner.py +163 -0
  85. pyvale/mooseherder/inputmodifier.py +236 -0
  86. pyvale/mooseherder/mooseconfig.py +226 -0
  87. pyvale/mooseherder/mooseherd.py +527 -0
  88. pyvale/mooseherder/mooserunner.py +303 -0
  89. pyvale/mooseherder/outputreader.py +22 -0
  90. pyvale/mooseherder/simdata.py +92 -0
  91. pyvale/mooseherder/simrunner.py +31 -0
  92. pyvale/mooseherder/sweepreader.py +356 -0
  93. pyvale/mooseherder/sweeptools.py +76 -0
  94. pyvale/sensorsim/__init__.py +82 -0
  95. pyvale/{camera.py → sensorsim/camera.py} +7 -7
  96. pyvale/{camerasensor.py → sensorsim/camerasensor.py} +7 -7
  97. pyvale/{camerastereo.py → sensorsim/camerastereo.py} +2 -2
  98. pyvale/{cameratools.py → sensorsim/cameratools.py} +4 -4
  99. pyvale/{cython → sensorsim/cython}/rastercyth.c +596 -596
  100. pyvale/{cython → sensorsim/cython}/rastercyth.cp311-win_amd64.pyd +0 -0
  101. pyvale/{cython → sensorsim/cython}/rastercyth.py +16 -17
  102. pyvale/{errorcalculator.py → sensorsim/errorcalculator.py} +1 -1
  103. pyvale/{errorintegrator.py → sensorsim/errorintegrator.py} +2 -2
  104. pyvale/{errorrand.py → sensorsim/errorrand.py} +4 -4
  105. pyvale/{errorsyscalib.py → sensorsim/errorsyscalib.py} +2 -2
  106. pyvale/{errorsysdep.py → sensorsim/errorsysdep.py} +2 -2
  107. pyvale/{errorsysfield.py → sensorsim/errorsysfield.py} +8 -8
  108. pyvale/{errorsysindep.py → sensorsim/errorsysindep.py} +3 -3
  109. pyvale/sensorsim/exceptions.py +8 -0
  110. pyvale/{experimentsimulator.py → sensorsim/experimentsimulator.py} +23 -3
  111. pyvale/{field.py → sensorsim/field.py} +1 -1
  112. pyvale/{fieldconverter.py → sensorsim/fieldconverter.py} +72 -19
  113. pyvale/sensorsim/fieldinterp.py +37 -0
  114. pyvale/sensorsim/fieldinterpmesh.py +124 -0
  115. pyvale/sensorsim/fieldinterppoints.py +55 -0
  116. pyvale/{fieldsampler.py → sensorsim/fieldsampler.py} +4 -4
  117. pyvale/{fieldscalar.py → sensorsim/fieldscalar.py} +28 -24
  118. pyvale/{fieldtensor.py → sensorsim/fieldtensor.py} +33 -31
  119. pyvale/{fieldvector.py → sensorsim/fieldvector.py} +33 -31
  120. pyvale/{imagedef2d.py → sensorsim/imagedef2d.py} +9 -5
  121. pyvale/{integratorfactory.py → sensorsim/integratorfactory.py} +6 -6
  122. pyvale/{integratorquadrature.py → sensorsim/integratorquadrature.py} +3 -3
  123. pyvale/{integratorrectangle.py → sensorsim/integratorrectangle.py} +3 -3
  124. pyvale/{integratorspatial.py → sensorsim/integratorspatial.py} +1 -1
  125. pyvale/{rastercy.py → sensorsim/rastercy.py} +5 -5
  126. pyvale/{rasternp.py → sensorsim/rasternp.py} +9 -9
  127. pyvale/{rasteropts.py → sensorsim/rasteropts.py} +1 -1
  128. pyvale/{renderer.py → sensorsim/renderer.py} +1 -1
  129. pyvale/{rendermesh.py → sensorsim/rendermesh.py} +5 -5
  130. pyvale/{renderscene.py → sensorsim/renderscene.py} +2 -2
  131. pyvale/{sensorarray.py → sensorsim/sensorarray.py} +1 -1
  132. pyvale/{sensorarrayfactory.py → sensorsim/sensorarrayfactory.py} +12 -12
  133. pyvale/{sensorarraypoint.py → sensorsim/sensorarraypoint.py} +10 -8
  134. pyvale/{sensordata.py → sensorsim/sensordata.py} +1 -1
  135. pyvale/{sensortools.py → sensorsim/sensortools.py} +2 -20
  136. pyvale/sensorsim/simtools.py +174 -0
  137. pyvale/{visualexpplotter.py → sensorsim/visualexpplotter.py} +3 -3
  138. pyvale/{visualimages.py → sensorsim/visualimages.py} +2 -2
  139. pyvale/{visualsimanimator.py → sensorsim/visualsimanimator.py} +4 -4
  140. pyvale/{visualsimplotter.py → sensorsim/visualsimplotter.py} +5 -5
  141. pyvale/{visualsimsensors.py → sensorsim/visualsimsensors.py} +12 -12
  142. pyvale/{visualtools.py → sensorsim/visualtools.py} +1 -1
  143. pyvale/{visualtraceplotter.py → sensorsim/visualtraceplotter.py} +2 -2
  144. pyvale/simcases/case17.geo +3 -0
  145. pyvale/simcases/case17.i +4 -4
  146. pyvale/simcases/run_1case.py +1 -9
  147. pyvale/simcases/run_all_cases.py +1 -1
  148. pyvale/simcases/run_build_case.py +1 -1
  149. pyvale/simcases/run_example_cases.py +1 -1
  150. pyvale/verif/__init__.py +12 -0
  151. pyvale/{analyticsimdatafactory.py → verif/analyticsimdatafactory.py} +2 -2
  152. pyvale/{analyticsimdatagenerator.py → verif/analyticsimdatagenerator.py} +2 -2
  153. pyvale/verif/psens.py +125 -0
  154. pyvale/verif/psensconst.py +18 -0
  155. pyvale/verif/psensmech.py +227 -0
  156. pyvale/verif/psensmultiphys.py +187 -0
  157. pyvale/verif/psensscalar.py +347 -0
  158. pyvale/verif/psenstensor.py +123 -0
  159. pyvale/verif/psensvector.py +116 -0
  160. {pyvale-2025.7.1.dist-info → pyvale-2025.8.1.dist-info}/METADATA +6 -7
  161. pyvale-2025.8.1.dist-info/RECORD +260 -0
  162. pyvale/dataset.py +0 -415
  163. pyvale/dic2dcpp.cp311-win_amd64.pyd +0 -0
  164. pyvale/dicdataimport.py +0 -247
  165. pyvale/simtools.py +0 -67
  166. pyvale-2025.7.1.dist-info/RECORD +0 -211
  167. /pyvale/{blendercalibrationdata.py → blender/blendercalibrationdata.py} +0 -0
  168. /pyvale/{dicspecklegenerator.py → dic/dicspecklegenerator.py} +0 -0
  169. /pyvale/{dicspecklequality.py → dic/dicspecklequality.py} +0 -0
  170. /pyvale/{dicstrainresults.py → dic/dicstrainresults.py} +0 -0
  171. /pyvale/{cameradata.py → sensorsim/cameradata.py} +0 -0
  172. /pyvale/{cameradata2d.py → sensorsim/cameradata2d.py} +0 -0
  173. /pyvale/{errordriftcalc.py → sensorsim/errordriftcalc.py} +0 -0
  174. /pyvale/{fieldtransform.py → sensorsim/fieldtransform.py} +0 -0
  175. /pyvale/{generatorsrandom.py → sensorsim/generatorsrandom.py} +0 -0
  176. /pyvale/{imagetools.py → sensorsim/imagetools.py} +0 -0
  177. /pyvale/{integratortype.py → sensorsim/integratortype.py} +0 -0
  178. /pyvale/{output.py → sensorsim/output.py} +0 -0
  179. /pyvale/{raster.py → sensorsim/raster.py} +0 -0
  180. /pyvale/{sensordescriptor.py → sensorsim/sensordescriptor.py} +0 -0
  181. /pyvale/{visualimagedef.py → sensorsim/visualimagedef.py} +0 -0
  182. /pyvale/{visualopts.py → sensorsim/visualopts.py} +0 -0
  183. /pyvale/{analyticmeshgen.py → verif/analyticmeshgen.py} +0 -0
  184. {pyvale-2025.7.1.dist-info → pyvale-2025.8.1.dist-info}/WHEEL +0 -0
  185. {pyvale-2025.7.1.dist-info → pyvale-2025.8.1.dist-info}/licenses/LICENSE +0 -0
  186. {pyvale-2025.7.1.dist-info → pyvale-2025.8.1.dist-info}/top_level.txt +0 -0
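The listing above shows the package being reorganised into subpackages (blender, dataset, dic, mooseherder, sensorsim, verif), with the top-level pyvale/__init__.py losing 92 lines and gaining only 12. The new example scripts in the diff hunks below import from these subpackages directly. A minimal sketch of that import style, using only names that appear in the diffed examples (an illustration inferred from this diff, not an official migration guide; the MOOSE paths are placeholders):

    from pathlib import Path

    # Subpackage-level imports used by the new mooseherder examples
    import pyvale.dataset as dataset
    from pyvale.mooseherder import MooseConfig, MooseRunner

    # Bundled test case plus a runner configured from a user-level MOOSE build
    # (replace the paths with your own installation locations)
    moose_input = dataset.element_case_input_path(dataset.EElemTest.HEX20)
    moose_config = MooseConfig({'main_path': Path.home() / 'moose',
                                'app_path': Path.home() / 'proteus',
                                'app_name': 'proteus-opt'})
    moose_runner = MooseRunner(moose_config)
    moose_runner.set_input_file(moose_input)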
@@ -0,0 +1,80 @@
+ # ==============================================================================
+ # pyvale: the python validation engine
+ # License: MIT
+ # Copyright (C) 2025 The Computer Aided Validation Team
+ # ==============================================================================
+
+ """
+ Running MOOSE once
+ ================================================================================
+
+ In this example we will run a single moose simulation from a moose input .i file
+ using a 'runner' object.
+
+ **Installing moose**: To run this example you will need to have installed moose
+ on your system. As moose supports unix operating systems, windows users will need
+ to use windows subsystem for linux (WSL). We use the proteus moose build which
+ can be found here: https://github.com/aurora-multiphysics/proteus. Build scripts
+ for common linux distributions can be found in the 'scripts' directory of the
+ repo. You can also create your own moose build using the instructions here:
+ https://mooseframework.inl.gov/.
+
+ We start by importing what we need for this example.
+ """
+
+ import time
+ from pathlib import Path
+
+ #pyvale imports
+ import pyvale.dataset as dataset
+ from pyvale.mooseherder import (MooseConfig,
+                                 MooseRunner)
+
+
+ #%%
+ # First we build our moose configuration which gives the location of our main
+ # moose build, our moose app and the name of the app to use when called on the
+ # command line.
+ config = {'main_path': Path.home() / 'moose',
+           'app_path': Path.home() / 'proteus',
+           'app_name': 'proteus-opt'}
+ moose_config = MooseConfig(config)
+
+ #%%
+ # We can now build a runner object using our configuration. We can then set some
+ # options for the run, including parallelisation and whether we should redirect
+ # terminal output to file. For smaller simulations we are better off using
+ # threads for parallelisation as they reduce overhead compared to MPI tasks.
+ moose_runner = MooseRunner(moose_config)
+
+ moose_runner.set_run_opts(n_tasks = 1,
+                           n_threads = 8,
+                           redirect_out = False)
+
+ #%%
+ # Let's grab a simple thermo-mechanical cube test case from pyvale's moose
+ # simulation library and we will set this as the input file to run with our
+ # 'runner'.
+ moose_input = dataset.element_case_input_path(dataset.EElemTest.HEX20)
+ moose_runner.set_input_file(moose_input)
+
+ #%%
+ # Our moose runner builds a list of strings which form the command line to
+ # run our moose simulation. We print the list of command line arguments here so
+ # we can check we are correctly calling our input file with the run options we
+ # want.
+ print(moose_runner.get_arg_list())
+ print()
+
+ #%%
+ # To run our moose simulation we just need to call 'run'. Here we time our
+ # moose run and then print the solve time to the terminal.
+ start_time = time.perf_counter()
+ moose_runner.run()
+ run_time = time.perf_counter() - start_time
+
+ print()
+ print("-"*80)
+ print(f'MOOSE run time = {run_time:.3f} seconds')
+ print("-"*80)
+ print()
@@ -0,0 +1,64 @@
+ # ==============================================================================
+ # pyvale: the python validation engine
+ # License: MIT
+ # Copyright (C) 2025 The Computer Aided Validation Team
+ # ==============================================================================
+
+ """
+ Running Gmsh once
+ ================================================================================
+
+ In this example we will run a gmsh script to generate a mesh file using the
+ GmshRunner class.
+
+ **Installing gmsh**: For this example you will need to have a gmsh executable
+ which can be downloaded and installed from here: https://gmsh.info/#Download
+
+ We start by importing what we need for this example.
+ """
+
+ import time
+ from pathlib import Path
+
+ #pyvale imports
+ import pyvale.dataset as dataset
+ from pyvale.mooseherder import GmshRunner
+
+ #%%
+ # First we need to create a 'runner' for gmsh which needs to know the path to
+ # the gmsh executable. You will need to replace this path with the path to where
+ # you have installed gmsh on your system.
+ gmsh_path = Path.home() / 'gmsh/bin/gmsh'
+ gmsh_runner = GmshRunner(gmsh_path)
+
+ #%%
+ # Next we grab a gmsh file from the pyvale simulation library and we set this as
+ # the input file for our runner.
+ gmsh_input = dataset.sim_case_gmsh_file_path(case_num=17)
+ gmsh_runner.set_input_file(gmsh_input)
+
+ #%%
+ # Now we can run gmsh to generate our mesh using the run method. The parse only
+ # flag means we will run gmsh headless and not open the gmsh GUI, but terminal
+ # output will still be written to stdout.
+ #
+ # We also use our performance timer to time how long our mesh generation takes
+ # and then we print this to the console. Note that parallelisation options for
+ # gmsh can be controlled in the gmsh .geo script file.
+ start_time = time.perf_counter()
+ gmsh_runner.run(gmsh_input,parse_only=True)
+ run_time = time.perf_counter() - start_time
+
+ print()
+ print("-"*80)
+ print(f'Gmsh run time = {run_time:.3f} seconds')
+ print("-"*80)
+ print()
+
+ #%%
+ # The GmshRunner and the MooseRunner implement the SimRunner abstract base
+ # class. Later on we will see that we can use this to run a list of
+ # different sim runners in order using the MooseHerd workflow manager. This
+ # allows us to first build our mesh with gmsh and then run a moose simulation
+ # using that mesh. We can also implement our own SimRunners to add additional
+ # pre or post processing steps to our simulation chain.
@@ -0,0 +1,114 @@
+ # ==============================================================================
+ # pyvale: the python validation engine
+ # License: MIT
+ # Copyright (C) 2025 The Computer Aided Validation Team
+ # ==============================================================================
+
+ """
+ Run Gmsh then MOOSE once
+ ================================================================================
+
+ In this example we use a gmsh runner followed by a moose runner to generate our
+ mesh and then run a moose simulation using this mesh. The moose input file needs
+ to know the name of the gmsh .msh file which is specified in the gmsh .geo
+ script when the Save command is called. It is possible to use the input
+ modifiers we have seen previously to update this file name as a variable in the
+ moose input script, but for this example we have set things manually inside the
+ moose input script.
+
+ **Installing moose**: To run this example you will need to have installed moose
+ on your system. As moose supports unix operating systems, windows users will need
+ to use windows subsystem for linux (WSL). We use the proteus moose build which
+ can be found here: https://github.com/aurora-multiphysics/proteus. Build scripts
+ for common linux distributions can be found in the 'scripts' directory of the
+ repo. You can also create your own moose build using the instructions here:
+ https://mooseframework.inl.gov/.
+
+ **Installing gmsh**: For this example you will need to have a gmsh executable
+ which can be downloaded and installed from here: https://gmsh.info/#Download
+
+ We start by importing what we need for this example.
+ """
+
+ import time
+ import shutil
+ from pathlib import Path
+
+ #pyvale imports
+ import pyvale.dataset as dataset
+ from pyvale.mooseherder import (MooseConfig,
+                                 GmshRunner,
+                                 MooseRunner)
+
+ #%%
+ # We need to make sure the output .msh file from gmsh can be found by our moose
+ # input script so we are going to put both files in our standard pyvale-output
+ # directory in our current working directory. First we grab the paths for the
+ # .geo and .i files and then we copy them to the pyvale-output directory where we
+ # will run our simulation from. We then print the paths so we can see where the
+ # files are - try opening them with your text editor of choice so you can see how
+ # the name of the mesh is specified in the gmsh .geo as the .msh output and then
+ # how the name is matched in the moose .i to read the .msh to run the sim.
+
+ output_path = Path.cwd() / "pyvale-output"
+ if not output_path.is_dir():
+     output_path.mkdir(parents=True, exist_ok=True)
+
+ gmsh_file = dataset.sim_case_gmsh_file_path(case_num=17)
+ gmsh_input = output_path / gmsh_file.name
+
+ moose_file = dataset.sim_case_input_file_path(case_num=17)
+ moose_input = output_path / moose_file.name
+
+ shutil.copyfile(moose_file,moose_input)
+ shutil.copyfile(gmsh_file,gmsh_input)
+
+ print(f"\n{moose_input.resolve()=}")
+ print(f"{gmsh_input.resolve()=}\n")
+
+ #%%
+ # We need to run gmsh first to generate our .msh file so we set it up and run it
+ # in exactly the same way as we have done in the previous example. We pass the
+ # path to the gmsh executable to our runner. We then set our input file and call
+ # run to generate the mesh.
+ gmsh_path = Path.home() / 'gmsh/bin/gmsh'
+ gmsh_runner = GmshRunner(gmsh_path)
+
+ gmsh_runner.set_input_file(gmsh_input)
+
+ gmsh_start = time.perf_counter()
+ gmsh_runner.run(parse_only=True)
+ gmsh_run_time = time.perf_counter()-gmsh_start
+
+ #%%
+ # Now that we have our mesh we can run our moose simulation. We will set up and
+ # run moose in exactly the same way as in a previous example. First, we set up
+ # our moose configuration and pass this to our runner. We then set our run /
+ # parallelisation options before calling run to execute the simulation.
+ config = {'main_path': Path.home() / 'moose',
+           'app_path': Path.home() / 'proteus',
+           'app_name': 'proteus-opt'}
+ moose_config = MooseConfig(config)
+
+ moose_runner = MooseRunner(moose_config)
+
+ moose_runner.set_run_opts(n_tasks = 1,
+                           n_threads = 4,
+                           redirect_out = True)
+
+ moose_runner.set_input_file(moose_input)
+
+ moose_start = time.perf_counter()
+ moose_runner.run()
+ moose_run_time = time.perf_counter() - moose_start
+
+ #%%
+ # Finally we print the execution times of both runners to the
+ # console.
+ print("-"*80)
+ print(f'Gmsh run time = {gmsh_run_time:.2f} seconds')
+ print(f'MOOSE run time = {moose_run_time:.2f} seconds')
+ print("-"*80)
+ print()
+
+
@@ -0,0 +1,157 @@
+ # ==============================================================================
+ # pyvale: the python validation engine
+ # License: MIT
+ # Copyright (C) 2025 The Computer Aided Validation Team
+ # ==============================================================================
+
+ """
+ Running a parameter sweep of a MOOSE simulation
+ ================================================================================
+
+ In this example we will perform a parameter sweep of a moose simulation showing
+ the capability of the 'herder' workflow manager which can be passed a list of
+ 'input modifiers' and 'runners'. The 'herder' will then use the 'input
+ modifiers' to update simulation parameters and then call the respective 'runner'
+ using the modified input file. In this example we will also see that the
+ 'herder' can be used to execute a parameter sweep sequentially or in parallel.
+
+ **Installing moose**: To run this example you will need to have installed moose
+ on your system. As moose supports unix operating systems, windows users will need
+ to use windows subsystem for linux (WSL). We use the proteus moose build which
+ can be found here: https://github.com/aurora-multiphysics/proteus. Build scripts
+ for common linux distributions can be found in the 'scripts' directory of the
+ repo. You can also create your own moose build using the instructions here:
+ https://mooseframework.inl.gov/.
+
+ We start by importing what we need for this example.
+ """
+
+ from pathlib import Path
+ import numpy as np
+
+ #pyvale imports
+ import pyvale.dataset as dataset
+ from pyvale.mooseherder import (MooseHerd,
+                                 MooseRunner,
+                                 InputModifier,
+                                 DirectoryManager,
+                                 MooseConfig,
+                                 sweep_param_grid)
+
+ #%%
+ # First we are going to set up an input modifier and a runner for our moose
+ # simulation. Here we need to make sure that when we set our moose
+ # parallelisation options we leave enough threads for all the simulations that
+ # are running at once based on our CPU. It is also helpful to redirect stdout to
+ # file so that our terminal does not become a mess when we start running our
+ # simulations in parallel.
+ moose_input = dataset.element_case_input_path(dataset.EElemTest.HEX20)
+ moose_modifier = InputModifier(moose_input,'#','')
+
+ config = {'main_path': Path.home() / 'moose',
+           'app_path': Path.home() / 'proteus',
+           'app_name': 'proteus-opt'}
+ moose_config = MooseConfig(config)
+
+ moose_runner = MooseRunner(moose_config)
+ moose_runner.set_run_opts(n_tasks = 1,
+                           n_threads = 2,
+                           redirect_out = True)
+
+ #%%
+ # Now we are going to create a directory manager which will be used to make sure
+ # our simulations are run in separate directories. We then create our herd
+ # workflow manager with our list of runners and corresponding input modifiers.
+ # In our case we are only running moose so our lists have a single item. The
+ # last thing we do is specify the number of simulations we want to run in
+ # parallel; for this case we match the number of directories.
+ num_para_sims: int = 4
+ dir_manager = DirectoryManager(n_dirs=num_para_sims)
+ herd = MooseHerd([moose_runner],[moose_modifier],dir_manager)
+ herd.set_num_para_sims(n_para=num_para_sims)
+
+ #%%
+ # We need somewhere to run our simulations and store the output so we create our
+ # standard pyvale output directory and then we set this as the base directory
+ # for our directory manager. We clear any old output directories and then create
+ # new ones ready to write our simulation output to.
+ output_path = Path.cwd() / "pyvale-output"
+ if not output_path.is_dir():
+     output_path.mkdir(parents=True, exist_ok=True)
+
+ dir_manager.set_base_dir(output_path)
+ dir_manager.reset_dirs()
+
+ #%%
+ # We now need to generate the parameter combinations we want to run using our
+ # 'herd'. This is given as a list of lists of dictionaries where the outer list
+ # corresponds to the unique simulation chain, the inner list corresponds to each
+ # simulation runner in the chain, and the dictionary contains key value pairs
+ # where the keys are the variable names to edit in the input file. For this
+ # case we only have moose in our simulation chain so our inner list will only
+ # have a length of one, but in the next example we will see how we can combine
+ # a parameter sweep with gmsh->moose, sweeping all possible combinations of
+ # variables for both simulation tools.
+ #
+ # For now we are going to use a helper function from mooseherder which will
+ # generate all possible combinations for us in the correct data format. We just
+ # provide a dictionary of lists of unique parameters we want to analyse. Finally
+ # we print the unique combinations of parameters to the terminal as well as the
+ # total number of simulations to check everything is working as expected.
+
+ moose_params = {"nElemX": (2,3,4),
+                 "lengX": np.array([10e-3,15e-3]),
+                 "PRatio": (0.3,)}
+ params = [moose_params,]
+ sweep_params = sweep_param_grid(params)
+
+ print("\nParameter sweep variables by simulation:")
+ for ii,pp in enumerate(sweep_params):
+     print(f"Sim: {ii}, Params [moose,]: {pp}")
+
+ print()
+ print(f"Total simulations = {len(sweep_params)}")
+ print()
+
+ #%%
+ # The run once function of the herd allows us to run a particular single
+ # simulation chain from anywhere in the sweep. This is useful for debugging when
+ # you want to rerun a single case to see the output or what went wrong. The herd
+ # also stores the solution time for each single iteration so we will store this
+ # to estimate how long the whole sweep should take when solving sequentially.
+ herd.run_once(0,sweep_params[0])
+ time_run_once = herd.get_iter_time()
+
+
+ #%%
+ # We can run the whole parameter sweep sequentially (one by one) using the run
+ # sequential function of the herd. We also store the total solution time
+ # for all simulation chains so that we can compare to a parallel run later. Note
+ # that it can be beneficial to run sequentially if you are using the herd within
+ # another loop or if one of the steps in your simulation chain is expensive and
+ # that step needs the computational resource.
+ herd.run_sequential(sweep_params)
+ time_run_seq = herd.get_sweep_time()
+
+ #%%
+ # Finally, we can run our parameter sweep in parallel. We need a main guard here
+ # as we use the multiprocessing package. We also store the sweep time for this
+ # case to compare our sequential to parallel run time.
+ if __name__ == "__main__":
+     herd.run_para(sweep_params)
+     time_run_para = herd.get_sweep_time()
+
+ #%%
+ # Now that we have run all cases we can compare the run time for a single
+ # simulation multiplied by the total number of simulations against running the
+ # sweep in parallel.
+ print("-"*80)
+ print(f'Run time (one iter) = {time_run_once:.3f} seconds')
+ print(f'Est. time (one iter x num sims) = {(time_run_once*len(sweep_params)):.3f} seconds')
+ print()
+ print(f'Run time (seq) = {time_run_seq:.3f} seconds')
+ print(f'Run time (para) = {time_run_para:.3f} seconds')
+ print("-"*80)
+ print()
+
+
@@ -0,0 +1,176 @@
+ # ==============================================================================
+ # pyvale: the python validation engine
+ # License: MIT
+ # Copyright (C) 2025 The Computer Aided Validation Team
+ # ==============================================================================
+
+ """
+ Running a parameter sweep of a Gmsh and MOOSE simulation
+ ================================================================================
+
+ In this example we will perform a parameter sweep of a gmsh-moose simulation
+ chain to demonstrate the capability of the 'herder' workflow manager. Here we
+ pass the 'herder' a list of simulation tools that we want to modify inputs for
+ (i.e. input modifiers) and then run (i.e. with runners). The simulation tools
+ are called sequentially in the order they are in the lists so we will need to
+ make sure we call gmsh first to generate the mesh and then call moose to use the
+ mesh to run the simulation.
+
+ As in the previous example we will generate a parameter sweep and then run it
+ sequentially and in parallel and compare the run times.
+
+ **Installing moose**: To run this example you will need to have installed moose
+ on your system. As moose supports unix operating systems, windows users will need
+ to use windows subsystem for linux (WSL). We use the proteus moose build which
+ can be found here: https://github.com/aurora-multiphysics/proteus. Build scripts
+ for common linux distributions can be found in the 'scripts' directory of the
+ repo. You can also create your own moose build using the instructions here:
+ https://mooseframework.inl.gov/.
+
+ **Installing gmsh**: For this example you will need to have a gmsh executable
+ which can be downloaded and installed from here: https://gmsh.info/#Download
+
+ We start by importing what we need for this example.
+ """
+
+ from pathlib import Path
+ import numpy as np
+
+ #pyvale imports
+ import pyvale.dataset as dataset
+ from pyvale.mooseherder import (MooseHerd,
+                                 MooseRunner,
+                                 MooseConfig,
+                                 GmshRunner,
+                                 InputModifier,
+                                 DirectoryManager,
+                                 sweep_param_grid)
+
+ #%%
+ # First we set up our input modifier and runner for gmsh using the same 2D plate
+ # with a hole simulation test case from the pyvale simulation library. This is
+ # the same as we have seen in previous examples for the gmsh input modifier and
+ # running gmsh.
+ sim_case: int = 17
+
+ gmsh_input = dataset.sim_case_gmsh_file_path(case_num=sim_case)
+ gmsh_modifier = InputModifier(gmsh_input,"//",";")
+
+ gmsh_path = Path.home() / "gmsh/bin/gmsh"
+ gmsh_runner = GmshRunner(gmsh_path)
+ gmsh_runner.set_input_file(gmsh_input)
+
+
+ #%%
+ # Next we set up our moose input modifier and runner in the same way as we have
+ # done in previous examples. We set our parallelisation options for moose here
+ # as well as redirecting stdout to file to save our terminal when we run in
+ # parallel.
+ moose_input = dataset.sim_case_input_file_path(case_num=sim_case)
+ moose_modifier = InputModifier(moose_input,"#","")
+
+ config = {'main_path': Path.home() / 'moose',
+           'app_path': Path.home() / 'proteus',
+           'app_name': 'proteus-opt'}
+ moose_config = MooseConfig(config)
+ moose_runner = MooseRunner(moose_config)
+ moose_runner.set_run_opts(n_tasks = 1,
+                           n_threads = 2,
+                           redirect_out = True)
+
+ #%%
+ # We can now set up our 'herd' workflow manager, making sure we place gmsh ahead
+ # of moose in the input modifier and runner lists so it is executed first to
+ # generate our mesh. We set up our directories and the number of simulations to
+ # run in parallel as we have done previously.
+ num_para_sims: int = 4
+
+ sim_runners = [gmsh_runner,moose_runner]
+ input_modifiers = [gmsh_modifier,moose_modifier]
+ dir_manager = DirectoryManager(n_dirs=num_para_sims)
+
+ herd = MooseHerd(sim_runners,input_modifiers,dir_manager)
+ herd.set_num_para_sims(n_para=num_para_sims)
+
+
+ #%%
+ # We need somewhere to run our simulations and store the output so we create our
+ # standard pyvale output directory and then we set this as the base directory
+ # for our directory manager. We clear any old output directories and then create
+ # new ones ready to write our simulation output to.
+ output_path = Path.cwd() / "pyvale-output"
+ if not output_path.is_dir():
+     output_path.mkdir(parents=True, exist_ok=True)
+
+ dir_manager.set_base_dir(output_path)
+ dir_manager.clear_dirs()
+ dir_manager.create_dirs()
+
+ #%%
+ # We can now set up our grid parameter sweep to run simulations for all
+ # combinations of variables we are interested in. For now we will only change
+ # the parameters of our gmsh simulation so we set our moose parameters to None.
+ # For the gmsh simulation parameters we pass a dictionary keyed by the variable
+ # we want to change and then an iterable object (e.g. tuple, list, numpy array)
+ # for all values of the variable we want to run. We can also have an iterable of
+ # strings which will insert expressions into the input file for us, as shown for
+ # the plate height below. If we only want to analyse a single value of a
+ # parameter we just pass an iterable with a single element. Note that the list
+ # of parameters that we pass to the sweep grid function should be in the same
+ # order we intend to call our simulation tools - so gmsh first in this case.
+ #
+ # After running this example replace the moose params with the following:
+ # ``moose_params = {"EMod": (70e9,100e9),"PRatio": (0.3,0.35)}``. This should
+ # demonstrate how all combinations of parameters between both gmsh and moose are
+ # generated using the sweep grid function.
+ gmsh_params = {"plate_width": np.array([150e-3,100e-3]),
+                "plate_height": ("plate_width + 100e-3",
+                                 "plate_width + 50e-3")}
+ moose_params = None
+ params = [gmsh_params,moose_params]
+ sweep_params = sweep_param_grid(params)
+
+ print("\nParameter sweep variables by simulation:")
+ for ii,pp in enumerate(sweep_params):
+     print(f"Sim: {ii}, Params [gmsh,moose]: {pp}")
+
+ #%%
+ # The run once function of the herd allows us to run a particular single
+ # simulation chain from anywhere in the sweep. This is useful for debugging when
+ # you want to rerun a single case to see the output or what went wrong. The herd
+ # also stores the solution time for each single iteration so we will store this
+ # to estimate how long the whole sweep should take when solving sequentially.
+ herd.run_once(0,sweep_params[0])
+ time_run_once = herd.get_iter_time()
+
+
+ #%%
+ # We can run the whole parameter sweep sequentially (one by one) using the run
+ # sequential function of the herd. We also store the total solution time
+ # for all simulation chains so that we can compare to a parallel run later. Note
+ # that it can be beneficial to run sequentially if you are using the herd within
+ # another loop or if one of the steps in your simulation chain is expensive and
+ # that step needs the computational resource.
+ herd.run_sequential(sweep_params)
+ time_run_seq = herd.get_sweep_time()
+
+ #%%
+ # Finally, we can run our parameter sweep in parallel. We need a main guard here
+ # as we use the multiprocessing package. We also store the sweep time for this
+ # case to compare our sequential to parallel run time.
+ if __name__ == "__main__":
+     herd.run_para(sweep_params)
+     time_run_para = herd.get_sweep_time()
+
+ #%%
+ # Now that we have run all cases we can compare the run time for a single
+ # simulation multiplied by the total number of simulations against running the
+ # sweep in parallel.
+ print("-"*80)
+ print(f'Run time (one iter) = {time_run_once:.3f} seconds')
+ print(f'Est. time (one iter x num sims) = {(time_run_once*len(sweep_params)):.3f} seconds')
+ print()
+ print(f'Run time (seq) = {time_run_seq:.3f} seconds')
+ print(f'Run time (para) = {time_run_para:.3f} seconds')
+ print("-"*80)
+ print()