sierra-research 1.3.6__py3-none-any.whl → 1.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (254)
  1. sierra/__init__.py +3 -3
  2. sierra/core/__init__.py +3 -3
  3. sierra/core/batchroot.py +223 -0
  4. sierra/core/cmdline.py +681 -1057
  5. sierra/core/compare.py +11 -0
  6. sierra/core/config.py +96 -88
  7. sierra/core/engine.py +306 -0
  8. sierra/core/execenv.py +380 -0
  9. sierra/core/expdef.py +11 -0
  10. sierra/core/experiment/__init__.py +1 -0
  11. sierra/core/experiment/bindings.py +150 -101
  12. sierra/core/experiment/definition.py +414 -245
  13. sierra/core/experiment/spec.py +83 -85
  14. sierra/core/exproot.py +44 -0
  15. sierra/core/generators/__init__.py +10 -0
  16. sierra/core/generators/experiment.py +528 -0
  17. sierra/core/generators/generator_factory.py +138 -137
  18. sierra/core/graphs/__init__.py +23 -0
  19. sierra/core/graphs/bcbridge.py +94 -0
  20. sierra/core/graphs/heatmap.py +245 -324
  21. sierra/core/graphs/pathset.py +27 -0
  22. sierra/core/graphs/schema.py +77 -0
  23. sierra/core/graphs/stacked_line.py +341 -0
  24. sierra/core/graphs/summary_line.py +506 -0
  25. sierra/core/logging.py +3 -2
  26. sierra/core/models/__init__.py +3 -1
  27. sierra/core/models/info.py +19 -0
  28. sierra/core/models/interface.py +52 -122
  29. sierra/core/pipeline/__init__.py +2 -5
  30. sierra/core/pipeline/pipeline.py +228 -126
  31. sierra/core/pipeline/stage1/__init__.py +10 -0
  32. sierra/core/pipeline/stage1/pipeline_stage1.py +45 -31
  33. sierra/core/pipeline/stage2/__init__.py +10 -0
  34. sierra/core/pipeline/stage2/pipeline_stage2.py +8 -11
  35. sierra/core/pipeline/stage2/runner.py +401 -0
  36. sierra/core/pipeline/stage3/__init__.py +12 -0
  37. sierra/core/pipeline/stage3/gather.py +321 -0
  38. sierra/core/pipeline/stage3/pipeline_stage3.py +37 -84
  39. sierra/core/pipeline/stage4/__init__.py +12 -2
  40. sierra/core/pipeline/stage4/pipeline_stage4.py +36 -354
  41. sierra/core/pipeline/stage5/__init__.py +12 -0
  42. sierra/core/pipeline/stage5/pipeline_stage5.py +33 -208
  43. sierra/core/pipeline/yaml.py +48 -0
  44. sierra/core/plugin.py +529 -62
  45. sierra/core/proc.py +11 -0
  46. sierra/core/prod.py +11 -0
  47. sierra/core/ros1/__init__.py +5 -1
  48. sierra/core/ros1/callbacks.py +22 -21
  49. sierra/core/ros1/cmdline.py +59 -88
  50. sierra/core/ros1/generators.py +159 -175
  51. sierra/core/ros1/variables/__init__.py +3 -0
  52. sierra/core/ros1/variables/exp_setup.py +122 -116
  53. sierra/core/startup.py +106 -76
  54. sierra/core/stat_kernels.py +4 -5
  55. sierra/core/storage.py +13 -32
  56. sierra/core/trampoline.py +30 -0
  57. sierra/core/types.py +116 -71
  58. sierra/core/utils.py +103 -106
  59. sierra/core/variables/__init__.py +1 -1
  60. sierra/core/variables/base_variable.py +12 -17
  61. sierra/core/variables/batch_criteria.py +387 -481
  62. sierra/core/variables/builtin.py +135 -0
  63. sierra/core/variables/exp_setup.py +19 -39
  64. sierra/core/variables/population_size.py +72 -76
  65. sierra/core/variables/variable_density.py +44 -68
  66. sierra/core/vector.py +1 -1
  67. sierra/main.py +256 -88
  68. sierra/plugins/__init__.py +119 -0
  69. sierra/plugins/compare/__init__.py +14 -0
  70. sierra/plugins/compare/graphs/__init__.py +19 -0
  71. sierra/plugins/compare/graphs/cmdline.py +120 -0
  72. sierra/plugins/compare/graphs/comparator.py +291 -0
  73. sierra/plugins/compare/graphs/inter_controller.py +531 -0
  74. sierra/plugins/compare/graphs/inter_scenario.py +297 -0
  75. sierra/plugins/compare/graphs/namecalc.py +53 -0
  76. sierra/plugins/compare/graphs/outputroot.py +73 -0
  77. sierra/plugins/compare/graphs/plugin.py +147 -0
  78. sierra/plugins/compare/graphs/preprocess.py +172 -0
  79. sierra/plugins/compare/graphs/schema.py +37 -0
  80. sierra/plugins/engine/__init__.py +14 -0
  81. sierra/plugins/engine/argos/__init__.py +18 -0
  82. sierra/plugins/{platform → engine}/argos/cmdline.py +144 -151
  83. sierra/plugins/{platform/argos/variables → engine/argos/generators}/__init__.py +5 -0
  84. sierra/plugins/engine/argos/generators/engine.py +394 -0
  85. sierra/plugins/engine/argos/plugin.py +393 -0
  86. sierra/plugins/{platform/argos/generators → engine/argos/variables}/__init__.py +5 -0
  87. sierra/plugins/engine/argos/variables/arena_shape.py +183 -0
  88. sierra/plugins/engine/argos/variables/cameras.py +240 -0
  89. sierra/plugins/engine/argos/variables/constant_density.py +112 -0
  90. sierra/plugins/engine/argos/variables/exp_setup.py +82 -0
  91. sierra/plugins/{platform → engine}/argos/variables/physics_engines.py +83 -87
  92. sierra/plugins/engine/argos/variables/population_constant_density.py +178 -0
  93. sierra/plugins/engine/argos/variables/population_size.py +115 -0
  94. sierra/plugins/engine/argos/variables/population_variable_density.py +123 -0
  95. sierra/plugins/engine/argos/variables/rendering.py +108 -0
  96. sierra/plugins/engine/ros1gazebo/__init__.py +18 -0
  97. sierra/plugins/engine/ros1gazebo/cmdline.py +175 -0
  98. sierra/plugins/{platform/ros1robot → engine/ros1gazebo}/generators/__init__.py +5 -0
  99. sierra/plugins/engine/ros1gazebo/generators/engine.py +125 -0
  100. sierra/plugins/engine/ros1gazebo/plugin.py +404 -0
  101. sierra/plugins/engine/ros1gazebo/variables/__init__.py +15 -0
  102. sierra/plugins/engine/ros1gazebo/variables/population_size.py +214 -0
  103. sierra/plugins/engine/ros1robot/__init__.py +18 -0
  104. sierra/plugins/engine/ros1robot/cmdline.py +159 -0
  105. sierra/plugins/{platform/ros1gazebo → engine/ros1robot}/generators/__init__.py +4 -0
  106. sierra/plugins/engine/ros1robot/generators/engine.py +95 -0
  107. sierra/plugins/engine/ros1robot/plugin.py +410 -0
  108. sierra/plugins/{hpc/local → engine/ros1robot/variables}/__init__.py +5 -0
  109. sierra/plugins/engine/ros1robot/variables/population_size.py +146 -0
  110. sierra/plugins/execenv/__init__.py +11 -0
  111. sierra/plugins/execenv/hpc/__init__.py +18 -0
  112. sierra/plugins/execenv/hpc/adhoc/__init__.py +18 -0
  113. sierra/plugins/execenv/hpc/adhoc/cmdline.py +30 -0
  114. sierra/plugins/execenv/hpc/adhoc/plugin.py +131 -0
  115. sierra/plugins/execenv/hpc/cmdline.py +137 -0
  116. sierra/plugins/execenv/hpc/local/__init__.py +18 -0
  117. sierra/plugins/execenv/hpc/local/cmdline.py +31 -0
  118. sierra/plugins/execenv/hpc/local/plugin.py +145 -0
  119. sierra/plugins/execenv/hpc/pbs/__init__.py +18 -0
  120. sierra/plugins/execenv/hpc/pbs/cmdline.py +30 -0
  121. sierra/plugins/execenv/hpc/pbs/plugin.py +121 -0
  122. sierra/plugins/execenv/hpc/slurm/__init__.py +18 -0
  123. sierra/plugins/execenv/hpc/slurm/cmdline.py +30 -0
  124. sierra/plugins/execenv/hpc/slurm/plugin.py +133 -0
  125. sierra/plugins/execenv/prefectserver/__init__.py +18 -0
  126. sierra/plugins/execenv/prefectserver/cmdline.py +66 -0
  127. sierra/plugins/execenv/prefectserver/dockerremote/__init__.py +18 -0
  128. sierra/plugins/execenv/prefectserver/dockerremote/cmdline.py +66 -0
  129. sierra/plugins/execenv/prefectserver/dockerremote/plugin.py +132 -0
  130. sierra/plugins/execenv/prefectserver/flow.py +66 -0
  131. sierra/plugins/execenv/prefectserver/local/__init__.py +18 -0
  132. sierra/plugins/execenv/prefectserver/local/cmdline.py +29 -0
  133. sierra/plugins/execenv/prefectserver/local/plugin.py +133 -0
  134. sierra/plugins/{hpc/adhoc → execenv/robot}/__init__.py +1 -0
  135. sierra/plugins/execenv/robot/turtlebot3/__init__.py +18 -0
  136. sierra/plugins/execenv/robot/turtlebot3/plugin.py +204 -0
  137. sierra/plugins/expdef/__init__.py +14 -0
  138. sierra/plugins/expdef/json/__init__.py +14 -0
  139. sierra/plugins/expdef/json/plugin.py +504 -0
  140. sierra/plugins/expdef/xml/__init__.py +14 -0
  141. sierra/plugins/expdef/xml/plugin.py +386 -0
  142. sierra/{core/hpc → plugins/proc}/__init__.py +1 -1
  143. sierra/plugins/proc/collate/__init__.py +15 -0
  144. sierra/plugins/proc/collate/cmdline.py +47 -0
  145. sierra/plugins/proc/collate/plugin.py +271 -0
  146. sierra/plugins/proc/compress/__init__.py +18 -0
  147. sierra/plugins/proc/compress/cmdline.py +47 -0
  148. sierra/plugins/proc/compress/plugin.py +123 -0
  149. sierra/plugins/proc/decompress/__init__.py +18 -0
  150. sierra/plugins/proc/decompress/plugin.py +96 -0
  151. sierra/plugins/proc/imagize/__init__.py +15 -0
  152. sierra/plugins/proc/imagize/cmdline.py +49 -0
  153. sierra/plugins/proc/imagize/plugin.py +270 -0
  154. sierra/plugins/proc/modelrunner/__init__.py +16 -0
  155. sierra/plugins/proc/modelrunner/plugin.py +250 -0
  156. sierra/plugins/proc/statistics/__init__.py +15 -0
  157. sierra/plugins/proc/statistics/cmdline.py +64 -0
  158. sierra/plugins/proc/statistics/plugin.py +390 -0
  159. sierra/plugins/{hpc → prod}/__init__.py +1 -0
  160. sierra/plugins/prod/graphs/__init__.py +18 -0
  161. sierra/plugins/prod/graphs/cmdline.py +269 -0
  162. sierra/plugins/prod/graphs/collate.py +279 -0
  163. sierra/plugins/prod/graphs/inter/__init__.py +13 -0
  164. sierra/plugins/prod/graphs/inter/generate.py +83 -0
  165. sierra/plugins/prod/graphs/inter/heatmap.py +86 -0
  166. sierra/plugins/prod/graphs/inter/line.py +134 -0
  167. sierra/plugins/prod/graphs/intra/__init__.py +15 -0
  168. sierra/plugins/prod/graphs/intra/generate.py +202 -0
  169. sierra/plugins/prod/graphs/intra/heatmap.py +74 -0
  170. sierra/plugins/prod/graphs/intra/line.py +114 -0
  171. sierra/plugins/prod/graphs/plugin.py +103 -0
  172. sierra/plugins/prod/graphs/targets.py +63 -0
  173. sierra/plugins/prod/render/__init__.py +18 -0
  174. sierra/plugins/prod/render/cmdline.py +72 -0
  175. sierra/plugins/prod/render/plugin.py +282 -0
  176. sierra/plugins/storage/__init__.py +5 -0
  177. sierra/plugins/storage/arrow/__init__.py +18 -0
  178. sierra/plugins/storage/arrow/plugin.py +38 -0
  179. sierra/plugins/storage/csv/__init__.py +9 -0
  180. sierra/plugins/storage/csv/plugin.py +12 -5
  181. sierra/version.py +3 -2
  182. sierra_research-1.5.0.dist-info/METADATA +238 -0
  183. sierra_research-1.5.0.dist-info/RECORD +186 -0
  184. {sierra_research-1.3.6.dist-info → sierra_research-1.5.0.dist-info}/WHEEL +1 -2
  185. sierra/core/experiment/xml.py +0 -454
  186. sierra/core/generators/controller_generator_parser.py +0 -34
  187. sierra/core/generators/exp_creator.py +0 -351
  188. sierra/core/generators/exp_generators.py +0 -142
  189. sierra/core/graphs/scatterplot2D.py +0 -109
  190. sierra/core/graphs/stacked_line_graph.py +0 -249
  191. sierra/core/graphs/stacked_surface_graph.py +0 -220
  192. sierra/core/graphs/summary_line_graph.py +0 -369
  193. sierra/core/hpc/cmdline.py +0 -142
  194. sierra/core/models/graphs.py +0 -87
  195. sierra/core/pipeline/stage2/exp_runner.py +0 -286
  196. sierra/core/pipeline/stage3/imagizer.py +0 -149
  197. sierra/core/pipeline/stage3/run_collator.py +0 -317
  198. sierra/core/pipeline/stage3/statistics_calculator.py +0 -478
  199. sierra/core/pipeline/stage4/graph_collator.py +0 -319
  200. sierra/core/pipeline/stage4/inter_exp_graph_generator.py +0 -240
  201. sierra/core/pipeline/stage4/intra_exp_graph_generator.py +0 -317
  202. sierra/core/pipeline/stage4/model_runner.py +0 -168
  203. sierra/core/pipeline/stage4/rendering.py +0 -283
  204. sierra/core/pipeline/stage4/yaml_config_loader.py +0 -103
  205. sierra/core/pipeline/stage5/inter_scenario_comparator.py +0 -328
  206. sierra/core/pipeline/stage5/intra_scenario_comparator.py +0 -989
  207. sierra/core/platform.py +0 -493
  208. sierra/core/plugin_manager.py +0 -369
  209. sierra/core/root_dirpath_generator.py +0 -241
  210. sierra/plugins/hpc/adhoc/plugin.py +0 -125
  211. sierra/plugins/hpc/local/plugin.py +0 -81
  212. sierra/plugins/hpc/pbs/__init__.py +0 -9
  213. sierra/plugins/hpc/pbs/plugin.py +0 -126
  214. sierra/plugins/hpc/slurm/__init__.py +0 -9
  215. sierra/plugins/hpc/slurm/plugin.py +0 -130
  216. sierra/plugins/platform/__init__.py +0 -9
  217. sierra/plugins/platform/argos/__init__.py +0 -9
  218. sierra/plugins/platform/argos/generators/platform_generators.py +0 -383
  219. sierra/plugins/platform/argos/plugin.py +0 -337
  220. sierra/plugins/platform/argos/variables/arena_shape.py +0 -145
  221. sierra/plugins/platform/argos/variables/cameras.py +0 -243
  222. sierra/plugins/platform/argos/variables/constant_density.py +0 -136
  223. sierra/plugins/platform/argos/variables/exp_setup.py +0 -113
  224. sierra/plugins/platform/argos/variables/population_constant_density.py +0 -175
  225. sierra/plugins/platform/argos/variables/population_size.py +0 -102
  226. sierra/plugins/platform/argos/variables/population_variable_density.py +0 -132
  227. sierra/plugins/platform/argos/variables/rendering.py +0 -104
  228. sierra/plugins/platform/ros1gazebo/__init__.py +0 -9
  229. sierra/plugins/platform/ros1gazebo/cmdline.py +0 -213
  230. sierra/plugins/platform/ros1gazebo/generators/platform_generators.py +0 -137
  231. sierra/plugins/platform/ros1gazebo/plugin.py +0 -335
  232. sierra/plugins/platform/ros1gazebo/variables/__init__.py +0 -10
  233. sierra/plugins/platform/ros1gazebo/variables/population_size.py +0 -204
  234. sierra/plugins/platform/ros1robot/__init__.py +0 -9
  235. sierra/plugins/platform/ros1robot/cmdline.py +0 -175
  236. sierra/plugins/platform/ros1robot/generators/platform_generators.py +0 -112
  237. sierra/plugins/platform/ros1robot/plugin.py +0 -373
  238. sierra/plugins/platform/ros1robot/variables/__init__.py +0 -10
  239. sierra/plugins/platform/ros1robot/variables/population_size.py +0 -146
  240. sierra/plugins/robot/__init__.py +0 -9
  241. sierra/plugins/robot/turtlebot3/__init__.py +0 -9
  242. sierra/plugins/robot/turtlebot3/plugin.py +0 -194
  243. sierra_research-1.3.6.data/data/share/man/man1/sierra-cli.1 +0 -2349
  244. sierra_research-1.3.6.data/data/share/man/man7/sierra-examples.7 +0 -488
  245. sierra_research-1.3.6.data/data/share/man/man7/sierra-exec-envs.7 +0 -331
  246. sierra_research-1.3.6.data/data/share/man/man7/sierra-glossary.7 +0 -285
  247. sierra_research-1.3.6.data/data/share/man/man7/sierra-platforms.7 +0 -358
  248. sierra_research-1.3.6.data/data/share/man/man7/sierra-usage.7 +0 -725
  249. sierra_research-1.3.6.data/data/share/man/man7/sierra.7 +0 -78
  250. sierra_research-1.3.6.dist-info/METADATA +0 -500
  251. sierra_research-1.3.6.dist-info/RECORD +0 -133
  252. sierra_research-1.3.6.dist-info/top_level.txt +0 -1
  253. {sierra_research-1.3.6.dist-info → sierra_research-1.5.0.dist-info}/entry_points.txt +0 -0
  254. {sierra_research-1.3.6.dist-info → sierra_research-1.5.0.dist-info/licenses}/LICENSE +0 -0
sierra/core/generators/experiment.py (new file)
@@ -0,0 +1,528 @@
+ # Copyright 2018 London Lowmanstone, John Harwell, All rights reserved.
+ #
+ # SPDX-License-Identifier: MIT
+ """Experiment generation classes.
+
+ Experiment generation modifies the
+ :class:`~sierra.core.experiment.definition.BaseExpDef` object built from
+ the specified batch criteria as follows:
+
+ - Engine-specific modifications common to all batch experiments
+ - Project-specific modifications common to all batch experiments
+ - Modifications generated by the selected controller+scenario
+
+ NOTE:: Generated definitions from batch criteria are not handled here; they are
+ already generated to scaffold the batch experiment when experiment
+ generation is run.
+
+ """
+
+ # Core packages
+ import typing as tp
+ import logging
+ import pathlib
+ import time
+ import random
+ import pickle
+ import copy
+ import os
+
+ # 3rd party packages
+
+ # Project packages
+ import sierra.core.generators.generator_factory as gf
+ from sierra.core.experiment import spec, definition, bindings
+ from sierra.core import types, batchroot, exproot, utils, config, engine
+ import sierra.core.variables.batch_criteria as bc
+ import sierra.core.plugin as pm
+
+
+ class BatchExpDefGenerator:
+     """Generate experiment definitions for a :term:`Batch Experiment`.
+
+     Does not create the batch experiment after generation.
+     """
+
+     def __init__(
+         self,
+         criteria: bc.XVarBatchCriteria,
+         pathset: batchroot.PathSet,
+         controller_name: str,
+         scenario_basename: str,
+         cmdopts: types.Cmdopts,
+     ) -> None:
+         #: batch_config_template: Absolute path to the root template expdef
+         # configuration file.
+         self.batch_config_template = pathlib.Path(cmdopts["expdef_template"])
+
+         assert self.batch_config_template.is_file(), "'{0}' is not a valid file".format(
+             self.batch_config_template
+         )
+
+         self.exp_template_stem = self.batch_config_template.stem
+         self.batch_config_extension = None
+
+         self.pathset = pathset
+
+         #: controller_name: Name of controller generator to use.
+         self.controller_name = controller_name
+
+         #: scenario_basename: Name of scenario generator to use.
+         self.scenario_basename = scenario_basename
+
+         #: criteria: :class:`~sierra.core.variables.batch_criteria.BatchCriteria`
+         # derived object instance created from cmdline definition.
+         self.criteria = criteria
+         self.cmdopts = cmdopts
+         self.logger = logging.getLogger(__name__)
+
+     def generate_defs(self) -> tp.List[definition.BaseExpDef]:
+         """Generate and return the batch experiment definition.
+
+         Returns:
+
+             A list of experiment definitions (one for each experiment in the
+             batch).
+
+         """
+         scaffold_spec = spec.scaffold_spec_factory(self.criteria)
+
+         # Create and run generators
+         defs = []
+         for i in range(0, scaffold_spec.n_exps):
+             generator = self._create_exp_generator(i)
+             self.logger.debug(
+                 (
+                     "Generating scenario+controller changes from "
+                     "generator '%s' for exp%s"
+                 ),
+                 self.cmdopts["joint_generator"],
+                 i,
+             )
+             defs.append(generator.generate())
+
+         return defs
+
+     def _create_exp_generator(self, exp_num: int):
+         """
+         Create the joint scenario+controller generator from command line definitions.
+
+         Arguments:
+
+             exp_num: Experiment number in the batch
+         """
+
+         exp_spec = spec.ExperimentSpec(
+             self.criteria, self.pathset.input_root, exp_num, self.cmdopts
+         )
+         template_fpath = exp_spec.exp_input_root / self.exp_template_stem
+         config_root = pathlib.Path(self.cmdopts["project_config_root"])
+         scenario = gf.ScenarioGenerator(
+             controller=self.controller_name,
+             scenario=self.scenario_basename,
+             spec=exp_spec,
+             expdef_template_fpath=template_fpath,
+             cmdopts=self.cmdopts,
+         )
+
+         controller = gf.ControllerGenerator(
+             controller=self.controller_name,
+             config_root=config_root,
+             cmdopts=self.cmdopts,
+             spec=exp_spec,
+         )
+
+         generator = gf.JointGenerator(scenario=scenario, controller=controller)
+         self.cmdopts["joint_generator"] = generator.name
+         return generator
+
+
+ class BatchExpCreator:
+     """Instantiate a :term:`Batch Experiment`.
+
+     Calls :class:`~sierra.core.generators.experiment.ExpCreator` on each
+     experimental definition in the batch
+     """
+
+     def __init__(
+         self,
+         criteria: bc.XVarBatchCriteria,
+         cmdopts: types.Cmdopts,
+         pathset: batchroot.PathSet,
+     ) -> None:
+
+         #: Absolute path to the root template expdef configuration file.
+         self.batch_config_template = pathlib.Path(cmdopts["expdef_template"])
+
+         self.pathset = pathset
+
+         #: :class:`~sierra.core.variables.batch_criteria.BatchCriteria` derived
+         # object instance created from cmdline definition.
+         self.criteria = criteria
+
+         self.cmdopts = cmdopts
+         self.logger = logging.getLogger(__name__)
+
+     def create(self, generator: BatchExpDefGenerator) -> None:
+         utils.dir_create_checked(self.pathset.input_root, self.cmdopts["exp_overwrite"])
+
+         # Scaffold the batch experiment, creating experiment directories and
+         # writing template expdef input files for each experiment in the batch
+         # with changes from the batch criteria added.
+         module = pm.pipeline.get_plugin_module(self.cmdopts["expdef"])
+
+         exp_def = module.ExpDef(
+             input_fpath=self.batch_config_template, write_config=None
+         )
+
+         module = pm.pipeline.get_plugin_module(self.cmdopts["engine"])
+
+         if hasattr(module, "expdef_flatten"):
+             self.logger.debug(
+                 "Flattening --expdef-template definition before scaffolding"
+             )
+             # Flatten the expdef here if the engine defines the hook, so that
+             # the full flattened file contents are available for scaffolding.
+             exp_def = module.expdef_flatten(exp_def)
+
+         self.criteria.scaffold_exps(exp_def, self.cmdopts)
+
+         # Pickle experiment definitions in the actual batch experiment
+         # directory for later retrieval.
+         self.criteria.pickle_exp_defs(self.cmdopts)
+
+         # Run batch experiment generator (must be after scaffolding so the
+         # per-experiment template files are in place).
+         defs = generator.generate_defs()
+
+         assert len(defs) > 0, "No expdef modifications generated?"
+
+         self.logger.info(
+             "Applying generated scenario+controller changes/mods to all experiments"
+         )
+
+         if self.cmdopts["exec_parallelism_paradigm"] is not None:
+             self.logger.warning(
+                 "Overriding engine=%s parallelism paradigm with %s",
+                 self.cmdopts["engine"],
+                 self.cmdopts["exec_parallelism_paradigm"],
+             )
+             parallelism_paradigm = self.cmdopts["exec_parallelism_paradigm"]
+         else:
+             configurer = engine.ExpConfigurer(self.cmdopts)
+             parallelism_paradigm = configurer.parallelism_paradigm()
+
+         self._init_cmdfile(parallelism_paradigm)
+
+         for i, defi in enumerate(defs):
+             self.logger.trace(
+                 "Applying %s/%s generated scenario+controller changes/mods to exp%s",
+                 defi.n_mods()[0],
+                 defi.n_mods()[1],
+                 i,
+             )
+             exp_pathset = exproot.PathSet(
+                 self.pathset, self.criteria.gen_exp_names()[i]
+             )
+             ExpCreator(
+                 self.cmdopts,
+                 self.criteria,
+                 self.batch_config_template,
+                 exp_pathset,
+                 i,
+             ).from_def(defi, parallelism_paradigm)
+
+     def _init_cmdfile(self, paradigm: str) -> None:
+         # Commands file stored in batch input root
+         if paradigm == "per-batch":
+             path = self.pathset.root / config.kGNUParallel["cmdfile_stem"]
+             if utils.path_exists(path.with_suffix(config.kGNUParallel["cmdfile_ext"])):
+                 path.with_suffix(config.kGNUParallel["cmdfile_ext"]).unlink()
+
+
+ class ExpCreator:
+     """Instantiate a generated experiment from an experiment definition.
+
+     Takes generated :term:`Experiment` definitions and writes them to the
+     filesystem.
+
+     Args:
+         template_ipath: Absolute path to the template expdef configuration file.
+     """
+
+     def __init__(
+         self,
+         cmdopts: types.Cmdopts,
+         criteria: bc.XVarBatchCriteria,
+         template_ipath: pathlib.Path,
+         pathset: exproot.PathSet,
+         exp_num: int,
+     ) -> None:
+
+         # filename of template file, sans extension and parent directory path
+         self.template_stem = template_ipath.resolve().stem
+
+         #: Dictionary containing parsed cmdline options.
+         self.cmdopts = cmdopts
+         self.criteria = criteria
+         self.exp_num = exp_num
+         self.pathset = pathset
+         self.logger = logging.getLogger(__name__)
+
+         # If random seeds were previously generated, use them if configured
+         self.seeds_fpath = self.pathset.input_root / config.kRandomSeedsLeaf
+         self.preserve_seeds = self.cmdopts["preserve_seeds"]
+         self.random_seeds = None
+
+         if self.preserve_seeds:
+             if utils.path_exists(self.seeds_fpath):
+                 with open(self.seeds_fpath, "rb") as f:
+                     self.random_seeds = pickle.load(f)
+
+         if self.random_seeds is not None:
+             if len(self.random_seeds) == self.cmdopts["n_runs"]:
+                 self.logger.trace(
+                     "Using existing random seeds for experiment%s", self.exp_num
+                 )
+             elif len(self.random_seeds) != self.cmdopts["n_runs"]:
+                 # OK to overwrite the saved random seeds--they changed the
+                 # experiment definition.
+                 self.logger.warning(
+                     (
+                         "Experiment%s definition changed: # random "
+                         "seeds (%s) != --n-runs (%s): create new "
+                         "seeds"
+                     ),
+                     self.exp_num,
+                     len(self.random_seeds),
+                     self.cmdopts["n_runs"],
+                 )
+                 self.preserve_seeds = False
+
+         if not self.preserve_seeds or self.random_seeds is None:
+             self.logger.trace(
+                 "Generating new random seeds for experiment%s", self.exp_num
+             )
+             self.random_seeds = random.sample(
+                 range(0, int(time.time())), self.cmdopts["n_runs"]
+             )
+
+     def from_def(
+         self, exp_def: definition.BaseExpDef, parallelism_paradigm: str
+     ) -> None:
+         """Create all experimental runs by writing input files to filesystem.
+
+         The passed :class:`~sierra.core.experiment.definition.BaseExpDef` object
+         contains all changes that should be made to all runs in the
+         experiment. Additional changes to create a set of unique runs from which
+         distributions of system behavior can be meaningfully computed post-hoc
+         are added.
+
+         """
+         cmdfile_path = self._init_cmdfile(parallelism_paradigm)
+
+         n_agents = utils.get_n_agents(
+             self.criteria.main_config, self.cmdopts, self.pathset.input_root, exp_def
+         )
+         generator = engine.ExpRunShellCmdsGenerator(
+             self.cmdopts, self.criteria, n_agents, self.exp_num
+         )
+
+         # Create all experimental runs
+         self.logger.debug(
+             "Creating %s runs in exp%s", self.cmdopts["n_runs"], self.exp_num
+         )
+         for run_num in range(self.cmdopts["n_runs"]):
+             per_run = copy.deepcopy(exp_def)
+             self._create_exp_run(
+                 per_run, generator, run_num, cmdfile_path, parallelism_paradigm
+             )
+
+         # Perform experiment level configuration AFTER all runs have been
+         # generated in the experiment, in case the configuration depends on the
+         # generated launch files.
+         engine.ExpConfigurer(self.cmdopts).for_exp(self.pathset.input_root)
+
+         # Save seeds
+         if not utils.path_exists(self.seeds_fpath) or not self.preserve_seeds:
+             if utils.path_exists(self.seeds_fpath):
+                 os.remove(self.seeds_fpath)
+             with open(self.seeds_fpath, "ab") as f:
+                 utils.pickle_dump(self.random_seeds, f)
+
+     def _create_exp_run(
+         self,
+         run_exp_def: definition.BaseExpDef,
+         cmds_generator,
+         run_num: int,
+         cmdfile_path: pathlib.Path,
+         parallelism_paradigm: str,
+     ) -> None:
+         run_output_dir = f"{self.template_stem}_run{run_num}_output"
+
+         # If the project defined per-run configuration, apply
+         # it. Otherwise, the already-applied configuration for the engine is
+         # all that will be used per-run.
+         per_run = pm.module_load_tiered(
+             project=self.cmdopts["project"], path="generators.experiment"
+         )
+
+         run_output_root = self.pathset.output_root / run_output_dir
+         stem_path = self._get_launch_file_stempath(run_num)
+
+         # Generate per-run exp changes.
+         per_run.for_single_exp_run(
+             run_exp_def,
+             run_num,
+             run_output_root,
+             stem_path,
+             self.random_seeds[run_num],
+             self.cmdopts,
+         )
+
+         # Write out the experimental run launch file
+         run_exp_def.write(stem_path)
+
+         # Perform any necessary programmatic (i.e., stuff you can do in python
+         # and don't need a shell for) per-run configuration.
+         configurer = engine.ExpConfigurer(self.cmdopts)
+         configurer.for_exp_run(self.pathset.input_root, run_output_root)
+
+         ext = config.kGNUParallel["cmdfile_ext"]
+         if parallelism_paradigm in ["per-exp", "per-batch"]:
+             # Update commands file with the command for the configured
+             # experimental run.
+             with utils.utf8open(cmdfile_path.with_suffix(ext), "a") as cmdfile:
+                 self._update_cmdfile(
+                     cmdfile,
+                     cmds_generator,
+                     parallelism_paradigm,
+                     run_num,
+                     run_output_root,
+                     self._get_launch_file_stempath(run_num),
+                     "slave",
+                 )
+         elif parallelism_paradigm == "per-run":
+             # Write new GNU Parallel commands file with the commands for the
+             # experimental run.
+             master_fpath = f"{cmdfile_path}_run{run_num}_master{ext}"
+             slave_fpath = f"{cmdfile_path}_run{run_num}_slave{ext}"
+
+             self.logger.trace("Updating slave cmdfile %s", slave_fpath) # type: ignore
+             with utils.utf8open(slave_fpath, "w") as cmds_file:
+                 self._update_cmdfile(
+                     cmds_file,
+                     cmds_generator,
+                     "per-run",
+                     run_num,
+                     run_output_root,
+                     self._get_launch_file_stempath(run_num),
+                     "slave",
+                 )
+
+             self.logger.trace(
+                 "Updating master cmdfile %s", master_fpath # type: ignore
+             )
+             with utils.utf8open(master_fpath, "w") as cmdfile:
+                 self._update_cmdfile(
+                     cmdfile,
+                     cmds_generator,
+                     "per-run",
+                     run_num,
+                     run_output_root,
+                     self._get_launch_file_stempath(run_num),
+                     "master",
+                 )
+
+     def _get_launch_file_stempath(self, run_num: int) -> pathlib.Path:
+         """File is named as ``<template input file stem>_run<run_num>``."""
+         leaf = f"{self.template_stem}_run{run_num}"
+         return self.pathset.input_root / leaf
+
+     def _init_cmdfile(self, paradigm: str) -> pathlib.Path:
+         # Commands file stored in batch input root
+         if paradigm == "per-batch":
+             path = self.pathset.parent / config.kGNUParallel["cmdfile_stem"]
+         # Commands file stored in input root for each experiment.
+         elif paradigm == "per-exp":
+             path = self.pathset.input_root / config.kGNUParallel["cmdfile_stem"]
+         elif paradigm == "per-run":
+             path = self.pathset.input_root / config.kGNUParallel["cmdfile_stem"]
+
+         # Clear out the commands file if it exists for per-batch/per-exp,
+         # because those files are appended to as we generate each experiment.
+         # We don't need to do that for per-run parallelism, because those
+         # files are not appended to.
+         if paradigm == "per-exp" and utils.path_exists(
+             path.with_suffix(config.kGNUParallel["cmdfile_ext"])
+         ):
+             path.with_suffix(config.kGNUParallel["cmdfile_ext"]).unlink()
+
+         return path
+
+     def _update_cmdfile(
+         self,
+         cmdfile,
+         cmds_generator: bindings.IExpRunShellCmdsGenerator,
+         paradigm: str,
+         run_num: int,
+         run_output_root: pathlib.Path,
+         launch_stem_path: pathlib.Path,
+         for_host: str,
+     ) -> None:
+         """Add command to launch a given experimental run to the command file."""
+         pre_specs = cmds_generator.pre_run_cmds(for_host, launch_stem_path, run_num)
+         assert all(
+             spec.shell for spec in pre_specs
+         ), "All pre-exp commands are run in a shell"
+         pre_cmds = [spec.cmd for spec in pre_specs]
+         self.logger.trace("Pre-experiment cmds: %s", pre_cmds) # type: ignore
+
+         exec_specs = cmds_generator.exec_run_cmds(for_host, launch_stem_path, run_num)
+         assert all(
+             spec.shell for spec in exec_specs
+         ), "All exec-exp commands are run in a shell"
+         exec_cmds = [spec.cmd for spec in exec_specs]
+         self.logger.trace("Exec-experiment cmds: %s", exec_cmds) # type: ignore
+
+         post_specs = cmds_generator.post_run_cmds(for_host, run_output_root)
+         assert all(
+             spec.shell for spec in post_specs
+         ), "All post-exp commands are run in a shell"
+         post_cmds = [spec.cmd for spec in post_specs]
+         self.logger.trace("Post-experiment cmds: %s", post_cmds) # type: ignore
+
+         if len(pre_cmds + exec_cmds + post_cmds) == 0:
+             self.logger.debug("Skipping writing %s cmds file: no cmds", for_host)
+             return
+
+         #
+         # This is one of those crucial parts of SIERRA where the "magic"
+         # happens.
+         #
+         # If there is 1 cmdfile per experiment, then the pre- and post-exec cmds
+         # need to be prepended and appended to the exec cmds on a per-line
+         # basis, because each line needs to contain the total set of commands to
+         # run each experimental run within the experiment. Each line needs to be
+         # capable of being executed independently of the others, so that e.g.,
+         # GNU parallel can process them concurrently if directed to do so. Same
+         # for 1 cmdfile per batch.
+         #
+         # If there is 1 cmdfile per experimental run, then it's the same thing,
+         # BUT we need to break the exec cmds over multiple lines in the
+         # cmdfile. All commands in the command file WILL be run in parallel;
+         # this is the paradigm that maps to real hardware, and obviously you
+         # want all of your configured agents doing things simultaneously in an
+         # experiment.
+         if paradigm in ["per-exp", "per-batch"]:
+             line = " ".join(pre_cmds + exec_cmds + post_cmds) + "\n"
+             cmdfile.write(line)
+         elif paradigm == "per-run":
+             for e in exec_cmds:
+                 line = " ".join(pre_cmds + [e] + post_cmds) + "\n"
+                 cmdfile.write(line)
+         else:
+             raise ValueError(f"Bad paradigm {paradigm}")
+
+
+ __all__ = ["ExpCreator", "BatchExpCreator", "BatchExpDefGenerator"]
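
The comment in _update_cmdfile() above is the key to how GNU Parallel command files are composed under each parallelism paradigm. Below is a minimal standalone sketch of that line-joining behavior; the pre/exec/post command strings are hypothetical placeholders, not commands SIERRA actually emits:

    # Sketch only: mirrors the line-joining logic in ExpCreator._update_cmdfile().
    import typing as tp

    def join_cmds(
        pre: tp.List[str], exec_: tp.List[str], post: tp.List[str], paradigm: str
    ) -> tp.List[str]:
        if paradigm in ["per-exp", "per-batch"]:
            # One self-contained line per experimental run: pre + all exec + post.
            return [" ".join(pre + exec_ + post)]
        if paradigm == "per-run":
            # One line per exec command; every line in the cmdfile runs in parallel.
            return [" ".join(pre + [e] + post) for e in exec_]
        raise ValueError(f"Bad paradigm {paradigm}")

    # Hypothetical placeholder commands:
    print(join_cmds(["setup.sh;"], ["./agent0.sh", "./agent1.sh"], ["; collect.sh"], "per-run"))
    # -> ['setup.sh; ./agent0.sh ; collect.sh', 'setup.sh; ./agent1.sh ; collect.sh']

Under per-exp/per-batch, each experimental run contributes one self-contained line to a shared command file; under per-run, each exec command gets its own line in a per-run command file so that all of them can be launched simultaneously.

For context, a hedged sketch of how a caller elsewhere in the pipeline might wire the exported classes together, based only on the constructor and create() signatures in this file; the variable values are illustrative, not SIERRA defaults:

    # Hypothetical caller sketch: BatchExpCreator.create() drives BatchExpDefGenerator.
    generator = BatchExpDefGenerator(
        criteria=criteria,               # bc.XVarBatchCriteria built from the batch criteria
        pathset=pathset,                 # batchroot.PathSet for the batch experiment root
        controller_name="mycontroller",  # illustrative controller generator name
        scenario_basename="myscenario",  # illustrative scenario generator name
        cmdopts=cmdopts,                 # parsed cmdline options dict
    )
    BatchExpCreator(criteria=criteria, cmdopts=cmdopts, pathset=pathset).create(generator)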