idmtools-platform-comps 0.0.0.dev0__py3-none-any.whl → 0.0.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. idmtools_platform_comps/__init__.py +25 -8
  2. idmtools_platform_comps/cli/__init__.py +4 -0
  3. idmtools_platform_comps/cli/cli_functions.py +50 -0
  4. idmtools_platform_comps/cli/comps.py +492 -0
  5. idmtools_platform_comps/comps_cli.py +48 -0
  6. idmtools_platform_comps/comps_operations/__init__.py +6 -0
  7. idmtools_platform_comps/comps_operations/asset_collection_operations.py +263 -0
  8. idmtools_platform_comps/comps_operations/experiment_operations.py +569 -0
  9. idmtools_platform_comps/comps_operations/simulation_operations.py +678 -0
  10. idmtools_platform_comps/comps_operations/suite_operations.py +228 -0
  11. idmtools_platform_comps/comps_operations/workflow_item_operations.py +269 -0
  12. idmtools_platform_comps/comps_platform.py +309 -0
  13. idmtools_platform_comps/plugin_info.py +168 -0
  14. idmtools_platform_comps/ssmt_operations/__init__.py +6 -0
  15. idmtools_platform_comps/ssmt_operations/simulation_operations.py +77 -0
  16. idmtools_platform_comps/ssmt_operations/workflow_item_operations.py +73 -0
  17. idmtools_platform_comps/ssmt_platform.py +44 -0
  18. idmtools_platform_comps/ssmt_work_items/__init__.py +4 -0
  19. idmtools_platform_comps/ssmt_work_items/comps_work_order_task.py +29 -0
  20. idmtools_platform_comps/ssmt_work_items/comps_workitems.py +113 -0
  21. idmtools_platform_comps/ssmt_work_items/icomps_workflowitem.py +71 -0
  22. idmtools_platform_comps/ssmt_work_items/work_order.py +54 -0
  23. idmtools_platform_comps/utils/__init__.py +4 -0
  24. idmtools_platform_comps/utils/assetize_output/__init__.py +4 -0
  25. idmtools_platform_comps/utils/assetize_output/assetize_output.py +125 -0
  26. idmtools_platform_comps/utils/assetize_output/assetize_ssmt_script.py +144 -0
  27. idmtools_platform_comps/utils/base_singularity_work_order.json +6 -0
  28. idmtools_platform_comps/utils/download/__init__.py +4 -0
  29. idmtools_platform_comps/utils/download/download.py +178 -0
  30. idmtools_platform_comps/utils/download/download_ssmt.py +81 -0
  31. idmtools_platform_comps/utils/download_experiment.py +116 -0
  32. idmtools_platform_comps/utils/file_filter_workitem.py +519 -0
  33. idmtools_platform_comps/utils/general.py +358 -0
  34. idmtools_platform_comps/utils/linux_mounts.py +73 -0
  35. idmtools_platform_comps/utils/lookups.py +123 -0
  36. idmtools_platform_comps/utils/package_version.py +489 -0
  37. idmtools_platform_comps/utils/python_requirements_ac/__init__.py +4 -0
  38. idmtools_platform_comps/utils/python_requirements_ac/create_asset_collection.py +155 -0
  39. idmtools_platform_comps/utils/python_requirements_ac/install_requirements.py +109 -0
  40. idmtools_platform_comps/utils/python_requirements_ac/requirements_to_asset_collection.py +374 -0
  41. idmtools_platform_comps/utils/python_version.py +40 -0
  42. idmtools_platform_comps/utils/scheduling.py +154 -0
  43. idmtools_platform_comps/utils/singularity_build.py +491 -0
  44. idmtools_platform_comps/utils/spatial_output.py +76 -0
  45. idmtools_platform_comps/utils/ssmt_utils/__init__.py +6 -0
  46. idmtools_platform_comps/utils/ssmt_utils/common.py +70 -0
  47. idmtools_platform_comps/utils/ssmt_utils/file_filter.py +568 -0
  48. idmtools_platform_comps/utils/sweeping.py +162 -0
  49. idmtools_platform_comps-0.0.2.dist-info/METADATA +100 -0
  50. idmtools_platform_comps-0.0.2.dist-info/RECORD +62 -0
  51. idmtools_platform_comps-0.0.2.dist-info/entry_points.txt +9 -0
  52. idmtools_platform_comps-0.0.2.dist-info/licenses/LICENSE.TXT +3 -0
  53. {idmtools_platform_comps-0.0.0.dev0.dist-info → idmtools_platform_comps-0.0.2.dist-info}/top_level.txt +1 -0
  54. ssmt_image/Dockerfile +52 -0
  55. ssmt_image/Makefile +21 -0
  56. ssmt_image/__init__.py +6 -0
  57. ssmt_image/bootstrap.sh +30 -0
  58. ssmt_image/build_docker_image.py +161 -0
  59. ssmt_image/pip.conf +3 -0
  60. ssmt_image/push_docker_image.py +49 -0
  61. ssmt_image/requirements.txt +9 -0
  62. idmtools_platform_comps-0.0.0.dev0.dist-info/METADATA +0 -41
  63. idmtools_platform_comps-0.0.0.dev0.dist-info/RECORD +0 -5
  64. {idmtools_platform_comps-0.0.0.dev0.dist-info → idmtools_platform_comps-0.0.2.dist-info}/WHEEL +0 -0
@@ -0,0 +1,109 @@
1
+ """idmtools script to run on Slurm to install python files.
2
+
3
+ This is part of the RequirementsToAssetCollection tool. This will run on the HPC in an Experiment to install the python requirements
4
+ as output that will be converted to an AssetCollection later.
5
+
6
+ Copyright 2021, Bill & Melinda Gates Foundation. All rights reserved.
7
+ """
8
+ import compileall
9
+ import glob
10
+ import os
11
+ import subprocess
12
+ import sys
13
+ import time
14
+ import traceback
15
+ from concurrent.futures.thread import ThreadPoolExecutor
16
+ from datetime import datetime
17
+
18
# Working directory of the script (the simulation working directory on the HPC node)
CURRENT_DIRECTORY = os.getcwd()
# Short directory name, presumably to keep path lengths down on Windows nodes — TODO confirm
LIBRARY_ROOT = 'L'
# Target directory where pip installs the requirements
LIBRARY_PATH = os.path.join(CURRENT_DIRECTORY, LIBRARY_ROOT)
# Consolidated requirements file produced by RequirementsToAssetCollection
REQUIREMENT_FILE = 'requirements_updated.txt'
# pip index used to resolve packages (IDM artifactory PyPI mirror)
INDEX_URL = 'https://packages.idmod.org/artifactory/api/pypi/pypi-production/simple'
23
+
24
+
25
def install_packages_from_requirements(python_paths=None):
    """
    Install the packages from the consolidated requirements file into LIBRARY_PATH.

    Runs ``pip install -t`` so the packages land in a relocatable directory that
    is later converted into an AssetCollection.

    Args:
        python_paths: Optional system Python path; a single path string or a
            list of paths. When provided it is exported as PYTHONPATH for the
            pip subprocess; when None, pip runs with an empty environment.

    Returns:
        None

    Raises:
        subprocess.CalledProcessError: If the pip install command fails.
    """
    if python_paths is None:
        env = {}
    else:
        # Accept a single path string as a convenience
        if not isinstance(python_paths, list):
            python_paths = [python_paths]

        env = dict(os.environ)
        env['PYTHONPATH'] = os.pathsep.join(python_paths)

    print(f"Running pip install -r {REQUIREMENT_FILE} to tmp directory")
    subprocess.check_call(
        [sys.executable, "-m", "pip", "install", "-t", LIBRARY_PATH, "-r", f"Assets/{REQUIREMENT_FILE}", "-i",
         f"{INDEX_URL}"], env=env)
46
+
47
+
48
def set_python_dates():
    """
    Normalize the modification time of every installed .py file.

    Pyc files embed the source file date, so pinning every source to the same
    fixed timestamp keeps the generated pyc files reproducible across runs.
    """
    print("Updating file dates")
    date = datetime(year=2020, month=1, day=1, hour=0, minute=0, second=0, microsecond=0)
    mod_time = time.mktime(date.timetuple())
    # The context manager waits for all pending utime calls on exit,
    # equivalent to the explicit pool.shutdown(True).
    with ThreadPoolExecutor() as pool:
        for filename in glob.glob(f"{LIBRARY_PATH}{os.path.sep}**/*.py", recursive=True):
            # Bug fix: previously printed the literal text "(unknown)" instead of the path
            print(f"Updating date on {filename}")
            pool.submit(os.utime, filename, (mod_time, mod_time))
62
+
63
+
64
def compile_all(python_paths=None):
    """
    Compile all the python files in LIBRARY_PATH to pyc.

    Pre-compiling reduces per-run startup cost since the library directory will
    be reused as an asset.

    Args:
        python_paths: Unused; retained for backward compatibility with existing
            callers. (The previous implementation built a PYTHONPATH environment
            dict from it but never passed that dict to anything — dead code,
            removed here.)

    Returns:
        None
    """
    print("Compiling pyc files")
    print(f'Compiling {LIBRARY_PATH}')
    compileall.compile_dir(os.path.relpath(LIBRARY_PATH).strip(os.path.sep), force=True)
    print(f'Pyc Files Generated: {len(glob.glob(f"{LIBRARY_PATH}{os.path.sep}**/*.pyc", recursive=True))}')
82
+
83
+
84
if __name__ == "__main__":
    print('CURRENT_DIRECTORY: \n', CURRENT_DIRECTORY)
    print('LIBRARY_PATH: \n', LIBRARY_PATH)

    # Compute the platform-specific site-packages path inside LIBRARY_PATH
    if sys.platform == "win32":
        full_path = os.path.join(LIBRARY_PATH, 'lib', 'site-packages')
    else:
        # Bug fix: sys.version[:3] truncates "3.10.x" to "3.1"; use
        # sys.version_info so Python >= 3.10 produces the right directory name.
        full_path = os.path.join(
            LIBRARY_PATH, 'lib',
            'python{}.{}'.format(sys.version_info.major, sys.version_info.minor),
            'site-packages')

    # exist_ok avoids a race between an existence check and the mkdir
    os.makedirs(full_path, exist_ok=True)

    print("Adding {} to the system path".format(full_path))
    sys.path.insert(1, full_path)

    tb = None
    try:
        install_packages_from_requirements(sys.path)
        set_python_dates()
        compile_all(sys.path)
    except Exception:
        tb = traceback.format_exc()
        print(tb)
    finally:
        # Propagate failure to the HPC scheduler via a non-zero exit code
        if tb:
            sys.exit(-1)
@@ -0,0 +1,374 @@
1
+ """idmtools requirements to asset collection.
2
+
3
+ This is the entry point for users to use RequirementsToAssetCollection tool.
4
+
5
+ Copyright 2021, Bill & Melinda Gates Foundation. All rights reserved.
6
+ """
7
+ import os
8
+ import hashlib
9
+ from dataclasses import dataclass, field
10
+ from logging import getLogger, DEBUG
11
+ from typing import List
12
+ from packaging.requirements import Requirement
13
+ from COMPS.Data import QueryCriteria
14
+ from COMPS.Data.AssetCollection import AssetCollection as COMPSAssetCollection
15
+ from idmtools.assets import Asset, AssetCollection
16
+ from idmtools.core import ItemType
17
+ from idmtools.entities.experiment import Experiment
18
+ from idmtools_models.python.json_python_task import JSONConfiguredPythonTask
19
+ from idmtools_platform_comps.comps_platform import COMPSPlatform, SLURM_ENVS
20
+ from idmtools_platform_comps.utils.package_version import get_highest_version
21
+
22
# Directory containing this module; used to locate the helper scripts below
CURRENT_DIRECTORY = os.path.dirname(__file__)
# Consolidated requirements file written locally and uploaded as an asset
REQUIREMENT_FILE = 'requirements_updated.txt'
# Script run inside the experiment to pip-install the requirements
MODEL_LOAD_LIB = "install_requirements.py"
# Script run by the SSMT work item to convert the output into an asset collection
MODEL_CREATE_AC = 'create_asset_collection.py'
# Tag key template; formatted with the os target ("win"/"linux")
MD5_KEY = 'idmtools-requirements-md5-{}'
logger = getLogger(__name__)
# Logger whose output is shown directly to the user
user_logger = getLogger("user")
29
+
30
+
31
@dataclass(repr=False)
class RequirementsToAssetCollection:
    """
    RequirementsToAssetCollection provides a utility to install python packages into an asset collection.

    Notes:
        - TODO - Incorporate example in this docs
    """
    #: Platform object
    platform: COMPSPlatform = field(default=None)
    #: Name of experiment when installing requirements
    name: str = field(default="install custom requirements")
    #: Path to requirements file
    requirements_path: str = field(default=None)
    #: list of packages
    pkg_list: list = field(default=None)
    #: list of wheel files locally to upload and install
    local_wheels: list = field(default=None)
    #: User tags
    asset_tags: dict = field(default=None)
    #: Internal checksum used to detect whether this unique requirements set has been run before
    _checksum: str = field(default=None, init=False)
    #: Calculated requirements including versions
    _requirements: List[str] = field(default=None, init=False)
    #: Since requirements vary by os, target it on the platform as well
    _os_target: str = field(default=None, init=False)
    #: Reserved tags
    __reserved_tag: list = field(default=None, init=False)

    def __post_init__(self):
        """
        Constructor.

        Raises:
            ValueError - if requirements_path, pkg_list, and local_wheels are empty.
        """
        if not any([self.requirements_path, self.pkg_list, self.local_wheels]):
            raise ValueError("Impossible to proceed without either requirements path or package list or local wheels!")

        self.requirements_path = os.path.abspath(self.requirements_path) if self.requirements_path else None
        self.pkg_list = self.pkg_list or []
        self.local_wheels = [os.path.abspath(whl) for whl in self.local_wheels] if self.local_wheels else []
        self.asset_tags = self.asset_tags or {}

    @property
    def checksum(self):
        """
        Calculate checksum on the requirements file.

        Returns:
            The md5 of the requirements.
        """
        if not self._checksum:
            # md5 is used here as a cache key, not for security
            req_content = '\n'.join(self.requirements)
            self._checksum = hashlib.md5(req_content.encode('utf-8')).hexdigest()

        return self._checksum

    @property
    def md5_tag(self):
        """
        Get unique key for our requirements + target.

        Returns:
            The md5 tag.
        """
        self.init_platform()
        return {MD5_KEY.format(self._os_target): self.checksum}

    @property
    def requirements(self):
        """
        Requirements property. We calculate this using consolidate_requirements.

        Returns:
            Consolidated requirements.
        """
        if not self._requirements:
            self._requirements = self.consolidate_requirements()

        return self._requirements

    def init_platform(self):
        """Initialize the platform."""
        if self.platform is None:
            # Try to detect platform
            from idmtools.core.context import get_current_platform
            p = get_current_platform()
            if p is not None:
                self.platform = p

        # NOTE(review): if no platform was supplied or detected, the next line
        # raises AttributeError on None — confirm callers always have a platform
        self._os_target = "win" if "slurm" not in self.platform.environment.lower() and self.platform.environment.lower() not in SLURM_ENVS else "linux"
        self.__reserved_tag = ['idmtools', 'task_type', MD5_KEY.format(self._os_target)]

    def run(self, rerun=False):
        """
        Run our utility.

        The working logic of this utility:
            1. check if asset collection exists for given requirements, return ac id if exists
            2. create an Experiment to install the requirements on COMPS
            3. create a WorkItem to create a Asset Collection

        Returns: return ac id based on the requirements if Experiment and WorkItem Succeeded

        Raises:
            Exception - If an error happens on workitem

        Notes:
            - TODO The exceptions here should be rewritten to parse errors from remote system like AssetizeOutputs
        """
        # Late validation
        self.init_platform()

        # Check if ac with md5 exists
        ac = self.retrieve_ac_by_tag()

        if ac and not rerun:
            return ac.id

        # Create Experiment to install custom requirements
        exp = self.run_experiment_to_install_lib()
        if exp is None:
            if logger.isEnabledFor(DEBUG):
                logger.debug('Failed to install requirements!')
            raise Exception('Failed to install requirements!')

        if logger.isEnabledFor(DEBUG):
            logger.debug(f'\nexp: {exp.uid}')

        # Create a WorkItem to create asset collection
        wi = self.run_wi_to_create_ac(exp.uid)
        if wi is None:
            if logger.isEnabledFor(DEBUG):
                logger.debug(f'Failed to create asset collection from experiment: {exp.uid}')
            raise Exception(f'Failed to create asset collection from experiment: {exp.uid}')

        if logger.isEnabledFor(DEBUG):
            logger.debug(f'\nwi: {wi.uid}')

        # get ac or return ad_id
        ac = self.retrieve_ac_from_wi(wi)

        if ac:
            return ac.id
        # NOTE(review): falls through to an implicit None when the asset
        # collection cannot be retrieved — confirm callers handle None

    def save_updated_requirements(self):
        """
        Save consolidated requirements to a file requirements_updated.txt.

        Returns:
            None
        """
        user_logger.info(
            f"Creating an updated requirements file ensuring all versions are specified at {REQUIREMENT_FILE}")
        req_content = '\n'.join(self.requirements)
        with open(REQUIREMENT_FILE, 'w') as outfile:
            outfile.write(req_content)

    def retrieve_ac_by_tag(self, md5_check=None):
        """
        Retrieve comps asset collection given ac tag.

        Args:
            md5_check: also can use custom md5 string as search tag
        Returns: comps asset collection, or None when no match exists
        """
        # Late validation
        self.init_platform()

        md5_str = md5_check or self.checksum
        if logger.isEnabledFor(DEBUG):
            logger.debug(f'md5_str: {md5_str}')

        # check if ac with tag idmtools-requirements-md5 = my_md5 exists
        ac_list = COMPSAssetCollection.get(
            query_criteria=QueryCriteria().select_children('tags').where_tag(
                [f'{MD5_KEY.format(self._os_target)}={md5_str}']))

        # if exists, get ac and return it
        if len(ac_list) > 0:
            # prefer the most recently created matching collection
            ac_list = sorted(ac_list, key=lambda t: t.date_created, reverse=True)
            user_logger.info(f"Found existing requirements assets at {ac_list[0].id}")
            return ac_list[0]

    def retrieve_ac_from_wi(self, wi):
        """
        Retrieve ac id from file ac_info.txt saved by WI.

        Args:
            wi: SSMTWorkItem (which was used to create ac from library)
        Returns: COMPS asset collection
        """
        ac_file = "ac_info.txt"

        # retrieve ac file
        ret = self.platform.get_files_by_id(wi.uid, ItemType.WORKFLOW_ITEM, [ac_file])

        # get file content
        ac_id_bytes = ret[ac_file]

        # convert bytes to string
        ac_id_str = ac_id_bytes.decode('utf-8')

        # return comps ac
        return self.platform.get_item(ac_id_str, ItemType.ASSETCOLLECTION, raw=True)

    def add_wheels_to_assets(self, experiment):
        """
        Add wheels to assets of our experiment.

        Args:
            experiment: Experiment to add assets to

        Returns:
            None
        """
        for whl in self.local_wheels:
            a = Asset(filename=os.path.basename(whl), absolute_path=whl)
            experiment.add_asset(a)

    def run_experiment_to_install_lib(self):
        """
        Create an Experiment which will run another py script to install requirements.

        Returns: Experiment created, or None when the experiment did not succeed
        """
        self.save_updated_requirements()

        task = JSONConfiguredPythonTask(script_path=os.path.join(CURRENT_DIRECTORY, MODEL_LOAD_LIB))
        experiment = Experiment(name=self.name, simulations=[task.to_simulation()])
        experiment.add_asset(Asset(REQUIREMENT_FILE))
        experiment.tags = self.md5_tag

        # Avoid conflict to reserved tag
        if len(set(self.asset_tags).intersection(self.__reserved_tag)) > 0:
            raise Exception(f"{self.__reserved_tag} are reserved tags, please use other tags!")

        # Remove conflicts in case
        for tag in self.__reserved_tag:
            self.asset_tags.pop(tag, None)

        # Update experiment's tags
        experiment.tags.update(self.asset_tags)

        self.add_wheels_to_assets(experiment)
        user_logger.info("Run install of python requirements on COMPS. To view the details, see the experiment below")
        experiment.run(wait_until_done=True, platform=self.platform, use_short_path=True, num_cores=1)

        if experiment.succeeded:
            return experiment

    def run_wi_to_create_ac(self, exp_id):
        """
        Create an WorkItem which will run another py script to create new asset collection.

        Args:
            exp_id: the Experiment id (which installed requirements)
        Returns: work item created, or None when it failed
        """
        from idmtools_platform_comps.ssmt_work_items.comps_workitems import SSMTWorkItem

        md5_str = self.checksum
        if logger.isEnabledFor(DEBUG):
            logger.debug(f'md5_str: {md5_str}')

        wi_name = "wi to create ac"
        command = f"python3 {MODEL_CREATE_AC} {exp_id} {self.platform.endpoint} {self._os_target}"

        # Update tags
        tags = {MD5_KEY.format(self._os_target): self.checksum}
        tags.update(self.asset_tags)

        user_logger.info(
            "Converting Python Packages to an Asset Collection. This may take some time for large dependency lists")
        wi = SSMTWorkItem(name=wi_name, command=command,
                          transient_assets=AssetCollection([os.path.join(CURRENT_DIRECTORY, MODEL_CREATE_AC)]),
                          tags=tags, related_experiments=[exp_id])

        wi.run(wait_until_done=True, platform=self.platform)

        if wi.succeeded:
            # make ac as related_asset_collection to wi
            from COMPS.Data.WorkItem import RelationType
            comps_ac = self.retrieve_ac_from_wi(wi)
            comps_wi = self.platform.get_item(wi.uid, ItemType.WORKFLOW_ITEM, raw=True)
            comps_wi.add_related_asset_collection(comps_ac.id, relation_type=RelationType.Created)
            comps_wi.save()
            return wi
        else:
            user_logger.warning("Work item failed. See logs")
            try:
                # best-effort: surface the remote stderr to the user
                files = self.platform.get_files_by_id(wi.uid, wi.item_type, ["stderr.txt"])
                user_logger.error(f'Server Error Log: {files["stderr.txt"].decode("utf-8")}')
            except: # noqa: E722
                pass

    def consolidate_requirements(self):
        """
        Combine requirements and dynamic requirements (a list).

        We do the following:
            - get the latest version of package if version is not provided
            - dynamic requirements will overwrites the requirements file

        Returns: the consolidated requirements (as a list)
        """
        req_dict = {}
        comment_list = []

        if self.requirements_path:
            with open(self.requirements_path, 'r') as fd:
                for _cnt, line in enumerate(fd):
                    line = line.strip()
                    if line == '':
                        continue

                    if line.startswith('#'):
                        # comments are collected but intentionally excluded from the output
                        comment_list.append(line)
                        continue

                    req = Requirement(line)
                    req_dict[req.name] = req.specifier

        # pkg_list will overwrite pkg in requirement file
        if self.pkg_list:
            for pkg in self.pkg_list:
                req = Requirement(pkg)
                req_dict[req.name] = req.specifier

        req_list = []
        for k, v in req_dict.items():
            # pin every requirement to the highest version satisfying its specifier
            pkg_requirement = f'{k}{str(v)}'
            req_list.append(f'{k}=={get_highest_version(pkg_requirement)}')

        wheel_list = []
        if self.local_wheels:
            wheel_list.extend([f"Assets/{os.path.basename(whl)}" for whl in self.local_wheels])

        req_list = sorted(req_list, reverse=False)
        wheel_list = sorted(wheel_list, reverse=False)
        update_req_list = req_list + wheel_list

        return update_req_list
@@ -0,0 +1,40 @@
1
+ """idmtools special comps hooks.
2
+
3
+ Notes:
4
+ - TODO update this to use new function plugins
5
+
6
+ Copyright 2021, Bill & Melinda Gates Foundation. All rights reserved.
7
+ """
8
# COMPS environments known to run on Slurm (linux) rather than Windows HPC
SLURM_ENVIRONMENTS = ['slurmdev', 'slurm2', 'slurmstage', 'calculon']
# Executable name to substitute for "python" on those environments
PYTHON_EXECUTABLE = 'python3'
10
+
11
+
12
def platform_task_hooks(task, platform):
    """
    Swap in the platform-appropriate python executable on a task.

    Args:
        task: PythonTask or CommandTask to adjust
        platform: the platform the user is running against

    Returns: the task, rebuilt with the proper executable where applicable

    Notes:
        - TODO revisit with SingularityTasks later
    """
    try:
        from idmtools_models.python.python_task import PythonTask
        from idmtools.entities.command_task import CommandTask
    except ImportError:
        # Optional model plugins are unavailable; leave the task untouched
        return task

    if isinstance(task, PythonTask):
        if platform.environment.lower() in SLURM_ENVIRONMENTS:
            task.python_path = PYTHON_EXECUTABLE
            task.command.executable = PYTHON_EXECUTABLE
    elif isinstance(task, CommandTask) and platform.environment.lower() in SLURM_ENVIRONMENTS:
        # Rewrite a leading "python" token to the platform executable
        parts = task.command.executable.split(' ')
        if parts[0].lower() == 'python':
            parts[0] = PYTHON_EXECUTABLE
            task.command.executable = ' '.join(parts)

    return task
@@ -0,0 +1,154 @@
1
+ """idmtools scheduling utils for comps.
2
+
3
+ Copyright 2021, Bill & Melinda Gates Foundation. All rights reserved.
4
+ """
5
+ import json
6
+ from os import PathLike
7
+ from typing import List, Union, Dict
8
+ from idmtools.assets import Asset
9
+ from idmtools.entities.experiment import Experiment
10
+ from idmtools.entities.simulation import Simulation
11
+ from idmtools.entities.templated_simulation import TemplatedSimulations
12
+ from logging import DEBUG
13
+ import logging
14
+
15
# Error message for unsupported object types passed to the scheduling helpers
SCHEDULING_ERROR_UNSUPPORTED_TYPE = "The method only support object type: Experiment, Simulation, TemplatedSimulations!"
# Error message when attempting to add scheduling config to an empty experiment
SCHEDULING_ERROR_EMPTY_EXPERIMENT = "You cannot add scheduling config to an empty experiment."

logger = logging.getLogger(__name__)
19
+
20
+
21
+ def default_add_workorder_sweep_callback(simulation, file_name, file_path):
22
+ """
23
+ Utility function to add updated WorkOrder.json to each simulation as linked file via simulation task.
24
+
25
+ first loads original workorder file from local, then update Command field in it from each simulation object's
26
+ simulation.task.command.cmd, then write updated command to WorkOrder.json, and load this file to simulation
27
+
28
+ Args:
29
+ simulation: Simulation we are configuring
30
+ file_name: Filename to use
31
+ file_path: Path to file
32
+
33
+ Returns:
34
+ None
35
+ """
36
+ add_work_order(simulation, file_name=file_name, file_path=file_path)
37
+
38
+
39
def default_add_schedule_config_sweep_callback(simulation, command: str = None, **config_opts):
    """Default callback to be used for sweeps that affect a scheduling config."""
    # NOTE(review): the sweep machinery appears to pass the real options nested
    # under a "config_opts" keyword, hence the explicit lookup — confirm against callers
    add_schedule_config(simulation, command=command, **config_opts["config_opts"])
42
+
43
+
44
def scheduled(simulation: Simulation):
    """
    Check whether scheduling has been enabled on a simulation.

    Args:
        simulation: Simulation to check

    Returns:
        The simulation.scheduling attribute when defined, otherwise False.
    """
    return getattr(simulation, 'scheduling', False)
56
+
57
+
58
def _add_work_order_asset(item: Union[Experiment, Simulation, TemplatedSimulations], config: Dict,
                          file_name: str = "WorkOrder.json"):
    """
    Helper function to add an WorkOrder.json asset to an item.

    Supports a single Simulation, a TemplatedSimulations (its base simulation
    plus any extra simulations via the Experiment path), or an Experiment
    (every contained simulation).

    Args:
        item: The item to add the asset to
        config: The configuration dictionary serialized into the work order
        file_name: The name of the file to create

    Returns:
        None

    Raises:
        ValueError: If the experiment is empty or the item type is unsupported.
    """

    def _process_simulation(simulation: Simulation):
        # Mark the simulation so scheduled() reports it as having a work order
        setattr(simulation, 'scheduling', True)
        # Mirror the task's command into the work order when one is defined
        if hasattr(simulation.task.command, 'cmd') and len(simulation.task.command.cmd) > 0:
            config["Command"] = simulation.task.command.cmd
        ctn = json.dumps(config, indent=3)
        simulation.add_asset(Asset(filename=file_name, content=ctn))

    if isinstance(item, Simulation):
        _process_simulation(item)
    elif isinstance(item, TemplatedSimulations):
        # Only the base simulation is touched; generated simulations derive from it
        _process_simulation(item.base_simulation)
    elif isinstance(item, Experiment):
        if isinstance(item.simulations.items, TemplatedSimulations):
            if len(item.simulations.items) == 0:
                raise ValueError(SCHEDULING_ERROR_EMPTY_EXPERIMENT)
            if logger.isEnabledFor(DEBUG):
                logger.debug("Using Base task from template for WorkOrder.json assets")
            _process_simulation(item.simulations.items.base_simulation)
            for sim in item.simulations.items.extra_simulations():
                _process_simulation(sim)
        elif isinstance(item.simulations.items, List):
            # NOTE(review): isinstance against typing.List is equivalent to
            # isinstance(..., list) here; consider the plain builtin
            if len(item.simulations.items) == 0:
                raise ValueError(SCHEDULING_ERROR_EMPTY_EXPERIMENT)
            if logger.isEnabledFor(DEBUG):
                logger.debug("Using all tasks to gather assets")
            for sim in item.simulations.items:
                _process_simulation(sim)
        else:
            # Reached when simulations.items is neither a template nor a list
            raise ValueError("You cannot run an empty experiment")
    else:
        raise ValueError(SCHEDULING_ERROR_UNSUPPORTED_TYPE)
103
+
104
+
105
def add_work_order(item: Union[Experiment, Simulation, TemplatedSimulations], file_name: str = "WorkOrder.json",
                   file_path: Union[str, PathLike] = "./WorkOrder.json"):
    """
    Adds a WorkOrder.json asset to an item from a local work-order file.

    Args:
        item: Item to add work order to
        file_name: Workorder file name used for the attached asset
        file_path: Path to the local work-order file to load

    Returns:
        None

    Raises:
        ValueError - If experiment is empty
                     If item is not an experiment, simulation, or TemplatedSimulations
        OSError - If the work-order file cannot be read
        json.JSONDecodeError - If the work-order file is not valid JSON
    """
    # json.load parses straight from the file object (vs read() + loads())
    with open(str(file_path), "r") as json_file:
        config = json.load(json_file)
    _add_work_order_asset(item, config, file_name=file_name)
125
+
126
+
127
def add_schedule_config(item: Union[Experiment, Simulation, TemplatedSimulations], command: str = None, **config_opts):
    """
    Attach a scheduling configuration to an item as a WorkOrder.json asset.

    Works with Experiments, Simulations, and TemplatedSimulations.

    Args:
        item: Item to add scheduling config to
        command: Command to run
        **config_opts: Additional config options

    config_opts details:
        - Environment: Environment variables to set in the job environment; these can be dynamically expanded
        - SingleNode (HPC only): A flag to limit all reserved cores to being on the same compute node
        - Exclusive (HPC only): A flag that controls whether nodes should be exclusively allocated to this job
        - EnableMpi (HPC or Slurm): A flag that controls whether to run the job with mpiexec
        - NodeGroupName (HPC or Slurm): The cluster node-group to commission to
        - NumCores (HPC or Slurm): The number of cores to reserve
        - NumNodes (Slurm Only): The number of nodes to schedule
        - NumProcesses (Slurm Only): The number of processes to execute
        - additionalProperties (HPC or Slurm): True or False

    Returns:
        None
    """
    # Single dict literal; any Command key in config_opts overrides the
    # command argument, exactly as dict(...) followed by update(...) did.
    work_order = {"Command": command, **config_opts}
    _add_work_order_asset(item, work_order, file_name="WorkOrder.json")