idmtools-platform-comps 0.0.0.dev0__py3-none-any.whl → 0.0.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. idmtools_platform_comps/__init__.py +25 -8
  2. idmtools_platform_comps/cli/__init__.py +4 -0
  3. idmtools_platform_comps/cli/cli_functions.py +50 -0
  4. idmtools_platform_comps/cli/comps.py +492 -0
  5. idmtools_platform_comps/comps_cli.py +48 -0
  6. idmtools_platform_comps/comps_operations/__init__.py +6 -0
  7. idmtools_platform_comps/comps_operations/asset_collection_operations.py +263 -0
  8. idmtools_platform_comps/comps_operations/experiment_operations.py +569 -0
  9. idmtools_platform_comps/comps_operations/simulation_operations.py +678 -0
  10. idmtools_platform_comps/comps_operations/suite_operations.py +228 -0
  11. idmtools_platform_comps/comps_operations/workflow_item_operations.py +269 -0
  12. idmtools_platform_comps/comps_platform.py +309 -0
  13. idmtools_platform_comps/plugin_info.py +168 -0
  14. idmtools_platform_comps/ssmt_operations/__init__.py +6 -0
  15. idmtools_platform_comps/ssmt_operations/simulation_operations.py +77 -0
  16. idmtools_platform_comps/ssmt_operations/workflow_item_operations.py +73 -0
  17. idmtools_platform_comps/ssmt_platform.py +44 -0
  18. idmtools_platform_comps/ssmt_work_items/__init__.py +4 -0
  19. idmtools_platform_comps/ssmt_work_items/comps_work_order_task.py +29 -0
  20. idmtools_platform_comps/ssmt_work_items/comps_workitems.py +113 -0
  21. idmtools_platform_comps/ssmt_work_items/icomps_workflowitem.py +71 -0
  22. idmtools_platform_comps/ssmt_work_items/work_order.py +54 -0
  23. idmtools_platform_comps/utils/__init__.py +4 -0
  24. idmtools_platform_comps/utils/assetize_output/__init__.py +4 -0
  25. idmtools_platform_comps/utils/assetize_output/assetize_output.py +125 -0
  26. idmtools_platform_comps/utils/assetize_output/assetize_ssmt_script.py +144 -0
  27. idmtools_platform_comps/utils/base_singularity_work_order.json +6 -0
  28. idmtools_platform_comps/utils/download/__init__.py +4 -0
  29. idmtools_platform_comps/utils/download/download.py +178 -0
  30. idmtools_platform_comps/utils/download/download_ssmt.py +81 -0
  31. idmtools_platform_comps/utils/download_experiment.py +116 -0
  32. idmtools_platform_comps/utils/file_filter_workitem.py +519 -0
  33. idmtools_platform_comps/utils/general.py +358 -0
  34. idmtools_platform_comps/utils/linux_mounts.py +73 -0
  35. idmtools_platform_comps/utils/lookups.py +123 -0
  36. idmtools_platform_comps/utils/package_version.py +489 -0
  37. idmtools_platform_comps/utils/python_requirements_ac/__init__.py +4 -0
  38. idmtools_platform_comps/utils/python_requirements_ac/create_asset_collection.py +155 -0
  39. idmtools_platform_comps/utils/python_requirements_ac/install_requirements.py +109 -0
  40. idmtools_platform_comps/utils/python_requirements_ac/requirements_to_asset_collection.py +374 -0
  41. idmtools_platform_comps/utils/python_version.py +40 -0
  42. idmtools_platform_comps/utils/scheduling.py +154 -0
  43. idmtools_platform_comps/utils/singularity_build.py +491 -0
  44. idmtools_platform_comps/utils/spatial_output.py +76 -0
  45. idmtools_platform_comps/utils/ssmt_utils/__init__.py +6 -0
  46. idmtools_platform_comps/utils/ssmt_utils/common.py +70 -0
  47. idmtools_platform_comps/utils/ssmt_utils/file_filter.py +568 -0
  48. idmtools_platform_comps/utils/sweeping.py +162 -0
  49. idmtools_platform_comps-0.0.2.dist-info/METADATA +100 -0
  50. idmtools_platform_comps-0.0.2.dist-info/RECORD +62 -0
  51. idmtools_platform_comps-0.0.2.dist-info/entry_points.txt +9 -0
  52. idmtools_platform_comps-0.0.2.dist-info/licenses/LICENSE.TXT +3 -0
  53. {idmtools_platform_comps-0.0.0.dev0.dist-info → idmtools_platform_comps-0.0.2.dist-info}/top_level.txt +1 -0
  54. ssmt_image/Dockerfile +52 -0
  55. ssmt_image/Makefile +21 -0
  56. ssmt_image/__init__.py +6 -0
  57. ssmt_image/bootstrap.sh +30 -0
  58. ssmt_image/build_docker_image.py +161 -0
  59. ssmt_image/pip.conf +3 -0
  60. ssmt_image/push_docker_image.py +49 -0
  61. ssmt_image/requirements.txt +9 -0
  62. idmtools_platform_comps-0.0.0.dev0.dist-info/METADATA +0 -41
  63. idmtools_platform_comps-0.0.0.dev0.dist-info/RECORD +0 -5
  64. {idmtools_platform_comps-0.0.0.dev0.dist-info → idmtools_platform_comps-0.0.2.dist-info}/WHEEL +0 -0
idmtools_platform_comps/utils/singularity_build.py
@@ -0,0 +1,491 @@
+ """idmtools singularity build workitem.
+
+ Notes:
+     - TODO add examples here.
+
+ Copyright 2021, Bill & Melinda Gates Foundation. All rights reserved.
+ """
+ import hashlib
+ import io
+ import json
+ import os
+ import re
+ import uuid
+ from dataclasses import dataclass, field, InitVar
+ from logging import getLogger, DEBUG
+ from os import PathLike
+ from pathlib import PurePath
+ from typing import List, Dict, Union, Optional, TYPE_CHECKING
+ from urllib.parse import urlparse
+ from uuid import UUID
+ from COMPS.Data import QueryCriteria
+ from jinja2 import Environment
+ from idmtools import IdmConfigParser
+ from idmtools.assets import AssetCollection, Asset
+ from idmtools.assets.file_list import FileList
+ from idmtools.core import EntityStatus, NoPlatformException
+ from idmtools.core.logging import SUCCESS
+ from idmtools.entities.command_task import CommandTask
+ from idmtools.entities.relation_type import RelationType
+ from idmtools.utils.hashing import calculate_md5_stream
+ from idmtools_platform_comps.ssmt_work_items.comps_workitems import InputDataWorkItem
+ from idmtools_platform_comps.utils.general import save_sif_asset_md5_from_ac_id
+ from idmtools_platform_comps.utils.package_version import get_docker_manifest, get_digest_from_docker_hub
+
+ if TYPE_CHECKING:
+     from idmtools.entities.iplatform import IPlatform
+
+ SB_BASE_WORKER_PATH = os.path.join(os.path.dirname(__file__), 'base_singularity_work_order.json')
+
+ logger = getLogger(__name__)
+ user_logger = getLogger('user')
+
+
+ @dataclass(repr=False)
+ class SingularityBuildWorkItem(InputDataWorkItem):
+     """
+     Provides a wrapper to build a Singularity image utilizing the COMPS build server.
+
+     Notes:
+         - TODO add references to examples
+     """
+     #: Path to definition file
+     definition_file: Union[PathLike, str] = field(default=None)
+     #: Definition content. Alternative to file
+     definition_content: str = field(default=None)
+     #: Enables Jinja parsing of the definition file or content
+     is_template: bool = field(default=False)
+     #: template_args
+     template_args: Dict[str, str] = field(default_factory=dict)
+     #: Image Url
+     image_url: InitVar[str] = None
+     #: Destination image name
+     image_name: str = field(default=None)
+     #: Name of the workitem
+     name: str = field(default=None)
+     #: Tags to add to container asset collection
+     image_tags: Dict[str, str] = field(default_factory=dict)
+     #: Allows you to set a different library. (The default library is “https://library.sylabs.io”). See https://sylabs.io/guides/3.5/user-guide/cli/singularity_build.html
+     library: str = field(default=None)
+     #: Only run specific section(s) of definition file (setup, post, files, environment, test, labels, none) (default [all])
+     section: List[str] = field(default_factory=lambda: ['all'])
+     #: Build using user namespace to fake root user (requires a privileged installation)
+     fix_permissions: bool = field(default=False)
+     # AssetCollection created by build
+     asset_collection: AssetCollection = field(default=None)
+     #: Additional Mounts
+     additional_mounts: List[str] = field(default_factory=list)
+     #: Environment vars for remote build
+     environment_variables: Dict[str, str] = field(default_factory=dict)
+     #: Force build
+     force: bool = field(default=False)
+     #: Don't include default tags
+     disable_default_tags: bool = field(default=None)
+     # ID added to the work item and the resulting asset collection so the two can be tied together
+     run_id: uuid.UUID = field(default_factory=uuid.uuid4)
+
+     #: Loaded if url is docker://. Used to determine if we need to re-run a build
+     __digest: Dict[str, str] = field(default=None)
+     __image_tag: str = field(default=None)
+
+     #: Rendered template. We have to store it so it is calculated before run, which happens outside our normal pre-create hooks
+     __rendered_template: str = field(default=None)
+
+     def __post_init__(self, item_name: str, asset_collection_id: UUID, asset_files: FileList, user_files: FileList, image_url: str):
+         """Constructor."""
+         self.work_item_type = 'ImageBuilderWorker'
+         self._image_url = None
+         # Set this for now. Later it should be replaced with some type of specialized worker identifier
+         self.task = CommandTask("ImageBuilderWorker")
+         super().__post_init__(item_name, asset_collection_id, asset_files, user_files)
+
+         self.image_url = image_url if isinstance(image_url, str) else None
+         if isinstance(self.definition_file, PathLike):
+             self.definition_file = str(self.definition_file)
+
+     def get_container_info(self) -> Dict[str, str]:
+         """Get container info.
+
+         Notes:
+             - TODO remove this
+         """
+         pass
+
+     @property
+     def image_url(self):  # noqa: F811
+         """Get the image url."""
+         return self._image_url
+
+     @image_url.setter
+     def image_url(self, value: str):
+         """
+         Set the image url.
+
+         Args:
+             value: Image url to set
+
+         Returns:
+             None
+         """
+         url_info = urlparse(value)
+         if url_info.scheme == "docker":
+             if "packages.idmod.org" in value:
+                 full_manifest, self.__image_tag = get_docker_manifest(url_info.path)
+                 self.__digest = full_manifest['config']['digest']
+             else:
+                 self.__image_tag = url_info.netloc + ":latest" if ":" not in value else url_info.netloc
+                 image, tag = url_info.netloc.split(":")
+                 self.__digest = get_digest_from_docker_hub(image, tag)
+             if self.fix_permissions:
+                 self.__digest += "--fix-perms"
+             if self.name is None:
+                 self.name = f"Load Singularity image from Docker {self.__image_tag}"
+         # TODO how to do this for shub
+         self._image_url = value
+
+     def context_checksum(self) -> str:
+         """
+         Calculate the context checksum of a singularity build.
+
+         The context is the checksum of all the assets defined for input, the singularity definition file, and the environment variables.
+
+         Returns:
+             Context checksum.
+         """
+         file_hash = hashlib.sha256()
+         # ensure our template is set
+         self.__add_common_assets()
+         for asset in sorted(self.assets + self.transient_assets, key=lambda a: a.short_remote_path()):
+             if asset.absolute_path:
+                 with open(asset.absolute_path, mode='rb') as ain:
+                     calculate_md5_stream(ain, file_hash=file_hash)
+             else:
+                 self.__add_file_to_context(json.dumps([asset.filename, asset.relative_path, str(asset.checksum)], sort_keys=True) if asset.persisted else asset.bytes, file_hash)
+
+         if len(self.environment_variables):
+             contents = json.dumps(self.environment_variables, sort_keys=True)
+             self.__add_file_to_context(contents, file_hash)
+
+         if logger.isEnabledFor(DEBUG):
+             logger.debug(f'Context: sha256:{file_hash.hexdigest()}')
+         return f'sha256:{file_hash.hexdigest()}'
+
+     def __add_file_to_context(self, contents: Union[str, bytes], file_hash):
+         """
+         Add a specific file content to context checksum.
+
+         Args:
+             contents: Contents
+             file_hash: File hash to add to
+
+         Returns:
+             None
+         """
+         item = io.BytesIO()
+         item.write(contents.encode('utf-8') if isinstance(contents, str) else contents)
+         item.seek(0)
+         calculate_md5_stream(item, file_hash=file_hash)
+
+     def render_template(self) -> Optional[str]:
+         """
+         Render template. Only applies when is_template is True. When true, it renders the template using Jinja to a cached value.
+
+         Returns:
+             Rendered Template
+         """
+         if self.is_template:
+             # We don't allow re-running template rendering
+             if self.__rendered_template is None:
+                 if logger.isEnabledFor(DEBUG):
+                     logger.debug("Rendering template")
+                 contents = None
+                 # try from file first
+                 if self.definition_file:
+                     with open(self.definition_file, mode='r') as ain:
+                         contents = ain.read()
+                 elif self.definition_content:
+                     contents = self.definition_content
+
+                 if contents:
+                     env = Environment()
+                     template = env.from_string(contents)
+                     self.__rendered_template = template.render(env=os.environ, sbi=self, **self.template_args)
+             return self.__rendered_template
+         return None
+
+     @staticmethod
+     def find_existing_container(sbi: 'SingularityBuildWorkItem', platform: 'IPlatform' = None) -> Optional[AssetCollection]:
+         """
+         Find existing container.
+
+         Args:
+             sbi: SingularityBuildWorkItem to find existing container matching config
+             platform: Platform to load the object from
+
+         Returns:
+             Existing Asset Collection
+         """
+         if platform is None:
+             from idmtools.core.context import CURRENT_PLATFORM
+             if CURRENT_PLATFORM is None:
+                 raise NoPlatformException("No Platform defined on object, in current context, or passed to run")
+             platform = CURRENT_PLATFORM
+         ac = None
+         if not sbi.force:  # don't search if it is going to be forced
+             qc = QueryCriteria().where_tag(['type=singularity']).select_children(['assets', 'tags']).orderby('date_created desc')
+             if sbi.__digest:
+                 qc.where_tag([f'digest={sbi.__digest}'])
+             elif sbi.definition_file or sbi.definition_content:
+                 qc.where_tag([f'build_context={sbi.context_checksum()}'])
+             if len(qc.tag_filters) > 1:
+                 if logger.isEnabledFor(DEBUG):
+                     logger.debug("Searching for existing containers")
+                 ac = platform._assets.get(None, query_criteria=qc)
+                 if ac:
+                     if logger.isEnabledFor(DEBUG):
+                         logger.debug(f"Found: {len(ac)} previous builds")
+                     ac = platform._assets.to_entity(ac[0])
+                     if logger.isEnabledFor(DEBUG):
+                         logger.debug(f'Found existing container in {ac.id}')
+                 else:
+                     ac = None
+
+         return ac
+
+     def __add_tags(self):
+         """
+         Add default tags to the asset collection to be created.
+
+         The most important part of this logic is the digest/run_id information we add. This is what enables the build/pull cache through COMPS.
+
+         Returns:
+             None
+         """
+         self.image_tags['type'] = 'singularity'
+         # Disable all tags but image name and type
+         if not self.disable_default_tags:
+             if self.platform is not None and hasattr(self.platform, 'get_username'):
+                 self.image_tags['created_by'] = self.platform.get_username()
+             # allow users to override run id using only the tag
+             if 'run_id' in self.tags:
+                 self.run_id = self.tags['run_id']
+             else:
+                 # set the run id on the workitem and resulting tags
+                 self.tags['run_id'] = str(self.run_id)
+             self.image_tags['run_id'] = self.tags['run_id']
+             # Check for the digest
+             if self.__digest and isinstance(self.__digest, str):
+                 self.image_tags['digest'] = self.__digest
+                 self.image_tags['image_from'] = self.__image_tag
+                 if self.image_name is None:
+                     self.image_name = self.__image_tag.strip(" /").replace(":", "_").replace("/", "_") + ".sif"
+             # If we are building from a file, add the build context
+             elif self.definition_file:
+                 self.image_tags['build_context'] = self.context_checksum()
+                 if self.image_name is None:
+                     bn = PurePath(self.definition_file).name
+                     bn = str(bn).replace(".def", ".sif")
+                     self.image_name = bn
+             elif self.definition_content:
+                 self.image_tags['build_context'] = self.context_checksum()
+             if self.image_url:
+                 self.image_tags['image_url'] = self.image_url
+
+         # Final fallback for image name
+         if self.image_name is None:
+             self.image_name = "image.sif"
+         if self.image_name and not self.image_name.endswith(".sif"):
+             self.image_name = f'{self.image_name}.sif'
+         # Add image name to the tags
+         self.image_tags['image_name'] = self.image_name
+
+     def _prep_work_order_before_create(self) -> Dict[str, str]:
+         """
+         Prep work order before creation.
+
+         Returns:
+             Workorder for singularity build.
+         """
+         self.__add_tags()
+         self.load_work_order(SB_BASE_WORKER_PATH)
+         if self.definition_file or self.definition_content:
+             self.work_order['Build']['Input'] = "Assets/Singularity.def"
+         else:
+             self.work_order['Build']['Input'] = self.image_url
+         if len(self.environment_variables):
+             self.work_order['Build']['StaticEnvironment'] = self.environment_variables
+         if len(self.additional_mounts):
+             self.work_order['Build']['AdditionalMounts'] = self.additional_mounts
+         self.work_order['Build']['Output'] = self.image_name if self.image_name else "image.sif"
+         self.work_order['Build']['Tags'] = self.image_tags
+         self.work_order['Build']['Flags'] = dict()
+         if self.fix_permissions:
+             self.work_order['Build']['Flags']['Switches'] = ["--fix-perms"]
+         if self.library:
+             self.work_order['Build']['Flags']['--library'] = self.library
+         if self.section:
+             self.work_order['Build']['Flags']['--section'] = self.section
+         return self.work_order
+
+     def pre_creation(self, platform: 'IPlatform') -> None:
+         """
+         Pre-Creation item.
+
+         Args:
+             platform: Platform object
+
+         Returns:
+             None
+         """
+         if self.name is None:
+             self.name = "Singularity Build"
+             if self.definition_file:
+                 self.name += f" of {PurePath(self.definition_file).name}"
+         super(SingularityBuildWorkItem, self).pre_creation(platform)
+         self.__add_common_assets()
+         self._prep_work_order_before_create()
+
+     def __add_common_assets(self):
+         """
+         Add common assets, which in this case is the singularity definition file.
+
+         Returns:
+             None
+         """
+         self.render_template()
+         if self.definition_file:
+             opts = dict(content=self.__rendered_template) if self.is_template else dict(absolute_path=self.definition_file)
+             self.assets.add_or_replace_asset(Asset(filename="Singularity.def", **opts))
+         elif self.definition_content:
+             opts = dict(content=self.__rendered_template if self.is_template else self.definition_content)
+             self.assets.add_or_replace_asset(Asset(filename="Singularity.def", **opts))
+
+     def __fetch_finished_asset_collection(self, platform: 'IPlatform') -> Union[AssetCollection, None]:
+         """
+         Fetch the Singularity asset collection we created.
+
+         Args:
+             platform: Platform to fetch from.
+
+         Returns:
+             Asset Collection or None
+         """
+         comps_workitem = self.get_platform_object(force=True)
+         acs = comps_workitem.get_related_asset_collections(RelationType.Created)
+         if acs:
+             self.asset_collection = AssetCollection.from_id(acs[0].id, platform=platform if platform else self.platform)
+             if IdmConfigParser.is_output_enabled():
+                 user_logger.log(SUCCESS, f"Created Singularity image as Asset Collection: {self.asset_collection.id}")
+                 user_logger.log(SUCCESS, f"View AC at {self.platform.get_asset_collection_link(self.asset_collection)}")
+             return self.asset_collection
+         return None
+
+     def run(self, wait_until_done: bool = True, platform: 'IPlatform' = None, wait_on_done_progress: bool = True, **run_opts) -> Optional[AssetCollection]:
+         """
+         Run the build.
+
+         Args:
+             wait_until_done: Wait until build completes
+             platform: Platform to run on
+             wait_on_done_progress: Show progress while waiting
+             **run_opts: Extra run options
+
+         Returns:
+             Asset collection that was created if successful
+         """
+         p = super()._check_for_platform_from_context(platform)
+         opts = dict(wait_on_done_progress=wait_on_done_progress, wait_until_done=wait_until_done, platform=p, wait_progress_desc=f"Waiting for build of Singularity container: {self.name}")
+         ac = self.find_existing_container(self, platform=p)
+         if ac is None or self.force:
+             super().run(**opts)
+             ac = self.asset_collection
+
+         else:
+             if IdmConfigParser.is_output_enabled():
+                 user_logger.log(SUCCESS, f"Existing build of image found with Asset Collection ID of {ac.id}")
+                 user_logger.log(SUCCESS, f"View AC at {self.platform.get_asset_collection_link(ac)}")
+             # Set id to None
+             self.uid = None
+             if ac:
+                 self.image_tags = ac.tags
+                 self.asset_collection = ac
+                 # how do we get id for original work item from AC?
+                 self.status = EntityStatus.SUCCEEDED
+
+         save_sif_asset_md5_from_ac_id(ac.id)
+         return self.asset_collection
+
+     def wait(self, wait_on_done_progress: bool = True, timeout: int = None, refresh_interval=None, platform: 'IPlatform' = None, wait_progress_desc: str = None) -> Optional[AssetCollection]:
+         """
+         Wait on the Singularity build work item to finish and fetch the resulting asset collection.
+
+         Args:
+             wait_on_done_progress: When set to true, a progress bar will be shown from the item
+             timeout: Timeout for waiting on item. If none, wait will be forever
+             refresh_interval: How often to refresh progress
+             platform: Platform
+             wait_progress_desc: Wait Progress Description Text
+
+         Returns:
+             AssetCollection created if item succeeds
+         """
+         # wait on related items before we wait on our item
+         p = super()._check_for_platform_from_context(platform)
+         opts = dict(wait_on_done_progress=wait_on_done_progress, timeout=timeout, refresh_interval=refresh_interval, platform=p, wait_progress_desc=wait_progress_desc)
+
+         super().wait(**opts)
+         if self.status == EntityStatus.SUCCEEDED:
+             return self.__fetch_finished_asset_collection(p)
+         return None
+
+     def get_id_filename(self, prefix: str = None) -> str:
+         """
+         Determine the id filename. Mostly used when the user does not provide one.
+
+         The logic is to combine the prefix and either
+         * the definition file name minus its extension, or
+         * the image url with the scheme parts filtered out of the name.
+
+         Args:
+             prefix: Optional prefix.
+
+         Returns:
+             id file name
+
+         Raises:
+             ValueError - When the filename cannot be calculated
+         """
+         if prefix is None:
+             prefix = ''
+         if self.definition_file:
+             base_name = PurePath(self.definition_file).name.replace(".def", ".id")
+             if prefix:
+                 base_name = f"{prefix}{base_name}"
+             filename = str(PurePath(self.definition_file).parent.joinpath(base_name))
+         elif self.image_url:
+             filename = re.sub(r"(docker|shub)://", "", self.image_url).replace(":", "_")
+             if filename:
+                 filename = f"{prefix}{filename}"
+         else:
+             raise ValueError("Could not calculate the filename. Please specify one")
+         if not filename.endswith(".id"):
+             filename += ".id"
+
+         return filename
+
+     def to_id_file(self, filename: Union[str, PathLike] = None, save_platform: bool = False):
+         """
+         Create an ID File.
+
+         If the filename is not provided, it will be calculated for definition files or for docker pulls.
+
+         Args:
+             filename: Filename
+             save_platform: Save Platform info to file as well
+
+         Returns:
+             None
+         """
+         if filename is None:
+             filename = self.get_id_filename(prefix='builder.')
+         super(SingularityBuildWorkItem, self).to_id_file(filename, save_platform)
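The work item above is normally driven from a short client script. Below is a minimal usage sketch rather than code shipped in the package: the platform alias ("CALCULON"), the local Singularity.def file, and the image name are assumptions.

    # Minimal sketch: build a Singularity image on COMPS from a local definition file.
    from idmtools.core.platform_factory import Platform
    from idmtools_platform_comps.utils.singularity_build import SingularityBuildWorkItem

    platform = Platform("CALCULON")  # assumed COMPS environment alias
    sbwi = SingularityBuildWorkItem(
        name="Build custom image",
        definition_file="Singularity.def",  # assumed local definition file
        image_name="custom.sif",
    )
    # run() first calls find_existing_container(); a build is only submitted when no
    # asset collection with a matching digest/build_context tag exists, or force=True.
    ac = sbwi.run(wait_until_done=True, platform=platform)
    if ac:
        sbwi.to_id_file()  # writes builder.Singularity.id next to the definition file

Passing image_url="docker://..." instead of a definition file converts an existing Docker image; as the image_url setter above shows, the Docker digest then becomes the cache key instead of the build-context checksum.
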
idmtools_platform_comps/utils/spatial_output.py
@@ -0,0 +1,76 @@
+ """
+ idmtools utility.
+
+ Copyright 2021, Bill & Melinda Gates Foundation. All rights reserved.
+ """
+ import struct
+ import numpy as np
+
+
+ class SpatialOutput:
+     """
+     SpatialOutput class is used to parse data from a binary spatial report file (.bin).
+     """
+
+     def __init__(self):
+         """
+         Initialize an instance of SpatialOutput.
+         This constructor does not take any parameters other than the implicit 'self'.
+         """
+         self.n_nodes = 0
+         self.n_tstep = 0
+         self.nodeids = []
+         self.data = None
+         self.start = 0
+         self.interval = 1
+
+     @classmethod
+     def from_bytes(cls, bytes, filtered=False):
+         """
+         Convert from bytes to a class object.
+
+         Args:
+             bytes: bytes
+             filtered: flag for applying filter
+         """
+         # The header size changes if the file is a filtered one
+         headersize = 16 if filtered else 8
+
+         # Create the class
+         so = cls()
+
+         # Retrieve the number of nodes and number of timesteps
+         so.n_nodes, = struct.unpack('i', bytes[0:4])
+         so.n_tstep, = struct.unpack('i', bytes[4:8])
+
+         # If filtered, retrieve the start and interval
+         if filtered:
+             start, = struct.unpack('f', bytes[8:12])
+             interval, = struct.unpack('f', bytes[12:16])
+             so.start = int(start)
+             so.interval = int(interval)
+
+         # Get the nodeids
+         so.nodeids = struct.unpack(str(so.n_nodes) + 'I', bytes[headersize:headersize + so.n_nodes * 4])
+         so.nodeids = np.asarray(so.nodeids)
+
+         # Retrieve the data
+         so.data = struct.unpack(str(so.n_nodes * so.n_tstep) + 'f',
+                                 bytes[headersize + so.n_nodes * 4:headersize + so.n_nodes * 4 + so.n_nodes * so.n_tstep * 4])
+         so.data = np.asarray(so.data)
+         so.data = so.data.reshape(so.n_tstep, so.n_nodes)
+
+         return so
+
+     def to_dict(self):
+         """
+         Convert to dict.
+
+         Returns:
+             dict
+         """
+         return {'n_nodes': self.n_nodes,
+                 'n_tstep': self.n_tstep,
+                 'nodeids': self.nodeids,
+                 'start': self.start,
+                 'interval': self.interval,
+                 'data': self.data}
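SpatialOutput operates directly on the raw bytes of a spatial report. A minimal sketch follows; the report file name is an assumption.

    # Minimal sketch: parse a spatial report binary with SpatialOutput.
    from idmtools_platform_comps.utils.spatial_output import SpatialOutput

    with open("SpatialReport_Prevalence.bin", "rb") as f:  # assumed local file
        so = SpatialOutput.from_bytes(f.read(), filtered=False)

    report = so.to_dict()
    print(report['n_nodes'], report['n_tstep'])  # header counts
    print(report['data'].shape)                  # numpy array shaped (n_tstep, n_nodes)
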
idmtools_platform_comps/utils/ssmt_utils/__init__.py
@@ -0,0 +1,6 @@
+ """idmtools ssmt utils.
+
+ These tools are meant to be used server-side within SSMT.
+
+ Copyright 2021, Bill & Melinda Gates Foundation. All rights reserved.
+ """
idmtools_platform_comps/utils/ssmt_utils/common.py
@@ -0,0 +1,70 @@
+ """idmtools common ssmt tools.
+
+ Copyright 2021, Bill & Melinda Gates Foundation. All rights reserved.
+ """
+ import json
+ import os
+ import traceback
+ from argparse import Namespace
+ from logging import getLogger, DEBUG
+ from COMPS import Client
+ from idmtools.core.exceptions import idmtools_error_handler
+
+ logger = getLogger(__name__)
+ user_logger = getLogger('user')
+
+
+ def ensure_debug_logging():
+     """Ensure we have debug logging enabled in idmtools."""
+     # set to debug before loading idmtools
+     os.environ['IDMTOOLS_LOGGING_LEVEL'] = 'DEBUG'
+     os.environ['IDMTOOLS_LOGGING_CONSOLE'] = 'on'
+     from idmtools.core.logging import setup_logging, IdmToolsLoggingConfig
+     setup_logging(IdmToolsLoggingConfig(level=DEBUG, console=True, force=True))
+     # Import idmtools here to enable logging
+     from idmtools import __version__
+     logger.debug(f"Using idmtools {__version__}")
+
+
+ def setup_verbose(args: Namespace):
+     """Set up verbose logging for ssmt."""
+     print(args)
+     if args.verbose:
+         ensure_debug_logging()
+         logger.debug(f"Args: {args}")
+
+
+ def login_to_env():
+     """Ensure we are logged in to the COMPS client."""
+     # load the work item
+     client = Client()
+     if logger.isEnabledFor(DEBUG):
+         logger.debug(f"Logging into {os.environ['COMPS_SERVER']}")
+     client.login(os.environ['COMPS_SERVER'])
+     return client
+
+
+ def get_error_handler_dump_config_and_error(job_config):
+     """
+     Define our exception handler for ssmt.
+
+     This exception handler writes an "error_reason.json" file to the job that contains error info with additional data.
+
+     Args:
+         job_config: Job config used to execute items
+
+     Returns:
+         Error handler for ssmt
+     """
+
+     def ssmt_error_handler(exctype, value: Exception, tb):
+         with open("error_reason.json", 'w') as err_out:
+             output_error = dict(type=exctype.__name__, args=list(value.args), tb=traceback.format_tb(tb), job_config=job_config)
+             output_error['tb'] = [t.strip() for t in output_error['tb']]
+             if hasattr(value, 'doc_link'):
+                 output_error['doc_link'] = value.doc_link
+             json.dump(output_error, err_out, indent=4, sort_keys=True)
+
+         idmtools_error_handler(exctype, value, tb)
+
+     return ssmt_error_handler
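To show how these helpers fit together, here is a hypothetical SSMT entry-script sketch: the --verbose flag and the job_config payload are assumptions, and COMPS_SERVER is expected to be provided by the SSMT environment.

    # Hypothetical wiring of the helpers above inside an SSMT entry script.
    import sys
    from argparse import ArgumentParser
    from idmtools_platform_comps.utils.ssmt_utils.common import (
        setup_verbose, login_to_env, get_error_handler_dump_config_and_error,
    )

    parser = ArgumentParser()
    parser.add_argument("--verbose", action="store_true")
    args = parser.parse_args()

    setup_verbose(args)  # enables idmtools debug logging when --verbose is set
    sys.excepthook = get_error_handler_dump_config_and_error(dict(args=vars(args)))
    client = login_to_env()  # logs in using the COMPS_SERVER environment variable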