qarnot-2.13.1.tar.gz → qarnot-2.15.0.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79)
  1. {qarnot-2.13.1/qarnot.egg-info → qarnot-2.15.0}/PKG-INFO +7 -4
  2. {qarnot-2.13.1 → qarnot-2.15.0}/README.rst +1 -3
  3. {qarnot-2.13.1 → qarnot-2.15.0}/qarnot/__init__.py +11 -1
  4. {qarnot-2.13.1 → qarnot-2.15.0}/qarnot/_util.py +6 -7
  5. {qarnot-2.13.1 → qarnot-2.15.0}/qarnot/_version.py +3 -3
  6. {qarnot-2.13.1 → qarnot-2.15.0}/qarnot/bucket.py +135 -75
  7. {qarnot-2.13.1 → qarnot-2.15.0}/qarnot/exceptions.py +11 -1
  8. {qarnot-2.13.1 → qarnot-2.15.0}/qarnot/pool.py +32 -0
  9. qarnot-2.15.0/qarnot/secrets.py +317 -0
  10. {qarnot-2.13.1 → qarnot-2.15.0}/qarnot/task.py +33 -0
  11. {qarnot-2.13.1 → qarnot-2.15.0/qarnot.egg-info}/PKG-INFO +7 -4
  12. {qarnot-2.13.1 → qarnot-2.15.0}/qarnot.egg-info/SOURCES.txt +2 -0
  13. {qarnot-2.13.1 → qarnot-2.15.0}/requirements.txt +2 -1
  14. qarnot-2.15.0/test/test_bucket.py +288 -0
  15. {qarnot-2.13.1 → qarnot-2.15.0}/test/test_pool.py +72 -0
  16. qarnot-2.15.0/test/test_secrets.py +180 -0
  17. {qarnot-2.13.1 → qarnot-2.15.0}/test/test_task.py +70 -0
  18. {qarnot-2.13.1 → qarnot-2.15.0}/versioneer.py +8 -3
  19. qarnot-2.13.1/test/test_bucket.py +0 -114
  20. {qarnot-2.13.1 → qarnot-2.15.0}/LICENSE +0 -0
  21. {qarnot-2.13.1 → qarnot-2.15.0}/MANIFEST.in +0 -0
  22. {qarnot-2.13.1 → qarnot-2.15.0}/doc/Makefile +0 -0
  23. {qarnot-2.13.1 → qarnot-2.15.0}/doc/make.bat +0 -0
  24. {qarnot-2.13.1 → qarnot-2.15.0}/doc/source/_static/qarnot.png +0 -0
  25. {qarnot-2.13.1 → qarnot-2.15.0}/doc/source/api/compute/computeindex.rst +0 -0
  26. {qarnot-2.13.1 → qarnot-2.15.0}/doc/source/api/compute/hardware_constraint.rst +0 -0
  27. {qarnot-2.13.1 → qarnot-2.15.0}/doc/source/api/compute/job.rst +0 -0
  28. {qarnot-2.13.1 → qarnot-2.15.0}/doc/source/api/compute/paginate.rst +0 -0
  29. {qarnot-2.13.1 → qarnot-2.15.0}/doc/source/api/compute/pool.rst +0 -0
  30. {qarnot-2.13.1 → qarnot-2.15.0}/doc/source/api/compute/privileges.rst +0 -0
  31. {qarnot-2.13.1 → qarnot-2.15.0}/doc/source/api/compute/retry_settings.rst +0 -0
  32. {qarnot-2.13.1 → qarnot-2.15.0}/doc/source/api/compute/scheduling_type.rst +0 -0
  33. {qarnot-2.13.1 → qarnot-2.15.0}/doc/source/api/compute/status.rst +0 -0
  34. {qarnot-2.13.1 → qarnot-2.15.0}/doc/source/api/compute/task.rst +0 -0
  35. {qarnot-2.13.1 → qarnot-2.15.0}/doc/source/api/connection.rst +0 -0
  36. {qarnot-2.13.1 → qarnot-2.15.0}/doc/source/api/exceptions.rst +0 -0
  37. {qarnot-2.13.1 → qarnot-2.15.0}/doc/source/api/storage/advanced_bucket.rst +0 -0
  38. {qarnot-2.13.1 → qarnot-2.15.0}/doc/source/api/storage/bucket.rst +0 -0
  39. {qarnot-2.13.1 → qarnot-2.15.0}/doc/source/api/storage/storage.rst +0 -0
  40. {qarnot-2.13.1 → qarnot-2.15.0}/doc/source/api/storage/storageindex.rst +0 -0
  41. {qarnot-2.13.1 → qarnot-2.15.0}/doc/source/basic.rst +0 -0
  42. {qarnot-2.13.1 → qarnot-2.15.0}/doc/source/conf.py +0 -0
  43. {qarnot-2.13.1 → qarnot-2.15.0}/doc/source/index.rst +0 -0
  44. {qarnot-2.13.1 → qarnot-2.15.0}/doc/source/installation.rst +0 -0
  45. {qarnot-2.13.1 → qarnot-2.15.0}/doc/source/qarnot.rst +0 -0
  46. {qarnot-2.13.1 → qarnot-2.15.0}/pyproject.toml +0 -0
  47. {qarnot-2.13.1 → qarnot-2.15.0}/qarnot/_filter.py +0 -0
  48. {qarnot-2.13.1 → qarnot-2.15.0}/qarnot/_retry.py +0 -0
  49. {qarnot-2.13.1 → qarnot-2.15.0}/qarnot/advanced_bucket.py +0 -0
  50. {qarnot-2.13.1 → qarnot-2.15.0}/qarnot/connection.py +0 -0
  51. {qarnot-2.13.1 → qarnot-2.15.0}/qarnot/error.py +0 -0
  52. {qarnot-2.13.1 → qarnot-2.15.0}/qarnot/forced_network_rule.py +0 -0
  53. {qarnot-2.13.1 → qarnot-2.15.0}/qarnot/hardware_constraint.py +0 -0
  54. {qarnot-2.13.1 → qarnot-2.15.0}/qarnot/helper.py +0 -0
  55. {qarnot-2.13.1 → qarnot-2.15.0}/qarnot/job.py +0 -0
  56. {qarnot-2.13.1 → qarnot-2.15.0}/qarnot/paginate.py +0 -0
  57. {qarnot-2.13.1 → qarnot-2.15.0}/qarnot/privileges.py +0 -0
  58. {qarnot-2.13.1 → qarnot-2.15.0}/qarnot/retry_settings.py +0 -0
  59. {qarnot-2.13.1 → qarnot-2.15.0}/qarnot/scheduling_type.py +0 -0
  60. {qarnot-2.13.1 → qarnot-2.15.0}/qarnot/status.py +0 -0
  61. {qarnot-2.13.1 → qarnot-2.15.0}/qarnot/storage.py +0 -0
  62. {qarnot-2.13.1 → qarnot-2.15.0}/qarnot.egg-info/dependency_links.txt +0 -0
  63. {qarnot-2.13.1 → qarnot-2.15.0}/qarnot.egg-info/requires.txt +0 -0
  64. {qarnot-2.13.1 → qarnot-2.15.0}/qarnot.egg-info/top_level.txt +0 -0
  65. {qarnot-2.13.1 → qarnot-2.15.0}/requirements-doc.txt +0 -0
  66. {qarnot-2.13.1 → qarnot-2.15.0}/requirements-lint.txt +0 -0
  67. {qarnot-2.13.1 → qarnot-2.15.0}/requirements-optional.txt +0 -0
  68. {qarnot-2.13.1 → qarnot-2.15.0}/requirements-test.txt +0 -0
  69. {qarnot-2.13.1 → qarnot-2.15.0}/setup.cfg +0 -0
  70. {qarnot-2.13.1 → qarnot-2.15.0}/setup.py +0 -0
  71. {qarnot-2.13.1 → qarnot-2.15.0}/test/test_advanced_bucket.py +0 -0
  72. {qarnot-2.13.1 → qarnot-2.15.0}/test/test_connection.py +0 -0
  73. {qarnot-2.13.1 → qarnot-2.15.0}/test/test_hardware_constraints.py +0 -0
  74. {qarnot-2.13.1 → qarnot-2.15.0}/test/test_import.py +0 -0
  75. {qarnot-2.13.1 → qarnot-2.15.0}/test/test_job.py +0 -0
  76. {qarnot-2.13.1 → qarnot-2.15.0}/test/test_paginate.py +0 -0
  77. {qarnot-2.13.1 → qarnot-2.15.0}/test/test_retry.py +0 -0
  78. {qarnot-2.13.1 → qarnot-2.15.0}/test/test_status.py +0 -0
  79. {qarnot-2.13.1 → qarnot-2.15.0}/test/test_util.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: qarnot
3
- Version: 2.13.1
3
+ Version: 2.15.0
4
4
  Summary: Qarnot Computing SDK
5
5
  Home-page: https://computing.qarnot.com
6
6
  Author: Qarnot computing
@@ -16,11 +16,16 @@ Classifier: Intended Audience :: Information Technology
16
16
  Classifier: License :: OSI Approved :: Apache Software License
17
17
  Requires-Python: >=3.6
18
18
  License-File: LICENSE
19
+ Requires-Dist: requests
20
+ Requires-Dist: boto3
21
+ Requires-Dist: wheel
22
+ Requires-Dist: deprecation
23
+ Requires-Dist: simplejson
19
24
 
20
25
  Qarnot computing Python SDK
21
26
  ===========================
22
27
 
23
- |travis-badge|_ |pypi-badge|_ |readthedocs-badge|_
28
+ |pypi-badge|_ |readthedocs-badge|_
24
29
 
25
30
  This package allows you to use Qarnot cloud computing service.
26
31
 
@@ -63,5 +68,3 @@ The index of the doc is then generated in `doc/_build/html/index.html`
63
68
  .. _pypi-badge: https://pypi.python.org/pypi/qarnot/
64
69
  .. |readthedocs-badge| image:: https://readthedocs.org/projects/qarnot/badge/?version=latest
65
70
  .. _readthedocs-badge: https://qarnot.readthedocs.io/en/latest/
66
- .. |travis-badge| image:: https://app.travis-ci.com/qarnot/qarnot-sdk-python.svg?branch=master
67
- .. _travis-badge: https://app.travis-ci.com/qarnot/qarnot-sdk-python
@@ -1,7 +1,7 @@
1
1
  Qarnot computing Python SDK
2
2
  ===========================
3
3
 
4
- |travis-badge|_ |pypi-badge|_ |readthedocs-badge|_
4
+ |pypi-badge|_ |readthedocs-badge|_
5
5
 
6
6
  This package allows you to use Qarnot cloud computing service.
7
7
 
@@ -44,5 +44,3 @@ The index of the doc is then generated in `doc/_build/html/index.html`
44
44
  .. _pypi-badge: https://pypi.python.org/pypi/qarnot/
45
45
  .. |readthedocs-badge| image:: https://readthedocs.org/projects/qarnot/badge/?version=latest
46
46
  .. _readthedocs-badge: https://qarnot.readthedocs.io/en/latest/
47
- .. |travis-badge| image:: https://app.travis-ci.com/qarnot/qarnot-sdk-python.svg?branch=master
48
- .. _travis-badge: https://app.travis-ci.com/qarnot/qarnot-sdk-python
@@ -16,7 +16,7 @@
16
16
  # limitations under the License.
17
17
 
18
18
 
19
- from .exceptions import QarnotGenericException, UnauthorizedException
19
+ from .exceptions import QarnotGenericException, SecretConflictException, SecretNotFoundException, UnauthorizedException
20
20
  from ._util import get_error_message_from_http_response
21
21
 
22
22
  __all__ = ["task", "connection", "bucket", "pool",
@@ -35,6 +35,13 @@ def raise_on_error(response):
35
35
  raise QarnotGenericException(response.text) from value
36
36
 
37
37
 
38
+ def raise_on_secrets_specific_error(response):
39
+ if response.status_code == 404:
40
+ raise SecretNotFoundException()
41
+ if response.status_code == 409:
42
+ raise SecretConflictException()
43
+
44
+
38
45
  def get_url(key, **kwargs):
39
46
  """Get and format the url for the given key.
40
47
  """
@@ -69,6 +76,8 @@ def get_url(key, **kwargs):
69
76
  'pool stderr': '/pools/{uuid}/stderr', # GET -> pool stderr
70
77
  'pool instance stdout': '/pools/{uuid}/stdout/{instanceId}', # GET -> pool instance stdout
71
78
  'pool instance stderr': '/pools/{uuid}/stderr/{instanceId}', # GET -> pool instance stderr
79
+ 'secrets data': '/secrets-manager/data/{secret_key}', # GET -> get secret , PUT -> create secret, PATCH -> update secret, DELETE -> delete secret
80
+ 'secrets search': '/secrets-manager/search/{secret_prefix}', # GET -> lists secrets starting with prefix
72
81
  'user': '/info', # GET -> user info
73
82
  'profiles': '/profiles', # GET -> profiles list
74
83
  'profile details': '/profiles/{profile}', # GET -> profile details
@@ -84,3 +93,4 @@ __version__ = get_versions()['version'] # type: ignore
84
93
  del get_versions
85
94
 
86
95
  from .connection import Connection # noqa
96
+ from .secrets import Secrets # noqa
@@ -20,11 +20,6 @@ from http.client import responses
20
20
 
21
21
  import re
22
22
 
23
- _IS_PY2 = bytes is str
24
-
25
- if not _IS_PY2:
26
- unicode = str
27
-
28
23
 
29
24
  def copy_docs(docs_source):
30
25
  def decorator(obj):
@@ -43,7 +38,7 @@ def decode(string, encoding='utf-8'):
43
38
 
44
39
  def is_string(x):
45
40
  """Check if x is a string (bytes or unicode)."""
46
- return isinstance(x, (str, unicode))
41
+ return isinstance(x, (str, bytes))
47
42
 
48
43
 
49
44
  def parse_to_timespan_string(value):
@@ -144,7 +139,11 @@ def get_sanitized_bucket_path(path: str, show_warning: bool = True):
144
139
  def get_error_message_from_http_response(response: Response, message_is_status_code_if_null: bool = False) -> str:
145
140
  error_message = ""
146
141
  try:
147
- error_message = response.json()['message']
142
+ error_response = response.json()
143
+ if 'message' in error_response:
144
+ error_message = error_response['message']
145
+ elif 'error' in error_response:
146
+ error_message = error_response['error']
148
147
  except (JSONDecodeError, simpleJsonDecodeError):
149
148
  error_message = response.text
150
149
  if (error_message is None or error_message == "" or len(error_message) < 1) and message_is_status_code_if_null:
@@ -8,11 +8,11 @@ import json
8
8
 
9
9
  version_json = '''
10
10
  {
11
- "date": "2023-10-20T13:38:48+0200",
11
+ "date": "2024-08-27T11:29:32+0200",
12
12
  "dirty": false,
13
13
  "error": null,
14
- "full-revisionid": "098a8560da06f8c3cf58082163ad5b4f316cd6e5",
15
- "version": "v2.13.1"
14
+ "full-revisionid": "8fe176158583e7c2db929a8a25031075991e9e36",
15
+ "version": "v2.15.0"
16
16
  }
17
17
  ''' # END VERSION_JSON
18
18
 
@@ -227,8 +227,11 @@ class Bucket(Storage): # pylint: disable=W0223
227
227
  :returns: A list of ObjectSummary resources
228
228
 
229
229
  """
230
- bucket = self._connection.s3resource.Bucket(self._uuid)
231
- return bucket.objects.all()
230
+ try:
231
+ bucket = self._connection.s3resource.Bucket(self._uuid)
232
+ return [b for b in bucket.objects.all() if b.key is not None]
233
+ except self._connection.s3resource.meta.client.exceptions.NoSuchBucket as err:
234
+ raise MissingBucketException("Cannot list files. Bucket {} not found.".format(err.response['Error']['BucketName'])) from err
232
235
 
233
236
  def directory(self, directory=''):
234
237
  """List files in a directory of the bucket according to prefix.
@@ -239,6 +242,64 @@ class Bucket(Storage): # pylint: disable=W0223
239
242
  bucket = self._connection.s3resource.Bucket(self._uuid)
240
243
  return bucket.objects.filter(Prefix=directory)
241
244
 
245
+ def sync_remote_to_local(self, local_directoy, remote_directory=None):
246
+ """Synchronize a remote directory to a local directory.
247
+
248
+ :param str local_directoy: The local directory to use for synchronization
249
+ :param str remote_directory: path of the directory on remote node (defaults to whole bucket)
250
+
251
+ .. warning::
252
+ Distant changes are reflected on the local filesystem, a file not present on the
253
+ bucket but in the local directory might be deleted from the local filesystem.
254
+
255
+ .. note::
256
+ The following parameters are used to determine whether
257
+ synchronization is required :
258
+
259
+ * name
260
+ * size
261
+ * sha1sum
262
+ """
263
+
264
+ def get_key_for_local(remote_key: str) -> str:
265
+ if remote_directory:
266
+ return removeprefix(remote_key, remote_directory).lstrip('/')
267
+ return remote_key.lstrip('/')
268
+
269
+ def removeprefix(target_str: str, prefix: str) -> str:
270
+ if target_str.startswith(prefix):
271
+ return target_str[len(prefix):]
272
+ else:
273
+ return target_str[:]
274
+
275
+ try:
276
+ if remote_directory:
277
+ entries = self.directory(remote_directory)
278
+ else:
279
+ entries = self.list_files()
280
+
281
+ list_files_only = [x for x in entries if not x.key.endswith('/')]
282
+ list_directories_only = [x for x in entries if x.key.endswith('/')]
283
+ except self._connection.s3resource.meta.client.exceptions.NoSuchBucket as err:
284
+ raise MissingBucketException("Cannot synchronize. Bucket {} not found.".format(err.response['Error']['BucketName'])) from err
285
+
286
+ for directory in list_directories_only:
287
+ if not os.path.isdir(os.path.join(local_directoy, get_key_for_local(directory.key))):
288
+ os.makedirs(os.path.join(local_directoy, get_key_for_local(directory.key)), exist_ok=True)
289
+
290
+ for _, dupes in groupby(sorted(list_files_only, key=attrgetter('e_tag')), attrgetter('e_tag')):
291
+ file_info = next(dupes)
292
+ first_file = os.path.join(local_directoy, get_key_for_local(file_info.key))
293
+ self.get_file(file_info.get()['Body'], local=first_file) # avoids making a useless HEAD request
294
+
295
+ for dupe in dupes:
296
+ local = os.path.join(local_directoy, get_key_for_local(dupe.key))
297
+ directory = os.path.dirname(local)
298
+ if not os.path.exists(directory):
299
+ os.makedirs(directory)
300
+ if (os.path.abspath(os.path.realpath(local)) is not os.path.abspath(os.path.realpath(first_file))):
301
+ shutil.copy(first_file, local)
302
+
242
303
  def sync_directory(self, directory, verbose=False, remote=None):
243
304
  """Synchronize a local directory with the remote buckets.
244
305
 
@@ -282,6 +343,7 @@ class Bucket(Storage): # pylint: disable=W0223
282
343
  :param dict files: Dictionary of synchronized files
283
344
  :param bool verbose: Print information about synchronization operations
284
345
  :param str remote: path of the directory on remote node (defaults to *local*)
346
+ :raises ~qarnot.exceptions.MissingBucketException: the bucket is not on the server
285
347
 
286
348
  Dictionary key is the remote file path while value is the local file
287
349
  path.
@@ -348,53 +410,56 @@ class Bucket(Storage): # pylint: disable=W0223
348
410
  def objectsummarytocomparable(object_):
349
411
  return Comparable(object_.key, object_.e_tag, None)
350
412
 
351
- localfiles = set()
352
- if self._connection._sanitize_bucket_paths:
353
- remote = _util.get_sanitized_bucket_path(remote, self._connection._show_bucket_warnings)
354
- for name, filepath in files.items():
355
- localfiles.add(localtocomparable(name.replace(os.path.sep, '/'), filepath, remote))
413
+ try:
414
+ localfiles = set()
415
+ if self._connection._sanitize_bucket_paths:
416
+ remote = _util.get_sanitized_bucket_path(remote, self._connection._show_bucket_warnings)
417
+ for name, filepath in files.items():
418
+ localfiles.add(localtocomparable(name.replace(os.path.sep, '/'), filepath, remote))
356
419
 
357
- remotefiles = set(map(objectsummarytocomparable, self.list_files()))
420
+ remotefiles = set(map(objectsummarytocomparable, self.list_files()))
358
421
 
359
- adds = localfiles - remotefiles
360
- removes = remotefiles - localfiles
422
+ adds = localfiles - remotefiles
423
+ removes = remotefiles - localfiles
361
424
 
362
- seen_tags = set() # To avoid copying the same objects multiple times when renaming
363
- for file_ in removes:
364
- if remote is not None and not file_.name.startswith(remote):
365
- continue
366
- renames = (x for x in adds if x.e_tag not in seen_tags and x.e_tag == file_.e_tag
367
- and all(rem.name != x.name for rem in remotefiles))
368
- for dup in renames:
369
- if verbose:
370
- self._connection.logger.info("Copy", file_.name, "to", dup.name)
371
- self.copy_file(file_.name, dup.name)
372
- if verbose:
373
- self._connection.logger.info("Remove:", file_.name)
374
- self.delete_file(file_.name)
375
- seen_tags.add(file_.e_tag)
376
-
377
- remotefiles = set(map(objectsummarytocomparable, self.list_files()))
378
-
379
- sadds = sorted(adds, key=lambda x: x.e_tag)
380
- groupedadds = (list(g) for _, g in itertools.groupby(sadds, lambda x: x.e_tag))
381
-
382
- for entry in groupedadds:
383
- try:
384
- rem = next(x for x in remotefiles if x.e_tag == entry[0].e_tag)
385
- if rem.name == entry[0].name:
425
+ seen_tags = set() # To avoid copying the same objects multiple times when renaming
426
+ for file_ in removes:
427
+ if remote is not None and not file_.name.startswith(remote):
386
428
  continue
429
+ renames = (x for x in adds if x.e_tag not in seen_tags and x.e_tag == file_.e_tag
430
+ and all(rem.name != x.name for rem in remotefiles))
431
+ for dup in renames:
432
+ if verbose:
433
+ self._connection.logger.info("Copy %s to %s" % (file_.name, dup.name))
434
+ self.copy_file(file_.name, dup.name)
387
435
  if verbose:
388
- self._connection.logger.info("Copy", rem.name, "to", entry[0].name)
389
- self.copy_file(rem.name, entry[0].name)
390
- except StopIteration:
391
- if verbose:
392
- self._connection.logger.info("Upload:", entry[0].filepath, '->', entry[0].name)
393
- self.add_file(entry[0].filepath, entry[0].name)
394
- for link in entry[1:]: # duplicate files
395
- if verbose:
396
- self._connection.logger.info("Copy", entry[0].name, "to", link.name)
397
- self.copy_file(entry[0].name, link.name)
436
+ self._connection.logger.info("Remove: %s" % file_.name)
437
+ self.delete_file(file_.name)
438
+ seen_tags.add(file_.e_tag)
439
+
440
+ remotefiles = set(map(objectsummarytocomparable, self.list_files()))
441
+
442
+ sadds = sorted(adds, key=lambda x: x.e_tag)
443
+ groupedadds = (list(g) for _, g in itertools.groupby(sadds, lambda x: x.e_tag))
444
+
445
+ for entry in groupedadds:
446
+ try:
447
+ rem = next(x for x in remotefiles if x.e_tag == entry[0].e_tag)
448
+ if rem.name == entry[0].name:
449
+ continue
450
+ if verbose:
451
+ self._connection.logger.info("Copy %s to %s" % (rem.name, entry[0].name))
452
+ self.copy_file(rem.name, entry[0].name)
453
+ except StopIteration:
454
+ if verbose:
455
+ self._connection.logger.info("Upload: %s -> %s" % (entry[0].filepath, entry[0].name))
456
+ self.add_file(entry[0].filepath, entry[0].name)
457
+ for link in entry[1:]: # duplicate files
458
+ if verbose:
459
+ self._connection.logger.info("Copy %s to %s" % (entry[0].name, link.name))
460
+ self.copy_file(entry[0].name, link.name)
461
+ except self._connection.s3resource.meta.client.exceptions.NoSuchBucket as err:
462
+ raise MissingBucketException("Cannot sync files. Bucket {} not found.".format(err.response['Error']['BucketName'])) from err
398
463
 
399
464
  def add_string(self, string, remote):
400
465
  """Add a string on the storage.
@@ -416,31 +481,17 @@ class Bucket(Storage): # pylint: disable=W0223
416
481
  file_ = local_or_file
417
482
  dest = remote or os.path.basename(file_.name)
418
483
 
419
- self._connection.s3client.upload_fileobj(file_, self._uuid, dest, Config=s3_multipart_config)
420
- if tobeclosed:
421
- file_.close()
484
+ try:
485
+ self._connection.s3client.upload_fileobj(file_, self._uuid, dest, Config=s3_multipart_config)
486
+ except self._connection.s3resource.meta.client.exceptions.NoSuchBucket as err:
487
+ raise MissingBucketException("Cannot add string. Bucket {} not found.".format(err.response['Error']['BucketName'])) from err
488
+ finally:
489
+ if tobeclosed:
490
+ file_.close()
422
491
 
423
492
  @_util.copy_docs(Storage.get_all_files)
424
493
  def get_all_files(self, output_dir, progress=None):
425
- list_files_only = [x for x in self.list_files() if not x.key.endswith('/')]
426
- list_directories_only = [x for x in self.list_files() if x.key.endswith('/')]
427
-
428
- for directory in list_directories_only:
429
- if not os.path.isdir(os.path.join(output_dir, directory.key.lstrip('/'))):
430
- os.makedirs(os.path.join(output_dir, directory.key.lstrip('/')))
431
-
432
- for _, dupes in groupby(sorted(list_files_only, key=attrgetter('e_tag')), attrgetter('e_tag')):
433
- file_info = next(dupes)
434
- first_file = os.path.join(output_dir, file_info.key.lstrip('/'))
435
- self.get_file(file_info.get()['Body'], local=first_file) # avoids making a useless HEAD request
436
-
437
- for dupe in dupes:
438
- local = os.path.join(output_dir, dupe.key.lstrip('/'))
439
- directory = os.path.dirname(local)
440
- if not os.path.exists(directory):
441
- os.makedirs(directory)
442
- if (os.path.abspath(os.path.realpath(local)) is not os.path.abspath(os.path.realpath(first_file))):
443
- shutil.copy(first_file, local)
494
+ self.sync_remote_to_local(output_dir, None)
444
495
 
445
496
  @_util.copy_docs(Storage.get_file)
446
497
  def get_file(self, remote, local=None, progress=None):
@@ -465,11 +516,14 @@ class Bucket(Storage): # pylint: disable=W0223
465
516
 
466
517
  @_util.copy_docs(Storage.copy_file)
467
518
  def copy_file(self, source, dest):
468
- copy_source = {
469
- 'Bucket': self._uuid,
470
- 'Key': source
471
- }
472
- return self._connection.s3client.copy_object(CopySource=copy_source, Bucket=self._uuid, Key=dest)
519
+ try:
520
+ copy_source = {
521
+ 'Bucket': self._uuid,
522
+ 'Key': source
523
+ }
524
+ return self._connection.s3client.copy_object(CopySource=copy_source, Bucket=self._uuid, Key=dest)
525
+ except self._connection.s3resource.meta.client.exceptions.NoSuchBucket as err:
526
+ raise MissingBucketException("Cannot copy file {} to {} from bucket {}. Bucket not found.".format(source, dest, err.response['Error']['BucketName'])) from err
473
527
 
474
528
  @deprecation.deprecated(deprecated_in="2.6.0", removed_in="3.0",
475
529
  current_version=__version__, # type: ignore
@@ -490,14 +544,20 @@ class Bucket(Storage): # pylint: disable=W0223
490
544
  if hasattr(remote, 'read'):
491
545
  shutil.copyfileobj(remote, data)
492
546
  else:
493
- self._connection.s3client.download_fileobj(self._uuid, remote, data)
547
+ try:
548
+ self._connection.s3client.download_fileobj(self._uuid, remote, data)
549
+ except self._connection.s3resource.meta.client.exceptions.NoSuchBucket as err:
550
+ raise MissingBucketException("Cannot download file {} from bucket {}. Bucket not found.".format(remote, err.response['Error']['BucketName'])) from err
494
551
  return local
495
552
 
496
553
  @_util.copy_docs(Storage.delete_file)
497
554
  def delete_file(self, remote):
498
- if self._connection._sanitize_bucket_paths:
499
- remote = _util.get_sanitized_bucket_path(remote, self._connection._show_bucket_warnings)
500
- self._connection.s3client.delete_object(Bucket=self._uuid, Key=remote)
555
+ try:
556
+ if self._connection._sanitize_bucket_paths:
557
+ remote = _util.get_sanitized_bucket_path(remote, self._connection._show_bucket_warnings)
558
+ self._connection.s3client.delete_object(Bucket=self._uuid, Key=remote)
559
+ except self._connection.s3resource.meta.client.exceptions.NoSuchBucket as err:
560
+ raise MissingBucketException("Cannot delete file {} from bucket {}. Bucket not found.".format(remote, err.response['Error']['BucketName'])) from err
501
561
 
502
562
  @property
503
563
  def uuid(self):
@@ -30,7 +30,9 @@ __all__ = ['QarnotException',
30
30
  'NotEnoughCreditsException',
31
31
  'MissingBucketException',
32
32
  'MaxJobException',
33
- 'MissingJobException']
33
+ 'MissingJobException',
34
+ 'SecretNotFoundException',
35
+ 'SecretConflictException']
34
36
 
35
37
 
36
38
  class QarnotException(Exception):
@@ -93,3 +95,11 @@ class MissingJobException(Exception):
93
95
 
94
96
  class MaxJobException(Exception):
95
97
  """Max number of jobs reached."""
98
+
99
+
100
+ class SecretNotFoundException(Exception):
101
+ """Requested secret was not found."""
102
+
103
+
104
+ class SecretConflictException(Exception):
105
+ """Secret already exists."""
@@ -19,6 +19,7 @@ from typing import Dict, List, Optional
19
19
 
20
20
  from qarnot.retry_settings import RetrySettings
21
21
  from qarnot.forced_network_rule import ForcedNetworkRule
22
+ from qarnot.secrets import SecretsAccessRights
22
23
 
23
24
  from . import raise_on_error, get_url, _util
24
25
  from .bucket import Bucket
@@ -124,6 +125,7 @@ class Pool(object):
124
125
  self._privileges: Privileges = Privileges()
125
126
  self._default_retry_settings: RetrySettings = RetrySettings()
126
127
  self._forced_network_rules: List[ForcedNetworkRule] = []
128
+ self._secrets_access_rights: SecretsAccessRights = SecretsAccessRights()
127
129
 
128
130
  @classmethod
129
131
  def _retrieve(cls, connection, uuid):
@@ -237,6 +239,8 @@ class Pool(object):
237
239
  if 'schedulingType' in json_pool:
238
240
  self._scheduling_type = SchedulingType.from_string(json_pool["schedulingType"])
239
241
  self._forced_network_rules = [ForcedNetworkRule.from_json(forced_network_dict) for forced_network_dict in json_pool.get("forcedNetworkRules", [])]
242
+ if 'secretsAccessRights' in json_pool:
243
+ self._secrets_access_rights = SecretsAccessRights.from_json(json_pool["secretsAccessRights"])
240
244
 
241
245
  def _to_json(self):
242
246
  """Get a dict ready to be json packed from this pool."""
@@ -294,6 +298,9 @@ class Pool(object):
294
298
  if self._forced_network_rules is not None:
295
299
  json_pool['forcedNetworkRules'] = [x.to_json() for x in self._forced_network_rules]
296
300
 
301
+ if self._secrets_access_rights:
302
+ json_pool['secretsAccessRights'] = self._secrets_access_rights.to_json()
303
+
297
304
  return json_pool
298
305
 
299
306
  def submit(self):
@@ -1067,6 +1074,31 @@ class Pool(object):
1067
1074
 
1068
1075
  self._constraints = value
1069
1076
 
1077
+ @property
1078
+ def secrets_access_rights(self):
1079
+ """:type: :class:`~qarnot.secrets.SecretsAccessRights`
1080
+ :getter: Returns the description of the secrets the tasks in this pool will have access to when running.
1081
+ :setter: set the secrets this pool will have access to when running.
1082
+
1083
+ Secrets can be accessible either by exact match on the key or by using a prefix
1084
+ in order to match all the secrets starting with said prefix.
1085
+ """
1086
+ self._update_if_summary()
1087
+ if self._auto_update:
1088
+ self.update()
1089
+
1090
+ return self._secrets_access_rights
1091
+
1092
+ @secrets_access_rights.setter
1093
+ def secrets_access_rights(self, value: SecretsAccessRights):
1094
+ """Setter for secrets access rights
1095
+ """
1096
+ self._update_if_summary()
1097
+ if self._auto_update:
1098
+ self.update()
1099
+
1100
+ self._secrets_access_rights = value
1101
+
1070
1102
  @property
1071
1103
  def forced_network_rules(self):
1072
1104
  """:type: list{:class:`~qarnot.forced_network_rule.ForcedNetworkRule`}