qarnot 2.13.0__tar.gz → 2.14.5__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79)
  1. {qarnot-2.13.0/qarnot.egg-info → qarnot-2.14.5}/PKG-INFO +7 -4
  2. {qarnot-2.13.0 → qarnot-2.14.5}/README.rst +1 -3
  3. {qarnot-2.13.0 → qarnot-2.14.5}/qarnot/__init__.py +11 -1
  4. {qarnot-2.13.0 → qarnot-2.14.5}/qarnot/_util.py +5 -1
  5. {qarnot-2.13.0 → qarnot-2.14.5}/qarnot/_version.py +3 -3
  6. {qarnot-2.13.0 → qarnot-2.14.5}/qarnot/bucket.py +81 -58
  7. {qarnot-2.13.0 → qarnot-2.14.5}/qarnot/exceptions.py +11 -1
  8. {qarnot-2.13.0 → qarnot-2.14.5}/qarnot/forced_network_rule.py +60 -0
  9. {qarnot-2.13.0 → qarnot-2.14.5}/qarnot/pool.py +34 -3
  10. qarnot-2.14.5/qarnot/secrets.py +317 -0
  11. {qarnot-2.13.0 → qarnot-2.14.5}/qarnot/task.py +35 -3
  12. {qarnot-2.13.0 → qarnot-2.14.5/qarnot.egg-info}/PKG-INFO +7 -4
  13. {qarnot-2.13.0 → qarnot-2.14.5}/qarnot.egg-info/SOURCES.txt +2 -0
  14. {qarnot-2.13.0 → qarnot-2.14.5}/requirements.txt +1 -0
  15. qarnot-2.14.5/test/test_bucket.py +288 -0
  16. {qarnot-2.13.0 → qarnot-2.14.5}/test/test_pool.py +82 -15
  17. qarnot-2.14.5/test/test_secrets.py +180 -0
  18. {qarnot-2.13.0 → qarnot-2.14.5}/test/test_task.py +80 -15
  19. {qarnot-2.13.0 → qarnot-2.14.5}/versioneer.py +8 -3
  20. qarnot-2.13.0/test/test_bucket.py +0 -114
  21. {qarnot-2.13.0 → qarnot-2.14.5}/LICENSE +0 -0
  22. {qarnot-2.13.0 → qarnot-2.14.5}/MANIFEST.in +0 -0
  23. {qarnot-2.13.0 → qarnot-2.14.5}/doc/Makefile +0 -0
  24. {qarnot-2.13.0 → qarnot-2.14.5}/doc/make.bat +0 -0
  25. {qarnot-2.13.0 → qarnot-2.14.5}/doc/source/_static/qarnot.png +0 -0
  26. {qarnot-2.13.0 → qarnot-2.14.5}/doc/source/api/compute/computeindex.rst +0 -0
  27. {qarnot-2.13.0 → qarnot-2.14.5}/doc/source/api/compute/hardware_constraint.rst +0 -0
  28. {qarnot-2.13.0 → qarnot-2.14.5}/doc/source/api/compute/job.rst +0 -0
  29. {qarnot-2.13.0 → qarnot-2.14.5}/doc/source/api/compute/paginate.rst +0 -0
  30. {qarnot-2.13.0 → qarnot-2.14.5}/doc/source/api/compute/pool.rst +0 -0
  31. {qarnot-2.13.0 → qarnot-2.14.5}/doc/source/api/compute/privileges.rst +0 -0
  32. {qarnot-2.13.0 → qarnot-2.14.5}/doc/source/api/compute/retry_settings.rst +0 -0
  33. {qarnot-2.13.0 → qarnot-2.14.5}/doc/source/api/compute/scheduling_type.rst +0 -0
  34. {qarnot-2.13.0 → qarnot-2.14.5}/doc/source/api/compute/status.rst +0 -0
  35. {qarnot-2.13.0 → qarnot-2.14.5}/doc/source/api/compute/task.rst +0 -0
  36. {qarnot-2.13.0 → qarnot-2.14.5}/doc/source/api/connection.rst +0 -0
  37. {qarnot-2.13.0 → qarnot-2.14.5}/doc/source/api/exceptions.rst +0 -0
  38. {qarnot-2.13.0 → qarnot-2.14.5}/doc/source/api/storage/advanced_bucket.rst +0 -0
  39. {qarnot-2.13.0 → qarnot-2.14.5}/doc/source/api/storage/bucket.rst +0 -0
  40. {qarnot-2.13.0 → qarnot-2.14.5}/doc/source/api/storage/storage.rst +0 -0
  41. {qarnot-2.13.0 → qarnot-2.14.5}/doc/source/api/storage/storageindex.rst +0 -0
  42. {qarnot-2.13.0 → qarnot-2.14.5}/doc/source/basic.rst +0 -0
  43. {qarnot-2.13.0 → qarnot-2.14.5}/doc/source/conf.py +0 -0
  44. {qarnot-2.13.0 → qarnot-2.14.5}/doc/source/index.rst +0 -0
  45. {qarnot-2.13.0 → qarnot-2.14.5}/doc/source/installation.rst +0 -0
  46. {qarnot-2.13.0 → qarnot-2.14.5}/doc/source/qarnot.rst +0 -0
  47. {qarnot-2.13.0 → qarnot-2.14.5}/pyproject.toml +0 -0
  48. {qarnot-2.13.0 → qarnot-2.14.5}/qarnot/_filter.py +0 -0
  49. {qarnot-2.13.0 → qarnot-2.14.5}/qarnot/_retry.py +0 -0
  50. {qarnot-2.13.0 → qarnot-2.14.5}/qarnot/advanced_bucket.py +0 -0
  51. {qarnot-2.13.0 → qarnot-2.14.5}/qarnot/connection.py +0 -0
  52. {qarnot-2.13.0 → qarnot-2.14.5}/qarnot/error.py +0 -0
  53. {qarnot-2.13.0 → qarnot-2.14.5}/qarnot/hardware_constraint.py +0 -0
  54. {qarnot-2.13.0 → qarnot-2.14.5}/qarnot/helper.py +0 -0
  55. {qarnot-2.13.0 → qarnot-2.14.5}/qarnot/job.py +0 -0
  56. {qarnot-2.13.0 → qarnot-2.14.5}/qarnot/paginate.py +0 -0
  57. {qarnot-2.13.0 → qarnot-2.14.5}/qarnot/privileges.py +0 -0
  58. {qarnot-2.13.0 → qarnot-2.14.5}/qarnot/retry_settings.py +0 -0
  59. {qarnot-2.13.0 → qarnot-2.14.5}/qarnot/scheduling_type.py +0 -0
  60. {qarnot-2.13.0 → qarnot-2.14.5}/qarnot/status.py +0 -0
  61. {qarnot-2.13.0 → qarnot-2.14.5}/qarnot/storage.py +0 -0
  62. {qarnot-2.13.0 → qarnot-2.14.5}/qarnot.egg-info/dependency_links.txt +0 -0
  63. {qarnot-2.13.0 → qarnot-2.14.5}/qarnot.egg-info/requires.txt +0 -0
  64. {qarnot-2.13.0 → qarnot-2.14.5}/qarnot.egg-info/top_level.txt +0 -0
  65. {qarnot-2.13.0 → qarnot-2.14.5}/requirements-doc.txt +0 -0
  66. {qarnot-2.13.0 → qarnot-2.14.5}/requirements-lint.txt +0 -0
  67. {qarnot-2.13.0 → qarnot-2.14.5}/requirements-optional.txt +0 -0
  68. {qarnot-2.13.0 → qarnot-2.14.5}/requirements-test.txt +0 -0
  69. {qarnot-2.13.0 → qarnot-2.14.5}/setup.cfg +0 -0
  70. {qarnot-2.13.0 → qarnot-2.14.5}/setup.py +0 -0
  71. {qarnot-2.13.0 → qarnot-2.14.5}/test/test_advanced_bucket.py +0 -0
  72. {qarnot-2.13.0 → qarnot-2.14.5}/test/test_connection.py +0 -0
  73. {qarnot-2.13.0 → qarnot-2.14.5}/test/test_hardware_constraints.py +0 -0
  74. {qarnot-2.13.0 → qarnot-2.14.5}/test/test_import.py +0 -0
  75. {qarnot-2.13.0 → qarnot-2.14.5}/test/test_job.py +0 -0
  76. {qarnot-2.13.0 → qarnot-2.14.5}/test/test_paginate.py +0 -0
  77. {qarnot-2.13.0 → qarnot-2.14.5}/test/test_retry.py +0 -0
  78. {qarnot-2.13.0 → qarnot-2.14.5}/test/test_status.py +0 -0
  79. {qarnot-2.13.0 → qarnot-2.14.5}/test/test_util.py +0 -0

PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: qarnot
- Version: 2.13.0
+ Version: 2.14.5
  Summary: Qarnot Computing SDK
  Home-page: https://computing.qarnot.com
  Author: Qarnot computing
@@ -16,11 +16,16 @@ Classifier: Intended Audience :: Information Technology
  Classifier: License :: OSI Approved :: Apache Software License
  Requires-Python: >=3.6
  License-File: LICENSE
+ Requires-Dist: requests
+ Requires-Dist: boto3
+ Requires-Dist: wheel
+ Requires-Dist: deprecation
+ Requires-Dist: simplejson

  Qarnot computing Python SDK
  ===========================

- |travis-badge|_ |pypi-badge|_ |readthedocs-badge|_
+ |pypi-badge|_ |readthedocs-badge|_

  This package allows you to use Qarnot cloud computing service.

@@ -63,5 +68,3 @@ The index of the doc is then generated in `doc/_build/html/index.html`
  .. _pypi-badge: https://pypi.python.org/pypi/qarnot/
  .. |readthedocs-badge| image:: https://readthedocs.org/projects/qarnot/badge/?version=latest
  .. _readthedocs-badge: https://qarnot.readthedocs.io/en/latest/
- .. |travis-badge| image:: https://app.travis-ci.com/qarnot/qarnot-sdk-python.svg?branch=master
- .. _travis-badge: https://app.travis-ci.com/qarnot/qarnot-sdk-python

README.rst

@@ -1,7 +1,7 @@
  Qarnot computing Python SDK
  ===========================

- |travis-badge|_ |pypi-badge|_ |readthedocs-badge|_
+ |pypi-badge|_ |readthedocs-badge|_

  This package allows you to use Qarnot cloud computing service.

@@ -44,5 +44,3 @@ The index of the doc is then generated in `doc/_build/html/index.html`
  .. _pypi-badge: https://pypi.python.org/pypi/qarnot/
  .. |readthedocs-badge| image:: https://readthedocs.org/projects/qarnot/badge/?version=latest
  .. _readthedocs-badge: https://qarnot.readthedocs.io/en/latest/
- .. |travis-badge| image:: https://app.travis-ci.com/qarnot/qarnot-sdk-python.svg?branch=master
- .. _travis-badge: https://app.travis-ci.com/qarnot/qarnot-sdk-python

qarnot/__init__.py

@@ -16,7 +16,7 @@
  # limitations under the License.


- from .exceptions import QarnotGenericException, UnauthorizedException
+ from .exceptions import QarnotGenericException, SecretConflictException, SecretNotFoundException, UnauthorizedException
  from ._util import get_error_message_from_http_response

  __all__ = ["task", "connection", "bucket", "pool",
@@ -35,6 +35,13 @@ def raise_on_error(response):
              raise QarnotGenericException(response.text) from value


+ def raise_on_secrets_specific_error(response):
+     if response.status_code == 404:
+         raise SecretNotFoundException()
+     if response.status_code == 409:
+         raise SecretConflictException()
+
+
  def get_url(key, **kwargs):
      """Get and format the url for the given key.
      """
@@ -69,6 +76,8 @@ def get_url(key, **kwargs):
          'pool stderr': '/pools/{uuid}/stderr', # GET -> pool stderr
          'pool instance stdout': '/pools/{uuid}/stdout/{instanceId}', # GET -> pool instance stdout
          'pool instance stderr': '/pools/{uuid}/stderr/{instanceId}', # GET -> pool instance stderr
+         'secrets data': '/secrets-manager/data/{secret_key}', # GET -> get secret , PUT -> create secret, PATCH -> update secret, DELETE -> delete secret
+         'secrets search': '/secrets-manager/search/{secret_prefix}', # GET -> lists secrets starting with prefix
          'user': '/info', # GET -> user info
          'profiles': '/profiles', # GET -> profiles list
          'profile details': '/profiles/{profile}', # GET -> profile details
@@ -84,3 +93,4 @@ __version__ = get_versions()['version'] # type: ignore
  del get_versions

  from .connection import Connection # noqa
+ from .secrets import Secrets # noqa
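
The new URL keys and raise_on_secrets_specific_error above are the only secrets plumbing visible in qarnot/__init__.py; the Secrets client itself lives in the new qarnot/secrets.py, whose contents are not shown in this diff. A minimal sketch of how the added names fit together (the secret key and prefix below are made-up examples):

from qarnot import get_url, raise_on_secrets_specific_error
from qarnot.exceptions import SecretConflictException, SecretNotFoundException

# Format the new secrets-manager endpoint paths.
print(get_url('secrets data', secret_key='prod/db/password'))  # /secrets-manager/data/prod/db/password
print(get_url('secrets search', secret_prefix='prod/'))        # /secrets-manager/search/prod/

class FakeResponse:
    """Stand-in for an HTTP response, for illustration only."""
    status_code = 404

try:
    raise_on_secrets_specific_error(FakeResponse())
except SecretNotFoundException:
    print("404 is mapped to SecretNotFoundException")
# A 409 response would raise SecretConflictException instead.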

qarnot/_util.py

@@ -144,7 +144,11 @@ def get_sanitized_bucket_path(path: str, show_warning: bool = True):
  def get_error_message_from_http_response(response: Response, message_is_status_code_if_null: bool = False) -> str:
      error_message = ""
      try:
-         error_message = response.json()['message']
+         error_response = response.json()
+         if 'message' in error_response:
+             error_message = error_response['message']
+         elif 'error' in error_response:
+             error_message = error_response['error']
      except (JSONDecodeError, simpleJsonDecodeError):
          error_message = response.text
      if (error_message is None or error_message == "" or len(error_message) < 1) and message_is_status_code_if_null:
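
A quick illustration of the widened error extraction above, using a duck-typed stand-in for requests.Response (not part of the SDK): JSON bodies of the form {"error": ...} are now understood in addition to {"message": ...}.

from qarnot._util import get_error_message_from_http_response

class FakeErrorResponse:
    """Illustrative stand-in; only the attributes the helper reads are defined."""
    status_code = 404
    text = '{"error": "secret not found"}'

    def json(self):
        return {"error": "secret not found"}

print(get_error_message_from_http_response(FakeErrorResponse()))  # -> secret not found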

qarnot/_version.py

@@ -8,11 +8,11 @@ import json

  version_json = '''
  {
-  "date": "2023-10-20T09:58:50+0200",
+  "date": "2024-03-29T14:48:51+0100",
   "dirty": false,
   "error": null,
-  "full-revisionid": "8bf9157ea1fc05fc9fe5d296372308330f100280",
-  "version": "v2.13.0"
+  "full-revisionid": "ff28a98ad04060443b7c6b1f487e492c61e31be1",
+  "version": "v2.14.5"
  }
  ''' # END VERSION_JSON


qarnot/bucket.py

@@ -227,8 +227,11 @@ class Bucket(Storage): # pylint: disable=W0223
          :returns: A list of ObjectSummary resources

          """
-         bucket = self._connection.s3resource.Bucket(self._uuid)
-         return bucket.objects.all()
+         try:
+             bucket = self._connection.s3resource.Bucket(self._uuid)
+             return [b for b in bucket.objects.all() if b.key is not None]
+         except self._connection.s3resource.meta.client.exceptions.NoSuchBucket as err:
+             raise MissingBucketException("Cannot list files. Bucket {} not found.".format(err.response['Error']['BucketName'])) from err

      def directory(self, directory=''):
          """List files in a directory of the bucket according to prefix.
@@ -282,6 +285,7 @@ class Bucket(Storage): # pylint: disable=W0223
          :param dict files: Dictionary of synchronized files
          :param bool verbose: Print information about synchronization operations
          :param str remote: path of the directory on remote node (defaults to *local*)
+         :raises ~qarnot.exceptions.MissingBucketException: the bucket is not on the server

          Dictionary key is the remote file path while value is the local file
          path.
@@ -348,53 +352,56 @@ class Bucket(Storage): # pylint: disable=W0223
          def objectsummarytocomparable(object_):
              return Comparable(object_.key, object_.e_tag, None)

-         localfiles = set()
-         if self._connection._sanitize_bucket_paths:
-             remote = _util.get_sanitized_bucket_path(remote, self._connection._show_bucket_warnings)
-         for name, filepath in files.items():
-             localfiles.add(localtocomparable(name.replace(os.path.sep, '/'), filepath, remote))
+         try:
+             localfiles = set()
+             if self._connection._sanitize_bucket_paths:
+                 remote = _util.get_sanitized_bucket_path(remote, self._connection._show_bucket_warnings)
+             for name, filepath in files.items():
+                 localfiles.add(localtocomparable(name.replace(os.path.sep, '/'), filepath, remote))

-         remotefiles = set(map(objectsummarytocomparable, self.list_files()))
+             remotefiles = set(map(objectsummarytocomparable, self.list_files()))

-         adds = localfiles - remotefiles
-         removes = remotefiles - localfiles
+             adds = localfiles - remotefiles
+             removes = remotefiles - localfiles

-         seen_tags = set() # To avoid copying the same objects multiple times when renaming
-         for file_ in removes:
-             if remote is not None and not file_.name.startswith(remote):
-                 continue
-             renames = (x for x in adds if x.e_tag not in seen_tags and x.e_tag == file_.e_tag
-                        and all(rem.name != x.name for rem in remotefiles))
-             for dup in renames:
-                 if verbose:
-                     self._connection.logger.info("Copy", file_.name, "to", dup.name)
-                 self.copy_file(file_.name, dup.name)
-             if verbose:
-                 self._connection.logger.info("Remove:", file_.name)
-             self.delete_file(file_.name)
-             seen_tags.add(file_.e_tag)
-
-         remotefiles = set(map(objectsummarytocomparable, self.list_files()))
-
-         sadds = sorted(adds, key=lambda x: x.e_tag)
-         groupedadds = (list(g) for _, g in itertools.groupby(sadds, lambda x: x.e_tag))
-
-         for entry in groupedadds:
-             try:
-                 rem = next(x for x in remotefiles if x.e_tag == entry[0].e_tag)
-                 if rem.name == entry[0].name:
+             seen_tags = set() # To avoid copying the same objects multiple times when renaming
+             for file_ in removes:
+                 if remote is not None and not file_.name.startswith(remote):
                      continue
+                 renames = (x for x in adds if x.e_tag not in seen_tags and x.e_tag == file_.e_tag
+                            and all(rem.name != x.name for rem in remotefiles))
+                 for dup in renames:
+                     if verbose:
+                         self._connection.logger.info("Copy %s to %s" % (file_.name, dup.name))
+                     self.copy_file(file_.name, dup.name)
                  if verbose:
-                     self._connection.logger.info("Copy", rem.name, "to", entry[0].name)
-                 self.copy_file(rem.name, entry[0].name)
-             except StopIteration:
-                 if verbose:
-                     self._connection.logger.info("Upload:", entry[0].filepath, '->', entry[0].name)
-                 self.add_file(entry[0].filepath, entry[0].name)
-                 for link in entry[1:]: # duplicate files
-                     if verbose:
-                         self._connection.logger.info("Copy", entry[0].name, "to", link.name)
-                     self.copy_file(entry[0].name, link.name)
+                     self._connection.logger.info("Remove: %s" % file_.name)
+                 self.delete_file(file_.name)
+                 seen_tags.add(file_.e_tag)
+
+             remotefiles = set(map(objectsummarytocomparable, self.list_files()))
+
+             sadds = sorted(adds, key=lambda x: x.e_tag)
+             groupedadds = (list(g) for _, g in itertools.groupby(sadds, lambda x: x.e_tag))
+
+             for entry in groupedadds:
+                 try:
+                     rem = next(x for x in remotefiles if x.e_tag == entry[0].e_tag)
+                     if rem.name == entry[0].name:
+                         continue
+                     if verbose:
+                         self._connection.logger.info("Copy %s to %s" % (rem.name, entry[0].name))
+                     self.copy_file(rem.name, entry[0].name)
+                 except StopIteration:
+                     if verbose:
+                         self._connection.logger.info("Upload: %s -> %s" % (entry[0].filepath, entry[0].name))
+                     self.add_file(entry[0].filepath, entry[0].name)
+                     for link in entry[1:]: # duplicate files
+                         if verbose:
+                             self._connection.logger.info("Copy %s to %s" % (entry[0].name, link.name))
+                         self.copy_file(entry[0].name, link.name)
+         except self._connection.s3resource.meta.client.exceptions.NoSuchBucket as err:
+             raise MissingBucketException("Cannot sync files. Bucket {} not found.".format(err.response['Error']['BucketName'])) from err

      def add_string(self, string, remote):
          """Add a string on the storage.
@@ -416,14 +423,21 @@ class Bucket(Storage): # pylint: disable=W0223
              file_ = local_or_file
          dest = remote or os.path.basename(file_.name)

-         self._connection.s3client.upload_fileobj(file_, self._uuid, dest, Config=s3_multipart_config)
-         if tobeclosed:
-             file_.close()
+         try:
+             self._connection.s3client.upload_fileobj(file_, self._uuid, dest, Config=s3_multipart_config)
+         except self._connection.s3resource.meta.client.exceptions.NoSuchBucket as err:
+             raise MissingBucketException("Cannot add string. Bucket {} not found.".format(err.response['Error']['BucketName'])) from err
+         finally:
+             if tobeclosed:
+                 file_.close()

      @_util.copy_docs(Storage.get_all_files)
      def get_all_files(self, output_dir, progress=None):
-         list_files_only = [x for x in self.list_files() if not x.key.endswith('/')]
-         list_directories_only = [x for x in self.list_files() if x.key.endswith('/')]
+         try:
+             list_files_only = [x for x in self.list_files() if not x.key.endswith('/')]
+             list_directories_only = [x for x in self.list_files() if x.key.endswith('/')]
+         except self._connection.s3resource.meta.client.exceptions.NoSuchBucket as err:
+             raise MissingBucketException("Cannot get files. Bucket {} not found.".format(err.response['Error']['BucketName'])) from err

          for directory in list_directories_only:
              if not os.path.isdir(os.path.join(output_dir, directory.key.lstrip('/'))):
@@ -465,11 +479,14 @@ class Bucket(Storage): # pylint: disable=W0223

      @_util.copy_docs(Storage.copy_file)
      def copy_file(self, source, dest):
-         copy_source = {
-             'Bucket': self._uuid,
-             'Key': source
-         }
-         return self._connection.s3client.copy_object(CopySource=copy_source, Bucket=self._uuid, Key=dest)
+         try:
+             copy_source = {
+                 'Bucket': self._uuid,
+                 'Key': source
+             }
+             return self._connection.s3client.copy_object(CopySource=copy_source, Bucket=self._uuid, Key=dest)
+         except self._connection.s3resource.meta.client.exceptions.NoSuchBucket as err:
+             raise MissingBucketException("Cannot copy file {} to {} from bucket {}. Bucket not found.".format(source, dest, err.response['Error']['BucketName'])) from err

      @deprecation.deprecated(deprecated_in="2.6.0", removed_in="3.0",
                              current_version=__version__, # type: ignore
@@ -490,14 +507,20 @@ class Bucket(Storage): # pylint: disable=W0223
              if hasattr(remote, 'read'):
                  shutil.copyfileobj(remote, data)
              else:
-                 self._connection.s3client.download_fileobj(self._uuid, remote, data)
+                 try:
+                     self._connection.s3client.download_fileobj(self._uuid, remote, data)
+                 except self._connection.s3resource.meta.client.exceptions.NoSuchBucket as err:
+                     raise MissingBucketException("Cannot download file {} from bucket {}. Bucket not found.".format(remote, err.response['Error']['BucketName'])) from err
          return local

      @_util.copy_docs(Storage.delete_file)
      def delete_file(self, remote):
-         if self._connection._sanitize_bucket_paths:
-             remote = _util.get_sanitized_bucket_path(remote, self._connection._show_bucket_warnings)
-         self._connection.s3client.delete_object(Bucket=self._uuid, Key=remote)
+         try:
+             if self._connection._sanitize_bucket_paths:
+                 remote = _util.get_sanitized_bucket_path(remote, self._connection._show_bucket_warnings)
+             self._connection.s3client.delete_object(Bucket=self._uuid, Key=remote)
+         except self._connection.s3resource.meta.client.exceptions.NoSuchBucket as err:
+             raise MissingBucketException("Cannot delete file {} from bucket {}. Bucket not found.".format(remote, err.response['Error']['BucketName'])) from err

      @property
      def uuid(self):
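
All of the bucket operations touched above now translate boto3's NoSuchBucket error into qarnot.exceptions.MissingBucketException. A minimal sketch of what calling code can rely on (the bucket object is assumed to come from an existing Connection):

from qarnot.exceptions import MissingBucketException

def list_bucket_keys(bucket):
    # bucket is a qarnot.bucket.Bucket retrieved from an existing Connection.
    try:
        return [obj.key for obj in bucket.list_files()]
    except MissingBucketException as err:
        # list_files, sync_files, get_all_files, copy_file and delete_file now
        # all raise this exception when the underlying S3 bucket no longer exists.
        print("Bucket is gone:", err)
        return []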

qarnot/exceptions.py

@@ -30,7 +30,9 @@ __all__ = ['QarnotException',
             'NotEnoughCreditsException',
             'MissingBucketException',
             'MaxJobException',
-            'MissingJobException']
+            'MissingJobException',
+            'SecretNotFoundException',
+            'SecretConflictException']


  class QarnotException(Exception):
@@ -93,3 +95,11 @@ class MissingJobException(Exception):


  class MaxJobException(Exception):
      """Max number of jobs reached."""
+
+
+ class SecretNotFoundException(Exception):
+     """Requested secret was not found."""
+
+
+ class SecretConflictException(Exception):
+     """Secret already exists."""

qarnot/forced_network_rule.py

@@ -76,6 +76,66 @@ class ForcedNetworkRule(object):

      Whether the network endpoint to access is in the payload."""

+     @classmethod
+     def from_json(cls, json: Dict[str, Union[str, bool]]):
+         """Create the forced network rule from json.
+
+         :param dict json: Dictionary representing the forced network rule
+         :returns: The created :class:`~qarnot.retry_settings.ForcedNetworkRule`
+         """
+
+         inbound: bool = bool(json["inbound"])
+         proto: str = str(json["proto"])
+
+         port: str = None
+         if 'port' in json:
+             port = str(json["port"])
+
+         to: str = None
+         if 'to' in json:
+             to = str(json["to"])
+
+         public_host: str = None
+         if 'public_host' in json:
+             public_host = str(json["public_host"])
+
+         public_port: str = None
+         if 'public_port' in json:
+             public_port = str(json["public_port"])
+
+         forwarder: str = None
+         if 'forwarder' in json:
+             forwarder = str(json["forwarder"])
+
+         priority: str = None
+         if 'priority' in json:
+             priority = str(json["priority"])
+
+         description: str = None
+         if 'description' in json:
+             description = str(json["description"])
+
+         to_qbox: Optional[bool] = None
+         if 'to_qbox' in json:
+             to_qbox = bool(json["to_qbox"])
+
+         to_payload: Optional[bool] = None
+         if 'to_payload' in json:
+             to_payload = bool(json["to_payload"])
+
+         return ForcedNetworkRule(
+             inbound,
+             proto,
+             port,
+             to,
+             public_host,
+             public_port,
+             forwarder,
+             priority,
+             description,
+             to_qbox,
+             to_payload)
+
      def to_json(self):
          result: Dict[str, Union[str, bool]] = {
              "inbound": self.inbound,

qarnot/pool.py

@@ -19,6 +19,7 @@ from typing import Dict, List, Optional

  from qarnot.retry_settings import RetrySettings
  from qarnot.forced_network_rule import ForcedNetworkRule
+ from qarnot.secrets import SecretsAccessRights

  from . import raise_on_error, get_url, _util
  from .bucket import Bucket
@@ -124,6 +125,7 @@ class Pool(object):
          self._privileges: Privileges = Privileges()
          self._default_retry_settings: RetrySettings = RetrySettings()
          self._forced_network_rules: List[ForcedNetworkRule] = []
+         self._secrets_access_rights: SecretsAccessRights = SecretsAccessRights()

      @classmethod
      def _retrieve(cls, connection, uuid):
@@ -236,8 +238,9 @@ class Pool(object):
              self._default_retry_settings = RetrySettings.from_json(json_pool["defaultRetrySettings"])
          if 'schedulingType' in json_pool:
              self._scheduling_type = SchedulingType.from_string(json_pool["schedulingType"])
-         if 'forcedNetworkRules' in json_pool:
-             self._forced_network_rules = json_pool["forcedNetworkRules"]
+         self._forced_network_rules = [ForcedNetworkRule.from_json(forced_network_dict) for forced_network_dict in json_pool.get("forcedNetworkRules", [])]
+         if 'secretsAccessRights' in json_pool:
+             self._secrets_access_rights = SecretsAccessRights.from_json(json_pool["secretsAccessRights"])

      def _to_json(self):
          """Get a dict ready to be json packed from this pool."""
@@ -293,7 +296,10 @@ class Pool(object):
              json_pool['targetedReservedMachineKey'] = self._targeted_reserved_machine_key

          if self._forced_network_rules is not None:
-             json_pool['forcedNetworkRules'] = self._forced_network_rules
+             json_pool['forcedNetworkRules'] = [x.to_json() for x in self._forced_network_rules]
+
+         if self._secrets_access_rights:
+             json_pool['secretsAccessRights'] = self._secrets_access_rights.to_json()

          return json_pool

@@ -1068,6 +1074,31 @@ class Pool(object):

          self._constraints = value

+     @property
+     def secrets_access_rights(self):
+         """:type: :class:`~qarnot.secrets.SecretsAccessRights`
+         :getter: Returns the description of the secrets the tasks in this pool will have access to when running.
+         :setter: set the secrets this pool will have access to when running.
+
+         Secrets can be accessible either by exact match on the key or by using a prefix
+         in order to match all the secrets starting with said prefix.
+         """
+         self._update_if_summary()
+         if self._auto_update:
+             self.update()
+
+         return self._secrets_access_rights
+
+     @secrets_access_rights.setter
+     def secrets_access_rights(self, value: SecretsAccessRights):
+         """Setter for secrets access rights
+         """
+         self._update_if_summary()
+         if self._auto_update:
+             self.update()
+
+         self._secrets_access_rights = value
+
      @property
      def forced_network_rules(self):
          """:type: list{:class:`~qarnot.forced_network_rule.ForcedNetworkRule`}