boto3-assist 0.32.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67)
  1. boto3_assist/__init__.py +0 -0
  2. boto3_assist/aws_config.py +199 -0
  3. boto3_assist/aws_lambda/event_info.py +414 -0
  4. boto3_assist/aws_lambda/mock_context.py +5 -0
  5. boto3_assist/boto3session.py +87 -0
  6. boto3_assist/cloudwatch/cloudwatch_connection.py +84 -0
  7. boto3_assist/cloudwatch/cloudwatch_connection_tracker.py +17 -0
  8. boto3_assist/cloudwatch/cloudwatch_log_connection.py +62 -0
  9. boto3_assist/cloudwatch/cloudwatch_logs.py +39 -0
  10. boto3_assist/cloudwatch/cloudwatch_query.py +191 -0
  11. boto3_assist/cognito/cognito_authorizer.py +169 -0
  12. boto3_assist/cognito/cognito_connection.py +59 -0
  13. boto3_assist/cognito/cognito_utility.py +514 -0
  14. boto3_assist/cognito/jwks_cache.py +21 -0
  15. boto3_assist/cognito/user.py +27 -0
  16. boto3_assist/connection.py +146 -0
  17. boto3_assist/connection_tracker.py +120 -0
  18. boto3_assist/dynamodb/dynamodb.py +1206 -0
  19. boto3_assist/dynamodb/dynamodb_connection.py +113 -0
  20. boto3_assist/dynamodb/dynamodb_helpers.py +333 -0
  21. boto3_assist/dynamodb/dynamodb_importer.py +102 -0
  22. boto3_assist/dynamodb/dynamodb_index.py +507 -0
  23. boto3_assist/dynamodb/dynamodb_iservice.py +29 -0
  24. boto3_assist/dynamodb/dynamodb_key.py +130 -0
  25. boto3_assist/dynamodb/dynamodb_model_base.py +382 -0
  26. boto3_assist/dynamodb/dynamodb_model_base_interfaces.py +34 -0
  27. boto3_assist/dynamodb/dynamodb_re_indexer.py +165 -0
  28. boto3_assist/dynamodb/dynamodb_reindexer.py +165 -0
  29. boto3_assist/dynamodb/dynamodb_reserved_words.py +52 -0
  30. boto3_assist/dynamodb/dynamodb_reserved_words.txt +573 -0
  31. boto3_assist/dynamodb/readme.md +68 -0
  32. boto3_assist/dynamodb/troubleshooting.md +7 -0
  33. boto3_assist/ec2/ec2_connection.py +57 -0
  34. boto3_assist/environment_services/__init__.py +0 -0
  35. boto3_assist/environment_services/environment_loader.py +128 -0
  36. boto3_assist/environment_services/environment_variables.py +219 -0
  37. boto3_assist/erc/__init__.py +64 -0
  38. boto3_assist/erc/ecr_connection.py +57 -0
  39. boto3_assist/errors/custom_exceptions.py +46 -0
  40. boto3_assist/http_status_codes.py +80 -0
  41. boto3_assist/models/serializable_model.py +9 -0
  42. boto3_assist/role_assumption_mixin.py +38 -0
  43. boto3_assist/s3/s3.py +64 -0
  44. boto3_assist/s3/s3_bucket.py +67 -0
  45. boto3_assist/s3/s3_connection.py +76 -0
  46. boto3_assist/s3/s3_event_data.py +168 -0
  47. boto3_assist/s3/s3_object.py +695 -0
  48. boto3_assist/securityhub/securityhub.py +150 -0
  49. boto3_assist/securityhub/securityhub_connection.py +57 -0
  50. boto3_assist/session_setup_mixin.py +70 -0
  51. boto3_assist/ssm/connection.py +57 -0
  52. boto3_assist/ssm/parameter_store/parameter_store.py +116 -0
  53. boto3_assist/utilities/datetime_utility.py +349 -0
  54. boto3_assist/utilities/decimal_conversion_utility.py +140 -0
  55. boto3_assist/utilities/dictionary_utility.py +32 -0
  56. boto3_assist/utilities/file_operations.py +135 -0
  57. boto3_assist/utilities/http_utility.py +48 -0
  58. boto3_assist/utilities/logging_utility.py +0 -0
  59. boto3_assist/utilities/numbers_utility.py +329 -0
  60. boto3_assist/utilities/serialization_utility.py +664 -0
  61. boto3_assist/utilities/string_utility.py +337 -0
  62. boto3_assist/version.py +1 -0
  63. boto3_assist-0.32.0.dist-info/METADATA +76 -0
  64. boto3_assist-0.32.0.dist-info/RECORD +67 -0
  65. boto3_assist-0.32.0.dist-info/WHEEL +4 -0
  66. boto3_assist-0.32.0.dist-info/licenses/LICENSE-EXPLAINED.txt +11 -0
  67. boto3_assist-0.32.0.dist-info/licenses/LICENSE.txt +21 -0
@@ -0,0 +1,695 @@
+ """
+ Geek Cafe, LLC
+ Maintainers: Eric Wilson
+ MIT License. See Project Root for the license information.
+ """
+
+ import io
+ import os
+ import time
+ from typing import Any, Dict, Optional, List
+
+ from aws_lambda_powertools import Logger
+ from botocore.exceptions import ClientError
+
+ from boto3_assist.errors.custom_exceptions import FileNotFound, InvalidHttpMethod
+ from boto3_assist.s3.s3_connection import S3Connection
+ from boto3_assist.utilities.datetime_utility import DatetimeUtility
+ from boto3_assist.utilities.file_operations import FileOperations
+ from boto3_assist.utilities.http_utility import HttpUtility
+
+ logger = Logger(child=True)
+
+
+ class S3Object:
+     """S3 Object Actions"""
+
+     def __init__(self, connection: Optional[S3Connection] = None):
+         self.connection = connection or S3Connection()
+
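A minimal construction sketch (assuming `S3Connection()` resolves credentials from the default boto3 chain, as the no-argument fallback in `__init__` implies). The sketches after the later methods below reuse this `s3_object`:

```python
from boto3_assist.s3.s3_connection import S3Connection
from boto3_assist.s3.s3_object import S3Object

# S3Object() with no argument falls back to a default S3Connection().
s3_object = S3Object(S3Connection())
```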
+     def delete(self, *, bucket_name: str, key: str) -> Dict[str, Any]:
+         """
+         Deletes an object key
+
+         Args:
+             bucket_name (str): The AWS Bucket Name
+             key (str): The Object Key
+         """
+         s3 = self.connection.client
+         # see if the object exists before attempting the delete
+         try:
+             s3.head_object(Bucket=bucket_name, Key=key)
+             response = s3.delete_object(Bucket=bucket_name, Key=key)
+         except s3.exceptions.NoSuchKey:
+             response = {"ResponseMetadata": {"HTTPStatusCode": 404}}
+         except s3.exceptions.ClientError as e:
+             if e.response.get("Error", {}).get("Code") == "404":
+                 response = {"ResponseMetadata": {"HTTPStatusCode": 404}}
+             else:
+                 raise e
+
+         return dict(response)
+
+     def delete_all_versions(
+         self, *, bucket_name: str, key: str, include_deleted: bool = False
+     ) -> List[str]:
+         """
+         Deletes an object key and all the versions for that object key
+
+         Args:
+             bucket_name (str): The AWS Bucket Name
+             key (str): The Object Key
+             include_deleted (bool, optional): Should deleted files be removed as well.
+                 If True it will look for the object keys with the delete marker and remove them.
+                 Defaults to False.
+         """
+         s3 = self.connection.client
+         paginator = s3.get_paginator("list_object_versions")
+         files: List[str] = []
+
+         for page in paginator.paginate(Bucket=bucket_name, Prefix=key):
+             # Delete object versions
+             if "Versions" in page:
+                 for version in page["Versions"]:
+                     s3.delete_object(
+                         Bucket=bucket_name,
+                         Key=version["Key"],
+                         VersionId=version["VersionId"],
+                     )
+
+                     files.append(f"{version['Key']} - {version['VersionId']}")
+
+             if include_deleted:
+                 # delete previous files that may have only been soft deleted
+                 if "DeleteMarkers" in page:
+                     for marker in page["DeleteMarkers"]:
+                         s3.delete_object(
+                             Bucket=bucket_name,
+                             Key=marker["Key"],
+                             VersionId=marker["VersionId"],
+                         )
+
+                         files.append(
+                             f"{marker['Key']}:{marker['VersionId']}:delete-marker"
+                         )
+         else:
+             # for/else: runs once the pagination completes without a break;
+             # issue a plain delete for the key itself and record it
+             response = self.delete(bucket_name=bucket_name, key=key)
+             if response["ResponseMetadata"]["HTTPStatusCode"] == 404:
+                 return files
+
+             files.append(key)
+
+         return files
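A short usage sketch for a versioned bucket, continuing from the construction sketch above (the bucket name and key are hypothetical):

```python
# Remove every version of the key, including delete markers left behind
# by soft deletes; returns the "key - versionId" strings it removed.
removed = s3_object.delete_all_versions(
    bucket_name="my-example-bucket",  # hypothetical bucket
    key="reports/2024/summary.csv",
    include_deleted=True,
)
print(f"removed {len(removed)} versions")
```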
+
+     def generate_presigned_url(
+         self,
+         *,
+         bucket_name: str,
+         key_path: str,
+         file_name: str,
+         meta_data: Optional[dict] = None,
+         expiration: int = 3600,
+         method_type: str = "POST",
+         user_id: Optional[str] = None,
+     ) -> Dict[str, Any]:
+         """
+         Create a signed URL for uploading or downloading a file in S3.
+         :param bucket_name: The name of the S3 bucket.
+         :param key_path: The object key the URL will point to.
+         :param file_name: The file name of the file being uploaded.
+         :param meta_data: Optional metadata merged into the response.
+         :param expiration: The number of seconds the URL is valid for.
+         :param method_type: "PUT", "POST", or "GET". Defaults to "POST".
+         :param user_id: The user ID of the user uploading the file.
+         :return: A dict containing the signed URL, the key, and the metadata.
+         """
+         start = DatetimeUtility.get_utc_now()
+         logger.debug(
+             f"Creating signed URL for bucket {bucket_name} for user {user_id} and file {file_name} at {start} UTC"
+         )
+
+         file_extension = FileOperations.get_file_extension(file_name)
+
+         local_meta = {
+             "user_id": f"{user_id}",
+             "file_name": f"{file_name}",
+             "extension": f"{file_extension}",
+             "method": "pre-signed-upload",
+         }
+
+         if not meta_data:
+             meta_data = local_meta
+         else:
+             meta_data.update(local_meta)
+
+         key = key_path
+         method_type = method_type.upper()
+
+         signed_url: str | Dict[str, Any]
+         if method_type == "PUT":
+             signed_url = self.connection.client.generate_presigned_url(
+                 "put_object",
+                 Params={
+                     "Bucket": f"{bucket_name}",
+                     "Key": f"{key}",
+                     # NOTE: if you include the ContentType or Metadata here, then it's
+                     # required when they upload the file; otherwise you will get a
+                     # `SignatureDoesNotMatch` error. For now it's commented out.
+                     # 'ContentType': 'application/octet-stream',
+                     # 'ACL': 'private',
+                     # "Metadata": meta_data,
+                 },
+                 ExpiresIn=expiration,  # URL is valid for x seconds
+             )
+         elif method_type == "POST":
+             signed_url = self.connection.client.generate_presigned_post(
+                 bucket_name,
+                 key,
+                 ExpiresIn=expiration,  # URL is valid for x seconds
+             )
+         elif method_type == "GET":
+             signed_url = self.connection.client.generate_presigned_url(
+                 "get_object",
+                 Params={
+                     "Bucket": f"{bucket_name}",
+                     "Key": f"{key}",
+                 },
+                 ExpiresIn=expiration,  # URL is valid for x seconds
+             )
+         else:
+             raise InvalidHttpMethod(
+                 f'Unknown method type "{method_type}". Valid types are "PUT", "POST", and "GET".'
+             )
+
+         end = DatetimeUtility.get_utc_now()
+         logger.debug(f"Signed URL created in {end - start}")
+
+         response = {
+             "signed_url": signed_url,
+             "key": key,
+             "meta_data": meta_data,
+         }
+
+         return response
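A hedged usage sketch: for `method_type="POST"`, boto3's `generate_presigned_post` returns a dict with `url` and `fields` that a client submits as a multipart form. The `requests` call below is illustrative; any HTTP client works, and the bucket, key, and user ID are hypothetical:

```python
import requests  # illustrative client

result = s3_object.generate_presigned_url(
    bucket_name="my-example-bucket",  # hypothetical bucket
    key_path="uploads/user-123/report.pdf",
    file_name="report.pdf",
    method_type="POST",
    user_id="user-123",
)

post = result["signed_url"]  # {"url": ..., "fields": {...}} from generate_presigned_post
with open("report.pdf", "rb") as f:
    upload = requests.post(post["url"], data=post["fields"], files={"file": f})
upload.raise_for_status()
```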
+
+     def put(self, *, bucket: str, key: str, data: bytes | str) -> str:
+         """
+         Uploads a file object to s3. Returns the full s3 path s3://<bucket>/<key>
+         """
+         return self.upload_file_obj(bucket=bucket, key=key, file_obj=data)
+
+     def upload_file_obj(self, *, bucket: str, key: str, file_obj: bytes | str) -> str:
+         """
+         Uploads a file object to s3. Returns the full s3 path s3://<bucket>/<key>
+         """
+
+         if key.startswith("/"):
+             # remove the first slash
+             key = key[1:]
+
+         logger.debug(
+             {
+                 "metric_filter": "upload_file_to_s3",
+                 "bucket": bucket,
+                 "key": key,
+             }
+         )
+         try:
+             # convert to bytes if necessary
+             file_obj = (
+                 file_obj.encode("utf-8") if isinstance(file_obj, str) else file_obj
+             )
+             self.connection.client.upload_fileobj(
+                 Fileobj=io.BytesIO(file_obj), Bucket=bucket, Key=key
+             )
+
+         except ClientError as ce:
+             error = {
+                 "metric_filter": "upload_file_to_s3_failure",
+                 "s3 upload": "failure",
+                 "bucket": bucket,
+                 "key": key,
+             }
+             logger.error(error)
+             raise RuntimeError(error) from ce
+
+         return f"s3://{bucket}/{key}"
+
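A quick sketch of `put` / `upload_file_obj`, continuing from the construction sketch (the bucket name is hypothetical):

```python
# Strings are UTF-8 encoded before upload; bytes pass through unchanged.
path = s3_object.put(
    bucket="my-example-bucket",  # hypothetical bucket
    key="/notes/hello.txt",      # the leading slash is stripped automatically
    data="hello, world",
)
assert path == "s3://my-example-bucket/notes/hello.txt"
```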
+     def upload_file(
+         self,
+         *,
+         bucket: str,
+         key: str,
+         local_file_path: str,
+         throw_error_on_failure: bool = False,
+     ) -> str | None:
+         """
+         Uploads a file to s3. Returns the full s3 path s3://<bucket>/<key>
+         """
+
+         if key.startswith("/"):
+             # remove the first slash
+             key = key[1:]
+
+         # build the path
+         s3_path = f"s3://{bucket}/{key}"
+
+         logger.debug(
+             {
+                 "metric_filter": "upload_file_to_s3",
+                 "bucket": bucket,
+                 "key": key,
+                 "local_file_path": local_file_path,
+             }
+         )
+         try:
+             self.connection.client.upload_file(local_file_path, bucket, key)
+
+         except ClientError as ce:
+             error = {
+                 "metric_filter": "upload_file_to_s3_failure",
+                 "s3 upload": "failure",
+                 "bucket": bucket,
+                 "key": key,
+                 "local_file_path": local_file_path,
+             }
+             logger.error(error)
+
+             if throw_error_on_failure:
+                 raise RuntimeError(error) from ce
+
+             return None
+
+         return s3_path
+
+     def download_file(
+         self,
+         *,
+         bucket: str,
+         key: str,
+         local_directory: str | None = None,
+         local_file_path: str | None = None,
+         retry_attempts: int = 3,
+         retry_sleep: int = 5,
+     ) -> str:
+         """Download a file from s3"""
+         exception: Exception | None = None
+
+         if retry_attempts == 0:
+             retry_attempts = 1
+
+         for i in range(retry_attempts):
+             exception = None
+             try:
+                 path = self.download_file_no_retries(
+                     bucket=bucket,
+                     key=key,
+                     local_directory=local_directory,
+                     local_file_path=local_file_path,
+                 )
+                 if path and os.path.exists(path):
+                     return path
+
+             except Exception as e:  # pylint: disable=w0718
+                 logger.warning(
+                     {
+                         "action": "download_file",
+                         "result": "failure",
+                         "exception": str(e),
+                         "attempt": i + 1,
+                         "retry_attempts": retry_attempts,
+                     }
+                 )
+
+                 exception = e
+
+             # sleep for a bit before the next attempt
+             attempt = i + 1
+             time.sleep(attempt * retry_sleep)
+
+         if exception:
+             logger.exception(
+                 {
+                     "action": "download_file",
+                     "result": "failure",
+                     "exception": str(exception),
+                     "retry_attempts": retry_attempts,
+                 }
+             )
+
+             raise exception
+
+         raise RuntimeError("Unable to download file")
+
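A usage sketch, continuing from the construction sketch; on failures the call retries with a linear backoff (attempt × `retry_sleep` seconds). The bucket, key, and directory are hypothetical:

```python
local_path = s3_object.download_file(
    bucket="my-example-bucket",        # hypothetical bucket
    key="reports/2024/summary.csv",
    local_directory="/tmp/downloads",  # omit to fall back to a temp directory
    retry_attempts=3,
    retry_sleep=2,
)
print(f"downloaded to {local_path}")
```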
+     def download_file_no_retries(
+         self,
+         bucket: str,
+         key: str,
+         local_directory: str | None = None,
+         local_file_path: str | None = None,
+     ) -> str:
+         """
+         Downloads a file from s3
+
+         Args:
+             bucket (str): s3 bucket
+             key (str): the s3 object key
+             local_directory (str, optional): Local directory to download to. Defaults to None.
+                 If None, we'll use a local tmp directory.
+             local_file_path (str, optional): Explicit local file path. Defaults to None.
+
+         Raises:
+             FileNotFoundError: If the file fails to download.
+
+         Returns:
+             str: Path to the downloaded file.
+         """
+
+         decoded_object_key: Optional[str] = None
+         try:
+             logger.debug(
+                 {
+                     "action": "downloading file",
+                     "bucket": bucket,
+                     "key": key,
+                     "local_directory": local_directory,
+                 }
+             )
+             return self.__download_file(bucket, key, local_directory, local_file_path)
+         except FileNotFoundError:
+             logger.warning(
+                 {
+                     "metric_filter": "download_file_error",
+                     "error": "FileNotFoundError",
+                     "message": "attempting to find it decoded",
+                     "bucket": bucket,
+                     "key": key,
+                 }
+             )
+
+             # attempt to decode the key and retry
+             decoded_object_key = HttpUtility.decode_url(key)
+
+             logger.error(
+                 {
+                     "metric_filter": "download_file_error",
+                     "error": "FileNotFoundError",
+                     "message": "attempting to find it decoded",
+                     "bucket": bucket,
+                     "key": key,
+                     "decoded_object_key": decoded_object_key,
+                 }
+             )
+
+             return self.__download_file(
+                 bucket, decoded_object_key, local_directory, local_file_path
+             )
+
+         except Exception as e:
+             logger.error(
+                 {
+                     "metric_filter": "download_file_error",
+                     "error": str(e),
+                     "bucket": bucket,
+                     "key": key,
+                     "decoded_object_key": decoded_object_key,
+                 }
+             )
+             raise e
+
+     def stream_file(self, bucket_name: str, key: str) -> Dict[str, Any]:
+         """
+         Gets a file from s3 and returns the response.
+         The "Body" is a streaming body object. You can read it like a file.
+         For example:
+
+             with response["Body"] as f:
+                 data = f.read()
+                 print(data)
+
+         """
+         return self.get_object(bucket_name=bucket_name, key=key)
+
+     def get_object(self, bucket_name: str, key: str) -> Dict[str, Any]:
+         """
+         Gets a file from s3 and returns the response.
+         The "Body" is a streaming body object. You can read it like a file.
+         For example:
+
+             with response["Body"] as f:
+                 data = f.read()
+                 print(data)
+
+         """
+
+         logger.debug(
+             {
+                 "source": "download_file",
+                 "action": "downloading a file from s3",
+                 "bucket": bucket_name,
+                 "key": key,
+             }
+         )
+
+         response: Dict[str, Any] = {}
+         error = None
+
+         try:
+             response = dict(
+                 self.connection.client.get_object(Bucket=bucket_name, Key=key)
+             )
+
+             logger.debug(
+                 {"metric_filter": "s3_download_response", "response": str(response)}
+             )
+
+         except Exception as e:  # pylint: disable=W0718
+             error = str(e)
+             error_info = {
+                 "metric_filter": "s3_download_error",
+                 "error": str(e),
+                 "bucket": bucket_name,
+                 "key": key,
+             }
+
+             logger.error(error_info)
+             # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/s3/client/get_object.html
+             if "An error occurred (AccessDenied)" in error:
+                 if (
+                     "is not authorized to perform: s3:ListBucket on resource" in error
+                     and "because no identity-based policy allows the s3:ListBucket action"
+                     in error
+                 ):
+                     # the file is not found, but you get an access denied error when you
+                     # don't have s3:ListBucket. To make life easier, we raise a
+                     # 404-style FileNotFound error instead.
+                     raise FileNotFound("File Not Found") from e
+
+             # last ditch
+             raise RuntimeError(error_info) from e
+
+         finally:
+             logger.debug(
+                 {
+                     "source": "download_file",
+                     "action": "downloading a file from s3",
+                     "bucket": bucket_name,
+                     "key": key,
+                     "response": response,
+                     "errors": error,
+                 }
+             )
+
+         return response
+
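A short streaming sketch mirroring the docstring, continuing from the construction sketch (the bucket and key are hypothetical):

```python
response = s3_object.get_object(bucket_name="my-example-bucket", key="logs/app.log")

# "Body" is a botocore StreamingBody; read it like a file.
with response["Body"] as body:
    text = body.read().decode("utf-8")
print(text[:200])
```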
+     def __download_file(
+         self,
+         bucket: str,
+         key: str,
+         local_directory: str | None = None,
+         local_file_path: str | None = None,
+     ):
+         if local_directory and local_file_path:
+             raise ValueError(
+                 "Only one of local_directory or local_file_path can be provided"
+             )
+
+         if local_directory and not os.path.exists(local_directory):
+             FileOperations.makedirs(local_directory)
+
+         if local_file_path and not os.path.exists(os.path.dirname(local_file_path)):
+             FileOperations.makedirs(os.path.dirname(local_file_path))
+
+         file_name = self.__get_file_name_from_path(key)
+         if local_directory is None and local_file_path is None:
+             local_path = self.get_local_path_for_file(file_name)
+         elif local_directory:
+             local_path = os.path.join(local_directory, file_name)
+         else:
+             local_path = local_file_path
+
+         logger.debug(
+             {
+                 "source": "download_file",
+                 "action": "downloading a file from s3",
+                 "bucket": bucket,
+                 "key": key,
+                 "file_name": file_name,
+                 "local_path": local_path,
+             }
+         )
+
+         error: str | None = None
+         try:
+             self.connection.client.download_file(bucket, key, local_path)
+
+         except Exception as e:  # pylint: disable=W0718
+             error = str(e)
+             logger.error({"metric_filter": "s3_download_error", "error": str(e)})
+
+         file_exist = os.path.exists(local_path)
+
+         logger.debug(
+             {
+                 "source": "download_file",
+                 "action": "downloading a file from s3",
+                 "bucket": bucket,
+                 "key": key,
+                 "file_name": file_name,
+                 "local_path": local_path,
+                 "file_downloaded": file_exist,
+                 "errors": error,
+             }
+         )
+
+         if not file_exist:
+             raise FileNotFoundError("File Failed to download (does not exist) from S3.")
+
+         return local_path
+
+     def __get_file_name_from_path(self, path: str) -> str:
+         """
+         Get a file name from the path
+
+         Args:
+             path (str): a file path
+
+         Returns:
+             str: the file name
+         """
+         return path.rsplit("/", 1)[-1]
+
+     def get_local_path_for_file(self, file_name: str):
+         """
+         Get a local temp location for a file.
+         This is designed to work with lambda functions.
+         The /tmp directory is the only writeable location for lambda functions.
+         """
+         temp_dir = self.get_temp_directory()
+         # use /tmp; it's the only writeable location for lambda
+         local_path = os.path.join(temp_dir, file_name)
+         return local_path
+
+     def get_temp_directory(self):
+         """
+         Determines the appropriate temporary directory based on the environment.
+         If running in AWS Lambda, returns '/tmp'.
+         Otherwise, returns the system's standard temp directory.
+         """
+         return FileOperations.get_tmp_directory()
+
+     def encode(
+         self, text: str, encoding: str = "utf-8", errors: str = "strict"
+     ) -> bytes:
+         """
+         Encodes a string for s3
+         """
+         return text.encode(encoding=encoding, errors=errors)
+
+     def decode(
+         self, file_obj: bytes, encoding: str = "utf-8", errors: str = "strict"
+     ) -> str:
+         """
+         Decodes bytes to a string
+         """
+         return file_obj.decode(encoding=encoding, errors=errors)
+
+     def list_versions(self, bucket: str, prefix: str = "") -> List[Dict[str, Any]]:
+         """
+         List all versions of objects in an S3 bucket with a given prefix.
+
+         Args:
+             bucket (str): The name of the S3 bucket.
+             prefix (str, optional): The prefix to filter objects by. Defaults to "".
+
+         Returns:
+             list: A list of dictionaries containing information about each object version.
+         """
+         versions: List[Dict[str, Any]] = []
+         paginator = self.connection.client.get_paginator("list_object_versions")
+         page_iterator = paginator.paginate(Bucket=bucket, Prefix=prefix)
+
+         for page in page_iterator:
+             if "Versions" in page:
+                 versions.extend(page["Versions"])
+
+         return versions
+
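A usage sketch, continuing from the construction sketch; each entry is the version dict boto3 returns from `list_object_versions`, with fields such as `Key`, `VersionId`, and `IsLatest` (the bucket and prefix are hypothetical):

```python
for version in s3_object.list_versions(bucket="my-example-bucket", prefix="reports/"):
    print(version["Key"], version["VersionId"], version["IsLatest"])
```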
+     def copy(
+         self,
+         source_bucket: str,
+         source_key: str,
+         destination_bucket: str,
+         destination_key: str,
+     ) -> Dict[str, Any]:
+         """
+         Copies an object from one location to another.
+         The original is kept.
+         """
+
+         if source_key.startswith("/"):
+             # remove the first slash
+             source_key = source_key[1:]
+
+         if destination_key.startswith("/"):
+             # remove the first slash
+             destination_key = destination_key[1:]
+
+         response = self.connection.client.copy_object(
+             CopySource={"Bucket": source_bucket, "Key": source_key},
+             Bucket=destination_bucket,
+             Key=destination_key,
+         )
+
+         return dict(response)
+
+     def move(
+         self,
+         source_bucket: str,
+         source_key: str,
+         destination_bucket: str,
+         destination_key: str,
+     ) -> Dict[str, Any]:
+         """
+         Copies an object from one location to another then deletes the source.
+         The source is only deleted if the copy is successful.
+         """
+
+         copy_response = self.connection.client.copy_object(
+             CopySource={"Bucket": source_bucket, "Key": source_key},
+             Bucket=destination_bucket,
+             Key=destination_key,
+         )
+
+         # boto3 reports the HTTP status under ResponseMetadata, not "statusCode"
+         status_code = copy_response.get("ResponseMetadata", {}).get("HTTPStatusCode")
+         delete_response = {}
+         if status_code == 200:
+             if source_key.startswith("/"):
+                 source_key = source_key[1:]
+             delete_response = self.delete(bucket_name=source_bucket, key=source_key)
+             status_code = delete_response.get("ResponseMetadata", {}).get(
+                 "HTTPStatusCode", status_code
+             )
+
+         response = {
+             "status_code": status_code,
+             "copy": copy_response,
+             "delete": delete_response,
+         }
+
+         return response
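A closing sketch of `copy` and `move`, continuing from the construction sketch (bucket names and keys are hypothetical); `move` only deletes the source when the copy returns HTTP 200:

```python
s3_object.copy(
    source_bucket="my-example-bucket",
    source_key="reports/2024/summary.csv",
    destination_bucket="my-archive-bucket",
    destination_key="archive/2024/summary.csv",
)

result = s3_object.move(
    source_bucket="my-example-bucket",
    source_key="inbox/incoming.csv",
    destination_bucket="my-example-bucket",
    destination_key="processed/incoming.csv",
)
print(result["status_code"])
```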