django-nativemojo 0.1.10__py3-none-any.whl → 0.1.16__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- django_nativemojo-0.1.16.dist-info/METADATA +138 -0
- django_nativemojo-0.1.16.dist-info/RECORD +302 -0
- mojo/__init__.py +1 -1
- mojo/apps/account/management/__init__.py +5 -0
- mojo/apps/account/management/commands/__init__.py +6 -0
- mojo/apps/account/management/commands/serializer_admin.py +651 -0
- mojo/apps/account/migrations/0004_user_avatar.py +20 -0
- mojo/apps/account/migrations/0005_group_last_activity.py +18 -0
- mojo/apps/account/migrations/0006_add_device_tracking_models.py +72 -0
- mojo/apps/account/migrations/0007_delete_userdevicelocation.py +16 -0
- mojo/apps/account/migrations/0008_userdevicelocation.py +33 -0
- mojo/apps/account/migrations/0009_geolocatedip_subnet.py +18 -0
- mojo/apps/account/migrations/0010_group_avatar.py +20 -0
- mojo/apps/account/migrations/0011_user_org_registereddevice_pushconfig_and_more.py +118 -0
- mojo/apps/account/migrations/0012_remove_pushconfig_apns_key_file_and_more.py +21 -0
- mojo/apps/account/migrations/0013_pushconfig_test_mode_alter_pushconfig_apns_enabled_and_more.py +28 -0
- mojo/apps/account/migrations/0014_notificationdelivery_data_payload_and_more.py +48 -0
- mojo/apps/account/models/__init__.py +2 -0
- mojo/apps/account/models/device.py +281 -0
- mojo/apps/account/models/group.py +319 -15
- mojo/apps/account/models/member.py +29 -5
- mojo/apps/account/models/push/__init__.py +4 -0
- mojo/apps/account/models/push/config.py +112 -0
- mojo/apps/account/models/push/delivery.py +93 -0
- mojo/apps/account/models/push/device.py +66 -0
- mojo/apps/account/models/push/template.py +99 -0
- mojo/apps/account/models/user.py +369 -19
- mojo/apps/account/rest/__init__.py +2 -0
- mojo/apps/account/rest/device.py +39 -0
- mojo/apps/account/rest/group.py +9 -0
- mojo/apps/account/rest/push.py +187 -0
- mojo/apps/account/rest/user.py +100 -6
- mojo/apps/account/services/__init__.py +1 -0
- mojo/apps/account/services/push.py +363 -0
- mojo/apps/aws/migrations/0001_initial.py +206 -0
- mojo/apps/aws/migrations/0002_emaildomain_can_recv_emaildomain_can_send_and_more.py +28 -0
- mojo/apps/aws/migrations/0003_mailbox_is_domain_default_mailbox_is_system_default_and_more.py +31 -0
- mojo/apps/aws/migrations/0004_s3bucket.py +39 -0
- mojo/apps/aws/migrations/0005_alter_emaildomain_region_delete_s3bucket.py +21 -0
- mojo/apps/aws/models/__init__.py +19 -0
- mojo/apps/aws/models/email_attachment.py +99 -0
- mojo/apps/aws/models/email_domain.py +218 -0
- mojo/apps/aws/models/email_template.py +132 -0
- mojo/apps/aws/models/incoming_email.py +197 -0
- mojo/apps/aws/models/mailbox.py +288 -0
- mojo/apps/aws/models/sent_message.py +175 -0
- mojo/apps/aws/rest/__init__.py +7 -0
- mojo/apps/aws/rest/email.py +33 -0
- mojo/apps/aws/rest/email_ops.py +183 -0
- mojo/apps/aws/rest/messages.py +32 -0
- mojo/apps/aws/rest/s3.py +64 -0
- mojo/apps/aws/rest/send.py +101 -0
- mojo/apps/aws/rest/sns.py +403 -0
- mojo/apps/aws/rest/templates.py +19 -0
- mojo/apps/aws/services/__init__.py +32 -0
- mojo/apps/aws/services/email.py +390 -0
- mojo/apps/aws/services/email_ops.py +548 -0
- mojo/apps/docit/__init__.py +6 -0
- mojo/apps/docit/markdown_plugins/syntax_highlight.py +25 -0
- mojo/apps/docit/markdown_plugins/toc.py +12 -0
- mojo/apps/docit/migrations/0001_initial.py +113 -0
- mojo/apps/docit/migrations/0002_alter_book_modified_by_alter_page_modified_by.py +26 -0
- mojo/apps/docit/migrations/0003_alter_book_group.py +20 -0
- mojo/apps/docit/models/__init__.py +17 -0
- mojo/apps/docit/models/asset.py +231 -0
- mojo/apps/docit/models/book.py +227 -0
- mojo/apps/docit/models/page.py +319 -0
- mojo/apps/docit/models/page_revision.py +203 -0
- mojo/apps/docit/rest/__init__.py +10 -0
- mojo/apps/docit/rest/asset.py +17 -0
- mojo/apps/docit/rest/book.py +22 -0
- mojo/apps/docit/rest/page.py +22 -0
- mojo/apps/docit/rest/page_revision.py +17 -0
- mojo/apps/docit/services/__init__.py +11 -0
- mojo/apps/docit/services/docit.py +315 -0
- mojo/apps/docit/services/markdown.py +44 -0
- mojo/apps/fileman/README.md +8 -8
- mojo/apps/fileman/backends/base.py +76 -70
- mojo/apps/fileman/backends/filesystem.py +86 -86
- mojo/apps/fileman/backends/s3.py +409 -108
- mojo/apps/fileman/migrations/0001_initial.py +106 -0
- mojo/apps/fileman/migrations/0002_filemanager_parent_alter_filemanager_max_file_size.py +24 -0
- mojo/apps/fileman/migrations/0003_remove_file_fileman_fil_upload__c4bc35_idx_and_more.py +25 -0
- mojo/apps/fileman/migrations/0004_remove_file_original_filename_and_more.py +39 -0
- mojo/apps/fileman/migrations/0005_alter_file_upload_token.py +18 -0
- mojo/apps/fileman/migrations/0006_file_download_url_filemanager_forever_urls.py +23 -0
- mojo/apps/fileman/migrations/0007_remove_filemanager_forever_urls_and_more.py +22 -0
- mojo/apps/fileman/migrations/0008_file_category.py +18 -0
- mojo/apps/fileman/migrations/0009_rename_file_path_file_storage_file_path.py +18 -0
- mojo/apps/fileman/migrations/0010_filerendition.py +33 -0
- mojo/apps/fileman/migrations/0011_alter_filerendition_original_file.py +19 -0
- mojo/apps/fileman/models/__init__.py +1 -5
- mojo/apps/fileman/models/file.py +240 -58
- mojo/apps/fileman/models/manager.py +427 -31
- mojo/apps/fileman/models/rendition.py +118 -0
- mojo/apps/fileman/renderer/__init__.py +111 -0
- mojo/apps/fileman/renderer/audio.py +403 -0
- mojo/apps/fileman/renderer/base.py +205 -0
- mojo/apps/fileman/renderer/document.py +404 -0
- mojo/apps/fileman/renderer/image.py +222 -0
- mojo/apps/fileman/renderer/utils.py +297 -0
- mojo/apps/fileman/renderer/video.py +304 -0
- mojo/apps/fileman/rest/__init__.py +1 -18
- mojo/apps/fileman/rest/upload.py +22 -32
- mojo/apps/fileman/signals.py +58 -0
- mojo/apps/fileman/tasks.py +254 -0
- mojo/apps/fileman/utils/__init__.py +40 -16
- mojo/apps/incident/migrations/0005_incidenthistory.py +39 -0
- mojo/apps/incident/migrations/0006_alter_incident_state.py +18 -0
- mojo/apps/incident/migrations/0007_event_uid.py +18 -0
- mojo/apps/incident/migrations/0008_ticket_ticketnote.py +55 -0
- mojo/apps/incident/migrations/0009_incident_status.py +18 -0
- mojo/apps/incident/migrations/0010_event_country_code.py +18 -0
- mojo/apps/incident/migrations/0011_incident_country_code.py +18 -0
- mojo/apps/incident/migrations/0012_alter_incident_status.py +18 -0
- mojo/apps/incident/models/__init__.py +2 -0
- mojo/apps/incident/models/event.py +35 -0
- mojo/apps/incident/models/history.py +36 -0
- mojo/apps/incident/models/incident.py +3 -1
- mojo/apps/incident/models/ticket.py +62 -0
- mojo/apps/incident/reporter.py +21 -1
- mojo/apps/incident/rest/__init__.py +1 -0
- mojo/apps/incident/rest/event.py +7 -1
- mojo/apps/incident/rest/ticket.py +43 -0
- mojo/apps/jobs/__init__.py +489 -0
- mojo/apps/jobs/adapters.py +24 -0
- mojo/apps/jobs/cli.py +616 -0
- mojo/apps/jobs/daemon.py +370 -0
- mojo/apps/jobs/examples/sample_jobs.py +376 -0
- mojo/apps/jobs/examples/webhook_examples.py +203 -0
- mojo/apps/jobs/handlers/__init__.py +5 -0
- mojo/apps/jobs/handlers/webhook.py +317 -0
- mojo/apps/jobs/job_engine.py +734 -0
- mojo/apps/jobs/keys.py +203 -0
- mojo/apps/jobs/local_queue.py +363 -0
- mojo/apps/jobs/management/__init__.py +3 -0
- mojo/apps/jobs/management/commands/__init__.py +3 -0
- mojo/apps/jobs/manager.py +1327 -0
- mojo/apps/jobs/migrations/0001_initial.py +97 -0
- mojo/apps/jobs/migrations/0002_alter_job_max_retries_joblog.py +39 -0
- mojo/apps/jobs/models/__init__.py +6 -0
- mojo/apps/jobs/models/job.py +441 -0
- mojo/apps/jobs/rest/__init__.py +2 -0
- mojo/apps/jobs/rest/control.py +466 -0
- mojo/apps/jobs/rest/jobs.py +421 -0
- mojo/apps/jobs/scheduler.py +571 -0
- mojo/apps/jobs/services/__init__.py +6 -0
- mojo/apps/jobs/services/job_actions.py +465 -0
- mojo/apps/jobs/settings.py +209 -0
- mojo/apps/logit/migrations/0004_alter_log_level.py +18 -0
- mojo/apps/logit/models/log.py +7 -1
- mojo/apps/metrics/__init__.py +8 -1
- mojo/apps/metrics/redis_metrics.py +198 -0
- mojo/apps/metrics/rest/__init__.py +3 -0
- mojo/apps/metrics/rest/categories.py +266 -0
- mojo/apps/metrics/rest/helpers.py +48 -0
- mojo/apps/metrics/rest/permissions.py +99 -0
- mojo/apps/metrics/rest/values.py +277 -0
- mojo/apps/metrics/utils.py +19 -2
- mojo/decorators/auth.py +6 -1
- mojo/decorators/http.py +47 -3
- mojo/helpers/aws/__init__.py +45 -0
- mojo/helpers/aws/ec2.py +804 -0
- mojo/helpers/aws/iam.py +748 -0
- mojo/helpers/aws/inbound_email.py +309 -0
- mojo/helpers/aws/kms.py +413 -0
- mojo/helpers/aws/s3.py +451 -11
- mojo/helpers/aws/ses.py +483 -0
- mojo/helpers/aws/ses_domain.py +959 -0
- mojo/helpers/aws/sns.py +461 -0
- mojo/helpers/crypto/__init__.py +1 -1
- mojo/helpers/crypto/utils.py +15 -0
- mojo/helpers/dates.py +18 -0
- mojo/helpers/location/__init__.py +2 -0
- mojo/helpers/location/countries.py +262 -0
- mojo/helpers/location/geolocation.py +196 -0
- mojo/helpers/logit.py +37 -0
- mojo/helpers/redis/__init__.py +2 -0
- mojo/helpers/redis/adapter.py +606 -0
- mojo/helpers/redis/client.py +48 -0
- mojo/helpers/redis/pool.py +225 -0
- mojo/helpers/request.py +8 -0
- mojo/helpers/response.py +14 -2
- mojo/helpers/settings/__init__.py +2 -0
- mojo/helpers/{settings.py → settings/helper.py} +1 -37
- mojo/helpers/settings/parser.py +132 -0
- mojo/middleware/auth.py +1 -1
- mojo/middleware/cors.py +40 -0
- mojo/middleware/logging.py +131 -12
- mojo/middleware/mojo.py +10 -0
- mojo/models/rest.py +494 -65
- mojo/models/secrets.py +98 -3
- mojo/serializers/__init__.py +106 -0
- mojo/serializers/core/__init__.py +90 -0
- mojo/serializers/core/cache/__init__.py +121 -0
- mojo/serializers/core/cache/backends.py +518 -0
- mojo/serializers/core/cache/base.py +102 -0
- mojo/serializers/core/cache/disabled.py +181 -0
- mojo/serializers/core/cache/memory.py +287 -0
- mojo/serializers/core/cache/redis.py +533 -0
- mojo/serializers/core/cache/utils.py +454 -0
- mojo/serializers/core/manager.py +550 -0
- mojo/serializers/core/serializer.py +475 -0
- mojo/serializers/examples/settings.py +322 -0
- mojo/serializers/formats/csv.py +393 -0
- mojo/serializers/formats/localizers.py +509 -0
- mojo/serializers/{models.py → simple.py} +38 -15
- mojo/serializers/suggested_improvements.md +388 -0
- testit/client.py +1 -1
- testit/helpers.py +35 -4
- testit/runner.py +23 -6
- django_nativemojo-0.1.10.dist-info/METADATA +0 -96
- django_nativemojo-0.1.10.dist-info/RECORD +0 -194
- mojo/apps/metrics/rest/db.py +0 -0
- mojo/apps/notify/README.md +0 -91
- mojo/apps/notify/README_NOTIFICATIONS.md +0 -566
- mojo/apps/notify/admin.py +0 -52
- mojo/apps/notify/handlers/example_handlers.py +0 -516
- mojo/apps/notify/handlers/ses/__init__.py +0 -25
- mojo/apps/notify/handlers/ses/bounce.py +0 -0
- mojo/apps/notify/handlers/ses/complaint.py +0 -25
- mojo/apps/notify/handlers/ses/message.py +0 -86
- mojo/apps/notify/management/commands/__init__.py +0 -1
- mojo/apps/notify/management/commands/process_notifications.py +0 -370
- mojo/apps/notify/mod +0 -0
- mojo/apps/notify/models/__init__.py +0 -12
- mojo/apps/notify/models/account.py +0 -128
- mojo/apps/notify/models/attachment.py +0 -24
- mojo/apps/notify/models/bounce.py +0 -68
- mojo/apps/notify/models/complaint.py +0 -40
- mojo/apps/notify/models/inbox.py +0 -113
- mojo/apps/notify/models/inbox_message.py +0 -173
- mojo/apps/notify/models/outbox.py +0 -129
- mojo/apps/notify/models/outbox_message.py +0 -288
- mojo/apps/notify/models/template.py +0 -30
- mojo/apps/notify/providers/aws.py +0 -73
- mojo/apps/notify/rest/ses.py +0 -0
- mojo/apps/notify/utils/__init__.py +0 -2
- mojo/apps/notify/utils/notifications.py +0 -404
- mojo/apps/notify/utils/parsing.py +0 -202
- mojo/apps/notify/utils/render.py +0 -144
- mojo/apps/tasks/README.md +0 -118
- mojo/apps/tasks/__init__.py +0 -11
- mojo/apps/tasks/manager.py +0 -489
- mojo/apps/tasks/rest/__init__.py +0 -2
- mojo/apps/tasks/rest/hooks.py +0 -0
- mojo/apps/tasks/rest/tasks.py +0 -62
- mojo/apps/tasks/runner.py +0 -174
- mojo/apps/tasks/tq_handlers.py +0 -14
- mojo/helpers/aws/setup_email.py +0 -0
- mojo/helpers/redis.py +0 -10
- mojo/models/meta.py +0 -262
- mojo/ws4redis/README.md +0 -174
- mojo/ws4redis/__init__.py +0 -2
- mojo/ws4redis/client.py +0 -283
- mojo/ws4redis/connection.py +0 -327
- mojo/ws4redis/exceptions.py +0 -32
- mojo/ws4redis/redis.py +0 -183
- mojo/ws4redis/servers/base.py +0 -86
- mojo/ws4redis/servers/django.py +0 -171
- mojo/ws4redis/servers/uwsgi.py +0 -63
- mojo/ws4redis/settings.py +0 -45
- mojo/ws4redis/utf8validator.py +0 -128
- mojo/ws4redis/websocket.py +0 -403
- {django_nativemojo-0.1.10.dist-info → django_nativemojo-0.1.16.dist-info}/LICENSE +0 -0
- {django_nativemojo-0.1.10.dist-info → django_nativemojo-0.1.16.dist-info}/NOTICE +0 -0
- {django_nativemojo-0.1.10.dist-info → django_nativemojo-0.1.16.dist-info}/WHEEL +0 -0
- /mojo/apps/{notify → aws}/__init__.py +0 -0
- /mojo/apps/{notify/handlers → aws/migrations}/__init__.py +0 -0
- /mojo/apps/{notify/management → docit/markdown_plugins}/__init__.py +0 -0
- /mojo/apps/{notify/providers → docit/migrations}/__init__.py +0 -0
- /mojo/apps/{notify/rest → fileman/migrations}/__init__.py +0 -0
- /mojo/{ws4redis/servers → apps/jobs/examples}/__init__.py +0 -0
- /mojo/apps/{fileman/models/render.py → jobs/migrations/__init__.py} +0 -0
- /mojo/{serializers → rest}/openapi.py +0 -0
- /mojo/{apps/fileman/rest/__init__ → serializers/formats/__init__.py} +0 -0
mojo/apps/fileman/backends/s3.py
CHANGED
@@ -6,6 +6,8 @@ from datetime import datetime, timedelta
 import os
 import uuid
 import hashlib
+from urllib.parse import urlparse
+import json
 
 from .base import StorageBackend
 
@@ -14,33 +16,39 @@ class S3StorageBackend(StorageBackend):
     """
     AWS S3 storage backend implementation
     """
-
+
     def __init__(self, file_manager, **kwargs):
         super().__init__(file_manager, **kwargs)
-
+
+        # Parse bucket name and folder path
+        purl = urlparse(file_manager.backend_url)
+        if purl.scheme != "s3":
+            raise ValueError("Invalid scheme for S3 backend")
+        self.bucket_name = purl.netloc
+        self.folder_path = purl.path.lstrip('/')
+
         # S3 configuration
-        self.
-        self.
-        self.
-        self.secret_access_key = self.get_setting('secret_access_key')
+        self.region_name = self.get_setting('aws_region', 'us-east-1')
+        self.access_key_id = self.get_setting('aws_key')
+        self.secret_access_key = self.get_setting('aws_secret')
         self.endpoint_url = self.get_setting('endpoint_url') # For S3-compatible services
         self.signature_version = self.get_setting('signature_version', 's3v4')
         self.addressing_style = self.get_setting('addressing_style', 'auto')
-
+
         # Upload configuration
         self.upload_expires_in = self.get_setting('upload_expires_in', 3600) # 1 hour
         self.download_expires_in = self.get_setting('download_expires_in', 3600) # 1 hour
         self.multipart_threshold = self.get_setting('multipart_threshold', 8 * 1024 * 1024) # 8MB
         self.max_concurrency = self.get_setting('max_concurrency', 10)
-
+
         # Security settings
         self.server_side_encryption = self.get_setting('server_side_encryption')
         self.kms_key_id = self.get_setting('kms_key_id')
-
+
         # Initialize S3 client
         self._client = None
         self._resource = None
-
+
     @property
     def client(self):
         """Lazy initialization of S3 client"""
@@ -50,9 +58,11 @@ class S3StorageBackend(StorageBackend):
                 aws_secret_access_key=self.secret_access_key,
                 region_name=self.region_name
             )
-
+
             config = Config(
                 signature_version=self.signature_version,
+                connect_timeout=3,
+                read_timeout=3,
                 s3={
                     'addressing_style': self.addressing_style
                 },
@@ -61,15 +71,15 @@ class S3StorageBackend(StorageBackend):
                     'mode': 'adaptive'
                 }
             )
-
+
             self._client = session.client(
                 's3',
                 endpoint_url=self.endpoint_url,
                 config=config
             )
-
+
         return self._client
-
+
     @property
     def resource(self):
         """Lazy initialization of S3 resource"""
@@ -79,51 +89,38 @@ class S3StorageBackend(StorageBackend):
                 aws_secret_access_key=self.secret_access_key,
                 region_name=self.region_name
             )
-
+
             self._resource = session.resource(
                 's3',
                 endpoint_url=self.endpoint_url
             )
-
+
         return self._resource
-
-    def save(self, file_obj,
+
+    def save(self, file_obj, file_path: str, content_type: Optional[str] = None, metadata: Optional[dict] = None) -> str:
         """Save a file to S3"""
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            # Add metadata
-            metadata = kwargs.get('metadata', {})
-            if metadata:
-                upload_params['Metadata'] = {k: str(v) for k, v in metadata.items()}
-
-            # Upload the file
-            self.client.put_object(**upload_params)
-
-            return file_path
-
-        except ClientError as e:
-            raise Exception(f"Failed to save file to S3: {e}")
-
+        # Prepare upload parameters
+        upload_params = {
+            'Bucket': self.bucket_name,
+            'Key': file_path,
+            'ContentType': content_type,
+            'Body': file_obj
+        }
+
+        # Add server-side encryption if configured
+        if self.server_side_encryption:
+            upload_params['ServerSideEncryption'] = self.server_side_encryption
+        if self.kms_key_id:
+            upload_params['SSEKMSKeyId'] = self.kms_key_id
+
+        if metadata:
+            upload_params['Metadata'] = metadata
+
+        # Upload the file
+        self.client.put_object(**upload_params)
+
+        return file_path
+
     def delete(self, file_path: str) -> bool:
         """Delete a file from S3"""
         try:
@@ -134,7 +131,23 @@ class S3StorageBackend(StorageBackend):
             return True
         except ClientError:
             return False
-
+
+    def delete_folder(self, folder_path: str) -> bool:
+        """Delete a folder from S3"""
+        # List all objects under the prefix
+        response = self.client.list_objects_v2(Bucket=self.bucket_name, Prefix=folder_path)
+        if 'Contents' not in response:
+            return True # Folder is already empty or doesn't exist
+        # Prepare delete batch
+        objects_to_delete = [{'Key': obj['Key']} for obj in response['Contents']]
+
+        # Delete in batch
+        self.client.delete_objects(
+            Bucket=self.bucket_name,
+            Delete={'Objects': objects_to_delete}
+        )
+        return True
+
     def exists(self, file_path: str) -> bool:
         """Check if a file exists in S3"""
         try:
@@ -145,7 +158,7 @@ class S3StorageBackend(StorageBackend):
             return True
         except ClientError:
             return False
-
+
     def get_file_size(self, file_path: str) -> Optional[int]:
         """Get the size of a file in S3"""
         try:
@@ -156,13 +169,14 @@ class S3StorageBackend(StorageBackend):
             return response['ContentLength']
         except ClientError:
             return None
-
+
     def get_url(self, file_path: str, expires_in: Optional[int] = None) -> str:
-        """Get a
+        """Get a URL to access the file, either public or pre-signed based on expiration"""
         if expires_in is None:
-
-
-
+            # Assume the bucket is public and generate a public URL
+            url = f"https://{self.bucket_name}.s3.amazonaws.com/{file_path}"
+        else:
+            # Generate a pre-signed URL
             url = self.client.generate_presigned_url(
                 'get_object',
                 Params={
@@ -171,33 +185,45 @@ class S3StorageBackend(StorageBackend):
                 },
                 ExpiresIn=expires_in
             )
-
-
-
-
-
-
+        return url
+
+    def supports_direct_upload(self) -> bool:
+        """
+        Check if this backend supports direct uploads
+
+        Returns:
+            bool: True if direct uploads are supported
+        """
+        return True
+
+    def generate_upload_url(self, file_path: str, content_type: str,
+                            file_size: Optional[int] = None,
                             expires_in: int = 3600) -> Dict[str, Any]:
         """Generate a pre-signed URL for direct upload to S3"""
         try:
             # Conditions for the upload
             conditions = []
-
+
             # Content type condition
             if content_type:
                 conditions.append({"Content-Type": content_type})
-
+
             # File size conditions
             if file_size:
                 # Allow some variance in file size (±1KB)
                 conditions.append(["content-length-range", max(0, file_size - 1024), file_size + 1024])
-
+
             # Server-side encryption conditions
             if self.server_side_encryption:
                 conditions.append({"x-amz-server-side-encryption": self.server_side_encryption})
                 if self.kms_key_id:
                     conditions.append({"x-amz-server-side-encryption-aws-kms-key-id": self.kms_key_id})
-
+            else:
+                params = dict(Bucket=self.bucket_name, Key=file_path, ContentType=content_type)
+                return self.client.generate_presigned_url(
+                    'put_object',
+                    ExpiresIn=expires_in,
+                    Params=params)
             # Generate the presigned POST
             response = self.client.generate_presigned_post(
                 Bucket=self.bucket_name,
@@ -208,13 +234,13 @@ class S3StorageBackend(StorageBackend):
                 Conditions=conditions,
                 ExpiresIn=expires_in
             )
-
+
             # Add server-side encryption fields if configured
             if self.server_side_encryption:
                 response['fields']['x-amz-server-side-encryption'] = self.server_side_encryption
                 if self.kms_key_id:
                     response['fields']['x-amz-server-side-encryption-aws-kms-key-id'] = self.kms_key_id
-
+
             return {
                 'upload_url': response['url'],
                 'method': 'POST',
@@ -223,10 +249,10 @@ class S3StorageBackend(StorageBackend):
                     'Content-Type': content_type
                 }
             }
-
+
         except ClientError as e:
             raise Exception(f"Failed to generate upload URL: {e}")
-
+
     def get_file_checksum(self, file_path: str, algorithm: str = 'md5') -> Optional[str]:
         """Get file checksum from S3 metadata or calculate it"""
         try:
@@ -235,52 +261,52 @@ class S3StorageBackend(StorageBackend):
                 Bucket=self.bucket_name,
                 Key=file_path
             )
-
+
             if algorithm.lower() == 'md5':
                 etag = response.get('ETag', '').strip('"')
                 # ETag is MD5 only for non-multipart uploads (no hyphens)
                 if etag and '-' not in etag:
                     return etag
-
+
             # If ETag is not usable, download and calculate checksum
             return super().get_file_checksum(file_path, algorithm)
-
+
         except ClientError:
             return None
-
+
     def open(self, file_path: str, mode: str = 'rb'):
         """Open a file from S3"""
        if 'w' in mode or 'a' in mode:
             raise ValueError("S3 backend only supports read-only file access")
-
+
         try:
             obj = self.resource.Object(self.bucket_name, file_path)
             return obj.get()['Body']
         except ClientError as e:
             raise FileNotFoundError(f"File not found in S3: {e}")
-
+
     def list_files(self, path_prefix: str = "", limit: int = 1000) -> List[str]:
         """List files in S3 with optional path prefix"""
         try:
             paginator = self.client.get_paginator('list_objects_v2')
-
+
             page_iterator = paginator.paginate(
                 Bucket=self.bucket_name,
                 Prefix=path_prefix,
                 PaginationConfig={'MaxItems': limit}
             )
-
+
             files = []
             for page in page_iterator:
                 if 'Contents' in page:
                     for obj in page['Contents']:
                         files.append(obj['Key'])
-
+
             return files
-
+
         except ClientError:
             return []
-
+
     def copy_file(self, source_path: str, dest_path: str) -> bool:
         """Copy a file within S3"""
         try:
@@ -288,24 +314,24 @@ class S3StorageBackend(StorageBackend):
                 'Bucket': self.bucket_name,
                 'Key': source_path
             }
-
+
             self.client.copy_object(
                 CopySource=copy_source,
                 Bucket=self.bucket_name,
                 Key=dest_path
             )
-
+
             return True
-
+
         except ClientError:
             return False
-
+
     def move_file(self, source_path: str, dest_path: str) -> bool:
         """Move a file within S3"""
         if self.copy_file(source_path, dest_path):
             return self.delete(source_path)
         return False
-
+
     def get_file_metadata(self, file_path: str) -> Dict[str, Any]:
         """Get comprehensive metadata for a file in S3"""
         try:
@@ -313,7 +339,7 @@ class S3StorageBackend(StorageBackend):
                 Bucket=self.bucket_name,
                 Key=file_path
             )
-
+
             metadata = {
                 'exists': True,
                 'size': response.get('ContentLength'),
@@ -326,22 +352,22 @@ class S3StorageBackend(StorageBackend):
                 'server_side_encryption': response.get('ServerSideEncryption'),
                 'version_id': response.get('VersionId')
             }
-
+
             return metadata
-
+
         except ClientError:
             return {'exists': False, 'path': file_path}
-
+
     def cleanup_expired_uploads(self, before_date: Optional[datetime] = None):
         """Clean up incomplete multipart uploads"""
         if before_date is None:
             before_date = datetime.now() - timedelta(days=1)
-
+
         try:
             paginator = self.client.get_paginator('list_multipart_uploads')
-
+
             page_iterator = paginator.paginate(Bucket=self.bucket_name)
-
+
             for page in page_iterator:
                 if 'Uploads' in page:
                     for upload in page['Uploads']:
@@ -351,35 +377,37 @@ class S3StorageBackend(StorageBackend):
                             Key=upload['Key'],
                             UploadId=upload['UploadId']
                         )
-
+
         except ClientError:
             pass # Silently ignore cleanup errors
-
+
     def get_available_space(self) -> Optional[int]:
         """S3 has virtually unlimited space"""
         return None
-
+
     def generate_file_path(self, filename: str, group_id: Optional[int] = None) -> str:
         """Generate an S3 key for the file"""
         # Use the base implementation but ensure S3-compatible paths
         path = super().generate_file_path(filename, group_id)
-
+
         # Ensure no leading slash for S3 keys
         return path.lstrip('/')
-
+
+
+
     def validate_configuration(self) -> Tuple[bool, List[str]]:
         """Validate S3 configuration"""
         errors = []
-
+
         if not self.bucket_name:
             errors.append("S3 bucket name is required")
-
+
         if not self.access_key_id:
             errors.append("AWS access key ID is required")
-
+
         if not self.secret_access_key:
             errors.append("AWS secret access key is required")
-
+
         # Test connection if configuration looks valid
         if not errors:
             try:
@@ -394,5 +422,278 @@ class S3StorageBackend(StorageBackend):
                 errors.append(f"Access denied to S3 bucket '{self.bucket_name}'")
             else:
                 errors.append(f"S3 connection error: {e}")
-
-        return len(errors) == 0, errors
+
+        return len(errors) == 0, errors
+
+    def test_connection(self):
+        try:
+            self.client.head_bucket(Bucket=self.bucket_name)
+            return True
+        except NoCredentialsError:
+            raise ValueError("Invalid AWS credentials")
+        except ClientError as e:
+            error_code = e.response['Error']['Code']
+            if error_code == '404':
+                raise ValueError(f"S3 bucket '{self.bucket_name}' does not exist")
+            elif error_code == '403':
+                raise ValueError(f"Access denied to S3 bucket '{self.bucket_name}'")
+            else:
+                raise ValueError(f"S3 connection error: {e}")
+
+    def make_path_public(self):
+        # Get the current bucket policy (if any)
+        try:
+            current_policy = json.loads(self.client.get_bucket_policy(Bucket=self.bucket_name)["Policy"])
+            statements = current_policy.get("Statement", [])
+        except self.client.exceptions.from_code('NoSuchBucketPolicy'):
+            current_policy = {"Version": "2012-10-17", "Statement": []}
+            statements = []
+
+        # Check if our public-read rule for the prefix already exists
+        public_read_sid = f"AllowPublicReadForPrefix_{self.folder_path.replace('/', '_')}"
+        already_exists = any(stmt.get("Sid") == public_read_sid for stmt in statements)
+
+        if already_exists:
+            return
+
+        # Construct the public read statement for the given prefix
+        new_statement = {
+            "Sid": public_read_sid,
+            "Effect": "Allow",
+            "Principal": "*",
+            "Action": "s3:GetObject",
+            "Resource": f"arn:aws:s3:::{self.bucket_name}/{self.folder_path}*"
+        }
+
+        # Add and apply the new policy
+        current_policy["Statement"].append(new_statement)
+        self.client.put_bucket_policy(
+            Bucket=self.bucket_name,
+            Policy=json.dumps(current_policy))
+
+    def make_path_private(self):
+        # Get the current bucket policy (if any)
+        try:
+            current_policy = json.loads(self.client.get_bucket_policy(Bucket=self.bucket_name)["Policy"])
+            statements = current_policy.get("Statement", [])
+        except self.client.exceptions.from_code('NoSuchBucketPolicy'):
+            current_policy = {"Version": "2012-10-17", "Statement": []}
+            statements = []
+
+        # Check if our public-read rule for the prefix exists
+        public_read_sid = f"AllowPublicReadForPrefix_{self.folder_path.replace('/', '_')}"
+        exists = any(stmt.get("Sid") == public_read_sid for stmt in statements)
+
+        if not exists:
+            return
+
+        # Remove the public read statement for the given prefix
+        statements = [stmt for stmt in statements if stmt.get("Sid") != public_read_sid]
+
+        # Apply the updated policy
+        current_policy["Statement"] = statements
+        self.client.put_bucket_policy(
+            Bucket=self.bucket_name,
+            Policy=json.dumps(current_policy))
+
+    def download(self, file_path: str, local_path: str) -> None:
+        """Download a file from S3 to a local path"""
+        try:
+            with open(local_path, 'wb') as local_file:
+                self.client.download_fileobj(self.bucket_name, file_path, local_file)
+        except ClientError as e:
+            raise Exception(f"Failed to download file from S3: {e}")
+
+    # -------------------------------
+    # CORS MANAGEMENT FOR DIRECT UPLOADS
+    # -------------------------------
+    def get_cors_configuration(self) -> Optional[Dict[str, Any]]:
+        """
+        Return the current CORS configuration for the bucket, or None if not set.
+        """
+        try:
+            resp = self.client.get_bucket_cors(Bucket=self.bucket_name)
+            return resp
+        except ClientError as e:
+            if e.response.get("Error", {}).get("Code") == "NoSuchCORSConfiguration":
+                return None
+            raise
+
+    def _default_direct_upload_cors_rule(
+        self,
+        allowed_origins: List[str],
+        allowed_methods: Optional[List[str]] = None,
+        allowed_headers: Optional[List[str]] = None,
+        expose_headers: Optional[List[str]] = None,
+        max_age_seconds: int = 3000,
+    ) -> Dict[str, Any]:
+        """
+        Build a single CORS rule suitable for direct uploads via pre-signed PUT/POST.
+        Note: S3 CORS applies at the bucket level, not per-prefix. Access is still
+        enforced by IAM policies and the fact that we use pre-signed URLs.
+        """
+        if not allowed_origins or not any(str(o).strip() for o in allowed_origins):
+            raise ValueError("allowed_origins must contain at least one origin")
+        methods = allowed_methods or ["PUT", "HEAD"]
+        headers = allowed_headers or ["*"] # simplest and safest for signed uploads
+        expose = expose_headers or ["ETag", "x-amz-request-id", "x-amz-id-2", "x-amz-version-id"]
+
+        return {
+            "CORSRules": [
+                {
+                    "AllowedOrigins": allowed_origins,
+                    "AllowedMethods": methods,
+                    "AllowedHeaders": headers,
+                    "ExposeHeaders": expose,
+                    "MaxAgeSeconds": max_age_seconds,
+                }
+            ]
+        }
+
+    def check_cors_configuration_for_direct_upload(
+        self,
+        allowed_origins: List[str],
+        required_methods: Optional[List[str]] = None,
+        required_headers: Optional[List[str]] = None,
+    ) -> Tuple[bool, List[str], Optional[Dict[str, Any]]]:
+        """
+        Validate current CORS config can support direct uploads from the given origins.
+
+        Returns:
+            (ok, issues, current_config)
+        """
+        if not allowed_origins or not any(str(o).strip() for o in allowed_origins):
+            raise ValueError("allowed_origins must contain at least one origin")
+        issues: List[str] = []
+        config = self.get_cors_configuration()
+        if config is None:
+            return False, ["No CORS configuration set on this bucket."], None
+
+        if required_methods is None:
+            required_methods = ["POST", "HEAD"] if self.server_side_encryption else ["PUT", "HEAD"]
+        required_methods = [m.upper() for m in required_methods]
+        # For PUT we often need Content-Type. For POST, headers are not required (fields are in the form).
+        # Using "*" for AllowedHeaders is the simplest and reduces edge cases.
+        if required_headers is None:
+            required_headers = [] if self.server_side_encryption else ["content-type"]
+        required_headers = [h.lower() for h in required_headers]
+
+        # Flatten rules
+        cors_rules: List[Dict[str, Any]] = config.get("CORSRules", [])
+
+        def origin_is_covered(origin: str) -> bool:
+            for rule in cors_rules:
+                origins = rule.get("AllowedOrigins", [])
+                if "*" in origins or origin in origins:
+                    # methods
+                    methods = [m.upper() for m in rule.get("AllowedMethods", [])]
+                    if not all(m in methods for m in required_methods):
+                        continue
+                    # headers
+                    hdrs = [h.lower() for h in rule.get("AllowedHeaders", [])]
+                    if "*" in hdrs:
+                        return True
+                    if not all(h in hdrs for h in required_headers):
+                        continue
+                    return True
+            return False
+
+        for origin in allowed_origins:
+            if not origin_is_covered(origin):
+                issues.append(f"Origin not covered for direct upload: {origin}")
+
+        return (len(issues) == 0), issues, config
+
+    def update_cors_configuration_for_direct_upload(
+        self,
+        allowed_origins: List[str],
+        allowed_methods: Optional[List[str]] = None,
+        allowed_headers: Optional[List[str]] = None,
+        expose_headers: Optional[List[str]] = None,
+        max_age_seconds: int = 3000,
+        merge: bool = True,
+    ) -> Dict[str, Any]:
+        """
+        Ensure CORS allows direct uploads from allowed_origins.
+        If merge=True, append our rule to any existing rules instead of replacing.
+        """
+        # Validate input
+        if not allowed_origins or not any(str(o).strip() for o in allowed_origins):
+            raise ValueError("allowed_origins must contain at least one origin")
+        # If current config already satisfies requirements, do nothing
+        ok, issues, current = self.check_cors_configuration_for_direct_upload(
+            allowed_origins=allowed_origins,
+            required_methods=allowed_methods,
+            required_headers=allowed_headers,
+        )
+        if ok:
+            return {
+                "changed": False,
+                "message": "Existing CORS configuration already supports direct uploads.",
+                "issues": [],
+                "applied_configuration": current,
+            }
+
+        new_rule_config = self._default_direct_upload_cors_rule(
+            allowed_origins=allowed_origins,
+            allowed_methods=allowed_methods or (["POST", "HEAD"] if self.server_side_encryption else ["PUT", "HEAD"]),
+            allowed_headers=allowed_headers or ["*"],
+            expose_headers=expose_headers,
+            max_age_seconds=max_age_seconds,
+        )
+
+        if merge and current:
+            merged = dict(CORSRules=[*current.get("CORSRules", []), *new_rule_config["CORSRules"]])
+            self.client.put_bucket_cors(Bucket=self.bucket_name, CORSConfiguration=merged)
+            applied = merged
+        else:
+            # Replace entirely with our single rule
+            self.client.put_bucket_cors(Bucket=self.bucket_name, CORSConfiguration=new_rule_config)
+            applied = new_rule_config
+
+        return {
+            "changed": True,
+            "message": "CORS configuration updated to support direct uploads.",
+            "issues": issues,
+            "applied_configuration": applied,
+        }
+
+    def ensure_cors_for_direct_upload(
+        self,
+        allowed_origins: List[str],
+        allowed_methods: Optional[List[str]] = None,
+        allowed_headers: Optional[List[str]] = None,
+        expose_headers: Optional[List[str]] = None,
+        max_age_seconds: int = 3000,
+        merge: bool = True,
+    ) -> Dict[str, Any]:
+        """
+        Convenience wrapper that checks and updates CORS as needed.
+
+        Example:
+            backend.ensure_cors_for_direct_upload(
+                ["http://localhost:3000", "https://your-prod-domain.com"]
+            )
+        """
+        if not allowed_origins or not any(str(o).strip() for o in allowed_origins):
+            raise ValueError("allowed_origins must contain at least one origin")
+        result = self.update_cors_configuration_for_direct_upload(
+            allowed_origins=allowed_origins,
+            allowed_methods=allowed_methods,
+            allowed_headers=allowed_headers,
+            expose_headers=expose_headers,
+            max_age_seconds=max_age_seconds,
+            merge=merge,
+        )
+        # Re-check to confirm
+        ok, issues, current = self.check_cors_configuration_for_direct_upload(
+            allowed_origins=allowed_origins,
+            required_methods=allowed_methods,
+            required_headers=allowed_headers,
+        )
+        result.update({
+            "verified": ok,
+            "post_update_issues": issues,
+            "current_configuration": current,
+        })
+        return result