django-nativemojo 0.1.15__py3-none-any.whl → 0.1.17__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {django_nativemojo-0.1.15.dist-info → django_nativemojo-0.1.17.dist-info}/METADATA +3 -2
- django_nativemojo-0.1.17.dist-info/RECORD +302 -0
- mojo/__init__.py +1 -1
- mojo/apps/account/management/commands/serializer_admin.py +121 -1
- mojo/apps/account/migrations/0006_add_device_tracking_models.py +72 -0
- mojo/apps/account/migrations/0007_delete_userdevicelocation.py +16 -0
- mojo/apps/account/migrations/0008_userdevicelocation.py +33 -0
- mojo/apps/account/migrations/0009_geolocatedip_subnet.py +18 -0
- mojo/apps/account/migrations/0010_group_avatar.py +20 -0
- mojo/apps/account/migrations/0011_user_org_registereddevice_pushconfig_and_more.py +118 -0
- mojo/apps/account/migrations/0012_remove_pushconfig_apns_key_file_and_more.py +21 -0
- mojo/apps/account/migrations/0013_pushconfig_test_mode_alter_pushconfig_apns_enabled_and_more.py +28 -0
- mojo/apps/account/migrations/0014_notificationdelivery_data_payload_and_more.py +48 -0
- mojo/apps/account/models/__init__.py +2 -0
- mojo/apps/account/models/device.py +279 -0
- mojo/apps/account/models/group.py +294 -8
- mojo/apps/account/models/member.py +14 -1
- mojo/apps/account/models/push/__init__.py +4 -0
- mojo/apps/account/models/push/config.py +112 -0
- mojo/apps/account/models/push/delivery.py +93 -0
- mojo/apps/account/models/push/device.py +66 -0
- mojo/apps/account/models/push/template.py +99 -0
- mojo/apps/account/models/user.py +190 -17
- mojo/apps/account/rest/__init__.py +2 -0
- mojo/apps/account/rest/device.py +39 -0
- mojo/apps/account/rest/group.py +8 -0
- mojo/apps/account/rest/push.py +187 -0
- mojo/apps/account/rest/user.py +95 -5
- mojo/apps/account/services/__init__.py +1 -0
- mojo/apps/account/services/push.py +363 -0
- mojo/apps/aws/migrations/0001_initial.py +206 -0
- mojo/apps/aws/migrations/0002_emaildomain_can_recv_emaildomain_can_send_and_more.py +28 -0
- mojo/apps/aws/migrations/0003_mailbox_is_domain_default_mailbox_is_system_default_and_more.py +31 -0
- mojo/apps/aws/migrations/0004_s3bucket.py +39 -0
- mojo/apps/aws/migrations/0005_alter_emaildomain_region_delete_s3bucket.py +21 -0
- mojo/apps/aws/models/__init__.py +19 -0
- mojo/apps/aws/models/email_attachment.py +99 -0
- mojo/apps/aws/models/email_domain.py +218 -0
- mojo/apps/aws/models/email_template.py +132 -0
- mojo/apps/aws/models/incoming_email.py +197 -0
- mojo/apps/aws/models/mailbox.py +288 -0
- mojo/apps/aws/models/sent_message.py +175 -0
- mojo/apps/aws/rest/__init__.py +6 -0
- mojo/apps/aws/rest/email.py +33 -0
- mojo/apps/aws/rest/email_ops.py +183 -0
- mojo/apps/aws/rest/messages.py +32 -0
- mojo/apps/aws/rest/send.py +101 -0
- mojo/apps/aws/rest/sns.py +403 -0
- mojo/apps/aws/rest/templates.py +19 -0
- mojo/apps/aws/services/__init__.py +32 -0
- mojo/apps/aws/services/email.py +390 -0
- mojo/apps/aws/services/email_ops.py +548 -0
- mojo/apps/docit/__init__.py +6 -0
- mojo/apps/docit/markdown_plugins/syntax_highlight.py +25 -0
- mojo/apps/docit/markdown_plugins/toc.py +12 -0
- mojo/apps/docit/migrations/0001_initial.py +113 -0
- mojo/apps/docit/migrations/0002_alter_book_modified_by_alter_page_modified_by.py +26 -0
- mojo/apps/docit/migrations/0003_alter_book_group.py +20 -0
- mojo/apps/docit/models/__init__.py +17 -0
- mojo/apps/docit/models/asset.py +231 -0
- mojo/apps/docit/models/book.py +227 -0
- mojo/apps/docit/models/page.py +319 -0
- mojo/apps/docit/models/page_revision.py +203 -0
- mojo/apps/docit/rest/__init__.py +10 -0
- mojo/apps/docit/rest/asset.py +17 -0
- mojo/apps/docit/rest/book.py +22 -0
- mojo/apps/docit/rest/page.py +22 -0
- mojo/apps/docit/rest/page_revision.py +17 -0
- mojo/apps/docit/services/__init__.py +11 -0
- mojo/apps/docit/services/docit.py +315 -0
- mojo/apps/docit/services/markdown.py +44 -0
- mojo/apps/fileman/backends/s3.py +209 -0
- mojo/apps/fileman/models/file.py +45 -9
- mojo/apps/fileman/models/manager.py +269 -3
- mojo/apps/incident/migrations/0007_event_uid.py +18 -0
- mojo/apps/incident/migrations/0008_ticket_ticketnote.py +55 -0
- mojo/apps/incident/migrations/0009_incident_status.py +18 -0
- mojo/apps/incident/migrations/0010_event_country_code.py +18 -0
- mojo/apps/incident/migrations/0011_incident_country_code.py +18 -0
- mojo/apps/incident/migrations/0012_alter_incident_status.py +18 -0
- mojo/apps/incident/models/__init__.py +1 -0
- mojo/apps/incident/models/event.py +35 -0
- mojo/apps/incident/models/incident.py +2 -0
- mojo/apps/incident/models/ticket.py +62 -0
- mojo/apps/incident/reporter.py +21 -3
- mojo/apps/incident/rest/__init__.py +1 -0
- mojo/apps/incident/rest/ticket.py +43 -0
- mojo/apps/jobs/__init__.py +489 -0
- mojo/apps/jobs/adapters.py +24 -0
- mojo/apps/jobs/cli.py +616 -0
- mojo/apps/jobs/daemon.py +370 -0
- mojo/apps/jobs/examples/sample_jobs.py +376 -0
- mojo/apps/jobs/examples/webhook_examples.py +203 -0
- mojo/apps/jobs/handlers/__init__.py +5 -0
- mojo/apps/jobs/handlers/webhook.py +317 -0
- mojo/apps/jobs/job_engine.py +734 -0
- mojo/apps/jobs/keys.py +203 -0
- mojo/apps/jobs/local_queue.py +363 -0
- mojo/apps/jobs/management/__init__.py +3 -0
- mojo/apps/jobs/management/commands/__init__.py +3 -0
- mojo/apps/jobs/manager.py +1327 -0
- mojo/apps/jobs/migrations/0001_initial.py +97 -0
- mojo/apps/jobs/migrations/0002_alter_job_max_retries_joblog.py +39 -0
- mojo/apps/jobs/models/__init__.py +6 -0
- mojo/apps/jobs/models/job.py +441 -0
- mojo/apps/jobs/rest/__init__.py +2 -0
- mojo/apps/jobs/rest/control.py +466 -0
- mojo/apps/jobs/rest/jobs.py +421 -0
- mojo/apps/jobs/scheduler.py +571 -0
- mojo/apps/jobs/services/__init__.py +6 -0
- mojo/apps/jobs/services/job_actions.py +465 -0
- mojo/apps/jobs/settings.py +209 -0
- mojo/apps/logit/models/log.py +3 -0
- mojo/apps/metrics/__init__.py +8 -1
- mojo/apps/metrics/redis_metrics.py +198 -0
- mojo/apps/metrics/rest/__init__.py +3 -0
- mojo/apps/metrics/rest/categories.py +266 -0
- mojo/apps/metrics/rest/helpers.py +48 -0
- mojo/apps/metrics/rest/permissions.py +99 -0
- mojo/apps/metrics/rest/values.py +277 -0
- mojo/apps/metrics/utils.py +17 -0
- mojo/decorators/http.py +40 -1
- mojo/helpers/aws/__init__.py +11 -7
- mojo/helpers/aws/inbound_email.py +309 -0
- mojo/helpers/aws/kms.py +413 -0
- mojo/helpers/aws/ses_domain.py +959 -0
- mojo/helpers/crypto/__init__.py +1 -1
- mojo/helpers/crypto/utils.py +15 -0
- mojo/helpers/location/__init__.py +2 -0
- mojo/helpers/location/countries.py +262 -0
- mojo/helpers/location/geolocation.py +196 -0
- mojo/helpers/logit.py +37 -0
- mojo/helpers/redis/__init__.py +2 -0
- mojo/helpers/redis/adapter.py +606 -0
- mojo/helpers/redis/client.py +48 -0
- mojo/helpers/redis/pool.py +225 -0
- mojo/helpers/request.py +8 -0
- mojo/helpers/response.py +8 -0
- mojo/middleware/auth.py +1 -1
- mojo/middleware/cors.py +40 -0
- mojo/middleware/logging.py +131 -12
- mojo/middleware/mojo.py +5 -0
- mojo/models/rest.py +271 -57
- mojo/models/secrets.py +86 -0
- mojo/serializers/__init__.py +16 -10
- mojo/serializers/core/__init__.py +90 -0
- mojo/serializers/core/cache/__init__.py +121 -0
- mojo/serializers/core/cache/backends.py +518 -0
- mojo/serializers/core/cache/base.py +102 -0
- mojo/serializers/core/cache/disabled.py +181 -0
- mojo/serializers/core/cache/memory.py +287 -0
- mojo/serializers/core/cache/redis.py +533 -0
- mojo/serializers/core/cache/utils.py +454 -0
- mojo/serializers/{manager.py → core/manager.py} +53 -4
- mojo/serializers/core/serializer.py +475 -0
- mojo/serializers/{advanced/formats → formats}/csv.py +116 -139
- mojo/serializers/suggested_improvements.md +388 -0
- testit/client.py +1 -1
- testit/helpers.py +14 -0
- testit/runner.py +23 -6
- django_nativemojo-0.1.15.dist-info/RECORD +0 -234
- mojo/apps/notify/README.md +0 -91
- mojo/apps/notify/README_NOTIFICATIONS.md +0 -566
- mojo/apps/notify/admin.py +0 -52
- mojo/apps/notify/handlers/example_handlers.py +0 -516
- mojo/apps/notify/handlers/ses/__init__.py +0 -25
- mojo/apps/notify/handlers/ses/complaint.py +0 -25
- mojo/apps/notify/handlers/ses/message.py +0 -86
- mojo/apps/notify/management/commands/__init__.py +0 -1
- mojo/apps/notify/management/commands/process_notifications.py +0 -370
- mojo/apps/notify/mod +0 -0
- mojo/apps/notify/models/__init__.py +0 -12
- mojo/apps/notify/models/account.py +0 -128
- mojo/apps/notify/models/attachment.py +0 -24
- mojo/apps/notify/models/bounce.py +0 -68
- mojo/apps/notify/models/complaint.py +0 -40
- mojo/apps/notify/models/inbox.py +0 -113
- mojo/apps/notify/models/inbox_message.py +0 -173
- mojo/apps/notify/models/outbox.py +0 -129
- mojo/apps/notify/models/outbox_message.py +0 -288
- mojo/apps/notify/models/template.py +0 -30
- mojo/apps/notify/providers/aws.py +0 -73
- mojo/apps/notify/rest/ses.py +0 -0
- mojo/apps/notify/utils/__init__.py +0 -2
- mojo/apps/notify/utils/notifications.py +0 -404
- mojo/apps/notify/utils/parsing.py +0 -202
- mojo/apps/notify/utils/render.py +0 -144
- mojo/apps/tasks/README.md +0 -118
- mojo/apps/tasks/__init__.py +0 -44
- mojo/apps/tasks/manager.py +0 -644
- mojo/apps/tasks/rest/__init__.py +0 -2
- mojo/apps/tasks/rest/hooks.py +0 -0
- mojo/apps/tasks/rest/tasks.py +0 -76
- mojo/apps/tasks/runner.py +0 -439
- mojo/apps/tasks/task.py +0 -99
- mojo/apps/tasks/tq_handlers.py +0 -132
- mojo/helpers/crypto/__pycache__/hash.cpython-310.pyc +0 -0
- mojo/helpers/crypto/__pycache__/sign.cpython-310.pyc +0 -0
- mojo/helpers/crypto/__pycache__/utils.cpython-310.pyc +0 -0
- mojo/helpers/redis.py +0 -10
- mojo/models/meta.py +0 -262
- mojo/serializers/advanced/README.md +0 -363
- mojo/serializers/advanced/__init__.py +0 -247
- mojo/serializers/advanced/formats/__init__.py +0 -28
- mojo/serializers/advanced/formats/excel.py +0 -516
- mojo/serializers/advanced/formats/json.py +0 -239
- mojo/serializers/advanced/formats/response.py +0 -485
- mojo/serializers/advanced/serializer.py +0 -568
- mojo/serializers/optimized.py +0 -618
- {django_nativemojo-0.1.15.dist-info → django_nativemojo-0.1.17.dist-info}/LICENSE +0 -0
- {django_nativemojo-0.1.15.dist-info → django_nativemojo-0.1.17.dist-info}/NOTICE +0 -0
- {django_nativemojo-0.1.15.dist-info → django_nativemojo-0.1.17.dist-info}/WHEEL +0 -0
- /mojo/apps/{notify → aws/migrations}/__init__.py +0 -0
- /mojo/apps/{notify/handlers → docit/markdown_plugins}/__init__.py +0 -0
- /mojo/apps/{notify/management → docit/migrations}/__init__.py +0 -0
- /mojo/apps/{notify/providers → jobs/examples}/__init__.py +0 -0
- /mojo/apps/{notify/rest → jobs/migrations}/__init__.py +0 -0
- /mojo/{serializers → rest}/openapi.py +0 -0
- /mojo/serializers/{settings_example.py → examples/settings.py} +0 -0
- /mojo/{apps/notify/handlers/ses/bounce.py → serializers/formats/__init__.py} +0 -0
- /mojo/serializers/{advanced/formats → formats}/localizers.py +0 -0
@@ -0,0 +1,315 @@
|
|
1
|
+
from mojo.helpers import logit
|
2
|
+
from ..models import Book, Page, PageRevision, Asset
|
3
|
+
|
4
|
+
|
5
|
+
class DocItService:
    """
    Business logic service for DocIt operations

    Handles complex operations that span multiple models or contain
    business logic that doesn't belong in individual model methods.
    """

    @staticmethod
    def create_book_with_homepage(title, description, group, user, homepage_title="Home"):
        """
        Create a new book with an initial homepage

        This is a common pattern where new books should have at least one page

        Returns:
            (book, homepage) tuple of the created objects.
        Raises:
            Any exception from the ORM; the failure is logged before re-raising.
        """
        try:
            # Create the book
            book = Book.objects.create(
                title=title,
                description=description,
                group=group,
                user=user,
                created_by=user,
                modified_by=user
            )

            # Create the homepage
            homepage = Page.objects.create(
                book=book,
                title=homepage_title,
                content=f"# {homepage_title}\n\nWelcome to {title}.",
                order_priority=1000,  # High priority to appear first
                user=user,
                created_by=user,
                modified_by=user
            )

            # Create initial revision
            homepage.create_revision(
                user=user,
                change_summary="Initial page creation"
            )

            logit.info(f"Created new book '{title}' with homepage for user {user.username}")

            return book, homepage

        except Exception as e:
            logit.error(f"Failed to create book '{title}': {str(e)}")
            raise

    @staticmethod
    def move_page(page, new_parent=None, new_position=None):
        """
        Move a page to a new location in the hierarchy

        Handles validation and maintains data integrity.

        Raises:
            ValueError: when the new parent is in a different book or the
                move would create a circular reference.
        """
        try:
            old_parent = page.parent
            old_path = page.full_path

            # Validate the move
            if new_parent and new_parent.book != page.book:
                raise ValueError(f"Cannot move page to a different book: from '{page.book.id}' to '{new_parent.book.id}'")

            if new_parent and page._would_create_cycle(new_parent):
                raise ValueError("Move would create circular reference")

            # Update the page
            page.parent = new_parent

            if new_position is not None:
                page.order_priority = new_position

            page.save()

            new_path = page.full_path
            logit.info(f"Moved page '{page.title}' from '{old_path}' to '{new_path}'")

            return page

        except Exception as e:
            logit.error(f"Failed to move page '{page.title}': {str(e)}")
            raise

    @staticmethod
    def duplicate_page(page, new_title, new_parent=None, include_children=False, user=None):
        """
        Create a duplicate of a page, optionally with its children

        The duplicate starts unpublished and records an initial revision.
        When include_children is True, descendants are copied recursively
        under the new page.
        """
        try:
            # Create the duplicate
            duplicate = Page.objects.create(
                book=page.book,
                parent=new_parent or page.parent,
                title=new_title,
                content=page.content,
                order_priority=page.order_priority,
                metadata=page.metadata.copy(),
                is_published=False,  # Start as draft
                user=page.user,
                created_by=user or page.created_by,
                modified_by=user or page.modified_by
            )

            # Create initial revision for the duplicate
            duplicate.create_revision(
                user=user or page.created_by,
                change_summary=f"Duplicated from '{page.title}'"
            )

            # Duplicate children if requested
            if include_children:
                for child in page.get_all_children(include_unpublished=True):
                    DocItService.duplicate_page(
                        page=child,
                        new_title=child.title,
                        new_parent=duplicate,
                        include_children=True,  # Recursive
                        user=user
                    )

            logit.info(f"Duplicated page '{page.title}' as '{new_title}' (children: {include_children})")

            return duplicate

        except Exception as e:
            logit.error(f"Failed to duplicate page '{page.title}': {str(e)}")
            raise

    @staticmethod
    def bulk_update_page_status(pages, is_published, user):
        """
        Bulk update publication status for multiple pages

        Only pages whose status actually changes are saved.

        Returns:
            int: number of pages updated.
        """
        try:
            updated_count = 0

            for page in pages:
                if page.is_published != is_published:
                    page.is_published = is_published
                    page.modified_by = user
                    page.save()
                    updated_count += 1

            status = "published" if is_published else "unpublished"
            logit.info(f"Bulk updated {updated_count} pages to {status} by {user.username}")

            return updated_count

        except Exception as e:
            logit.error(f"Failed to bulk update page status: {str(e)}")
            raise

    @staticmethod
    def get_book_structure(book, include_unpublished=False):
        """
        Get the complete hierarchical structure of a book

        Returns a nested dictionary representing the page tree
        """
        def build_tree(pages, parent_id=None):
            # Build children lists by matching each page's parent_id.
            tree = []
            for page in pages:
                if page.parent_id == parent_id:
                    page_data = {
                        'id': page.id,
                        'title': page.title,
                        'slug': page.slug,
                        'is_published': page.is_published,
                        'order_priority': page.order_priority,
                        'children': build_tree(pages, page.id)
                    }
                    tree.append(page_data)
            return tree

        try:
            queryset = book.pages.all()
            if not include_unpublished:
                queryset = queryset.filter(is_published=True)

            # Single query; the tree is assembled in memory.
            pages = list(queryset.order_by('-order_priority', 'title'))
            structure = build_tree(pages)

            logit.debug(f"Generated structure for book '{book.title}' with {len(pages)} pages")

            return structure

        except Exception as e:
            logit.error(f"Failed to get book structure for '{book.title}': {str(e)}")
            raise

    @staticmethod
    def organize_assets(book, asset_ids_in_order):
        """
        Reorder assets within a book based on provided ID list

        IDs earlier in the list receive higher priority. Unknown IDs are
        ignored; only assets whose priority actually changes are saved.

        Returns:
            int: number of assets updated.
        """
        try:
            assets = Asset.objects.filter(book=book, id__in=asset_ids_in_order)

            updated_count = 0
            for index, asset_id in enumerate(asset_ids_in_order):
                asset = assets.filter(id=asset_id).first()
                if asset:
                    new_priority = len(asset_ids_in_order) - index  # Higher index = higher priority
                    if asset.order_priority != new_priority:
                        asset.order_priority = new_priority
                        asset.save()
                        updated_count += 1

            logit.info(f"Reorganized {updated_count} assets in book '{book.title}'")

            return updated_count

        except Exception as e:
            logit.error(f"Failed to organize assets for book '{book.title}': {str(e)}")
            raise

    @staticmethod
    def cleanup_orphaned_revisions(max_revisions_per_page=50):
        """
        Clean up old revisions across all pages to prevent database bloat

        Returns:
            int: total number of revisions removed.
        """
        try:
            total_cleaned = 0

            # Get all pages that have more than the max revisions
            for page in Page.objects.all():
                revision_count = page.revisions.count()

                if revision_count > max_revisions_per_page:
                    cleaned = PageRevision.cleanup_old_revisions(page, max_revisions_per_page)
                    total_cleaned += cleaned

            if total_cleaned > 0:
                logit.info(f"Cleaned up {total_cleaned} old page revisions")

            return total_cleaned

        except Exception as e:
            logit.error(f"Failed to cleanup orphaned revisions: {str(e)}")
            raise

    @staticmethod
    def get_book_statistics(book):
        """
        Get comprehensive statistics for a book

        Returns:
            dict with page/asset/revision counts and the maximum page depth.
        """
        try:
            stats = {
                'total_pages': book.get_page_count(),
                'published_pages': book.pages.filter(is_published=True).count(),
                'draft_pages': book.pages.filter(is_published=False).count(),
                'total_assets': book.get_asset_count(),
                'image_assets': book.assets.filter(file__category='image').count(),
                'document_assets': book.assets.filter(file__category='document').count(),
                'total_revisions': PageRevision.objects.filter(page__book=book).count(),
                'root_pages': book.get_root_pages(published_only=False).count(),
                'max_depth': 0
            }

            # Calculate maximum page depth
            for page in book.pages.all():
                depth = page.get_depth()
                if depth > stats['max_depth']:
                    stats['max_depth'] = depth

            logit.debug(f"Generated statistics for book '{book.title}'")

            return stats

        except Exception as e:
            logit.error(f"Failed to get statistics for book '{book.title}': {str(e)}")
            raise

    @staticmethod
    def validate_book_integrity(book):
        """
        Validate the integrity of a book and its pages

        Returns a list of issues found
        """
        issues = []

        try:
            # Check for circular references in page hierarchy
            for page in book.pages.all():
                try:
                    _ = page.full_path  # This will fail if there's a cycle
                except RecursionError:
                    issues.append(f"Circular reference detected in page hierarchy: {page.title}")

            # Check for orphaned assets (assets without files)
            orphaned_assets = book.assets.filter(file__isnull=True)
            if orphaned_assets.exists():
                issues.append(f"Found {orphaned_assets.count()} orphaned assets without files")

            # Check for pages with same slug.
            # BUGFIX: values_list returns a QuerySet whose .count() takes no
            # arguments, so the previous ``slugs.count(slug)`` raised
            # TypeError. Materialize to a plain list so list.count(value)
            # works (and avoids one DB hit per slug).
            slugs = list(book.pages.values_list('slug', flat=True))
            duplicate_slugs = [slug for slug in set(slugs) if slugs.count(slug) > 1]
            if duplicate_slugs:
                issues.append(f"Duplicate page slugs found: {duplicate_slugs}")

            logit.info(f"Book integrity check for '{book.title}' found {len(issues)} issues")

            return issues

        except Exception as e:
            logit.error(f"Failed to validate book integrity for '{book.title}': {str(e)}")
            raise
@@ -0,0 +1,44 @@
|
|
1
|
+
import mistune
|
2
|
+
from pygments import highlight
|
3
|
+
from pygments.lexers import get_lexer_by_name
|
4
|
+
from pygments.styles import get_style_by_name
|
5
|
+
from pygments.formatters import HtmlFormatter
|
6
|
+
|
7
|
+
|
8
|
+
class HighlightRenderer(mistune.HTMLRenderer):
    """Mistune HTML renderer that syntax-highlights fenced code via Pygments."""

    def block_code(self, code, info=None):
        """
        Render a fenced code block.

        ``info`` is the raw fence info string (e.g. ``python`` or
        ``python title=x``); only its first token is used as the lexer name.
        Unknown or missing languages fall back to an escaped <pre> block
        instead of raising, so one bad fence cannot break page rendering.
        """
        from pygments.util import ClassNotFound  # local: keeps top-level imports unchanged

        if not info:
            return f'\n<pre>{mistune.escape(code)}</pre>\n'
        # The fence info may carry extra tokens after the language name.
        lang = info.strip().split(None, 1)[0]
        try:
            lexer = get_lexer_by_name(lang, stripall=True)
        except ClassNotFound:
            # BUGFIX: get_lexer_by_name raises for unknown languages; the
            # original let that propagate and crash rendering.
            return f'\n<pre>{mistune.escape(code)}</pre>\n'
        formatter = HtmlFormatter(
            linenos=False,
            cssclass="highlight",
            style=get_style_by_name("monokai")
        )
        return highlight(code, lexer, formatter)
|
19
|
+
|
20
|
+
class MarkdownRenderer:
    """
    Wrapper around a lazily-built, process-wide mistune markdown renderer.

    The mistune instance is cached on the class so it is constructed once,
    regardless of how many MarkdownRenderer objects are created.
    """

    # Class-level cache for the shared mistune renderer.
    _renderer = None

    def __init__(self):
        # Build the shared renderer only the first time any instance is made.
        if MarkdownRenderer._renderer is None:
            self._initialize_renderer()

    def _initialize_renderer(self):
        # plugins = self._discover_plugins()
        # BUGFIX: assign to the CLASS attribute. The original assigned to the
        # instance (self._renderer = ...), which left the class attribute
        # None and rebuilt the renderer for every new instance, defeating
        # the intended one-time initialization.
        MarkdownRenderer._renderer = mistune.create_markdown(
            renderer=HighlightRenderer(escape=False),
            escape=False,
            hard_wrap=True,
            plugins=[]
        )

    def _discover_plugins(self):
        # Currently unused; kept as the candidate plugin list for when
        # plugin support is enabled.
        # from mojo.apps.docit.markdown_plugins import syntax_highlight
        plugins = [
            'table', 'url', 'task_list',
            'footnotes', 'abbr', 'mark', 'math']
        return plugins

    def render(self, markdown_text):
        """Convert *markdown_text* to an HTML string via the shared renderer."""
        return self._renderer(markdown_text)
|
mojo/apps/fileman/backends/s3.py
CHANGED
@@ -425,6 +425,21 @@ class S3StorageBackend(StorageBackend):
|
|
425
425
|
|
426
426
|
return len(errors) == 0, errors
|
427
427
|
|
428
|
+
def test_connection(self):
    """
    Confirm the configured bucket is reachable with the current credentials.

    Returns True on success. Raises ValueError describing the failure:
    missing credentials, a nonexistent bucket (404), access denial (403),
    or any other S3 client error.
    """
    try:
        self.client.head_bucket(Bucket=self.bucket_name)
    except NoCredentialsError:
        raise ValueError("Invalid AWS credentials")
    except ClientError as e:
        code = e.response['Error']['Code']
        if code == '404':
            raise ValueError(f"S3 bucket '{self.bucket_name}' does not exist")
        if code == '403':
            raise ValueError(f"Access denied to S3 bucket '{self.bucket_name}'")
        raise ValueError(f"S3 connection error: {e}")
    return True
|
442
|
+
|
428
443
|
def make_path_public(self):
|
429
444
|
# Get the current bucket policy (if any)
|
430
445
|
try:
|
@@ -488,3 +503,197 @@ class S3StorageBackend(StorageBackend):
|
|
488
503
|
self.client.download_fileobj(self.bucket_name, file_path, local_file)
|
489
504
|
except ClientError as e:
|
490
505
|
raise Exception(f"Failed to download file from S3: {e}")
|
506
|
+
|
507
|
+
# -------------------------------
|
508
|
+
# CORS MANAGEMENT FOR DIRECT UPLOADS
|
509
|
+
# -------------------------------
|
510
|
+
def get_cors_configuration(self) -> Optional[Dict[str, Any]]:
    """
    Fetch the bucket's current CORS configuration.

    Returns the raw get_bucket_cors response dict, or None when the bucket
    has no CORS configuration at all. Other client errors propagate.
    """
    try:
        return self.client.get_bucket_cors(Bucket=self.bucket_name)
    except ClientError as e:
        error_code = e.response.get("Error", {}).get("Code")
        if error_code == "NoSuchCORSConfiguration":
            return None
        raise
|
521
|
+
|
522
|
+
def _default_direct_upload_cors_rule(
|
523
|
+
self,
|
524
|
+
allowed_origins: List[str],
|
525
|
+
allowed_methods: Optional[List[str]] = None,
|
526
|
+
allowed_headers: Optional[List[str]] = None,
|
527
|
+
expose_headers: Optional[List[str]] = None,
|
528
|
+
max_age_seconds: int = 3000,
|
529
|
+
) -> Dict[str, Any]:
|
530
|
+
"""
|
531
|
+
Build a single CORS rule suitable for direct uploads via pre-signed PUT/POST.
|
532
|
+
Note: S3 CORS applies at the bucket level, not per-prefix. Access is still
|
533
|
+
enforced by IAM policies and the fact that we use pre-signed URLs.
|
534
|
+
"""
|
535
|
+
if not allowed_origins or not any(str(o).strip() for o in allowed_origins):
|
536
|
+
raise ValueError("allowed_origins must contain at least one origin")
|
537
|
+
methods = allowed_methods or ["PUT", "HEAD"]
|
538
|
+
headers = allowed_headers or ["*"] # simplest and safest for signed uploads
|
539
|
+
expose = expose_headers or ["ETag", "x-amz-request-id", "x-amz-id-2", "x-amz-version-id"]
|
540
|
+
|
541
|
+
return {
|
542
|
+
"CORSRules": [
|
543
|
+
{
|
544
|
+
"AllowedOrigins": allowed_origins,
|
545
|
+
"AllowedMethods": methods,
|
546
|
+
"AllowedHeaders": headers,
|
547
|
+
"ExposeHeaders": expose,
|
548
|
+
"MaxAgeSeconds": max_age_seconds,
|
549
|
+
}
|
550
|
+
]
|
551
|
+
}
|
552
|
+
|
553
|
+
def check_cors_configuration_for_direct_upload(
    self,
    allowed_origins: List[str],
    required_methods: Optional[List[str]] = None,
    required_headers: Optional[List[str]] = None,
) -> Tuple[bool, List[str], Optional[Dict[str, Any]]]:
    """
    Validate current CORS config can support direct uploads from the given origins.

    Returns:
        (ok, issues, current_config)

    Raises:
        ValueError: when allowed_origins is empty or all-blank.
    """
    if not allowed_origins or not any(str(o).strip() for o in allowed_origins):
        raise ValueError("allowed_origins must contain at least one origin")

    config = self.get_cors_configuration()
    if config is None:
        return False, ["No CORS configuration set on this bucket."], None

    # SSE buckets use pre-signed POST forms; otherwise pre-signed PUT.
    if required_methods is None:
        required_methods = ["POST", "HEAD"] if self.server_side_encryption else ["PUT", "HEAD"]
    needed_methods = {m.upper() for m in required_methods}

    # For PUT we often need Content-Type. For POST, headers are not required
    # (fields are in the form). "*" for AllowedHeaders reduces edge cases.
    if required_headers is None:
        required_headers = [] if self.server_side_encryption else ["content-type"]
    needed_headers = {h.lower() for h in required_headers}

    rules: List[Dict[str, Any]] = config.get("CORSRules", [])

    def _rule_covers(rule: Dict[str, Any], origin: str) -> bool:
        # A rule covers an origin when the origin matches, every required
        # method is allowed, and the headers are either wildcarded or all
        # required headers are present.
        origins = rule.get("AllowedOrigins", [])
        if "*" not in origins and origin not in origins:
            return False
        methods = {m.upper() for m in rule.get("AllowedMethods", [])}
        if not needed_methods.issubset(methods):
            return False
        headers = {h.lower() for h in rule.get("AllowedHeaders", [])}
        return "*" in headers or needed_headers.issubset(headers)

    issues = [
        f"Origin not covered for direct upload: {origin}"
        for origin in allowed_origins
        if not any(_rule_covers(rule, origin) for rule in rules)
    ]
    return (len(issues) == 0), issues, config
|
606
|
+
|
607
|
+
def update_cors_configuration_for_direct_upload(
    self,
    allowed_origins: List[str],
    allowed_methods: Optional[List[str]] = None,
    allowed_headers: Optional[List[str]] = None,
    expose_headers: Optional[List[str]] = None,
    max_age_seconds: int = 3000,
    merge: bool = True,
) -> Dict[str, Any]:
    """
    Ensure CORS allows direct uploads from allowed_origins.
    If merge=True, append our rule to any existing rules instead of replacing.

    Returns a dict describing whether anything changed and the configuration
    that is now in effect.
    """
    if not allowed_origins or not any(str(o).strip() for o in allowed_origins):
        raise ValueError("allowed_origins must contain at least one origin")

    # Short-circuit when the existing configuration already works.
    ok, issues, current = self.check_cors_configuration_for_direct_upload(
        allowed_origins=allowed_origins,
        required_methods=allowed_methods,
        required_headers=allowed_headers,
    )
    if ok:
        return {
            "changed": False,
            "message": "Existing CORS configuration already supports direct uploads.",
            "issues": [],
            "applied_configuration": current,
        }

    new_rule_config = self._default_direct_upload_cors_rule(
        allowed_origins=allowed_origins,
        allowed_methods=allowed_methods or (["POST", "HEAD"] if self.server_side_encryption else ["PUT", "HEAD"]),
        allowed_headers=allowed_headers or ["*"],
        expose_headers=expose_headers,
        max_age_seconds=max_age_seconds,
    )

    if merge and current:
        # Keep every existing rule and append ours.
        applied = dict(CORSRules=[*current.get("CORSRules", []), *new_rule_config["CORSRules"]])
    else:
        # Replace entirely with our single rule
        applied = new_rule_config
    self.client.put_bucket_cors(Bucket=self.bucket_name, CORSConfiguration=applied)

    return {
        "changed": True,
        "message": "CORS configuration updated to support direct uploads.",
        "issues": issues,
        "applied_configuration": applied,
    }
|
660
|
+
|
661
|
+
def ensure_cors_for_direct_upload(
|
662
|
+
self,
|
663
|
+
allowed_origins: List[str],
|
664
|
+
allowed_methods: Optional[List[str]] = None,
|
665
|
+
allowed_headers: Optional[List[str]] = None,
|
666
|
+
expose_headers: Optional[List[str]] = None,
|
667
|
+
max_age_seconds: int = 3000,
|
668
|
+
merge: bool = True,
|
669
|
+
) -> Dict[str, Any]:
|
670
|
+
"""
|
671
|
+
Convenience wrapper that checks and updates CORS as needed.
|
672
|
+
|
673
|
+
Example:
|
674
|
+
backend.ensure_cors_for_direct_upload(
|
675
|
+
["http://localhost:3000", "https://your-prod-domain.com"]
|
676
|
+
)
|
677
|
+
"""
|
678
|
+
if not allowed_origins or not any(str(o).strip() for o in allowed_origins):
|
679
|
+
raise ValueError("allowed_origins must contain at least one origin")
|
680
|
+
result = self.update_cors_configuration_for_direct_upload(
|
681
|
+
allowed_origins=allowed_origins,
|
682
|
+
allowed_methods=allowed_methods,
|
683
|
+
allowed_headers=allowed_headers,
|
684
|
+
expose_headers=expose_headers,
|
685
|
+
max_age_seconds=max_age_seconds,
|
686
|
+
merge=merge,
|
687
|
+
)
|
688
|
+
# Re-check to confirm
|
689
|
+
ok, issues, current = self.check_cors_configuration_for_direct_upload(
|
690
|
+
allowed_origins=allowed_origins,
|
691
|
+
required_methods=allowed_methods,
|
692
|
+
required_headers=allowed_headers,
|
693
|
+
)
|
694
|
+
result.update({
|
695
|
+
"verified": ok,
|
696
|
+
"post_update_issues": issues,
|
697
|
+
"current_configuration": current,
|
698
|
+
})
|
699
|
+
return result
|
mojo/apps/fileman/models/file.py
CHANGED
@@ -24,6 +24,7 @@ class File(models.Model, MojoModel):
|
|
24
24
|
DEFAULT_SORT = "-created"
|
25
25
|
VIEW_PERMS = ["view_fileman", "manage_files"]
|
26
26
|
SEARCH_FIELDS = ["filename", "content_type"]
|
27
|
+
POST_SAVE_ACTIONS = ["action"]
|
27
28
|
SEARCH_TERMS = [
|
28
29
|
"filename", "content_type",
|
29
30
|
("group", "group__name"),
|
@@ -43,7 +44,7 @@ class File(models.Model, MojoModel):
|
|
43
44
|
},
|
44
45
|
"basic": {
|
45
46
|
"fields": ["id", "filename", "content_type", "category"],
|
46
|
-
"extra": ["url", "
|
47
|
+
"extra": ["url", "thumbnail"],
|
47
48
|
},
|
48
49
|
"default": {
|
49
50
|
"extra": ["url", "renditions"],
|
@@ -291,6 +292,16 @@ class File(models.Model, MojoModel):
|
|
291
292
|
def url(self):
|
292
293
|
return self.generate_download_url()
|
293
294
|
|
295
|
+
@property
|
296
|
+
def thumbnail(self):
|
297
|
+
r = self.get_rendition_by_role('thumbnail')
|
298
|
+
if r:
|
299
|
+
return r.url
|
300
|
+
return None
|
301
|
+
|
302
|
+
def get_rendition_by_role(self, role):
|
303
|
+
return self.file_renditions.filter(role=role).first()
|
304
|
+
|
294
305
|
def generate_download_url(self):
|
295
306
|
if self.download_url:
|
296
307
|
return self.download_url
|
@@ -299,13 +310,13 @@ class File(models.Model, MojoModel):
|
|
299
310
|
return self.download_url
|
300
311
|
return self.file_manager.backend.get_url(self.storage_file_path, self.get_setting("urls_expire_in", 3600))
|
301
312
|
|
302
|
-
def
|
313
|
+
def on_action_action(self, action):
|
303
314
|
if action == "mark_as_completed":
|
304
|
-
self.mark_as_completed()
|
315
|
+
self.mark_as_completed(commit=True)
|
305
316
|
elif action == "mark_as_failed":
|
306
|
-
self.mark_as_failed()
|
317
|
+
self.mark_as_failed(commit=True)
|
307
318
|
elif action == "mark_as_uploading":
|
308
|
-
self.mark_as_uploading()
|
319
|
+
self.mark_as_uploading(commit=True)
|
309
320
|
|
310
321
|
def set_filename(self, filename):
|
311
322
|
self.filename = filename
|
@@ -421,17 +432,42 @@ class File(models.Model, MojoModel):
|
|
421
432
|
def on_rest_related_save(cls, related_instance, related_field_name, field_value, current_instance=None):
|
422
433
|
# this allows us to handle json posts with inline base64 file data
|
423
434
|
if isinstance(field_value, str):
|
424
|
-
|
425
|
-
|
426
|
-
|
427
|
-
|
435
|
+
mime_type = None
|
436
|
+
b64_data = field_value
|
437
|
+
|
438
|
+
# Check for and parse Data URL scheme (e.g., "data:image/png;base64,iVBOR...")
|
439
|
+
if field_value.startswith('data:') and ',' in field_value:
|
440
|
+
header, b64_data = field_value.split(',', 1)
|
441
|
+
mime_type = header.split(';')[0].split(':')[1]
|
442
|
+
|
443
|
+
# Fix incorrect padding, which can occur with base64 strings from web clients
|
444
|
+
missing_padding = len(b64_data) % 4
|
445
|
+
if missing_padding:
|
446
|
+
b64_data += '=' * (4 - missing_padding)
|
447
|
+
|
448
|
+
try:
|
449
|
+
file_bytes = base64.b64decode(b64_data)
|
450
|
+
except (TypeError, base64.binascii.Error):
|
451
|
+
# If decoding fails, it's not a valid base64 string.
|
452
|
+
# In a real app, you might want to raise a validation error here.
|
453
|
+
return
|
454
|
+
|
455
|
+
# If mime_type wasn't in the data URL, detect it with python-magic
|
456
|
+
if not mime_type:
|
457
|
+
mime_type = magic.from_buffer(file_bytes, mime=True)
|
458
|
+
|
459
|
+
# Safely guess the extension, defaulting to an empty string if unknown
|
460
|
+
ext = mimetypes.guess_extension(mime_type) or ''
|
461
|
+
|
428
462
|
file_obj = io.BytesIO(file_bytes)
|
429
463
|
file_obj.name = f"{related_field_name}{ext}"
|
430
464
|
file_obj.content_type = mime_type
|
431
465
|
file_obj.size = len(file_bytes)
|
466
|
+
|
432
467
|
# now we need to upload the file
|
433
468
|
instance = cls.create_from_file(file_obj, file_obj.name)
|
434
469
|
setattr(related_instance, related_field_name, instance)
|
470
|
+
|
435
471
|
elif isinstance(field_value, int):
|
436
472
|
# assume file id
|
437
473
|
instance = File.objects.get(id=field_value)
|