django-nativemojo 0.1.10__py3-none-any.whl → 0.1.15__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- django_nativemojo-0.1.15.dist-info/METADATA +136 -0
- {django_nativemojo-0.1.10.dist-info → django_nativemojo-0.1.15.dist-info}/RECORD +105 -65
- mojo/__init__.py +1 -1
- mojo/apps/account/management/__init__.py +5 -0
- mojo/apps/account/management/commands/__init__.py +6 -0
- mojo/apps/account/management/commands/serializer_admin.py +531 -0
- mojo/apps/account/migrations/0004_user_avatar.py +20 -0
- mojo/apps/account/migrations/0005_group_last_activity.py +18 -0
- mojo/apps/account/models/group.py +25 -7
- mojo/apps/account/models/member.py +15 -4
- mojo/apps/account/models/user.py +197 -20
- mojo/apps/account/rest/group.py +1 -0
- mojo/apps/account/rest/user.py +6 -2
- mojo/apps/aws/rest/__init__.py +1 -0
- mojo/apps/aws/rest/s3.py +64 -0
- mojo/apps/fileman/README.md +8 -8
- mojo/apps/fileman/backends/base.py +76 -70
- mojo/apps/fileman/backends/filesystem.py +86 -86
- mojo/apps/fileman/backends/s3.py +200 -108
- mojo/apps/fileman/migrations/0001_initial.py +106 -0
- mojo/apps/fileman/migrations/0002_filemanager_parent_alter_filemanager_max_file_size.py +24 -0
- mojo/apps/fileman/migrations/0003_remove_file_fileman_fil_upload__c4bc35_idx_and_more.py +25 -0
- mojo/apps/fileman/migrations/0004_remove_file_original_filename_and_more.py +39 -0
- mojo/apps/fileman/migrations/0005_alter_file_upload_token.py +18 -0
- mojo/apps/fileman/migrations/0006_file_download_url_filemanager_forever_urls.py +23 -0
- mojo/apps/fileman/migrations/0007_remove_filemanager_forever_urls_and_more.py +22 -0
- mojo/apps/fileman/migrations/0008_file_category.py +18 -0
- mojo/apps/fileman/migrations/0009_rename_file_path_file_storage_file_path.py +18 -0
- mojo/apps/fileman/migrations/0010_filerendition.py +33 -0
- mojo/apps/fileman/migrations/0011_alter_filerendition_original_file.py +19 -0
- mojo/apps/fileman/models/__init__.py +1 -5
- mojo/apps/fileman/models/file.py +204 -58
- mojo/apps/fileman/models/manager.py +161 -31
- mojo/apps/fileman/models/rendition.py +118 -0
- mojo/apps/fileman/renderer/__init__.py +111 -0
- mojo/apps/fileman/renderer/audio.py +403 -0
- mojo/apps/fileman/renderer/base.py +205 -0
- mojo/apps/fileman/renderer/document.py +404 -0
- mojo/apps/fileman/renderer/image.py +222 -0
- mojo/apps/fileman/renderer/utils.py +297 -0
- mojo/apps/fileman/renderer/video.py +304 -0
- mojo/apps/fileman/rest/__init__.py +1 -18
- mojo/apps/fileman/rest/upload.py +22 -32
- mojo/apps/fileman/signals.py +58 -0
- mojo/apps/fileman/tasks.py +254 -0
- mojo/apps/fileman/utils/__init__.py +40 -16
- mojo/apps/incident/migrations/0005_incidenthistory.py +39 -0
- mojo/apps/incident/migrations/0006_alter_incident_state.py +18 -0
- mojo/apps/incident/models/__init__.py +1 -0
- mojo/apps/incident/models/history.py +36 -0
- mojo/apps/incident/models/incident.py +1 -1
- mojo/apps/incident/reporter.py +3 -1
- mojo/apps/incident/rest/event.py +7 -1
- mojo/apps/logit/migrations/0004_alter_log_level.py +18 -0
- mojo/apps/logit/models/log.py +4 -1
- mojo/apps/metrics/utils.py +2 -2
- mojo/apps/notify/handlers/ses/message.py +1 -1
- mojo/apps/notify/providers/aws.py +2 -2
- mojo/apps/tasks/__init__.py +34 -1
- mojo/apps/tasks/manager.py +200 -45
- mojo/apps/tasks/rest/tasks.py +24 -10
- mojo/apps/tasks/runner.py +283 -18
- mojo/apps/tasks/task.py +99 -0
- mojo/apps/tasks/tq_handlers.py +118 -0
- mojo/decorators/auth.py +6 -1
- mojo/decorators/http.py +7 -2
- mojo/helpers/aws/__init__.py +41 -0
- mojo/helpers/aws/ec2.py +804 -0
- mojo/helpers/aws/iam.py +748 -0
- mojo/helpers/aws/s3.py +451 -11
- mojo/helpers/aws/ses.py +483 -0
- mojo/helpers/aws/sns.py +461 -0
- mojo/helpers/crypto/__pycache__/hash.cpython-310.pyc +0 -0
- mojo/helpers/crypto/__pycache__/sign.cpython-310.pyc +0 -0
- mojo/helpers/crypto/__pycache__/utils.cpython-310.pyc +0 -0
- mojo/helpers/dates.py +18 -0
- mojo/helpers/response.py +6 -2
- mojo/helpers/settings/__init__.py +2 -0
- mojo/helpers/{settings.py → settings/helper.py} +1 -37
- mojo/helpers/settings/parser.py +132 -0
- mojo/middleware/logging.py +1 -1
- mojo/middleware/mojo.py +5 -0
- mojo/models/rest.py +261 -46
- mojo/models/secrets.py +13 -4
- mojo/serializers/__init__.py +100 -0
- mojo/serializers/advanced/README.md +363 -0
- mojo/serializers/advanced/__init__.py +247 -0
- mojo/serializers/advanced/formats/__init__.py +28 -0
- mojo/serializers/advanced/formats/csv.py +416 -0
- mojo/serializers/advanced/formats/excel.py +516 -0
- mojo/serializers/advanced/formats/json.py +239 -0
- mojo/serializers/advanced/formats/localizers.py +509 -0
- mojo/serializers/advanced/formats/response.py +485 -0
- mojo/serializers/advanced/serializer.py +568 -0
- mojo/serializers/manager.py +501 -0
- mojo/serializers/optimized.py +618 -0
- mojo/serializers/settings_example.py +322 -0
- mojo/serializers/{models.py → simple.py} +38 -15
- testit/helpers.py +21 -4
- django_nativemojo-0.1.10.dist-info/METADATA +0 -96
- mojo/apps/metrics/rest/db.py +0 -0
- mojo/helpers/aws/setup_email.py +0 -0
- mojo/ws4redis/README.md +0 -174
- mojo/ws4redis/__init__.py +0 -2
- mojo/ws4redis/client.py +0 -283
- mojo/ws4redis/connection.py +0 -327
- mojo/ws4redis/exceptions.py +0 -32
- mojo/ws4redis/redis.py +0 -183
- mojo/ws4redis/servers/base.py +0 -86
- mojo/ws4redis/servers/django.py +0 -171
- mojo/ws4redis/servers/uwsgi.py +0 -63
- mojo/ws4redis/settings.py +0 -45
- mojo/ws4redis/utf8validator.py +0 -128
- mojo/ws4redis/websocket.py +0 -403
- {django_nativemojo-0.1.10.dist-info → django_nativemojo-0.1.15.dist-info}/LICENSE +0 -0
- {django_nativemojo-0.1.10.dist-info → django_nativemojo-0.1.15.dist-info}/NOTICE +0 -0
- {django_nativemojo-0.1.10.dist-info → django_nativemojo-0.1.15.dist-info}/WHEEL +0 -0
- /mojo/{ws4redis/servers → apps/aws}/__init__.py +0 -0
- /mojo/apps/{fileman/models/render.py → aws/models/__init__.py} +0 -0
- /mojo/apps/fileman/{rest/__init__ → migrations/__init__.py} +0 -0
@@ -0,0 +1,416 @@
|
|
1
|
+
import csv
|
2
|
+
import io
|
3
|
+
from decimal import Decimal
|
4
|
+
from datetime import datetime, date
|
5
|
+
from django.http import StreamingHttpResponse, HttpResponse
|
6
|
+
from django.db.models import QuerySet
|
7
|
+
from mojo.helpers import logit
|
8
|
+
|
9
|
+
logger = logit.get_logger("csv_formatter", "csv_formatter.log")
|
10
|
+
|
11
|
+
|
12
|
+
class CsvFormatter:
    """
    Advanced CSV formatter with streaming support and RestMeta.GRAPHS integration.

    Produces Django HTTP responses from querysets or plain lists of
    dicts/objects.  Querysets larger than ``streaming_threshold`` rows are
    streamed row-by-row (via ``StreamingHttpResponse``) to keep memory flat.
    """

    def __init__(self, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL,
                 encoding='utf-8', streaming_threshold=1000):
        """
        Initialize CSV formatter.

        :param delimiter: Field delimiter (default comma)
        :param quotechar: Quote character for fields containing special chars
        :param quoting: Quoting behavior (csv.QUOTE_MINIMAL, etc.)
        :param encoding: Character encoding for output
        :param streaming_threshold: Minimum rows to trigger streaming response
        """
        self.delimiter = delimiter
        self.quotechar = quotechar
        self.quoting = quoting
        self.encoding = encoding
        self.streaming_threshold = streaming_threshold

    def serialize_queryset(self, queryset, fields=None, graph=None, filename="export.csv",
                           headers=None, localize=None, stream=True):
        """
        Serialize a Django QuerySet to CSV format.

        :param queryset: Django QuerySet to serialize
        :param fields: List of field names or tuples (field_name, display_name)
        :param graph: RestMeta graph name to use for field configuration
        :param filename: Output filename (used in the Content-Disposition header)
        :param headers: Custom header names (overrides field names)
        :param localize: Localization configuration
        :param stream: Enable streaming for large datasets
        :return: HttpResponse or StreamingHttpResponse
        """
        # Count once; used for both the streaming decision and logging
        # (previously count() was issued twice on the streaming path).
        total = queryset.count()
        should_stream = stream and total > self.streaming_threshold

        # Get fields configuration
        field_config = self._get_field_config(queryset, fields, graph)

        # BUGFIX: custom headers were previously accepted but silently ignored
        # on the queryset path; apply them the same way serialize_data() does.
        if headers:
            field_config['headers'] = headers[:len(field_config['field_names'])]

        if should_stream:
            return self._create_streaming_response(queryset, field_config, filename,
                                                   localize, total)
        return self._create_standard_response(queryset, field_config, filename, localize)

    def serialize_data(self, data, fields=None, filename="export.csv", headers=None):
        """
        Serialize list of dictionaries or objects to CSV.

        :param data: List of dictionaries or objects
        :param fields: Field names to include
        :param filename: Output filename
        :param headers: Custom header names
        :return: HttpResponse
        """
        if not data:
            return self._create_empty_response(filename)

        # Auto-detect fields if not provided
        if not fields:
            fields = self._auto_detect_fields(data[0])

        # Prepare field configuration
        field_config = self._prepare_field_config(fields, headers)

        # Generate CSV content
        output = io.StringIO()
        writer = csv.writer(output, delimiter=self.delimiter,
                            quotechar=self.quotechar, quoting=self.quoting)

        # Write header
        writer.writerow(field_config['headers'])

        # Write data rows
        for item in data:
            writer.writerow(self._extract_row_data(item, field_config['field_names']))

        # Create response
        response = HttpResponse(output.getvalue(), content_type='text/csv')
        # BUGFIX: the filename argument was not interpolated into the header.
        response['Content-Disposition'] = f'attachment; filename="{filename}"'
        return response

    def _get_field_config(self, queryset, fields, graph):
        """
        Resolve field configuration: explicit fields win, then the model's
        RestMeta.GRAPHS[graph] fields, then all concrete model fields.
        """
        if fields:
            return self._prepare_field_config(fields)

        # Try to get from RestMeta.GRAPHS
        if graph and hasattr(queryset.model, 'RestMeta'):
            rest_meta = queryset.model.RestMeta
            if hasattr(rest_meta, 'GRAPHS') and graph in rest_meta.GRAPHS:
                graph_fields = rest_meta.GRAPHS[graph].get('fields', [])
                if graph_fields:
                    return self._prepare_field_config(graph_fields)

        # Fallback to model fields
        model_fields = [f.name for f in queryset.model._meta.fields]
        return self._prepare_field_config(model_fields)

    def _prepare_field_config(self, fields, headers=None):
        """
        Normalize fields into ``{'field_names': [...], 'headers': [...]}``.

        Each field may be a plain name or a (field_name, display_name) pair;
        plain names get a title-cased header with underscores replaced.
        """
        field_names = []
        field_headers = []

        for field in fields:
            if isinstance(field, (tuple, list)):
                field_name, display_name = field
                field_names.append(field_name)
                field_headers.append(display_name)
            else:
                field_names.append(field)
                field_headers.append(field.replace('_', ' ').title())

        # Override with custom headers if provided
        if headers:
            field_headers = headers[:len(field_names)]

        return {
            'field_names': field_names,
            'headers': field_headers
        }

    def _create_streaming_response(self, queryset, field_config, filename,
                                   localize=None, total=None):
        """
        Create streaming HTTP response for large datasets.

        :param total: pre-computed queryset.count(), avoids a second COUNT query.
        """
        if total is None:
            total = queryset.count()
        logger.info(f"Creating streaming CSV response for {total} records")

        def csv_generator():
            # csv.writer writes through EchoWriter.write(), which returns the
            # formatted line instead of buffering it, so each row can be yielded.
            pseudo_buffer = EchoWriter()
            writer = csv.writer(pseudo_buffer, delimiter=self.delimiter,
                                quotechar=self.quotechar, quoting=self.quoting)

            # Yield header row
            yield writer.writerow(field_config['headers'])

            # Yield data rows
            for obj in queryset.iterator():  # Use iterator for memory efficiency
                try:
                    row = self._extract_row_data(obj, field_config['field_names'], localize)
                    yield writer.writerow(row)
                except Exception as e:
                    logger.error(f"Error processing row for object {obj.pk}: {e}")
                    # Continue with next row instead of failing completely
                    continue

        response = StreamingHttpResponse(csv_generator(), content_type='text/csv')
        # BUGFIX: the filename argument was not interpolated into the header.
        response['Content-Disposition'] = f'attachment; filename="{filename}"'
        response['Cache-Control'] = 'no-cache'
        return response

    def _create_standard_response(self, queryset, field_config, filename, localize=None):
        """
        Create standard (fully buffered) HTTP response for smaller datasets.
        """
        output = io.StringIO()
        writer = csv.writer(output, delimiter=self.delimiter,
                            quotechar=self.quotechar, quoting=self.quoting)

        # Write header
        writer.writerow(field_config['headers'])

        # Write data rows; skip rows that fail rather than aborting the export.
        for obj in queryset:
            try:
                row = self._extract_row_data(obj, field_config['field_names'], localize)
                writer.writerow(row)
            except Exception as e:
                logger.error(f"Error processing row for object {obj.pk}: {e}")
                continue

        response = HttpResponse(output.getvalue(), content_type='text/csv')
        # BUGFIX: the filename argument was not interpolated into the header.
        response['Content-Disposition'] = f'attachment; filename="{filename}"'
        return response

    def _create_empty_response(self, filename):
        """
        Create an empty CSV attachment response (used when there is no data).
        """
        response = HttpResponse('', content_type='text/csv')
        # BUGFIX: the filename argument was not interpolated into the header.
        response['Content-Disposition'] = f'attachment; filename="{filename}"'
        return response

    def _extract_row_data(self, obj, field_names, localize=None):
        """
        Extract one CSV row (list of strings) from an object.

        Fields that raise during extraction are rendered as "N/A" so a single
        bad field does not lose the whole row.
        """
        row = []

        for field_name in field_names:
            try:
                value = self._get_field_value(obj, field_name)
                value = self._process_field_value(value, field_name, localize)
                row.append(self._format_csv_value(value))
            except Exception as e:
                logger.warning(f"Error extracting field '{field_name}': {e}")
                row.append("N/A")

        return row

    def _get_field_value(self, obj, field_name):
        """
        Get field value from object, supporting nested field access.

        Resolution order: dotted paths -> metadata.* via getProperty ->
        attribute access (calling zero-arg callables) -> dict lookup -> None.
        """
        # Handle nested field access (e.g., "user.email", "profile.address.city")
        if '.' in field_name:
            return self._get_nested_field_value(obj, field_name)

        # Handle special metadata fields
        if field_name.startswith('metadata.') and hasattr(obj, 'getProperty'):
            parts = field_name.split('.', 2)
            if len(parts) == 3:
                return obj.getProperty(parts[2], category=parts[1])
            elif len(parts) == 2:
                return obj.getProperty(parts[1])

        # Standard field access
        if hasattr(obj, field_name):
            value = getattr(obj, field_name)
            return value() if callable(value) else value

        # Dictionary-style access
        if isinstance(obj, dict):
            return obj.get(field_name, None)

        return None

    def _get_nested_field_value(self, obj, field_path):
        """
        Get value from nested field path like "user.profile.name".

        Walks attributes first, then dict keys; returns None on any miss.
        """
        try:
            current = obj
            for field_part in field_path.split('.'):
                if current is None:
                    return None

                if hasattr(current, field_part):
                    current = getattr(current, field_part)
                elif isinstance(current, dict):
                    current = current.get(field_part)
                else:
                    return None

            # Handle callable attributes
            if callable(current):
                current = current()

            return current
        except Exception as e:
            logger.warning(f"Error accessing nested field '{field_path}': {e}")
            return None

    def _process_field_value(self, value, field_name, localize=None):
        """
        Process field value with localization and special handling.

        ``localize`` maps field names to "localizer" or "localizer|extra"
        strings; localization failures fall back to the raw value.
        """
        if value is None:
            return "N/A"

        # Apply localization if configured
        if localize and field_name in localize:
            try:
                localizer_config = localize[field_name]
                if '|' in localizer_config:
                    localizer_name, extra = localizer_config.split('|', 1)
                else:
                    localizer_name, extra = localizer_config, None

                # Import and apply localizer.
                # BUGFIX(review): path updated to the package's actual location
                # (mojo/serializers/advanced/formats/localizers.py); the old
                # "mojo.serializers.formats.localizers" module is not shipped.
                from mojo.serializers.advanced.formats.localizers import get_localizer
                localizer = get_localizer(localizer_name)
                if localizer:
                    return localizer(value, extra)
            except Exception as e:
                logger.warning(f"Localization failed for field '{field_name}': {e}")

        return value

    def _format_csv_value(self, value):
        """
        Format a single value for CSV output as a string.
        """
        if value is None:
            return ""

        # Handle model instances (anything exposing a pk)
        if hasattr(value, 'pk'):
            return str(value.pk)

        # Handle datetime objects (datetime before date: datetime is a date subclass)
        elif isinstance(value, datetime):
            return value.strftime('%Y-%m-%d %H:%M:%S')
        elif isinstance(value, date):
            return value.strftime('%Y-%m-%d')

        # Handle numeric types
        elif isinstance(value, Decimal):
            return str(float(value)) if not value.is_nan() else "0"
        elif isinstance(value, (int, float)):
            return str(value)

        # Handle collections
        elif isinstance(value, (list, tuple)):
            return '; '.join(str(item) for item in value)
        elif isinstance(value, dict):
            return str(value)  # Could be enhanced with better dict formatting

        # Default string conversion
        else:
            return str(value)

    def _auto_detect_fields(self, sample_item):
        """
        Auto-detect exportable field names from a sample data item.
        """
        if isinstance(sample_item, dict):
            return list(sample_item.keys())
        elif hasattr(sample_item, '_meta'):
            return [f.name for f in sample_item._meta.fields]
        elif hasattr(sample_item, '__dict__'):
            return list(sample_item.__dict__.keys())
        else:
            return ['value']  # Fallback for primitive types
|
351
|
+
class EchoWriter:
    """Minimal file-like object whose ``write`` echoes the value back.

    ``csv.writer`` calls ``write()`` once per formatted row; returning that
    string (instead of buffering it) lets a generator yield CSV lines for a
    StreamingHttpResponse.
    """

    def write(self, value):
        """Echo *value* back to the caller instead of storing it."""
        return value

    def writerow(self, row):
        """Format *row* as a standalone CSV line and return it."""
        buffer = io.StringIO()
        csv.writer(buffer).writerow(row)
        return buffer.getvalue()
367
|
+
|
368
|
+
# Convenience functions for backwards compatibility
|
369
|
+
def generate_csv(queryset, fields, filename, headers=None, localize=None, stream=True):
    """
    Generate a CSV response from a queryset (backwards-compatible wrapper).

    :param queryset: Django QuerySet
    :param fields: List of field names
    :param filename: Output filename
    :param headers: Custom header names
    :param localize: Localization config
    :param stream: Enable streaming for large datasets
    :return: HttpResponse or StreamingHttpResponse
    """
    return CsvFormatter().serialize_queryset(
        queryset=queryset,
        fields=fields,
        filename=filename,
        headers=headers,
        localize=localize,
        stream=stream,
    )
391
|
+
|
392
|
+
def generate_csv_stream(queryset, fields, filename, localize=None):
    """
    Generate a streaming CSV response (backwards-compatible wrapper).
    """
    csv_formatter = CsvFormatter()
    return csv_formatter.serialize_queryset(queryset=queryset,
                                            fields=fields,
                                            filename=filename,
                                            localize=localize,
                                            stream=True)
|
405
|
+
|
406
|
+
def serialize_to_csv(data, fields=None, filename="export.csv", headers=None):
    """
    Serialize a list of dicts/objects to a CSV response
    (backwards-compatible wrapper).
    """
    return CsvFormatter().serialize_data(data=data,
                                         fields=fields,
                                         filename=filename,
                                         headers=headers)