statezero-0.1.0b1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- statezero/__init__.py +0 -0
- statezero/adaptors/__init__.py +0 -0
- statezero/adaptors/django/__init__.py +0 -0
- statezero/adaptors/django/apps.py +97 -0
- statezero/adaptors/django/config.py +99 -0
- statezero/adaptors/django/context_manager.py +12 -0
- statezero/adaptors/django/event_emitters.py +78 -0
- statezero/adaptors/django/exception_handler.py +98 -0
- statezero/adaptors/django/extensions/__init__.py +0 -0
- statezero/adaptors/django/extensions/custom_field_serializers/__init__.py +0 -0
- statezero/adaptors/django/extensions/custom_field_serializers/file_fields.py +141 -0
- statezero/adaptors/django/extensions/custom_field_serializers/money_field.py +75 -0
- statezero/adaptors/django/f_handler.py +312 -0
- statezero/adaptors/django/helpers.py +153 -0
- statezero/adaptors/django/middleware.py +10 -0
- statezero/adaptors/django/migrations/0001_initial.py +33 -0
- statezero/adaptors/django/migrations/0002_delete_modelviewsubscription.py +16 -0
- statezero/adaptors/django/migrations/__init__.py +0 -0
- statezero/adaptors/django/orm.py +915 -0
- statezero/adaptors/django/permissions.py +252 -0
- statezero/adaptors/django/query_optimizer.py +772 -0
- statezero/adaptors/django/schemas.py +324 -0
- statezero/adaptors/django/search_providers/__init__.py +0 -0
- statezero/adaptors/django/search_providers/basic_search.py +24 -0
- statezero/adaptors/django/search_providers/postgres_search.py +51 -0
- statezero/adaptors/django/serializers.py +554 -0
- statezero/adaptors/django/urls.py +14 -0
- statezero/adaptors/django/views.py +336 -0
- statezero/core/__init__.py +34 -0
- statezero/core/ast_parser.py +821 -0
- statezero/core/ast_validator.py +266 -0
- statezero/core/classes.py +167 -0
- statezero/core/config.py +263 -0
- statezero/core/context_storage.py +4 -0
- statezero/core/event_bus.py +175 -0
- statezero/core/event_emitters.py +60 -0
- statezero/core/exceptions.py +106 -0
- statezero/core/interfaces.py +492 -0
- statezero/core/process_request.py +184 -0
- statezero/core/types.py +63 -0
- statezero-0.1.0b1.dist-info/METADATA +252 -0
- statezero-0.1.0b1.dist-info/RECORD +45 -0
- statezero-0.1.0b1.dist-info/WHEEL +5 -0
- statezero-0.1.0b1.dist-info/licenses/license.md +117 -0
- statezero-0.1.0b1.dist-info/top_level.txt +1 -0

statezero/adaptors/django/views.py
@@ -0,0 +1,336 @@
import logging
import math
import mimetypes
from datetime import datetime

from django.conf import settings
from django.core.files.storage import default_storage, storages
from django.db import transaction
from django.utils.module_loading import import_string
from rest_framework import serializers, status
from rest_framework.parsers import MultiPartParser
from rest_framework.response import Response
from rest_framework.views import APIView

from statezero.adaptors.django.config import config, registry
from statezero.adaptors.django.exception_handler import \
    explicit_exception_handler
from statezero.adaptors.django.permissions import ORMBridgeViewAccessGate
from statezero.core.interfaces import AbstractEventEmitter
from statezero.core.process_request import RequestProcessor

logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)

default_permission = "rest_framework.permissions.AllowAny"
permission_class = import_string(getattr(settings, "STATEZERO_VIEW_ACCESS_CLASS", default_permission))
default_storage = storages[getattr(settings, 'STATEZERO_STORAGE_KEY', 'default')]

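Everything tunable in the setup above is read from Django settings via getattr with a default. A sketch of the corresponding settings.py entries; the setting names are exactly the ones this file reads, while the values are only illustrative:

    # settings.py -- illustrative values; the names match the getattr calls above
    STATEZERO_VIEW_ACCESS_CLASS = "rest_framework.permissions.IsAuthenticated"
    STATEZERO_STORAGE_KEY = "default"       # key into django.core.files.storage.storages
    STATEZERO_QUERY_TIMEOUT_MS = 1000       # per-request query budget (ms) used by ModelView
    STATEZERO_UPLOAD_DIR = "statezero"      # storage prefix used by the upload views
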
class EventsAuthView(APIView):
    """
    A generic authentication view for event emitters.
    It uses the broadcast emitter from the event bus to check access and then
    calls its authenticate method with the request.
    """
    permission_classes = [permission_class]

    def post(self, request, *args, **kwargs):
        channel_name = request.data.get("channel_name")
        socket_id = request.data.get("socket_id")

        if not channel_name or not socket_id:
            return Response(
                {"error": "Missing channel_name or socket_id"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Extract the namespace from the channel name.
        if channel_name.startswith("private-"):
            namespace = channel_name[len("private-"):]
        else:
            namespace = channel_name

        # Retrieve the broadcast emitter from the global event bus.
        if not config.event_bus or not config.event_bus.broadcast_emitter:
            return Response(
                {"error": "Broadcast emitter is not configured."},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )

        event_emitter: AbstractEventEmitter = config.event_bus.broadcast_emitter

        # Use the event emitter's permission check
        if not event_emitter.has_permission(request, namespace):
            return Response(
                {"error": "Permission denied for accessing channel."},
                status=status.HTTP_403_FORBIDDEN,
            )

        # Delegate authentication to the event emitter.
        response = event_emitter.authenticate(request)
        logger.debug(f"Authentication successful for channel: {channel_name}")
        return Response(response, status=status.HTTP_200_OK)

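EventsAuthView contains no provider-specific logic: it resolves the broadcast emitter from config.event_bus, asks it for permission, and delegates the auth response to it. A minimal sketch of a compatible emitter, assuming a Pusher-style backend; the pusher client usage and the permission rule are illustrative, and only the has_permission(request, namespace) and authenticate(request) hooks are dictated by the view above (the real AbstractEventEmitter interface likely requires more, such as the emitting side):

    # Hypothetical broadcast emitter wired into config.event_bus.broadcast_emitter.
    import pusher

    from statezero.core.interfaces import AbstractEventEmitter

    class PusherEventEmitter(AbstractEventEmitter):
        def __init__(self, app_id: str, key: str, secret: str, cluster: str):
            self._client = pusher.Pusher(
                app_id=app_id, key=key, secret=secret, cluster=cluster
            )

        def has_permission(self, request, namespace: str) -> bool:
            # Illustrative rule: any authenticated user may join the namespace.
            return request.user.is_authenticated

        def authenticate(self, request):
            # Pusher signs the (channel_name, socket_id) pair; the dict it
            # returns is what EventsAuthView sends back to the client.
            return self._client.authenticate(
                channel=request.data["channel_name"],
                socket_id=request.data["socket_id"],
            )
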
class ModelListView(APIView):
    """
    Returns a list of registered model names.
    """

    permission_classes = [ORMBridgeViewAccessGate]

    def get(self, request, *args, **kwargs):
        model_names = []
        for model in registry._models_config.keys():
            model_name = config.orm_provider.get_model_name(model)
            model_names.append(model_name)
        return Response(model_names, status=status.HTTP_200_OK)

class ModelView(APIView):

    permission_classes = [permission_class]

    @transaction.atomic
    def post(self, request, model_name):
        processor = RequestProcessor(config=config, registry=registry)
        timeout_ms = getattr(settings, 'STATEZERO_QUERY_TIMEOUT_MS', 1000)
        try:
            with config.context_manager(timeout_ms):
                result = processor.process_request(req=request)
        except Exception as original_exception:
            return explicit_exception_handler(original_exception)
        return Response(result, status=status.HTTP_200_OK)


class SchemaView(APIView):
    permission_classes = [ORMBridgeViewAccessGate]

    def get(self, request, model_name):
        processor = RequestProcessor(config=config, registry=registry)
        try:
            result = processor.process_schema(req=request)
        except Exception as original_exception:
            return explicit_exception_handler(original_exception)
        return Response(result, status=status.HTTP_200_OK)

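ModelView and SchemaView both take model_name as a URL keyword argument, so the routing must capture it. The actual wiring ships in statezero/adaptors/django/urls.py (14 lines in this release); the sketch below is only a hypothetical reconstruction consistent with the view signatures, and the real paths may differ:

    # Hypothetical urls.py; path strings are invented for illustration.
    from django.urls import path

    from statezero.adaptors.django import views

    urlpatterns = [
        path("events/auth/", views.EventsAuthView.as_view()),
        path("models/", views.ModelListView.as_view()),
        path("<str:model_name>/", views.ModelView.as_view()),
        path("<str:model_name>/schema/", views.SchemaView.as_view()),
        path("upload/", views.FileUploadView.as_view()),
        path("fast-upload/", views.FastUploadView.as_view()),
    ]
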
class FileUploadView(APIView):
    """Standard file upload - returns permanent URL"""
    parser_classes = [MultiPartParser]
    permission_classes = [permission_class]

    def post(self, request):
        file = request.FILES.get('file')
        if not file:
            return Response({'error': 'No file provided'}, status=400)

        upload_dir = getattr(settings, 'STATEZERO_UPLOAD_DIR', 'statezero')
        full_path = f"{upload_dir}/{file.name}"

        file_path = default_storage.save(full_path, file)
        file_url = default_storage.url(file_path)

        response_data = {
            'file_path': file_path,
            'file_url': file_url,
            'original_name': file.name,
            'size': file.size
        }

        # Execute callbacks
        self._execute_callbacks(request, file, file_path, response_data)

        return Response(response_data)

    def _execute_callbacks(self, request, uploaded_file, file_path, response_data):
        """Execute configured file upload callbacks"""
        if config.file_upload_callbacks:
            for callback_path in config.file_upload_callbacks:
                try:
                    callback = import_string(callback_path)
                    callback(
                        request=request,
                        uploaded_file=uploaded_file,
                        file_path=file_path,
                        response_data=response_data
                    )
                except Exception as e:
                    logger.error(f"File upload callback failed: {e}")

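The callback hook above dotted-imports each path in config.file_upload_callbacks and invokes it with keyword arguments, so a callback is any callable with the matching signature; exceptions are logged and swallowed, so one failing callback cannot break the upload. A sketch of a compatible callback; the module path and the audit behavior are invented for illustration:

    # myapp/uploads.py -- registered as "myapp.uploads.log_upload" (hypothetical path)
    import logging

    audit_logger = logging.getLogger("myapp.uploads")

    def log_upload(request, uploaded_file, file_path, response_data):
        # Runs after the file is saved; mutating response_data here would
        # change what FileUploadView returns to the client.
        audit_logger.info(
            "%s uploaded %s (%d bytes) to %s",
            getattr(request.user, "username", "anonymous"),
            uploaded_file.name,
            uploaded_file.size,
            file_path,
        )
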
class FastUploadView(APIView):
    """Fast upload with S3 presigned URLs - single or multipart based on chunks"""
    permission_classes = [permission_class]

    def post(self, request):
        action = request.data.get('action', 'initiate')

        if action == 'initiate':
            return self._initiate_upload(request)
        elif action == 'complete':
            return self._complete_upload(request)
        else:
            return Response({'error': 'Invalid action'}, status=400)

    def _initiate_upload(self, request):
        """Generate presigned URLs - single or multipart based on num_chunks"""
        filename = request.data.get('filename')
        content_type = request.data.get('content_type')
        file_size = request.data.get('file_size', 0)
        num_chunks_str = request.data.get('num_chunks', 1)  # Client decides chunking
        num_chunks = int(num_chunks_str)

        if not filename:
            return Response({'error': 'filename required'}, status=400)

        # Generate file path
        upload_dir = getattr(settings, 'STATEZERO_UPLOAD_DIR', 'statezero')
        file_path = f"{upload_dir}/{filename}"

        if not content_type:
            content_type, _ = mimetypes.guess_type(filename)
            content_type = content_type or 'application/octet-stream'

        if not self._is_s3_storage():
            return Response({'error': 'Fast upload requires S3 storage backend'}, status=400)

        try:
            s3_client = self._get_s3_client()

            if num_chunks == 1:
                # Single upload: one presigned PUT for the whole object
                presigned_url = s3_client.generate_presigned_url(
                    ClientMethod='put_object',
                    Params={
                        'Bucket': settings.AWS_STORAGE_BUCKET_NAME,
                        'Key': file_path,
                        'ContentType': content_type,
                    },
                    ExpiresIn=3600,
                    HttpMethod='PUT',
                )

                return Response({
                    'upload_type': 'single',
                    'upload_url': presigned_url,
                    'file_path': file_path,
                    'content_type': content_type
                })

            else:
                # Multipart upload
                if num_chunks > 10000:
                    return Response({'error': 'Too many chunks (max 10,000)'}, status=400)

                # Initiate multipart upload
                response = s3_client.create_multipart_upload(
                    Bucket=settings.AWS_STORAGE_BUCKET_NAME,
                    Key=file_path,
                    ContentType=content_type
                )

                upload_id = response['UploadId']

                # Generate presigned URLs for all parts
                upload_urls = {}
                for part_number in range(1, num_chunks + 1):
                    url = s3_client.generate_presigned_url(
                        ClientMethod='upload_part',
                        Params={
                            'Bucket': settings.AWS_STORAGE_BUCKET_NAME,
                            'Key': file_path,
                            'PartNumber': part_number,
                            'UploadId': upload_id,
                        },
                        ExpiresIn=3600,
                        HttpMethod='PUT'
                    )
                    upload_urls[part_number] = url

                return Response({
                    'upload_type': 'multipart',
                    'upload_id': upload_id,
                    'upload_urls': upload_urls,  # All URLs at once
                    'file_path': file_path,
                    'content_type': content_type
                })

        except Exception as e:
            logger.error(f"Upload initiation failed: {e}")
            return Response({'error': 'Upload unavailable'}, status=500)

    def _complete_upload(self, request):
        """Complete upload - single or multipart"""
        file_path = request.data.get('file_path')
        original_name = request.data.get('original_name')
        upload_id = request.data.get('upload_id')  # Only present for multipart
        parts = request.data.get('parts', [])  # Only present for multipart

        if not file_path:
            return Response({'error': 'file_path required'}, status=400)

        try:
            if upload_id and parts:
                # Complete multipart upload
                s3_client = self._get_s3_client()

                # Sort parts by PartNumber to ensure correct order
                sorted_parts = sorted(parts, key=lambda x: x['PartNumber'])

                s3_client.complete_multipart_upload(
                    Bucket=settings.AWS_STORAGE_BUCKET_NAME,
                    Key=file_path,
                    UploadId=upload_id,
                    MultipartUpload={'Parts': sorted_parts}
                )

                logger.info(f"Multipart upload completed for {file_path}")

            # For single uploads, file is already there after PUT
            # For multipart, it's now assembled

            if not default_storage.exists(file_path):
                return Response({'error': 'File not found'}, status=404)

            return Response({
                'file_path': file_path,
                'file_url': default_storage.url(file_path),
                'original_name': original_name,
                'size': default_storage.size(file_path)
            })

        except Exception as e:
            logger.error(f"Upload completion failed: {e}")
            # Clean up failed multipart upload
            if upload_id:
                try:
                    s3_client = self._get_s3_client()
                    s3_client.abort_multipart_upload(
                        Bucket=settings.AWS_STORAGE_BUCKET_NAME,
                        Key=file_path,
                        UploadId=upload_id
                    )
                    logger.info(f"Aborted failed multipart upload {upload_id}")
                except Exception as cleanup_error:
                    logger.error(f"Failed to abort multipart upload: {cleanup_error}")
            return Response({'error': 'Upload completion failed'}, status=500)

    def _get_s3_client(self):
        """Get S3 client"""
        import boto3
        return boto3.client(
            "s3",
            region_name=settings.AWS_S3_REGION_NAME,
            aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
            aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
            endpoint_url=getattr(settings, 'AWS_S3_ENDPOINT_URL', None)
        )

    def _is_s3_storage(self) -> bool:
        """Check if using S3-compatible storage"""
        try:
            from storages.backends.s3boto3 import S3Boto3Storage
            from storages.backends.s3 import S3Storage
        except ImportError:
            return False
        return isinstance(default_storage, (S3Boto3Storage, S3Storage))
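Taken together, FastUploadView defines a three-step protocol: POST action=initiate with a chunk count, PUT the chunks directly to S3 using the returned presigned URLs, then POST action=complete with the collected ETags so the server can assemble and verify the object. A client-side sketch of the multipart path (num_chunks > 1), assuming the view is mounted at an illustrative URL and using the requests library; note that JSON object keys arrive as strings, so the presigned URLs are looked up by str(part_number):

    import requests

    API = "https://example.com/statezero/fast-upload/"   # illustrative mount point
    CHUNK = 8 * 1024 * 1024  # 8 MiB; S3 requires >= 5 MiB for every part but the last

    def fast_upload(filename: str, data: bytes) -> dict:
        chunks = [data[i:i + CHUNK] for i in range(0, len(data), CHUNK)]
        init = requests.post(API, data={
            "action": "initiate",
            "filename": filename,
            "file_size": len(data),
            "num_chunks": len(chunks),
        }).json()

        # PUT each part straight to S3; S3 returns the part's ETag in a header.
        parts = []
        for number, chunk in enumerate(chunks, start=1):
            resp = requests.put(init["upload_urls"][str(number)], data=chunk)
            parts.append({"PartNumber": number, "ETag": resp.headers["ETag"]})

        # Completion assembles the parts server-side and returns the file URL.
        return requests.post(API, json={
            "action": "complete",
            "file_path": init["file_path"],
            "upload_id": init["upload_id"],
            "parts": parts,
            "original_name": filename,
        }).json()
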
statezero/core/__init__.py
@@ -0,0 +1,34 @@
"""
statezero: A framework for model synchronization and event handling across different ORMs.
"""

from statezero.core.config import AppConfig, ModelConfig, Registry
from statezero.core.interfaces import (AbstractCustomQueryset,
                                       AbstractDataSerializer,
                                       AbstractEventEmitter,
                                       AbstractORMProvider, AbstractPermission,
                                       AbstractSchemaGenerator)
from statezero.core.types import ActionType, ORMField, ORMModel, RequestType

__all__ = [
    # Types
    "ActionType",
    "ORMField",
    "RequestType",
    "ORMModel",
    # Configuration
    "AppConfig",
    "ModelConfig",
    "Registry",
    "app_config",
    "global_registry",
    # Abstract Base Classes
    "AbstractCustomQueryset",
    "AbstractORMProvider",
    "AbstractDataSerializer",
    "AbstractSchemaGenerator",
    "AbstractEventEmitter",
    "AbstractPermission"
]

__version__ = "0.1.0"