statezero-0.1.0b1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- statezero/__init__.py +0 -0
- statezero/adaptors/__init__.py +0 -0
- statezero/adaptors/django/__init__.py +0 -0
- statezero/adaptors/django/apps.py +97 -0
- statezero/adaptors/django/config.py +99 -0
- statezero/adaptors/django/context_manager.py +12 -0
- statezero/adaptors/django/event_emitters.py +78 -0
- statezero/adaptors/django/exception_handler.py +98 -0
- statezero/adaptors/django/extensions/__init__.py +0 -0
- statezero/adaptors/django/extensions/custom_field_serializers/__init__.py +0 -0
- statezero/adaptors/django/extensions/custom_field_serializers/file_fields.py +141 -0
- statezero/adaptors/django/extensions/custom_field_serializers/money_field.py +75 -0
- statezero/adaptors/django/f_handler.py +312 -0
- statezero/adaptors/django/helpers.py +153 -0
- statezero/adaptors/django/middleware.py +10 -0
- statezero/adaptors/django/migrations/0001_initial.py +33 -0
- statezero/adaptors/django/migrations/0002_delete_modelviewsubscription.py +16 -0
- statezero/adaptors/django/migrations/__init__.py +0 -0
- statezero/adaptors/django/orm.py +915 -0
- statezero/adaptors/django/permissions.py +252 -0
- statezero/adaptors/django/query_optimizer.py +772 -0
- statezero/adaptors/django/schemas.py +324 -0
- statezero/adaptors/django/search_providers/__init__.py +0 -0
- statezero/adaptors/django/search_providers/basic_search.py +24 -0
- statezero/adaptors/django/search_providers/postgres_search.py +51 -0
- statezero/adaptors/django/serializers.py +554 -0
- statezero/adaptors/django/urls.py +14 -0
- statezero/adaptors/django/views.py +336 -0
- statezero/core/__init__.py +34 -0
- statezero/core/ast_parser.py +821 -0
- statezero/core/ast_validator.py +266 -0
- statezero/core/classes.py +167 -0
- statezero/core/config.py +263 -0
- statezero/core/context_storage.py +4 -0
- statezero/core/event_bus.py +175 -0
- statezero/core/event_emitters.py +60 -0
- statezero/core/exceptions.py +106 -0
- statezero/core/interfaces.py +492 -0
- statezero/core/process_request.py +184 -0
- statezero/core/types.py +63 -0
- statezero-0.1.0b1.dist-info/METADATA +252 -0
- statezero-0.1.0b1.dist-info/RECORD +45 -0
- statezero-0.1.0b1.dist-info/WHEEL +5 -0
- statezero-0.1.0b1.dist-info/licenses/license.md +117 -0
- statezero-0.1.0b1.dist-info/top_level.txt +1 -0
statezero/core/ast_parser.py
@@ -0,0 +1,821 @@
+from enum import Enum
+from typing import Any, Callable, Dict, List, Optional, Set, Type, Union, Tuple, Literal
+from collections import deque
+import networkx as nx
+
+
+from statezero.core.config import AppConfig, Registry
+from statezero.core.interfaces import AbstractDataSerializer, AbstractPermission, AbstractORMProvider
+from statezero.core.types import ActionType, ORMModel, RequestType
+
+
+class ResponseType(Enum):
+    INSTANCE = "instance"
+    QUERYSET = "queryset"
+    NUMBER = "number"
+    BOOLEAN = "boolean"
+    NONE = "none"
+
+class ASTParser:
+    """
+    Parses an abstract syntax tree (AST) representing an ORM operation.
+    Delegates each operation (create, update, delete, etc.) to a dedicated handler
+    and hardcodes the response type in the metadata based on the operation.
+    """
+    def __init__(
+        self,
+        engine: AbstractORMProvider,
+        serializer: AbstractDataSerializer,
+        model: Type,
+        config: AppConfig,
+        registry: Registry,
+        serializer_options: Optional[Dict[str, Any]] = None,
+        request: Optional[RequestType] = None,
+    ):
+        self.engine = engine
+        self.serializer = serializer
+        self.model = model
+        self.config = config
+        self.registry = registry
+        self.serializer_options = serializer_options or {}
+        self.request = request
+
+        # Process field selection if present
+        requested_fields = self.serializer_options.get("fields", [])
+
+        # Configure the serializer options
+        self.depth = int(self.serializer_options.get("depth", 0))
+
+        # If Process fields are provided, override the user supplied depth
+        if requested_fields:
+            self.depth = max((field.count('__') for field in requested_fields), default=0) + 1
+
+        # Get the raw field map
+        self.read_fields_map = self._get_operation_field_map(
+            requested_fields=requested_fields,
+            depth=self.depth,
+            operation_type='read'
+        )
+
+        # Create/update operations should use depth 0 for performance
+        self.create_fields_map = self._get_operation_field_map(
+            requested_fields=requested_fields,
+            depth=0, # Nested writes are not supported
+            operation_type='create'
+        )
+
+        self.update_fields_map = self._get_operation_field_map(
+            requested_fields=requested_fields,
+            depth=0, # Nested writes are not supported
+            operation_type='update'
+        )
+
+        # Add field maps to serializer options
+        self.serializer_options["read_fields_map"] = self.read_fields_map
+        self.serializer_options["create_fields_map"] = self.create_fields_map
+        self.serializer_options["update_fields_map"] = self.update_fields_map
+
+        # Lookup table mapping AST op types to handler methods.
+        self.handlers: Dict[str, Callable[[Dict[str, Any]], Dict[str, Any]]] = {
+            "create": self._handle_create,
+            "update": self._handle_update,
+            "delete": self._handle_delete,
+            "get": self._handle_get,
+            "get_or_create": self._handle_get_or_create,
+            "update_or_create": self._handle_update_or_create,
+            "first": self._handle_first,
+            "last": self._handle_last,
+            "exists": self._handle_exists,
+            "count": self._handle_aggregate,
+            "sum": self._handle_aggregate,
+            "avg": self._handle_aggregate,
+            "min": self._handle_aggregate,
+            "max": self._handle_aggregate,
+            "aggregate": self._handle_aggregate,
+            "update_instance": self._handle_update_instance,
+            "delete_instance": self._handle_delete_instance,
+        }
+        self.default_handler = self._handle_read
+
+    def _process_nested_field_strings(self, orm_provider: AbstractORMProvider, field_strings, available_fields_map):
+        """
+        Build a fields map from a list of dotted field strings like ['fk__m2m', 'field', 'fk__m2m__field'],
+        respecting the available fields for each model.
+
+        Args:
+            orm_provider: The ORM provider to use for model traversal
+            field_strings: List of field strings in the format 'relation__field' or 'field'
+            available_fields_map: Dict mapping model names to sets of available fields
+
+        Returns:
+            Dict[str, Set[str]]: Dictionary mapping model names to sets of field names
+        """
+        fields_map = {}
+        model_graph: nx.DiGraph = orm_provider.build_model_graph(self.model)
+
+        # Start with the root model
+        root_model_name = orm_provider.get_model_name(self.model)
+        fields_map[root_model_name] = set()
+
+        for field_string in field_strings:
+            parts = field_string.split('__')
+            current_model = self.model
+            current_model_name = root_model_name
+
+            # Process each part of the field string
+            for i, part in enumerate(parts):
+                # Check if this field is available for this model
+                if current_model_name in available_fields_map and part in available_fields_map[current_model_name]:
+                    # Add the current field to the current model's field set
+                    fields_map.setdefault(current_model_name, set()).add(part)
+
+                # If this is the last part, we might need to include all fields if it's a relation
+                if i == len(parts) - 1:
+                    # Find the field node in the graph to check if it's a relation
+                    field_nodes = [
+                        node for node in model_graph.successors(current_model_name)
+                        if model_graph.nodes[node].get("data") and
+                        model_graph.nodes[node].get("data").field_name == part
+                    ]
+
+                    if field_nodes:
+                        field_node = field_nodes[0]
+                        field_data = model_graph.nodes[field_node].get("data")
+
+                        # If this is a relation field, include all available fields of the related model
+                        if field_data and field_data.is_relation and field_data.related_model:
+                            related_model_name = field_data.related_model
+
+                            # Include all available fields for this related model
+                            if related_model_name in available_fields_map:
+                                fields_map.setdefault(related_model_name, set()).update(
+                                    available_fields_map[related_model_name]
+                                )
+                    break
+
+                # Otherwise, we need to traverse to the related model if allowed
+                # First, check if the relation field is available
+                if current_model_name not in available_fields_map or part not in available_fields_map[current_model_name]:
+                    # The relation field is not available, stop traversing
+                    break
+
+                # Find the field node in the graph
+                field_nodes = [
+                    node for node in model_graph.successors(current_model_name)
+                    if model_graph.nodes[node].get("data") and
+                    model_graph.nodes[node].get("data").field_name == part
+                ]
+
+                if not field_nodes:
+                    # Field not found, skip to next field string
+                    break
+
+                field_node = field_nodes[0]
+                field_data = model_graph.nodes[field_node].get("data")
+
+                # If this is a relation field, move to the related model
+                if field_data and field_data.is_relation and field_data.related_model:
+                    related_model = orm_provider.get_model_by_name(field_data.related_model)
+                    current_model = related_model
+                    current_model_name = field_data.related_model
+                else:
+                    # Not a relation field, stop traversing
+                    break
+
+        return fields_map
+
+    def _get_operation_field_map(self, requested_fields: Optional[Set[str]] = None, depth=0, operation_type: Literal["read", "create", "update"]='read') -> Dict[str, Set[str]]:
+        """
+        Build a fields map for a specific operation type.
+
+        Args:
+            requested_fields: Optional set of explicitly requested fields
+            depth: Maximum depth for related models to include
+            operation_type: Operation type ('read', 'create', 'update')
+
+        Returns:
+            Dict[str, Set[str]]: Fields map with model names as keys and sets of field names as values
+        """
+        # Build a fields map specific to this operation type
+        fields_map = self._get_depth_based_fields(
+            orm_provider=self.engine,
+            depth=depth,
+            operation_type=operation_type
+        )
+
+        if requested_fields:
+            fields_map = self._process_nested_field_strings(
+                orm_provider=self.engine,
+                field_strings=requested_fields,
+                available_fields_map=fields_map
+            )
+
+        return fields_map
+
+    def _has_operation_permission(self, model, operation_type):
+        """
+        Check if the current request has permission for the specified operation on the model.
+
+        Args:
+            model: Model to check permissions for
+            operation_type: The type of operation ('read', 'create', 'update', 'delete')
+
+        Returns:
+            Boolean indicating if permission is granted for the operation
+        """
+        try:
+            model_config = self.registry.get_config(model)
+            allowed_actions = set()
+
+            # Collect all allowed actions from all permissions
+            for permission_cls in model_config.permissions:
+                permission: AbstractPermission = permission_cls()
+                allowed_actions.update(permission.allowed_actions(self.request, model))
+
+            # Map operation types to ActionType enum values
+            operation_to_action = {
+                'read': ActionType.READ,
+                'create': ActionType.CREATE,
+                'update': ActionType.UPDATE,
+                'delete': ActionType.DELETE
+            }
+
+            # Check if the required action is in the set of allowed actions
+            required_action = operation_to_action.get(operation_type, ActionType.READ)
+            return required_action in allowed_actions
+        except (ValueError, KeyError):
+            # Model not registered or permissions not set up
+            return False # Default to denying access for security
+
+    def _get_depth_based_fields(self, orm_provider: AbstractORMProvider, depth=0, operation_type='read'):
+        """
+        Build a fields map by traversing the model graph up to the specified depth.
+        Uses operation-specific field permissions.
+
+        Args:
+            depth: Maximum depth to traverse in relationship graph
+            operation_type: Operation type for field permissions ('read', 'create', 'update')
+
+        Returns:
+            Dict[str, Set[str]]: Dictionary mapping model names to sets of field names
+        """
+        fields_map = {}
+        visited = set()
+        model_graph: nx.DiGraph = orm_provider.build_model_graph(self.model)
+
+        # Start BFS from the root model
+        queue = deque([(self.model, 0)])
+
+        while queue:
+            current_model, current_depth = queue.popleft()
+            model_name = orm_provider.get_model_name(current_model)
+
+            # Skip if we've already visited this model at this depth or lower
+            if (model_name, current_depth) in visited:
+                continue
+            visited.add((model_name, current_depth))
+
+            # Check if we have permission to read this model
+            if not self._has_operation_permission(current_model, operation_type=operation_type):
+                continue
+
+            # Get fields allowed for this operation type
+            allowed_fields = self._get_operation_fields(current_model, operation_type)
+
+            # Initialize fields set for this model
+            fields_map.setdefault(model_name, set())
+
+            # Collect all directly accessible fields from the model
+            for node in model_graph.successors(model_name):
+                # Each successor of the model node is a field node
+                field_data = model_graph.nodes[node].get("data")
+                if field_data:
+                    field_name = field_data.field_name
+                    # Add this field to the fields map if it's in allowed_fields
+                    if field_name in allowed_fields:
+                        fields_map[model_name].add(field_name)
+
+            # Stop traversing if we've reached max depth
+            if current_depth >= depth:
+                continue
+
+            # Now, traverse relation fields to add related models
+            for node in model_graph.successors(model_name):
+                field_data = model_graph.nodes[node].get("data")
+                if field_data and field_data.is_relation and field_data.related_model:
+                    field_name = field_data.field_name
+                    # Only traverse relations we have permission to access
+                    if field_name in allowed_fields:
+                        # Get the related model and add it to the queue
+                        related_model = orm_provider.get_model_by_name(
+                            field_data.related_model
+                        )
+                        queue.append((related_model, current_depth + 1))
+
+        return fields_map
+
+    def _get_operation_fields(self, model: ORMModel, operation_type: Literal["read", "create", "update"]):
+        """
+        Get the appropriate field set for a specific operation.
+
+        Args:
+            model: Model to get fields for
+            operation_type: The operation type ('read', 'create', 'update')
+
+        Returns:
+            Set of field names allowed for the operation
+        """
+        try:
+            model_config = self.registry.get_config(model)
+            all_fields = self.engine.get_fields(model)
+
+            # Initialize with no fields allowed
+            allowed_fields = set()
+
+            for permission_cls in model_config.permissions:
+                permission: AbstractPermission = permission_cls()
+
+                # Get the appropriate field set based on operation
+                if operation_type == 'read':
+                    fields: Union[Set[str], Literal["__all__"]] = permission.visible_fields(self.request, model)
+                elif operation_type == 'create':
+                    fields: Union[Set[str], Literal["__all__"]] = permission.create_fields(self.request, model)
+                elif operation_type == 'update':
+                    fields: Union[Set[str], Literal["__all__"]] = permission.editable_fields(self.request, model)
+                else:
+                    fields = set() # Default to no fields for unknown operations
+
+                # If any permission allows all fields
+                if fields == "__all__":
+                    return all_fields
+
+                # Add allowed fields from this permission
+                else: # Ensure we're not operating on the string "__all__"
+                    fields &= all_fields # Ensure fields actually exist
+                    allowed_fields |= fields
+
+            return allowed_fields
+
+        except (ValueError, KeyError):
+            # Model not registered or permissions not set up
+            return set() # Default to allowing no fields for security
+
+    def parse(self, ast: Dict[str, Any]) -> Dict[str, Any]:
+        """
+        Applies common query modifiers (related fetching, filtering,
+        ordering, field selection) then delegates the operation to a handler.
+        """
+        self._apply_related(ast)
+        self._apply_filter(ast)
+        self._apply_search(ast)
+        self._apply_exclude(ast)
+        self._apply_ordering(ast)
+        self._apply_field_selection(ast)
+
+        op_type = ast.get("type", "read")
+        handler = self.handlers.get(op_type, self.default_handler)
+        return handler(ast)
+
+    def _apply_related(self, ast: Dict[str, Any]) -> None:
+        if "selectRelated" in ast and isinstance(ast["selectRelated"], list):
+            self.engine.select_related(ast["selectRelated"])
+        if "prefetchRelated" in ast and isinstance(ast["prefetchRelated"], list):
+            self.engine.prefetch_related(ast["prefetchRelated"])
+
+    def _apply_filter(self, ast: Dict[str, Any]) -> None:
+        """Apply filter from AST to the queryset."""
+        if "filter" in ast and ast["filter"]:
+            self.engine.filter_node(ast["filter"])
+
+    def _apply_exclude(self, ast: Dict[str, Any]) -> None:
+        """Apply exclude from AST to the queryset."""
+        if "exclude" in ast and ast["exclude"]:
+            self.engine.exclude_node(ast["exclude"])
+
+    def _apply_ordering(self, ast: Dict[str, Any]) -> None:
+        if "orderBy" in ast:
+            self.engine.order_by(ast["orderBy"])
+
+    def _apply_field_selection(self, ast: Dict[str, Any]) -> None:
+        if "fields" in ast and isinstance(ast["fields"], list):
+            self.engine.select_fields(ast["fields"])
+
+    def _apply_search(self, ast: Dict[str, Any]) -> None:
+        """
+        If search properties are present at the top level of the AST,
+        apply the search using the adapter's search_node() method.
+
+        Expects the AST to have a top-level "search" key containing:
+          - searchQuery: the search term
+          - searchFields: an array of field names (which may be empty)
+
+        Uses the model's configuration (from the registry) for searchable fields,
+        and if the frontend provides searchFields (even an empty list), uses that value.
+        """
+        search_data = ast.get("search")
+        if not search_data:
+            return
+
+        search_query = search_data.get("searchQuery")
+        if not search_query:
+            return
+
+        # Load the model configuration from the registry.
+        model_config = self.registry.get_config(self.model)
+        config_search_fields = set(getattr(model_config, "searchable_fields", []))
+        if not config_search_fields:
+            return
+
+        # Use frontend-provided searchFields if available.
+        frontend_fields = search_data.get("searchFields")
+        if frontend_fields is not None:
+            final_search_fields = config_search_fields.intersection(set(frontend_fields))
+        else:
+            final_search_fields = config_search_fields
+
+        # Delegate to the ORM adapter's search_node() method.
+        self.engine.search_node(search_query, final_search_fields)
+
+    # --- Operation Handlers with Hard-Coded Response Types ---
+
+    def _handle_create(self, ast: Dict[str, Any]) -> Dict[str, Any]:
+        data = ast.get("data", {})
+        validated_data = self.serializer.deserialize(
+            model=self.model, data=data, partial=False, request=self.request, fields_map= self.create_fields_map
+        )
+        record = self.engine.create(validated_data, self.serializer, self.request, self.create_fields_map)
+        serialized = self.serializer.serialize(
+            record, self.model, many=False, depth= self.depth, fields_map= self.read_fields_map
+        )
+        return {
+            "data": serialized,
+            "metadata": {"created": True, "response_type": ResponseType.INSTANCE.value},
+        }
+
+    def _handle_update(self, ast: Dict[str, Any]) -> Dict[str, Any]:
+        data = ast.get("data", {})
+        validated_data = self.serializer.deserialize(
+            model=self.model,
+            data=data,
+            partial=True,
+            request=self.request,
+            fields_map=self.update_fields_map
+        )
+        ast["data"] = validated_data
+
+        # Retrieve permissions from the registry
+        permissions = self.registry.get_config(self.model).permissions
+
+        # Get the readable fields for this model using our existing method
+        readable_fields = self._get_operation_fields(self.model, 'read')
+
+        # Update records and get the count and affected instance IDs
+        updated_count, updated_instances = self.engine.update(
+            ast,
+            self.request,
+            permissions,
+            readable_fields=readable_fields # Pass readable fields to the update method
+        )
+
+        data = self.serializer.serialize(
+            updated_instances,
+            self.model,
+            many=True,
+            depth=0, # Always use depth=0 for updates
+            fields_map=self.read_fields_map
+        )
+
+        return {
+            "data": data,
+            "metadata": {
+                "updated": True,
+                "updated_count": updated_count,
+                "response_type": ResponseType.QUERYSET.value,
+            }
+        }
+
+    def _handle_delete(self, ast: Dict[str, Any]) -> Dict[str, Any]:
+        permissions = self.registry.get_config(self.model).permissions
+        deleted_count, rows_deleted = self.engine.delete(ast, self.request, permissions)
+        return {
+            "data": None,
+            "metadata": {
+                "deleted": True,
+                "deleted_count": deleted_count,
+                "rows_deleted": rows_deleted,
+                "response_type": ResponseType.NUMBER.value,
+            },
+        }
+
+    def _handle_update_instance(self, ast: Dict[str, Any]) -> Dict[str, Any]:
+        # Extract and deserialize the data.
+        raw_data = ast.get("data", {})
+        # Allow partial updates.
+        validated_data = self.serializer.deserialize(
+            model=self.model, data=raw_data, partial=True, request=self.request, fields_map= self.update_fields_map
+        )
+        # Replace raw data with validated data in the AST.
+        ast["data"] = validated_data
+
+        # Retrieve permissions from the self.registry.
+        permissions = self.registry.get_config(self.model).permissions
+
+        # Delegate to the engine's instance-based update method.
+        updated_instance = self.engine.update_instance(ast, self.request, permissions, self.serializer, fields_map=self.update_fields_map)
+
+        # Serialize the updated instance for the response.
+        serialized = self.serializer.serialize(
+            updated_instance, self.model, many=False, depth= self.depth, fields_map= self.read_fields_map
+        )
+        return {
+            "data": serialized,
+            "metadata": {"updated": True, "response_type": ResponseType.INSTANCE.value},
+        }
+
+    def _handle_delete_instance(self, ast: Dict[str, Any]) -> Dict[str, Any]:
+        """
+        Handles deletion of a single instance.
+        Typically, no additional data deserialization is needed beyond the filter,
+        so we simply verify that a filter is provided and then delegate to the engine.
+        """
+        filter_ast = ast.get("filter")
+        if not filter_ast:
+            raise ValueError("Filter is required for delete_instance operation")
+
+        # If needed, you could deserialize the filter here.
+        # For example, if your serializer has a method to process filter conditions,
+        # you could call it. Otherwise, assume the filter is valid.
+
+        # Retrieve permissions from the self.registry.
+        permissions = self.registry.get_config(self.model).permissions
+
+        # Delegate to the engine's instance-based delete method.
+        deleted_count = self.engine.delete_instance(ast, self.request, permissions)
+
+        return {
+            "data": deleted_count,
+            "metadata": {"deleted": True, "response_type": ResponseType.BOOLEAN.value},
+        }
+
+    def _handle_get(self, ast: Dict[str, Any]) -> Dict[str, Any]:
+        # Retrieve permissions from the registry
+        permissions = self.registry.get_config(self.model).permissions
+        record = self.engine.get(ast, self.request, permissions)
+        serialized = self.serializer.serialize(
+            record, self.model, many=False, depth= self.depth, fields_map= self.read_fields_map
+        )
+        return {
+            "data": serialized,
+            "metadata": {"get": True, "response_type": ResponseType.INSTANCE.value},
+        }
+
+    def _handle_get_or_create(self, ast: Dict[str, Any]) -> Dict[str, Any]:
+        # Validate and split lookup/defaults (without extra wrapping)
+        validated_lookup, validated_defaults = self._validate_and_split_lookup_defaults(ast, partial=True)
+
+        # Optionally update the AST if needed:
+        ast["lookup"] = validated_lookup
+        ast["defaults"] = validated_defaults
+
+        # Retrieve permissions from configuration
+        permissions = self.registry.get_config(self.model).permissions
+
+        # Call the ORM layer and pass the serializer and request/permissions
+        record, created = self.engine.get_or_create(
+            {"lookup": ast.get("lookup", {}), "defaults": ast.get("defaults", {})},
+            serializer=self.serializer,
+            req=self.request,
+            permissions=permissions,
+            create_fields_map=self.create_fields_map
+        )
+
+        serialized = self.serializer.serialize(
+            record, self.model, many=False, depth= self.depth, fields_map= self.read_fields_map
+        )
+        return {
+            "data": serialized,
+            "metadata": {
+                "created": created,
+                "response_type": ResponseType.INSTANCE.value,
+            },
+        }
+
+    def _handle_update_or_create(self, ast: Dict[str, Any]) -> Dict[str, Any]:
+        # Validate and split lookup/defaults.
+        validated_lookup, validated_defaults = self._validate_and_split_lookup_defaults(ast, partial=True)
+
+        # Optionally update the AST if needed:
+        ast["lookup"] = validated_lookup
+        ast["defaults"] = validated_defaults
+
+        # Retrieve permissions from configuration.
+        permissions = self.registry.get_config(self.model).permissions
+
+        # Call the ORM update_or_create method, passing the serializer, request, and permissions.
+        record, created = self.engine.update_or_create(
+            {"lookup": ast.get("lookup", {}), "defaults": ast.get("defaults", {})},
+            req=self.request,
+            serializer=self.serializer,
+            permissions=permissions,
+            update_fields_map= self.update_fields_map,
+            create_fields_map= self.create_fields_map
+        )
+
+        serialized = self.serializer.serialize(
+            record, self.model, many=False, depth= self.depth, fields_map= self.read_fields_map
+        )
+        return {
+            "data": serialized,
+            "metadata": {
+                "created": created,
+                "response_type": ResponseType.INSTANCE.value,
+            },
+        }
+
+    def _handle_first(self, ast: Dict[str, Any]) -> Dict[str, Any]:
+        record = self.engine.first()
+        serialized = self.serializer.serialize(
+            record, self.model, many=False, depth= self.depth, fields_map= self.read_fields_map
+        )
+        return {
+            "data": serialized,
+            "metadata": {"first": True, "response_type": ResponseType.INSTANCE.value},
+        }
+
+    def _handle_last(self, ast: Dict[str, Any]) -> Dict[str, Any]:
+        record = self.engine.last()
+        serialized = self.serializer.serialize(
+            record, self.model, many=False, depth= self.depth, fields_map= self.read_fields_map
+        )
+        return {
+            "data": serialized,
+            "metadata": {"last": True, "response_type": ResponseType.INSTANCE.value},
+        }
+
+    def _handle_exists(self, ast: Dict[str, Any]) -> Dict[str, Any]:
+        exists_flag = self.engine.exists()
+        return {
+            "data": exists_flag,
+            "metadata": {"exists": exists_flag, "response_type": ResponseType.NUMBER.value},
+        }
+
+    def _handle_aggregate(self, ast: Dict[str, Any]) -> Dict[str, Any]:
+        op_type = ast.get("type")
+        if op_type == "aggregate":
+            aggs = ast.get("aggregates", {})
+            agg_list = []
+            for func, field in aggs.items():
+                agg_list.append(
+                    {"function": func, "field": field, "alias": f"{field}_{func}"}
+                )
+            result = self.engine.aggregate(agg_list)
+            return {
+                "data": result,
+                "metadata": {
+                    "aggregate": True,
+                    "response_type": ResponseType.NUMBER.value,
+                },
+            }
+        else:
+            field = ast.get("field")
+            if not field:
+                raise ValueError("Field must be provided for aggregate operations.")
+            if op_type == "count":
+                result_val = self.engine.count(field)
+                return {
+                    "data": result_val,
+                    "metadata": {
+                        "count": True,
+                        "response_type": ResponseType.NUMBER.value,
+                    },
+                }
+            elif op_type == "sum":
+                result_val = self.engine.sum(field)
+                return {
+                    "data": result_val,
+                    "metadata": {
+                        "sum": True,
+                        "response_type": ResponseType.NUMBER.value,
+                    },
+                }
+            elif op_type == "avg":
+                result_val = self.engine.avg(field)
+                return {
+                    "data": result_val,
+                    "metadata": {
+                        "avg": True,
+                        "response_type": ResponseType.NUMBER.value,
+                    },
+                }
+            elif op_type == "min":
+                result_val = self.engine.min(field)
+                return {
+                    "data": result_val,
+                    "metadata": {
+                        "min": True,
+                        "response_type": ResponseType.NUMBER.value,
+                    },
+                }
+            elif op_type == "max":
+                result_val = self.engine.max(field)
+                return {
+                    "data": result_val,
+                    "metadata": {
+                        "max": True,
+                        "response_type": ResponseType.NUMBER.value,
+                    },
+                }
+
+    def _handle_read(self, ast: Dict[str, Any]) -> Dict[str, Any]:
+        offset_raw = self.serializer_options.get("offset", 0)
+        limit_raw = self.serializer_options.get("limit", self.config.default_limit)
+        offset_val = int(offset_raw) if offset_raw is not None else None
+        limit_val = int(limit_raw) if limit_raw is not None else None
+
+        # Retrieve permissions from configuration
+        permissions = self.registry.get_config(self.model).permissions
+
+        # Fetch list with bulk permission checks
+        rows = self.engine.fetch_list(
+            offset=offset_val,
+            limit=limit_val,
+            req=self.request,
+            permissions=permissions,
+        )
+
+        serialized = self.serializer.serialize(
+            rows, self.model, many=True, depth= self.depth, fields_map= self.read_fields_map
+        )
+        return {
+            "data": serialized,
+            "metadata": {"read": True, "response_type": ResponseType.QUERYSET.value},
+        }
+
+    # --- Helper Methods ---
+
+    def _validate_and_split_lookup_defaults(self, ast: Dict[str, Any], partial: bool = False) -> Tuple[Dict[str, str]]:
+        """
+        Validates the lookups and the defaults separately, using appropriate field maps for each.
+        Lookup uses read_fields_map, defaults uses create_fields_map.
+        """
+        raw_lookup = ast.get("lookup", {})
+        raw_defaults = ast.get("defaults", {})
+
+        # Validate lookup with read_fields_map (for filtering)
+        validated_lookup = self.serializer.deserialize(
+            model=self.model,
+            data=raw_lookup,
+            partial=partial,
+            request=self.request,
+            fields_map=self.read_fields_map
+        )
+
+        # Validate defaults with create_fields_map (for creation)
+        validated_defaults = self.serializer.deserialize(
+            model=self.model,
+            data=raw_defaults,
+            partial=partial,
+            request=self.request,
+            fields_map=self.create_fields_map
+        )
+
+        return validated_lookup, validated_defaults
+
+    # --- Static Methods for Operation Extraction ---
+
+    @staticmethod
+    def _extract_all_operations(ast_node: Dict[str, Any]) -> Set[str]:
+        ops: Set[str] = set()
+        if "type" in ast_node:
+            ops.add(ast_node["type"])
+        for value in ast_node.values():
+            if isinstance(value, dict):
+                ops |= ASTParser._extract_all_operations(value)
+            elif isinstance(value, list):
+                for item in value:
+                    if isinstance(item, dict):
+                        ops |= ASTParser._extract_all_operations(item)
+        return ops
+
+    @staticmethod
+    def get_requested_action_types(ast: Dict[str, Any]) -> Set[ActionType]:
+        all_ops = ASTParser._extract_all_operations(ast)
+        OPERATION_MAPPING = {
+            "create": ActionType.CREATE,
+            "update": ActionType.UPDATE,
+            "update_or_create": ActionType.UPDATE,
+            "delete": ActionType.DELETE,
+            "get": ActionType.READ,
+            "get_or_create": ActionType.READ,
+            "first": ActionType.READ,
+            "last": ActionType.READ,
+            "read": ActionType.READ,
+            "exists": ActionType.READ,
+            "count": ActionType.READ,
+            "sum": ActionType.READ,
+            "avg": ActionType.READ,
+            "min": ActionType.READ,
+            "max": ActionType.READ,
+            "aggregate": ActionType.READ,
+        }
+        return {OPERATION_MAPPING.get(op, ActionType.READ) for op in all_ops}