statezero 0.1.0b3__py3-none-any.whl → 0.1.0b5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of statezero might be problematic.
- statezero/adaptors/django/orm.py +224 -174
- statezero/adaptors/django/serializers.py +45 -26
- statezero/core/ast_parser.py +315 -175
- statezero/core/hook_checks.py +86 -0
- statezero/core/interfaces.py +193 -69
- statezero/core/process_request.py +1 -1
- {statezero-0.1.0b3.dist-info → statezero-0.1.0b5.dist-info}/METADATA +1 -1
- {statezero-0.1.0b3.dist-info → statezero-0.1.0b5.dist-info}/RECORD +11 -10
- {statezero-0.1.0b3.dist-info → statezero-0.1.0b5.dist-info}/WHEEL +0 -0
- {statezero-0.1.0b3.dist-info → statezero-0.1.0b5.dist-info}/licenses/license.md +0 -0
- {statezero-0.1.0b3.dist-info → statezero-0.1.0b5.dist-info}/top_level.txt +0 -0
statezero/core/ast_parser.py
CHANGED
@@ -5,7 +5,11 @@ import networkx as nx
 
 
 from statezero.core.config import AppConfig, Registry
-from statezero.core.interfaces import
+from statezero.core.interfaces import (
+    AbstractDataSerializer,
+    AbstractPermission,
+    AbstractORMProvider,
+)
 from statezero.core.types import ActionType, ORMModel, RequestType
 
 
@@ -16,12 +20,14 @@ class ResponseType(Enum):
     BOOLEAN = "boolean"
     NONE = "none"
 
+
 class ASTParser:
     """
     Parses an abstract syntax tree (AST) representing an ORM operation.
     Delegates each operation (create, update, delete, etc.) to a dedicated handler
     and hardcodes the response type in the metadata based on the operation.
     """
+
     def __init__(
         self,
         engine: AbstractORMProvider,
@@ -29,6 +35,7 @@ class ASTParser:
         model: Type,
         config: AppConfig,
         registry: Registry,
+        base_queryset: Any,  # ADD: Base queryset to manage state
         serializer_options: Optional[Dict[str, Any]] = None,
         request: Optional[RequestType] = None,
     ):
@@ -37,37 +44,38 @@ class ASTParser:
         self.model = model
         self.config = config
         self.registry = registry
+        self.current_queryset = base_queryset  # ADD: Track current queryset state
         self.serializer_options = serializer_options or {}
         self.request = request
 
         # Process field selection if present
         requested_fields = self.serializer_options.get("fields", [])
-
+
         # Configure the serializer options
         self.depth = int(self.serializer_options.get("depth", 0))
 
         # If Process fields are provided, override the user supplied depth
         if requested_fields:
-            self.depth =
-
+            self.depth = (
+                max((field.count("__") for field in requested_fields), default=0) + 1
+            )
+
         # Get the raw field map
         self.read_fields_map = self._get_operation_field_map(
-            requested_fields=requested_fields,
-            depth=self.depth,
-            operation_type='read'
+            requested_fields=requested_fields, depth=self.depth, operation_type="read"
         )
 
         # Create/update operations should use depth 0 for performance
         self.create_fields_map = self._get_operation_field_map(
-            requested_fields=requested_fields,
+            requested_fields=requested_fields,
             depth=0,  # Nested writes are not supported
-            operation_type=
+            operation_type="create",
         )
 
         self.update_fields_map = self._get_operation_field_map(
-            requested_fields=requested_fields,
+            requested_fields=requested_fields,
             depth=0,  # Nested writes are not supported
-            operation_type=
+            operation_type="update",
         )
 
         # Add field maps to serializer options
@@ -97,149 +105,168 @@ class ASTParser:
         }
         self.default_handler = self._handle_read
 
-    def _process_nested_field_strings(
+    def _process_nested_field_strings(
+        self, orm_provider: AbstractORMProvider, field_strings, available_fields_map
+    ):
         """
         Build a fields map from a list of dotted field strings like ['fk__m2m', 'field', 'fk__m2m__field'],
         respecting the available fields for each model.
-
+
         Args:
             orm_provider: The ORM provider to use for model traversal
             field_strings: List of field strings in the format 'relation__field' or 'field'
             available_fields_map: Dict mapping model names to sets of available fields
-
+
         Returns:
             Dict[str, Set[str]]: Dictionary mapping model names to sets of field names
         """
         fields_map = {}
         model_graph: nx.DiGraph = orm_provider.build_model_graph(self.model)
-
+
         # Start with the root model
         root_model_name = orm_provider.get_model_name(self.model)
         fields_map[root_model_name] = set()
-
+
         for field_string in field_strings:
-            parts = field_string.split(
+            parts = field_string.split("__")
             current_model = self.model
             current_model_name = root_model_name
-
+
             # Process each part of the field string
             for i, part in enumerate(parts):
                 # Check if this field is available for this model
-                if
+                if (
+                    current_model_name in available_fields_map
+                    and part in available_fields_map[current_model_name]
+                ):
                     # Add the current field to the current model's field set
                     fields_map.setdefault(current_model_name, set()).add(part)
-
+
                     # If this is the last part, we might need to include all fields if it's a relation
                     if i == len(parts) - 1:
                         # Find the field node in the graph to check if it's a relation
                         field_nodes = [
-                            node
-
-                            model_graph.nodes[node].get("data")
+                            node
+                            for node in model_graph.successors(current_model_name)
+                            if model_graph.nodes[node].get("data")
+                            and model_graph.nodes[node].get("data").field_name == part
                         ]
-
+
                         if field_nodes:
                             field_node = field_nodes[0]
                             field_data = model_graph.nodes[field_node].get("data")
-
+
                             # If this is a relation field, include all available fields of the related model
-                            if
+                            if (
+                                field_data
+                                and field_data.is_relation
+                                and field_data.related_model
+                            ):
                                 related_model_name = field_data.related_model
-
+
                                 # Include all available fields for this related model
                                 if related_model_name in available_fields_map:
                                     fields_map.setdefault(related_model_name, set()).update(
                                         available_fields_map[related_model_name]
                                     )
                         break
-
+
                 # Otherwise, we need to traverse to the related model if allowed
                 # First, check if the relation field is available
-                if
+                if (
+                    current_model_name not in available_fields_map
+                    or part not in available_fields_map[current_model_name]
+                ):
                     # The relation field is not available, stop traversing
                     break
-
+
                 # Find the field node in the graph
                 field_nodes = [
-                    node
-
-                    model_graph.nodes[node].get("data")
+                    node
+                    for node in model_graph.successors(current_model_name)
+                    if model_graph.nodes[node].get("data")
+                    and model_graph.nodes[node].get("data").field_name == part
                 ]
-
+
                 if not field_nodes:
                     # Field not found, skip to next field string
                     break
-
+
                 field_node = field_nodes[0]
                 field_data = model_graph.nodes[field_node].get("data")
-
+
                 # If this is a relation field, move to the related model
                 if field_data and field_data.is_relation and field_data.related_model:
-                    related_model = orm_provider.get_model_by_name(
+                    related_model = orm_provider.get_model_by_name(
+                        field_data.related_model
+                    )
                     current_model = related_model
                     current_model_name = field_data.related_model
                 else:
                     # Not a relation field, stop traversing
                     break
-
+
         return fields_map
 
-    def _get_operation_field_map(
+    def _get_operation_field_map(
+        self,
+        requested_fields: Optional[Set[str]] = None,
+        depth=0,
+        operation_type: Literal["read", "create", "update"] = "read",
+    ) -> Dict[str, Set[str]]:
         """
         Build a fields map for a specific operation type.
-
+
         Args:
             requested_fields: Optional set of explicitly requested fields
             depth: Maximum depth for related models to include
             operation_type: Operation type ('read', 'create', 'update')
-
+
         Returns:
             Dict[str, Set[str]]: Fields map with model names as keys and sets of field names as values
         """
         # Build a fields map specific to this operation type
         fields_map = self._get_depth_based_fields(
-            orm_provider=self.engine,
-            depth=depth,
-            operation_type=operation_type
+            orm_provider=self.engine, depth=depth, operation_type=operation_type
         )
 
         if requested_fields:
             fields_map = self._process_nested_field_strings(
                 orm_provider=self.engine,
                 field_strings=requested_fields,
-                available_fields_map=fields_map
+                available_fields_map=fields_map,
             )
-
+
         return fields_map
-
+
     def _has_operation_permission(self, model, operation_type):
         """
         Check if the current request has permission for the specified operation on the model.
-
+
         Args:
             model: Model to check permissions for
             operation_type: The type of operation ('read', 'create', 'update', 'delete')
-
+
         Returns:
             Boolean indicating if permission is granted for the operation
         """
         try:
             model_config = self.registry.get_config(model)
             allowed_actions = set()
-
+
             # Collect all allowed actions from all permissions
             for permission_cls in model_config.permissions:
                 permission: AbstractPermission = permission_cls()
                 allowed_actions.update(permission.allowed_actions(self.request, model))
-
+
             # Map operation types to ActionType enum values
             operation_to_action = {
-
-
-
-
+                "read": ActionType.READ,
+                "create": ActionType.CREATE,
+                "update": ActionType.UPDATE,
+                "delete": ActionType.DELETE,
             }
-
+
             # Check if the required action is in the set of allowed actions
             required_action = operation_to_action.get(operation_type, ActionType.READ)
             return required_action in allowed_actions
@@ -247,44 +274,48 @@ class ASTParser:
             # Model not registered or permissions not set up
             return False  # Default to denying access for security
 
-    def _get_depth_based_fields(
+    def _get_depth_based_fields(
+        self, orm_provider: AbstractORMProvider, depth=0, operation_type="read"
+    ):
         """
         Build a fields map by traversing the model graph up to the specified depth.
         Uses operation-specific field permissions.
-
+
         Args:
             depth: Maximum depth to traverse in relationship graph
             operation_type: Operation type for field permissions ('read', 'create', 'update')
-
+
         Returns:
             Dict[str, Set[str]]: Dictionary mapping model names to sets of field names
         """
         fields_map = {}
         visited = set()
         model_graph: nx.DiGraph = orm_provider.build_model_graph(self.model)
-
+
         # Start BFS from the root model
         queue = deque([(self.model, 0)])
-
+
         while queue:
             current_model, current_depth = queue.popleft()
             model_name = orm_provider.get_model_name(current_model)
-
+
             # Skip if we've already visited this model at this depth or lower
             if (model_name, current_depth) in visited:
                 continue
             visited.add((model_name, current_depth))
-
+
             # Check if we have permission to read this model
-            if not self._has_operation_permission(
+            if not self._has_operation_permission(
+                current_model, operation_type=operation_type
+            ):
                 continue
-
+
             # Get fields allowed for this operation type
             allowed_fields = self._get_operation_fields(current_model, operation_type)
-
+
             # Initialize fields set for this model
             fields_map.setdefault(model_name, set())
-
+
             # Collect all directly accessible fields from the model
             for node in model_graph.successors(model_name):
                 # Each successor of the model node is a field node
@@ -294,11 +325,11 @@ class ASTParser:
                 # Add this field to the fields map if it's in allowed_fields
                 if field_name in allowed_fields:
                     fields_map[model_name].add(field_name)
-
+
             # Stop traversing if we've reached max depth
             if current_depth >= depth:
                 continue
-
+
             # Now, traverse relation fields to add related models
             for node in model_graph.successors(model_name):
                 field_data = model_graph.nodes[node].get("data")
@@ -311,51 +342,59 @@ class ASTParser:
                         field_data.related_model
                     )
                     queue.append((related_model, current_depth + 1))
-
+
         return fields_map
 
-    def _get_operation_fields(
+    def _get_operation_fields(
+        self, model: ORMModel, operation_type: Literal["read", "create", "update"]
+    ):
         """
         Get the appropriate field set for a specific operation.
-
+
         Args:
             model: Model to get fields for
             operation_type: The operation type ('read', 'create', 'update')
-
+
         Returns:
             Set of field names allowed for the operation
         """
         try:
             model_config = self.registry.get_config(model)
             all_fields = self.engine.get_fields(model)
-
+
             # Initialize with no fields allowed
             allowed_fields = set()
-
+
             for permission_cls in model_config.permissions:
                 permission: AbstractPermission = permission_cls()
-
+
                 # Get the appropriate field set based on operation
-                if operation_type ==
-                    fields: Union[Set[str], Literal["__all__"]] =
-
-
-                elif operation_type ==
-                    fields: Union[Set[str], Literal["__all__"]] =
+                if operation_type == "read":
+                    fields: Union[Set[str], Literal["__all__"]] = (
+                        permission.visible_fields(self.request, model)
+                    )
+                elif operation_type == "create":
+                    fields: Union[Set[str], Literal["__all__"]] = (
+                        permission.create_fields(self.request, model)
+                    )
+                elif operation_type == "update":
+                    fields: Union[Set[str], Literal["__all__"]] = (
+                        permission.editable_fields(self.request, model)
+                    )
                 else:
                     fields = set()  # Default to no fields for unknown operations
-
+
                 # If any permission allows all fields
                 if fields == "__all__":
                     return all_fields
-
+
                 # Add allowed fields from this permission
                 else:  # Ensure we're not operating on the string "__all__"
                     fields &= all_fields  # Ensure fields actually exist
                     allowed_fields |= fields
-
+
             return allowed_fields
-
+
         except (ValueError, KeyError):
             # Model not registered or permissions not set up
             return set()  # Default to allowing no fields for security
@@ -377,32 +416,47 @@ class ASTParser:
         return handler(ast)
 
     def _apply_related(self, ast: Dict[str, Any]) -> None:
+        """UPDATED: Apply select_related and prefetch_related, updating current queryset."""
         if "selectRelated" in ast and isinstance(ast["selectRelated"], list):
-            self.engine.select_related(
+            self.current_queryset = self.engine.select_related(
+                self.current_queryset, ast["selectRelated"]
+            )
         if "prefetchRelated" in ast and isinstance(ast["prefetchRelated"], list):
-            self.engine.prefetch_related(
+            self.current_queryset = self.engine.prefetch_related(
+                self.current_queryset, ast["prefetchRelated"]
+            )
 
     def _apply_filter(self, ast: Dict[str, Any]) -> None:
-        """Apply filter from AST to the queryset."""
+        """UPDATED: Apply filter from AST to the queryset, updating current queryset."""
         if "filter" in ast and ast["filter"]:
-            self.engine.filter_node(
+            self.current_queryset = self.engine.filter_node(
+                self.current_queryset, ast["filter"]
+            )
 
     def _apply_exclude(self, ast: Dict[str, Any]) -> None:
-        """Apply exclude from AST to the queryset."""
+        """UPDATED: Apply exclude from AST to the queryset, updating current queryset."""
         if "exclude" in ast and ast["exclude"]:
-            self.engine.exclude_node(
+            self.current_queryset = self.engine.exclude_node(
+                self.current_queryset, ast["exclude"]
+            )
 
     def _apply_ordering(self, ast: Dict[str, Any]) -> None:
+        """UPDATED: Apply ordering, updating current queryset."""
         if "orderBy" in ast:
-            self.engine.order_by(
+            self.current_queryset = self.engine.order_by(
+                self.current_queryset, ast["orderBy"]
+            )
 
     def _apply_field_selection(self, ast: Dict[str, Any]) -> None:
+        """UPDATED: Apply field selection, updating current queryset."""
         if "fields" in ast and isinstance(ast["fields"], list):
-            self.engine.select_fields(
+            self.current_queryset = self.engine.select_fields(
+                self.current_queryset, ast["fields"]
+            )
 
     def _apply_search(self, ast: Dict[str, Any]) -> None:
         """
-        If search properties are present at the top level of the AST,
+        UPDATED: If search properties are present at the top level of the AST,
         apply the search using the adapter's search_node() method.
 
         Expects the AST to have a top-level "search" key containing:
@@ -429,23 +483,42 @@ class ASTParser:
         # Use frontend-provided searchFields if available.
         frontend_fields = search_data.get("searchFields")
         if frontend_fields is not None:
-            final_search_fields = config_search_fields.intersection(
+            final_search_fields = config_search_fields.intersection(
+                set(frontend_fields)
+            )
         else:
             final_search_fields = config_search_fields
 
-        # Delegate to the ORM adapter's search_node() method.
-        self.engine.search_node(
+        # UPDATED: Delegate to the ORM adapter's search_node() method with queryset.
+        self.current_queryset = self.engine.search_node(
+            self.current_queryset, search_query, final_search_fields
+        )
 
     # --- Operation Handlers with Hard-Coded Response Types ---
 
     def _handle_create(self, ast: Dict[str, Any]) -> Dict[str, Any]:
+        """UPDATED: Pass model explicitly to create method."""
         data = ast.get("data", {})
         validated_data = self.serializer.deserialize(
-            model=self.model,
+            model=self.model,
+            data=data,
+            partial=False,
+            request=self.request,
+            fields_map=self.create_fields_map,
+        )
+        record = self.engine.create(
+            self.model,
+            validated_data,
+            self.serializer,
+            self.request,
+            self.create_fields_map,
         )
-        record = self.engine.create(validated_data, self.serializer, self.request, self.create_fields_map)
         serialized = self.serializer.serialize(
-            record,
+            record,
+            self.model,
+            many=False,
+            depth=self.depth,
+            fields_map=self.read_fields_map,
         )
         return {
             "data": serialized,
@@ -453,50 +526,55 @@ class ASTParser:
         }
 
     def _handle_update(self, ast: Dict[str, Any]) -> Dict[str, Any]:
+        """UPDATED: Pass current queryset to update method."""
         data = ast.get("data", {})
         validated_data = self.serializer.deserialize(
-            model=self.model,
-            data=data,
-            partial=True,
-            request=self.request,
-            fields_map=self.update_fields_map
+            model=self.model,
+            data=data,
+            partial=True,
+            request=self.request,
+            fields_map=self.update_fields_map,
         )
         ast["data"] = validated_data
-
+
         # Retrieve permissions from the registry
         permissions = self.registry.get_config(self.model).permissions
-
+
         # Get the readable fields for this model using our existing method
-        readable_fields = self._get_operation_fields(self.model,
-
-        # Update records and get the count and affected instance IDs
+        readable_fields = self._get_operation_fields(self.model, "read")
+
+        # UPDATED: Update records and get the count and affected instance IDs
         updated_count, updated_instances = self.engine.update(
-
-
+            self.current_queryset,  # Pass current queryset
+            ast,
+            self.request,
             permissions,
-            readable_fields=readable_fields  # Pass readable fields to the update method
+            readable_fields=readable_fields,  # Pass readable fields to the update method
         )
-
+
         data = self.serializer.serialize(
-            updated_instances,
-            self.model,
-            many=True,
+            updated_instances,
+            self.model,
+            many=True,
             depth=0,  # Always use depth=0 for updates
-            fields_map=self.read_fields_map
+            fields_map=self.read_fields_map,
         )
-
+
         return {
-
-
-
-
-
-
-
+            "data": data,
+            "metadata": {
+                "updated": True,
+                "updated_count": updated_count,
+                "response_type": ResponseType.QUERYSET.value,
+            },
+        }
 
     def _handle_delete(self, ast: Dict[str, Any]) -> Dict[str, Any]:
+        """UPDATED: Pass current queryset to delete method."""
         permissions = self.registry.get_config(self.model).permissions
-        deleted_count, rows_deleted = self.engine.delete(
+        deleted_count, rows_deleted = self.engine.delete(
+            self.current_queryset, ast, self.request, permissions
+        )
         return {
             "data": None,
             "metadata": {
@@ -508,11 +586,16 @@ class ASTParser:
         }
 
     def _handle_update_instance(self, ast: Dict[str, Any]) -> Dict[str, Any]:
+        """UPDATED: Pass model explicitly to update_instance method."""
         # Extract and deserialize the data.
         raw_data = ast.get("data", {})
         # Allow partial updates.
         validated_data = self.serializer.deserialize(
-            model=self.model,
+            model=self.model,
+            data=raw_data,
+            partial=True,
+            request=self.request,
+            fields_map=self.update_fields_map,
         )
         # Replace raw data with validated data in the AST.
         ast["data"] = validated_data
@@ -520,12 +603,23 @@ class ASTParser:
         # Retrieve permissions from the self.registry.
         permissions = self.registry.get_config(self.model).permissions
 
-        # Delegate to the engine's instance-based update method.
-        updated_instance = self.engine.update_instance(
+        # UPDATED: Delegate to the engine's instance-based update method.
+        updated_instance = self.engine.update_instance(
+            self.model,
+            ast,
+            self.request,
+            permissions,
+            self.serializer,
+            fields_map=self.update_fields_map,
+        )
 
         # Serialize the updated instance for the response.
         serialized = self.serializer.serialize(
-            updated_instance,
+            updated_instance,
+            self.model,
+            many=False,
+            depth=self.depth,
+            fields_map=self.read_fields_map,
         )
         return {
             "data": serialized,
@@ -534,7 +628,7 @@ class ASTParser:
 
     def _handle_delete_instance(self, ast: Dict[str, Any]) -> Dict[str, Any]:
         """
-        Handles deletion of a single instance.
+        UPDATED: Handles deletion of a single instance.
         Typically, no additional data deserialization is needed beyond the filter,
         so we simply verify that a filter is provided and then delegate to the engine.
         """
@@ -549,8 +643,10 @@ class ASTParser:
         # Retrieve permissions from the self.registry.
         permissions = self.registry.get_config(self.model).permissions
 
-        # Delegate to the engine's instance-based delete method.
-        deleted_count = self.engine.delete_instance(
+        # UPDATED: Delegate to the engine's instance-based delete method.
+        deleted_count = self.engine.delete_instance(
+            self.model, ast, self.request, permissions
+        )
 
         return {
             "data": deleted_count,
@@ -558,11 +654,16 @@ class ASTParser:
         }
 
     def _handle_get(self, ast: Dict[str, Any]) -> Dict[str, Any]:
+        """UPDATED: Pass current queryset to get method."""
         # Retrieve permissions from the registry
         permissions = self.registry.get_config(self.model).permissions
-        record = self.engine.get(ast, self.request, permissions)
+        record = self.engine.get(self.current_queryset, ast, self.request, permissions)
         serialized = self.serializer.serialize(
-            record,
+            record,
+            self.model,
+            many=False,
+            depth=self.depth,
+            fields_map=self.read_fields_map,
         )
         return {
             "data": serialized,
@@ -570,8 +671,11 @@ class ASTParser:
         }
 
     def _handle_get_or_create(self, ast: Dict[str, Any]) -> Dict[str, Any]:
+        """UPDATED: Pass current queryset to get_or_create method."""
         # Validate and split lookup/defaults (without extra wrapping)
-        validated_lookup, validated_defaults = self._validate_and_split_lookup_defaults(
+        validated_lookup, validated_defaults = self._validate_and_split_lookup_defaults(
+            ast, partial=True
+        )
 
         # Optionally update the AST if needed:
         ast["lookup"] = validated_lookup
@@ -580,17 +684,22 @@ class ASTParser:
         # Retrieve permissions from configuration
         permissions = self.registry.get_config(self.model).permissions
 
-        # Call the ORM layer and pass the serializer and request/permissions
+        # UPDATED: Call the ORM layer and pass the serializer and request/permissions
         record, created = self.engine.get_or_create(
+            self.current_queryset,  # Pass current queryset
             {"lookup": ast.get("lookup", {}), "defaults": ast.get("defaults", {})},
             serializer=self.serializer,
             req=self.request,
             permissions=permissions,
-            create_fields_map=self.create_fields_map
+            create_fields_map=self.create_fields_map,
         )
 
         serialized = self.serializer.serialize(
-            record,
+            record,
+            self.model,
+            many=False,
+            depth=self.depth,
+            fields_map=self.read_fields_map,
         )
         return {
             "data": serialized,
@@ -601,8 +710,11 @@ class ASTParser:
         }
 
     def _handle_update_or_create(self, ast: Dict[str, Any]) -> Dict[str, Any]:
+        """UPDATED: Pass current queryset to update_or_create method."""
         # Validate and split lookup/defaults.
-        validated_lookup, validated_defaults = self._validate_and_split_lookup_defaults(
+        validated_lookup, validated_defaults = self._validate_and_split_lookup_defaults(
+            ast, partial=True
+        )
 
         # Optionally update the AST if needed:
         ast["lookup"] = validated_lookup
@@ -611,18 +723,23 @@ class ASTParser:
         # Retrieve permissions from configuration.
         permissions = self.registry.get_config(self.model).permissions
 
-        # Call the ORM update_or_create method, passing the serializer, request, and permissions.
+        # UPDATED: Call the ORM update_or_create method, passing the serializer, request, and permissions.
         record, created = self.engine.update_or_create(
+            self.current_queryset,  # Pass current queryset
             {"lookup": ast.get("lookup", {}), "defaults": ast.get("defaults", {})},
             req=self.request,
             serializer=self.serializer,
             permissions=permissions,
-            update_fields_map=
-            create_fields_map=
+            update_fields_map=self.update_fields_map,
+            create_fields_map=self.create_fields_map,
         )
 
         serialized = self.serializer.serialize(
-            record,
+            record,
+            self.model,
+            many=False,
+            depth=self.depth,
+            fields_map=self.read_fields_map,
         )
         return {
             "data": serialized,
@@ -633,9 +750,14 @@ class ASTParser:
         }
 
     def _handle_first(self, ast: Dict[str, Any]) -> Dict[str, Any]:
-
+        """UPDATED: Pass current queryset to first method."""
+        record = self.engine.first(self.current_queryset)
         serialized = self.serializer.serialize(
-            record,
+            record,
+            self.model,
+            many=False,
+            depth=self.depth,
+            fields_map=self.read_fields_map,
         )
         return {
             "data": serialized,
@@ -643,9 +765,14 @@ class ASTParser:
         }
 
     def _handle_last(self, ast: Dict[str, Any]) -> Dict[str, Any]:
-
+        """UPDATED: Pass current queryset to last method."""
+        record = self.engine.last(self.current_queryset)
         serialized = self.serializer.serialize(
-            record,
+            record,
+            self.model,
+            many=False,
+            depth=self.depth,
+            fields_map=self.read_fields_map,
        )
         return {
             "data": serialized,
@@ -653,13 +780,18 @@ class ASTParser:
         }
 
     def _handle_exists(self, ast: Dict[str, Any]) -> Dict[str, Any]:
-
+        """UPDATED: Pass current queryset to exists method."""
+        exists_flag = self.engine.exists(self.current_queryset)
         return {
             "data": exists_flag,
-            "metadata": {
+            "metadata": {
+                "exists": exists_flag,
+                "response_type": ResponseType.NUMBER.value,
+            },
         }
 
     def _handle_aggregate(self, ast: Dict[str, Any]) -> Dict[str, Any]:
+        """UPDATED: Pass current queryset to all aggregate methods."""
         op_type = ast.get("type")
         if op_type == "aggregate":
             aggs = ast.get("aggregates", {})
@@ -668,7 +800,7 @@ class ASTParser:
                 agg_list.append(
                     {"function": func, "field": field, "alias": f"{field}_{func}"}
                 )
-            result = self.engine.aggregate(agg_list)
+            result = self.engine.aggregate(self.current_queryset, agg_list)
             return {
                 "data": result,
                 "metadata": {
@@ -681,7 +813,7 @@ class ASTParser:
         if not field:
             raise ValueError("Field must be provided for aggregate operations.")
         if op_type == "count":
-            result_val = self.engine.count(field)
+            result_val = self.engine.count(self.current_queryset, field)
             return {
                 "data": result_val,
                 "metadata": {
@@ -690,7 +822,7 @@ class ASTParser:
                 },
             }
         elif op_type == "sum":
-            result_val = self.engine.sum(field)
+            result_val = self.engine.sum(self.current_queryset, field)
             return {
                 "data": result_val,
                 "metadata": {
@@ -699,7 +831,7 @@ class ASTParser:
                 },
             }
         elif op_type == "avg":
-            result_val = self.engine.avg(field)
+            result_val = self.engine.avg(self.current_queryset, field)
             return {
                 "data": result_val,
                 "metadata": {
@@ -708,7 +840,7 @@ class ASTParser:
                 },
             }
         elif op_type == "min":
-            result_val = self.engine.min(field)
+            result_val = self.engine.min(self.current_queryset, field)
             return {
                 "data": result_val,
                 "metadata": {
@@ -717,7 +849,7 @@ class ASTParser:
                 },
             }
         elif op_type == "max":
-            result_val = self.engine.max(field)
+            result_val = self.engine.max(self.current_queryset, field)
             return {
                 "data": result_val,
                 "metadata": {
@@ -727,6 +859,7 @@ class ASTParser:
         }
 
     def _handle_read(self, ast: Dict[str, Any]) -> Dict[str, Any]:
+        """UPDATED: Pass current queryset to fetch_list method."""
         offset_raw = self.serializer_options.get("offset", 0)
         limit_raw = self.serializer_options.get("limit", self.config.default_limit)
         offset_val = int(offset_raw) if offset_raw is not None else None
@@ -735,8 +868,9 @@ class ASTParser:
         # Retrieve permissions from configuration
         permissions = self.registry.get_config(self.model).permissions
 
-        # Fetch list with bulk permission checks
+        # UPDATED: Fetch list with bulk permission checks
         rows = self.engine.fetch_list(
+            self.current_queryset,  # Pass current queryset
             offset=offset_val,
             limit=limit_val,
             req=self.request,
@@ -744,7 +878,11 @@ class ASTParser:
         )
 
         serialized = self.serializer.serialize(
-            rows,
+            rows,
+            self.model,
+            many=True,
+            depth=self.depth,
+            fields_map=self.read_fields_map,
         )
         return {
             "data": serialized,
@@ -753,34 +891,36 @@ class ASTParser:
 
     # --- Helper Methods ---
 
-    def _validate_and_split_lookup_defaults(
+    def _validate_and_split_lookup_defaults(
+        self, ast: Dict[str, Any], partial: bool = False
+    ) -> Tuple[Dict[str, str]]:
         """
         Validates the lookups and the defaults separately, using appropriate field maps for each.
         Lookup uses read_fields_map, defaults uses create_fields_map.
         """
         raw_lookup = ast.get("lookup", {})
         raw_defaults = ast.get("defaults", {})
-
+
         # Validate lookup with read_fields_map (for filtering)
         validated_lookup = self.serializer.deserialize(
-            model=self.model,
-            data=raw_lookup,
-            partial=partial,
-            request=self.request,
-            fields_map=self.read_fields_map
+            model=self.model,
+            data=raw_lookup,
+            partial=partial,
+            request=self.request,
+            fields_map=self.read_fields_map,
         )
-
+
        # Validate defaults with create_fields_map (for creation)
         validated_defaults = self.serializer.deserialize(
-            model=self.model,
-            data=raw_defaults,
-            partial=partial,
-            request=self.request,
-            fields_map=self.create_fields_map
+            model=self.model,
+            data=raw_defaults,
+            partial=partial,
+            request=self.request,
+            fields_map=self.create_fields_map,
         )
-
+
         return validated_lookup, validated_defaults
-
+
     # --- Static Methods for Operation Extraction ---
 
     @staticmethod