django-bulk-hooks 0.2.9__py3-none-any.whl → 0.2.93__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- django_bulk_hooks/__init__.py +20 -27
- django_bulk_hooks/changeset.py +214 -230
- django_bulk_hooks/conditions.py +12 -12
- django_bulk_hooks/decorators.py +68 -26
- django_bulk_hooks/dispatcher.py +369 -58
- django_bulk_hooks/factory.py +541 -565
- django_bulk_hooks/handler.py +106 -115
- django_bulk_hooks/helpers.py +258 -99
- django_bulk_hooks/manager.py +134 -130
- django_bulk_hooks/models.py +89 -76
- django_bulk_hooks/operations/__init__.py +5 -5
- django_bulk_hooks/operations/analyzer.py +299 -172
- django_bulk_hooks/operations/bulk_executor.py +742 -437
- django_bulk_hooks/operations/coordinator.py +928 -472
- django_bulk_hooks/operations/field_utils.py +335 -0
- django_bulk_hooks/operations/mti_handler.py +696 -473
- django_bulk_hooks/operations/mti_plans.py +103 -87
- django_bulk_hooks/operations/record_classifier.py +196 -0
- django_bulk_hooks/queryset.py +233 -189
- django_bulk_hooks/registry.py +276 -288
- {django_bulk_hooks-0.2.9.dist-info → django_bulk_hooks-0.2.93.dist-info}/METADATA +55 -4
- django_bulk_hooks-0.2.93.dist-info/RECORD +27 -0
- django_bulk_hooks/debug_utils.py +0 -145
- django_bulk_hooks-0.2.9.dist-info/RECORD +0 -26
- {django_bulk_hooks-0.2.9.dist-info → django_bulk_hooks-0.2.93.dist-info}/LICENSE +0 -0
- {django_bulk_hooks-0.2.9.dist-info → django_bulk_hooks-0.2.93.dist-info}/WHEEL +0 -0
django_bulk_hooks/dispatcher.py
CHANGED
@@ -6,7 +6,6 @@ similar to Salesforce's hook framework.
 """
 
 import logging
-from typing import Optional
 
 logger = logging.getLogger(__name__)
 
@@ -38,7 +37,6 @@ class HookDispatcher:
         operation,
         event_prefix,
         bypass_hooks=False,
-        bypass_validation=False,
     ):
         """
         Execute operation with full hook lifecycle.
@@ -54,7 +52,6 @@ class HookDispatcher:
             operation: Callable that performs the actual DB operation
             event_prefix: 'create', 'update', or 'delete'
             bypass_hooks: Skip all hooks if True
-            bypass_validation: Skip validation hooks if True
 
         Returns:
             Result of operation
@@ -63,8 +60,7 @@ class HookDispatcher:
             return operation()
 
         # VALIDATE phase
-
-        self.dispatch(changeset, f"validate_{event_prefix}", bypass_hooks=False)
+        self.dispatch(changeset, f"validate_{event_prefix}", bypass_hooks=False)
 
         # BEFORE phase
         self.dispatch(changeset, f"before_{event_prefix}", bypass_hooks=False)
@@ -104,14 +100,84 @@ class HookDispatcher:
         # Get hooks sorted by priority (deterministic order)
         hooks = self.registry.get_hooks(changeset.model_cls, event)
 
+        logger.debug(f"🧵 DISPATCH: changeset.model_cls={changeset.model_cls.__name__}, event={event}")
+        logger.debug(f"🎣 HOOKS_FOUND: {len(hooks)} hooks for {changeset.model_cls.__name__}.{event}: {[f'{h[0].__name__}.{h[1]}' for h in hooks]}")
+
         if not hooks:
             return
 
-        #
+        # Create an operation key that includes the changeset model to avoid
+        # deduplicating hooks across different operations on the same records
+        # This prevents the same hook from executing multiple times for MTI inheritance chains
+        # but allows different operations on the same records to execute their hooks
+        record_ids = set()
+        for change in changeset.changes:
+            if change.new_record and change.new_record.pk:
+                record_ids.add(change.new_record.pk)
+            if change.old_record and change.old_record.pk:
+                record_ids.add(change.old_record.pk)
+
+        # Sort record IDs safely (handle Mock objects and other non-comparable types)
+        try:
+            sorted_record_ids = tuple(sorted(record_ids, key=lambda x: str(x)))
+        except (TypeError, AttributeError):
+            # Fallback for non-comparable objects (like Mock objects in tests)
+            sorted_record_ids = tuple(record_ids)
+
+        # Include changeset model and operation details to make the key more specific
+        operation_meta = getattr(changeset, 'operation_meta', {}) or {}
+        operation_type = getattr(changeset, 'operation_type', 'unknown')
+
+        # Include update_kwargs if present to distinguish different queryset operations
+        update_kwargs = operation_meta.get('update_kwargs', {})
+        if update_kwargs:
+            try:
+                # Convert to a hashable representation
+                update_kwargs_key = tuple(sorted((k, str(v)) for k, v in update_kwargs.items()))
+            except (TypeError, AttributeError):
+                # Fallback if values are not convertible to string
+                update_kwargs_key = tuple(sorted(update_kwargs.keys()))
+        else:
+            update_kwargs_key = ()
+
+        operation_key = (event, changeset.model_cls.__name__, operation_type, sorted_record_ids, update_kwargs_key)
+
+        # Track executed hooks to prevent duplicates in MTI inheritance chains
+        if not hasattr(self, '_executed_hooks'):
+            self._executed_hooks = set()
+
+        # Filter out hooks that have already been executed for this operation
+        unique_hooks = []
+        skipped_hooks = []
         for handler_cls, method_name, condition, priority in hooks:
-
+            hook_key = (handler_cls, method_name, operation_key)
+            if hook_key not in self._executed_hooks:
+                unique_hooks.append((handler_cls, method_name, condition, priority))
+                self._executed_hooks.add(hook_key)
+            else:
+                skipped_hooks.append((handler_cls.__name__, method_name))
+
+        # Debug logging for hook deduplication
+        if skipped_hooks:
+            logger.debug(f"⏭️ SKIPPED_DUPS: {len(skipped_hooks)} duplicate hooks: {[f'{cls}.{method}' for cls, method in skipped_hooks]}")
+
+        if unique_hooks:
+            logger.debug(f"✅ EXECUTING_UNIQUE: {len(unique_hooks)} unique hooks: {[f'{h[0].__name__}.{h[1]}' for h in unique_hooks]}")
 
-
+        if not unique_hooks:
+            return
+
+        # Execute hooks in priority order
+        logger.info(f"🔥 HOOKS: Executing {len(unique_hooks)} hooks for {changeset.model_cls.__name__}.{event}")
+        for handler_cls, method_name, condition, priority in unique_hooks:
+            logger.info(f"  → {handler_cls.__name__}.{method_name} (priority={priority})")
+            self._execute_hook(handler_cls, method_name, condition, changeset, event)
+
+    def _reset_executed_hooks(self):
+        """Reset the executed hooks tracking for a new operation."""
+        self._executed_hooks = set()
+
+    def _execute_hook(self, handler_cls, method_name, condition, changeset, event):
         """
         Execute a single hook with condition checking.
 
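The deduplication added above keys each handler method to an operation fingerprint built from the event, model name, operation type, record IDs, and any update kwargs. A minimal standalone sketch of that idea follows; the function and variable names here are illustrative only and are not the package's API:

# Illustrative sketch of the dedup key used above (not the package's API).
executed_hooks = set()

def should_run(handler_cls, method_name, event, model_name, operation_type, record_ids):
    # Stringify IDs so mixed or non-comparable key types still sort deterministically.
    operation_key = (event, model_name, operation_type, tuple(sorted(record_ids, key=str)))
    hook_key = (handler_cls, method_name, operation_key)
    if hook_key in executed_hooks:
        return False  # already ran for this operation (e.g., via an MTI parent dispatch)
    executed_hooks.add(hook_key)
    return True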
@@ -120,14 +186,73 @@ class HookDispatcher:
             method_name: Name of the method to call
             condition: Optional condition to filter records
             changeset: ChangeSet with all record changes
+            event: The hook event (e.g., 'before_create')
         """
-        #
+        # Use DI factory to create handler instance EARLY to access method decorators
+        from django_bulk_hooks.factory import create_hook_instance
+
+        handler = create_hook_instance(handler_cls)
+        method = getattr(handler, method_name)
+
+        # PRELOAD @select_related RELATIONSHIPS BEFORE CONDITION EVALUATION
+        # This ensures both conditions and hook methods have access to preloaded relationships
+
+        # Check if method has @select_related decorator
+        preload_func = getattr(method, "_select_related_preload", None)
+        if preload_func:
+            # Preload relationships to prevent N+1 queries in both conditions and hook methods
+            try:
+                model_cls_override = getattr(handler, "model_cls", None)
+
+                # Get FK fields being updated to avoid preloading conflicting relationships
+                skip_fields = changeset.operation_meta.get("fk_fields_being_updated", set())
+
+                # Preload for new_records (needed for condition evaluation and hook execution)
+                if changeset.new_records:
+                    preload_func(
+                        changeset.new_records,
+                        model_cls=model_cls_override,
+                        skip_fields=skip_fields,
+                    )
+
+                # Also preload for old_records (for conditions that check previous values)
+                if changeset.old_records:
+                    preload_func(
+                        changeset.old_records,
+                        model_cls=model_cls_override,
+                        skip_fields=skip_fields,
+                    )
+
+                # Mark that relationships have been preloaded to avoid duplicate condition preloading
+                changeset.operation_meta['relationships_preloaded'] = True
+                logger.debug(f"🔗 @select_related: Preloaded relationships for {handler_cls.__name__}.{method_name}")
+
+            except Exception as e:
+                logger.warning(f"Failed to preload relationships for {handler_cls.__name__}.{method_name}: {e}")
+
+        # SPECIAL HANDLING: Explicit @select_related support for BEFORE_CREATE hooks
+        # (This can stay for additional BEFORE_CREATE-specific logic if needed)
+        select_related_fields = getattr(method, "_select_related_fields", None)
+        if select_related_fields and event == "before_create" and changeset.new_records:
+            self._preload_select_related_for_before_create(changeset, select_related_fields)
+
+        # NOW condition evaluation is safe - relationships are preloaded
+        if condition:
+            # Skip per-hook preloading if relationships were already preloaded upfront
+            if not changeset.operation_meta.get('relationships_preloaded', False):
+                condition_relationships = self._extract_condition_relationships(condition, changeset.model_cls)
+                logger.info(f"🔍 CONDITION: {handler_cls.__name__}.{method_name} has condition, extracted relationships: {condition_relationships}")
+                if condition_relationships:
+                    logger.info(f"🔗 PRELOADING: Preloading condition relationships for {len(changeset.changes)} records")
+                    self._preload_condition_relationships(changeset, condition_relationships)
+            else:
+                logger.debug(f"🔍 CONDITION: {handler_cls.__name__}.{method_name} has condition (relationships already preloaded)")
+
+        # Filter records based on condition (now safe - relationships are preloaded)
         if condition:
-
-
-
-                if condition.check(change.new_record, change.old_record)
-            ]
+            logger.info(f"⚡ EVALUATING: Checking condition for {handler_cls.__name__}.{method_name} on {len(changeset.changes)} records")
+            filtered_changes = [change for change in changeset.changes if condition.check(change.new_record, change.old_record)]
+            logger.info(f"✅ CONDITION: {len(filtered_changes)}/{len(changeset.changes)} records passed condition filter")
 
             if not filtered_changes:
                 # No records match condition, skip this hook
@@ -146,63 +271,25 @@ class HookDispatcher:
             # No condition, use full changeset
             filtered_changeset = changeset
 
-        # Use DI factory to create handler instance
-        from django_bulk_hooks.factory import create_hook_instance
-
-        handler = create_hook_instance(handler_cls)
-        method = getattr(handler, method_name)
-
-        # Check if method has @select_related decorator
-        preload_func = getattr(method, "_select_related_preload", None)
-        if preload_func:
-            # Preload relationships to prevent N+1 queries
-            try:
-                model_cls_override = getattr(handler, "model_cls", None)
-
-                # Preload for new_records
-                if filtered_changeset.new_records:
-                    logger.debug(
-                        f"Preloading relationships for {len(filtered_changeset.new_records)} "
-                        f"new_records for {handler_cls.__name__}.{method_name}"
-                    )
-                    preload_func(
-                        filtered_changeset.new_records, model_cls=model_cls_override
-                    )
-
-                # Also preload for old_records (for conditions that check previous values)
-                if filtered_changeset.old_records:
-                    logger.debug(
-                        f"Preloading relationships for {len(filtered_changeset.old_records)} "
-                        f"old_records for {handler_cls.__name__}.{method_name}"
-                    )
-                    preload_func(
-                        filtered_changeset.old_records, model_cls=model_cls_override
-                    )
-            except Exception:
-                logger.debug(
-                    "select_related preload failed for %s.%s",
-                    handler_cls.__name__,
-                    method_name,
-                    exc_info=True,
-                )
-
         # Execute hook with ChangeSet
-        #
+        #
         # ARCHITECTURE NOTE: Hook Contract
         # ====================================
         # All hooks must accept **kwargs for forward compatibility.
         # We pass: changeset, new_records, old_records
-        #
+        #
         # Old hooks that don't use changeset: def hook(self, new_records, old_records, **kwargs)
         # New hooks that do use changeset: def hook(self, changeset, new_records, old_records, **kwargs)
-        #
+        #
         # This is standard Python framework design (see Django signals, Flask hooks, etc.)
+        logger.info(f"  🚀 Executing: {handler_cls.__name__}.{method_name}")
         try:
             method(
                 changeset=filtered_changeset,
                 new_records=filtered_changeset.new_records,
                 old_records=filtered_changeset.old_records,
             )
+            logger.info(f"  ✅ Completed: {handler_cls.__name__}.{method_name}")
         except Exception as e:
             # Fail-fast: re-raise to rollback transaction
             logger.error(
@@ -211,9 +298,223 @@ class HookDispatcher:
             )
             raise
 
+    def _extract_condition_relationships(self, condition, model_cls):
+        """
+        Extract relationship paths that a condition might access.
+
+        Args:
+            condition: HookCondition instance
+            model_cls: The model class
+
+        Returns:
+            set: Set of relationship field names to preload
+        """
+        relationships = set()
+
+        # Guard against Mock objects and non-condition objects
+        if not hasattr(condition, 'check') or hasattr(condition, '_mock_name'):
+            return relationships
+
+        # Handle different condition types
+        if hasattr(condition, 'field'):
+            # Extract relationships from field path (e.g., "status__value" -> "status")
+            field_path = condition.field
+            if isinstance(field_path, str):
+                if '__' in field_path:
+                    # Take the first part before __ (the relationship to preload)
+                    rel_field = field_path.split('__')[0]
+
+                    # Normalize FK field names: business_id -> business
+                    if rel_field.endswith('_id'):
+                        potential_field_name = rel_field[:-3]  # Remove '_id'
+                        if self._is_relationship_field(model_cls, potential_field_name):
+                            rel_field = potential_field_name
+
+                    relationships.add(rel_field)
+                else:
+                    # Handle single field (no __ notation)
+                    rel_field = field_path
+
+                    # Normalize FK field names: business_id -> business
+                    if rel_field.endswith('_id'):
+                        potential_field_name = rel_field[:-3]  # Remove '_id'
+                        if self._is_relationship_field(model_cls, potential_field_name):
+                            rel_field = potential_field_name
+
+                    # Only add if it's actually a relationship field
+                    if self._is_relationship_field(model_cls, rel_field):
+                        relationships.add(rel_field)
+
+        # Handle composite conditions (AndCondition, OrCondition)
+        if hasattr(condition, 'cond1') and hasattr(condition, 'cond2'):
+            relationships.update(self._extract_condition_relationships(condition.cond1, model_cls))
+            relationships.update(self._extract_condition_relationships(condition.cond2, model_cls))
+
+        # Handle NotCondition
+        if hasattr(condition, 'cond'):
+            relationships.update(self._extract_condition_relationships(condition.cond, model_cls))
+
+        return relationships
+
+    def _is_relationship_field(self, model_cls, field_name):
+        """Check if a field is a relationship field."""
+        try:
+            field = model_cls._meta.get_field(field_name)
+            return field.is_relation and not field.many_to_many
+        except:
+            return False
+
+    def _preload_condition_relationships(self, changeset, relationships):
+        """
+        Preload relationships needed for condition evaluation.
+
+        This prevents N+1 queries when conditions access relationships on both
+        old_records and new_records (e.g., HasChanged conditions).
+
+        Args:
+            changeset: ChangeSet with records
+            relationships: Set of relationship field names to preload
+        """
+        if not relationships:
+            return
+
+        # Use Django's select_related to preload relationships
+        relationship_list = list(relationships)
+
+        # Collect all unique PKs from both new_records and old_records
+        all_ids = set()
+
+        # Add PKs from new_records
+        if changeset.new_records:
+            all_ids.update(obj.pk for obj in changeset.new_records if obj.pk is not None)
+
+        # Add PKs from old_records
+        if changeset.old_records:
+            all_ids.update(obj.pk for obj in changeset.old_records if obj.pk is not None)
+
+        # Bulk preload relationships for all records that have PKs
+        if all_ids:
+            preloaded = changeset.model_cls.objects.filter(pk__in=list(all_ids)).select_related(*relationship_list).in_bulk()
+
+            # Update new_records with preloaded relationships
+            if changeset.new_records:
+                for obj in changeset.new_records:
+                    if obj.pk and obj.pk in preloaded:
+                        preloaded_obj = preloaded[obj.pk]
+                        for rel in relationship_list:
+                            if hasattr(preloaded_obj, rel):
+                                setattr(obj, rel, getattr(preloaded_obj, rel))
+
+            # Update old_records with preloaded relationships
+            if changeset.old_records:
+                for obj in changeset.old_records:
+                    if obj.pk and obj.pk in preloaded:
+                        preloaded_obj = preloaded[obj.pk]
+                        for rel in relationship_list:
+                            if hasattr(preloaded_obj, rel):
+                                setattr(obj, rel, getattr(preloaded_obj, rel))
+
+        # Handle unsaved new_records by preloading their FK targets (bulk query to avoid N+1)
+        if changeset.new_records:
+            # Collect FK IDs for each relationship from unsaved records
+            field_ids_map = {rel: set() for rel in relationship_list}
+
+            for obj in changeset.new_records:
+                if obj.pk is None:  # Unsaved object
+                    for rel in relationship_list:
+                        if hasattr(obj, f'{rel}_id'):
+                            rel_id = getattr(obj, f'{rel}_id')
+                            if rel_id:
+                                field_ids_map[rel].add(rel_id)
+
+            # Bulk load relationships for unsaved records
+            field_objects_map = {}
+            for rel, ids in field_ids_map.items():
+                if not ids:
+                    continue
+                try:
+                    rel_model = getattr(changeset.model_cls._meta.get_field(rel).remote_field, 'model')
+                    field_objects_map[rel] = rel_model.objects.in_bulk(ids)
+                except Exception:
+                    field_objects_map[rel] = {}
+
+            # Attach relationships to unsaved records
+            for obj in changeset.new_records:
+                if obj.pk is None:  # Unsaved object
+                    for rel in relationship_list:
+                        rel_id = getattr(obj, f'{rel}_id', None)
+                        if rel_id and rel in field_objects_map:
+                            rel_obj = field_objects_map[rel].get(rel_id)
+                            if rel_obj:
+                                setattr(obj, rel, rel_obj)
+
+    def _preload_select_related_for_before_create(self, changeset, select_related_fields):
+        """
+        Explicit bulk preloading for @select_related on BEFORE_CREATE hooks.
+
+        This method provides guaranteed N+1 elimination by:
+        1. Collecting all FK IDs from unsaved new_records
+        2. Bulk querying related objects
+        3. Attaching relationships to each record
+
+        Args:
+            changeset: ChangeSet with new_records (unsaved objects)
+            select_related_fields: List of field names to preload (e.g., ['financial_account'])
+        """
+        # Ensure select_related_fields is actually iterable (not a Mock in tests)
+        if not select_related_fields or not changeset.new_records or not hasattr(select_related_fields, '__iter__'):
+            return
+
+        logger.info(f"🔗 BULK PRELOAD: Preloading {select_related_fields} for {len(changeset.new_records)} unsaved records")
+
+        # Collect FK IDs for each field
+        field_ids_map = {field: set() for field in select_related_fields}
+
+        for record in changeset.new_records:
+            for field in select_related_fields:
+                fk_id = getattr(record, f'{field}_id', None)
+                if fk_id is not None:
+                    field_ids_map[field].add(fk_id)
+
+        # Bulk query related objects for each field
+        field_objects_map = {}
+        for field, ids in field_ids_map.items():
+            if not ids:
+                continue
+
+            try:
+                # Get the related model
+                relation_field = changeset.model_cls._meta.get_field(field)
+                if not relation_field.is_relation:
+                    continue
+
+                related_model = relation_field.remote_field.model
+
+                # Bulk query: related_model.objects.filter(id__in=ids)
+                field_objects_map[field] = related_model.objects.in_bulk(ids)
+                logger.info(f"  ✅ Bulk loaded {len(field_objects_map[field])} {related_model.__name__} objects for field '{field}'")
+
+            except Exception as e:
+                logger.warning(f"  ❌ Failed to bulk load field '{field}': {e}")
+                field_objects_map[field] = {}
+
+        # Attach relationships to each record
+        for record in changeset.new_records:
+            for field in select_related_fields:
+                fk_id = getattr(record, f'{field}_id', None)
+                if fk_id is not None and field in field_objects_map:
+                    related_obj = field_objects_map[field].get(fk_id)
+                    if related_obj is not None:
+                        setattr(record, field, related_obj)
+                        # Also cache in Django's fields_cache for consistency
+                        if hasattr(record, '_state') and hasattr(record._state, 'fields_cache'):
+                            record._state.fields_cache[field] = related_obj
+
+        logger.info(f"🔗 BULK PRELOAD: Completed relationship attachment for {len(changeset.new_records)} records")
+
 
 # Global dispatcher instance
-_dispatcher:
+_dispatcher: HookDispatcher | None = None
 
 
 def get_dispatcher():
@@ -233,3 +534,13 @@ def get_dispatcher():
     # Create dispatcher with the registry instance
     _dispatcher = HookDispatcher(get_registry())
     return _dispatcher
+
+
+def reset_dispatcher():
+    """
+    Reset the global dispatcher instance.
+
+    Useful for testing to ensure clean state between tests.
+    """
+    global _dispatcher
+    _dispatcher = None