django-bulk-hooks 0.2.44__py3-none-any.whl → 0.2.93__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -6,18 +6,38 @@ a clean, simple API for the QuerySet to use.
  """

  import logging
+ from dataclasses import dataclass
+ from typing import Any
+ from typing import Callable
+ from typing import Dict
+ from typing import List
+ from typing import Optional
+ from typing import Set
+ from typing import Tuple

  from django.core.exceptions import FieldDoesNotExist
  from django.db import transaction
+ from django.db.models import Model
  from django.db.models import QuerySet

+ from django_bulk_hooks.changeset import ChangeSet
+ from django_bulk_hooks.changeset import RecordChange
+ from django_bulk_hooks.context import get_bypass_hooks
  from django_bulk_hooks.helpers import build_changeset_for_create
  from django_bulk_hooks.helpers import build_changeset_for_delete
  from django_bulk_hooks.helpers import build_changeset_for_update
+ from django_bulk_hooks.helpers import extract_pks

  logger = logging.getLogger(__name__)


+ @dataclass
+ class InstanceSnapshot:
+ """Snapshot of instance state for modification tracking."""
+
+ field_values: Dict[str, Any]
+
+
  class BulkOperationCoordinator:
  """
  Single entry point for coordinating bulk operations.
@@ -26,11 +46,13 @@ class BulkOperationCoordinator:
  for the QuerySet. It wires up services and coordinates the hook
  lifecycle for each operation type.

- Services are created lazily and cached.
+ Services are created lazily and cached for performance.
  """

+ # Constants
+ UPSERT_TIMESTAMP_THRESHOLD_SECONDS = 1.0

- def __init__(self, queryset):
+ def __init__(self, queryset: QuerySet):
  """
  Initialize coordinator for a queryset.

@@ -40,77 +62,96 @@ class BulkOperationCoordinator:
  self.queryset = queryset
  self.model_cls = queryset.model

- # Lazy initialization
+ # Lazy-initialized services
  self._analyzer = None
  self._mti_handler = None
  self._record_classifier = None
  self._executor = None
  self._dispatcher = None

+ # ==================== SERVICE PROPERTIES ====================
+
+ def _get_or_create_service(self, service_name: str, service_class: type, *args, **kwargs) -> Any:
+ """
+ Generic lazy service initialization with caching.
+
+ Args:
+ service_name: Name of the service attribute (e.g., 'analyzer')
+ service_class: The class to instantiate
+ *args, **kwargs: Arguments to pass to the service constructor
+
+ Returns:
+ The service instance
+ """
+ attr_name = f"_{service_name}"
+ service = getattr(self, attr_name)
+
+ if service is None:
+ service = service_class(*args, **kwargs)
+ setattr(self, attr_name, service)
+
+ return service
+
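The new _get_or_create_service helper is essentially a hand-rolled cached property keyed by attribute name. For comparison only, a minimal sketch of the same lazy-create-and-cache behavior using the standard library (this is an illustration, not how the package is written; the ModelAnalyzer import path is the one shown in this diff):

    from functools import cached_property

    class Coordinator:
        def __init__(self, queryset):
            self.queryset = queryset
            self.model_cls = queryset.model

        @cached_property
        def analyzer(self):
            # Created on first access, then cached on the instance.
            from django_bulk_hooks.operations.analyzer import ModelAnalyzer
            return ModelAnalyzer(self.model_cls)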
  @property
  def analyzer(self):
- """Get or create ModelAnalyzer"""
- if self._analyzer is None:
- from django_bulk_hooks.operations.analyzer import ModelAnalyzer
+ """Get or create ModelAnalyzer."""
+ from django_bulk_hooks.operations.analyzer import ModelAnalyzer

- self._analyzer = ModelAnalyzer(self.model_cls)
- return self._analyzer
+ return self._get_or_create_service("analyzer", ModelAnalyzer, self.model_cls)

  @property
  def mti_handler(self):
- """Get or create MTIHandler"""
- if self._mti_handler is None:
- from django_bulk_hooks.operations.mti_handler import MTIHandler
+ """Get or create MTIHandler."""
+ from django_bulk_hooks.operations.mti_handler import MTIHandler

- self._mti_handler = MTIHandler(self.model_cls)
- return self._mti_handler
+ return self._get_or_create_service("mti_handler", MTIHandler, self.model_cls)

  @property
  def record_classifier(self):
- """Get or create RecordClassifier"""
- if self._record_classifier is None:
- from django_bulk_hooks.operations.record_classifier import RecordClassifier
+ """Get or create RecordClassifier."""
+ from django_bulk_hooks.operations.record_classifier import RecordClassifier

- self._record_classifier = RecordClassifier(self.model_cls)
- return self._record_classifier
+ return self._get_or_create_service("record_classifier", RecordClassifier, self.model_cls)

  @property
  def executor(self):
- """Get or create BulkExecutor"""
- if self._executor is None:
- from django_bulk_hooks.operations.bulk_executor import BulkExecutor
-
- self._executor = BulkExecutor(
- queryset=self.queryset,
- analyzer=self.analyzer,
- mti_handler=self.mti_handler,
- record_classifier=self.record_classifier,
- )
- return self._executor
+ """Get or create BulkExecutor."""
+ from django_bulk_hooks.operations.bulk_executor import BulkExecutor
+
+ return self._get_or_create_service(
+ "executor",
+ BulkExecutor,
+ queryset=self.queryset,
+ analyzer=self.analyzer,
+ mti_handler=self.mti_handler,
+ record_classifier=self.record_classifier,
+ )

  @property
  def dispatcher(self):
- """Get or create Dispatcher"""
- if self._dispatcher is None:
- from django_bulk_hooks.dispatcher import get_dispatcher
+ """Get or create Dispatcher."""
+ from django_bulk_hooks.dispatcher import get_dispatcher

- self._dispatcher = get_dispatcher()
- return self._dispatcher
+ return self._get_or_create_service("dispatcher", get_dispatcher)
+
+ @property
+ def inheritance_chain(self) -> List[type]:
+ """Single source of truth for MTI inheritance chain."""
+ return self.mti_handler.get_inheritance_chain()

  # ==================== PUBLIC API ====================

  @transaction.atomic
  def create(
  self,
- objs,
- batch_size=None,
- ignore_conflicts=False,
- update_conflicts=False,
- update_fields=None,
- unique_fields=None,
- bypass_hooks=False,
- bypass_validation=False,
- ):
+ objs: List[Model],
+ batch_size: Optional[int] = None,
+ ignore_conflicts: bool = False,
+ update_conflicts: bool = False,
+ update_fields: Optional[List[str]] = None,
+ unique_fields: Optional[List[str]] = None,
+ bypass_hooks: bool = False,
+ ) -> List[Model]:
  """
  Execute bulk create with hooks.

@@ -122,7 +163,6 @@ class BulkOperationCoordinator:
  update_fields: Fields to update on conflict
  unique_fields: Fields to check for conflicts
  bypass_hooks: Skip all hooks if True
- bypass_validation: Skip validation hooks if True

  Returns:
  List of created objects
@@ -130,22 +170,11 @@ class BulkOperationCoordinator:
  if not objs:
  return objs

- # Validate
  self.analyzer.validate_for_create(objs)

- # For upsert operations, classify records upfront
- existing_record_ids = set()
- existing_pks_map = {}
- if update_conflicts and unique_fields:
- existing_record_ids, existing_pks_map = self.record_classifier.classify_for_upsert(
- objs, unique_fields
- )
- logger.info(
- f"Upsert operation: {len(existing_record_ids)} existing, "
- f"{len(objs) - len(existing_record_ids)} new records"
- )
+ # Handle upsert classification upfront
+ existing_record_ids, existing_pks_map = self._classify_upsert_records(objs, update_conflicts, unique_fields)

- # Build initial changeset
  changeset = build_changeset_for_create(
  self.model_cls,
  objs,
@@ -156,7 +185,6 @@ class BulkOperationCoordinator:
  unique_fields=unique_fields,
  )

- # Execute with hook lifecycle
  def operation():
  return self.executor.bulk_create(
  objs,
@@ -174,18 +202,16 @@ class BulkOperationCoordinator:
  operation=operation,
  event_prefix="create",
  bypass_hooks=bypass_hooks,
- bypass_validation=bypass_validation,
  )

  @transaction.atomic
  def update(
  self,
- objs,
- fields,
- batch_size=None,
- bypass_hooks=False,
- bypass_validation=False,
- ):
+ objs: List[Model],
+ fields: List[str],
+ batch_size: Optional[int] = None,
+ bypass_hooks: bool = False,
+ ) -> int:
  """
  Execute bulk update with hooks.

@@ -194,7 +220,6 @@ class BulkOperationCoordinator:
  fields: List of field names to update
  batch_size: Number of objects per batch
  bypass_hooks: Skip all hooks if True
- bypass_validation: Skip validation hooks if True

  Returns:
  Number of objects updated
@@ -202,27 +227,11 @@ class BulkOperationCoordinator:
  if not objs:
  return 0

- # Validate
  self.analyzer.validate_for_update(objs)

- # Fetch old records using analyzer (single source of truth)
  old_records_map = self.analyzer.fetch_old_records_map(objs)
+ changeset = self._build_update_changeset(objs, fields, old_records_map)

- # Build changeset
- from django_bulk_hooks.changeset import ChangeSet
- from django_bulk_hooks.changeset import RecordChange
-
- changes = [
- RecordChange(
- new_record=obj,
- old_record=old_records_map.get(obj.pk),
- changed_fields=fields,
- )
- for obj in objs
- ]
- changeset = ChangeSet(self.model_cls, changes, "update", {"fields": fields})
-
- # Execute with hook lifecycle
  def operation():
  return self.executor.bulk_update(objs, fields, batch_size=batch_size)

@@ -231,19 +240,20 @@ class BulkOperationCoordinator:
  operation=operation,
  event_prefix="update",
  bypass_hooks=bypass_hooks,
- bypass_validation=bypass_validation,
  )

  @transaction.atomic
  def update_queryset(
- self, update_kwargs, bypass_hooks=False, bypass_validation=False,
- ):
+ self,
+ update_kwargs: Dict[str, Any],
+ bypass_hooks: bool = False,
+ ) -> int:
  """
  Execute queryset.update() with full hook support.
-
+
  ARCHITECTURE & PERFORMANCE TRADE-OFFS
  ======================================
-
+
  To support hooks with queryset.update(), we must:
  1. Fetch old state (SELECT all matching rows)
  2. Execute database update (UPDATE in SQL)
@@ -252,171 +262,435 @@ class BulkOperationCoordinator:
  5. Run BEFORE_UPDATE hooks (CAN modify instances)
  6. Persist BEFORE_UPDATE modifications (bulk_update)
  7. Run AFTER_UPDATE hooks (read-only side effects)
-
+
  Performance Cost:
  - 2 SELECT queries (before/after)
  - 1 UPDATE query (actual update)
  - 1 bulk_update (if hooks modify data)
-
+
  Trade-off: Hooks require loading data into Python. If you need
  maximum performance and don't need hooks, use bypass_hooks=True.
-
- Hook Semantics:
- - BEFORE_UPDATE hooks run after the DB update and CAN modify instances
- - Modifications are auto-persisted (framework handles complexity)
- - AFTER_UPDATE hooks run after BEFORE_UPDATE and are read-only
- - This enables cascade logic and computed fields based on DB values
- - User expectation: BEFORE_UPDATE hooks can modify data
-
- Why this approach works well:
- - Allows hooks to see Subquery/F() computed values
- - Enables HasChanged conditions on complex expressions
- - Maintains SQL performance (Subquery stays in database)
- - Meets user expectations: BEFORE_UPDATE can modify instances
- - Clean separation: BEFORE for modifications, AFTER for side effects
-
- For true "prevent write" semantics, intercept at a higher level
- or use bulk_update() directly (which has true before semantics).
- """
- from django_bulk_hooks.context import get_bypass_hooks
-
- # Fast path: no hooks at all
+
+ Args:
+ update_kwargs: Dict of fields to update
+ bypass_hooks: Skip all hooks if True
+
+ Returns:
+ Number of rows updated
+ """
  if bypass_hooks or get_bypass_hooks():
  return QuerySet.update(self.queryset, **update_kwargs)

- # Full hook lifecycle path
- return self._execute_queryset_update_with_hooks(
- update_kwargs=update_kwargs,
- bypass_validation=bypass_validation,
+ return self._execute_queryset_update_with_hooks(update_kwargs)
+
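A minimal standalone sketch of the fetch-update-fetch pattern described in the docstring above, using only stock Django ORM calls. The Order model and its fields are hypothetical; the package's own hook dispatch is replaced by plain Python at step 4:

    from django.db.models import F

    qs = Order.objects.filter(status="pending")

    # 1. Fetch old state before the UPDATE.
    old_by_pk = {obj.pk: obj for obj in qs}

    # 2. Run the database UPDATE (F() stays in SQL).
    qs.update(amount=F("amount") * 2)

    # 3. Re-fetch by primary key to see the computed values.
    new_instances = list(Order.objects.filter(pk__in=list(old_by_pk)))

    # 4-7. Hooks would compare old vs. new here, mutate instances,
    # and persist any changes with a plain bulk_update().
    for obj in new_instances:
        obj.status = "processed"
    Order.objects.bulk_update(new_instances, ["status"])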
+ @transaction.atomic
+ def delete(self, bypass_hooks: bool = False) -> Tuple[int, Dict[str, int]]:
+ """
+ Execute delete with hooks.
+
+ Args:
+ bypass_hooks: Skip all hooks if True
+
+ Returns:
+ Tuple of (count, details dict)
+ """
+ objs = list(self.queryset)
+ if not objs:
+ return (0, {})
+
+ self.analyzer.validate_for_delete(objs)
+
+ changeset = build_changeset_for_delete(self.model_cls, objs)
+
+ def operation():
+ return QuerySet.delete(self.queryset)
+
+ return self._execute_with_mti_hooks(
+ changeset=changeset,
+ operation=operation,
+ event_prefix="delete",
+ bypass_hooks=bypass_hooks,
  )

+ def clean(self, objs: List[Model], is_create: Optional[bool] = None) -> None:
+ """
+ Execute validation hooks only (no database operations).
+
+ This is used by Django's clean() method to hook VALIDATE_* events
+ without performing the actual operation.
+
+ Args:
+ objs: List of model instances to validate
+ is_create: True for create, False for update, None to auto-detect
+ """
+ if not objs:
+ return
+
+ # Auto-detect operation type
+ if is_create is None:
+ is_create = objs[0].pk is None
+
+ # Validate based on operation type
+ if is_create:
+ self.analyzer.validate_for_create(objs)
+ changeset = build_changeset_for_create(self.model_cls, objs)
+ event = "validate_create"
+ else:
+ self.analyzer.validate_for_update(objs)
+ changeset = build_changeset_for_update(self.model_cls, objs, {})
+ event = "validate_update"
+
+ # Dispatch validation event
+ models_in_chain = self.inheritance_chain
+ self._dispatch_hooks_for_models(models_in_chain, changeset, event)
+
+ # ==================== QUERYSET UPDATE IMPLEMENTATION ====================
+
  def _execute_queryset_update_with_hooks(
- self, update_kwargs, bypass_validation=False,
- ):
+ self,
+ update_kwargs: Dict[str, Any],
+ ) -> int:
  """
  Execute queryset update with full hook lifecycle support.
-
- This method implements the fetch-update-fetch pattern required
- to support hooks with queryset.update(). BEFORE_UPDATE hooks can
- modify instances and modifications are auto-persisted.
-
+
+ Implements the fetch-update-fetch pattern required to support hooks
+ with queryset.update(). BEFORE_UPDATE hooks can modify instances
+ and modifications are auto-persisted.
+
  Args:
  update_kwargs: Dict of fields to update
- bypass_validation: Skip validation hooks if True
-
+
  Returns:
  Number of rows updated
  """
- # Step 1: Fetch old state (before database update)
- old_instances = list(self.queryset)
+ # Step 1: Fetch old state with relationships preloaded
+ hook_relationships = self._extract_hook_relationships()
+ old_instances = self._fetch_instances_with_relationships(self.queryset, hook_relationships)
+
  if not old_instances:
  return 0

  old_records_map = {inst.pk: inst for inst in old_instances}

  # Step 2: Execute native Django update
- # Use stored reference to parent class method - clean and simple
  update_count = QuerySet.update(self.queryset, **update_kwargs)
-
  if update_count == 0:
  return 0

- # Step 3: Fetch new state (after database update)
- # This captures any Subquery/F() computed values
- # Use primary keys to fetch updated instances since queryset filters may no longer match
- pks = [inst.pk for inst in old_instances]
- new_instances = list(self.model_cls.objects.filter(pk__in=pks))
+ # Step 3: Fetch new state after update
+ pks = extract_pks(old_instances)
+ new_queryset = self.model_cls.objects.filter(pk__in=pks)
+ new_instances = self._fetch_instances_with_relationships(new_queryset, hook_relationships)

- # Step 4: Build changeset
+ # Step 4: Build changeset and run hook lifecycle
  changeset = build_changeset_for_update(
  self.model_cls,
  new_instances,
  update_kwargs,
  old_records_map=old_records_map,
  )
-
- # Mark as queryset update for potential hook inspection
  changeset.operation_meta["is_queryset_update"] = True
  changeset.operation_meta["allows_modifications"] = True

- # Step 5: Get MTI inheritance chain
- models_in_chain = [self.model_cls]
- if self.mti_handler.is_mti_model():
- models_in_chain.extend(self.mti_handler.get_parent_models())
-
- # Step 6: Run VALIDATE hooks (if not bypassed)
- if not bypass_validation:
- for model_cls in models_in_chain:
- model_changeset = self._build_changeset_for_model(changeset, model_cls)
- self.dispatcher.dispatch(
- model_changeset,
- "validate_update",
- bypass_hooks=False,
- )
-
- # Step 7: Run BEFORE_UPDATE hooks with modification tracking
- modified_fields = self._run_before_update_hooks_with_tracking(
- new_instances,
- models_in_chain,
- changeset,
- )
+ models_in_chain = self.inheritance_chain
+
+ # Step 5: VALIDATE phase
+ self._dispatch_hooks_for_models(models_in_chain, changeset, "validate_update", bypass_hooks=False)
+
+ # Step 6: BEFORE_UPDATE phase with modification tracking
+ modified_fields = self._run_before_update_hooks_with_tracking(new_instances, models_in_chain, changeset)

- # Step 8: Auto-persist BEFORE_UPDATE modifications
+ # Step 7: Auto-persist BEFORE_UPDATE modifications
  if modified_fields:
  self._persist_hook_modifications(new_instances, modified_fields)

- # Step 9: Take snapshot before AFTER_UPDATE hooks
- pre_after_hook_state = self._snapshot_instance_state(new_instances)
+ # Step 8: AFTER_UPDATE phase (read-only)
+ pre_after_state = self._snapshot_instance_state(new_instances)
+ self._dispatch_hooks_for_models(models_in_chain, changeset, "after_update", bypass_hooks=False)

- # Step 10: Run AFTER_UPDATE hooks (read-only side effects)
- for model_cls in models_in_chain:
- model_changeset = self._build_changeset_for_model(changeset, model_cls)
- self.dispatcher.dispatch(
- model_changeset,
- "after_update",
- bypass_hooks=False,
- )
-
- # Step 11: Auto-persist AFTER_UPDATE modifications (if any)
- after_modified_fields = self._detect_modifications(new_instances, pre_after_hook_state)
+ # Step 9: Auto-persist any AFTER_UPDATE modifications (should be rare)
+ after_modified_fields = self._detect_modifications(new_instances, pre_after_state)
  if after_modified_fields:
+ logger.warning("AFTER_UPDATE hooks modified fields: %s. Consider moving modifications to BEFORE_UPDATE.", after_modified_fields)
  self._persist_hook_modifications(new_instances, after_modified_fields)

  return update_count

- def _run_before_update_hooks_with_tracking(self, instances, models_in_chain, changeset):
+ def _run_before_update_hooks_with_tracking(self, instances: List[Model], models_in_chain: List[type], changeset: ChangeSet) -> Set[str]:
  """
  Run BEFORE_UPDATE hooks and detect modifications.
-
- This is what users expect - BEFORE_UPDATE hooks can modify instances
- and those modifications will be automatically persisted. The framework
- handles the complexity internally.
-
+
  Returns:
  Set of field names that were modified by hooks
  """
- # Snapshot current state
  pre_hook_state = self._snapshot_instance_state(instances)
+ self._dispatch_hooks_for_models(models_in_chain, changeset, "before_update", bypass_hooks=False)
+ return self._detect_modifications(instances, pre_hook_state)
+
+ # ==================== MTI HOOK ORCHESTRATION ====================
+
+ def _execute_with_mti_hooks(
+ self,
+ changeset: ChangeSet,
+ operation: Callable,
+ event_prefix: str,
+ bypass_hooks: bool = False,
+ ) -> Any:
+ """
+ Execute operation with hooks for entire MTI inheritance chain.
+
+ This ensures parent model hooks fire when child instances are
+ created/updated/deleted in MTI scenarios.
+
+ Args:
+ changeset: ChangeSet for the child model
+ operation: Callable that performs the actual DB operation
+ event_prefix: 'create', 'update', or 'delete'
+ bypass_hooks: Skip all hooks if True
+
+ Returns:
+ Result of operation
+ """
+ if bypass_hooks:
+ return operation()
+
+ self.dispatcher._reset_executed_hooks()
+ logger.debug("Starting %s operation for %s", event_prefix, changeset.model_cls.__name__)
+
+ models_in_chain = self.inheritance_chain
+
+ # Preload relationships needed by hook conditions (prevents N+1)
+ self._preload_condition_relationships_for_operation(changeset, models_in_chain)
+
+ # VALIDATE phase
+ self._dispatch_hooks_for_models(models_in_chain, changeset, f"validate_{event_prefix}")
+
+ # BEFORE phase
+ self._dispatch_hooks_for_models(models_in_chain, changeset, f"before_{event_prefix}")
+
+ # Execute operation
+ result = operation()
+
+ # AFTER phase (handle upsert splitting for create operations)
+ if result and isinstance(result, list) and event_prefix == "create":
+ if self._is_upsert_operation(result):
+ self._dispatch_upsert_after_hooks(result, models_in_chain)
+ else:
+ after_changeset = build_changeset_for_create(changeset.model_cls, result)
+ self._dispatch_hooks_for_models(models_in_chain, after_changeset, f"after_{event_prefix}")
+ else:
+ self._dispatch_hooks_for_models(models_in_chain, changeset, f"after_{event_prefix}")
+
+ return result
+
+ def _dispatch_hooks_for_models(
+ self,
+ models_in_chain: List[type],
+ changeset: ChangeSet,
+ event_suffix: str,
+ bypass_hooks: bool = False,
+ ) -> None:
+ """
+ Dispatch hooks for all models in inheritance chain.
+
+ Args:
+ models_in_chain: List of model classes in MTI inheritance chain
+ changeset: The changeset to use as base
+ event_suffix: Event name suffix (e.g., 'before_create')
+ bypass_hooks: Whether to skip hook execution
+ """
+ logger.debug("Dispatching %s to %d models: %s", event_suffix, len(models_in_chain), [m.__name__ for m in models_in_chain])

- # Run BEFORE_UPDATE hooks
  for model_cls in models_in_chain:
  model_changeset = self._build_changeset_for_model(changeset, model_cls)
- self.dispatcher.dispatch(
- model_changeset,
- "before_update",
- bypass_hooks=False,
- )
+ self.dispatcher.dispatch(model_changeset, event_suffix, bypass_hooks=bypass_hooks)

- # Detect modifications
- return self._detect_modifications(instances, pre_hook_state)
+ def _build_changeset_for_model(self, original_changeset: ChangeSet, target_model_cls: type) -> ChangeSet:
+ """
+ Build a changeset for a specific model in the MTI inheritance chain.
+
+ This allows parent model hooks to receive the same instances but with
+ the correct model_cls for hook registration matching.

- def _snapshot_instance_state(self, instances):
+ Args:
+ original_changeset: The original changeset (for child model)
+ target_model_cls: The model class to build changeset for
+
+ Returns:
+ ChangeSet for the target model
+ """
+ return ChangeSet(
+ model_cls=target_model_cls,
+ changes=original_changeset.changes,
+ operation_type=original_changeset.operation_type,
+ operation_meta=original_changeset.operation_meta,
+ )
+
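The MTIHandler implementation behind inheritance_chain is not part of this diff. For orientation only, a rough sketch of how such a chain can be derived from Django's public model metadata; this is an assumption about the idea, not a copy of get_inheritance_chain:

    def get_inheritance_chain(model_cls):
        # Child model first, then each concrete MTI parent,
        # walking Django's _meta.parents mapping recursively.
        chain = [model_cls]
        for parent in model_cls._meta.parents:
            chain.extend(get_inheritance_chain(parent))
        return chain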
+ # ==================== UPSERT HANDLING ====================
+
+ def _classify_upsert_records(
+ self,
+ objs: List[Model],
+ update_conflicts: bool,
+ unique_fields: Optional[List[str]],
+ ) -> Tuple[Set[Any], Dict[Any, Any]]:
+ """
+ Classify records for upsert operations.
+
+ Args:
+ objs: List of model instances
+ update_conflicts: Whether this is an upsert operation
+ unique_fields: Fields to check for conflicts
+
+ Returns:
+ Tuple of (existing_record_ids, existing_pks_map)
+ """
+ if not (update_conflicts and unique_fields):
+ return set(), {}
+
+ query_model = None
+ if self.mti_handler.is_mti_model():
+ query_model = self.mti_handler.find_model_with_unique_fields(unique_fields)
+ logger.info("MTI model detected: querying %s for unique fields %s", query_model.__name__, unique_fields)
+
+ existing_ids, existing_pks = self.record_classifier.classify_for_upsert(objs, unique_fields, query_model=query_model)
+
+ logger.info("Upsert classification: %d existing, %d new records", len(existing_ids), len(objs) - len(existing_ids))
+
+ return existing_ids, existing_pks
+
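classify_for_upsert itself lives in RecordClassifier and is not shown in this diff. A hedged sketch of one way to split incoming objects into "already in the database" vs. "new" with a single query on a unique column; the single-field key ("email") and the return shape are assumptions made for illustration:

    def classify_for_upsert(model_cls, objs, unique_field="email"):
        values = [getattr(o, unique_field) for o in objs]
        # One query: map unique value -> existing primary key.
        existing_pks = dict(
            model_cls.objects
            .filter(**{f"{unique_field}__in": values})
            .values_list(unique_field, "pk")
        )
        existing_keys = {v for v in values if v in existing_pks}
        return existing_keys, existing_pks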
+ def _is_upsert_operation(self, result_objects: List[Model]) -> bool:
+ """Check if the operation was an upsert (with update_conflicts=True)."""
+ if not result_objects:
+ return False
+ return hasattr(result_objects[0], "_bulk_hooks_upsert_metadata")
+
+ def _dispatch_upsert_after_hooks(self, result_objects: List[Model], models_in_chain: List[type]) -> None:
+ """
+ Dispatch after hooks for upsert operations, splitting by create/update.
+
+ This matches Salesforce behavior where created records fire after_create
+ and updated records fire after_update hooks.
+
+ Args:
+ result_objects: List of objects returned from the operation
+ models_in_chain: List of model classes in the MTI inheritance chain
+ """
+ created, updated = self._classify_upsert_results(result_objects)
+
+ logger.info("Upsert after hooks: %d created, %d updated", len(created), len(updated))
+
+ if created:
+ create_changeset = build_changeset_for_create(self.model_cls, created)
+ create_changeset.operation_meta["relationships_preloaded"] = True
+ self._dispatch_hooks_for_models(models_in_chain, create_changeset, "after_create", bypass_hooks=False)
+
+ if updated:
+ old_records_map = self.analyzer.fetch_old_records_map(updated)
+ update_changeset = build_changeset_for_update(self.model_cls, updated, {}, old_records_map=old_records_map)
+ update_changeset.operation_meta["relationships_preloaded"] = True
+ self._dispatch_hooks_for_models(models_in_chain, update_changeset, "after_update", bypass_hooks=False)
+
+ self._cleanup_upsert_metadata(result_objects)
+
+ def _classify_upsert_results(self, result_objects: List[Model]) -> Tuple[List[Model], List[Model]]:
+ """
+ Classify upsert results into created and updated objects.
+
+ Returns:
+ Tuple of (created_objects, updated_objects)
+ """
+ created_objects = []
+ updated_objects = []
+ objects_needing_timestamp_check = []
+
+ # First pass: collect objects with metadata
+ for obj in result_objects:
+ if hasattr(obj, "_bulk_hooks_was_created"):
+ if obj._bulk_hooks_was_created:
+ created_objects.append(obj)
+ else:
+ updated_objects.append(obj)
+ else:
+ objects_needing_timestamp_check.append(obj)
+
+ # Second pass: bulk check timestamps for objects without metadata
+ if objects_needing_timestamp_check:
+ created, updated = self._classify_by_timestamps(objects_needing_timestamp_check)
+ created_objects.extend(created)
+ updated_objects.extend(updated)
+
+ return created_objects, updated_objects
+
+ def _classify_by_timestamps(self, objects: List[Model]) -> Tuple[List[Model], List[Model]]:
+ """
+ Classify objects as created or updated based on timestamp comparison.
+
+ Returns:
+ Tuple of (created_objects, updated_objects)
+ """
+ created = []
+ updated = []
+
+ # Group by model class to handle MTI scenarios
+ objects_by_model = {}
+ for obj in objects:
+ model_cls = obj.__class__
+ objects_by_model.setdefault(model_cls, []).append(obj)
+
+ # Process each model class
+ for model_cls, objs in objects_by_model.items():
+ if not (hasattr(model_cls, "created_at") and hasattr(model_cls, "updated_at")):
+ # No timestamp fields, default to created
+ created.extend(objs)
+ continue
+
+ # Bulk fetch timestamps
+ pks = extract_pks(objs)
+ if not pks:
+ created.extend(objs)
+ continue
+
+ timestamp_map = {
+ record["pk"]: (record["created_at"], record["updated_at"])
+ for record in model_cls.objects.filter(pk__in=pks).values("pk", "created_at", "updated_at")
+ }
+
+ # Classify based on timestamp difference
+ for obj in objs:
+ if obj.pk not in timestamp_map:
+ created.append(obj)
+ continue
+
+ created_at, updated_at = timestamp_map[obj.pk]
+ if not (created_at and updated_at):
+ created.append(obj)
+ continue
+
+ time_diff = abs((updated_at - created_at).total_seconds())
+ if time_diff <= self.UPSERT_TIMESTAMP_THRESHOLD_SECONDS:
+ created.append(obj)
+ else:
+ updated.append(obj)
+
+ return created, updated
+
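The fallback heuristic above compares created_at/updated_at style timestamps against the 1.0 second UPSERT_TIMESTAMP_THRESHOLD_SECONDS constant. A tiny worked example of the arithmetic, with illustrative datetimes:

    from datetime import datetime, timedelta

    created_at = datetime(2024, 1, 1, 12, 0, 0)

    # Freshly inserted row: created_at == updated_at -> classified as "created".
    assert abs((created_at - created_at).total_seconds()) <= 1.0

    # Row touched again five minutes later -> classified as "updated".
    updated_at = created_at + timedelta(minutes=5)
    assert abs((updated_at - created_at).total_seconds()) > 1.0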
+ def _cleanup_upsert_metadata(self, result_objects: List[Model]) -> None:
+ """Clean up temporary metadata added during upsert operations."""
+ for obj in result_objects:
+ for attr in ("_bulk_hooks_was_created", "_bulk_hooks_upsert_metadata"):
+ if hasattr(obj, attr):
+ delattr(obj, attr)
+
+ # ==================== INSTANCE STATE TRACKING ====================
+
+ def _snapshot_instance_state(self, instances: List[Model]) -> Dict[Any, Dict[str, Any]]:
  """
  Create a snapshot of current instance field values.
-
+
  Args:
  instances: List of model instances
-
+
  Returns:
  Dict mapping pk -> {field_name: value}
  """
@@ -428,29 +702,31 @@ class BulkOperationCoordinator:

  field_values = {}
  for field in self.model_cls._meta.get_fields():
- # Skip relations that aren't concrete fields
+ # Skip non-concrete fields
  if field.many_to_many or field.one_to_many:
  continue

- field_name = field.name
  try:
- field_values[field_name] = getattr(instance, field_name)
+ field_values[field.name] = getattr(instance, field.name)
  except (AttributeError, FieldDoesNotExist):
- # Field not accessible (e.g., deferred field)
- field_values[field_name] = None
+ field_values[field.name] = None

  snapshot[instance.pk] = field_values

  return snapshot

- def _detect_modifications(self, instances, pre_hook_state):
+ def _detect_modifications(
+ self,
+ instances: List[Model],
+ pre_hook_state: Dict[Any, Dict[str, Any]],
+ ) -> Set[str]:
  """
  Detect which fields were modified by comparing to snapshot.
-
+
  Args:
  instances: List of model instances
- pre_hook_state: Previous state snapshot from _snapshot_instance_state
-
+ pre_hook_state: Previous state snapshot
+
  Returns:
  Set of field names that were modified
  """
@@ -468,314 +744,185 @@ class BulkOperationCoordinator:
  except (AttributeError, FieldDoesNotExist):
  current_value = None

- # Compare values
  if current_value != old_value:
  modified_fields.add(field_name)

  return modified_fields
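A self-contained sketch of the snapshot-and-compare technique that _snapshot_instance_state and _detect_modifications implement, reduced to plain Python objects (the function and field names are illustrative, not the package's API):

    def snapshot(objs, fields):
        # pk -> {field: value} captured before hooks run
        return {o.pk: {f: getattr(o, f, None) for f in fields} for o in objs}

    def detect_modifications(objs, before, fields):
        changed = set()
        for o in objs:
            old = before.get(o.pk, {})
            for f in fields:
                if getattr(o, f, None) != old.get(f):
                    changed.add(f)
        return changed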
 
- def _persist_hook_modifications(self, instances, modified_fields):
+ def _persist_hook_modifications(self, instances: List[Model], modified_fields: Set[str]) -> None:
  """
  Persist modifications made by hooks using bulk_update.
-
- This creates a "cascade" effect similar to Salesforce workflows.
-
+
  Args:
  instances: List of modified instances
  modified_fields: Set of field names that were modified
  """
- logger.info(
- f"Hooks modified {len(modified_fields)} field(s): "
- f"{', '.join(sorted(modified_fields))}",
- )
+ logger.info("Hooks modified %d field(s): %s", len(modified_fields), ", ".join(sorted(modified_fields)))
  logger.info("Auto-persisting modifications via bulk_update")

  # Use Django's bulk_update directly (not our hook version)
- # Create a fresh QuerySet to avoid recursion
  fresh_qs = QuerySet(model=self.model_cls, using=self.queryset.db)
  QuerySet.bulk_update(fresh_qs, instances, list(modified_fields))
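Calling QuerySet.bulk_update on a freshly constructed base QuerySet, rather than going through the model's default manager, side-steps any overridden manager or queryset methods and so avoids re-entering the hook machinery. A minimal illustration, assuming a hypothetical Order model and a list of already-modified instances:

    from django.db.models import QuerySet

    # A plain QuerySet bound directly to the model and database,
    # bypassing Order.objects (which may return a hook-aware subclass).
    plain_qs = QuerySet(model=Order, using="default")
    QuerySet.bulk_update(plain_qs, orders, ["status"])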
 
- @transaction.atomic
- def delete(self, bypass_hooks=False, bypass_validation=False):
+ # ==================== RELATIONSHIP PRELOADING ====================
+
+ def _fetch_instances_with_relationships(
+ self,
+ queryset: QuerySet,
+ relationships: Set[str],
+ ) -> List[Model]:
  """
- Execute delete with hooks.
+ Fetch instances with relationships preloaded.

  Args:
- bypass_hooks: Skip all hooks if True
- bypass_validation: Skip validation hooks if True
+ queryset: QuerySet to fetch from
+ relationships: Set of relationship names to preload

  Returns:
- Tuple of (count, details dict)
+ List of model instances with relationships loaded
  """
- # Get objects
- objs = list(self.queryset)
- if not objs:
- return 0, {}
-
- # Validate
- self.analyzer.validate_for_delete(objs)
-
- # Build changeset
- changeset = build_changeset_for_delete(self.model_cls, objs)
-
- # Execute with hook lifecycle
- def operation():
- # Use stored reference to parent method - clean and simple
- return QuerySet.delete(self.queryset)
+ if relationships:
+ logger.info("Fetching instances with select_related(%s)", list(relationships))
+ queryset = queryset.select_related(*relationships)
+ else:
+ logger.info("Fetching instances without select_related")

- return self._execute_with_mti_hooks(
- changeset=changeset,
- operation=operation,
- event_prefix="delete",
- bypass_hooks=bypass_hooks,
- bypass_validation=bypass_validation,
- )
+ return list(queryset)

- def clean(self, objs, is_create=None):
+ def _preload_condition_relationships_for_operation(
+ self,
+ changeset: ChangeSet,
+ models_in_chain: List[type],
+ ) -> None:
  """
- Execute validation hooks only (no database operations).
+ Preload relationships needed by hook conditions for this operation.

- This is used by Django's clean() method to hook VALIDATE_* events
- without performing the actual operation.
+ This prevents N+1 queries by loading all necessary relationships upfront.

  Args:
- objs: List of model instances to validate
- is_create: True for create, False for update, None to auto-detect
-
- Returns:
- None
+ changeset: The changeset for this operation
+ models_in_chain: List of model classes in inheritance chain
  """
- if not objs:
- return
-
- # Auto-detect if is_create not specified
- if is_create is None:
- is_create = objs[0].pk is None
+ relationships = self._extract_condition_relationships_for_operation(changeset, models_in_chain)

- # Build changeset based on operation type
- if is_create:
- changeset = build_changeset_for_create(self.model_cls, objs)
- event = "validate_create"
+ if relationships:
+ logger.info("Bulk preloading %d condition relationships for %s hooks", len(relationships), changeset.model_cls.__name__)
+ self.dispatcher._preload_condition_relationships(changeset, relationships)
+ changeset.operation_meta["relationships_preloaded"] = True
  else:
- # For update validation, no old records needed - hooks handle their own queries
- changeset = build_changeset_for_update(self.model_cls, objs, {})
- event = "validate_update"
-
- # Dispatch validation event only
- self.dispatcher.dispatch(changeset, event, bypass_hooks=False)
-
- # ==================== MTI PARENT HOOK SUPPORT ====================
+ logger.info("No condition relationships to preload for %s hooks", changeset.model_cls.__name__)

- def _build_changeset_for_model(self, original_changeset, target_model_cls):
- """
- Build a changeset for a specific model in the MTI inheritance chain.
-
- This allows parent model hooks to receive the same instances but with
- the correct model_cls for hook registration matching.
-
- Args:
- original_changeset: The original changeset (for child model)
- target_model_cls: The model class to build changeset for (parent model)
-
- Returns:
- ChangeSet for the target model
- """
- from django_bulk_hooks.changeset import ChangeSet
-
- # Create new changeset with target model but same record changes
- return ChangeSet(
- model_cls=target_model_cls,
- changes=original_changeset.changes,
- operation_type=original_changeset.operation_type,
- operation_meta=original_changeset.operation_meta,
- )
-
- def _execute_with_mti_hooks(
+ def _extract_condition_relationships_for_operation(
  self,
- changeset,
- operation,
- event_prefix,
- bypass_hooks=False,
- bypass_validation=False,
- ):
+ changeset: ChangeSet,
+ models_in_chain: List[type],
+ ) -> Set[str]:
  """
- Execute operation with hooks for entire MTI inheritance chain.
-
- This method dispatches hooks for both child and parent models when
- dealing with MTI models, ensuring parent model hooks fire when
- child instances are created/updated/deleted.
-
+ Extract relationships needed by hook conditions for this operation.
+
  Args:
- changeset: ChangeSet for the child model
- operation: Callable that performs the actual DB operation
- event_prefix: 'create', 'update', or 'delete'
- bypass_hooks: Skip all hooks if True
- bypass_validation: Skip validation hooks if True
-
+ changeset: The changeset for this operation
+ models_in_chain: List of model classes in inheritance chain
+
  Returns:
- Result of operation
+ Set of relationship field names to preload
  """
- if bypass_hooks:
- return operation()
+ relationships = set()
+ event_prefix = changeset.operation_type
+ events_to_check = [f"validate_{event_prefix}", f"before_{event_prefix}", f"after_{event_prefix}"]

- # Get all models in inheritance chain
- models_in_chain = [changeset.model_cls]
- if self.mti_handler.is_mti_model():
- parent_models = self.mti_handler.get_parent_models()
- models_in_chain.extend(parent_models)
-
- # VALIDATE phase - for all models in chain
- if not bypass_validation:
- for model_cls in models_in_chain:
- model_changeset = self._build_changeset_for_model(changeset, model_cls)
- self.dispatcher.dispatch(model_changeset, f"validate_{event_prefix}", bypass_hooks=False)
-
- # BEFORE phase - for all models in chain
  for model_cls in models_in_chain:
- model_changeset = self._build_changeset_for_model(changeset, model_cls)
- self.dispatcher.dispatch(model_changeset, f"before_{event_prefix}", bypass_hooks=False)
+ for event in events_to_check:
+ hooks = self.dispatcher.registry.get_hooks(model_cls, event)

- # Execute the actual operation
- result = operation()
+ for handler_cls, method_name, condition, priority in hooks:
+ if condition:
+ condition_rels = self.dispatcher._extract_condition_relationships(condition, model_cls)
+ relationships.update(condition_rels)

- # AFTER phase - for all models in chain
- # Use result if operation returns modified data (for create operations)
- if result and isinstance(result, list) and event_prefix == "create":
- # Check if this was an upsert operation
- is_upsert = self._is_upsert_operation(result)
- if is_upsert:
- # Split hooks for upsert: after_create for created, after_update for updated
- self._dispatch_upsert_after_hooks(result, models_in_chain)
- else:
- # Normal create operation
- from django_bulk_hooks.helpers import build_changeset_for_create
- changeset = build_changeset_for_create(changeset.model_cls, result)
-
- for model_cls in models_in_chain:
- model_changeset = self._build_changeset_for_model(changeset, model_cls)
- self.dispatcher.dispatch(model_changeset, f"after_{event_prefix}", bypass_hooks=False)
- else:
- # Non-create operations (update, delete)
- for model_cls in models_in_chain:
- model_changeset = self._build_changeset_for_model(changeset, model_cls)
- self.dispatcher.dispatch(model_changeset, f"after_{event_prefix}", bypass_hooks=False)
-
- return result
+ return relationships

- def _get_fk_fields_being_updated(self, update_kwargs):
+ def _extract_hook_relationships(self) -> Set[str]:
  """
- Get the relationship names for FK fields being updated.
+ Extract all relationship paths that hooks might access.

- This helps @select_related avoid preloading relationships that are
- being modified, which can cause cache conflicts.
-
- Args:
- update_kwargs: Dict of fields being updated
+ This includes both condition relationships and @select_related decorators
+ for the model and its MTI parents. Prevents N+1 queries during bulk operations.

  Returns:
- Set of relationship names (e.g., {'business'}) for FK fields being updated
- """
- fk_relationships = set()
-
- for field_name in update_kwargs.keys():
- try:
- field = self.model_cls._meta.get_field(field_name)
- if (field.is_relation and
- not field.many_to_many and
- not field.one_to_many and
- hasattr(field, "attname") and
- field.attname == field_name):
- # This is a FK field being updated by its attname (e.g., business_id)
- # Add the relationship name (e.g., 'business') to skip list
- fk_relationships.add(field.name)
- except FieldDoesNotExist:
- # If field lookup fails, skip it
- continue
+ Set of relationship field names to preload with select_related
+ """
+ relationships = set()
+ models_to_check = self.inheritance_chain
+ events_to_check = ["before_update", "after_update", "validate_update"]
+
+ for model_cls in models_to_check:
+ logger.info("Checking hooks for model %s", model_cls.__name__)
+
+ for event in events_to_check:
+ hooks = self.dispatcher.registry.get_hooks(model_cls, event)
+ logger.info("Found %d hooks for %s.%s", len(hooks), model_cls.__name__, event)
+
+ for handler_cls, method_name, condition, priority in hooks:
+ # Extract from conditions
+ if condition:
+ condition_rels = self.dispatcher._extract_condition_relationships(condition, model_cls)
+ if condition_rels:
+ logger.info("Condition relationships for %s.%s: %s", model_cls.__name__, method_name, condition_rels)
+ relationships.update(condition_rels)
+
+ # Extract from @select_related decorators
+ try:
+ method = getattr(handler_cls, method_name, None)
+ if method:
+ select_related_fields = getattr(method, "_select_related_fields", None)
+ if select_related_fields and hasattr(select_related_fields, "__iter__"):
+ logger.info(
+ "@select_related fields on %s.%s: %s", handler_cls.__name__, method_name, list(select_related_fields)
+ )
+ relationships.update(select_related_fields)
+ except Exception as e:
+ logger.warning("Failed to extract @select_related from %s.%s: %s", handler_cls.__name__, method_name, e)
+
+ # Also preload all forward FK relationships on the model (aggressive approach)
+ try:
+ for field in self.model_cls._meta.get_fields():
+ if field.is_relation and not field.many_to_many and not field.one_to_many:
+ relationships.add(field.name)
+ logger.info("AUTO: Adding FK relationship field %s", field.name)
+ except Exception as e:
+ logger.warning("Failed to extract all relationship fields: %s", e)

- return fk_relationships
+ logger.info("Total extracted relationships for %s: %s", self.model_cls.__name__, list(relationships))

- def _is_upsert_operation(self, result_objects):
- """
- Check if the operation was an upsert (mixed create/update).
-
- Args:
- result_objects: List of objects returned from the operation
-
- Returns:
- True if this was an upsert operation, False otherwise
- """
- if not result_objects:
- return False
-
- # Check if any object has upsert metadata
- return hasattr(result_objects[0], '_bulk_hooks_upsert_metadata')
+ return relationships
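The payoff of the relationship extraction above is the standard select_related optimization. A small before/after sketch with a hypothetical Order model holding a "business" foreign key:

    # N+1: one query for the orders, plus one query per order for .business.
    for order in Order.objects.filter(status="pending"):
        print(order.business.name)

    # Preloaded: a single JOINed query; hook conditions that touch
    # order.business then hit the cache instead of the database.
    for order in Order.objects.filter(status="pending").select_related("business"):
        print(order.business.name)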
 
- def _dispatch_upsert_after_hooks(self, result_objects, models_in_chain):
+ # ==================== HELPER METHODS ====================
+
+ def _build_update_changeset(
+ self,
+ objs: List[Model],
+ fields: List[str],
+ old_records_map: Dict[Any, Model],
+ ) -> ChangeSet:
  """
- Dispatch after hooks for upsert operations, splitting by create/update.
-
- This matches Salesforce behavior:
- - Records that were created fire after_create hooks
- - Records that were updated fire after_update hooks
-
+ Build a changeset for bulk update operations.
+
  Args:
- result_objects: List of objects returned from the operation
- models_in_chain: List of model classes in the MTI inheritance chain
+ objs: List of model instances to update
+ fields: List of field names to update
+ old_records_map: Map of pk -> old record
+
+ Returns:
+ ChangeSet for the update operation
  """
- # Split objects by operation type
- created_objects = []
- updated_objects = []
-
- for obj in result_objects:
- was_created = getattr(obj, '_bulk_hooks_was_created', True)
- if was_created:
- created_objects.append(obj)
- else:
- updated_objects.append(obj)
-
- logger.info(
- f"Upsert after hooks: {len(created_objects)} created, "
- f"{len(updated_objects)} updated"
- )
-
- # Dispatch after_create hooks for created objects
- if created_objects:
- from django_bulk_hooks.helpers import build_changeset_for_create
- create_changeset = build_changeset_for_create(self.model_cls, created_objects)
-
- for model_cls in models_in_chain:
- model_changeset = self._build_changeset_for_model(create_changeset, model_cls)
- self.dispatcher.dispatch(model_changeset, "after_create", bypass_hooks=False)
-
- # Dispatch after_update hooks for updated objects
- if updated_objects:
- # Fetch old records for proper change detection
- old_records_map = self.analyzer.fetch_old_records_map(updated_objects)
-
- from django_bulk_hooks.helpers import build_changeset_for_update
- update_changeset = build_changeset_for_update(
- self.model_cls,
- updated_objects,
- update_kwargs={}, # Empty since we don't know specific fields
- old_records_map=old_records_map,
+ changes = [
+ RecordChange(
+ new_record=obj,
+ old_record=old_records_map.get(obj.pk),
+ changed_fields=fields,
  )
-
- for model_cls in models_in_chain:
- model_changeset = self._build_changeset_for_model(update_changeset, model_cls)
- self.dispatcher.dispatch(model_changeset, "after_update", bypass_hooks=False)
-
- # Clean up temporary metadata
- self._cleanup_upsert_metadata(result_objects)
+ for obj in objs
+ ]

- def _cleanup_upsert_metadata(self, result_objects):
- """
- Clean up temporary metadata added during upsert operations.
-
- Args:
- result_objects: List of objects to clean up
- """
- for obj in result_objects:
- if hasattr(obj, '_bulk_hooks_was_created'):
- delattr(obj, '_bulk_hooks_was_created')
- if hasattr(obj, '_bulk_hooks_upsert_metadata'):
- delattr(obj, '_bulk_hooks_upsert_metadata')
+ return ChangeSet(self.model_cls, changes, "update", {"fields": fields})