django-bulk-hooks 0.2.44__py3-none-any.whl → 0.2.93__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
- django_bulk_hooks/__init__.py +0 -3
- django_bulk_hooks/changeset.py +214 -230
- django_bulk_hooks/conditions.py +7 -3
- django_bulk_hooks/decorators.py +5 -15
- django_bulk_hooks/dispatcher.py +546 -242
- django_bulk_hooks/handler.py +2 -2
- django_bulk_hooks/helpers.py +258 -100
- django_bulk_hooks/manager.py +134 -130
- django_bulk_hooks/models.py +89 -75
- django_bulk_hooks/operations/analyzer.py +466 -315
- django_bulk_hooks/operations/bulk_executor.py +608 -413
- django_bulk_hooks/operations/coordinator.py +601 -454
- django_bulk_hooks/operations/field_utils.py +335 -0
- django_bulk_hooks/operations/mti_handler.py +696 -511
- django_bulk_hooks/operations/mti_plans.py +103 -96
- django_bulk_hooks/operations/record_classifier.py +35 -23
- django_bulk_hooks/queryset.py +60 -15
- django_bulk_hooks/registry.py +0 -2
- {django_bulk_hooks-0.2.44.dist-info → django_bulk_hooks-0.2.93.dist-info}/METADATA +55 -4
- django_bulk_hooks-0.2.93.dist-info/RECORD +27 -0
- django_bulk_hooks-0.2.44.dist-info/RECORD +0 -26
- {django_bulk_hooks-0.2.44.dist-info → django_bulk_hooks-0.2.93.dist-info}/LICENSE +0 -0
- {django_bulk_hooks-0.2.44.dist-info → django_bulk_hooks-0.2.93.dist-info}/WHEEL +0 -0
django_bulk_hooks/dispatcher.py
CHANGED
@@ -1,242 +1,546 @@
- """
- HookDispatcher: Single execution path for all hooks.
-
- Provides deterministic, priority-ordered hook execution,
- similar to Salesforce's hook framework.
- """
-
- import logging
-
- logger = logging.getLogger(__name__)
-
-
- class HookDispatcher:
-     """
-     Single execution path for all hooks.
-
-     Responsibilities:
-     - Execute hooks in priority order
-     - Filter records based on conditions
-     - Provide ChangeSet context to hooks
-     - Fail-fast error propagation
-     - Manage complete operation lifecycle (VALIDATE, BEFORE, AFTER)
-     """
-
-     def __init__(self, registry):
-         """
-         Initialize the dispatcher.
-
-         Args:
-             registry: The hook registry (provides get_hooks method)
-         """
-         self.registry = registry
-
-     def execute_operation_with_hooks(
-         self,
-         changeset,
-         operation,
-         event_prefix,
-         bypass_hooks=False,
[removed lines 40-242 of the old dispatcher.py were not captured in this view]
+ """
+ HookDispatcher: Single execution path for all hooks.
+
+ Provides deterministic, priority-ordered hook execution,
+ similar to Salesforce's hook framework.
+ """
+
+ import logging
+
+ logger = logging.getLogger(__name__)
+
+
+ class HookDispatcher:
+     """
+     Single execution path for all hooks.
+
+     Responsibilities:
+     - Execute hooks in priority order
+     - Filter records based on conditions
+     - Provide ChangeSet context to hooks
+     - Fail-fast error propagation
+     - Manage complete operation lifecycle (VALIDATE, BEFORE, AFTER)
+     """
+
+     def __init__(self, registry):
+         """
+         Initialize the dispatcher.
+
+         Args:
+             registry: The hook registry (provides get_hooks method)
+         """
+         self.registry = registry
+
+     def execute_operation_with_hooks(
+         self,
+         changeset,
+         operation,
+         event_prefix,
+         bypass_hooks=False,
+     ):
+         """
+         Execute operation with full hook lifecycle.
+
+         This is the high-level method that coordinates the complete lifecycle:
+         1. VALIDATE_{event}
+         2. BEFORE_{event}
+         3. Actual operation
+         4. AFTER_{event}
+
+         Args:
+             changeset: ChangeSet for the operation
+             operation: Callable that performs the actual DB operation
+             event_prefix: 'create', 'update', or 'delete'
+             bypass_hooks: Skip all hooks if True
+
+         Returns:
+             Result of operation
+         """
+         if bypass_hooks:
+             return operation()
+
+         # VALIDATE phase
+         self.dispatch(changeset, f"validate_{event_prefix}", bypass_hooks=False)
+
+         # BEFORE phase
+         self.dispatch(changeset, f"before_{event_prefix}", bypass_hooks=False)
+
+         # Execute the actual operation
+         result = operation()
+
+         # AFTER phase - use result if operation returns modified data
+         if result and isinstance(result, list) and event_prefix == "create":
+             # For create, rebuild changeset with assigned PKs
+             from django_bulk_hooks.helpers import build_changeset_for_create
+
+             changeset = build_changeset_for_create(changeset.model_cls, result)
+
+         self.dispatch(changeset, f"after_{event_prefix}", bypass_hooks=False)
+
+         return result
+
+     def dispatch(self, changeset, event, bypass_hooks=False):
+         """
+         Dispatch hooks for a changeset with deterministic ordering.
+
+         This is the single execution path for ALL hooks in the system.
+
+         Args:
+             changeset: ChangeSet instance with record changes
+             event: Event name (e.g., 'after_update', 'before_create')
+             bypass_hooks: If True, skip all hook execution
+
+         Raises:
+             Exception: Any exception raised by a hook (fails fast)
+             RecursionError: If hooks create an infinite loop (Python's built-in limit)
+         """
+         if bypass_hooks:
+             return
+
+         # Get hooks sorted by priority (deterministic order)
+         hooks = self.registry.get_hooks(changeset.model_cls, event)
+
+         logger.debug(f"🧵 DISPATCH: changeset.model_cls={changeset.model_cls.__name__}, event={event}")
+         logger.debug(f"🎣 HOOKS_FOUND: {len(hooks)} hooks for {changeset.model_cls.__name__}.{event}: {[f'{h[0].__name__}.{h[1]}' for h in hooks]}")
+
+         if not hooks:
+             return
+
+         # Create an operation key that includes the changeset model to avoid
+         # deduplicating hooks across different operations on the same records
+         # This prevents the same hook from executing multiple times for MTI inheritance chains
+         # but allows different operations on the same records to execute their hooks
+         record_ids = set()
+         for change in changeset.changes:
+             if change.new_record and change.new_record.pk:
+                 record_ids.add(change.new_record.pk)
+             if change.old_record and change.old_record.pk:
+                 record_ids.add(change.old_record.pk)
+
+         # Sort record IDs safely (handle Mock objects and other non-comparable types)
+         try:
+             sorted_record_ids = tuple(sorted(record_ids, key=lambda x: str(x)))
+         except (TypeError, AttributeError):
+             # Fallback for non-comparable objects (like Mock objects in tests)
+             sorted_record_ids = tuple(record_ids)
+
+         # Include changeset model and operation details to make the key more specific
+         operation_meta = getattr(changeset, 'operation_meta', {}) or {}
+         operation_type = getattr(changeset, 'operation_type', 'unknown')
+
+         # Include update_kwargs if present to distinguish different queryset operations
+         update_kwargs = operation_meta.get('update_kwargs', {})
+         if update_kwargs:
+             try:
+                 # Convert to a hashable representation
+                 update_kwargs_key = tuple(sorted((k, str(v)) for k, v in update_kwargs.items()))
+             except (TypeError, AttributeError):
+                 # Fallback if values are not convertible to string
+                 update_kwargs_key = tuple(sorted(update_kwargs.keys()))
+         else:
+             update_kwargs_key = ()
+
+         operation_key = (event, changeset.model_cls.__name__, operation_type, sorted_record_ids, update_kwargs_key)
+
+         # Track executed hooks to prevent duplicates in MTI inheritance chains
+         if not hasattr(self, '_executed_hooks'):
+             self._executed_hooks = set()
+
+         # Filter out hooks that have already been executed for this operation
+         unique_hooks = []
+         skipped_hooks = []
+         for handler_cls, method_name, condition, priority in hooks:
+             hook_key = (handler_cls, method_name, operation_key)
+             if hook_key not in self._executed_hooks:
+                 unique_hooks.append((handler_cls, method_name, condition, priority))
+                 self._executed_hooks.add(hook_key)
+             else:
+                 skipped_hooks.append((handler_cls.__name__, method_name))
+
+         # Debug logging for hook deduplication
+         if skipped_hooks:
+             logger.debug(f"⏭️ SKIPPED_DUPS: {len(skipped_hooks)} duplicate hooks: {[f'{cls}.{method}' for cls, method in skipped_hooks]}")
+
+         if unique_hooks:
+             logger.debug(f"✅ EXECUTING_UNIQUE: {len(unique_hooks)} unique hooks: {[f'{h[0].__name__}.{h[1]}' for h in unique_hooks]}")
+
+         if not unique_hooks:
+             return
+
+         # Execute hooks in priority order
+         logger.info(f"🔥 HOOKS: Executing {len(unique_hooks)} hooks for {changeset.model_cls.__name__}.{event}")
+         for handler_cls, method_name, condition, priority in unique_hooks:
+             logger.info(f" → {handler_cls.__name__}.{method_name} (priority={priority})")
+             self._execute_hook(handler_cls, method_name, condition, changeset, event)
+
+     def _reset_executed_hooks(self):
+         """Reset the executed hooks tracking for a new operation."""
+         self._executed_hooks = set()
+
+     def _execute_hook(self, handler_cls, method_name, condition, changeset, event):
+         """
+         Execute a single hook with condition checking.
+
+         Args:
+             handler_cls: The hook handler class
+             method_name: Name of the method to call
+             condition: Optional condition to filter records
+             changeset: ChangeSet with all record changes
+             event: The hook event (e.g., 'before_create')
+         """
+         # Use DI factory to create handler instance EARLY to access method decorators
+         from django_bulk_hooks.factory import create_hook_instance
+
+         handler = create_hook_instance(handler_cls)
+         method = getattr(handler, method_name)
+
+         # PRELOAD @select_related RELATIONSHIPS BEFORE CONDITION EVALUATION
+         # This ensures both conditions and hook methods have access to preloaded relationships
+
+         # Check if method has @select_related decorator
+         preload_func = getattr(method, "_select_related_preload", None)
+         if preload_func:
+             # Preload relationships to prevent N+1 queries in both conditions and hook methods
+             try:
+                 model_cls_override = getattr(handler, "model_cls", None)
+
+                 # Get FK fields being updated to avoid preloading conflicting relationships
+                 skip_fields = changeset.operation_meta.get("fk_fields_being_updated", set())
+
+                 # Preload for new_records (needed for condition evaluation and hook execution)
+                 if changeset.new_records:
+                     preload_func(
+                         changeset.new_records,
+                         model_cls=model_cls_override,
+                         skip_fields=skip_fields,
+                     )
+
+                 # Also preload for old_records (for conditions that check previous values)
+                 if changeset.old_records:
+                     preload_func(
+                         changeset.old_records,
+                         model_cls=model_cls_override,
+                         skip_fields=skip_fields,
+                     )
+
+                 # Mark that relationships have been preloaded to avoid duplicate condition preloading
+                 changeset.operation_meta['relationships_preloaded'] = True
+                 logger.debug(f"🔗 @select_related: Preloaded relationships for {handler_cls.__name__}.{method_name}")
+
+             except Exception as e:
+                 logger.warning(f"Failed to preload relationships for {handler_cls.__name__}.{method_name}: {e}")
+
+         # SPECIAL HANDLING: Explicit @select_related support for BEFORE_CREATE hooks
+         # (This can stay for additional BEFORE_CREATE-specific logic if needed)
+         select_related_fields = getattr(method, "_select_related_fields", None)
+         if select_related_fields and event == "before_create" and changeset.new_records:
+             self._preload_select_related_for_before_create(changeset, select_related_fields)
+
+         # NOW condition evaluation is safe - relationships are preloaded
+         if condition:
+             # Skip per-hook preloading if relationships were already preloaded upfront
+             if not changeset.operation_meta.get('relationships_preloaded', False):
+                 condition_relationships = self._extract_condition_relationships(condition, changeset.model_cls)
+                 logger.info(f"🔍 CONDITION: {handler_cls.__name__}.{method_name} has condition, extracted relationships: {condition_relationships}")
+                 if condition_relationships:
+                     logger.info(f"🔗 PRELOADING: Preloading condition relationships for {len(changeset.changes)} records")
+                     self._preload_condition_relationships(changeset, condition_relationships)
+             else:
+                 logger.debug(f"🔍 CONDITION: {handler_cls.__name__}.{method_name} has condition (relationships already preloaded)")
+
+         # Filter records based on condition (now safe - relationships are preloaded)
+         if condition:
+             logger.info(f"⚡ EVALUATING: Checking condition for {handler_cls.__name__}.{method_name} on {len(changeset.changes)} records")
+             filtered_changes = [change for change in changeset.changes if condition.check(change.new_record, change.old_record)]
+             logger.info(f"✅ CONDITION: {len(filtered_changes)}/{len(changeset.changes)} records passed condition filter")
+
+             if not filtered_changes:
+                 # No records match condition, skip this hook
+                 return
+
+             # Create filtered changeset
+             from django_bulk_hooks.changeset import ChangeSet
+
+             filtered_changeset = ChangeSet(
+                 changeset.model_cls,
+                 filtered_changes,
+                 changeset.operation_type,
+                 changeset.operation_meta,
+             )
+         else:
+             # No condition, use full changeset
+             filtered_changeset = changeset
+
+         # Execute hook with ChangeSet
+         #
+         # ARCHITECTURE NOTE: Hook Contract
+         # ====================================
+         # All hooks must accept **kwargs for forward compatibility.
+         # We pass: changeset, new_records, old_records
+         #
+         # Old hooks that don't use changeset: def hook(self, new_records, old_records, **kwargs)
+         # New hooks that do use changeset: def hook(self, changeset, new_records, old_records, **kwargs)
+         #
+         # This is standard Python framework design (see Django signals, Flask hooks, etc.)
+         logger.info(f" 🚀 Executing: {handler_cls.__name__}.{method_name}")
+         try:
+             method(
+                 changeset=filtered_changeset,
+                 new_records=filtered_changeset.new_records,
+                 old_records=filtered_changeset.old_records,
+             )
+             logger.info(f" ✅ Completed: {handler_cls.__name__}.{method_name}")
+         except Exception as e:
+             # Fail-fast: re-raise to rollback transaction
+             logger.error(
+                 f"Hook {handler_cls.__name__}.{method_name} failed: {e}",
+                 exc_info=True,
+             )
+             raise
+
+     def _extract_condition_relationships(self, condition, model_cls):
+         """
+         Extract relationship paths that a condition might access.
+
+         Args:
+             condition: HookCondition instance
+             model_cls: The model class
+
+         Returns:
+             set: Set of relationship field names to preload
+         """
+         relationships = set()
+
+         # Guard against Mock objects and non-condition objects
+         if not hasattr(condition, 'check') or hasattr(condition, '_mock_name'):
+             return relationships
+
+         # Handle different condition types
+         if hasattr(condition, 'field'):
+             # Extract relationships from field path (e.g., "status__value" -> "status")
+             field_path = condition.field
+             if isinstance(field_path, str):
+                 if '__' in field_path:
+                     # Take the first part before __ (the relationship to preload)
+                     rel_field = field_path.split('__')[0]
+
+                     # Normalize FK field names: business_id -> business
+                     if rel_field.endswith('_id'):
+                         potential_field_name = rel_field[:-3]  # Remove '_id'
+                         if self._is_relationship_field(model_cls, potential_field_name):
+                             rel_field = potential_field_name
+
+                     relationships.add(rel_field)
+                 else:
+                     # Handle single field (no __ notation)
+                     rel_field = field_path
+
+                     # Normalize FK field names: business_id -> business
+                     if rel_field.endswith('_id'):
+                         potential_field_name = rel_field[:-3]  # Remove '_id'
+                         if self._is_relationship_field(model_cls, potential_field_name):
+                             rel_field = potential_field_name
+
+                     # Only add if it's actually a relationship field
+                     if self._is_relationship_field(model_cls, rel_field):
+                         relationships.add(rel_field)
+
+         # Handle composite conditions (AndCondition, OrCondition)
+         if hasattr(condition, 'cond1') and hasattr(condition, 'cond2'):
+             relationships.update(self._extract_condition_relationships(condition.cond1, model_cls))
+             relationships.update(self._extract_condition_relationships(condition.cond2, model_cls))
+
+         # Handle NotCondition
+         if hasattr(condition, 'cond'):
+             relationships.update(self._extract_condition_relationships(condition.cond, model_cls))
+
+         return relationships
+
+     def _is_relationship_field(self, model_cls, field_name):
+         """Check if a field is a relationship field."""
+         try:
+             field = model_cls._meta.get_field(field_name)
+             return field.is_relation and not field.many_to_many
+         except:
+             return False
+
+     def _preload_condition_relationships(self, changeset, relationships):
+         """
+         Preload relationships needed for condition evaluation.
+
+         This prevents N+1 queries when conditions access relationships on both
+         old_records and new_records (e.g., HasChanged conditions).
+
+         Args:
+             changeset: ChangeSet with records
+             relationships: Set of relationship field names to preload
+         """
+         if not relationships:
+             return
+
+         # Use Django's select_related to preload relationships
+         relationship_list = list(relationships)
+
+         # Collect all unique PKs from both new_records and old_records
+         all_ids = set()
+
+         # Add PKs from new_records
+         if changeset.new_records:
+             all_ids.update(obj.pk for obj in changeset.new_records if obj.pk is not None)
+
+         # Add PKs from old_records
+         if changeset.old_records:
+             all_ids.update(obj.pk for obj in changeset.old_records if obj.pk is not None)
+
+         # Bulk preload relationships for all records that have PKs
+         if all_ids:
+             preloaded = changeset.model_cls.objects.filter(pk__in=list(all_ids)).select_related(*relationship_list).in_bulk()
+
+             # Update new_records with preloaded relationships
+             if changeset.new_records:
+                 for obj in changeset.new_records:
+                     if obj.pk and obj.pk in preloaded:
+                         preloaded_obj = preloaded[obj.pk]
+                         for rel in relationship_list:
+                             if hasattr(preloaded_obj, rel):
+                                 setattr(obj, rel, getattr(preloaded_obj, rel))
+
+             # Update old_records with preloaded relationships
+             if changeset.old_records:
+                 for obj in changeset.old_records:
+                     if obj.pk and obj.pk in preloaded:
+                         preloaded_obj = preloaded[obj.pk]
+                         for rel in relationship_list:
+                             if hasattr(preloaded_obj, rel):
+                                 setattr(obj, rel, getattr(preloaded_obj, rel))
+
+         # Handle unsaved new_records by preloading their FK targets (bulk query to avoid N+1)
+         if changeset.new_records:
+             # Collect FK IDs for each relationship from unsaved records
+             field_ids_map = {rel: set() for rel in relationship_list}
+
+             for obj in changeset.new_records:
+                 if obj.pk is None:  # Unsaved object
+                     for rel in relationship_list:
+                         if hasattr(obj, f'{rel}_id'):
+                             rel_id = getattr(obj, f'{rel}_id')
+                             if rel_id:
+                                 field_ids_map[rel].add(rel_id)
+
+             # Bulk load relationships for unsaved records
+             field_objects_map = {}
+             for rel, ids in field_ids_map.items():
+                 if not ids:
+                     continue
+                 try:
+                     rel_model = getattr(changeset.model_cls._meta.get_field(rel).remote_field, 'model')
+                     field_objects_map[rel] = rel_model.objects.in_bulk(ids)
+                 except Exception:
+                     field_objects_map[rel] = {}
+
+             # Attach relationships to unsaved records
+             for obj in changeset.new_records:
+                 if obj.pk is None:  # Unsaved object
+                     for rel in relationship_list:
+                         rel_id = getattr(obj, f'{rel}_id', None)
+                         if rel_id and rel in field_objects_map:
+                             rel_obj = field_objects_map[rel].get(rel_id)
+                             if rel_obj:
+                                 setattr(obj, rel, rel_obj)
+
+     def _preload_select_related_for_before_create(self, changeset, select_related_fields):
+         """
+         Explicit bulk preloading for @select_related on BEFORE_CREATE hooks.
+
+         This method provides guaranteed N+1 elimination by:
+         1. Collecting all FK IDs from unsaved new_records
+         2. Bulk querying related objects
+         3. Attaching relationships to each record
+
+         Args:
+             changeset: ChangeSet with new_records (unsaved objects)
+             select_related_fields: List of field names to preload (e.g., ['financial_account'])
+         """
+         # Ensure select_related_fields is actually iterable (not a Mock in tests)
+         if not select_related_fields or not changeset.new_records or not hasattr(select_related_fields, '__iter__'):
+             return
+
+         logger.info(f"🔗 BULK PRELOAD: Preloading {select_related_fields} for {len(changeset.new_records)} unsaved records")
+
+         # Collect FK IDs for each field
+         field_ids_map = {field: set() for field in select_related_fields}
+
+         for record in changeset.new_records:
+             for field in select_related_fields:
+                 fk_id = getattr(record, f'{field}_id', None)
+                 if fk_id is not None:
+                     field_ids_map[field].add(fk_id)
+
+         # Bulk query related objects for each field
+         field_objects_map = {}
+         for field, ids in field_ids_map.items():
+             if not ids:
+                 continue
+
+             try:
+                 # Get the related model
+                 relation_field = changeset.model_cls._meta.get_field(field)
+                 if not relation_field.is_relation:
+                     continue
+
+                 related_model = relation_field.remote_field.model
+
+                 # Bulk query: related_model.objects.filter(id__in=ids)
+                 field_objects_map[field] = related_model.objects.in_bulk(ids)
+                 logger.info(f" ✅ Bulk loaded {len(field_objects_map[field])} {related_model.__name__} objects for field '{field}'")
+
+             except Exception as e:
+                 logger.warning(f" ❌ Failed to bulk load field '{field}': {e}")
+                 field_objects_map[field] = {}
+
+         # Attach relationships to each record
+         for record in changeset.new_records:
+             for field in select_related_fields:
+                 fk_id = getattr(record, f'{field}_id', None)
+                 if fk_id is not None and field in field_objects_map:
+                     related_obj = field_objects_map[field].get(fk_id)
+                     if related_obj is not None:
+                         setattr(record, field, related_obj)
+                         # Also cache in Django's fields_cache for consistency
+                         if hasattr(record, '_state') and hasattr(record._state, 'fields_cache'):
+                             record._state.fields_cache[field] = related_obj
+
+         logger.info(f"🔗 BULK PRELOAD: Completed relationship attachment for {len(changeset.new_records)} records")
+
+
+ # Global dispatcher instance
+ _dispatcher: HookDispatcher | None = None
+
+
+ def get_dispatcher():
+     """
+     Get the global dispatcher instance.
+
+     Creates the dispatcher on first access (singleton pattern).
+
+     Returns:
+         HookDispatcher instance
+     """
+     global _dispatcher
+     if _dispatcher is None:
+         # Import here to avoid circular dependency
+         from django_bulk_hooks.registry import get_registry
+
+         # Create dispatcher with the registry instance
+         _dispatcher = HookDispatcher(get_registry())
+     return _dispatcher
+
+
+ def reset_dispatcher():
+     """
+     Reset the global dispatcher instance.
+
+     Useful for testing to ensure clean state between tests.
+     """
+     global _dispatcher
+     _dispatcher = None
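
For orientation, here is a minimal sketch of how the lifecycle and hook contract described in the new dispatcher.py fit together. It is an illustration built only from names visible in this diff, not code from the package: the Account model, the AccountHooks handler, and perform_bulk_insert are hypothetical; the handler is assumed to already be registered with the package's registry (the registration API is not part of this file); and build_changeset_for_create is assumed to accept unsaved records, mirroring its use inside execute_operation_with_hooks.

# Hedged usage sketch; Account, AccountHooks, and perform_bulk_insert are hypothetical.
from django_bulk_hooks.dispatcher import get_dispatcher
from django_bulk_hooks.helpers import build_changeset_for_create

from myapp.models import Account  # hypothetical Django model with a `name` field


class AccountHooks:
    # Per the "Hook Contract" note above: hooks accept **kwargs for forward
    # compatibility and receive changeset, new_records, and old_records.
    def normalize_names(self, changeset, new_records, old_records, **kwargs):
        for record in new_records:
            record.name = record.name.strip()


def perform_bulk_insert(accounts):
    # Placeholder for the real DB write; in the package the manager/coordinator
    # supplies the actual operation callable.
    return list(accounts)


def create_accounts(accounts):
    # Assumed: build_changeset_for_create can build the initial changeset for
    # unsaved records, as its name suggests.
    changeset = build_changeset_for_create(Account, accounts)

    # Runs validate_create -> before_create -> operation() -> after_create,
    # failing fast (and allowing a transaction rollback) if any hook raises.
    return get_dispatcher().execute_operation_with_hooks(
        changeset,
        operation=lambda: perform_bulk_insert(accounts),
        event_prefix="create",
    )

Wiring AccountHooks.normalize_names to the before_create event would normally go through the package's decorators and registry, which sit outside this file's diff.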