django-bulk-hooks 0.1.249__tar.gz → 0.1.251__tar.gz

This diff compares the content of two publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as published.

Potentially problematic release.


This version of django-bulk-hooks might be problematic.

Files changed (17)
  1. {django_bulk_hooks-0.1.249 → django_bulk_hooks-0.1.251}/PKG-INFO +3 -3
  2. {django_bulk_hooks-0.1.249 → django_bulk_hooks-0.1.251}/django_bulk_hooks/__init__.py +4 -4
  3. {django_bulk_hooks-0.1.249 → django_bulk_hooks-0.1.251}/django_bulk_hooks/handler.py +188 -188
  4. {django_bulk_hooks-0.1.249 → django_bulk_hooks-0.1.251}/django_bulk_hooks/queryset.py +376 -105
  5. {django_bulk_hooks-0.1.249 → django_bulk_hooks-0.1.251}/pyproject.toml +1 -1
  6. {django_bulk_hooks-0.1.249 → django_bulk_hooks-0.1.251}/LICENSE +0 -0
  7. {django_bulk_hooks-0.1.249 → django_bulk_hooks-0.1.251}/README.md +0 -0
  8. {django_bulk_hooks-0.1.249 → django_bulk_hooks-0.1.251}/django_bulk_hooks/conditions.py +0 -0
  9. {django_bulk_hooks-0.1.249 → django_bulk_hooks-0.1.251}/django_bulk_hooks/constants.py +0 -0
  10. {django_bulk_hooks-0.1.249 → django_bulk_hooks-0.1.251}/django_bulk_hooks/context.py +0 -0
  11. {django_bulk_hooks-0.1.249 → django_bulk_hooks-0.1.251}/django_bulk_hooks/decorators.py +0 -0
  12. {django_bulk_hooks-0.1.249 → django_bulk_hooks-0.1.251}/django_bulk_hooks/engine.py +0 -0
  13. {django_bulk_hooks-0.1.249 → django_bulk_hooks-0.1.251}/django_bulk_hooks/enums.py +0 -0
  14. {django_bulk_hooks-0.1.249 → django_bulk_hooks-0.1.251}/django_bulk_hooks/manager.py +0 -0
  15. {django_bulk_hooks-0.1.249 → django_bulk_hooks-0.1.251}/django_bulk_hooks/models.py +0 -0
  16. {django_bulk_hooks-0.1.249 → django_bulk_hooks-0.1.251}/django_bulk_hooks/priority.py +0 -0
  17. {django_bulk_hooks-0.1.249 → django_bulk_hooks-0.1.251}/django_bulk_hooks/registry.py +0 -0
@@ -1,8 +1,7 @@
- Metadata-Version: 2.1
+ Metadata-Version: 2.3
  Name: django-bulk-hooks
- Version: 0.1.249
+ Version: 0.1.251
  Summary: Hook-style hooks for Django bulk operations like bulk_create and bulk_update.
- Home-page: https://github.com/AugendLimited/django-bulk-hooks
  License: MIT
  Keywords: django,bulk,hooks
  Author: Konrad Beck
@@ -14,6 +13,7 @@ Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
  Classifier: Programming Language :: Python :: 3.13
  Requires-Dist: Django (>=4.0)
+ Project-URL: Homepage, https://github.com/AugendLimited/django-bulk-hooks
  Project-URL: Repository, https://github.com/AugendLimited/django-bulk-hooks
  Description-Content-Type: text/markdown

@@ -1,4 +1,4 @@
- from django_bulk_hooks.handler import Hook as HookClass
- from django_bulk_hooks.manager import BulkHookManager
-
- __all__ = ["BulkHookManager", "HookClass"]
+ from django_bulk_hooks.handler import Hook as HookClass
+ from django_bulk_hooks.manager import BulkHookManager
+
+ __all__ = ["BulkHookManager", "HookClass"]
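
__init__.py simply re-exports the two public entry points, BulkHookManager and the Hook base class (as HookClass); the removed and re-added lines are identical, so the change here is presumably whitespace or line endings. A minimal sketch of how a project might wire the manager in, assuming BulkHookManager is meant to replace a model's default manager (the Account model and its fields are hypothetical):

# Hypothetical wiring; the Account model and its fields are illustrative only.
from django.db import models

from django_bulk_hooks import BulkHookManager


class Account(models.Model):
    name = models.CharField(max_length=100)
    balance = models.DecimalField(max_digits=12, decimal_places=2, default=0)

    # Assumption: BulkHookManager routes bulk_create / bulk_update / delete
    # through the hook-aware queryset so registered hooks fire for this model.
    objects = BulkHookManager()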
@@ -1,188 +1,188 @@
1
- import logging
2
- import threading
3
- from collections import deque
4
-
5
- from django.db import transaction
6
-
7
- from django_bulk_hooks.registry import get_hooks, register_hook
8
-
9
- logger = logging.getLogger(__name__)
10
-
11
-
12
- # Thread-local hook context and hook state
13
- class HookVars(threading.local):
14
- def __init__(self):
15
- self.new = None
16
- self.old = None
17
- self.event = None
18
- self.model = None
19
- self.depth = 0
20
-
21
-
22
- hook_vars = HookVars()
23
-
24
- # Hook queue per thread
25
- _hook_context = threading.local()
26
-
27
-
28
- def get_hook_queue():
29
- if not hasattr(_hook_context, "queue"):
30
- _hook_context.queue = deque()
31
- return _hook_context.queue
32
-
33
-
34
- class HookContextState:
35
- @property
36
- def is_before(self):
37
- return hook_vars.event.startswith("before_") if hook_vars.event else False
38
-
39
- @property
40
- def is_after(self):
41
- return hook_vars.event.startswith("after_") if hook_vars.event else False
42
-
43
- @property
44
- def is_create(self):
45
- return "create" in hook_vars.event if hook_vars.event else False
46
-
47
- @property
48
- def is_update(self):
49
- return "update" in hook_vars.event if hook_vars.event else False
50
-
51
- @property
52
- def new(self):
53
- return hook_vars.new
54
-
55
- @property
56
- def old(self):
57
- return hook_vars.old
58
-
59
- @property
60
- def model(self):
61
- return hook_vars.model
62
-
63
-
64
- HookContext = HookContextState()
65
-
66
-
67
- class HookMeta(type):
68
- _registered = set()
69
-
70
- def __new__(mcs, name, bases, namespace):
71
- cls = super().__new__(mcs, name, bases, namespace)
72
- for method_name, method in namespace.items():
73
- if hasattr(method, "hooks_hooks"):
74
- for model_cls, event, condition, priority in method.hooks_hooks:
75
- key = (model_cls, event, cls, method_name)
76
- if key not in HookMeta._registered:
77
- register_hook(
78
- model=model_cls,
79
- event=event,
80
- handler_cls=cls,
81
- method_name=method_name,
82
- condition=condition,
83
- priority=priority,
84
- )
85
- HookMeta._registered.add(key)
86
- return cls
87
-
88
-
89
- class Hook(metaclass=HookMeta):
90
- @classmethod
91
- def handle(
92
- cls,
93
- event: str,
94
- model: type,
95
- *,
96
- new_records: list = None,
97
- old_records: list = None,
98
- **kwargs,
99
- ) -> None:
100
- queue = get_hook_queue()
101
- queue.append((cls, event, model, new_records, old_records, kwargs))
102
- logger.debug(f"Added item to queue: {event}, depth: {hook_vars.depth}")
103
-
104
- # If we're already processing hooks (depth > 0), don't process the queue
105
- # The outermost call will process the entire queue
106
- if hook_vars.depth > 0:
107
- logger.debug(f"Depth > 0, returning without processing queue")
108
- return
109
-
110
- # Process the entire queue
111
- logger.debug(f"Processing queue with {len(queue)} items")
112
- while queue:
113
- item = queue.popleft()
114
- if len(item) == 6:
115
- cls_, event_, model_, new_, old_, kw_ = item
116
- logger.debug(f"Processing queue item: {event_}")
117
- # Call _process on the Hook class, not the calling class
118
- Hook._process(event_, model_, new_, old_, **kw_)
119
- else:
120
- logger.warning(f"Invalid queue item format: {item}")
121
- continue
122
-
123
- @classmethod
124
- def _process(
125
- cls,
126
- event,
127
- model,
128
- new_records,
129
- old_records,
130
- **kwargs,
131
- ):
132
- hook_vars.depth += 1
133
- hook_vars.new = new_records
134
- hook_vars.old = old_records
135
- hook_vars.event = event
136
- hook_vars.model = model
137
-
138
- hooks = sorted(get_hooks(model, event), key=lambda x: x[3])
139
- logger.debug(f"Found {len(hooks)} hooks for {event}")
140
-
141
- def _execute():
142
- logger.debug(f"Executing {len(hooks)} hooks for {event}")
143
- new_local = new_records or []
144
- old_local = old_records or []
145
- if len(old_local) < len(new_local):
146
- old_local += [None] * (len(new_local) - len(old_local))
147
-
148
- for handler_cls, method_name, condition, priority in hooks:
149
- logger.debug(f"Processing hook {handler_cls.__name__}.{method_name}")
150
- if condition is not None:
151
- checks = [
152
- condition.check(n, o) for n, o in zip(new_local, old_local)
153
- ]
154
- if not any(checks):
155
- logger.debug(f"Condition failed for {handler_cls.__name__}.{method_name}")
156
- continue
157
-
158
- handler = handler_cls()
159
- method = getattr(handler, method_name)
160
- logger.debug(f"Executing {handler_cls.__name__}.{method_name}")
161
-
162
- try:
163
- method(
164
- new_records=new_local,
165
- old_records=old_local,
166
- **kwargs,
167
- )
168
- logger.debug(f"Successfully executed {handler_cls.__name__}.{method_name}")
169
- except Exception:
170
- logger.exception(
171
- "Error in hook %s.%s", handler_cls.__name__, method_name
172
- )
173
-
174
- conn = transaction.get_connection()
175
- logger.debug(f"Transaction in_atomic_block: {conn.in_atomic_block}, event: {event}")
176
- try:
177
- if conn.in_atomic_block and event.startswith("after_"):
178
- logger.debug(f"Deferring {event} to on_commit")
179
- transaction.on_commit(_execute)
180
- else:
181
- logger.debug(f"Executing {event} immediately")
182
- _execute()
183
- finally:
184
- hook_vars.new = None
185
- hook_vars.old = None
186
- hook_vars.event = None
187
- hook_vars.model = None
188
- hook_vars.depth -= 1
1
+ import logging
2
+ import threading
3
+ from collections import deque
4
+
5
+ from django.db import transaction
6
+
7
+ from django_bulk_hooks.registry import get_hooks, register_hook
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
+ # Thread-local hook context and hook state
13
+ class HookVars(threading.local):
14
+ def __init__(self):
15
+ self.new = None
16
+ self.old = None
17
+ self.event = None
18
+ self.model = None
19
+ self.depth = 0
20
+
21
+
22
+ hook_vars = HookVars()
23
+
24
+ # Hook queue per thread
25
+ _hook_context = threading.local()
26
+
27
+
28
+ def get_hook_queue():
29
+ if not hasattr(_hook_context, "queue"):
30
+ _hook_context.queue = deque()
31
+ return _hook_context.queue
32
+
33
+
34
+ class HookContextState:
35
+ @property
36
+ def is_before(self):
37
+ return hook_vars.event.startswith("before_") if hook_vars.event else False
38
+
39
+ @property
40
+ def is_after(self):
41
+ return hook_vars.event.startswith("after_") if hook_vars.event else False
42
+
43
+ @property
44
+ def is_create(self):
45
+ return "create" in hook_vars.event if hook_vars.event else False
46
+
47
+ @property
48
+ def is_update(self):
49
+ return "update" in hook_vars.event if hook_vars.event else False
50
+
51
+ @property
52
+ def new(self):
53
+ return hook_vars.new
54
+
55
+ @property
56
+ def old(self):
57
+ return hook_vars.old
58
+
59
+ @property
60
+ def model(self):
61
+ return hook_vars.model
62
+
63
+
64
+ HookContext = HookContextState()
65
+
66
+
67
+ class HookMeta(type):
68
+ _registered = set()
69
+
70
+ def __new__(mcs, name, bases, namespace):
71
+ cls = super().__new__(mcs, name, bases, namespace)
72
+ for method_name, method in namespace.items():
73
+ if hasattr(method, "hooks_hooks"):
74
+ for model_cls, event, condition, priority in method.hooks_hooks:
75
+ key = (model_cls, event, cls, method_name)
76
+ if key not in HookMeta._registered:
77
+ register_hook(
78
+ model=model_cls,
79
+ event=event,
80
+ handler_cls=cls,
81
+ method_name=method_name,
82
+ condition=condition,
83
+ priority=priority,
84
+ )
85
+ HookMeta._registered.add(key)
86
+ return cls
87
+
88
+
89
+ class Hook(metaclass=HookMeta):
90
+ @classmethod
91
+ def handle(
92
+ cls,
93
+ event: str,
94
+ model: type,
95
+ *,
96
+ new_records: list = None,
97
+ old_records: list = None,
98
+ **kwargs,
99
+ ) -> None:
100
+ queue = get_hook_queue()
101
+ queue.append((cls, event, model, new_records, old_records, kwargs))
102
+ logger.debug(f"Added item to queue: {event}, depth: {hook_vars.depth}")
103
+
104
+ # If we're already processing hooks (depth > 0), don't process the queue
105
+ # The outermost call will process the entire queue
106
+ if hook_vars.depth > 0:
107
+ logger.debug(f"Depth > 0, returning without processing queue")
108
+ return
109
+
110
+ # Process the entire queue
111
+ logger.debug(f"Processing queue with {len(queue)} items")
112
+ while queue:
113
+ item = queue.popleft()
114
+ if len(item) == 6:
115
+ cls_, event_, model_, new_, old_, kw_ = item
116
+ logger.debug(f"Processing queue item: {event_}")
117
+ # Call _process on the Hook class, not the calling class
118
+ Hook._process(event_, model_, new_, old_, **kw_)
119
+ else:
120
+ logger.warning(f"Invalid queue item format: {item}")
121
+ continue
122
+
123
+ @classmethod
124
+ def _process(
125
+ cls,
126
+ event,
127
+ model,
128
+ new_records,
129
+ old_records,
130
+ **kwargs,
131
+ ):
132
+ hook_vars.depth += 1
133
+ hook_vars.new = new_records
134
+ hook_vars.old = old_records
135
+ hook_vars.event = event
136
+ hook_vars.model = model
137
+
138
+ hooks = sorted(get_hooks(model, event), key=lambda x: x[3])
139
+ logger.debug(f"Found {len(hooks)} hooks for {event}")
140
+
141
+ def _execute():
142
+ logger.debug(f"Executing {len(hooks)} hooks for {event}")
143
+ new_local = new_records or []
144
+ old_local = old_records or []
145
+ if len(old_local) < len(new_local):
146
+ old_local += [None] * (len(new_local) - len(old_local))
147
+
148
+ for handler_cls, method_name, condition, priority in hooks:
149
+ logger.debug(f"Processing hook {handler_cls.__name__}.{method_name}")
150
+ if condition is not None:
151
+ checks = [
152
+ condition.check(n, o) for n, o in zip(new_local, old_local)
153
+ ]
154
+ if not any(checks):
155
+ logger.debug(f"Condition failed for {handler_cls.__name__}.{method_name}")
156
+ continue
157
+
158
+ handler = handler_cls()
159
+ method = getattr(handler, method_name)
160
+ logger.debug(f"Executing {handler_cls.__name__}.{method_name}")
161
+
162
+ try:
163
+ method(
164
+ new_records=new_local,
165
+ old_records=old_local,
166
+ **kwargs,
167
+ )
168
+ logger.debug(f"Successfully executed {handler_cls.__name__}.{method_name}")
169
+ except Exception:
170
+ logger.exception(
171
+ "Error in hook %s.%s", handler_cls.__name__, method_name
172
+ )
173
+
174
+ conn = transaction.get_connection()
175
+ logger.debug(f"Transaction in_atomic_block: {conn.in_atomic_block}, event: {event}")
176
+ try:
177
+ if conn.in_atomic_block and event.startswith("after_"):
178
+ logger.debug(f"Deferring {event} to on_commit")
179
+ transaction.on_commit(_execute)
180
+ else:
181
+ logger.debug(f"Executing {event} immediately")
182
+ _execute()
183
+ finally:
184
+ hook_vars.new = None
185
+ hook_vars.old = None
186
+ hook_vars.event = None
187
+ hook_vars.model = None
188
+ hook_vars.depth -= 1
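
handler.py (shown removed and re-added above with identical content, so the change is presumably whitespace or line endings) registers hooks at class-definition time: HookMeta scans the class namespace and passes every method carrying a hooks_hooks attribute — a list of (model_cls, event, condition, priority) tuples — to register_hook. Hook.handle() enqueues each event on a thread-local queue, only the outermost call drains it, and after_* events raised inside an atomic block are deferred to transaction.on_commit. Below is a minimal sketch of a handler wired the way HookMeta expects; in real projects the decorator in django_bulk_hooks.decorators presumably attaches hooks_hooks, and the Account model and BalanceChanged condition are hypothetical.

from django_bulk_hooks.constants import AFTER_UPDATE  # assumed to be "after_update"
from django_bulk_hooks.handler import Hook, HookContext

from myapp.models import Account  # hypothetical model


class BalanceChanged:
    """Hypothetical condition; _execute() only requires a check(new, old) method."""

    def check(self, new, old):
        return old is not None and new.balance != old.balance


class AccountHooks(Hook):
    def log_balance_change(self, new_records=None, old_records=None, **kwargs):
        # HookContext reads the thread-local state set up by Hook._process().
        if HookContext.is_after and HookContext.is_update:
            for new, old in zip(new_records or [], old_records or []):
                print(f"{new.pk}: {getattr(old, 'balance', None)} -> {new.balance}")

    # The attribute shape HookMeta unpacks: (model_cls, event, condition, priority).
    log_balance_change.hooks_hooks = [(Account, AFTER_UPDATE, BalanceChanged(), 0)]

Importing the module that defines AccountHooks is enough: the metaclass registers the hook immediately, and _execute() later instantiates the handler and calls the method with new_records/old_records keyword arguments.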
@@ -1,9 +1,11 @@
  import logging

  from django.db import models, transaction
- from django.db.models import AutoField, Case, Field, Subquery, Value, When
+ from django.db.models import AutoField, Case, Field, Value, When

  from django_bulk_hooks import engine
+
+ logger = logging.getLogger(__name__)
  from django_bulk_hooks.constants import (
  AFTER_CREATE,
  AFTER_DELETE,
@@ -18,12 +20,9 @@ from django_bulk_hooks.constants import (
  from django_bulk_hooks.context import (
  HookContext,
  get_bulk_update_value_map,
- get_bypass_hooks,
  set_bulk_update_value_map,
  )

- logger = logging.getLogger(__name__)
-

  class HookQuerySetMixin:
  """
@@ -46,6 +45,21 @@ class HookQuerySetMixin:
  # Then run business logic hooks
  engine.run(model_cls, BEFORE_DELETE, objs, ctx=ctx)

+ # Before deletion, ensure all related fields are properly cached
+ # to avoid DoesNotExist errors in AFTER_DELETE hooks
+ for obj in objs:
+ if obj.pk is not None:
+ # Cache all foreign key relationships by accessing them
+ for field in model_cls._meta.fields:
+ if field.is_relation and not field.many_to_many and not field.one_to_many:
+ try:
+ # Access the related field to cache it before deletion
+ getattr(obj, field.name)
+ except Exception:
+ # If we can't access the field (e.g., already deleted, no permission, etc.)
+ # continue with other fields
+ pass
+
  # Use Django's standard delete() method
  result = super().delete()

@@ -56,81 +70,372 @@ class HookQuerySetMixin:
56
70
 
57
71
  @transaction.atomic
58
72
  def update(self, **kwargs):
59
- """Simplified update method that handles hooks cleanly."""
60
73
  logger.debug(f"Entering update method with {len(kwargs)} kwargs")
61
74
  instances = list(self)
62
75
  if not instances:
63
76
  return 0
64
77
 
65
78
  model_cls = self.model
66
- pks = [obj.pk for obj in instances if obj.pk is not None]
67
-
68
- # For test compatibility - if no PKs, create mock PKs
69
- if not pks and instances:
70
- for i, instance in enumerate(instances):
71
- if instance.pk is None:
72
- instance.pk = i + 1
73
- pks = [obj.pk for obj in instances]
79
+ pks = [obj.pk for obj in instances]
74
80
 
75
- # Load originals for hook comparison
81
+ # Load originals for hook comparison and ensure they match the order of instances
82
+ # Use the base manager to avoid recursion
76
83
  original_map = {
77
84
  obj.pk: obj for obj in model_cls._base_manager.filter(pk__in=pks)
78
85
  }
79
86
  originals = [original_map.get(obj.pk) for obj in instances]
80
87
 
81
- # Check for Subquery updates
82
- from django.db.models import Subquery, Value
88
+ # Check if any of the update values are Subquery objects
89
+ try:
90
+ from django.db.models import Subquery
83
91
 
84
- has_subquery = any(isinstance(v, Subquery) for v in kwargs.values())
85
- logger.debug(f"Subquery detection result: {has_subquery}")
92
+ logger.debug(f"Successfully imported Subquery from django.db.models")
93
+ except ImportError as e:
94
+ logger.error(f"Failed to import Subquery: {e}")
95
+ raise
86
96
 
87
- # Skip hooks if bypassed
88
- from django_bulk_hooks.context import (
89
- get_bulk_update_value_map,
90
- get_bypass_hooks,
97
+ logger.debug(f"Checking for Subquery objects in {len(kwargs)} kwargs")
98
+
99
+ subquery_detected = []
100
+ for key, value in kwargs.items():
101
+ is_subquery = isinstance(value, Subquery)
102
+ logger.debug(
103
+ f"Key '{key}': type={type(value).__name__}, is_subquery={is_subquery}"
104
+ )
105
+ if is_subquery:
106
+ subquery_detected.append(key)
107
+
108
+ has_subquery = len(subquery_detected) > 0
109
+ logger.debug(
110
+ f"Subquery detection result: {has_subquery}, detected keys: {subquery_detected}"
91
111
  )
92
112
 
93
- if get_bypass_hooks():
94
- logger.debug("update: hooks explicitly bypassed")
95
- return super().update(**kwargs)
113
+ # Debug logging for Subquery detection
114
+ logger.debug(f"Update kwargs: {list(kwargs.keys())}")
115
+ logger.debug(
116
+ f"Update kwargs types: {[(k, type(v).__name__) for k, v in kwargs.items()]}"
117
+ )
118
+
119
+ if has_subquery:
120
+ logger.debug(
121
+ f"Detected Subquery in update: {[k for k, v in kwargs.items() if isinstance(v, Subquery)]}"
122
+ )
123
+ else:
124
+ # Check if we missed any Subquery objects
125
+ for k, v in kwargs.items():
126
+ if hasattr(v, "query") and hasattr(v, "resolve_expression"):
127
+ logger.warning(
128
+ f"Potential Subquery-like object detected but not recognized: {k}={type(v).__name__}"
129
+ )
130
+ logger.warning(
131
+ f"Object attributes: query={hasattr(v, 'query')}, resolve_expression={hasattr(v, 'resolve_expression')}"
132
+ )
133
+ logger.warning(
134
+ f"Object dir: {[attr for attr in dir(v) if not attr.startswith('_')][:10]}"
135
+ )
96
136
 
97
- ctx = HookContext(model_cls, bypass_hooks=False)
98
- logger.debug("update: running hooks with Salesforce-style behavior")
137
+ # Apply field updates to instances
138
+ # If a per-object value map exists (from bulk_update), prefer it over kwargs
139
+ # IMPORTANT: Do not assign Django expression objects (e.g., Subquery/Case/F)
140
+ # to in-memory instances before running BEFORE_UPDATE hooks. Hooks must not
141
+ # receive unresolved expression objects.
142
+ per_object_values = get_bulk_update_value_map()
99
143
 
144
+ # For Subquery updates, skip all in-memory field assignments to prevent
145
+ # expression objects from reaching hooks
100
146
  if has_subquery:
101
- # For Subquery updates: database first, then hooks
102
- logger.debug("Using two-stage update for Subquery")
147
+ logger.debug(
148
+ "Skipping in-memory field assignments due to Subquery detection"
149
+ )
150
+ else:
151
+ for obj in instances:
152
+ if per_object_values and obj.pk in per_object_values:
153
+ for field, value in per_object_values[obj.pk].items():
154
+ setattr(obj, field, value)
155
+ else:
156
+ for field, value in kwargs.items():
157
+ # Skip assigning expression-like objects (they will be handled at DB level)
158
+ is_expression_like = hasattr(value, "resolve_expression")
159
+ if is_expression_like:
160
+ # Special-case Value() which can be unwrapped safely
161
+ if isinstance(value, Value):
162
+ try:
163
+ setattr(obj, field, value.value)
164
+ except Exception:
165
+ # If Value cannot be unwrapped for any reason, skip assignment
166
+ continue
167
+ else:
168
+ # Do not assign unresolved expressions to in-memory objects
169
+ logger.debug(
170
+ f"Skipping assignment of expression {type(value).__name__} to field {field}"
171
+ )
172
+ continue
173
+ else:
174
+ setattr(obj, field, value)
175
+
176
+ # Salesforce-style trigger behavior: Always run hooks, rely on Django's stack overflow protection
177
+ from django_bulk_hooks.context import get_bypass_hooks
178
+
179
+ current_bypass_hooks = get_bypass_hooks()
180
+
181
+ # Only skip hooks if explicitly bypassed (not for recursion prevention)
182
+ if current_bypass_hooks:
183
+ logger.debug("update: hooks explicitly bypassed")
184
+ ctx = HookContext(model_cls, bypass_hooks=True)
185
+ else:
186
+ # Always run hooks - Django will handle stack overflow protection
187
+ logger.debug("update: running hooks with Salesforce-style behavior")
188
+ ctx = HookContext(model_cls, bypass_hooks=False)
189
+
190
+ # Run validation hooks first
191
+ engine.run(model_cls, VALIDATE_UPDATE, instances, originals, ctx=ctx)
192
+
193
+ # For Subquery updates, skip BEFORE_UPDATE hooks here - they'll run after refresh
194
+ if not has_subquery:
195
+ # Then run BEFORE_UPDATE hooks for non-Subquery updates
196
+ engine.run(model_cls, BEFORE_UPDATE, instances, originals, ctx=ctx)
197
+
198
+ # Persist any additional field mutations made by BEFORE_UPDATE hooks.
199
+ # Build CASE statements per modified field not already present in kwargs.
200
+ # Note: For Subquery updates, this will be empty since hooks haven't run yet
201
+ # For Subquery updates, hook modifications are handled later via bulk_update
202
+ if not has_subquery:
203
+ modified_fields = self._detect_modified_fields(instances, originals)
204
+ extra_fields = [f for f in modified_fields if f not in kwargs]
205
+ else:
206
+ extra_fields = [] # Skip for Subquery updates
207
+
208
+ if extra_fields:
209
+ case_statements = {}
210
+ for field_name in extra_fields:
211
+ try:
212
+ field_obj = model_cls._meta.get_field(field_name)
213
+ except Exception:
214
+ # Skip unknown fields
215
+ continue
216
+
217
+ when_statements = []
218
+ for obj in instances:
219
+ obj_pk = getattr(obj, "pk", None)
220
+ if obj_pk is None:
221
+ continue
222
+
223
+ # Determine value and output field
224
+ if getattr(field_obj, "is_relation", False):
225
+ # For FK fields, store the raw id and target field output type
226
+ value = getattr(obj, field_obj.attname, None)
227
+ output_field = field_obj.target_field
228
+ target_name = (
229
+ field_obj.attname
230
+ ) # use column name (e.g., fk_id)
231
+ else:
232
+ value = getattr(obj, field_name)
233
+ output_field = field_obj
234
+ target_name = field_name
235
+
236
+ # Special handling for Subquery and other expression values in CASE statements
237
+ if isinstance(value, Subquery):
238
+ logger.debug(
239
+ f"Creating When statement with Subquery for {field_name}"
240
+ )
241
+ # Ensure the Subquery has proper output_field
242
+ if (
243
+ not hasattr(value, "output_field")
244
+ or value.output_field is None
245
+ ):
246
+ value.output_field = output_field
247
+ logger.debug(
248
+ f"Set output_field for Subquery in When statement to {output_field}"
249
+ )
250
+ when_statements.append(When(pk=obj_pk, then=value))
251
+ elif hasattr(value, "resolve_expression"):
252
+ # Handle other expression objects (Case, F, etc.)
253
+ logger.debug(
254
+ f"Creating When statement with expression for {field_name}: {type(value).__name__}"
255
+ )
256
+ when_statements.append(When(pk=obj_pk, then=value))
257
+ else:
258
+ when_statements.append(
259
+ When(
260
+ pk=obj_pk,
261
+ then=Value(value, output_field=output_field),
262
+ )
263
+ )
264
+
265
+ if when_statements:
266
+ case_statements[target_name] = Case(
267
+ *when_statements, output_field=output_field
268
+ )
103
269
 
104
- # Stage 1: Execute the database update first
105
- logger.debug("Stage 1: Executing Subquery update")
106
- update_count = super().update(**kwargs)
107
- logger.debug(f"Subquery update completed, affected {update_count} records")
270
+ # Merge extra CASE updates into kwargs for DB update
271
+ if case_statements:
272
+ logger.debug(
273
+ f"Adding case statements to kwargs: {list(case_statements.keys())}"
274
+ )
275
+ for field_name, case_stmt in case_statements.items():
276
+ logger.debug(
277
+ f"Case statement for {field_name}: {type(case_stmt).__name__}"
278
+ )
279
+ # Check if the case statement contains Subquery objects
280
+ if hasattr(case_stmt, "get_source_expressions"):
281
+ source_exprs = case_stmt.get_source_expressions()
282
+ for expr in source_exprs:
283
+ if isinstance(expr, Subquery):
284
+ logger.debug(
285
+ f"Case statement for {field_name} contains Subquery"
286
+ )
287
+ elif hasattr(expr, "get_source_expressions"):
288
+ # Check nested expressions (like Value objects)
289
+ nested_exprs = expr.get_source_expressions()
290
+ for nested_expr in nested_exprs:
291
+ if isinstance(nested_expr, Subquery):
292
+ logger.debug(
293
+ f"Case statement for {field_name} contains nested Subquery"
294
+ )
295
+
296
+ kwargs = {**kwargs, **case_statements}
297
+
298
+ # Use Django's built-in update logic directly
299
+ # Call the base QuerySet implementation to avoid recursion
300
+
301
+ # Additional safety check: ensure Subquery objects are properly handled
302
+ # This prevents the "cannot adapt type 'Subquery'" error
303
+ safe_kwargs = {}
304
+ logger.debug(f"Processing {len(kwargs)} kwargs for safety check")
305
+
306
+ for key, value in kwargs.items():
307
+ logger.debug(
308
+ f"Processing key '{key}' with value type {type(value).__name__}"
309
+ )
310
+
311
+ if isinstance(value, Subquery):
312
+ logger.debug(f"Found Subquery for field {key}")
313
+ # Ensure Subquery has proper output_field
314
+ if not hasattr(value, "output_field") or value.output_field is None:
315
+ logger.warning(
316
+ f"Subquery for field {key} missing output_field, attempting to infer"
317
+ )
318
+ # Try to infer from the model field
319
+ try:
320
+ field = model_cls._meta.get_field(key)
321
+ logger.debug(f"Inferred field type: {type(field).__name__}")
322
+ value = value.resolve_expression(None, None)
323
+ value.output_field = field
324
+ logger.debug(f"Set output_field to {field}")
325
+ except Exception as e:
326
+ logger.error(
327
+ f"Failed to infer output_field for Subquery on {key}: {e}"
328
+ )
329
+ raise
330
+ else:
331
+ logger.debug(
332
+ f"Subquery for field {key} already has output_field: {value.output_field}"
333
+ )
334
+ safe_kwargs[key] = value
335
+ elif hasattr(value, "get_source_expressions") and hasattr(
336
+ value, "resolve_expression"
337
+ ):
338
+ # Handle Case statements and other complex expressions
339
+ logger.debug(
340
+ f"Found complex expression for field {key}: {type(value).__name__}"
341
+ )
342
+
343
+ # Check if this expression contains any Subquery objects
344
+ source_expressions = value.get_source_expressions()
345
+ has_nested_subquery = False
346
+
347
+ for expr in source_expressions:
348
+ if isinstance(expr, Subquery):
349
+ has_nested_subquery = True
350
+ logger.debug(f"Found nested Subquery in {type(value).__name__}")
351
+ # Ensure the nested Subquery has proper output_field
352
+ if (
353
+ not hasattr(expr, "output_field")
354
+ or expr.output_field is None
355
+ ):
356
+ try:
357
+ field = model_cls._meta.get_field(key)
358
+ expr.output_field = field
359
+ logger.debug(
360
+ f"Set output_field for nested Subquery to {field}"
361
+ )
362
+ except Exception as e:
363
+ logger.error(
364
+ f"Failed to set output_field for nested Subquery: {e}"
365
+ )
366
+ raise
108
367
 
109
- # Stage 2: Refresh instances with computed values
110
- logger.debug("Stage 2: Refreshing instances with Subquery results")
111
- refreshed_map = {
368
+ if has_nested_subquery:
369
+ logger.debug(
370
+ f"Expression contains Subquery, ensuring proper output_field"
371
+ )
372
+ # Try to resolve the expression to ensure it's properly formatted
373
+ try:
374
+ resolved_value = value.resolve_expression(None, None)
375
+ safe_kwargs[key] = resolved_value
376
+ logger.debug(f"Successfully resolved expression for {key}")
377
+ except Exception as e:
378
+ logger.error(f"Failed to resolve expression for {key}: {e}")
379
+ raise
380
+ else:
381
+ safe_kwargs[key] = value
382
+ else:
383
+ logger.debug(
384
+ f"Non-Subquery value for field {key}: {type(value).__name__}"
385
+ )
386
+ safe_kwargs[key] = value
387
+
388
+ logger.debug(f"Safe kwargs keys: {list(safe_kwargs.keys())}")
389
+ logger.debug(
390
+ f"Safe kwargs types: {[(k, type(v).__name__) for k, v in safe_kwargs.items()]}"
391
+ )
392
+
393
+ logger.debug(f"Calling super().update() with {len(safe_kwargs)} kwargs")
394
+ try:
395
+ update_count = super().update(**safe_kwargs)
396
+ logger.debug(f"Super update successful, count: {update_count}")
397
+ except Exception as e:
398
+ logger.error(f"Super update failed: {e}")
399
+ logger.error(f"Exception type: {type(e).__name__}")
400
+ logger.error(f"Safe kwargs that caused failure: {safe_kwargs}")
401
+ raise
402
+
403
+ # If we used Subquery objects, refresh the instances to get computed values
404
+ # and run BEFORE_UPDATE hooks so HasChanged conditions work correctly
405
+ if has_subquery and instances and not current_bypass_hooks:
406
+ logger.debug(
407
+ "Refreshing instances with Subquery computed values before running hooks"
408
+ )
409
+ # Simple refresh of model fields without fetching related objects
410
+ # Subquery updates only affect the model's own fields, not relationships
411
+ refreshed_instances = {
112
412
  obj.pk: obj for obj in model_cls._base_manager.filter(pk__in=pks)
113
413
  }
414
+
415
+ # Bulk update all instances in memory and save pre-hook state
114
416
  pre_hook_state = {}
115
417
  for instance in instances:
116
- if instance.pk in refreshed_map:
117
- refreshed = refreshed_map[instance.pk]
118
- # Save pre-hook state for comparison
418
+ if instance.pk in refreshed_instances:
419
+ refreshed_instance = refreshed_instances[instance.pk]
420
+ # Save current state before modifying for hook comparison
119
421
  pre_hook_values = {}
120
422
  for field in model_cls._meta.fields:
121
423
  if field.name != "id":
122
- field_value = getattr(refreshed, field.name)
123
- pre_hook_values[field.name] = field_value
124
- setattr(instance, field.name, field_value)
424
+ pre_hook_values[field.name] = getattr(
425
+ refreshed_instance, field.name
426
+ )
427
+ setattr(
428
+ instance,
429
+ field.name,
430
+ getattr(refreshed_instance, field.name),
431
+ )
125
432
  pre_hook_state[instance.pk] = pre_hook_values
126
433
 
127
- # Stage 3: Run hooks with refreshed data
128
- logger.debug("Stage 3: Running hooks with refreshed instances")
129
- engine.run(model_cls, VALIDATE_UPDATE, instances, originals, ctx=ctx)
434
+ # Now run BEFORE_UPDATE hooks with refreshed instances so conditions work
435
+ logger.debug("Running BEFORE_UPDATE hooks after Subquery refresh")
130
436
  engine.run(model_cls, BEFORE_UPDATE, instances, originals, ctx=ctx)
131
437
 
132
- # Stage 4: Persist hook modifications
133
- logger.debug("Stage 4: Detecting hook modifications for bulk_update")
438
+ # Check if hooks modified any fields and persist them with bulk_update
134
439
  hook_modified_fields = set()
135
440
  for instance in instances:
136
441
  if instance.pk in pre_hook_state:
@@ -140,65 +445,22 @@ class HookQuerySetMixin:
140
445
  if current_value != pre_hook_value:
141
446
  hook_modified_fields.add(field_name)
142
447
 
448
+ hook_modified_fields = list(hook_modified_fields)
143
449
  if hook_modified_fields:
144
- hook_modified_fields = list(hook_modified_fields)
145
450
  logger.debug(
146
451
  f"Running bulk_update for hook-modified fields: {hook_modified_fields}"
147
452
  )
453
+ # Use bulk_update to persist hook modifications, bypassing hooks to avoid recursion
148
454
  model_cls.objects.bulk_update(
149
455
  instances, hook_modified_fields, bypass_hooks=True
150
456
  )
151
457
 
458
+ # Salesforce-style: Always run AFTER_UPDATE hooks unless explicitly bypassed
459
+ if not current_bypass_hooks:
460
+ logger.debug("update: running AFTER_UPDATE")
461
+ engine.run(model_cls, AFTER_UPDATE, instances, originals, ctx=ctx)
152
462
  else:
153
- # For regular updates: hooks first, then database
154
- logger.debug("Using single-stage update for non-Subquery")
155
-
156
- # Apply field updates to instances
157
- per_object_values = get_bulk_update_value_map()
158
- for instance in instances:
159
- if per_object_values and instance.pk in per_object_values:
160
- for field, value in per_object_values[instance.pk].items():
161
- setattr(instance, field, value)
162
- else:
163
- for field, value in kwargs.items():
164
- # Skip assigning expression-like objects (they will be handled at DB level)
165
- if hasattr(value, "resolve_expression"):
166
- # Special-case Value() which can be unwrapped safely
167
- if isinstance(value, Value):
168
- try:
169
- setattr(instance, field, value.value)
170
- except Exception:
171
- continue
172
- else:
173
- logger.debug(
174
- f"Skipping assignment of expression {type(value).__name__} to field {field}"
175
- )
176
- continue
177
- else:
178
- setattr(instance, field, value)
179
-
180
- # Run hooks
181
- engine.run(model_cls, VALIDATE_UPDATE, instances, originals, ctx=ctx)
182
- engine.run(model_cls, BEFORE_UPDATE, instances, originals, ctx=ctx)
183
-
184
- # Execute database update
185
- update_count = super().update(**kwargs)
186
- logger.debug(f"Super update successful, count: {update_count}")
187
-
188
- # Detect and persist additional hook modifications
189
- hook_modified_fields = self._detect_modified_fields(instances, originals)
190
- extra_fields = [f for f in hook_modified_fields if f not in kwargs]
191
- if extra_fields:
192
- logger.debug(
193
- f"Running bulk_update for hook-modified fields: {extra_fields}"
194
- )
195
- model_cls.objects.bulk_update(
196
- instances, extra_fields, bypass_hooks=True
197
- )
198
-
199
- # Always run AFTER_UPDATE hooks
200
- logger.debug("update: running AFTER_UPDATE")
201
- engine.run(model_cls, AFTER_UPDATE, instances, originals, ctx=ctx)
463
+ logger.debug("update: AFTER_UPDATE explicitly bypassed")
202
464
 
203
465
  return update_count
204
466
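
The rewritten update() above branches on whether any value is a Subquery: plain values are copied onto the in-memory instances and VALIDATE_UPDATE/BEFORE_UPDATE run before the SQL UPDATE, while Subquery values go to the database first, the affected rows are re-read through the base manager, and BEFORE_UPDATE then runs against the refreshed instances so conditions such as HasChanged compare concrete values rather than unresolved expressions; AFTER_UPDATE runs last unless hooks are bypassed. A sketch of both call shapes from the application side — Account and Transaction are hypothetical models using the hook-aware manager:

# Hypothetical usage of the two update paths described above.
from django.db.models import OuterRef, Subquery, Sum

from myapp.models import Account, Transaction  # hypothetical models

# Plain-value update: instances are mutated in memory, BEFORE_UPDATE hooks run,
# then a single SQL UPDATE is issued and AFTER_UPDATE fires.
Account.objects.filter(active=True).update(status="reviewed")

# Subquery update: the SQL UPDATE runs first, the matched rows are re-read,
# and hooks then see the computed balance instead of a Subquery object.
latest_total = (
    Transaction.objects.filter(account=OuterRef("pk"))
    .values("account")
    .annotate(total=Sum("amount"))
    .values("total")[:1]
)
Account.objects.filter(active=True).update(balance=Subquery(latest_total))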
 
@@ -372,14 +634,7 @@ class HookQuerySetMixin:
  field_values = {}
  for field_name in fields:
  # Capture raw values assigned on the object (not expressions)
- value = getattr(obj, field_name)
- # Skip expression objects that should not be passed to hooks
- if hasattr(value, "resolve_expression"):
- logger.debug(
- f"Skipping expression {type(value).__name__} for field {field_name} in bulk_update value map"
- )
- continue
- field_values[field_name] = value
+ field_values[field_name] = getattr(obj, field_name)
  if field_values:
  value_map[obj.pk] = field_values

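
For reference, the value map built here is a plain dict keyed by primary key, which update() later prefers over its uniform kwargs when mutating in-memory instances. A minimal sketch of its shape, assuming set_bulk_update_value_map simply stores the mapping in the thread-local context (the pks and field values are hypothetical):

# Hypothetical illustration of the per-object value map produced above.
from decimal import Decimal

from django_bulk_hooks.context import set_bulk_update_value_map

# Shape: {pk: {field_name: current in-memory value}}
value_map = {
    17: {"status": "closed", "balance": Decimal("0.00")},
    42: {"status": "active", "balance": Decimal("125.50")},
}

# Assumption: the setter stashes the map in thread-local context so that
# update() can apply these per-object values instead of its uniform kwargs.
set_bulk_update_value_map(value_map)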
@@ -907,6 +1162,22 @@ class HookQuerySetMixin:
  ctx = HookContext(model_cls, bypass_hooks=True)
  logger.debug("bulk_delete bypassed hooks")

+ # Before deletion, ensure all related fields are properly cached
+ # to avoid DoesNotExist errors in AFTER_DELETE hooks
+ if not bypass_hooks:
+ for obj in objs:
+ if obj.pk is not None:
+ # Cache all foreign key relationships by accessing them
+ for field in model_cls._meta.fields:
+ if field.is_relation and not field.many_to_many and not field.one_to_many:
+ try:
+ # Access the related field to cache it before deletion
+ getattr(obj, field.name)
+ except Exception:
+ # If we can't access the field (e.g., already deleted, no permission, etc.)
+ # continue with other fields
+ pass
+
  # Use Django's standard delete() method on the queryset
  pks = [obj.pk for obj in objs if obj.pk is not None]
  if pks:
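
Like delete(), bulk_delete walks every forward relation with getattr() before the rows go away (skipped when hooks are bypassed), so AFTER_DELETE hooks can read obj.<fk> from the field cache instead of lazily querying rows that a cascade may already have removed. The mixin itself relies on bulk_update(..., bypass_hooks=True) to persist hook-made changes without re-entering the pipeline, and the same flag appears usable from application code as well; a sketch under that assumption, with a hypothetical model and field:

# Hypothetical maintenance write that deliberately skips the hook pipeline,
# mirroring the internal bulk_update(..., bypass_hooks=True) call used above.
from myapp.models import Account  # hypothetical model using the hook-aware manager

accounts = list(Account.objects.filter(needs_reindex=True))
for account in accounts:
    account.needs_reindex = False  # hypothetical flag field

# With bypass_hooks=True the write goes straight to the database and none of
# the VALIDATE_/BEFORE_/AFTER_UPDATE hooks registered for Account fire.
Account.objects.bulk_update(accounts, ["needs_reindex"], bypass_hooks=True)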
@@ -1,6 +1,6 @@
  [tool.poetry]
  name = "django-bulk-hooks"
- version = "0.1.249"
+ version = "0.1.251"
  description = "Hook-style hooks for Django bulk operations like bulk_create and bulk_update."
  authors = ["Konrad Beck <konrad.beck@merchantcapital.co.za>"]
  readme = "README.md"