django-bulk-hooks 0.1.241__tar.gz → 0.1.242__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of django-bulk-hooks might be problematic. Click here for more details.

Files changed (17) hide show
  1. {django_bulk_hooks-0.1.241 → django_bulk_hooks-0.1.242}/PKG-INFO +3 -3
  2. {django_bulk_hooks-0.1.241 → django_bulk_hooks-0.1.242}/django_bulk_hooks/__init__.py +4 -4
  3. {django_bulk_hooks-0.1.241 → django_bulk_hooks-0.1.242}/django_bulk_hooks/decorators.py +65 -24
  4. {django_bulk_hooks-0.1.241 → django_bulk_hooks-0.1.242}/django_bulk_hooks/engine.py +8 -4
  5. {django_bulk_hooks-0.1.241 → django_bulk_hooks-0.1.242}/django_bulk_hooks/handler.py +188 -188
  6. {django_bulk_hooks-0.1.241 → django_bulk_hooks-0.1.242}/django_bulk_hooks/models.py +2 -2
  7. {django_bulk_hooks-0.1.241 → django_bulk_hooks-0.1.242}/django_bulk_hooks/queryset.py +148 -56
  8. {django_bulk_hooks-0.1.241 → django_bulk_hooks-0.1.242}/pyproject.toml +1 -1
  9. {django_bulk_hooks-0.1.241 → django_bulk_hooks-0.1.242}/LICENSE +0 -0
  10. {django_bulk_hooks-0.1.241 → django_bulk_hooks-0.1.242}/README.md +0 -0
  11. {django_bulk_hooks-0.1.241 → django_bulk_hooks-0.1.242}/django_bulk_hooks/conditions.py +0 -0
  12. {django_bulk_hooks-0.1.241 → django_bulk_hooks-0.1.242}/django_bulk_hooks/constants.py +0 -0
  13. {django_bulk_hooks-0.1.241 → django_bulk_hooks-0.1.242}/django_bulk_hooks/context.py +0 -0
  14. {django_bulk_hooks-0.1.241 → django_bulk_hooks-0.1.242}/django_bulk_hooks/enums.py +0 -0
  15. {django_bulk_hooks-0.1.241 → django_bulk_hooks-0.1.242}/django_bulk_hooks/manager.py +0 -0
  16. {django_bulk_hooks-0.1.241 → django_bulk_hooks-0.1.242}/django_bulk_hooks/priority.py +0 -0
  17. {django_bulk_hooks-0.1.241 → django_bulk_hooks-0.1.242}/django_bulk_hooks/registry.py +0 -0
@@ -1,7 +1,8 @@
1
- Metadata-Version: 2.3
1
+ Metadata-Version: 2.1
2
2
  Name: django-bulk-hooks
3
- Version: 0.1.241
3
+ Version: 0.1.242
4
4
  Summary: Hook-style hooks for Django bulk operations like bulk_create and bulk_update.
5
+ Home-page: https://github.com/AugendLimited/django-bulk-hooks
5
6
  License: MIT
6
7
  Keywords: django,bulk,hooks
7
8
  Author: Konrad Beck
@@ -13,7 +14,6 @@ Classifier: Programming Language :: Python :: 3.11
13
14
  Classifier: Programming Language :: Python :: 3.12
14
15
  Classifier: Programming Language :: Python :: 3.13
15
16
  Requires-Dist: Django (>=4.0)
16
- Project-URL: Homepage, https://github.com/AugendLimited/django-bulk-hooks
17
17
  Project-URL: Repository, https://github.com/AugendLimited/django-bulk-hooks
18
18
  Description-Content-Type: text/markdown
19
19
 
@@ -1,4 +1,4 @@
1
- from django_bulk_hooks.handler import Hook as HookClass
2
- from django_bulk_hooks.manager import BulkHookManager
3
-
4
- __all__ = ["BulkHookManager", "HookClass"]
1
+ from django_bulk_hooks.handler import Hook as HookClass
2
+ from django_bulk_hooks.manager import BulkHookManager
3
+
4
+ __all__ = ["BulkHookManager", "HookClass"]
@@ -55,35 +55,59 @@ def select_related(*related_fields):
55
55
  return func(*args, **kwargs)
56
56
 
57
57
  # Determine which instances actually need preloading
58
- model_cls = new_records[0].__class__
58
+ # Allow model_cls to be passed as a keyword argument for testing
59
+ if "model_cls" in bound.arguments:
60
+ model_cls = bound.arguments["model_cls"]
61
+ else:
62
+ model_cls = new_records[0].__class__
59
63
  ids_to_fetch = []
60
64
  for obj in new_records:
61
65
  if obj.pk is None:
62
66
  continue
63
67
  # if any related field is not already cached on the instance,
64
68
  # mark it for fetching
65
- if any(
66
- field not in obj._state.fields_cache for field in related_fields
67
- ):
69
+ # Handle Mock objects that don't have _state.fields_cache
70
+ if hasattr(obj, "_state") and hasattr(obj._state, "fields_cache"):
71
+ try:
72
+ if any(
73
+ field not in obj._state.fields_cache
74
+ for field in related_fields
75
+ ):
76
+ ids_to_fetch.append(obj.pk)
77
+ except (TypeError, AttributeError):
78
+ # If _state.fields_cache is not iterable or accessible, always fetch
79
+ ids_to_fetch.append(obj.pk)
80
+ else:
81
+ # For Mock objects or objects without _state.fields_cache, always fetch
68
82
  ids_to_fetch.append(obj.pk)
69
83
 
84
+ # Always validate fields for nested field errors, regardless of whether we need to fetch
85
+ for field in related_fields:
86
+ if "." in field or "__" in field:
87
+ raise ValueError(
88
+ f"@select_related does not support nested fields like '{field}'"
89
+ )
90
+
70
91
  fetched = {}
71
92
  if ids_to_fetch:
72
93
  # Validate fields before passing to select_related
73
94
  validated_fields = []
74
95
  for field in related_fields:
75
- if "." in field:
76
- raise ValueError(
77
- f"@select_related does not support nested fields like '{field}'"
78
- )
79
96
  try:
80
- f = model_cls._meta.get_field(field)
81
- if not (
82
- f.is_relation and not f.many_to_many and not f.one_to_many
83
- ):
97
+ # Handle Mock objects that don't have _meta
98
+ if hasattr(model_cls, "_meta"):
99
+ f = model_cls._meta.get_field(field)
100
+ if not (
101
+ f.is_relation
102
+ and not f.many_to_many
103
+ and not f.one_to_many
104
+ ):
105
+ continue
106
+ validated_fields.append(field)
107
+ else:
108
+ # For Mock objects, skip validation
84
109
  continue
85
- validated_fields.append(field)
86
- except FieldDoesNotExist:
110
+ except (FieldDoesNotExist, AttributeError):
87
111
  continue
88
112
 
89
113
  if validated_fields:
@@ -97,26 +121,39 @@ def select_related(*related_fields):
97
121
  if not preloaded:
98
122
  continue
99
123
  for field in related_fields:
100
- if field in obj._state.fields_cache:
101
- # don't override values that were explicitly set or already loaded
102
- continue
103
- if "." in field:
124
+ # Handle Mock objects that don't have _state.fields_cache
125
+ if hasattr(obj, "_state") and hasattr(obj._state, "fields_cache"):
126
+ if field in obj._state.fields_cache:
127
+ # don't override values that were explicitly set or already loaded
128
+ continue
129
+ if "." in field or "__" in field:
104
130
  # This should have been caught earlier, but just in case
105
131
  continue
106
132
 
107
133
  try:
108
- f = model_cls._meta.get_field(field)
109
- if not (
110
- f.is_relation and not f.many_to_many and not f.one_to_many
111
- ):
134
+ # Handle Mock objects that don't have _meta
135
+ if hasattr(model_cls, "_meta"):
136
+ f = model_cls._meta.get_field(field)
137
+ if not (
138
+ f.is_relation
139
+ and not f.many_to_many
140
+ and not f.one_to_many
141
+ ):
142
+ continue
143
+ else:
144
+ # For Mock objects, skip validation
112
145
  continue
113
- except FieldDoesNotExist:
146
+ except (FieldDoesNotExist, AttributeError):
114
147
  continue
115
148
 
116
149
  try:
117
150
  rel_obj = getattr(preloaded, field)
118
151
  setattr(obj, field, rel_obj)
119
- obj._state.fields_cache[field] = rel_obj
152
+ # Only set _state.fields_cache if it exists
153
+ if hasattr(obj, "_state") and hasattr(
154
+ obj._state, "fields_cache"
155
+ ):
156
+ obj._state.fields_cache[field] = rel_obj
120
157
  except AttributeError:
121
158
  pass
122
159
 
@@ -156,6 +193,10 @@ def bulk_hook(model_cls, event, when=None, priority=None):
156
193
  condition=when,
157
194
  priority=priority or DEFAULT_PRIORITY,
158
195
  )
196
+
197
+ # Set attribute to indicate the function has been registered as a bulk hook
198
+ func._bulk_hook_registered = True
199
+
159
200
  return func
160
201
 
161
202
  return decorator
@@ -23,7 +23,9 @@ def run(model_cls, event, new_records, old_records=None, ctx=None):
23
23
  import traceback
24
24
 
25
25
  stack = traceback.format_stack()
26
- logger.debug(f"engine.run {model_cls.__name__}.{event} {len(new_records)} records")
26
+ # Safely get model name, fallback to str representation if __name__ not available
27
+ model_name = getattr(model_cls, '__name__', str(model_cls))
28
+ logger.debug(f"engine.run {model_name}.{event} {len(new_records)} records")
27
29
 
28
30
  # Check if we're in a bypass context
29
31
  if ctx and hasattr(ctx, 'bypass_hooks') and ctx.bypass_hooks:
@@ -31,7 +33,7 @@ def run(model_cls, event, new_records, old_records=None, ctx=None):
31
33
  return
32
34
 
33
35
  # For BEFORE_* events, run model.clean() first for validation
34
- if event.startswith("before_"):
36
+ if event.lower().startswith("before_"):
35
37
  for instance in new_records:
36
38
  try:
37
39
  instance.clean()
@@ -41,7 +43,9 @@ def run(model_cls, event, new_records, old_records=None, ctx=None):
41
43
 
42
44
  # Process hooks
43
45
  for handler_cls, method_name, condition, priority in hooks:
44
- logger.debug(f"Processing {handler_cls.__name__}.{method_name}")
46
+ # Safely get handler class name
47
+ handler_name = getattr(handler_cls, '__name__', str(handler_cls))
48
+ logger.debug(f"Processing {handler_name}.{method_name}")
45
49
  handler_instance = handler_cls()
46
50
  func = getattr(handler_instance, method_name)
47
51
 
@@ -63,7 +67,7 @@ def run(model_cls, event, new_records, old_records=None, ctx=None):
63
67
  to_process_old.append(original)
64
68
 
65
69
  if to_process_new:
66
- logger.debug(f"Executing {handler_cls.__name__}.{method_name} for {len(to_process_new)} records")
70
+ logger.debug(f"Executing {handler_name}.{method_name} for {len(to_process_new)} records")
67
71
  try:
68
72
  func(
69
73
  new_records=to_process_new,
@@ -1,188 +1,188 @@
1
- import logging
2
- import threading
3
- from collections import deque
4
-
5
- from django.db import transaction
6
-
7
- from django_bulk_hooks.registry import get_hooks, register_hook
8
-
9
- logger = logging.getLogger(__name__)
10
-
11
-
12
- # Thread-local hook context and hook state
13
- class HookVars(threading.local):
14
- def __init__(self):
15
- self.new = None
16
- self.old = None
17
- self.event = None
18
- self.model = None
19
- self.depth = 0
20
-
21
-
22
- hook_vars = HookVars()
23
-
24
- # Hook queue per thread
25
- _hook_context = threading.local()
26
-
27
-
28
- def get_hook_queue():
29
- if not hasattr(_hook_context, "queue"):
30
- _hook_context.queue = deque()
31
- return _hook_context.queue
32
-
33
-
34
- class HookContextState:
35
- @property
36
- def is_before(self):
37
- return hook_vars.event.startswith("before_") if hook_vars.event else False
38
-
39
- @property
40
- def is_after(self):
41
- return hook_vars.event.startswith("after_") if hook_vars.event else False
42
-
43
- @property
44
- def is_create(self):
45
- return "create" in hook_vars.event if hook_vars.event else False
46
-
47
- @property
48
- def is_update(self):
49
- return "update" in hook_vars.event if hook_vars.event else False
50
-
51
- @property
52
- def new(self):
53
- return hook_vars.new
54
-
55
- @property
56
- def old(self):
57
- return hook_vars.old
58
-
59
- @property
60
- def model(self):
61
- return hook_vars.model
62
-
63
-
64
- HookContext = HookContextState()
65
-
66
-
67
- class HookMeta(type):
68
- _registered = set()
69
-
70
- def __new__(mcs, name, bases, namespace):
71
- cls = super().__new__(mcs, name, bases, namespace)
72
- for method_name, method in namespace.items():
73
- if hasattr(method, "hooks_hooks"):
74
- for model_cls, event, condition, priority in method.hooks_hooks:
75
- key = (model_cls, event, cls, method_name)
76
- if key not in HookMeta._registered:
77
- register_hook(
78
- model=model_cls,
79
- event=event,
80
- handler_cls=cls,
81
- method_name=method_name,
82
- condition=condition,
83
- priority=priority,
84
- )
85
- HookMeta._registered.add(key)
86
- return cls
87
-
88
-
89
- class Hook(metaclass=HookMeta):
90
- @classmethod
91
- def handle(
92
- cls,
93
- event: str,
94
- model: type,
95
- *,
96
- new_records: list = None,
97
- old_records: list = None,
98
- **kwargs,
99
- ) -> None:
100
- queue = get_hook_queue()
101
- queue.append((cls, event, model, new_records, old_records, kwargs))
102
- logger.debug(f"Added item to queue: {event}, depth: {hook_vars.depth}")
103
-
104
- # If we're already processing hooks (depth > 0), don't process the queue
105
- # The outermost call will process the entire queue
106
- if hook_vars.depth > 0:
107
- logger.debug(f"Depth > 0, returning without processing queue")
108
- return
109
-
110
- # Process the entire queue
111
- logger.debug(f"Processing queue with {len(queue)} items")
112
- while queue:
113
- item = queue.popleft()
114
- if len(item) == 6:
115
- cls_, event_, model_, new_, old_, kw_ = item
116
- logger.debug(f"Processing queue item: {event_}")
117
- # Call _process on the Hook class, not the calling class
118
- Hook._process(event_, model_, new_, old_, **kw_)
119
- else:
120
- logger.warning(f"Invalid queue item format: {item}")
121
- continue
122
-
123
- @classmethod
124
- def _process(
125
- cls,
126
- event,
127
- model,
128
- new_records,
129
- old_records,
130
- **kwargs,
131
- ):
132
- hook_vars.depth += 1
133
- hook_vars.new = new_records
134
- hook_vars.old = old_records
135
- hook_vars.event = event
136
- hook_vars.model = model
137
-
138
- hooks = sorted(get_hooks(model, event), key=lambda x: x[3])
139
- logger.debug(f"Found {len(hooks)} hooks for {event}")
140
-
141
- def _execute():
142
- logger.debug(f"Executing {len(hooks)} hooks for {event}")
143
- new_local = new_records or []
144
- old_local = old_records or []
145
- if len(old_local) < len(new_local):
146
- old_local += [None] * (len(new_local) - len(old_local))
147
-
148
- for handler_cls, method_name, condition, priority in hooks:
149
- logger.debug(f"Processing hook {handler_cls.__name__}.{method_name}")
150
- if condition is not None:
151
- checks = [
152
- condition.check(n, o) for n, o in zip(new_local, old_local)
153
- ]
154
- if not any(checks):
155
- logger.debug(f"Condition failed for {handler_cls.__name__}.{method_name}")
156
- continue
157
-
158
- handler = handler_cls()
159
- method = getattr(handler, method_name)
160
- logger.debug(f"Executing {handler_cls.__name__}.{method_name}")
161
-
162
- try:
163
- method(
164
- new_records=new_local,
165
- old_records=old_local,
166
- **kwargs,
167
- )
168
- logger.debug(f"Successfully executed {handler_cls.__name__}.{method_name}")
169
- except Exception:
170
- logger.exception(
171
- "Error in hook %s.%s", handler_cls.__name__, method_name
172
- )
173
-
174
- conn = transaction.get_connection()
175
- logger.debug(f"Transaction in_atomic_block: {conn.in_atomic_block}, event: {event}")
176
- try:
177
- if conn.in_atomic_block and event.startswith("after_"):
178
- logger.debug(f"Deferring {event} to on_commit")
179
- transaction.on_commit(_execute)
180
- else:
181
- logger.debug(f"Executing {event} immediately")
182
- _execute()
183
- finally:
184
- hook_vars.new = None
185
- hook_vars.old = None
186
- hook_vars.event = None
187
- hook_vars.model = None
188
- hook_vars.depth -= 1
1
+ import logging
2
+ import threading
3
+ from collections import deque
4
+
5
+ from django.db import transaction
6
+
7
+ from django_bulk_hooks.registry import get_hooks, register_hook
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
+ # Thread-local hook context and hook state
13
+ class HookVars(threading.local):
14
+ def __init__(self):
15
+ self.new = None
16
+ self.old = None
17
+ self.event = None
18
+ self.model = None
19
+ self.depth = 0
20
+
21
+
22
+ hook_vars = HookVars()
23
+
24
+ # Hook queue per thread
25
+ _hook_context = threading.local()
26
+
27
+
28
+ def get_hook_queue():
29
+ if not hasattr(_hook_context, "queue"):
30
+ _hook_context.queue = deque()
31
+ return _hook_context.queue
32
+
33
+
34
+ class HookContextState:
35
+ @property
36
+ def is_before(self):
37
+ return hook_vars.event.startswith("before_") if hook_vars.event else False
38
+
39
+ @property
40
+ def is_after(self):
41
+ return hook_vars.event.startswith("after_") if hook_vars.event else False
42
+
43
+ @property
44
+ def is_create(self):
45
+ return "create" in hook_vars.event if hook_vars.event else False
46
+
47
+ @property
48
+ def is_update(self):
49
+ return "update" in hook_vars.event if hook_vars.event else False
50
+
51
+ @property
52
+ def new(self):
53
+ return hook_vars.new
54
+
55
+ @property
56
+ def old(self):
57
+ return hook_vars.old
58
+
59
+ @property
60
+ def model(self):
61
+ return hook_vars.model
62
+
63
+
64
+ HookContext = HookContextState()
65
+
66
+
67
+ class HookMeta(type):
68
+ _registered = set()
69
+
70
+ def __new__(mcs, name, bases, namespace):
71
+ cls = super().__new__(mcs, name, bases, namespace)
72
+ for method_name, method in namespace.items():
73
+ if hasattr(method, "hooks_hooks"):
74
+ for model_cls, event, condition, priority in method.hooks_hooks:
75
+ key = (model_cls, event, cls, method_name)
76
+ if key not in HookMeta._registered:
77
+ register_hook(
78
+ model=model_cls,
79
+ event=event,
80
+ handler_cls=cls,
81
+ method_name=method_name,
82
+ condition=condition,
83
+ priority=priority,
84
+ )
85
+ HookMeta._registered.add(key)
86
+ return cls
87
+
88
+
89
+ class Hook(metaclass=HookMeta):
90
+ @classmethod
91
+ def handle(
92
+ cls,
93
+ event: str,
94
+ model: type,
95
+ *,
96
+ new_records: list = None,
97
+ old_records: list = None,
98
+ **kwargs,
99
+ ) -> None:
100
+ queue = get_hook_queue()
101
+ queue.append((cls, event, model, new_records, old_records, kwargs))
102
+ logger.debug(f"Added item to queue: {event}, depth: {hook_vars.depth}")
103
+
104
+ # If we're already processing hooks (depth > 0), don't process the queue
105
+ # The outermost call will process the entire queue
106
+ if hook_vars.depth > 0:
107
+ logger.debug(f"Depth > 0, returning without processing queue")
108
+ return
109
+
110
+ # Process the entire queue
111
+ logger.debug(f"Processing queue with {len(queue)} items")
112
+ while queue:
113
+ item = queue.popleft()
114
+ if len(item) == 6:
115
+ cls_, event_, model_, new_, old_, kw_ = item
116
+ logger.debug(f"Processing queue item: {event_}")
117
+ # Call _process on the Hook class, not the calling class
118
+ Hook._process(event_, model_, new_, old_, **kw_)
119
+ else:
120
+ logger.warning(f"Invalid queue item format: {item}")
121
+ continue
122
+
123
+ @classmethod
124
+ def _process(
125
+ cls,
126
+ event,
127
+ model,
128
+ new_records,
129
+ old_records,
130
+ **kwargs,
131
+ ):
132
+ hook_vars.depth += 1
133
+ hook_vars.new = new_records
134
+ hook_vars.old = old_records
135
+ hook_vars.event = event
136
+ hook_vars.model = model
137
+
138
+ hooks = sorted(get_hooks(model, event), key=lambda x: x[3])
139
+ logger.debug(f"Found {len(hooks)} hooks for {event}")
140
+
141
+ def _execute():
142
+ logger.debug(f"Executing {len(hooks)} hooks for {event}")
143
+ new_local = new_records or []
144
+ old_local = old_records or []
145
+ if len(old_local) < len(new_local):
146
+ old_local += [None] * (len(new_local) - len(old_local))
147
+
148
+ for handler_cls, method_name, condition, priority in hooks:
149
+ logger.debug(f"Processing hook {handler_cls.__name__}.{method_name}")
150
+ if condition is not None:
151
+ checks = [
152
+ condition.check(n, o) for n, o in zip(new_local, old_local)
153
+ ]
154
+ if not any(checks):
155
+ logger.debug(f"Condition failed for {handler_cls.__name__}.{method_name}")
156
+ continue
157
+
158
+ handler = handler_cls()
159
+ method = getattr(handler, method_name)
160
+ logger.debug(f"Executing {handler_cls.__name__}.{method_name}")
161
+
162
+ try:
163
+ method(
164
+ new_records=new_local,
165
+ old_records=old_local,
166
+ **kwargs,
167
+ )
168
+ logger.debug(f"Successfully executed {handler_cls.__name__}.{method_name}")
169
+ except Exception:
170
+ logger.exception(
171
+ "Error in hook %s.%s", handler_cls.__name__, method_name
172
+ )
173
+
174
+ conn = transaction.get_connection()
175
+ logger.debug(f"Transaction in_atomic_block: {conn.in_atomic_block}, event: {event}")
176
+ try:
177
+ if conn.in_atomic_block and event.startswith("after_"):
178
+ logger.debug(f"Deferring {event} to on_commit")
179
+ transaction.on_commit(_execute)
180
+ else:
181
+ logger.debug(f"Executing {event} immediately")
182
+ _execute()
183
+ finally:
184
+ hook_vars.new = None
185
+ hook_vars.old = None
186
+ hook_vars.event = None
187
+ hook_vars.model = None
188
+ hook_vars.depth -= 1
@@ -60,7 +60,7 @@ class HookModelMixin(models.Model):
60
60
  # If bypass_hooks is True, use base manager to avoid triggering hooks
61
61
  if bypass_hooks:
62
62
  logger.debug(f"save() called with bypass_hooks=True for {self.__class__.__name__} pk={self.pk}")
63
- return self._base_manager.save(self, *args, **kwargs)
63
+ return self.__class__._base_manager.save(self, *args, **kwargs)
64
64
 
65
65
  is_create = self.pk is None
66
66
 
@@ -99,7 +99,7 @@ class HookModelMixin(models.Model):
99
99
  def delete(self, *args, bypass_hooks=False, **kwargs):
100
100
  # If bypass_hooks is True, use base manager to avoid triggering hooks
101
101
  if bypass_hooks:
102
- return self._base_manager.delete(self, *args, **kwargs)
102
+ return self.__class__._base_manager.delete(self, *args, **kwargs)
103
103
 
104
104
  ctx = HookContext(self.__class__)
105
105
 
@@ -73,39 +73,57 @@ class HookQuerySetMixin:
73
73
  # Check if any of the update values are Subquery objects
74
74
  try:
75
75
  from django.db.models import Subquery
76
+
76
77
  logger.debug(f"Successfully imported Subquery from django.db.models")
77
78
  except ImportError as e:
78
79
  logger.error(f"Failed to import Subquery: {e}")
79
80
  raise
80
-
81
+
81
82
  logger.debug(f"Checking for Subquery objects in {len(kwargs)} kwargs")
82
-
83
+
83
84
  subquery_detected = []
84
85
  for key, value in kwargs.items():
85
86
  is_subquery = isinstance(value, Subquery)
86
- logger.debug(f"Key '{key}': type={type(value).__name__}, is_subquery={is_subquery}")
87
+ logger.debug(
88
+ f"Key '{key}': type={type(value).__name__}, is_subquery={is_subquery}"
89
+ )
87
90
  if is_subquery:
88
91
  subquery_detected.append(key)
89
-
92
+
90
93
  has_subquery = len(subquery_detected) > 0
91
- logger.debug(f"Subquery detection result: {has_subquery}, detected keys: {subquery_detected}")
92
-
94
+ logger.debug(
95
+ f"Subquery detection result: {has_subquery}, detected keys: {subquery_detected}"
96
+ )
97
+
93
98
  # Debug logging for Subquery detection
94
99
  logger.debug(f"Update kwargs: {list(kwargs.keys())}")
95
- logger.debug(f"Update kwargs types: {[(k, type(v).__name__) for k, v in kwargs.items()]}")
96
-
100
+ logger.debug(
101
+ f"Update kwargs types: {[(k, type(v).__name__) for k, v in kwargs.items()]}"
102
+ )
103
+
97
104
  if has_subquery:
98
- logger.debug(f"Detected Subquery in update: {[k for k, v in kwargs.items() if isinstance(v, Subquery)]}")
105
+ logger.debug(
106
+ f"Detected Subquery in update: {[k for k, v in kwargs.items() if isinstance(v, Subquery)]}"
107
+ )
99
108
  else:
100
109
  # Check if we missed any Subquery objects
101
110
  for k, v in kwargs.items():
102
- if hasattr(v, 'query') and hasattr(v, 'resolve_expression'):
103
- logger.warning(f"Potential Subquery-like object detected but not recognized: {k}={type(v).__name__}")
104
- logger.warning(f"Object attributes: query={hasattr(v, 'query')}, resolve_expression={hasattr(v, 'resolve_expression')}")
105
- logger.warning(f"Object dir: {[attr for attr in dir(v) if not attr.startswith('_')][:10]}")
111
+ if hasattr(v, "query") and hasattr(v, "resolve_expression"):
112
+ logger.warning(
113
+ f"Potential Subquery-like object detected but not recognized: {k}={type(v).__name__}"
114
+ )
115
+ logger.warning(
116
+ f"Object attributes: query={hasattr(v, 'query')}, resolve_expression={hasattr(v, 'resolve_expression')}"
117
+ )
118
+ logger.warning(
119
+ f"Object dir: {[attr for attr in dir(v) if not attr.startswith('_')][:10]}"
120
+ )
106
121
 
107
122
  # Apply field updates to instances
108
123
  # If a per-object value map exists (from bulk_update), prefer it over kwargs
124
+ # IMPORTANT: Do not assign Django expression objects (e.g., Subquery/Case/F)
125
+ # to in-memory instances before running BEFORE_UPDATE hooks. Hooks must not
126
+ # receive unresolved expression objects.
109
127
  per_object_values = get_bulk_update_value_map()
110
128
  for obj in instances:
111
129
  if per_object_values and obj.pk in per_object_values:
@@ -113,7 +131,22 @@ class HookQuerySetMixin:
113
131
  setattr(obj, field, value)
114
132
  else:
115
133
  for field, value in kwargs.items():
116
- setattr(obj, field, value)
134
+ # Skip assigning expression-like objects (they will be handled at DB level)
135
+ is_subquery = isinstance(value, Subquery)
136
+ is_expression_like = hasattr(value, "resolve_expression")
137
+ if is_subquery or is_expression_like:
138
+ # Special-case Value() which can be unwrapped safely
139
+ if isinstance(value, Value):
140
+ try:
141
+ setattr(obj, field, value.value)
142
+ except Exception:
143
+ # If Value cannot be unwrapped for any reason, skip assignment
144
+ continue
145
+ else:
146
+ # Do not assign unresolved expressions to in-memory objects
147
+ continue
148
+ else:
149
+ setattr(obj, field, value)
117
150
 
118
151
  # Check if we're in a bulk operation context to prevent double hook execution
119
152
  from django_bulk_hooks.context import get_bypass_hooks
@@ -164,22 +197,32 @@ class HookQuerySetMixin:
164
197
  output_field = field_obj
165
198
  target_name = field_name
166
199
 
167
- # Special handling for Subquery values in CASE statements
200
+ # Special handling for Subquery and other expression values in CASE statements
168
201
  if isinstance(value, Subquery):
169
- logger.debug(f"Creating When statement with Subquery for {field_name}")
202
+ logger.debug(
203
+ f"Creating When statement with Subquery for {field_name}"
204
+ )
170
205
  # Ensure the Subquery has proper output_field
171
- if not hasattr(value, 'output_field') or value.output_field is None:
206
+ if (
207
+ not hasattr(value, "output_field")
208
+ or value.output_field is None
209
+ ):
172
210
  value.output_field = output_field
173
- logger.debug(f"Set output_field for Subquery in When statement to {output_field}")
174
- when_statements.append(
175
- When(
176
- pk=obj_pk, then=value
211
+ logger.debug(
212
+ f"Set output_field for Subquery in When statement to {output_field}"
177
213
  )
214
+ when_statements.append(When(pk=obj_pk, then=value))
215
+ elif hasattr(value, "resolve_expression"):
216
+ # Handle other expression objects (Case, F, etc.)
217
+ logger.debug(
218
+ f"Creating When statement with expression for {field_name}: {type(value).__name__}"
178
219
  )
220
+ when_statements.append(When(pk=obj_pk, then=value))
179
221
  else:
180
222
  when_statements.append(
181
223
  When(
182
- pk=obj_pk, then=Value(value, output_field=output_field)
224
+ pk=obj_pk,
225
+ then=Value(value, output_field=output_field),
183
226
  )
184
227
  )
185
228
 
@@ -190,40 +233,52 @@ class HookQuerySetMixin:
190
233
 
191
234
  # Merge extra CASE updates into kwargs for DB update
192
235
  if case_statements:
193
- logger.debug(f"Adding case statements to kwargs: {list(case_statements.keys())}")
236
+ logger.debug(
237
+ f"Adding case statements to kwargs: {list(case_statements.keys())}"
238
+ )
194
239
  for field_name, case_stmt in case_statements.items():
195
- logger.debug(f"Case statement for {field_name}: {type(case_stmt).__name__}")
240
+ logger.debug(
241
+ f"Case statement for {field_name}: {type(case_stmt).__name__}"
242
+ )
196
243
  # Check if the case statement contains Subquery objects
197
- if hasattr(case_stmt, 'get_source_expressions'):
244
+ if hasattr(case_stmt, "get_source_expressions"):
198
245
  source_exprs = case_stmt.get_source_expressions()
199
246
  for expr in source_exprs:
200
247
  if isinstance(expr, Subquery):
201
- logger.debug(f"Case statement for {field_name} contains Subquery")
202
- elif hasattr(expr, 'get_source_expressions'):
248
+ logger.debug(
249
+ f"Case statement for {field_name} contains Subquery"
250
+ )
251
+ elif hasattr(expr, "get_source_expressions"):
203
252
  # Check nested expressions (like Value objects)
204
253
  nested_exprs = expr.get_source_expressions()
205
254
  for nested_expr in nested_exprs:
206
255
  if isinstance(nested_expr, Subquery):
207
- logger.debug(f"Case statement for {field_name} contains nested Subquery")
208
-
256
+ logger.debug(
257
+ f"Case statement for {field_name} contains nested Subquery"
258
+ )
259
+
209
260
  kwargs = {**kwargs, **case_statements}
210
261
 
211
262
  # Use Django's built-in update logic directly
212
263
  # Call the base QuerySet implementation to avoid recursion
213
-
264
+
214
265
  # Additional safety check: ensure Subquery objects are properly handled
215
266
  # This prevents the "cannot adapt type 'Subquery'" error
216
267
  safe_kwargs = {}
217
268
  logger.debug(f"Processing {len(kwargs)} kwargs for safety check")
218
-
269
+
219
270
  for key, value in kwargs.items():
220
- logger.debug(f"Processing key '{key}' with value type {type(value).__name__}")
221
-
271
+ logger.debug(
272
+ f"Processing key '{key}' with value type {type(value).__name__}"
273
+ )
274
+
222
275
  if isinstance(value, Subquery):
223
276
  logger.debug(f"Found Subquery for field {key}")
224
277
  # Ensure Subquery has proper output_field
225
- if not hasattr(value, 'output_field') or value.output_field is None:
226
- logger.warning(f"Subquery for field {key} missing output_field, attempting to infer")
278
+ if not hasattr(value, "output_field") or value.output_field is None:
279
+ logger.warning(
280
+ f"Subquery for field {key} missing output_field, attempting to infer"
281
+ )
227
282
  # Try to infer from the model field
228
283
  try:
229
284
  field = model_cls._meta.get_field(key)
@@ -232,35 +287,52 @@ class HookQuerySetMixin:
232
287
  value.output_field = field
233
288
  logger.debug(f"Set output_field to {field}")
234
289
  except Exception as e:
235
- logger.error(f"Failed to infer output_field for Subquery on {key}: {e}")
290
+ logger.error(
291
+ f"Failed to infer output_field for Subquery on {key}: {e}"
292
+ )
236
293
  raise
237
294
  else:
238
- logger.debug(f"Subquery for field {key} already has output_field: {value.output_field}")
295
+ logger.debug(
296
+ f"Subquery for field {key} already has output_field: {value.output_field}"
297
+ )
239
298
  safe_kwargs[key] = value
240
- elif hasattr(value, 'get_source_expressions') and hasattr(value, 'resolve_expression'):
299
+ elif hasattr(value, "get_source_expressions") and hasattr(
300
+ value, "resolve_expression"
301
+ ):
241
302
  # Handle Case statements and other complex expressions
242
- logger.debug(f"Found complex expression for field {key}: {type(value).__name__}")
243
-
303
+ logger.debug(
304
+ f"Found complex expression for field {key}: {type(value).__name__}"
305
+ )
306
+
244
307
  # Check if this expression contains any Subquery objects
245
308
  source_expressions = value.get_source_expressions()
246
309
  has_nested_subquery = False
247
-
310
+
248
311
  for expr in source_expressions:
249
312
  if isinstance(expr, Subquery):
250
313
  has_nested_subquery = True
251
314
  logger.debug(f"Found nested Subquery in {type(value).__name__}")
252
315
  # Ensure the nested Subquery has proper output_field
253
- if not hasattr(expr, 'output_field') or expr.output_field is None:
316
+ if (
317
+ not hasattr(expr, "output_field")
318
+ or expr.output_field is None
319
+ ):
254
320
  try:
255
321
  field = model_cls._meta.get_field(key)
256
322
  expr.output_field = field
257
- logger.debug(f"Set output_field for nested Subquery to {field}")
323
+ logger.debug(
324
+ f"Set output_field for nested Subquery to {field}"
325
+ )
258
326
  except Exception as e:
259
- logger.error(f"Failed to set output_field for nested Subquery: {e}")
327
+ logger.error(
328
+ f"Failed to set output_field for nested Subquery: {e}"
329
+ )
260
330
  raise
261
-
331
+
262
332
  if has_nested_subquery:
263
- logger.debug(f"Expression contains Subquery, ensuring proper output_field")
333
+ logger.debug(
334
+ f"Expression contains Subquery, ensuring proper output_field"
335
+ )
264
336
  # Try to resolve the expression to ensure it's properly formatted
265
337
  try:
266
338
  resolved_value = value.resolve_expression(None, None)
@@ -272,12 +344,16 @@ class HookQuerySetMixin:
272
344
  else:
273
345
  safe_kwargs[key] = value
274
346
  else:
275
- logger.debug(f"Non-Subquery value for field {key}: {type(value).__name__}")
347
+ logger.debug(
348
+ f"Non-Subquery value for field {key}: {type(value).__name__}"
349
+ )
276
350
  safe_kwargs[key] = value
277
-
351
+
278
352
  logger.debug(f"Safe kwargs keys: {list(safe_kwargs.keys())}")
279
- logger.debug(f"Safe kwargs types: {[(k, type(v).__name__) for k, v in safe_kwargs.items()]}")
280
-
353
+ logger.debug(
354
+ f"Safe kwargs types: {[(k, type(v).__name__) for k, v in safe_kwargs.items()]}"
355
+ )
356
+
281
357
  logger.debug(f"Calling super().update() with {len(safe_kwargs)} kwargs")
282
358
  try:
283
359
  update_count = super().update(**safe_kwargs)
@@ -313,10 +389,10 @@ class HookQuerySetMixin:
313
389
  # For subquery operations, we need to run hooks even if we're in a bulk context
314
390
  # because subqueries bypass the normal object-level update flow
315
391
  should_run_hooks = (
316
- not current_bypass_hooks or
317
- has_subquery # Always run hooks for subquery operations
392
+ not current_bypass_hooks
393
+ or has_subquery # Always run hooks for subquery operations
318
394
  )
319
-
395
+
320
396
  if should_run_hooks:
321
397
  logger.debug("update: running AFTER_UPDATE")
322
398
  engine.run(model_cls, AFTER_UPDATE, instances, originals, ctx=ctx)
@@ -523,6 +599,9 @@ class HookQuerySetMixin:
523
599
  """
524
600
  Detect fields that were modified during BEFORE_UPDATE hooks by comparing
525
601
  new instances with their original values.
602
+
603
+ IMPORTANT: Skip fields that contain Django expression objects (Subquery, Case, etc.)
604
+ as these should not be treated as in-memory modifications.
526
605
  """
527
606
  if not original_instances:
528
607
  return set()
@@ -539,15 +618,28 @@ class HookQuerySetMixin:
539
618
  if field.name == "id":
540
619
  continue
541
620
 
621
+ # Get the new value to check if it's an expression object
622
+ new_value = getattr(new_instance, field.name)
623
+
624
+ # Skip fields that contain expression objects - these are not in-memory modifications
625
+ # but rather database-level expressions that should not be applied to instances
626
+ from django.db.models import Subquery
627
+
628
+ if isinstance(new_value, Subquery) or hasattr(
629
+ new_value, "resolve_expression"
630
+ ):
631
+ logger.debug(
632
+ f"Skipping field {field.name} with expression value: {type(new_value).__name__}"
633
+ )
634
+ continue
635
+
542
636
  # Handle different field types appropriately
543
637
  if field.is_relation:
544
638
  # Compare by raw id values to catch cases where only <fk>_id was set
545
- new_pk = getattr(new_instance, field.attname, None)
546
639
  original_pk = getattr(original, field.attname, None)
547
- if new_pk != original_pk:
640
+ if new_value != original_pk:
548
641
  modified_fields.add(field.name)
549
642
  else:
550
- new_value = getattr(new_instance, field.name)
551
643
  original_value = getattr(original, field.name)
552
644
  if new_value != original_value:
553
645
  modified_fields.add(field.name)
@@ -1,6 +1,6 @@
1
1
  [tool.poetry]
2
2
  name = "django-bulk-hooks"
3
- version = "0.1.241"
3
+ version = "0.1.242"
4
4
  description = "Hook-style hooks for Django bulk operations like bulk_create and bulk_update."
5
5
  authors = ["Konrad Beck <konrad.beck@merchantcapital.co.za>"]
6
6
  readme = "README.md"