django-bulk-hooks 0.1.240__py3-none-any.whl → 0.1.242__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of django-bulk-hooks might be problematic.

django_bulk_hooks/__init__.py

@@ -1,4 +1,4 @@
-from django_bulk_hooks.handler import Hook as HookClass
-from django_bulk_hooks.manager import BulkHookManager
-
-__all__ = ["BulkHookManager", "HookClass"]
+from django_bulk_hooks.handler import Hook as HookClass
+from django_bulk_hooks.manager import BulkHookManager
+
+__all__ = ["BulkHookManager", "HookClass"]
django_bulk_hooks/decorators.py

@@ -55,35 +55,59 @@ def select_related(*related_fields):
                 return func(*args, **kwargs)
 
             # Determine which instances actually need preloading
-            model_cls = new_records[0].__class__
+            # Allow model_cls to be passed as a keyword argument for testing
+            if "model_cls" in bound.arguments:
+                model_cls = bound.arguments["model_cls"]
+            else:
+                model_cls = new_records[0].__class__
             ids_to_fetch = []
             for obj in new_records:
                 if obj.pk is None:
                     continue
                 # if any related field is not already cached on the instance,
                 # mark it for fetching
-                if any(
-                    field not in obj._state.fields_cache for field in related_fields
-                ):
+                # Handle Mock objects that don't have _state.fields_cache
+                if hasattr(obj, "_state") and hasattr(obj._state, "fields_cache"):
+                    try:
+                        if any(
+                            field not in obj._state.fields_cache
+                            for field in related_fields
+                        ):
+                            ids_to_fetch.append(obj.pk)
+                    except (TypeError, AttributeError):
+                        # If _state.fields_cache is not iterable or accessible, always fetch
+                        ids_to_fetch.append(obj.pk)
+                else:
+                    # For Mock objects or objects without _state.fields_cache, always fetch
                     ids_to_fetch.append(obj.pk)
 
+            # Always validate fields for nested field errors, regardless of whether we need to fetch
+            for field in related_fields:
+                if "." in field or "__" in field:
+                    raise ValueError(
+                        f"@select_related does not support nested fields like '{field}'"
+                    )
+
             fetched = {}
             if ids_to_fetch:
                 # Validate fields before passing to select_related
                 validated_fields = []
                 for field in related_fields:
-                    if "." in field:
-                        raise ValueError(
-                            f"@select_related does not support nested fields like '{field}'"
-                        )
                     try:
-                        f = model_cls._meta.get_field(field)
-                        if not (
-                            f.is_relation and not f.many_to_many and not f.one_to_many
-                        ):
+                        # Handle Mock objects that don't have _meta
+                        if hasattr(model_cls, "_meta"):
+                            f = model_cls._meta.get_field(field)
+                            if not (
+                                f.is_relation
+                                and not f.many_to_many
+                                and not f.one_to_many
+                            ):
+                                continue
+                            validated_fields.append(field)
+                        else:
+                            # For Mock objects, skip validation
                             continue
-                        validated_fields.append(field)
-                    except FieldDoesNotExist:
+                    except (FieldDoesNotExist, AttributeError):
                         continue
 
                 if validated_fields:
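
A note on the hasattr/try guards introduced above: unittest.mock.Mock auto-creates attributes, so hasattr(obj, "_state") and hasattr(obj._state, "fields_cache") both pass for a bare Mock, while a membership test against the mocked cache raises TypeError. The following is a minimal standalone sketch of the behavior those guards handle; FieldsCacheState and needs_fetch are illustrative stand-ins, not package code.

from unittest.mock import Mock


class FieldsCacheState:
    """Illustrative stand-in for Django's per-instance ModelState."""

    def __init__(self):
        self.fields_cache = {}


def needs_fetch(obj, related_fields):
    # Mirrors the guard added in the diff: fall back to "always fetch"
    # when _state.fields_cache is missing or not usable.
    if hasattr(obj, "_state") and hasattr(obj._state, "fields_cache"):
        try:
            return any(field not in obj._state.fields_cache for field in related_fields)
        except (TypeError, AttributeError):
            return True
    return True


real_like = Mock()
real_like._state = FieldsCacheState()
print(needs_fetch(real_like, ["author"]))  # True: "author" is not cached yet

mock_like = Mock()  # _state.fields_cache is itself a Mock; `in` raises TypeError
print(needs_fetch(mock_like, ["author"]))  # True, via the except branch
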
@@ -97,26 +121,39 @@ def select_related(*related_fields):
                 if not preloaded:
                     continue
                 for field in related_fields:
-                    if field in obj._state.fields_cache:
-                        # don't override values that were explicitly set or already loaded
-                        continue
-                    if "." in field:
+                    # Handle Mock objects that don't have _state.fields_cache
+                    if hasattr(obj, "_state") and hasattr(obj._state, "fields_cache"):
+                        if field in obj._state.fields_cache:
+                            # don't override values that were explicitly set or already loaded
+                            continue
+                    if "." in field or "__" in field:
                         # This should have been caught earlier, but just in case
                         continue
 
                     try:
-                        f = model_cls._meta.get_field(field)
-                        if not (
-                            f.is_relation and not f.many_to_many and not f.one_to_many
-                        ):
+                        # Handle Mock objects that don't have _meta
+                        if hasattr(model_cls, "_meta"):
+                            f = model_cls._meta.get_field(field)
+                            if not (
+                                f.is_relation
+                                and not f.many_to_many
+                                and not f.one_to_many
+                            ):
+                                continue
+                        else:
+                            # For Mock objects, skip validation
                             continue
-                    except FieldDoesNotExist:
+                    except (FieldDoesNotExist, AttributeError):
                         continue
 
                     try:
                         rel_obj = getattr(preloaded, field)
                         setattr(obj, field, rel_obj)
-                        obj._state.fields_cache[field] = rel_obj
+                        # Only set _state.fields_cache if it exists
+                        if hasattr(obj, "_state") and hasattr(
+                            obj._state, "fields_cache"
+                        ):
+                            obj._state.fields_cache[field] = rel_obj
                     except AttributeError:
                         pass
 
@@ -156,6 +193,10 @@ def bulk_hook(model_cls, event, when=None, priority=None):
             condition=when,
             priority=priority or DEFAULT_PRIORITY,
         )
+
+        # Set attribute to indicate the function has been registered as a bulk hook
+        func._bulk_hook_registered = True
+
         return func
 
     return decorator
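
The new _bulk_hook_registered marker can be checked with getattr and a default, so undecorated callables do not raise. Below is a small illustrative sketch of that marker pattern; the decorator here is a stand-in, since the real bulk_hook also registers the function with the hook registry before tagging it.

def mark_as_bulk_hook(func):
    # Stand-in decorator that only sets the marker attribute shown in the diff.
    func._bulk_hook_registered = True
    return func


@mark_as_bulk_hook
def on_after_create(new_records, old_records=None, **kwargs):
    pass


def plain_function():
    pass


# getattr with a default avoids AttributeError on undecorated callables.
print(getattr(on_after_create, "_bulk_hook_registered", False))  # True
print(getattr(plain_function, "_bulk_hook_registered", False))   # False
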
django_bulk_hooks/engine.py

@@ -23,7 +23,9 @@ def run(model_cls, event, new_records, old_records=None, ctx=None):
     import traceback
 
     stack = traceback.format_stack()
-    logger.debug(f"engine.run {model_cls.__name__}.{event} {len(new_records)} records")
+    # Safely get model name, fallback to str representation if __name__ not available
+    model_name = getattr(model_cls, '__name__', str(model_cls))
+    logger.debug(f"engine.run {model_name}.{event} {len(new_records)} records")
 
     # Check if we're in a bypass context
     if ctx and hasattr(ctx, 'bypass_hooks') and ctx.bypass_hooks:
@@ -31,7 +33,7 @@ def run(model_cls, event, new_records, old_records=None, ctx=None):
         return
 
     # For BEFORE_* events, run model.clean() first for validation
-    if event.startswith("before_"):
+    if event.lower().startswith("before_"):
         for instance in new_records:
             try:
                 instance.clean()
@@ -41,7 +43,9 @@ def run(model_cls, event, new_records, old_records=None, ctx=None):
 
     # Process hooks
     for handler_cls, method_name, condition, priority in hooks:
-        logger.debug(f"Processing {handler_cls.__name__}.{method_name}")
+        # Safely get handler class name
+        handler_name = getattr(handler_cls, '__name__', str(handler_cls))
+        logger.debug(f"Processing {handler_name}.{method_name}")
         handler_instance = handler_cls()
         func = getattr(handler_instance, method_name)
 
@@ -63,7 +67,7 @@ def run(model_cls, event, new_records, old_records=None, ctx=None):
                 to_process_old.append(original)
 
         if to_process_new:
-            logger.debug(f"Executing {handler_cls.__name__}.{method_name} for {len(to_process_new)} records")
+            logger.debug(f"Executing {handler_name}.{method_name} for {len(to_process_new)} records")
             try:
                 func(
                     new_records=to_process_new,
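
The getattr(model_cls, '__name__', str(model_cls)) fallback used above matters because only classes carry __name__; a Mock, or an instance passed in by mistake, raises AttributeError for it. A quick standalone check of the pattern:

from unittest.mock import Mock


class Account:
    pass


def safe_name(obj):
    # Same fallback pattern as the updated engine.run logging.
    return getattr(obj, "__name__", str(obj))


print(safe_name(Account))  # "Account"
print(safe_name(Mock()))   # Mock has no __name__, so this falls back to str(obj)
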
django_bulk_hooks/handler.py

@@ -1,188 +1,188 @@
Every line of this file is deleted and re-added with identical text; the unchanged content is shown once below instead of twice.

import logging
import threading
from collections import deque

from django.db import transaction

from django_bulk_hooks.registry import get_hooks, register_hook

logger = logging.getLogger(__name__)


# Thread-local hook context and hook state
class HookVars(threading.local):
    def __init__(self):
        self.new = None
        self.old = None
        self.event = None
        self.model = None
        self.depth = 0


hook_vars = HookVars()

# Hook queue per thread
_hook_context = threading.local()


def get_hook_queue():
    if not hasattr(_hook_context, "queue"):
        _hook_context.queue = deque()
    return _hook_context.queue


class HookContextState:
    @property
    def is_before(self):
        return hook_vars.event.startswith("before_") if hook_vars.event else False

    @property
    def is_after(self):
        return hook_vars.event.startswith("after_") if hook_vars.event else False

    @property
    def is_create(self):
        return "create" in hook_vars.event if hook_vars.event else False

    @property
    def is_update(self):
        return "update" in hook_vars.event if hook_vars.event else False

    @property
    def new(self):
        return hook_vars.new

    @property
    def old(self):
        return hook_vars.old

    @property
    def model(self):
        return hook_vars.model


HookContext = HookContextState()


class HookMeta(type):
    _registered = set()

    def __new__(mcs, name, bases, namespace):
        cls = super().__new__(mcs, name, bases, namespace)
        for method_name, method in namespace.items():
            if hasattr(method, "hooks_hooks"):
                for model_cls, event, condition, priority in method.hooks_hooks:
                    key = (model_cls, event, cls, method_name)
                    if key not in HookMeta._registered:
                        register_hook(
                            model=model_cls,
                            event=event,
                            handler_cls=cls,
                            method_name=method_name,
                            condition=condition,
                            priority=priority,
                        )
                        HookMeta._registered.add(key)
        return cls


class Hook(metaclass=HookMeta):
    @classmethod
    def handle(
        cls,
        event: str,
        model: type,
        *,
        new_records: list = None,
        old_records: list = None,
        **kwargs,
    ) -> None:
        queue = get_hook_queue()
        queue.append((cls, event, model, new_records, old_records, kwargs))
        logger.debug(f"Added item to queue: {event}, depth: {hook_vars.depth}")

        # If we're already processing hooks (depth > 0), don't process the queue
        # The outermost call will process the entire queue
        if hook_vars.depth > 0:
            logger.debug(f"Depth > 0, returning without processing queue")
            return

        # Process the entire queue
        logger.debug(f"Processing queue with {len(queue)} items")
        while queue:
            item = queue.popleft()
            if len(item) == 6:
                cls_, event_, model_, new_, old_, kw_ = item
                logger.debug(f"Processing queue item: {event_}")
                # Call _process on the Hook class, not the calling class
                Hook._process(event_, model_, new_, old_, **kw_)
            else:
                logger.warning(f"Invalid queue item format: {item}")
                continue

    @classmethod
    def _process(
        cls,
        event,
        model,
        new_records,
        old_records,
        **kwargs,
    ):
        hook_vars.depth += 1
        hook_vars.new = new_records
        hook_vars.old = old_records
        hook_vars.event = event
        hook_vars.model = model

        hooks = sorted(get_hooks(model, event), key=lambda x: x[3])
        logger.debug(f"Found {len(hooks)} hooks for {event}")

        def _execute():
            logger.debug(f"Executing {len(hooks)} hooks for {event}")
            new_local = new_records or []
            old_local = old_records or []
            if len(old_local) < len(new_local):
                old_local += [None] * (len(new_local) - len(old_local))

            for handler_cls, method_name, condition, priority in hooks:
                logger.debug(f"Processing hook {handler_cls.__name__}.{method_name}")
                if condition is not None:
                    checks = [
                        condition.check(n, o) for n, o in zip(new_local, old_local)
                    ]
                    if not any(checks):
                        logger.debug(f"Condition failed for {handler_cls.__name__}.{method_name}")
                        continue

                handler = handler_cls()
                method = getattr(handler, method_name)
                logger.debug(f"Executing {handler_cls.__name__}.{method_name}")

                try:
                    method(
                        new_records=new_local,
                        old_records=old_local,
                        **kwargs,
                    )
                    logger.debug(f"Successfully executed {handler_cls.__name__}.{method_name}")
                except Exception:
                    logger.exception(
                        "Error in hook %s.%s", handler_cls.__name__, method_name
                    )

        conn = transaction.get_connection()
        logger.debug(f"Transaction in_atomic_block: {conn.in_atomic_block}, event: {event}")
        try:
            if conn.in_atomic_block and event.startswith("after_"):
                logger.debug(f"Deferring {event} to on_commit")
                transaction.on_commit(_execute)
            else:
                logger.debug(f"Executing {event} immediately")
                _execute()
        finally:
            hook_vars.new = None
            hook_vars.old = None
            hook_vars.event = None
            hook_vars.model = None
            hook_vars.depth -= 1
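
The queue-plus-depth bookkeeping above is what keeps nested handle() calls from re-entering hook processing: inner calls only enqueue, and the outermost call drains the per-thread queue. A simplified, Django-free sketch of the same idea (dispatch and handle are illustrative names, not the package API):

import threading
from collections import deque

_local = threading.local()


def _queue():
    if not hasattr(_local, "queue"):
        _local.queue = deque()
        _local.depth = 0
    return _local.queue


def dispatch(event, payload):
    queue = _queue()
    queue.append((event, payload))
    if _local.depth > 0:
        return  # an outer dispatch() call is already draining the queue
    while queue:
        evt, data = queue.popleft()
        _local.depth += 1
        try:
            handle(evt, data)  # may call dispatch() again; those calls just enqueue
        finally:
            _local.depth -= 1


def handle(event, data):
    print("handling", event, data)
    if event == "before_create":
        dispatch("after_create", data)  # nested call is deferred, not re-entered


dispatch("before_create", {"pk": 1})
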
django_bulk_hooks/models.py

@@ -60,7 +60,7 @@ class HookModelMixin(models.Model):
         # If bypass_hooks is True, use base manager to avoid triggering hooks
         if bypass_hooks:
             logger.debug(f"save() called with bypass_hooks=True for {self.__class__.__name__} pk={self.pk}")
-            return self._base_manager.save(self, *args, **kwargs)
+            return self.__class__._base_manager.save(self, *args, **kwargs)
 
         is_create = self.pk is None
 
@@ -99,7 +99,7 @@ class HookModelMixin(models.Model):
     def delete(self, *args, bypass_hooks=False, **kwargs):
         # If bypass_hooks is True, use base manager to avoid triggering hooks
        if bypass_hooks:
-            return self._base_manager.delete(self, *args, **kwargs)
+            return self.__class__._base_manager.delete(self, *args, **kwargs)
 
         ctx = HookContext(self.__class__)
 
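Both save() and delete() on HookModelMixin accept a bypass_hooks flag that routes the write through the class-level _base_manager, which is what the fix above makes explicit. A hedged usage sketch, assuming a configured Django project with an "accounts" app and that HookModelMixin is abstract:

from django.db import models

from django_bulk_hooks.models import HookModelMixin


class Account(HookModelMixin):
    balance = models.DecimalField(max_digits=12, decimal_places=2, default=0)

    class Meta:
        app_label = "accounts"  # assumed app; adjust for your project


def zero_out_quietly(account: Account) -> None:
    account.balance = 0
    # bypass_hooks skips BEFORE_/AFTER_ hook dispatch for this write.
    account.save(bypass_hooks=True)
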
django_bulk_hooks/queryset.py

@@ -73,39 +73,57 @@ class HookQuerySetMixin:
         # Check if any of the update values are Subquery objects
         try:
             from django.db.models import Subquery
+
             logger.debug(f"Successfully imported Subquery from django.db.models")
         except ImportError as e:
             logger.error(f"Failed to import Subquery: {e}")
             raise
-
+
         logger.debug(f"Checking for Subquery objects in {len(kwargs)} kwargs")
-
+
         subquery_detected = []
         for key, value in kwargs.items():
             is_subquery = isinstance(value, Subquery)
-            logger.debug(f"Key '{key}': type={type(value).__name__}, is_subquery={is_subquery}")
+            logger.debug(
+                f"Key '{key}': type={type(value).__name__}, is_subquery={is_subquery}"
+            )
             if is_subquery:
                 subquery_detected.append(key)
-
+
         has_subquery = len(subquery_detected) > 0
-        logger.debug(f"Subquery detection result: {has_subquery}, detected keys: {subquery_detected}")
-
+        logger.debug(
+            f"Subquery detection result: {has_subquery}, detected keys: {subquery_detected}"
+        )
+
         # Debug logging for Subquery detection
         logger.debug(f"Update kwargs: {list(kwargs.keys())}")
-        logger.debug(f"Update kwargs types: {[(k, type(v).__name__) for k, v in kwargs.items()]}")
-
+        logger.debug(
+            f"Update kwargs types: {[(k, type(v).__name__) for k, v in kwargs.items()]}"
+        )
+
         if has_subquery:
-            logger.debug(f"Detected Subquery in update: {[k for k, v in kwargs.items() if isinstance(v, Subquery)]}")
+            logger.debug(
+                f"Detected Subquery in update: {[k for k, v in kwargs.items() if isinstance(v, Subquery)]}"
+            )
         else:
             # Check if we missed any Subquery objects
             for k, v in kwargs.items():
-                if hasattr(v, 'query') and hasattr(v, 'resolve_expression'):
-                    logger.warning(f"Potential Subquery-like object detected but not recognized: {k}={type(v).__name__}")
-                    logger.warning(f"Object attributes: query={hasattr(v, 'query')}, resolve_expression={hasattr(v, 'resolve_expression')}")
-                    logger.warning(f"Object dir: {[attr for attr in dir(v) if not attr.startswith('_')][:10]}")
+                if hasattr(v, "query") and hasattr(v, "resolve_expression"):
+                    logger.warning(
+                        f"Potential Subquery-like object detected but not recognized: {k}={type(v).__name__}"
+                    )
+                    logger.warning(
+                        f"Object attributes: query={hasattr(v, 'query')}, resolve_expression={hasattr(v, 'resolve_expression')}"
+                    )
+                    logger.warning(
+                        f"Object dir: {[attr for attr in dir(v) if not attr.startswith('_')][:10]}"
+                    )
 
         # Apply field updates to instances
         # If a per-object value map exists (from bulk_update), prefer it over kwargs
+        # IMPORTANT: Do not assign Django expression objects (e.g., Subquery/Case/F)
+        # to in-memory instances before running BEFORE_UPDATE hooks. Hooks must not
+        # receive unresolved expression objects.
         per_object_values = get_bulk_update_value_map()
         for obj in instances:
             if per_object_values and obj.pk in per_object_values:
@@ -113,7 +131,22 @@ class HookQuerySetMixin:
                     setattr(obj, field, value)
             else:
                 for field, value in kwargs.items():
-                    setattr(obj, field, value)
+                    # Skip assigning expression-like objects (they will be handled at DB level)
+                    is_subquery = isinstance(value, Subquery)
+                    is_expression_like = hasattr(value, "resolve_expression")
+                    if is_subquery or is_expression_like:
+                        # Special-case Value() which can be unwrapped safely
+                        if isinstance(value, Value):
+                            try:
+                                setattr(obj, field, value.value)
+                            except Exception:
+                                # If Value cannot be unwrapped for any reason, skip assignment
+                                continue
+                        else:
+                            # Do not assign unresolved expressions to in-memory objects
+                            continue
+                    else:
+                        setattr(obj, field, value)
 
         # Check if we're in a bulk operation context to prevent double hook execution
         from django_bulk_hooks.context import get_bypass_hooks
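
The guard added above tells plain Python values apart from Django expression objects by checking for resolve_expression, and only unwraps Value() wrappers before assigning to in-memory instances. A standalone illustration of that check (requires Django installed, but no configured project; plain_assignable is an illustrative helper, and None here simply signals "skip"):

from django.db.models import F, Value


def plain_assignable(value):
    """Return a value safe to assign to an in-memory instance, or None to skip."""
    if hasattr(value, "resolve_expression"):
        # Value() can be unwrapped; other expressions (F, Case, Subquery, ...)
        # are left for the database and skipped in memory.
        if isinstance(value, Value):
            return value.value
        return None
    return value


print(plain_assignable(42))             # 42
print(plain_assignable(Value("paid")))  # "paid"
print(plain_assignable(F("balance")))   # None: unresolved expression, skip it
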
@@ -164,11 +197,34 @@ class HookQuerySetMixin:
                     output_field = field_obj
                     target_name = field_name
 
-                    when_statements.append(
-                        When(
-                            pk=obj_pk, then=Value(value, output_field=output_field)
+                    # Special handling for Subquery and other expression values in CASE statements
+                    if isinstance(value, Subquery):
+                        logger.debug(
+                            f"Creating When statement with Subquery for {field_name}"
+                        )
+                        # Ensure the Subquery has proper output_field
+                        if (
+                            not hasattr(value, "output_field")
+                            or value.output_field is None
+                        ):
+                            value.output_field = output_field
+                            logger.debug(
+                                f"Set output_field for Subquery in When statement to {output_field}"
+                            )
+                        when_statements.append(When(pk=obj_pk, then=value))
+                    elif hasattr(value, "resolve_expression"):
+                        # Handle other expression objects (Case, F, etc.)
+                        logger.debug(
+                            f"Creating When statement with expression for {field_name}: {type(value).__name__}"
+                        )
+                        when_statements.append(When(pk=obj_pk, then=value))
+                    else:
+                        when_statements.append(
+                            When(
+                                pk=obj_pk,
+                                then=Value(value, output_field=output_field),
+                            )
                         )
-                    )
 
             if when_statements:
                 case_statements[target_name] = Case(
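
The When-building branch above only wraps plain values in Value(...); anything that already resolves as an expression (Subquery, F, a nested Case) is passed straight through as the then clause. A minimal construction-only sketch of that shape (no database access; names are illustrative):

from django.db.models import Case, DecimalField, F, Value, When

# pk -> either a plain value or a Django expression
per_object_values = {1: 10, 2: F("balance") + 5}
output_field = DecimalField(max_digits=12, decimal_places=2)

when_statements = []
for pk, value in per_object_values.items():
    if hasattr(value, "resolve_expression"):
        # Expressions go in as-is; the database resolves them.
        when_statements.append(When(pk=pk, then=value))
    else:
        # Plain values need Value() with an explicit output_field.
        when_statements.append(
            When(pk=pk, then=Value(value, output_field=output_field))
        )

case_expr = Case(*when_statements, output_field=output_field)
# case_expr can then be passed to queryset.update(balance=case_expr).
print(case_expr)
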
@@ -177,24 +233,52 @@ class HookQuerySetMixin:
 
         # Merge extra CASE updates into kwargs for DB update
         if case_statements:
+            logger.debug(
+                f"Adding case statements to kwargs: {list(case_statements.keys())}"
+            )
+            for field_name, case_stmt in case_statements.items():
+                logger.debug(
+                    f"Case statement for {field_name}: {type(case_stmt).__name__}"
+                )
+                # Check if the case statement contains Subquery objects
+                if hasattr(case_stmt, "get_source_expressions"):
+                    source_exprs = case_stmt.get_source_expressions()
+                    for expr in source_exprs:
+                        if isinstance(expr, Subquery):
+                            logger.debug(
+                                f"Case statement for {field_name} contains Subquery"
+                            )
+                        elif hasattr(expr, "get_source_expressions"):
+                            # Check nested expressions (like Value objects)
+                            nested_exprs = expr.get_source_expressions()
+                            for nested_expr in nested_exprs:
+                                if isinstance(nested_expr, Subquery):
+                                    logger.debug(
+                                        f"Case statement for {field_name} contains nested Subquery"
+                                    )
+
             kwargs = {**kwargs, **case_statements}
 
         # Use Django's built-in update logic directly
         # Call the base QuerySet implementation to avoid recursion
-
+
         # Additional safety check: ensure Subquery objects are properly handled
         # This prevents the "cannot adapt type 'Subquery'" error
         safe_kwargs = {}
         logger.debug(f"Processing {len(kwargs)} kwargs for safety check")
-
+
         for key, value in kwargs.items():
-            logger.debug(f"Processing key '{key}' with value type {type(value).__name__}")
-
+            logger.debug(
+                f"Processing key '{key}' with value type {type(value).__name__}"
+            )
+
             if isinstance(value, Subquery):
                 logger.debug(f"Found Subquery for field {key}")
                 # Ensure Subquery has proper output_field
-                if not hasattr(value, 'output_field') or value.output_field is None:
-                    logger.warning(f"Subquery for field {key} missing output_field, attempting to infer")
+                if not hasattr(value, "output_field") or value.output_field is None:
+                    logger.warning(
+                        f"Subquery for field {key} missing output_field, attempting to infer"
+                    )
                     # Try to infer from the model field
                     try:
                         field = model_cls._meta.get_field(key)
@@ -203,18 +287,73 @@ class HookQuerySetMixin:
                             value.output_field = field
                             logger.debug(f"Set output_field to {field}")
                     except Exception as e:
-                        logger.error(f"Failed to infer output_field for Subquery on {key}: {e}")
+                        logger.error(
+                            f"Failed to infer output_field for Subquery on {key}: {e}"
+                        )
                         raise
                 else:
-                    logger.debug(f"Subquery for field {key} already has output_field: {value.output_field}")
+                    logger.debug(
+                        f"Subquery for field {key} already has output_field: {value.output_field}"
+                    )
                 safe_kwargs[key] = value
+            elif hasattr(value, "get_source_expressions") and hasattr(
+                value, "resolve_expression"
+            ):
+                # Handle Case statements and other complex expressions
+                logger.debug(
+                    f"Found complex expression for field {key}: {type(value).__name__}"
+                )
+
+                # Check if this expression contains any Subquery objects
+                source_expressions = value.get_source_expressions()
+                has_nested_subquery = False
+
+                for expr in source_expressions:
+                    if isinstance(expr, Subquery):
+                        has_nested_subquery = True
+                        logger.debug(f"Found nested Subquery in {type(value).__name__}")
+                        # Ensure the nested Subquery has proper output_field
+                        if (
+                            not hasattr(expr, "output_field")
+                            or expr.output_field is None
+                        ):
+                            try:
+                                field = model_cls._meta.get_field(key)
+                                expr.output_field = field
+                                logger.debug(
+                                    f"Set output_field for nested Subquery to {field}"
+                                )
+                            except Exception as e:
+                                logger.error(
+                                    f"Failed to set output_field for nested Subquery: {e}"
+                                )
+                                raise
+
+                if has_nested_subquery:
+                    logger.debug(
+                        f"Expression contains Subquery, ensuring proper output_field"
+                    )
+                    # Try to resolve the expression to ensure it's properly formatted
+                    try:
+                        resolved_value = value.resolve_expression(None, None)
+                        safe_kwargs[key] = resolved_value
+                        logger.debug(f"Successfully resolved expression for {key}")
+                    except Exception as e:
+                        logger.error(f"Failed to resolve expression for {key}: {e}")
+                        raise
+                else:
+                    safe_kwargs[key] = value
             else:
-                logger.debug(f"Non-Subquery value for field {key}: {type(value).__name__}")
+                logger.debug(
+                    f"Non-Subquery value for field {key}: {type(value).__name__}"
+                )
                 safe_kwargs[key] = value
-
+
         logger.debug(f"Safe kwargs keys: {list(safe_kwargs.keys())}")
-        logger.debug(f"Safe kwargs types: {[(k, type(v).__name__) for k, v in safe_kwargs.items()]}")
-
+        logger.debug(
+            f"Safe kwargs types: {[(k, type(v).__name__) for k, v in safe_kwargs.items()]}"
+        )
+
         logger.debug(f"Calling super().update() with {len(safe_kwargs)} kwargs")
         try:
             update_count = super().update(**safe_kwargs)
@@ -250,10 +389,10 @@ class HookQuerySetMixin:
         # For subquery operations, we need to run hooks even if we're in a bulk context
         # because subqueries bypass the normal object-level update flow
         should_run_hooks = (
-            not current_bypass_hooks or
-            has_subquery # Always run hooks for subquery operations
+            not current_bypass_hooks
+            or has_subquery  # Always run hooks for subquery operations
         )
-
+
         if should_run_hooks:
             logger.debug("update: running AFTER_UPDATE")
             engine.run(model_cls, AFTER_UPDATE, instances, originals, ctx=ctx)
@@ -460,6 +599,9 @@ class HookQuerySetMixin:
         """
         Detect fields that were modified during BEFORE_UPDATE hooks by comparing
         new instances with their original values.
+
+        IMPORTANT: Skip fields that contain Django expression objects (Subquery, Case, etc.)
+        as these should not be treated as in-memory modifications.
         """
         if not original_instances:
             return set()
@@ -476,15 +618,28 @@ class HookQuerySetMixin:
                 if field.name == "id":
                     continue
 
+                # Get the new value to check if it's an expression object
+                new_value = getattr(new_instance, field.name)
+
+                # Skip fields that contain expression objects - these are not in-memory modifications
+                # but rather database-level expressions that should not be applied to instances
+                from django.db.models import Subquery
+
+                if isinstance(new_value, Subquery) or hasattr(
+                    new_value, "resolve_expression"
+                ):
+                    logger.debug(
+                        f"Skipping field {field.name} with expression value: {type(new_value).__name__}"
+                    )
+                    continue
+
                 # Handle different field types appropriately
                 if field.is_relation:
                     # Compare by raw id values to catch cases where only <fk>_id was set
-                    new_pk = getattr(new_instance, field.attname, None)
                     original_pk = getattr(original, field.attname, None)
-                    if new_pk != original_pk:
+                    if new_value != original_pk:
                         modified_fields.add(field.name)
                 else:
-                    new_value = getattr(new_instance, field.name)
                     original_value = getattr(original, field.name)
                     if new_value != original_value:
                         modified_fields.add(field.name)
django_bulk_hooks-0.1.242.dist-info/METADATA

@@ -1,7 +1,8 @@
-Metadata-Version: 2.3
+Metadata-Version: 2.1
 Name: django-bulk-hooks
-Version: 0.1.240
+Version: 0.1.242
 Summary: Hook-style hooks for Django bulk operations like bulk_create and bulk_update.
+Home-page: https://github.com/AugendLimited/django-bulk-hooks
 License: MIT
 Keywords: django,bulk,hooks
 Author: Konrad Beck
@@ -13,7 +14,6 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Requires-Dist: Django (>=4.0)
-Project-URL: Homepage, https://github.com/AugendLimited/django-bulk-hooks
 Project-URL: Repository, https://github.com/AugendLimited/django-bulk-hooks
 Description-Content-Type: text/markdown
 
django_bulk_hooks-0.1.242.dist-info/RECORD (new file)

@@ -0,0 +1,17 @@
+django_bulk_hooks/__init__.py,sha256=6afmyFwRwC4E9CSWyQdONFJUPl5PeXp3ZuTebd7Ic0Y,158
+django_bulk_hooks/conditions.py,sha256=V_f3Di2uCVUjoyfiU4BQCHmI4uUIRSRroApDcXlvnso,6349
+django_bulk_hooks/constants.py,sha256=3x1H1fSUUNo0DZONN7GUVDuySZctTR-jtByBHmAIX5w,303
+django_bulk_hooks/context.py,sha256=jlLsqGZbj__J0-iBUp1D6jTrlDEiX3qIo0XlywW4D9I,2244
+django_bulk_hooks/decorators.py,sha256=32ffydS9tARaG_WJoiVri7zJnfS2iMd7SuZ8L_sRAGM,7985
+django_bulk_hooks/engine.py,sha256=M3b7Rcb65PYAZTLfWrIRi99BUBPgSLCryL3MSjMVlfQ,2663
+django_bulk_hooks/enums.py,sha256=Zo8_tJzuzZ2IKfVc7gZ-0tWPT8q1QhqZbAyoh9ZVJbs,381
+django_bulk_hooks/handler.py,sha256=e_GACTQT-pFF-zL7POeo232MgOikUoCLcxDVInAUiBw,6207
+django_bulk_hooks/manager.py,sha256=nfWiwU5-yAoxdnQsUMohxtyCpkV0MBv6X3wmipr9eQY,3697
+django_bulk_hooks/models.py,sha256=WtSfc4GBOG_oOt8n37cVvid0MtFIGze9JYKSixil2y0,4370
+django_bulk_hooks/priority.py,sha256=HG_2D35nga68lBCZmSXTcplXrjFoRgZFRDOy4ROKonY,376
+django_bulk_hooks/queryset.py,sha256=WhJ6cExpg1BvuYpxuxGPdWPS08iWveEo9KAhU9lp12g,49715
+django_bulk_hooks/registry.py,sha256=GRUTGVQEO2sdkC9OaZ9Q3U7mM-3Ix83uTyvrlTtpatw,1317
+django_bulk_hooks-0.1.242.dist-info/LICENSE,sha256=dguKIcbDGeZD-vXWdLyErPUALYOvtX_fO4Zjhq481uk,1088
+django_bulk_hooks-0.1.242.dist-info/METADATA,sha256=kGNukCh3L0jhdTKuEcgzJJcBUE33z_fkUqugJQAsw68,9049
+django_bulk_hooks-0.1.242.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+django_bulk_hooks-0.1.242.dist-info/RECORD,,
django_bulk_hooks-0.1.242.dist-info/WHEEL

@@ -1,4 +1,4 @@
 Wheel-Version: 1.0
-Generator: poetry-core 2.1.3
+Generator: poetry-core 1.9.1
 Root-Is-Purelib: true
 Tag: py3-none-any
django_bulk_hooks-0.1.240.dist-info/RECORD (removed)

@@ -1,17 +0,0 @@
-django_bulk_hooks/__init__.py,sha256=hsbKduccFEcsV4KIw8CbxCUDOtLZwToCc-XP3sqNy-8,154
-django_bulk_hooks/conditions.py,sha256=V_f3Di2uCVUjoyfiU4BQCHmI4uUIRSRroApDcXlvnso,6349
-django_bulk_hooks/constants.py,sha256=3x1H1fSUUNo0DZONN7GUVDuySZctTR-jtByBHmAIX5w,303
-django_bulk_hooks/context.py,sha256=jlLsqGZbj__J0-iBUp1D6jTrlDEiX3qIo0XlywW4D9I,2244
-django_bulk_hooks/decorators.py,sha256=tBHjegw1qZgpJkKng1q7gMpd2UpSY2nH9f7oD1cWhr0,5735
-django_bulk_hooks/engine.py,sha256=t_kvgex6_iZEFc5LK-srBTZPe-1bdlYdip5LfWOc6lc,2411
-django_bulk_hooks/enums.py,sha256=Zo8_tJzuzZ2IKfVc7gZ-0tWPT8q1QhqZbAyoh9ZVJbs,381
-django_bulk_hooks/handler.py,sha256=Bx-W6yyiciKMyy-BRxUt3CmRPCrX9_LhQgU-5LaJTjg,6019
-django_bulk_hooks/manager.py,sha256=nfWiwU5-yAoxdnQsUMohxtyCpkV0MBv6X3wmipr9eQY,3697
-django_bulk_hooks/models.py,sha256=exnXYVKEVbYAXhChCP8VdWTnKCnm9DiTcokEIBee1I0,4350
-django_bulk_hooks/priority.py,sha256=HG_2D35nga68lBCZmSXTcplXrjFoRgZFRDOy4ROKonY,376
-django_bulk_hooks/queryset.py,sha256=bi8jE8yvl2ih0M3LbzW8TZvl7JFKbgysILRXIpci6KM,42055
-django_bulk_hooks/registry.py,sha256=GRUTGVQEO2sdkC9OaZ9Q3U7mM-3Ix83uTyvrlTtpatw,1317
-django_bulk_hooks-0.1.240.dist-info/LICENSE,sha256=dguKIcbDGeZD-vXWdLyErPUALYOvtX_fO4Zjhq481uk,1088
-django_bulk_hooks-0.1.240.dist-info/METADATA,sha256=hkqwdtVcH2Te0py2ryH-RXgAJpX93e0IDFteueUBYtQ,9061
-django_bulk_hooks-0.1.240.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
-django_bulk_hooks-0.1.240.dist-info/RECORD,,