django-bulk-hooks 0.1.274__tar.gz → 0.1.275__tar.gz

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in the registry.

Potentially problematic release: this version of django-bulk-hooks might be problematic.

Files changed (17):
  1. {django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.275}/PKG-INFO +3 -3
  2. {django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.275}/django_bulk_hooks/__init__.py +4 -4
  3. {django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.275}/django_bulk_hooks/handler.py +188 -188
  4. {django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.275}/django_bulk_hooks/queryset.py +114 -36
  5. {django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.275}/pyproject.toml +1 -1
  6. {django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.275}/LICENSE +0 -0
  7. {django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.275}/README.md +0 -0
  8. {django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.275}/django_bulk_hooks/conditions.py +0 -0
  9. {django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.275}/django_bulk_hooks/constants.py +0 -0
  10. {django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.275}/django_bulk_hooks/context.py +0 -0
  11. {django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.275}/django_bulk_hooks/decorators.py +0 -0
  12. {django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.275}/django_bulk_hooks/engine.py +0 -0
  13. {django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.275}/django_bulk_hooks/enums.py +0 -0
  14. {django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.275}/django_bulk_hooks/manager.py +0 -0
  15. {django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.275}/django_bulk_hooks/models.py +0 -0
  16. {django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.275}/django_bulk_hooks/priority.py +0 -0
  17. {django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.275}/django_bulk_hooks/registry.py +0 -0
PKG-INFO

```diff
@@ -1,7 +1,8 @@
-Metadata-Version: 2.3
+Metadata-Version: 2.1
 Name: django-bulk-hooks
-Version: 0.1.274
+Version: 0.1.275
 Summary: Hook-style hooks for Django bulk operations like bulk_create and bulk_update.
+Home-page: https://github.com/AugendLimited/django-bulk-hooks
 License: MIT
 Keywords: django,bulk,hooks
 Author: Konrad Beck
@@ -13,7 +14,6 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Requires-Dist: django (>=5.2.0,<6.0.0)
-Project-URL: Homepage, https://github.com/AugendLimited/django-bulk-hooks
 Project-URL: Repository, https://github.com/AugendLimited/django-bulk-hooks
 Description-Content-Type: text/markdown
 
```
django_bulk_hooks/__init__.py

```diff
@@ -1,4 +1,4 @@
-from django_bulk_hooks.handler import Hook as HookClass
-from django_bulk_hooks.manager import BulkHookManager
-
-__all__ = ["BulkHookManager", "HookClass"]
+from django_bulk_hooks.handler import Hook as HookClass
+from django_bulk_hooks.manager import BulkHookManager
-
+
+__all__ = ["BulkHookManager", "HookClass"]
```

The removed and re-added lines are identical as rendered, so this is most likely a whitespace or line-ending-only change.
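Both exported names come straight from the top-level package:

```python
# Public names re-exported by django_bulk_hooks/__init__.py
from django_bulk_hooks import BulkHookManager, HookClass
```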
django_bulk_hooks/handler.py

All 188 lines of this file are removed and re-added with identical rendered content (`@@ -1,188 +1,188 @@`), which suggests a whitespace or line-ending-only change. The file is shown once below rather than as duplicated -/+ pairs:

```python
import logging
import threading
from collections import deque

from django.db import transaction

from django_bulk_hooks.registry import get_hooks, register_hook

logger = logging.getLogger(__name__)


# Thread-local hook context and hook state
class HookVars(threading.local):
    def __init__(self):
        self.new = None
        self.old = None
        self.event = None
        self.model = None
        self.depth = 0


hook_vars = HookVars()

# Hook queue per thread
_hook_context = threading.local()


def get_hook_queue():
    if not hasattr(_hook_context, "queue"):
        _hook_context.queue = deque()
    return _hook_context.queue


class HookContextState:
    @property
    def is_before(self):
        return hook_vars.event.startswith("before_") if hook_vars.event else False

    @property
    def is_after(self):
        return hook_vars.event.startswith("after_") if hook_vars.event else False

    @property
    def is_create(self):
        return "create" in hook_vars.event if hook_vars.event else False

    @property
    def is_update(self):
        return "update" in hook_vars.event if hook_vars.event else False

    @property
    def new(self):
        return hook_vars.new

    @property
    def old(self):
        return hook_vars.old

    @property
    def model(self):
        return hook_vars.model


HookContext = HookContextState()


class HookMeta(type):
    _registered = set()

    def __new__(mcs, name, bases, namespace):
        cls = super().__new__(mcs, name, bases, namespace)
        for method_name, method in namespace.items():
            if hasattr(method, "hooks_hooks"):
                for model_cls, event, condition, priority in method.hooks_hooks:
                    key = (model_cls, event, cls, method_name)
                    if key not in HookMeta._registered:
                        register_hook(
                            model=model_cls,
                            event=event,
                            handler_cls=cls,
                            method_name=method_name,
                            condition=condition,
                            priority=priority,
                        )
                        HookMeta._registered.add(key)
        return cls


class Hook(metaclass=HookMeta):
    @classmethod
    def handle(
        cls,
        event: str,
        model: type,
        *,
        new_records: list = None,
        old_records: list = None,
        **kwargs,
    ) -> None:
        queue = get_hook_queue()
        queue.append((cls, event, model, new_records, old_records, kwargs))
        logger.debug(f"Added item to queue: {event}, depth: {hook_vars.depth}")

        # If we're already processing hooks (depth > 0), don't process the queue
        # The outermost call will process the entire queue
        if hook_vars.depth > 0:
            logger.debug(f"Depth > 0, returning without processing queue")
            return

        # Process the entire queue
        logger.debug(f"Processing queue with {len(queue)} items")
        while queue:
            item = queue.popleft()
            if len(item) == 6:
                cls_, event_, model_, new_, old_, kw_ = item
                logger.debug(f"Processing queue item: {event_}")
                # Call _process on the Hook class, not the calling class
                Hook._process(event_, model_, new_, old_, **kw_)
            else:
                logger.warning(f"Invalid queue item format: {item}")
                continue

    @classmethod
    def _process(
        cls,
        event,
        model,
        new_records,
        old_records,
        **kwargs,
    ):
        hook_vars.depth += 1
        hook_vars.new = new_records
        hook_vars.old = old_records
        hook_vars.event = event
        hook_vars.model = model

        hooks = sorted(get_hooks(model, event), key=lambda x: x[3])
        logger.debug(f"Found {len(hooks)} hooks for {event}")

        def _execute():
            logger.debug(f"Executing {len(hooks)} hooks for {event}")
            new_local = new_records or []
            old_local = old_records or []
            if len(old_local) < len(new_local):
                old_local += [None] * (len(new_local) - len(old_local))

            for handler_cls, method_name, condition, priority in hooks:
                logger.debug(f"Processing hook {handler_cls.__name__}.{method_name}")
                if condition is not None:
                    checks = [
                        condition.check(n, o) for n, o in zip(new_local, old_local)
                    ]
                    if not any(checks):
                        logger.debug(f"Condition failed for {handler_cls.__name__}.{method_name}")
                        continue

                handler = handler_cls()
                method = getattr(handler, method_name)
                logger.debug(f"Executing {handler_cls.__name__}.{method_name}")

                try:
                    method(
                        new_records=new_local,
                        old_records=old_local,
                        **kwargs,
                    )
                    logger.debug(f"Successfully executed {handler_cls.__name__}.{method_name}")
                except Exception:
                    logger.exception(
                        "Error in hook %s.%s", handler_cls.__name__, method_name
                    )

        conn = transaction.get_connection()
        logger.debug(f"Transaction in_atomic_block: {conn.in_atomic_block}, event: {event}")
        try:
            if conn.in_atomic_block and event.startswith("after_"):
                logger.debug(f"Deferring {event} to on_commit")
                transaction.on_commit(_execute)
            else:
                logger.debug(f"Executing {event} immediately")
                _execute()
        finally:
            hook_vars.new = None
            hook_vars.old = None
            hook_vars.event = None
            hook_vars.model = None
            hook_vars.depth -= 1
```
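Taken together, `HookMeta` registers, at class-creation time, any method on a `Hook` subclass that carries `hooks_hooks` metadata (a list of `(model_cls, event, condition, priority)` tuples), and `Hook._process` defers `after_*` events to `transaction.on_commit` when already inside an atomic block. A minimal handler sketch follows; the model is hypothetical, and the metadata is attached by hand because the decorator in `django_bulk_hooks.decorators` that normally attaches it is not part of this diff:

```python
from django_bulk_hooks.handler import Hook

from myapp.models import Account  # hypothetical model


class AccountHooks(Hook):
    def on_created(self, new_records, old_records, **kwargs):
        # For create events, old_records is padded with None per record.
        for obj, old in zip(new_records, old_records):
            print(f"created {obj.pk} (previous state: {old})")

    # Normally attached by the decorator in django_bulk_hooks.decorators;
    # set by hand here since that decorator is outside this diff. The event
    # string is assumed to follow the before_*/after_* naming the handler
    # checks for.
    on_created.hooks_hooks = [(Account, "after_create", None, 0)]
```

Because registration is a side effect of class creation, importing the module that defines `AccountHooks` is enough to register it, and the `_registered` set keeps the same hook from being registered twice.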
django_bulk_hooks/queryset.py

```diff
@@ -486,11 +486,14 @@ class HookQuerySetMixin:
         passed through to the correct logic. For MTI, only a subset of options may be supported.
         """
         model_cls, ctx, originals = self._setup_bulk_operation(
-            objs, "bulk_create", require_pks=False,
-            bypass_hooks=bypass_hooks, bypass_validation=bypass_validation,
+            objs,
+            "bulk_create",
+            require_pks=False,
+            bypass_hooks=bypass_hooks,
+            bypass_validation=bypass_validation,
             update_conflicts=update_conflicts,
             unique_fields=unique_fields,
-            update_fields=update_fields
+            update_fields=update_fields,
         )
 
         # When you bulk insert you don't get the primary keys back (if it's an
@@ -524,9 +527,7 @@ class HookQuerySetMixin:
             existing_records = []
             new_records = []
 
-            # Store the records for AFTER hooks to avoid duplicate queries
-            ctx.upsert_existing_records = existing_records
-            ctx.upsert_new_records = new_records
+            # We'll store the records for AFTER hooks after classification is complete
 
             # Build a filter to check which records already exist
             unique_values = []
@@ -535,10 +536,12 @@
                 query_fields = {}  # Track which database field to use for each unique field
                 for field_name in unique_fields:
                     # First check for _id field (more reliable for ForeignKeys)
-                    if hasattr(obj, field_name + '_id'):
+                    if hasattr(obj, field_name + "_id"):
                         # Handle ForeignKey fields where _id suffix is used
-                        unique_value[field_name] = getattr(obj, field_name + '_id')
-                        query_fields[field_name] = field_name + '_id'  # Use _id field for query
+                        unique_value[field_name] = getattr(obj, field_name + "_id")
+                        query_fields[field_name] = (
+                            field_name + "_id"
+                        )  # Use _id field for query
                     elif hasattr(obj, field_name):
                         unique_value[field_name] = getattr(obj, field_name)
                         query_fields[field_name] = field_name
@@ -558,8 +561,12 @@
                     filter_kwargs[db_field_name] = value
                 existing_filters |= Q(**filter_kwargs)
 
-                logger.debug(f"DEBUG: Existence check query filters: {existing_filters}")
-                logger.debug(f"DEBUG: Unique fields for values_list: {unique_fields}")
+                logger.debug(
+                    f"DEBUG: Existence check query filters: {existing_filters}"
+                )
+                logger.debug(
+                    f"DEBUG: Unique fields for values_list: {unique_fields}"
+                )
 
                 # Get all existing records in one query and create a lookup set
                 # We need to use the original unique_fields for values_list to maintain consistency
@@ -581,7 +588,7 @@
                         db_value = existing_record[i]
                         # Check if this field uses _id suffix in the query
                         query_field_name = query_fields[field_name]
-                        if query_field_name.endswith('_id'):
+                        if query_field_name.endswith("_id"):
                             # Convert to string to match how we extract from objects
                             converted_record.append(str(db_value))
                         else:
@@ -589,35 +596,58 @@
                     converted_tuple = tuple(converted_record)
                     existing_records_lookup.add(converted_tuple)
 
-                logger.debug(f"DEBUG: Found {len(raw_existing)} existing records from DB")
-                logger.debug(f"DEBUG: Existing records lookup set: {existing_records_lookup}")
+                logger.debug(
+                    f"DEBUG: Found {len(raw_existing)} existing records from DB"
+                )
+                logger.debug(
+                    f"DEBUG: Existing records lookup set: {existing_records_lookup}"
+                )
 
                 # Separate records based on whether they already exist
                 for obj in objs:
                     obj_unique_value = {}
                     for field_name in unique_fields:
                         # First check for _id field (more reliable for ForeignKeys)
-                        if hasattr(obj, field_name + '_id'):
+                        if hasattr(obj, field_name + "_id"):
                             # Handle ForeignKey fields where _id suffix is used
-                            obj_unique_value[field_name] = getattr(obj, field_name + '_id')
+                            obj_unique_value[field_name] = getattr(
+                                obj, field_name + "_id"
+                            )
                         elif hasattr(obj, field_name):
                             obj_unique_value[field_name] = getattr(obj, field_name)
 
                     # Check if this record already exists using our bulk lookup
                     if obj_unique_value:
                         # Convert object values to tuple for comparison with existing records
-                        obj_unique_tuple = tuple(
-                            obj_unique_value[field_name]
-                            for field_name in unique_fields
+                        # Apply the same type conversion as we did for database values
+                        obj_unique_tuple = []
+                        for field_name in unique_fields:
+                            value = obj_unique_value[field_name]
+                            # Check if this field uses _id suffix in the query
+                            query_field_name = query_fields[field_name]
+                            if query_field_name.endswith("_id"):
+                                # Convert to string to match how we convert DB values
+                                obj_unique_tuple.append(str(value))
+                            else:
+                                obj_unique_tuple.append(value)
+                        obj_unique_tuple = tuple(obj_unique_tuple)
+
+                        logger.debug(
+                            f"DEBUG: Object unique tuple: {obj_unique_tuple}"
+                        )
+                        logger.debug(
+                            f"DEBUG: Object unique value: {obj_unique_value}"
                         )
-                        logger.debug(f"DEBUG: Object unique tuple: {obj_unique_tuple}")
-                        logger.debug(f"DEBUG: Object unique value: {obj_unique_value}")
                         if obj_unique_tuple in existing_records_lookup:
                             existing_records.append(obj)
-                            logger.debug(f"DEBUG: Found existing record for tuple: {obj_unique_tuple}")
+                            logger.debug(
+                                f"DEBUG: Found existing record for tuple: {obj_unique_tuple}"
+                            )
                         else:
                             new_records.append(obj)
-                            logger.debug(f"DEBUG: No existing record found for tuple: {obj_unique_tuple}")
+                            logger.debug(
+                                f"DEBUG: No existing record found for tuple: {obj_unique_tuple}"
+                            )
                     else:
                         # If we can't determine uniqueness, treat as new
                         new_records.append(obj)
```
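The substantive change in the `@@ -589` hunk above is that object-side values now get the same `str()` conversion as the database-side `_id` values before the membership test, so, for example, an `int` foreign-key id on an unsaved instance compares equal to the string key built from the DB rows. A standalone sketch of the idea, with illustrative names rather than the library's API:

```python
def normalize_key(values: dict, unique_fields: list, query_fields: dict) -> tuple:
    """Build a hashable lookup key, stringifying values queried via *_id columns.

    Mirrors the diff's approach: both the DB side and the object side run
    through this conversion, so mixed int/str FK ids still match.
    """
    key = []
    for name in unique_fields:
        value = values[name]
        key.append(str(value) if query_fields[name].endswith("_id") else value)
    return tuple(key)


# 42 on the object side matches "42" built from the DB row,
# because both sides are normalized the same way.
assert normalize_key({"owner": 42}, ["owner"], {"owner": "owner_id"}) == ("42",)
```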
```diff
@@ -625,6 +655,10 @@ class HookQuerySetMixin:
                 # If no unique fields, treat all as new
                 new_records = objs
 
+            # Store the classified records for AFTER hooks to avoid duplicate queries
+            ctx.upsert_existing_records = existing_records
+            ctx.upsert_new_records = new_records
+
             # Handle auto_now fields intelligently for upsert operations
             # Only set auto_now fields on records that will actually be created
             self._handle_auto_now_fields(new_records, add=True)
```
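This hunk completes the move started in the `@@ -524` hunk: `ctx.upsert_existing_records` and `ctx.upsert_new_records` are now assigned only after classification. The order matters because the no-unique-fields branch rebinds `new_records = objs` to a different list object, so a reference stored before classification could keep pointing at the stale, empty list. A minimal illustration of the pitfall:

```python
class Ctx:
    pass


ctx = Ctx()
new_records = []

# Storing early captures the current list object...
ctx.upsert_new_records = new_records

# ...but a later rebinding replaces the name, not the object,
# so the stored reference still sees the old empty list.
new_records = ["a", "b"]
assert ctx.upsert_new_records == []

# Assigning after classification, as the patched code does,
# stores the list that was actually produced.
ctx.upsert_new_records = new_records
assert ctx.upsert_new_records == ["a", "b"]
```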
```diff
@@ -992,7 +1026,9 @@ class HookQuerySetMixin:
 
         changed_fields = self._detect_changed_fields(objs)
         is_mti = self._is_multi_table_inheritance()
-        hook_context, originals = self._init_hook_context(bypass_hooks, objs, "bulk_update")
+        hook_context, originals = self._init_hook_context(
+            bypass_hooks, objs, "bulk_update"
+        )
 
         fields_set, auto_now_fields, custom_update_fields = self._prepare_update_fields(
             changed_fields
@@ -1175,7 +1211,9 @@ class HookQuerySetMixin:
             operation_name,
         )
 
-    def _init_hook_context(self, bypass_hooks: bool, objs, operation_name="bulk_update"):
+    def _init_hook_context(
+        self, bypass_hooks: bool, objs, operation_name="bulk_update"
+    ):
         """
         Initialize the hook context for bulk operations.
 
@@ -1192,7 +1230,9 @@ class HookQuerySetMixin:
         model_cls = self.model
 
         if bypass_hooks:
-            logger.debug("%s: hooks bypassed for %s", operation_name, model_cls.__name__)
+            logger.debug(
+                "%s: hooks bypassed for %s", operation_name, model_cls.__name__
+            )
             ctx = HookContext(model_cls, bypass_hooks=True)
         else:
             logger.debug("%s: hooks enabled for %s", operation_name, model_cls.__name__)
@@ -1311,7 +1351,18 @@ class HookQuerySetMixin:
 
         return list(set(handled_fields))  # Remove duplicates
 
-    def _execute_hooks_with_operation(self, operation_func, validate_hook, before_hook, after_hook, objs, originals=None, ctx=None, bypass_hooks=False, bypass_validation=False):
+    def _execute_hooks_with_operation(
+        self,
+        operation_func,
+        validate_hook,
+        before_hook,
+        after_hook,
+        objs,
+        originals=None,
+        ctx=None,
+        bypass_hooks=False,
+        bypass_validation=False,
+    ):
         """
         Execute the complete hook lifecycle around a database operation.
 
@@ -1371,10 +1422,21 @@ class HookQuerySetMixin:
         param_str = f", {', '.join(param_parts)}"
 
         # Use both print and logger for consistency with existing patterns
-        print(f"DEBUG: {operation_name} called for {model_cls.__name__} with {len(objs)} objects{param_str}")
-        logger.debug(f"{operation_name} called for {model_cls.__name__} with {len(objs)} objects{param_str}")
+        print(
+            f"DEBUG: {operation_name} called for {model_cls.__name__} with {len(objs)} objects{param_str}"
+        )
+        logger.debug(
+            f"{operation_name} called for {model_cls.__name__} with {len(objs)} objects{param_str}"
+        )
 
-    def _execute_delete_hooks_with_operation(self, operation_func, objs, ctx=None, bypass_hooks=False, bypass_validation=False):
+    def _execute_delete_hooks_with_operation(
+        self,
+        operation_func,
+        objs,
+        ctx=None,
+        bypass_hooks=False,
+        bypass_validation=False,
+    ):
         """
         Execute hooks for delete operations with special field caching logic.
 
@@ -1426,7 +1488,15 @@ class HookQuerySetMixin:
 
         return result
 
-    def _setup_bulk_operation(self, objs, operation_name, require_pks=False, bypass_hooks=False, bypass_validation=False, **log_kwargs):
+    def _setup_bulk_operation(
+        self,
+        objs,
+        operation_name,
+        require_pks=False,
+        bypass_hooks=False,
+        bypass_validation=False,
+        **log_kwargs,
+    ):
         """
         Common setup logic for bulk operations.
 
@@ -1445,7 +1515,9 @@ class HookQuerySetMixin:
         self._log_bulk_operation_start(operation_name, objs, **log_kwargs)
 
         # Validate objects
-        self._validate_objects(objs, require_pks=require_pks, operation_name=operation_name)
+        self._validate_objects(
+            objs, require_pks=require_pks, operation_name=operation_name
+        )
 
         # Initialize hook context
         ctx, originals = self._init_hook_context(bypass_hooks, objs, operation_name)
@@ -1986,8 +2058,11 @@ class HookQuerySetMixin:
             return 0
 
         model_cls, ctx, _ = self._setup_bulk_operation(
-            objs, "bulk_delete", require_pks=True,
-            bypass_hooks=bypass_hooks, bypass_validation=bypass_validation
+            objs,
+            "bulk_delete",
+            require_pks=True,
+            bypass_hooks=bypass_hooks,
+            bypass_validation=bypass_validation,
         )
 
         # Execute the database operation with hooks
@@ -2000,8 +2075,11 @@ class HookQuerySetMixin:
             return 0
 
         result = self._execute_delete_hooks_with_operation(
-            delete_operation, objs, ctx=ctx,
-            bypass_hooks=bypass_hooks, bypass_validation=bypass_validation
+            delete_operation,
+            objs,
+            ctx=ctx,
+            bypass_hooks=bypass_hooks,
+            bypass_validation=bypass_validation,
         )
 
         return result
```
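The remaining queryset.py hunks are mechanical reflows of long signatures and calls into one-argument-per-line form with trailing commas (Black-style); behavior is unchanged. For orientation, a hedged sketch of how the reflowed `bulk_create` entry point is typically driven; the model is hypothetical, and the conflict options mirror Django's standard `bulk_create` parameters, which `_setup_bulk_operation` receives here:

```python
from myapp.models import Account  # hypothetical model whose manager uses the hook queryset

accounts = [Account(external_id=i, balance=0) for i in range(100)]

# Upsert-style call: the mixin classifies rows into existing vs. new
# (the logic patched above) so AFTER hooks can tell them apart.
Account.objects.bulk_create(
    accounts,
    update_conflicts=True,
    unique_fields=["external_id"],
    update_fields=["balance"],
)

# Both bypass flags appear in the reflowed signatures and skip
# hook dispatch and validation respectively.
Account.objects.bulk_create(accounts, bypass_hooks=True, bypass_validation=True)
```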
pyproject.toml

```diff
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "django-bulk-hooks"
-version = "0.1.274"
+version = "0.1.275"
 description = "Hook-style hooks for Django bulk operations like bulk_create and bulk_update."
 authors = ["Konrad Beck <konrad.beck@merchantcapital.co.za>"]
 readme = "README.md"
```