django-bulk-hooks 0.1.273__tar.gz → 0.1.275__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of django-bulk-hooks might be problematic.

Files changed (17)
  1. {django_bulk_hooks-0.1.273 → django_bulk_hooks-0.1.275}/PKG-INFO +3 -3
  2. {django_bulk_hooks-0.1.273 → django_bulk_hooks-0.1.275}/django_bulk_hooks/__init__.py +4 -4
  3. {django_bulk_hooks-0.1.273 → django_bulk_hooks-0.1.275}/django_bulk_hooks/handler.py +188 -188
  4. {django_bulk_hooks-0.1.273 → django_bulk_hooks-0.1.275}/django_bulk_hooks/queryset.py +129 -39
  5. {django_bulk_hooks-0.1.273 → django_bulk_hooks-0.1.275}/pyproject.toml +1 -1
  6. {django_bulk_hooks-0.1.273 → django_bulk_hooks-0.1.275}/LICENSE +0 -0
  7. {django_bulk_hooks-0.1.273 → django_bulk_hooks-0.1.275}/README.md +0 -0
  8. {django_bulk_hooks-0.1.273 → django_bulk_hooks-0.1.275}/django_bulk_hooks/conditions.py +0 -0
  9. {django_bulk_hooks-0.1.273 → django_bulk_hooks-0.1.275}/django_bulk_hooks/constants.py +0 -0
  10. {django_bulk_hooks-0.1.273 → django_bulk_hooks-0.1.275}/django_bulk_hooks/context.py +0 -0
  11. {django_bulk_hooks-0.1.273 → django_bulk_hooks-0.1.275}/django_bulk_hooks/decorators.py +0 -0
  12. {django_bulk_hooks-0.1.273 → django_bulk_hooks-0.1.275}/django_bulk_hooks/engine.py +0 -0
  13. {django_bulk_hooks-0.1.273 → django_bulk_hooks-0.1.275}/django_bulk_hooks/enums.py +0 -0
  14. {django_bulk_hooks-0.1.273 → django_bulk_hooks-0.1.275}/django_bulk_hooks/manager.py +0 -0
  15. {django_bulk_hooks-0.1.273 → django_bulk_hooks-0.1.275}/django_bulk_hooks/models.py +0 -0
  16. {django_bulk_hooks-0.1.273 → django_bulk_hooks-0.1.275}/django_bulk_hooks/priority.py +0 -0
  17. {django_bulk_hooks-0.1.273 → django_bulk_hooks-0.1.275}/django_bulk_hooks/registry.py +0 -0
{django_bulk_hooks-0.1.273 → django_bulk_hooks-0.1.275}/PKG-INFO
@@ -1,7 +1,8 @@
- Metadata-Version: 2.3
+ Metadata-Version: 2.1
  Name: django-bulk-hooks
- Version: 0.1.273
+ Version: 0.1.275
  Summary: Hook-style hooks for Django bulk operations like bulk_create and bulk_update.
+ Home-page: https://github.com/AugendLimited/django-bulk-hooks
  License: MIT
  Keywords: django,bulk,hooks
  Author: Konrad Beck
@@ -13,7 +14,6 @@ Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
  Classifier: Programming Language :: Python :: 3.13
  Requires-Dist: django (>=5.2.0,<6.0.0)
- Project-URL: Homepage, https://github.com/AugendLimited/django-bulk-hooks
  Project-URL: Repository, https://github.com/AugendLimited/django-bulk-hooks
  Description-Content-Type: text/markdown

{django_bulk_hooks-0.1.273 → django_bulk_hooks-0.1.275}/django_bulk_hooks/__init__.py
@@ -1,4 +1,4 @@
- from django_bulk_hooks.handler import Hook as HookClass
- from django_bulk_hooks.manager import BulkHookManager
-
- __all__ = ["BulkHookManager", "HookClass"]
+ from django_bulk_hooks.handler import Hook as HookClass
+ from django_bulk_hooks.manager import BulkHookManager
+
+ __all__ = ["BulkHookManager", "HookClass"]
{django_bulk_hooks-0.1.273 → django_bulk_hooks-0.1.275}/django_bulk_hooks/handler.py
@@ -1,188 +1,188 @@
- import logging
- import threading
- from collections import deque
-
- from django.db import transaction
-
- from django_bulk_hooks.registry import get_hooks, register_hook
-
- logger = logging.getLogger(__name__)
-
-
- # Thread-local hook context and hook state
- class HookVars(threading.local):
-     def __init__(self):
-         self.new = None
-         self.old = None
-         self.event = None
-         self.model = None
-         self.depth = 0
-
-
- hook_vars = HookVars()
-
- # Hook queue per thread
- _hook_context = threading.local()
-
-
- def get_hook_queue():
-     if not hasattr(_hook_context, "queue"):
-         _hook_context.queue = deque()
-     return _hook_context.queue
-
-
- class HookContextState:
-     @property
-     def is_before(self):
-         return hook_vars.event.startswith("before_") if hook_vars.event else False
-
-     @property
-     def is_after(self):
-         return hook_vars.event.startswith("after_") if hook_vars.event else False
-
-     @property
-     def is_create(self):
-         return "create" in hook_vars.event if hook_vars.event else False
-
-     @property
-     def is_update(self):
-         return "update" in hook_vars.event if hook_vars.event else False
-
-     @property
-     def new(self):
-         return hook_vars.new
-
-     @property
-     def old(self):
-         return hook_vars.old
-
-     @property
-     def model(self):
-         return hook_vars.model
-
-
- HookContext = HookContextState()
-
-
- class HookMeta(type):
-     _registered = set()
-
-     def __new__(mcs, name, bases, namespace):
-         cls = super().__new__(mcs, name, bases, namespace)
-         for method_name, method in namespace.items():
-             if hasattr(method, "hooks_hooks"):
-                 for model_cls, event, condition, priority in method.hooks_hooks:
-                     key = (model_cls, event, cls, method_name)
-                     if key not in HookMeta._registered:
-                         register_hook(
-                             model=model_cls,
-                             event=event,
-                             handler_cls=cls,
-                             method_name=method_name,
-                             condition=condition,
-                             priority=priority,
-                         )
-                         HookMeta._registered.add(key)
-         return cls
-
-
- class Hook(metaclass=HookMeta):
-     @classmethod
-     def handle(
-         cls,
-         event: str,
-         model: type,
-         *,
-         new_records: list = None,
-         old_records: list = None,
-         **kwargs,
-     ) -> None:
-         queue = get_hook_queue()
-         queue.append((cls, event, model, new_records, old_records, kwargs))
-         logger.debug(f"Added item to queue: {event}, depth: {hook_vars.depth}")
-
-         # If we're already processing hooks (depth > 0), don't process the queue
-         # The outermost call will process the entire queue
-         if hook_vars.depth > 0:
-             logger.debug(f"Depth > 0, returning without processing queue")
-             return
-
-         # Process the entire queue
-         logger.debug(f"Processing queue with {len(queue)} items")
-         while queue:
-             item = queue.popleft()
-             if len(item) == 6:
-                 cls_, event_, model_, new_, old_, kw_ = item
-                 logger.debug(f"Processing queue item: {event_}")
-                 # Call _process on the Hook class, not the calling class
-                 Hook._process(event_, model_, new_, old_, **kw_)
-             else:
-                 logger.warning(f"Invalid queue item format: {item}")
-                 continue
-
-     @classmethod
-     def _process(
-         cls,
-         event,
-         model,
-         new_records,
-         old_records,
-         **kwargs,
-     ):
-         hook_vars.depth += 1
-         hook_vars.new = new_records
-         hook_vars.old = old_records
-         hook_vars.event = event
-         hook_vars.model = model
-
-         hooks = sorted(get_hooks(model, event), key=lambda x: x[3])
-         logger.debug(f"Found {len(hooks)} hooks for {event}")
-
-         def _execute():
-             logger.debug(f"Executing {len(hooks)} hooks for {event}")
-             new_local = new_records or []
-             old_local = old_records or []
-             if len(old_local) < len(new_local):
-                 old_local += [None] * (len(new_local) - len(old_local))
-
-             for handler_cls, method_name, condition, priority in hooks:
-                 logger.debug(f"Processing hook {handler_cls.__name__}.{method_name}")
-                 if condition is not None:
-                     checks = [
-                         condition.check(n, o) for n, o in zip(new_local, old_local)
-                     ]
-                     if not any(checks):
-                         logger.debug(f"Condition failed for {handler_cls.__name__}.{method_name}")
-                         continue
-
-                 handler = handler_cls()
-                 method = getattr(handler, method_name)
-                 logger.debug(f"Executing {handler_cls.__name__}.{method_name}")
-
-                 try:
-                     method(
-                         new_records=new_local,
-                         old_records=old_local,
-                         **kwargs,
-                     )
-                     logger.debug(f"Successfully executed {handler_cls.__name__}.{method_name}")
-                 except Exception:
-                     logger.exception(
-                         "Error in hook %s.%s", handler_cls.__name__, method_name
-                     )
-
-         conn = transaction.get_connection()
-         logger.debug(f"Transaction in_atomic_block: {conn.in_atomic_block}, event: {event}")
-         try:
-             if conn.in_atomic_block and event.startswith("after_"):
-                 logger.debug(f"Deferring {event} to on_commit")
-                 transaction.on_commit(_execute)
-             else:
-                 logger.debug(f"Executing {event} immediately")
-                 _execute()
-         finally:
-             hook_vars.new = None
-             hook_vars.old = None
-             hook_vars.event = None
-             hook_vars.model = None
-             hook_vars.depth -= 1
+ import logging
+ import threading
+ from collections import deque
+
+ from django.db import transaction
+
+ from django_bulk_hooks.registry import get_hooks, register_hook
+
+ logger = logging.getLogger(__name__)
+
+
+ # Thread-local hook context and hook state
+ class HookVars(threading.local):
+     def __init__(self):
+         self.new = None
+         self.old = None
+         self.event = None
+         self.model = None
+         self.depth = 0
+
+
+ hook_vars = HookVars()
+
+ # Hook queue per thread
+ _hook_context = threading.local()
+
+
+ def get_hook_queue():
+     if not hasattr(_hook_context, "queue"):
+         _hook_context.queue = deque()
+     return _hook_context.queue
+
+
+ class HookContextState:
+     @property
+     def is_before(self):
+         return hook_vars.event.startswith("before_") if hook_vars.event else False
+
+     @property
+     def is_after(self):
+         return hook_vars.event.startswith("after_") if hook_vars.event else False
+
+     @property
+     def is_create(self):
+         return "create" in hook_vars.event if hook_vars.event else False
+
+     @property
+     def is_update(self):
+         return "update" in hook_vars.event if hook_vars.event else False
+
+     @property
+     def new(self):
+         return hook_vars.new
+
+     @property
+     def old(self):
+         return hook_vars.old
+
+     @property
+     def model(self):
+         return hook_vars.model
+
+
+ HookContext = HookContextState()
+
+
+ class HookMeta(type):
+     _registered = set()
+
+     def __new__(mcs, name, bases, namespace):
+         cls = super().__new__(mcs, name, bases, namespace)
+         for method_name, method in namespace.items():
+             if hasattr(method, "hooks_hooks"):
+                 for model_cls, event, condition, priority in method.hooks_hooks:
+                     key = (model_cls, event, cls, method_name)
+                     if key not in HookMeta._registered:
+                         register_hook(
+                             model=model_cls,
+                             event=event,
+                             handler_cls=cls,
+                             method_name=method_name,
+                             condition=condition,
+                             priority=priority,
+                         )
+                         HookMeta._registered.add(key)
+         return cls
+
+
+ class Hook(metaclass=HookMeta):
+     @classmethod
+     def handle(
+         cls,
+         event: str,
+         model: type,
+         *,
+         new_records: list = None,
+         old_records: list = None,
+         **kwargs,
+     ) -> None:
+         queue = get_hook_queue()
+         queue.append((cls, event, model, new_records, old_records, kwargs))
+         logger.debug(f"Added item to queue: {event}, depth: {hook_vars.depth}")
+
+         # If we're already processing hooks (depth > 0), don't process the queue
+         # The outermost call will process the entire queue
+         if hook_vars.depth > 0:
+             logger.debug(f"Depth > 0, returning without processing queue")
+             return
+
+         # Process the entire queue
+         logger.debug(f"Processing queue with {len(queue)} items")
+         while queue:
+             item = queue.popleft()
+             if len(item) == 6:
+                 cls_, event_, model_, new_, old_, kw_ = item
+                 logger.debug(f"Processing queue item: {event_}")
+                 # Call _process on the Hook class, not the calling class
+                 Hook._process(event_, model_, new_, old_, **kw_)
+             else:
+                 logger.warning(f"Invalid queue item format: {item}")
+                 continue
+
+     @classmethod
+     def _process(
+         cls,
+         event,
+         model,
+         new_records,
+         old_records,
+         **kwargs,
+     ):
+         hook_vars.depth += 1
+         hook_vars.new = new_records
+         hook_vars.old = old_records
+         hook_vars.event = event
+         hook_vars.model = model
+
+         hooks = sorted(get_hooks(model, event), key=lambda x: x[3])
+         logger.debug(f"Found {len(hooks)} hooks for {event}")
+
+         def _execute():
+             logger.debug(f"Executing {len(hooks)} hooks for {event}")
+             new_local = new_records or []
+             old_local = old_records or []
+             if len(old_local) < len(new_local):
+                 old_local += [None] * (len(new_local) - len(old_local))
+
+             for handler_cls, method_name, condition, priority in hooks:
+                 logger.debug(f"Processing hook {handler_cls.__name__}.{method_name}")
+                 if condition is not None:
+                     checks = [
+                         condition.check(n, o) for n, o in zip(new_local, old_local)
+                     ]
+                     if not any(checks):
+                         logger.debug(f"Condition failed for {handler_cls.__name__}.{method_name}")
+                         continue
+
+                 handler = handler_cls()
+                 method = getattr(handler, method_name)
+                 logger.debug(f"Executing {handler_cls.__name__}.{method_name}")
+
+                 try:
+                     method(
+                         new_records=new_local,
+                         old_records=old_local,
+                         **kwargs,
+                     )
+                     logger.debug(f"Successfully executed {handler_cls.__name__}.{method_name}")
+                 except Exception:
+                     logger.exception(
+                         "Error in hook %s.%s", handler_cls.__name__, method_name
+                     )
+
+         conn = transaction.get_connection()
+         logger.debug(f"Transaction in_atomic_block: {conn.in_atomic_block}, event: {event}")
+         try:
+             if conn.in_atomic_block and event.startswith("after_"):
+                 logger.debug(f"Deferring {event} to on_commit")
+                 transaction.on_commit(_execute)
+             else:
+                 logger.debug(f"Executing {event} immediately")
+                 _execute()
+         finally:
+             hook_vars.new = None
+             hook_vars.old = None
+             hook_vars.event = None
+             hook_vars.model = None
+             hook_vars.depth -= 1
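Both sides of the handler.py hunk above are textually identical, so the 188-line change is apparently a formatting or line-ending normalization rather than a behavioral one. The file does show the package's registration contract: `HookMeta` registers any class method carrying a `hooks_hooks` attribute of `(model_cls, event, condition, priority)` tuples, and `Hook.handle()` queues events per thread, deferring `after_*` hooks to `transaction.on_commit` inside atomic blocks. A minimal sketch of a handler that satisfies that contract — the `Account` model is hypothetical, and the packaged `decorators` module presumably attaches `hooks_hooks` for you, though its API is not shown in this diff:

```python
from django_bulk_hooks.handler import Hook
from myapp.models import Account  # hypothetical model, not part of the package


class AccountHooks(Hook):
    # Hook._process instantiates the handler and calls
    # method(new_records=..., old_records=..., **kwargs)
    def log_created(self, new_records, old_records, **kwargs):
        for obj in new_records:
            print(f"created Account pk={obj.pk}")

    # Attach the metadata HookMeta scans for; the real decorator API may differ.
    log_created.hooks_hooks = [(Account, "after_create", None, 0)]


# Dispatching an event drains the thread-local queue at depth 0:
# Hook.handle("after_create", Account, new_records=created_objs)
```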
{django_bulk_hooks-0.1.273 → django_bulk_hooks-0.1.275}/django_bulk_hooks/queryset.py
@@ -486,11 +486,14 @@ class HookQuerySetMixin:
          passed through to the correct logic. For MTI, only a subset of options may be supported.
          """
          model_cls, ctx, originals = self._setup_bulk_operation(
-             objs, "bulk_create", require_pks=False,
-             bypass_hooks=bypass_hooks, bypass_validation=bypass_validation,
+             objs,
+             "bulk_create",
+             require_pks=False,
+             bypass_hooks=bypass_hooks,
+             bypass_validation=bypass_validation,
              update_conflicts=update_conflicts,
              unique_fields=unique_fields,
-             update_fields=update_fields
+             update_fields=update_fields,
          )

          # When you bulk insert you don't get the primary keys back (if it's an
@@ -524,9 +527,7 @@ class HookQuerySetMixin:
              existing_records = []
              new_records = []

-             # Store the records for AFTER hooks to avoid duplicate queries
-             ctx.upsert_existing_records = existing_records
-             ctx.upsert_new_records = new_records
+             # We'll store the records for AFTER hooks after classification is complete

              # Build a filter to check which records already exist
              unique_values = []
@@ -535,10 +536,12 @@ class HookQuerySetMixin:
                  query_fields = {}  # Track which database field to use for each unique field
                  for field_name in unique_fields:
                      # First check for _id field (more reliable for ForeignKeys)
-                     if hasattr(obj, field_name + '_id'):
+                     if hasattr(obj, field_name + "_id"):
                          # Handle ForeignKey fields where _id suffix is used
-                         unique_value[field_name] = getattr(obj, field_name + '_id')
-                         query_fields[field_name] = field_name + '_id'  # Use _id field for query
+                         unique_value[field_name] = getattr(obj, field_name + "_id")
+                         query_fields[field_name] = (
+                             field_name + "_id"
+                         )  # Use _id field for query
                      elif hasattr(obj, field_name):
                          unique_value[field_name] = getattr(obj, field_name)
                          query_fields[field_name] = field_name
@@ -558,8 +561,12 @@ class HookQuerySetMixin:
                      filter_kwargs[db_field_name] = value
                  existing_filters |= Q(**filter_kwargs)

-             logger.debug(f"DEBUG: Existence check query filters: {existing_filters}")
-             logger.debug(f"DEBUG: Unique fields for values_list: {unique_fields}")
+             logger.debug(
+                 f"DEBUG: Existence check query filters: {existing_filters}"
+             )
+             logger.debug(
+                 f"DEBUG: Unique fields for values_list: {unique_fields}"
+             )

              # Get all existing records in one query and create a lookup set
              # We need to use the original unique_fields for values_list to maintain consistency
@@ -571,41 +578,76 @@ class HookQuerySetMixin:
              raw_existing = list(existing_query.values_list(*unique_fields))
              logger.debug(f"DEBUG: Raw existing records from DB: {raw_existing}")

-             existing_records_found = []
+             # Convert database values to match object types for comparison
+             # This handles cases where object values are strings but DB values are integers
+             existing_records_lookup = set()
              for existing_record in raw_existing:
-                 # Convert tuple to a hashable key for lookup
-                 existing_records_lookup.add(existing_record)
-                 existing_records_found.append(existing_record)
+                 # Convert each value in the tuple to match the type from object extraction
+                 converted_record = []
+                 for i, field_name in enumerate(unique_fields):
+                     db_value = existing_record[i]
+                     # Check if this field uses _id suffix in the query
+                     query_field_name = query_fields[field_name]
+                     if query_field_name.endswith("_id"):
+                         # Convert to string to match how we extract from objects
+                         converted_record.append(str(db_value))
+                     else:
+                         converted_record.append(db_value)
+                 converted_tuple = tuple(converted_record)
+                 existing_records_lookup.add(converted_tuple)

-             logger.debug(f"DEBUG: Found {len(existing_records_found)} existing records: {existing_records_found}")
-             logger.debug(f"DEBUG: Existing records lookup set: {existing_records_lookup}")
+             logger.debug(
+                 f"DEBUG: Found {len(raw_existing)} existing records from DB"
+             )
+             logger.debug(
+                 f"DEBUG: Existing records lookup set: {existing_records_lookup}"
+             )

              # Separate records based on whether they already exist
              for obj in objs:
                  obj_unique_value = {}
                  for field_name in unique_fields:
                      # First check for _id field (more reliable for ForeignKeys)
-                     if hasattr(obj, field_name + '_id'):
+                     if hasattr(obj, field_name + "_id"):
                          # Handle ForeignKey fields where _id suffix is used
-                         obj_unique_value[field_name] = getattr(obj, field_name + '_id')
+                         obj_unique_value[field_name] = getattr(
+                             obj, field_name + "_id"
+                         )
                      elif hasattr(obj, field_name):
                          obj_unique_value[field_name] = getattr(obj, field_name)

                  # Check if this record already exists using our bulk lookup
                  if obj_unique_value:
                      # Convert object values to tuple for comparison with existing records
-                     obj_unique_tuple = tuple(
-                         obj_unique_value[field_name]
-                         for field_name in unique_fields
+                     # Apply the same type conversion as we did for database values
+                     obj_unique_tuple = []
+                     for field_name in unique_fields:
+                         value = obj_unique_value[field_name]
+                         # Check if this field uses _id suffix in the query
+                         query_field_name = query_fields[field_name]
+                         if query_field_name.endswith("_id"):
+                             # Convert to string to match how we convert DB values
+                             obj_unique_tuple.append(str(value))
+                         else:
+                             obj_unique_tuple.append(value)
+                     obj_unique_tuple = tuple(obj_unique_tuple)
+
+                     logger.debug(
+                         f"DEBUG: Object unique tuple: {obj_unique_tuple}"
+                     )
+                     logger.debug(
+                         f"DEBUG: Object unique value: {obj_unique_value}"
                      )
-                     logger.debug(f"DEBUG: Object unique tuple: {obj_unique_tuple}")
-                     logger.debug(f"DEBUG: Object unique value: {obj_unique_value}")
                      if obj_unique_tuple in existing_records_lookup:
                          existing_records.append(obj)
-                         logger.debug(f"DEBUG: Found existing record for tuple: {obj_unique_tuple}")
+                         logger.debug(
+                             f"DEBUG: Found existing record for tuple: {obj_unique_tuple}"
+                         )
                      else:
                          new_records.append(obj)
-                         logger.debug(f"DEBUG: No existing record found for tuple: {obj_unique_tuple}")
+                         logger.debug(
+                             f"DEBUG: No existing record found for tuple: {obj_unique_tuple}"
+                         )
                  else:
                      # If we can't determine uniqueness, treat as new
                      new_records.append(obj)
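Per the inline comments, the rewritten classification above normalizes foreign-key `*_id` values to strings on both the database side and the object side before the membership test, because `values_list()` returns the column's native type (typically `int`) while in-memory objects may carry the id as a string. A standalone illustration of the mismatch this guards against (the values here are made up):

```python
# Tuple membership is type-sensitive: (42, ...) and ("42", ...) never match.
db_rows = {(42, "ACME")}   # values_list() returns the FK id as an int
obj_key = ("42", "ACME")   # the in-memory object carries the id as a string

print(obj_key in db_rows)  # False -- the record would be misclassified as new

# Normalizing the id position to str on both sides, as the new code does:
def normalize(row):
    return (str(row[0]), row[1])

print(normalize(obj_key) in {normalize(r) for r in db_rows})  # True
```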
@@ -613,6 +655,10 @@ class HookQuerySetMixin:
                  # If no unique fields, treat all as new
                  new_records = objs

+             # Store the classified records for AFTER hooks to avoid duplicate queries
+             ctx.upsert_existing_records = existing_records
+             ctx.upsert_new_records = new_records
+
              # Handle auto_now fields intelligently for upsert operations
              # Only set auto_now fields on records that will actually be created
              self._handle_auto_now_fields(new_records, add=True)
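Moving the `ctx.upsert_*` assignments below the classification matters for correctness, not just tidiness: the `new_records = objs` fallback rebinds the local name, so a reference captured before classification (as the old code did) could keep pointing at the original empty list. A minimal sketch of the rebinding pitfall:

```python
objs = ["a", "b"]

new_records = []
ctx_upsert_new_records = new_records  # captured early, as the old code did

new_records = objs                    # the "no unique fields" branch rebinds the name
print(ctx_upsert_new_records)         # [] -- the early capture is now stale

ctx_upsert_new_records = new_records  # capturing after classification sees the result
print(ctx_upsert_new_records)         # ['a', 'b']
```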
@@ -980,7 +1026,9 @@ class HookQuerySetMixin:

          changed_fields = self._detect_changed_fields(objs)
          is_mti = self._is_multi_table_inheritance()
-         hook_context, originals = self._init_hook_context(bypass_hooks, objs, "bulk_update")
+         hook_context, originals = self._init_hook_context(
+             bypass_hooks, objs, "bulk_update"
+         )

          fields_set, auto_now_fields, custom_update_fields = self._prepare_update_fields(
              changed_fields
@@ -1163,7 +1211,9 @@ class HookQuerySetMixin:
              operation_name,
          )

-     def _init_hook_context(self, bypass_hooks: bool, objs, operation_name="bulk_update"):
+     def _init_hook_context(
+         self, bypass_hooks: bool, objs, operation_name="bulk_update"
+     ):
          """
          Initialize the hook context for bulk operations.

@@ -1180,7 +1230,9 @@ class HookQuerySetMixin:
          model_cls = self.model

          if bypass_hooks:
-             logger.debug("%s: hooks bypassed for %s", operation_name, model_cls.__name__)
+             logger.debug(
+                 "%s: hooks bypassed for %s", operation_name, model_cls.__name__
+             )
              ctx = HookContext(model_cls, bypass_hooks=True)
          else:
              logger.debug("%s: hooks enabled for %s", operation_name, model_cls.__name__)
@@ -1299,7 +1351,18 @@ class HookQuerySetMixin:

          return list(set(handled_fields))  # Remove duplicates

-     def _execute_hooks_with_operation(self, operation_func, validate_hook, before_hook, after_hook, objs, originals=None, ctx=None, bypass_hooks=False, bypass_validation=False):
+     def _execute_hooks_with_operation(
+         self,
+         operation_func,
+         validate_hook,
+         before_hook,
+         after_hook,
+         objs,
+         originals=None,
+         ctx=None,
+         bypass_hooks=False,
+         bypass_validation=False,
+     ):
          """
          Execute the complete hook lifecycle around a database operation.

@@ -1359,10 +1422,21 @@ class HookQuerySetMixin:
              param_str = f", {', '.join(param_parts)}"

          # Use both print and logger for consistency with existing patterns
-         print(f"DEBUG: {operation_name} called for {model_cls.__name__} with {len(objs)} objects{param_str}")
-         logger.debug(f"{operation_name} called for {model_cls.__name__} with {len(objs)} objects{param_str}")
+         print(
+             f"DEBUG: {operation_name} called for {model_cls.__name__} with {len(objs)} objects{param_str}"
+         )
+         logger.debug(
+             f"{operation_name} called for {model_cls.__name__} with {len(objs)} objects{param_str}"
+         )

-     def _execute_delete_hooks_with_operation(self, operation_func, objs, ctx=None, bypass_hooks=False, bypass_validation=False):
+     def _execute_delete_hooks_with_operation(
+         self,
+         operation_func,
+         objs,
+         ctx=None,
+         bypass_hooks=False,
+         bypass_validation=False,
+     ):
          """
          Execute hooks for delete operations with special field caching logic.

@@ -1414,7 +1488,15 @@ class HookQuerySetMixin:

          return result

-     def _setup_bulk_operation(self, objs, operation_name, require_pks=False, bypass_hooks=False, bypass_validation=False, **log_kwargs):
+     def _setup_bulk_operation(
+         self,
+         objs,
+         operation_name,
+         require_pks=False,
+         bypass_hooks=False,
+         bypass_validation=False,
+         **log_kwargs,
+     ):
          """
          Common setup logic for bulk operations.

@@ -1433,7 +1515,9 @@ class HookQuerySetMixin:
          self._log_bulk_operation_start(operation_name, objs, **log_kwargs)

          # Validate objects
-         self._validate_objects(objs, require_pks=require_pks, operation_name=operation_name)
+         self._validate_objects(
+             objs, require_pks=require_pks, operation_name=operation_name
+         )

          # Initialize hook context
          ctx, originals = self._init_hook_context(bypass_hooks, objs, operation_name)
@@ -1974,8 +2058,11 @@ class HookQuerySetMixin:
              return 0

          model_cls, ctx, _ = self._setup_bulk_operation(
-             objs, "bulk_delete", require_pks=True,
-             bypass_hooks=bypass_hooks, bypass_validation=bypass_validation
+             objs,
+             "bulk_delete",
+             require_pks=True,
+             bypass_hooks=bypass_hooks,
+             bypass_validation=bypass_validation,
          )

          # Execute the database operation with hooks
@@ -1988,8 +2075,11 @@ class HookQuerySetMixin:
              return 0

          result = self._execute_delete_hooks_with_operation(
-             delete_operation, objs, ctx=ctx,
-             bypass_hooks=bypass_hooks, bypass_validation=bypass_validation
+             delete_operation,
+             objs,
+             ctx=ctx,
+             bypass_hooks=bypass_hooks,
+             bypass_validation=bypass_validation,
          )

          return result
{django_bulk_hooks-0.1.273 → django_bulk_hooks-0.1.275}/pyproject.toml
@@ -1,6 +1,6 @@
  [tool.poetry]
  name = "django-bulk-hooks"
- version = "0.1.273"
+ version = "0.1.275"
  description = "Hook-style hooks for Django bulk operations like bulk_create and bulk_update."
  authors = ["Konrad Beck <konrad.beck@merchantcapital.co.za>"]
  readme = "README.md"