django-bulk-hooks 0.1.250__py3-none-any.whl → 0.1.252__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of django-bulk-hooks might be problematic.
- django_bulk_hooks/__init__.py +4 -4
- django_bulk_hooks/handler.py +188 -188
- django_bulk_hooks/queryset.py +163 -5
- {django_bulk_hooks-0.1.250.dist-info → django_bulk_hooks-0.1.252.dist-info}/METADATA +3 -3
- {django_bulk_hooks-0.1.250.dist-info → django_bulk_hooks-0.1.252.dist-info}/RECORD +7 -7
- {django_bulk_hooks-0.1.250.dist-info → django_bulk_hooks-0.1.252.dist-info}/WHEEL +1 -1
- {django_bulk_hooks-0.1.250.dist-info → django_bulk_hooks-0.1.252.dist-info}/LICENSE +0 -0
django_bulk_hooks/__init__.py
CHANGED
@@ -1,4 +1,4 @@
(all 4 lines were removed and re-added with identical content; the file is shown once below)
from django_bulk_hooks.handler import Hook as HookClass
from django_bulk_hooks.manager import BulkHookManager

__all__ = ["BulkHookManager", "HookClass"]
django_bulk_hooks/handler.py
CHANGED
@@ -1,188 +1,188 @@
(all 188 lines were removed and re-added with identical content; the file is shown once below)
import logging
import threading
from collections import deque

from django.db import transaction

from django_bulk_hooks.registry import get_hooks, register_hook

logger = logging.getLogger(__name__)


# Thread-local hook context and hook state
class HookVars(threading.local):
    def __init__(self):
        self.new = None
        self.old = None
        self.event = None
        self.model = None
        self.depth = 0


hook_vars = HookVars()

# Hook queue per thread
_hook_context = threading.local()


def get_hook_queue():
    if not hasattr(_hook_context, "queue"):
        _hook_context.queue = deque()
    return _hook_context.queue


class HookContextState:
    @property
    def is_before(self):
        return hook_vars.event.startswith("before_") if hook_vars.event else False

    @property
    def is_after(self):
        return hook_vars.event.startswith("after_") if hook_vars.event else False

    @property
    def is_create(self):
        return "create" in hook_vars.event if hook_vars.event else False

    @property
    def is_update(self):
        return "update" in hook_vars.event if hook_vars.event else False

    @property
    def new(self):
        return hook_vars.new

    @property
    def old(self):
        return hook_vars.old

    @property
    def model(self):
        return hook_vars.model


HookContext = HookContextState()


class HookMeta(type):
    _registered = set()

    def __new__(mcs, name, bases, namespace):
        cls = super().__new__(mcs, name, bases, namespace)
        for method_name, method in namespace.items():
            if hasattr(method, "hooks_hooks"):
                for model_cls, event, condition, priority in method.hooks_hooks:
                    key = (model_cls, event, cls, method_name)
                    if key not in HookMeta._registered:
                        register_hook(
                            model=model_cls,
                            event=event,
                            handler_cls=cls,
                            method_name=method_name,
                            condition=condition,
                            priority=priority,
                        )
                        HookMeta._registered.add(key)
        return cls


class Hook(metaclass=HookMeta):
    @classmethod
    def handle(
        cls,
        event: str,
        model: type,
        *,
        new_records: list = None,
        old_records: list = None,
        **kwargs,
    ) -> None:
        queue = get_hook_queue()
        queue.append((cls, event, model, new_records, old_records, kwargs))
        logger.debug(f"Added item to queue: {event}, depth: {hook_vars.depth}")

        # If we're already processing hooks (depth > 0), don't process the queue
        # The outermost call will process the entire queue
        if hook_vars.depth > 0:
            logger.debug(f"Depth > 0, returning without processing queue")
            return

        # Process the entire queue
        logger.debug(f"Processing queue with {len(queue)} items")
        while queue:
            item = queue.popleft()
            if len(item) == 6:
                cls_, event_, model_, new_, old_, kw_ = item
                logger.debug(f"Processing queue item: {event_}")
                # Call _process on the Hook class, not the calling class
                Hook._process(event_, model_, new_, old_, **kw_)
            else:
                logger.warning(f"Invalid queue item format: {item}")
                continue

    @classmethod
    def _process(
        cls,
        event,
        model,
        new_records,
        old_records,
        **kwargs,
    ):
        hook_vars.depth += 1
        hook_vars.new = new_records
        hook_vars.old = old_records
        hook_vars.event = event
        hook_vars.model = model

        hooks = sorted(get_hooks(model, event), key=lambda x: x[3])
        logger.debug(f"Found {len(hooks)} hooks for {event}")

        def _execute():
            logger.debug(f"Executing {len(hooks)} hooks for {event}")
            new_local = new_records or []
            old_local = old_records or []
            if len(old_local) < len(new_local):
                old_local += [None] * (len(new_local) - len(old_local))

            for handler_cls, method_name, condition, priority in hooks:
                logger.debug(f"Processing hook {handler_cls.__name__}.{method_name}")
                if condition is not None:
                    checks = [
                        condition.check(n, o) for n, o in zip(new_local, old_local)
                    ]
                    if not any(checks):
                        logger.debug(f"Condition failed for {handler_cls.__name__}.{method_name}")
                        continue

                handler = handler_cls()
                method = getattr(handler, method_name)
                logger.debug(f"Executing {handler_cls.__name__}.{method_name}")

                try:
                    method(
                        new_records=new_local,
                        old_records=old_local,
                        **kwargs,
                    )
                    logger.debug(f"Successfully executed {handler_cls.__name__}.{method_name}")
                except Exception:
                    logger.exception(
                        "Error in hook %s.%s", handler_cls.__name__, method_name
                    )

        conn = transaction.get_connection()
        logger.debug(f"Transaction in_atomic_block: {conn.in_atomic_block}, event: {event}")
        try:
            if conn.in_atomic_block and event.startswith("after_"):
                logger.debug(f"Deferring {event} to on_commit")
                transaction.on_commit(_execute)
            else:
                logger.debug(f"Executing {event} immediately")
                _execute()
        finally:
            hook_vars.new = None
            hook_vars.old = None
            hook_vars.event = None
            hook_vars.model = None
            hook_vars.depth -= 1
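The handler module registers hooks through its metaclass: HookMeta scans each class body for methods carrying a hooks_hooks attribute, a list of (model_cls, event, condition, priority) tuples, and hands them to register_hook. Below is a minimal sketch of wiring a handler through that path; the when decorator and the Account model are hypothetical placeholders for illustration (the package's own decorators module, not shown in this diff, is what normally attaches hooks_hooks).

import logging

from django_bulk_hooks.handler import Hook, HookContext

from myapp.models import Account  # assumed existing model in your project

log = logging.getLogger(__name__)


def when(model_cls, event, condition=None, priority=0):
    # Hypothetical stand-in for the package's decorator: it attaches the
    # (model_cls, event, condition, priority) tuples that HookMeta.__new__ scans for
    # under the "hooks_hooks" attribute.
    def mark(method):
        method.hooks_hooks = getattr(method, "hooks_hooks", []) + [
            (model_cls, event, condition, priority)
        ]
        return method
    return mark


class AccountHooks(Hook):
    # HookMeta registers this method for Account / "after_create" at class creation time.
    @when(Account, "after_create")
    def log_created(self, new_records, old_records, **kwargs):
        # HookContext reads the thread-local state that Hook._process populates.
        if HookContext.is_create:
            log.info("created %d accounts", len(new_records))


# Dispatch sketch: Hook.handle() enqueues the event and, once the outermost call is
# reached (depth 0), drains the per-thread queue; "after_*" events are deferred to
# transaction.on_commit when inside an atomic block.
#     Hook.handle("after_create", Account, new_records=[...], old_records=None)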
django_bulk_hooks/queryset.py
CHANGED
@@ -45,6 +45,21 @@ class HookQuerySetMixin:
         # Then run business logic hooks
         engine.run(model_cls, BEFORE_DELETE, objs, ctx=ctx)

+        # Before deletion, ensure all related fields are properly cached
+        # to avoid DoesNotExist errors in AFTER_DELETE hooks
+        for obj in objs:
+            if obj.pk is not None:
+                # Cache all foreign key relationships by accessing them
+                for field in model_cls._meta.fields:
+                    if field.is_relation and not field.many_to_many and not field.one_to_many:
+                        try:
+                            # Access the related field to cache it before deletion
+                            getattr(obj, field.name)
+                        except Exception:
+                            # If we can't access the field (e.g., already deleted, no permission, etc.)
+                            # continue with other fields
+                            pass
+
         # Use Django's standard delete() method
         result = super().delete()

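The added block touches every forward relation before the rows are deleted. Accessing a ForeignKey attribute runs the lookup once and stores the related object in the instance's relation cache, so an AFTER_DELETE hook can still read it without hitting the database, where the row may already be gone. A standalone sketch of the same idea, with order and its customer foreign key as assumed example objects:

def warm_relation_caches(obj):
    """Touch every forward relation on a saved model instance so the related objects are cached."""
    for field in obj._meta.fields:
        # Same filter as the queryset change: forward relations only
        # (ForeignKey / OneToOneField), not reverse or many-to-many accessors.
        if field.is_relation and not field.many_to_many and not field.one_to_many:
            try:
                getattr(obj, field.name)  # runs the query and caches the result on the instance
            except Exception:
                # e.g. the related row is already gone; keep warming the other fields
                pass

# Typical use, assuming `order` is an existing instance with a `customer` ForeignKey:
#     warm_relation_caches(order)
#     order.delete()
#     order.customer  # still readable in an AFTER_DELETE hook, served from the cache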
@@ -505,9 +520,75 @@ class HookQuerySetMixin:
         # Fire hooks before DB ops
         if not bypass_hooks:
             ctx = HookContext(model_cls, bypass_hooks=False) # Pass bypass_hooks
-
-
-
+
+            if update_conflicts and unique_fields:
+                # For upsert operations, we need to determine which records will be created vs updated
+                # Check which records already exist in the database based on unique fields
+                existing_records = []
+                new_records = []
+
+                # Build a filter to check which records already exist
+                unique_values = []
+                for obj in objs:
+                    unique_value = {}
+                    for field_name in unique_fields:
+                        if hasattr(obj, field_name):
+                            unique_value[field_name] = getattr(obj, field_name)
+                    if unique_value:
+                        unique_values.append(unique_value)
+
+                if unique_values:
+                    # Query the database to see which records already exist
+                    from django.db.models import Q
+                    existing_filters = Q()
+                    for unique_value in unique_values:
+                        filter_kwargs = {}
+                        for field_name, value in unique_value.items():
+                            filter_kwargs[field_name] = value
+                        existing_filters |= Q(**filter_kwargs)
+
+                    existing_pks = set(
+                        model_cls.objects.filter(existing_filters).values_list('pk', flat=True)
+                    )
+
+                    # Separate records based on whether they already exist
+                    for obj in objs:
+                        obj_unique_value = {}
+                        for field_name in unique_fields:
+                            if hasattr(obj, field_name):
+                                obj_unique_value[field_name] = getattr(obj, field_name)
+
+                        # Check if this record already exists
+                        if obj_unique_value:
+                            existing_q = Q()
+                            for field_name, value in obj_unique_value.items():
+                                existing_q &= Q(**{field_name: value})
+
+                            if model_cls.objects.filter(existing_q).exists():
+                                existing_records.append(obj)
+                            else:
+                                new_records.append(obj)
+                        else:
+                            # If we can't determine uniqueness, treat as new
+                            new_records.append(obj)
+                else:
+                    # If no unique fields, treat all as new
+                    new_records = objs
+
+                # Run validation hooks on all records
+                if not bypass_validation:
+                    engine.run(model_cls, VALIDATE_CREATE, objs, ctx=ctx)
+
+                # Run appropriate BEFORE hooks based on what will happen
+                if new_records:
+                    engine.run(model_cls, BEFORE_CREATE, new_records, ctx=ctx)
+                if existing_records:
+                    engine.run(model_cls, BEFORE_UPDATE, existing_records, ctx=ctx)
+            else:
+                # For regular create operations, run create hooks before DB ops
+                if not bypass_validation:
+                    engine.run(model_cls, VALIDATE_CREATE, objs, ctx=ctx)
+                engine.run(model_cls, BEFORE_CREATE, objs, ctx=ctx)
         else:
             ctx = HookContext(model_cls, bypass_hooks=True) # Pass bypass_hooks
             logger.debug("bulk_create bypassed hooks")
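In effect, the new branch classifies each object by whether a row with the same unique_fields values already exists, then fires BEFORE_CREATE only for genuinely new objects and BEFORE_UPDATE for the ones the upsert will update. A hedged usage sketch, assuming an Account model whose manager uses this hook-aware queryset and whose number field is unique (the model and field names are illustrative only):

# Django's native upsert API (update_conflicts/unique_fields requires Django 4.1+).
existing = Account(number="A-1", balance=10)   # matches a row already in the database
fresh = Account(number="A-2", balance=0)       # no matching row yet

Account.objects.bulk_create(
    [existing, fresh],
    update_conflicts=True,
    unique_fields=["number"],
    update_fields=["balance"],
)
# With the change above, VALIDATE_CREATE runs for both objects, then BEFORE_CREATE
# fires only for `fresh` and BEFORE_UPDATE only for `existing`, because the queryset
# pre-checks which unique_fields values already exist before handing off to Django.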
@@ -542,9 +623,70 @@ class HookQuerySetMixin:
             unique_fields=unique_fields,
         )

-        # Fire
+        # Fire AFTER hooks
         if not bypass_hooks:
-
+            if update_conflicts and unique_fields:
+                # For upsert operations, we need to determine which records were actually created vs updated
+                # Use the same logic as before to separate records
+                existing_records = []
+                new_records = []
+
+                # Build a filter to check which records already exist
+                unique_values = []
+                for obj in objs:
+                    unique_value = {}
+                    for field_name in unique_fields:
+                        if hasattr(obj, field_name):
+                            unique_value[field_name] = getattr(obj, field_name)
+                    if unique_value:
+                        unique_values.append(unique_value)
+
+                if unique_values:
+                    # Query the database to see which records already exist
+                    from django.db.models import Q
+                    existing_filters = Q()
+                    for unique_value in unique_values:
+                        filter_kwargs = {}
+                        for field_name, value in unique_value.items():
+                            filter_kwargs[field_name] = value
+                        existing_filters |= Q(**filter_kwargs)
+
+                    existing_pks = set(
+                        model_cls.objects.filter(existing_filters).values_list('pk', flat=True)
+                    )
+
+                    # Separate records based on whether they already exist
+                    for obj in objs:
+                        obj_unique_value = {}
+                        for field_name in unique_fields:
+                            if hasattr(obj, field_name):
+                                obj_unique_value[field_name] = getattr(obj, field_name)
+
+                        # Check if this record already exists
+                        if obj_unique_value:
+                            existing_q = Q()
+                            for field_name, value in obj_unique_value.items():
+                                existing_q &= Q(**{field_name: value})
+
+                            if model_cls.objects.filter(existing_q).exists():
+                                existing_records.append(obj)
+                            else:
+                                new_records.append(obj)
+                        else:
+                            # If we can't determine uniqueness, treat as new
+                            new_records.append(obj)
+                else:
+                    # If no unique fields, treat all as new
+                    new_records = objs
+
+                # Run appropriate AFTER hooks based on what actually happened
+                if new_records:
+                    engine.run(model_cls, AFTER_CREATE, new_records, ctx=ctx)
+                if existing_records:
+                    engine.run(model_cls, AFTER_UPDATE, existing_records, ctx=ctx)
+            else:
+                # For regular create operations, run create hooks after DB ops
+                engine.run(model_cls, AFTER_CREATE, objs, ctx=ctx)

         return result

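The classification above repeats, line for line, the block that ran before super().bulk_create(), and this second pass re-queries the table after the rows have been written. A hypothetical refactor sketch (not part of the package) that computes the split once, before the insert, and reuses it for both hook phases so the two passes agree:

from django.db.models import Q


def split_existing_and_new(model_cls, objs, unique_fields):
    """Return (existing_records, new_records) judged by unique_fields values already in the DB."""
    existing, new = [], []
    for obj in objs:
        values = {name: getattr(obj, name) for name in unique_fields if hasattr(obj, name)}
        if not values:
            # Cannot determine uniqueness, treat as new (same fallback as the queryset code).
            new.append(obj)
            continue
        bucket = existing if model_cls.objects.filter(Q(**values)).exists() else new
        bucket.append(obj)
    return existing, new


# Sketch of use inside bulk_create, before the write:
#     existing_records, new_records = split_existing_and_new(model_cls, objs, unique_fields)
#     ...run VALIDATE_CREATE / BEFORE_CREATE / BEFORE_UPDATE...
#     result = super().bulk_create(objs, ...)
#     ...run AFTER_CREATE / AFTER_UPDATE with the same two lists...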
@@ -1147,6 +1289,22 @@ class HookQuerySetMixin:
             ctx = HookContext(model_cls, bypass_hooks=True)
             logger.debug("bulk_delete bypassed hooks")

+        # Before deletion, ensure all related fields are properly cached
+        # to avoid DoesNotExist errors in AFTER_DELETE hooks
+        if not bypass_hooks:
+            for obj in objs:
+                if obj.pk is not None:
+                    # Cache all foreign key relationships by accessing them
+                    for field in model_cls._meta.fields:
+                        if field.is_relation and not field.many_to_many and not field.one_to_many:
+                            try:
+                                # Access the related field to cache it before deletion
+                                getattr(obj, field.name)
+                            except Exception:
+                                # If we can't access the field (e.g., already deleted, no permission, etc.)
+                                # continue with other fields
+                                pass
+
         # Use Django's standard delete() method on the queryset
         pks = [obj.pk for obj in objs if obj.pk is not None]
         if pks:
{django_bulk_hooks-0.1.250.dist-info → django_bulk_hooks-0.1.252.dist-info}/METADATA
CHANGED
@@ -1,8 +1,7 @@
-Metadata-Version: 2.
+Metadata-Version: 2.3
 Name: django-bulk-hooks
-Version: 0.1.
+Version: 0.1.252
 Summary: Hook-style hooks for Django bulk operations like bulk_create and bulk_update.
-Home-page: https://github.com/AugendLimited/django-bulk-hooks
 License: MIT
 Keywords: django,bulk,hooks
 Author: Konrad Beck
@@ -14,6 +13,7 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Requires-Dist: Django (>=4.0)
+Project-URL: Homepage, https://github.com/AugendLimited/django-bulk-hooks
 Project-URL: Repository, https://github.com/AugendLimited/django-bulk-hooks
 Description-Content-Type: text/markdown

{django_bulk_hooks-0.1.250.dist-info → django_bulk_hooks-0.1.252.dist-info}/RECORD
CHANGED
@@ -1,17 +1,17 @@
-django_bulk_hooks/__init__.py,sha256=
+django_bulk_hooks/__init__.py,sha256=hsbKduccFEcsV4KIw8CbxCUDOtLZwToCc-XP3sqNy-8,154
 django_bulk_hooks/conditions.py,sha256=V_f3Di2uCVUjoyfiU4BQCHmI4uUIRSRroApDcXlvnso,6349
 django_bulk_hooks/constants.py,sha256=3x1H1fSUUNo0DZONN7GUVDuySZctTR-jtByBHmAIX5w,303
 django_bulk_hooks/context.py,sha256=jlLsqGZbj__J0-iBUp1D6jTrlDEiX3qIo0XlywW4D9I,2244
 django_bulk_hooks/decorators.py,sha256=32ffydS9tARaG_WJoiVri7zJnfS2iMd7SuZ8L_sRAGM,7985
 django_bulk_hooks/engine.py,sha256=M3b7Rcb65PYAZTLfWrIRi99BUBPgSLCryL3MSjMVlfQ,2663
 django_bulk_hooks/enums.py,sha256=Zo8_tJzuzZ2IKfVc7gZ-0tWPT8q1QhqZbAyoh9ZVJbs,381
-django_bulk_hooks/handler.py,sha256=
+django_bulk_hooks/handler.py,sha256=Bx-W6yyiciKMyy-BRxUt3CmRPCrX9_LhQgU-5LaJTjg,6019
 django_bulk_hooks/manager.py,sha256=nfWiwU5-yAoxdnQsUMohxtyCpkV0MBv6X3wmipr9eQY,3697
 django_bulk_hooks/models.py,sha256=WtSfc4GBOG_oOt8n37cVvid0MtFIGze9JYKSixil2y0,4370
 django_bulk_hooks/priority.py,sha256=HG_2D35nga68lBCZmSXTcplXrjFoRgZFRDOy4ROKonY,376
-django_bulk_hooks/queryset.py,sha256=
+django_bulk_hooks/queryset.py,sha256=whv6QunaxRhos0nBlx600mKeleovjdv3XdeMSULh7n8,60739
 django_bulk_hooks/registry.py,sha256=GRUTGVQEO2sdkC9OaZ9Q3U7mM-3Ix83uTyvrlTtpatw,1317
-django_bulk_hooks-0.1.
-django_bulk_hooks-0.1.
-django_bulk_hooks-0.1.
-django_bulk_hooks-0.1.
+django_bulk_hooks-0.1.252.dist-info/LICENSE,sha256=dguKIcbDGeZD-vXWdLyErPUALYOvtX_fO4Zjhq481uk,1088
+django_bulk_hooks-0.1.252.dist-info/METADATA,sha256=xg9nPjRhZJPXem1plWls94mzENbAAoNiWI43J931rpE,9061
+django_bulk_hooks-0.1.252.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
+django_bulk_hooks-0.1.252.dist-info/RECORD,,
{django_bulk_hooks-0.1.250.dist-info → django_bulk_hooks-0.1.252.dist-info}/LICENSE
File without changes