django-bulk-hooks 0.1.274__tar.gz → 0.1.276__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of django-bulk-hooks has been flagged as potentially problematic in the registry.
- {django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.276}/PKG-INFO +3 -3
- {django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.276}/django_bulk_hooks/__init__.py +4 -4
- {django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.276}/django_bulk_hooks/handler.py +188 -188
- {django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.276}/django_bulk_hooks/queryset.py +118 -42
- {django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.276}/pyproject.toml +1 -1
- {django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.276}/LICENSE +0 -0
- {django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.276}/README.md +0 -0
- {django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.276}/django_bulk_hooks/conditions.py +0 -0
- {django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.276}/django_bulk_hooks/constants.py +0 -0
- {django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.276}/django_bulk_hooks/context.py +0 -0
- {django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.276}/django_bulk_hooks/decorators.py +0 -0
- {django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.276}/django_bulk_hooks/engine.py +0 -0
- {django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.276}/django_bulk_hooks/enums.py +0 -0
- {django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.276}/django_bulk_hooks/manager.py +0 -0
- {django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.276}/django_bulk_hooks/models.py +0 -0
- {django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.276}/django_bulk_hooks/priority.py +0 -0
- {django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.276}/django_bulk_hooks/registry.py +0 -0
{django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.276}/PKG-INFO

@@ -1,7 +1,8 @@
-Metadata-Version: 2.
+Metadata-Version: 2.1
 Name: django-bulk-hooks
-Version: 0.1.274
+Version: 0.1.276
 Summary: Hook-style hooks for Django bulk operations like bulk_create and bulk_update.
+Home-page: https://github.com/AugendLimited/django-bulk-hooks
 License: MIT
 Keywords: django,bulk,hooks
 Author: Konrad Beck
@@ -13,7 +14,6 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Requires-Dist: django (>=5.2.0,<6.0.0)
-Project-URL: Homepage, https://github.com/AugendLimited/django-bulk-hooks
 Project-URL: Repository, https://github.com/AugendLimited/django-bulk-hooks
 Description-Content-Type: text/markdown

{django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.276}/django_bulk_hooks/__init__.py

All four lines are removed and re-added with identical content, a formatting/line-ending-only change:

@@ -1,4 +1,4 @@
-from django_bulk_hooks.handler import Hook as HookClass
-from django_bulk_hooks.manager import BulkHookManager
-
-__all__ = ["BulkHookManager", "HookClass"]
+from django_bulk_hooks.handler import Hook as HookClass
+from django_bulk_hooks.manager import BulkHookManager
+
+__all__ = ["BulkHookManager", "HookClass"]
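For orientation, those two names are the package's whole public surface. A sketch of how they are presumably wired into a model (hypothetical models.py; BulkHookManager attached like any custom Django manager — the exact wiring may differ):

# models.py of a hypothetical app; a sketch only -- the precise wiring of
# BulkHookManager may differ from this.
from django.db import models

from django_bulk_hooks import BulkHookManager


class Account(models.Model):  # hypothetical model
    name = models.CharField(max_length=100)

    objects = BulkHookManager()  # route bulk operations through the hook engine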
{django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.276}/django_bulk_hooks/handler.py

@@ -1,188 +1,188 @@
All 188 lines of handler.py are removed and re-added with identical content, so the change is apparently formatting/line-ending only. The file, shown once rather than twice:

import logging
import threading
from collections import deque

from django.db import transaction

from django_bulk_hooks.registry import get_hooks, register_hook

logger = logging.getLogger(__name__)


# Thread-local hook context and hook state
class HookVars(threading.local):
    def __init__(self):
        self.new = None
        self.old = None
        self.event = None
        self.model = None
        self.depth = 0


hook_vars = HookVars()

# Hook queue per thread
_hook_context = threading.local()


def get_hook_queue():
    if not hasattr(_hook_context, "queue"):
        _hook_context.queue = deque()
    return _hook_context.queue


class HookContextState:
    @property
    def is_before(self):
        return hook_vars.event.startswith("before_") if hook_vars.event else False

    @property
    def is_after(self):
        return hook_vars.event.startswith("after_") if hook_vars.event else False

    @property
    def is_create(self):
        return "create" in hook_vars.event if hook_vars.event else False

    @property
    def is_update(self):
        return "update" in hook_vars.event if hook_vars.event else False

    @property
    def new(self):
        return hook_vars.new

    @property
    def old(self):
        return hook_vars.old

    @property
    def model(self):
        return hook_vars.model


HookContext = HookContextState()


class HookMeta(type):
    _registered = set()

    def __new__(mcs, name, bases, namespace):
        cls = super().__new__(mcs, name, bases, namespace)
        for method_name, method in namespace.items():
            if hasattr(method, "hooks_hooks"):
                for model_cls, event, condition, priority in method.hooks_hooks:
                    key = (model_cls, event, cls, method_name)
                    if key not in HookMeta._registered:
                        register_hook(
                            model=model_cls,
                            event=event,
                            handler_cls=cls,
                            method_name=method_name,
                            condition=condition,
                            priority=priority,
                        )
                        HookMeta._registered.add(key)
        return cls


class Hook(metaclass=HookMeta):
    @classmethod
    def handle(
        cls,
        event: str,
        model: type,
        *,
        new_records: list = None,
        old_records: list = None,
        **kwargs,
    ) -> None:
        queue = get_hook_queue()
        queue.append((cls, event, model, new_records, old_records, kwargs))
        logger.debug(f"Added item to queue: {event}, depth: {hook_vars.depth}")

        # If we're already processing hooks (depth > 0), don't process the queue
        # The outermost call will process the entire queue
        if hook_vars.depth > 0:
            logger.debug(f"Depth > 0, returning without processing queue")
            return

        # Process the entire queue
        logger.debug(f"Processing queue with {len(queue)} items")
        while queue:
            item = queue.popleft()
            if len(item) == 6:
                cls_, event_, model_, new_, old_, kw_ = item
                logger.debug(f"Processing queue item: {event_}")
                # Call _process on the Hook class, not the calling class
                Hook._process(event_, model_, new_, old_, **kw_)
            else:
                logger.warning(f"Invalid queue item format: {item}")
                continue

    @classmethod
    def _process(
        cls,
        event,
        model,
        new_records,
        old_records,
        **kwargs,
    ):
        hook_vars.depth += 1
        hook_vars.new = new_records
        hook_vars.old = old_records
        hook_vars.event = event
        hook_vars.model = model

        hooks = sorted(get_hooks(model, event), key=lambda x: x[3])
        logger.debug(f"Found {len(hooks)} hooks for {event}")

        def _execute():
            logger.debug(f"Executing {len(hooks)} hooks for {event}")
            new_local = new_records or []
            old_local = old_records or []
            if len(old_local) < len(new_local):
                old_local += [None] * (len(new_local) - len(old_local))

            for handler_cls, method_name, condition, priority in hooks:
                logger.debug(f"Processing hook {handler_cls.__name__}.{method_name}")
                if condition is not None:
                    checks = [
                        condition.check(n, o) for n, o in zip(new_local, old_local)
                    ]
                    if not any(checks):
                        logger.debug(f"Condition failed for {handler_cls.__name__}.{method_name}")
                        continue

                handler = handler_cls()
                method = getattr(handler, method_name)
                logger.debug(f"Executing {handler_cls.__name__}.{method_name}")

                try:
                    method(
                        new_records=new_local,
                        old_records=old_local,
                        **kwargs,
                    )
                    logger.debug(f"Successfully executed {handler_cls.__name__}.{method_name}")
                except Exception:
                    logger.exception(
                        "Error in hook %s.%s", handler_cls.__name__, method_name
                    )

        conn = transaction.get_connection()
        logger.debug(f"Transaction in_atomic_block: {conn.in_atomic_block}, event: {event}")
        try:
            if conn.in_atomic_block and event.startswith("after_"):
                logger.debug(f"Deferring {event} to on_commit")
                transaction.on_commit(_execute)
            else:
                logger.debug(f"Executing {event} immediately")
                _execute()
        finally:
            hook_vars.new = None
            hook_vars.old = None
            hook_vars.event = None
            hook_vars.model = None
            hook_vars.depth -= 1
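Two mechanisms in handler.py are worth calling out. First, Hook.handle is re-entrancy safe: every call only appends to a thread-local queue, and only the outermost call (depth 0) drains it, so hooks that themselves trigger further bulk operations are processed iteratively instead of recursively. A minimal, self-contained sketch of that queue-and-depth pattern (plain Python, independent of Django and of this package's registry; all names here are illustrative):

import threading
from collections import deque

_state = threading.local()


def _ensure_state():
    if not hasattr(_state, "queue"):
        _state.queue = deque()
        _state.depth = 0


def handle(event, payload):
    """Enqueue an event; only the outermost call drains the queue."""
    _ensure_state()
    _state.queue.append((event, payload))
    if _state.depth > 0:
        # A hook is already running; the outer call will process this item.
        return
    while _state.queue:
        queued_event, queued_payload = _state.queue.popleft()
        _process(queued_event, queued_payload)


def _process(event, payload):
    _state.depth += 1
    try:
        print(f"processing {event}: {payload}")
        if event == "after_create":
            # A hook that fires another event: it is queued, not run re-entrantly.
            handle("after_update", payload)
    finally:
        _state.depth -= 1


handle("after_create", {"id": 1})
# -> processing after_create: {'id': 1}
# -> processing after_update: {'id': 1}

Second, for after_* events fired inside an atomic block, _process defers the whole hook batch via transaction.on_commit, so after-hooks observe committed data and are skipped entirely if the transaction rolls back.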
{django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.276}/django_bulk_hooks/queryset.py

@@ -486,11 +486,14 @@ class HookQuerySetMixin:
         passed through to the correct logic. For MTI, only a subset of options may be supported.
         """
         model_cls, ctx, originals = self._setup_bulk_operation(
-            objs,
-            "bulk_create", require_pks=False, bypass_hooks=bypass_hooks, bypass_validation=bypass_validation,
+            objs,
+            "bulk_create",
+            require_pks=False,
+            bypass_hooks=bypass_hooks,
+            bypass_validation=bypass_validation,
             update_conflicts=update_conflicts,
             unique_fields=unique_fields,
-            update_fields=update_fields
+            update_fields=update_fields,
         )

         # When you bulk insert you don't get the primary keys back (if it's an
@@ -524,9 +527,7 @@ class HookQuerySetMixin:
         existing_records = []
         new_records = []

-        #
-        ctx.upsert_existing_records = existing_records
-        ctx.upsert_new_records = new_records
+        # We'll store the records for AFTER hooks after classification is complete

         # Build a filter to check which records already exist
         unique_values = []
@@ -535,10 +536,12 @@ class HookQuerySetMixin:
                 query_fields = {}  # Track which database field to use for each unique field
                 for field_name in unique_fields:
                     # First check for _id field (more reliable for ForeignKeys)
-                    if hasattr(obj, field_name + "_id"):
+                    if hasattr(obj, field_name + "_id"):
                         # Handle ForeignKey fields where _id suffix is used
-                        unique_value[field_name] = getattr(obj, field_name + "_id")
-                        query_fields[field_name] = field_name + "_id"  # Use _id field for query
+                        unique_value[field_name] = getattr(obj, field_name + "_id")
+                        query_fields[field_name] = (
+                            field_name + "_id"
+                        )  # Use _id field for query
                     elif hasattr(obj, field_name):
                         unique_value[field_name] = getattr(obj, field_name)
                         query_fields[field_name] = field_name
@@ -558,8 +561,12 @@ class HookQuerySetMixin:
                     filter_kwargs[db_field_name] = value
                 existing_filters |= Q(**filter_kwargs)

-            logger.debug(f"DEBUG: Existence check query filters: {existing_filters}")
-            logger.debug(f"DEBUG: Unique fields for values_list: {unique_fields}")
+            logger.debug(
+                f"DEBUG: Existence check query filters: {existing_filters}"
+            )
+            logger.debug(
+                f"DEBUG: Unique fields for values_list: {unique_fields}"
+            )

             # Get all existing records in one query and create a lookup set
             # We need to use the original unique_fields for values_list to maintain consistency
@@ -579,45 +586,66 @@ class HookQuerySetMixin:
                 converted_record = []
                 for i, field_name in enumerate(unique_fields):
                     db_value = existing_record[i]
-                    #
-                    …
-                    …
-                        # Convert to string to match how we extract from objects
-                        converted_record.append(str(db_value))
-                    else:
-                        converted_record.append(db_value)
+                    # Convert all values to strings for consistent comparison
+                    # This ensures all database values are strings like object values
+                    converted_record.append(str(db_value))
                 converted_tuple = tuple(converted_record)
                 existing_records_lookup.add(converted_tuple)

-            logger.debug(f"DEBUG: Found {len(raw_existing)} existing records from DB")
-            logger.debug(f"DEBUG: Existing records lookup set: {existing_records_lookup}")
+            logger.debug(
+                f"DEBUG: Found {len(raw_existing)} existing records from DB"
+            )
+            logger.debug(
+                f"DEBUG: Existing records lookup set: {existing_records_lookup}"
+            )

             # Separate records based on whether they already exist
             for obj in objs:
                 obj_unique_value = {}
                 for field_name in unique_fields:
                     # First check for _id field (more reliable for ForeignKeys)
-                    if hasattr(obj, field_name + "_id"):
+                    if hasattr(obj, field_name + "_id"):
                         # Handle ForeignKey fields where _id suffix is used
-                        obj_unique_value[field_name] = getattr(obj, field_name + "_id")
+                        obj_unique_value[field_name] = getattr(
+                            obj, field_name + "_id"
+                        )
                     elif hasattr(obj, field_name):
                         obj_unique_value[field_name] = getattr(obj, field_name)

                 # Check if this record already exists using our bulk lookup
                 if obj_unique_value:
                     # Convert object values to tuple for comparison with existing records
-                    …
-                    …
-                    …
+                    # Apply the same type conversion as we did for database values
+                    obj_unique_tuple = []
+                    for field_name in unique_fields:
+                        value = obj_unique_value[field_name]
+                        # Check if this field uses _id suffix in the query
+                        query_field_name = query_fields[field_name]
+                        if query_field_name.endswith("_id"):
+                            # Convert to string to match how we convert DB values
+                            obj_unique_tuple.append(str(value))
+                        else:
+                            # For non-_id fields, also convert to string for consistency
+                            # This ensures all values are strings like in the database lookup
+                            obj_unique_tuple.append(str(value))
+                    obj_unique_tuple = tuple(obj_unique_tuple)
+
+                    logger.debug(
+                        f"DEBUG: Object unique tuple: {obj_unique_tuple}"
+                    )
+                    logger.debug(
+                        f"DEBUG: Object unique value: {obj_unique_value}"
                     )
-                    logger.debug(f"DEBUG: Object unique tuple: {obj_unique_tuple}")
-                    logger.debug(f"DEBUG: Object unique value: {obj_unique_value}")
                     if obj_unique_tuple in existing_records_lookup:
                         existing_records.append(obj)
-                        logger.debug(f"DEBUG: Found existing record for tuple: {obj_unique_tuple}")
+                        logger.debug(
+                            f"DEBUG: Found existing record for tuple: {obj_unique_tuple}"
+                        )
                     else:
                         new_records.append(obj)
-                        logger.debug(f"DEBUG: No existing record found for tuple: {obj_unique_tuple}")
+                        logger.debug(
+                            f"DEBUG: No existing record found for tuple: {obj_unique_tuple}"
+                        )
                 else:
                     # If we can't determine uniqueness, treat as new
                     new_records.append(obj)
@@ -625,6 +653,10 @@ class HookQuerySetMixin:
             # If no unique fields, treat all as new
             new_records = objs

+        # Store the classified records for AFTER hooks to avoid duplicate queries
+        ctx.upsert_existing_records = existing_records
+        ctx.upsert_new_records = new_records
+
         # Handle auto_now fields intelligently for upsert operations
         # Only set auto_now fields on records that will actually be created
         self._handle_auto_now_fields(new_records, add=True)
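The upsert classification in the hunks above decides, per object, whether a row already exists by normalizing every unique-field value to str on both the database side and the object side before comparing tuples, which avoids int-vs-str mismatches for ForeignKey ids. A standalone sketch of the same classify-against-a-lookup-set idea (plain Python; Record, existing_rows, and the field values are made-up stand-ins for the ORM objects and the values_list query):

from collections import namedtuple

# Made-up stand-in for the values_list(*unique_fields) query result.
existing_rows = [(1, "alice"), (2, "bob")]
# Lookup set with every value stringified, mirroring the diff's normalization.
lookup = {tuple(str(v) for v in row) for row in existing_rows}

Record = namedtuple("Record", ["account_id", "name"])  # stand-in for model instances
objs = [Record(1, "alice"), Record(3, "carol")]

existing_records, new_records = [], []
for obj in objs:
    # Same str() normalization on the object side, so int FK ids match DB values.
    key = (str(obj.account_id), str(obj.name))
    (existing_records if key in lookup else new_records).append(obj)

print(existing_records)  # [Record(account_id=1, name='alice')]
print(new_records)       # [Record(account_id=3, name='carol')]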
@@ -992,7 +1024,9 @@ class HookQuerySetMixin:

         changed_fields = self._detect_changed_fields(objs)
         is_mti = self._is_multi_table_inheritance()
-        hook_context, originals = self._init_hook_context(bypass_hooks, objs, "bulk_update")
+        hook_context, originals = self._init_hook_context(
+            bypass_hooks, objs, "bulk_update"
+        )

         fields_set, auto_now_fields, custom_update_fields = self._prepare_update_fields(
             changed_fields
@@ -1175,7 +1209,9 @@ class HookQuerySetMixin:
             operation_name,
         )

-    def _init_hook_context(self, bypass_hooks: bool, objs, operation_name="bulk_update"):
+    def _init_hook_context(
+        self, bypass_hooks: bool, objs, operation_name="bulk_update"
+    ):
         """
         Initialize the hook context for bulk operations.

@@ -1192,7 +1228,9 @@ class HookQuerySetMixin:
         model_cls = self.model

         if bypass_hooks:
-            logger.debug("%s: hooks bypassed for %s", operation_name, model_cls.__name__)
+            logger.debug(
+                "%s: hooks bypassed for %s", operation_name, model_cls.__name__
+            )
             ctx = HookContext(model_cls, bypass_hooks=True)
         else:
             logger.debug("%s: hooks enabled for %s", operation_name, model_cls.__name__)
@@ -1311,7 +1349,18 @@ class HookQuerySetMixin:

         return list(set(handled_fields))  # Remove duplicates

-    def _execute_hooks_with_operation(self, operation_func, validate_hook, before_hook, after_hook, objs, originals=None, ctx=None, bypass_hooks=False, bypass_validation=False):
+    def _execute_hooks_with_operation(
+        self,
+        operation_func,
+        validate_hook,
+        before_hook,
+        after_hook,
+        objs,
+        originals=None,
+        ctx=None,
+        bypass_hooks=False,
+        bypass_validation=False,
+    ):
         """
         Execute the complete hook lifecycle around a database operation.

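_execute_hooks_with_operation, whose rewrapped signature appears above, brackets a single database operation with the hook lifecycle. A rough sketch of that flow (illustrative only — the parameter names come from the diff, but the exact ordering and bypass semantics shown here are assumptions, not the library's verbatim code):

def execute_with_hooks(
    operation_func,
    validate_hook,
    before_hook,
    after_hook,
    objs,
    bypass_hooks=False,
    bypass_validation=False,
):
    """Illustrative lifecycle: validate -> before -> operation -> after."""
    if bypass_hooks:
        return operation_func(objs)  # run the bare operation, no hooks at all
    if not bypass_validation:
        validate_hook(objs)  # e.g. VALIDATE_* events
    before_hook(objs)  # e.g. BEFORE_* events, before rows hit the DB
    result = operation_func(objs)  # the actual bulk_create/bulk_update/delete
    after_hook(objs)  # e.g. AFTER_* events (deferred to on_commit inside atomic)
    return result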
@@ -1371,10 +1420,21 @@ class HookQuerySetMixin:
             param_str = f", {', '.join(param_parts)}"

         # Use both print and logger for consistency with existing patterns
-        print(f"DEBUG: {operation_name} called for {model_cls.__name__} with {len(objs)} objects{param_str}")
-        logger.debug(f"{operation_name} called for {model_cls.__name__} with {len(objs)} objects{param_str}")
+        print(
+            f"DEBUG: {operation_name} called for {model_cls.__name__} with {len(objs)} objects{param_str}"
+        )
+        logger.debug(
+            f"{operation_name} called for {model_cls.__name__} with {len(objs)} objects{param_str}"
+        )

-    def _execute_delete_hooks_with_operation(self, operation_func, objs, ctx=None, bypass_hooks=False, bypass_validation=False):
+    def _execute_delete_hooks_with_operation(
+        self,
+        operation_func,
+        objs,
+        ctx=None,
+        bypass_hooks=False,
+        bypass_validation=False,
+    ):
         """
         Execute hooks for delete operations with special field caching logic.

@@ -1426,7 +1486,15 @@ class HookQuerySetMixin:

         return result

-    def _setup_bulk_operation(self, objs, operation_name, require_pks=False, bypass_hooks=False, bypass_validation=False, **log_kwargs):
+    def _setup_bulk_operation(
+        self,
+        objs,
+        operation_name,
+        require_pks=False,
+        bypass_hooks=False,
+        bypass_validation=False,
+        **log_kwargs,
+    ):
         """
         Common setup logic for bulk operations.

@@ -1445,7 +1513,9 @@ class HookQuerySetMixin:
         self._log_bulk_operation_start(operation_name, objs, **log_kwargs)

         # Validate objects
-        self._validate_objects(objs, require_pks=require_pks, operation_name=operation_name)
+        self._validate_objects(
+            objs, require_pks=require_pks, operation_name=operation_name
+        )

         # Initialize hook context
         ctx, originals = self._init_hook_context(bypass_hooks, objs, operation_name)
@@ -1986,8 +2056,11 @@ class HookQuerySetMixin:
             return 0

         model_cls, ctx, _ = self._setup_bulk_operation(
-            objs,
-            "bulk_delete", require_pks=True, bypass_hooks=bypass_hooks, bypass_validation=bypass_validation,
+            objs,
+            "bulk_delete",
+            require_pks=True,
+            bypass_hooks=bypass_hooks,
+            bypass_validation=bypass_validation,
         )

         # Execute the database operation with hooks
@@ -2000,8 +2073,11 @@ class HookQuerySetMixin:
             return 0

         result = self._execute_delete_hooks_with_operation(
-            delete_operation,
-            objs, ctx=ctx, bypass_hooks=bypass_hooks, bypass_validation=bypass_validation,
+            delete_operation,
+            objs,
+            ctx=ctx,
+            bypass_hooks=bypass_hooks,
+            bypass_validation=bypass_validation,
         )

         return result
{django_bulk_hooks-0.1.274 → django_bulk_hooks-0.1.276}/pyproject.toml

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "django-bulk-hooks"
-version = "0.1.274"
+version = "0.1.276"
 description = "Hook-style hooks for Django bulk operations like bulk_create and bulk_update."
 authors = ["Konrad Beck <konrad.beck@merchantcapital.co.za>"]
 readme = "README.md"
The remaining files listed above (LICENSE, README.md, and the conditions, constants, context, decorators, engine, enums, manager, models, priority, and registry modules) are unchanged between the two versions.