p1-taskqueue 0.1.12__tar.gz → 0.1.14__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of p1-taskqueue might be problematic. Click here for more details.
- {p1_taskqueue-0.1.12 → p1_taskqueue-0.1.14}/PKG-INFO +2 -1
- {p1_taskqueue-0.1.12 → p1_taskqueue-0.1.14}/pyproject.toml +2 -1
- {p1_taskqueue-0.1.12 → p1_taskqueue-0.1.14}/src/p1_taskqueue.egg-info/PKG-INFO +2 -1
- {p1_taskqueue-0.1.12 → p1_taskqueue-0.1.14}/src/p1_taskqueue.egg-info/SOURCES.txt +1 -0
- {p1_taskqueue-0.1.12 → p1_taskqueue-0.1.14}/src/p1_taskqueue.egg-info/requires.txt +1 -0
- {p1_taskqueue-0.1.12 → p1_taskqueue-0.1.14}/src/taskqueue/celery_app.py +15 -1
- {p1_taskqueue-0.1.12 → p1_taskqueue-0.1.14}/src/taskqueue/cmanager.py +139 -46
- {p1_taskqueue-0.1.12 → p1_taskqueue-0.1.14}/src/taskqueue/libs/helper_test.py +26 -0
- p1_taskqueue-0.1.14/src/taskqueue/slack_notifier.py +51 -0
- {p1_taskqueue-0.1.12 → p1_taskqueue-0.1.14}/tests/test_cmanager.py +386 -2
- {p1_taskqueue-0.1.12 → p1_taskqueue-0.1.14}/tests/test_helper_test_functions.py +80 -0
- {p1_taskqueue-0.1.12 → p1_taskqueue-0.1.14}/tests/test_test_utils.py +101 -2
- {p1_taskqueue-0.1.12 → p1_taskqueue-0.1.14}/README.md +0 -0
- {p1_taskqueue-0.1.12 → p1_taskqueue-0.1.14}/setup.cfg +0 -0
- {p1_taskqueue-0.1.12 → p1_taskqueue-0.1.14}/src/p1_taskqueue.egg-info/dependency_links.txt +0 -0
- {p1_taskqueue-0.1.12 → p1_taskqueue-0.1.14}/src/p1_taskqueue.egg-info/top_level.txt +0 -0
- {p1_taskqueue-0.1.12 → p1_taskqueue-0.1.14}/src/taskqueue/__init__.py +0 -0
- {p1_taskqueue-0.1.12 → p1_taskqueue-0.1.14}/src/taskqueue/libs/__init__.py +0 -0
- {p1_taskqueue-0.1.12 → p1_taskqueue-0.1.14}/tests/test_celery_app.py +0 -0
- {p1_taskqueue-0.1.12 → p1_taskqueue-0.1.14}/tests/test_return_values.py +0 -0
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: p1-taskqueue
|
|
3
|
-
Version: 0.1.12
|
|
3
|
+
Version: 0.1.14
|
|
4
4
|
Summary: A Task Queue Wrapper for Dekoruma Backend
|
|
5
5
|
Author-email: Chalvin <engineering@dekoruma.com>
|
|
6
6
|
Project-URL: Homepage, https://github.com/Dekoruma/p1-taskqueue
|
|
@@ -19,6 +19,7 @@ Requires-Dist: kombu>=5.5.4
|
|
|
19
19
|
Requires-Dist: django>=4.0.0
|
|
20
20
|
Requires-Dist: django-celery-results>=2.6.0
|
|
21
21
|
Requires-Dist: django-celery-beat>=2.8.1
|
|
22
|
+
Requires-Dist: requests>=2.32.3
|
|
22
23
|
Provides-Extra: dev
|
|
23
24
|
Requires-Dist: pytest>=7.0.0; extra == "dev"
|
|
24
25
|
Requires-Dist: pytest-cov>=4.0.0; extra == "dev"
|
|
@@ -5,7 +5,7 @@ build-backend = "setuptools.build_meta"
|
|
|
5
5
|
[project]
|
|
6
6
|
name = "p1-taskqueue"
|
|
7
7
|
# DO NOT CHANGE THIS VERSION - it gets automatically replaced by CI/CD with the git tag version
|
|
8
|
-
version = "0.1.12"
|
|
8
|
+
version = "0.1.14"
|
|
9
9
|
description = "A Task Queue Wrapper for Dekoruma Backend"
|
|
10
10
|
authors = [
|
|
11
11
|
{name = "Chalvin", email = "engineering@dekoruma.com"}
|
|
@@ -26,6 +26,7 @@ dependencies = [
|
|
|
26
26
|
"django>=4.0.0",
|
|
27
27
|
"django-celery-results>=2.6.0",
|
|
28
28
|
"django-celery-beat>=2.8.1",
|
|
29
|
+
"requests>=2.32.3",
|
|
29
30
|
]
|
|
30
31
|
|
|
31
32
|
[project.optional-dependencies]
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: p1-taskqueue
|
|
3
|
-
Version: 0.1.12
|
|
3
|
+
Version: 0.1.14
|
|
4
4
|
Summary: A Task Queue Wrapper for Dekoruma Backend
|
|
5
5
|
Author-email: Chalvin <engineering@dekoruma.com>
|
|
6
6
|
Project-URL: Homepage, https://github.com/Dekoruma/p1-taskqueue
|
|
@@ -19,6 +19,7 @@ Requires-Dist: kombu>=5.5.4
|
|
|
19
19
|
Requires-Dist: django>=4.0.0
|
|
20
20
|
Requires-Dist: django-celery-results>=2.6.0
|
|
21
21
|
Requires-Dist: django-celery-beat>=2.8.1
|
|
22
|
+
Requires-Dist: requests>=2.32.3
|
|
22
23
|
Provides-Extra: dev
|
|
23
24
|
Requires-Dist: pytest>=7.0.0; extra == "dev"
|
|
24
25
|
Requires-Dist: pytest-cov>=4.0.0; extra == "dev"
|
|
@@ -2,10 +2,14 @@
|
|
|
2
2
|
Celery application setup for TaskQueue.
|
|
3
3
|
Reads configuration from Django settings and auto-configures queues with DLQ.
|
|
4
4
|
"""
|
|
5
|
+
import logging
|
|
6
|
+
|
|
5
7
|
from celery import Celery
|
|
6
8
|
from kombu import Exchange
|
|
7
9
|
from kombu import Queue
|
|
8
10
|
|
|
11
|
+
logger = logging.getLogger(__name__)
|
|
12
|
+
|
|
9
13
|
|
|
10
14
|
def get_django_settings():
|
|
11
15
|
"""Get Django settings, fail fast if not properly configured."""
|
|
@@ -63,7 +67,6 @@ def setup_queues(app, settings, celery_config):
|
|
|
63
67
|
queue_names = ['default', 'high', 'low']
|
|
64
68
|
dlq_name_prefix = getattr(settings, 'TASKQUEUE_DLQ_NAME_PREFIX', 'dlq')
|
|
65
69
|
|
|
66
|
-
# Create exchanges
|
|
67
70
|
main_exchange = Exchange(app_name, type='direct')
|
|
68
71
|
dlx_exchange = Exchange(f'{app_name}.dlx', type='direct')
|
|
69
72
|
|
|
@@ -95,5 +98,16 @@ def setup_queues(app, settings, celery_config):
|
|
|
95
98
|
'task_queues': tuple(queues),
|
|
96
99
|
})
|
|
97
100
|
|
|
101
|
+
try:
|
|
102
|
+
with app.connection_or_acquire() as conn:
|
|
103
|
+
main_exchange.declare(channel=conn.default_channel)
|
|
104
|
+
dlx_exchange.declare(channel=conn.default_channel)
|
|
105
|
+
|
|
106
|
+
for queue in queues:
|
|
107
|
+
queue.declare(channel=conn.default_channel)
|
|
108
|
+
except Exception as e:
|
|
109
|
+
logger.warning(
|
|
110
|
+
f"[TaskQueue] Failed to declare queues: {str(e.__class__.__name__)} {e}")
|
|
111
|
+
|
|
98
112
|
|
|
99
113
|
celery_app = create_celery_app()
|
|
@@ -1,13 +1,13 @@
|
|
|
1
1
|
import importlib
|
|
2
2
|
import inspect
|
|
3
3
|
import logging
|
|
4
|
-
from datetime import datetime
|
|
5
|
-
from datetime import timedelta
|
|
6
4
|
from typing import Any
|
|
7
5
|
from typing import Dict
|
|
8
6
|
from typing import Tuple
|
|
9
7
|
|
|
10
8
|
from celery import shared_task
|
|
9
|
+
from celery.exceptions import Reject
|
|
10
|
+
from taskqueue.slack_notifier import SlackbotManager
|
|
11
11
|
|
|
12
12
|
# Setup logger
|
|
13
13
|
logger = logging.getLogger(__name__)
|
|
@@ -50,7 +50,8 @@ def _extract_init_args_from_instance(instance: Any) -> Tuple[list, dict]:
|
|
|
50
50
|
def _split_function_and_queue_kwargs(kwargs: Dict[str, Any]) -> Tuple[Dict[str, Any], Dict[str, Any]]:
|
|
51
51
|
# To prevent confusion whether a kwargs is for function or queue kwargs(i.e celery options and on_commit),
|
|
52
52
|
# ignore confusing kwargs while give warning
|
|
53
|
-
supported_queue_keys = {"channel", "retry",
|
|
53
|
+
supported_queue_keys = {"channel", "retry",
|
|
54
|
+
"on_commit", "job_timeout", "use_legacy_executor"}
|
|
54
55
|
ignored_non_function_keys = {
|
|
55
56
|
"queue", "countdown", "eta", "expires", "priority", "task_id", "routing_key",
|
|
56
57
|
"serializer", "compression", "headers", "link", "link_error", "retry_policy",
|
|
@@ -74,6 +75,17 @@ def _split_function_and_queue_kwargs(kwargs: Dict[str, Any]) -> Tuple[Dict[str,
|
|
|
74
75
|
return func_kwargs, queue_kwargs
|
|
75
76
|
|
|
76
77
|
|
|
78
|
+
def _build_callable_task_call(func: Any, func_args: tuple, func_kwargs: dict) -> Tuple[str, list, dict]:
|
|
79
|
+
task_name = "taskqueue.cmanager.callable_executor"
|
|
80
|
+
task_args = []
|
|
81
|
+
task_kwargs = {
|
|
82
|
+
"callable_obj": func,
|
|
83
|
+
"args": list(func_args),
|
|
84
|
+
"kwargs": dict(func_kwargs),
|
|
85
|
+
}
|
|
86
|
+
return task_name, task_args, task_kwargs
|
|
87
|
+
|
|
88
|
+
|
|
77
89
|
def _build_dynamic_task_call(func: Any, *args: Any, **func_kwargs: Any) -> Tuple[str, list, dict]:
|
|
78
90
|
if _is_class_method(func):
|
|
79
91
|
instance = getattr(func, "__self__")
|
|
@@ -155,47 +167,21 @@ class CManager:
|
|
|
155
167
|
'enqueue_op_type', K_ENQUEUE_OP_TYPE_ENQUEUE)
|
|
156
168
|
|
|
157
169
|
try:
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
raise ValueError(
|
|
161
|
-
"enqueue requires a callable as the first positional argument")
|
|
162
|
-
func = args[0]
|
|
163
|
-
func_args = args[1:]
|
|
164
|
-
|
|
165
|
-
elif enqueue_op_type == K_ENQUEUE_OP_TYPE_ENQUEUE_AT:
|
|
166
|
-
if len(args) < 2:
|
|
167
|
-
raise ValueError(
|
|
168
|
-
"enqueue_at requires (eta_datetime, func, *func_args)")
|
|
169
|
-
eta: datetime = args[0]
|
|
170
|
-
func = args[1]
|
|
171
|
-
func_args = args[2:]
|
|
172
|
-
|
|
173
|
-
elif enqueue_op_type == K_ENQUEUE_OP_TYPE_ENQUEUE_IN:
|
|
174
|
-
if len(args) < 2:
|
|
175
|
-
raise ValueError(
|
|
176
|
-
"enqueue_in requires (countdown_delta, func, *func_args)")
|
|
177
|
-
delta: timedelta = args[0]
|
|
178
|
-
func = args[1]
|
|
179
|
-
func_args = args[2:]
|
|
180
|
-
else:
|
|
181
|
-
raise ValueError(
|
|
182
|
-
f"Unknown enqueue operation type: {enqueue_op_type}")
|
|
183
|
-
|
|
184
|
-
func_kwargs, queue_kwargs = _split_function_and_queue_kwargs(
|
|
185
|
-
kwargs)
|
|
170
|
+
func, func_args, func_kwargs, queue_options = self._parse_enqueue_args(
|
|
171
|
+
enqueue_op_type, args, kwargs)
|
|
186
172
|
|
|
187
|
-
|
|
188
|
-
|
|
189
|
-
queue_kwargs["eta"] = eta
|
|
190
|
-
elif enqueue_op_type == K_ENQUEUE_OP_TYPE_ENQUEUE_IN:
|
|
191
|
-
queue_kwargs = dict(queue_kwargs)
|
|
192
|
-
queue_kwargs["countdown"] = int(delta.total_seconds())
|
|
173
|
+
use_legacy_executor = queue_options.pop(
|
|
174
|
+
'use_legacy_executor', True)
|
|
193
175
|
|
|
194
|
-
|
|
195
|
-
|
|
176
|
+
if use_legacy_executor:
|
|
177
|
+
task_name, task_args, task_kwargs = _build_dynamic_task_call(
|
|
178
|
+
func, *func_args, **func_kwargs)
|
|
179
|
+
else:
|
|
180
|
+
task_name, task_args, task_kwargs = _build_callable_task_call(
|
|
181
|
+
func, func_args, func_kwargs)
|
|
196
182
|
|
|
197
183
|
task_id = self._send_task(task_name, task_args,
|
|
198
|
-
task_kwargs,
|
|
184
|
+
task_kwargs, queue_options)
|
|
199
185
|
|
|
200
186
|
logger.info('[_enqueue_op_base %s] Submit Celery Task SUCCESS, task_name: %s args: %s, kwargs: %s, task_id: %s' % (
|
|
201
187
|
enqueue_op_type, task_name, task_args, task_kwargs, task_id))
|
|
@@ -205,6 +191,46 @@ class CManager:
|
|
|
205
191
|
enqueue_op_type, str(e), args, kwargs))
|
|
206
192
|
raise e
|
|
207
193
|
|
|
194
|
+
def _parse_enqueue_args(self, enqueue_op_type: str, args: tuple, kwargs: dict) -> Tuple[Any, tuple, dict, dict]:
|
|
195
|
+
"""Parse enqueue arguments and return func, func_args, func_kwargs, and queue_options."""
|
|
196
|
+
if enqueue_op_type == K_ENQUEUE_OP_TYPE_ENQUEUE:
|
|
197
|
+
if not args:
|
|
198
|
+
raise ValueError(
|
|
199
|
+
"enqueue requires a callable as the first positional argument")
|
|
200
|
+
func = args[0]
|
|
201
|
+
func_args = args[1:]
|
|
202
|
+
eta, delta = None, None
|
|
203
|
+
|
|
204
|
+
elif enqueue_op_type == K_ENQUEUE_OP_TYPE_ENQUEUE_AT:
|
|
205
|
+
if len(args) < 2:
|
|
206
|
+
raise ValueError(
|
|
207
|
+
"enqueue_at requires (eta_datetime, func, *func_args)")
|
|
208
|
+
eta = args[0]
|
|
209
|
+
func = args[1]
|
|
210
|
+
func_args = args[2:]
|
|
211
|
+
delta = None
|
|
212
|
+
|
|
213
|
+
elif enqueue_op_type == K_ENQUEUE_OP_TYPE_ENQUEUE_IN:
|
|
214
|
+
if len(args) < 2:
|
|
215
|
+
raise ValueError(
|
|
216
|
+
"enqueue_in requires (countdown_delta, func, *func_args)")
|
|
217
|
+
delta = args[0]
|
|
218
|
+
func = args[1]
|
|
219
|
+
func_args = args[2:]
|
|
220
|
+
eta = None
|
|
221
|
+
else:
|
|
222
|
+
raise ValueError(
|
|
223
|
+
f"Unknown enqueue operation type: {enqueue_op_type}")
|
|
224
|
+
|
|
225
|
+
func_kwargs, queue_options = _split_function_and_queue_kwargs(kwargs)
|
|
226
|
+
|
|
227
|
+
if eta is not None:
|
|
228
|
+
queue_options["eta"] = eta
|
|
229
|
+
elif delta is not None:
|
|
230
|
+
queue_options["countdown"] = int(delta.total_seconds())
|
|
231
|
+
|
|
232
|
+
return func, func_args, func_kwargs, queue_options
|
|
233
|
+
|
|
208
234
|
def _send_task(self, task_name: str, task_args: list, task_kwargs: dict, queue_kwargs: Dict[str, Any]) -> str:
|
|
209
235
|
celery_app = self._get_celery_app()
|
|
210
236
|
|
|
@@ -237,7 +263,54 @@ class CManager:
|
|
|
237
263
|
cm = CManager()
|
|
238
264
|
|
|
239
265
|
|
|
240
|
-
@shared_task(bind=True, max_retries=K_MAX_RETRY_COUNT)
|
|
266
|
+
@shared_task(bind=True, max_retries=K_MAX_RETRY_COUNT, acks_late=True, reject_on_worker_lost=True)
|
|
267
|
+
def callable_executor(self, callable_obj=None, args=None, kwargs=None, retry=None):
|
|
268
|
+
job_id = self.request.id
|
|
269
|
+
try:
|
|
270
|
+
args = args or []
|
|
271
|
+
kwargs = kwargs or {}
|
|
272
|
+
callable_name = getattr(callable_obj, '__name__', str(callable_obj))
|
|
273
|
+
|
|
274
|
+
logger.info(
|
|
275
|
+
f"[TaskQueue] Executing callable: {callable_name} with args: {args} and kwargs: {kwargs}, job_id: {job_id}")
|
|
276
|
+
|
|
277
|
+
callable_obj(*args, **kwargs)
|
|
278
|
+
|
|
279
|
+
logger.info(
|
|
280
|
+
f"[TaskQueue] Callable execution completed successfully, callable: {callable_name}, args: {args}, kwargs: {kwargs}, job_id: {job_id}")
|
|
281
|
+
return None
|
|
282
|
+
except Exception as e:
|
|
283
|
+
logger.exception(
|
|
284
|
+
f"[TaskQueue] Error executing callable: {callable_name}, args: {args}, kwargs: {kwargs}, error_class: {e.__class__.__name__}, error: {e}, job_id: {job_id}")
|
|
285
|
+
|
|
286
|
+
current_retries = getattr(self.request, 'retries', 0) or 0
|
|
287
|
+
max_retries = self.max_retries or K_MAX_RETRY_COUNT
|
|
288
|
+
if isinstance(retry, dict) and 'max_retries' in retry:
|
|
289
|
+
max_retries = retry['max_retries']
|
|
290
|
+
|
|
291
|
+
if current_retries >= max_retries:
|
|
292
|
+
logger.error(
|
|
293
|
+
f"[TaskQueue] Max retries ({max_retries}) reached for callable: {callable_name}, job_id: {job_id}")
|
|
294
|
+
self.update_state(state='FAILURE', meta={
|
|
295
|
+
'exc_type': type(e).__name__, 'exc_message': str(e)})
|
|
296
|
+
|
|
297
|
+
SlackbotManager.send_message(
|
|
298
|
+
f"Job Failed Too Many Times - Moving back to dlq.\n"
|
|
299
|
+
f"function name: {callable_name}\n"
|
|
300
|
+
f"args: {args}\n"
|
|
301
|
+
f"kwargs: {kwargs}"
|
|
302
|
+
)
|
|
303
|
+
|
|
304
|
+
raise Reject(reason=str(e), requeue=False)
|
|
305
|
+
|
|
306
|
+
countdown = K_DEFAULT_RETRY_COUNTDOWN
|
|
307
|
+
if isinstance(retry, dict) and 'countdown' in retry:
|
|
308
|
+
countdown = retry['countdown']
|
|
309
|
+
|
|
310
|
+
raise self.retry(exc=e, countdown=countdown, max_retries=max_retries)
|
|
311
|
+
|
|
312
|
+
|
|
313
|
+
@shared_task(bind=True, max_retries=K_MAX_RETRY_COUNT, acks_late=True, reject_on_worker_lost=True)
|
|
241
314
|
def dynamic_function_executor(self, module_path=None, function_name=None, args=None, kwargs=None, retry=None):
|
|
242
315
|
job_id = self.request.id
|
|
243
316
|
try:
|
|
@@ -261,8 +334,18 @@ def dynamic_function_executor(self, module_path=None, function_name=None, args=N
|
|
|
261
334
|
|
|
262
335
|
if current_retries >= max_retries:
|
|
263
336
|
logger.error(
|
|
264
|
-
f"[TaskQueue] Max retries ({max_retries}) reached for function: {function_name},
|
|
265
|
-
|
|
337
|
+
f"[TaskQueue] Max retries ({max_retries}) reached for function: {function_name}, job_id: {job_id}")
|
|
338
|
+
self.update_state(state='FAILURE', meta={
|
|
339
|
+
'exc_type': type(e).__name__, 'exc_message': str(e)})
|
|
340
|
+
|
|
341
|
+
SlackbotManager.send_message(
|
|
342
|
+
f"Job Failed Too Many Times - Moving back to dlq.\n"
|
|
343
|
+
f"function name: {function_name}\n"
|
|
344
|
+
f"args: {args}\n"
|
|
345
|
+
f"kwargs: {kwargs}"
|
|
346
|
+
)
|
|
347
|
+
|
|
348
|
+
raise Reject(reason=str(e), requeue=False)
|
|
266
349
|
|
|
267
350
|
countdown = K_DEFAULT_RETRY_COUNTDOWN
|
|
268
351
|
if isinstance(retry, dict) and 'countdown' in retry:
|
|
@@ -271,7 +354,7 @@ def dynamic_function_executor(self, module_path=None, function_name=None, args=N
|
|
|
271
354
|
raise self.retry(exc=e, countdown=countdown, max_retries=max_retries)
|
|
272
355
|
|
|
273
356
|
|
|
274
|
-
@shared_task(bind=True, max_retries=K_MAX_RETRY_COUNT)
|
|
357
|
+
@shared_task(bind=True, max_retries=K_MAX_RETRY_COUNT, acks_late=True, reject_on_worker_lost=True)
|
|
275
358
|
def dynamic_class_method_executor(self, module_path=None, class_name=None, method_name=None, args=None, kwargs=None, init_args=None, init_kwargs=None, retry=None):
|
|
276
359
|
job_id = self.request.id
|
|
277
360
|
try:
|
|
@@ -299,8 +382,18 @@ def dynamic_class_method_executor(self, module_path=None, class_name=None, metho
|
|
|
299
382
|
|
|
300
383
|
if current_retries >= max_retries:
|
|
301
384
|
logger.error(
|
|
302
|
-
f"[TaskQueue] Max retries ({max_retries}) reached for method: {method_name},
|
|
303
|
-
|
|
385
|
+
f"[TaskQueue] Max retries ({max_retries}) reached for method: {method_name}, job_id: {job_id}")
|
|
386
|
+
self.update_state(state='FAILURE', meta={
|
|
387
|
+
'exc_type': type(e).__name__, 'exc_message': str(e)})
|
|
388
|
+
|
|
389
|
+
SlackbotManager.send_message(
|
|
390
|
+
f"Job Failed Too Many Times - Moving back to dlq.\n"
|
|
391
|
+
f"function name: {class_name}.{method_name}\n"
|
|
392
|
+
f"args: {args}\n"
|
|
393
|
+
f"kwargs: {kwargs}"
|
|
394
|
+
)
|
|
395
|
+
|
|
396
|
+
raise Reject(reason=str(e), requeue=False)
|
|
304
397
|
|
|
305
398
|
countdown = K_DEFAULT_RETRY_COUNTDOWN
|
|
306
399
|
if isinstance(retry, dict) and 'countdown' in retry:
|
|
@@ -70,6 +70,19 @@ def celery_worker_burst(include_func_names: List[str], channel: str = "default")
|
|
|
70
70
|
method_name = task_kwargs.get('method_name', '')
|
|
71
71
|
if module_path and class_name and method_name:
|
|
72
72
|
full_func_name = f"{module_path}.{class_name}.{method_name}"
|
|
73
|
+
elif task_name.endswith("callable_executor"):
|
|
74
|
+
callable_obj = task_kwargs.get('callable_obj')
|
|
75
|
+
if callable_obj:
|
|
76
|
+
module_path = getattr(
|
|
77
|
+
callable_obj, '__module__', '')
|
|
78
|
+
func_name = getattr(
|
|
79
|
+
callable_obj, '__name__', '')
|
|
80
|
+
if hasattr(callable_obj, '__self__'):
|
|
81
|
+
class_name = callable_obj.__self__.__class__.__name__
|
|
82
|
+
if module_path and class_name and func_name:
|
|
83
|
+
full_func_name = f"{module_path}.{class_name}.{func_name}"
|
|
84
|
+
elif module_path and func_name:
|
|
85
|
+
full_func_name = f"{module_path}.{func_name}"
|
|
73
86
|
|
|
74
87
|
should_execute = full_func_name in included_set if full_func_name else False
|
|
75
88
|
|
|
@@ -136,6 +149,19 @@ def get_queued_tasks(channel: str = "default"):
|
|
|
136
149
|
method_name = task_kwargs.get('method_name', '')
|
|
137
150
|
if module_path and class_name and method_name:
|
|
138
151
|
full_func_name = f"{module_path}.{class_name}.{method_name}"
|
|
152
|
+
elif task_name and task_name.endswith("callable_executor"):
|
|
153
|
+
callable_obj = task_kwargs.get('callable_obj')
|
|
154
|
+
if callable_obj:
|
|
155
|
+
module_path = getattr(
|
|
156
|
+
callable_obj, '__module__', '')
|
|
157
|
+
func_name = getattr(
|
|
158
|
+
callable_obj, '__name__', '')
|
|
159
|
+
if hasattr(callable_obj, '__self__'):
|
|
160
|
+
class_name = callable_obj.__self__.__class__.__name__
|
|
161
|
+
if module_path and class_name and func_name:
|
|
162
|
+
full_func_name = f"{module_path}.{class_name}.{func_name}"
|
|
163
|
+
elif module_path and func_name:
|
|
164
|
+
full_func_name = f"{module_path}.{func_name}"
|
|
139
165
|
|
|
140
166
|
queued_tasks.append({
|
|
141
167
|
'task_name': task_name,
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Slack notification for TaskQueue.
|
|
3
|
+
"""
|
|
4
|
+
import json
|
|
5
|
+
import logging
|
|
6
|
+
|
|
7
|
+
import requests
|
|
8
|
+
|
|
9
|
+
logger = logging.getLogger(__name__)
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class SlackbotManager:
|
|
13
|
+
|
|
14
|
+
@classmethod
|
|
15
|
+
def send_message(cls, message: str) -> None:
|
|
16
|
+
try:
|
|
17
|
+
from django.conf import settings
|
|
18
|
+
except ImportError:
|
|
19
|
+
return
|
|
20
|
+
|
|
21
|
+
if not getattr(settings, 'TASKQUEUE_SLACK_ENABLED', False):
|
|
22
|
+
return
|
|
23
|
+
|
|
24
|
+
hook_url = getattr(settings, 'TASKQUEUE_SLACK_HOOK_URL', None)
|
|
25
|
+
if not hook_url:
|
|
26
|
+
return
|
|
27
|
+
|
|
28
|
+
channel = getattr(
|
|
29
|
+
settings, 'TASKQUEUE_SLACK_CHANNEL_NAME', '#tech-automation')
|
|
30
|
+
username = getattr(
|
|
31
|
+
settings, 'TASKQUEUE_SLACK_USERNAME', 'TaskQueueBot')
|
|
32
|
+
icon_emoji = getattr(
|
|
33
|
+
settings, 'TASKQUEUE_SLACK_ICON_EMOJI', ':robot_face:')
|
|
34
|
+
|
|
35
|
+
is_staging = getattr(settings, 'IS_RUN_IN_STAGING_ENV', False)
|
|
36
|
+
if is_staging:
|
|
37
|
+
message = '[STAGING] ' + message
|
|
38
|
+
|
|
39
|
+
try:
|
|
40
|
+
requests.post(
|
|
41
|
+
hook_url,
|
|
42
|
+
data=json.dumps({
|
|
43
|
+
'channel': channel,
|
|
44
|
+
'username': username,
|
|
45
|
+
'text': message,
|
|
46
|
+
'icon_emoji': icon_emoji,
|
|
47
|
+
}),
|
|
48
|
+
headers={"Content-Type": "application/json"}
|
|
49
|
+
)
|
|
50
|
+
except Exception as e:
|
|
51
|
+
logger.exception('[TaskQueue Slack] Error: %s', str(e))
|
|
@@ -1,9 +1,11 @@
|
|
|
1
1
|
from datetime import datetime
|
|
2
2
|
from datetime import timedelta
|
|
3
|
+
from typing import Any
|
|
3
4
|
from unittest.mock import Mock
|
|
4
5
|
from unittest.mock import patch
|
|
5
6
|
|
|
6
7
|
import pytest
|
|
8
|
+
from taskqueue.cmanager import _build_callable_task_call
|
|
7
9
|
from taskqueue.cmanager import _build_dynamic_task_call
|
|
8
10
|
from taskqueue.cmanager import _is_class_method
|
|
9
11
|
from taskqueue.cmanager import _split_function_and_queue_kwargs
|
|
@@ -159,6 +161,149 @@ class TestSplitFunctionAndQueueKwargs:
|
|
|
159
161
|
assert queue_kwargs == {}
|
|
160
162
|
|
|
161
163
|
|
|
164
|
+
class TestParseEnqueueArgs:
|
|
165
|
+
|
|
166
|
+
def test__parse_enqueue_args_given_basic_enqueue_expect_correct_parsing(self):
|
|
167
|
+
cm = CManager()
|
|
168
|
+
args = (test_function, 1, 2, 3)
|
|
169
|
+
kwargs = {
|
|
170
|
+
'user_id': 123,
|
|
171
|
+
'data': 'test',
|
|
172
|
+
'channel': 'high',
|
|
173
|
+
'retry': {'max_retries': 5}
|
|
174
|
+
}
|
|
175
|
+
|
|
176
|
+
func, func_args, func_kwargs, queue_options = cm._parse_enqueue_args(
|
|
177
|
+
'enqueue', args, kwargs
|
|
178
|
+
)
|
|
179
|
+
|
|
180
|
+
assert func == test_function
|
|
181
|
+
assert func_args == (1, 2, 3)
|
|
182
|
+
assert func_kwargs == {'user_id': 123, 'data': 'test'}
|
|
183
|
+
assert queue_options == {
|
|
184
|
+
'channel': 'high',
|
|
185
|
+
'retry': {'max_retries': 5}
|
|
186
|
+
}
|
|
187
|
+
assert 'eta' not in queue_options
|
|
188
|
+
assert 'countdown' not in queue_options
|
|
189
|
+
|
|
190
|
+
def test__parse_enqueue_args_given_enqueue_at_expect_eta_in_queue_options(self):
|
|
191
|
+
cm = CManager()
|
|
192
|
+
eta = datetime(2025, 12, 31, 23, 59, 59)
|
|
193
|
+
args = (eta, test_function, 'arg1', 'arg2')
|
|
194
|
+
kwargs = {
|
|
195
|
+
'user_id': 456,
|
|
196
|
+
'channel': 'default'
|
|
197
|
+
}
|
|
198
|
+
|
|
199
|
+
func, func_args, func_kwargs, queue_options = cm._parse_enqueue_args(
|
|
200
|
+
'enqueue_at', args, kwargs
|
|
201
|
+
)
|
|
202
|
+
|
|
203
|
+
assert func == test_function
|
|
204
|
+
assert func_args == ('arg1', 'arg2')
|
|
205
|
+
assert func_kwargs == {'user_id': 456}
|
|
206
|
+
assert queue_options['eta'] == eta
|
|
207
|
+
assert queue_options['channel'] == 'default'
|
|
208
|
+
assert 'countdown' not in queue_options
|
|
209
|
+
|
|
210
|
+
def test__parse_enqueue_args_given_enqueue_in_expect_countdown_in_queue_options(self):
|
|
211
|
+
cm = CManager()
|
|
212
|
+
delta = timedelta(seconds=300)
|
|
213
|
+
args = (delta, test_function, 'arg1')
|
|
214
|
+
kwargs = {
|
|
215
|
+
'data': {'key': 'value'},
|
|
216
|
+
'retry': {'max_retries': 3}
|
|
217
|
+
}
|
|
218
|
+
|
|
219
|
+
func, func_args, func_kwargs, queue_options = cm._parse_enqueue_args(
|
|
220
|
+
'enqueue_in', args, kwargs
|
|
221
|
+
)
|
|
222
|
+
|
|
223
|
+
assert func == test_function
|
|
224
|
+
assert func_args == ('arg1',)
|
|
225
|
+
assert func_kwargs == {'data': {'key': 'value'}}
|
|
226
|
+
assert queue_options['countdown'] == 300
|
|
227
|
+
assert queue_options['retry'] == {'max_retries': 3}
|
|
228
|
+
assert 'eta' not in queue_options
|
|
229
|
+
|
|
230
|
+
def test__parse_enqueue_args_given_no_func_args_expect_empty_tuple(self):
|
|
231
|
+
cm = CManager()
|
|
232
|
+
args = (test_function,)
|
|
233
|
+
kwargs = {'user_id': 789}
|
|
234
|
+
|
|
235
|
+
func, func_args, func_kwargs, queue_options = cm._parse_enqueue_args(
|
|
236
|
+
'enqueue', args, kwargs
|
|
237
|
+
)
|
|
238
|
+
|
|
239
|
+
assert func == test_function
|
|
240
|
+
assert func_args == ()
|
|
241
|
+
assert func_kwargs == {'user_id': 789}
|
|
242
|
+
|
|
243
|
+
def test__parse_enqueue_args_given_no_kwargs_expect_empty_dicts(self):
|
|
244
|
+
cm = CManager()
|
|
245
|
+
args = (test_function, 1, 2)
|
|
246
|
+
kwargs = {}
|
|
247
|
+
|
|
248
|
+
func, func_args, func_kwargs, queue_options = cm._parse_enqueue_args(
|
|
249
|
+
'enqueue', args, kwargs
|
|
250
|
+
)
|
|
251
|
+
|
|
252
|
+
assert func == test_function
|
|
253
|
+
assert func_args == (1, 2)
|
|
254
|
+
assert func_kwargs == {}
|
|
255
|
+
assert queue_options == {}
|
|
256
|
+
|
|
257
|
+
def test__parse_enqueue_args_given_no_args_expect_value_error(self):
|
|
258
|
+
cm = CManager()
|
|
259
|
+
args = ()
|
|
260
|
+
kwargs = {}
|
|
261
|
+
|
|
262
|
+
with pytest.raises(ValueError, match="enqueue requires a callable as the first positional argument"):
|
|
263
|
+
cm._parse_enqueue_args('enqueue', args, kwargs)
|
|
264
|
+
|
|
265
|
+
def test__parse_enqueue_args_given_enqueue_at_insufficient_args_expect_value_error(self):
|
|
266
|
+
cm = CManager()
|
|
267
|
+
args = (datetime.now(),)
|
|
268
|
+
kwargs = {}
|
|
269
|
+
|
|
270
|
+
with pytest.raises(ValueError, match="enqueue_at requires \\(eta_datetime, func, \\*func_args\\)"):
|
|
271
|
+
cm._parse_enqueue_args('enqueue_at', args, kwargs)
|
|
272
|
+
|
|
273
|
+
def test__parse_enqueue_args_given_enqueue_in_insufficient_args_expect_value_error(self):
|
|
274
|
+
cm = CManager()
|
|
275
|
+
args = (timedelta(seconds=60),)
|
|
276
|
+
kwargs = {}
|
|
277
|
+
|
|
278
|
+
with pytest.raises(ValueError, match="enqueue_in requires \\(countdown_delta, func, \\*func_args\\)"):
|
|
279
|
+
cm._parse_enqueue_args('enqueue_in', args, kwargs)
|
|
280
|
+
|
|
281
|
+
def test__parse_enqueue_args_given_unknown_op_type_expect_value_error(self):
|
|
282
|
+
cm = CManager()
|
|
283
|
+
args = (test_function,)
|
|
284
|
+
kwargs = {}
|
|
285
|
+
|
|
286
|
+
with pytest.raises(ValueError, match="Unknown enqueue operation type: invalid_op"):
|
|
287
|
+
cm._parse_enqueue_args('invalid_op', args, kwargs)
|
|
288
|
+
|
|
289
|
+
def test__parse_enqueue_args_given_use_legacy_executor_expect_in_queue_options(self):
|
|
290
|
+
cm = CManager()
|
|
291
|
+
args = (test_function, 1, 2)
|
|
292
|
+
kwargs = {
|
|
293
|
+
'use_legacy_executor': False,
|
|
294
|
+
'user_id': 123
|
|
295
|
+
}
|
|
296
|
+
|
|
297
|
+
func, func_args, func_kwargs, queue_options = cm._parse_enqueue_args(
|
|
298
|
+
'enqueue', args, kwargs
|
|
299
|
+
)
|
|
300
|
+
|
|
301
|
+
assert func == test_function
|
|
302
|
+
assert func_args == (1, 2)
|
|
303
|
+
assert func_kwargs == {'user_id': 123}
|
|
304
|
+
assert queue_options['use_legacy_executor'] is False
|
|
305
|
+
|
|
306
|
+
|
|
162
307
|
class TestBuildDynamicTaskCall:
|
|
163
308
|
|
|
164
309
|
def test__build_dynamic_task_call_given_function_expect_function_executor_task(self):
|
|
@@ -357,12 +502,11 @@ class TestDynamicTaskExecutors:
|
|
|
357
502
|
|
|
358
503
|
from taskqueue.cmanager import dynamic_function_executor
|
|
359
504
|
|
|
360
|
-
# Mock the request object to simulate max retries reached
|
|
361
505
|
mock_self = Mock()
|
|
362
506
|
mock_self.request.retries = K_MAX_RETRY_COUNT
|
|
363
507
|
mock_self.max_retries = K_MAX_RETRY_COUNT
|
|
364
508
|
|
|
365
|
-
with pytest.raises(Exception):
|
|
509
|
+
with pytest.raises(Exception):
|
|
366
510
|
dynamic_function_executor(
|
|
367
511
|
mock_self, "invalid_module", "test_function",
|
|
368
512
|
retry={"max_retries": K_MAX_RETRY_COUNT}
|
|
@@ -542,3 +686,243 @@ class TestDynamicClassMethodExecutorWithInitArgs:
|
|
|
542
686
|
)
|
|
543
687
|
|
|
544
688
|
assert result is None
|
|
689
|
+
|
|
690
|
+
|
|
691
|
+
class TestBuildCallableTaskCall:
|
|
692
|
+
|
|
693
|
+
def test__build_callable_task_call_given_function_expect_callable_executor_task(self) -> None:
|
|
694
|
+
task_name, task_args, task_kwargs = _build_callable_task_call(
|
|
695
|
+
test_function, (1, 2), {'key': 'value'}
|
|
696
|
+
)
|
|
697
|
+
|
|
698
|
+
assert task_name == "taskqueue.cmanager.callable_executor"
|
|
699
|
+
assert task_args == []
|
|
700
|
+
assert task_kwargs == {
|
|
701
|
+
'callable_obj': test_function,
|
|
702
|
+
'args': [1, 2],
|
|
703
|
+
'kwargs': {'key': 'value'}
|
|
704
|
+
}
|
|
705
|
+
|
|
706
|
+
def test__build_callable_task_call_given_lambda_expect_callable_executor_task(self) -> None:
|
|
707
|
+
def lambda_func(x):
|
|
708
|
+
return x * 2
|
|
709
|
+
|
|
710
|
+
task_name, task_args, task_kwargs = _build_callable_task_call(
|
|
711
|
+
lambda_func, (5,), {}
|
|
712
|
+
)
|
|
713
|
+
|
|
714
|
+
assert task_name == "taskqueue.cmanager.callable_executor"
|
|
715
|
+
assert task_args == []
|
|
716
|
+
assert task_kwargs == {
|
|
717
|
+
'callable_obj': lambda_func,
|
|
718
|
+
'args': [5],
|
|
719
|
+
'kwargs': {}
|
|
720
|
+
}
|
|
721
|
+
|
|
722
|
+
def test__build_callable_task_call_given_bound_method_expect_callable_executor_task(self) -> None:
|
|
723
|
+
instance = SampleClass()
|
|
724
|
+
task_name, task_args, task_kwargs = _build_callable_task_call(
|
|
725
|
+
instance.test_method, (1, 2), {'key': 'value'}
|
|
726
|
+
)
|
|
727
|
+
|
|
728
|
+
assert task_name == "taskqueue.cmanager.callable_executor"
|
|
729
|
+
assert task_args == []
|
|
730
|
+
assert task_kwargs == {
|
|
731
|
+
'callable_obj': instance.test_method,
|
|
732
|
+
'args': [1, 2],
|
|
733
|
+
'kwargs': {'key': 'value'}
|
|
734
|
+
}
|
|
735
|
+
|
|
736
|
+
def test__build_callable_task_call_given_no_args_expect_empty_lists(self) -> None:
|
|
737
|
+
task_name, task_args, task_kwargs = _build_callable_task_call(
|
|
738
|
+
test_function, (), {}
|
|
739
|
+
)
|
|
740
|
+
|
|
741
|
+
assert task_name == "taskqueue.cmanager.callable_executor"
|
|
742
|
+
assert task_args == []
|
|
743
|
+
assert task_kwargs == {
|
|
744
|
+
'callable_obj': test_function,
|
|
745
|
+
'args': [],
|
|
746
|
+
'kwargs': {}
|
|
747
|
+
}
|
|
748
|
+
|
|
749
|
+
def test_callable_executor_given_function_expect_function_executed(self) -> None:
|
|
750
|
+
from taskqueue.cmanager import callable_executor
|
|
751
|
+
|
|
752
|
+
assert hasattr(callable_executor, 'delay')
|
|
753
|
+
assert hasattr(callable_executor, 'apply_async')
|
|
754
|
+
|
|
755
|
+
def test_callable_executor_given_simple_function_expect_success(self) -> None:
|
|
756
|
+
from taskqueue.cmanager import callable_executor
|
|
757
|
+
|
|
758
|
+
call_tracker = []
|
|
759
|
+
|
|
760
|
+
def simple_func(x, y):
|
|
761
|
+
call_tracker.append((x, y))
|
|
762
|
+
return x + y
|
|
763
|
+
|
|
764
|
+
result = callable_executor(
|
|
765
|
+
callable_obj=simple_func,
|
|
766
|
+
args=[3, 5],
|
|
767
|
+
kwargs={},
|
|
768
|
+
retry=None
|
|
769
|
+
)
|
|
770
|
+
|
|
771
|
+
assert result is None
|
|
772
|
+
assert call_tracker == [(3, 5)]
|
|
773
|
+
|
|
774
|
+
def test_callable_executor_given_function_with_kwargs_expect_success(self) -> None:
|
|
775
|
+
from taskqueue.cmanager import callable_executor
|
|
776
|
+
|
|
777
|
+
call_tracker: list[tuple[str, int]] = []
|
|
778
|
+
|
|
779
|
+
def func_with_kwargs(name: str, age: int = 0) -> None:
|
|
780
|
+
call_tracker.append((name, age))
|
|
781
|
+
|
|
782
|
+
result = callable_executor(
|
|
783
|
+
callable_obj=func_with_kwargs,
|
|
784
|
+
args=["Alice"],
|
|
785
|
+
kwargs={"age": 30},
|
|
786
|
+
retry=None
|
|
787
|
+
)
|
|
788
|
+
|
|
789
|
+
assert result is None
|
|
790
|
+
assert call_tracker == [("Alice", 30)]
|
|
791
|
+
|
|
792
|
+
def test_callable_executor_given_lambda_expect_success(self) -> None:
|
|
793
|
+
from taskqueue.cmanager import callable_executor
|
|
794
|
+
|
|
795
|
+
result_tracker: list[int] = []
|
|
796
|
+
|
|
797
|
+
def lambda_func(x, y):
|
|
798
|
+
return result_tracker.append(x * y)
|
|
799
|
+
|
|
800
|
+
result = callable_executor(
|
|
801
|
+
callable_obj=lambda_func,
|
|
802
|
+
args=[4, 5],
|
|
803
|
+
kwargs={},
|
|
804
|
+
retry=None
|
|
805
|
+
)
|
|
806
|
+
|
|
807
|
+
assert result is None
|
|
808
|
+
assert result_tracker == [20]
|
|
809
|
+
|
|
810
|
+
def test_callable_executor_given_bound_method_expect_success(self) -> None:
|
|
811
|
+
from taskqueue.cmanager import callable_executor
|
|
812
|
+
|
|
813
|
+
instance = SampleClassWithInit("TestUser", age=25)
|
|
814
|
+
|
|
815
|
+
result = callable_executor(
|
|
816
|
+
callable_obj=instance.process,
|
|
817
|
+
args=[],
|
|
818
|
+
kwargs={},
|
|
819
|
+
retry=None
|
|
820
|
+
)
|
|
821
|
+
|
|
822
|
+
assert result is None
|
|
823
|
+
|
|
824
|
+
def test_callable_executor_given_bound_method_with_args_expect_success(self) -> None:
|
|
825
|
+
from taskqueue.cmanager import callable_executor
|
|
826
|
+
|
|
827
|
+
instance = SampleClassWithInit("TestUser", age=25)
|
|
828
|
+
|
|
829
|
+
result = callable_executor(
|
|
830
|
+
callable_obj=instance.process_with_args,
|
|
831
|
+
args=["Hello"],
|
|
832
|
+
kwargs={},
|
|
833
|
+
retry=None
|
|
834
|
+
)
|
|
835
|
+
|
|
836
|
+
assert result is None
|
|
837
|
+
|
|
838
|
+
def test_callable_executor_given_none_args_expect_default_to_empty(self) -> None:
|
|
839
|
+
from taskqueue.cmanager import callable_executor
|
|
840
|
+
|
|
841
|
+
call_tracker: list[str] = []
|
|
842
|
+
|
|
843
|
+
def no_args_func() -> None:
|
|
844
|
+
call_tracker.append("called")
|
|
845
|
+
|
|
846
|
+
result = callable_executor(
|
|
847
|
+
callable_obj=no_args_func,
|
|
848
|
+
args=None,
|
|
849
|
+
kwargs=None,
|
|
850
|
+
retry=None
|
|
851
|
+
)
|
|
852
|
+
|
|
853
|
+
assert result is None
|
|
854
|
+
assert call_tracker == ["called"]
|
|
855
|
+
|
|
856
|
+
def test_callable_executor_given_callable_with_side_effects_expect_side_effects_executed(self) -> None:
|
|
857
|
+
from taskqueue.cmanager import callable_executor
|
|
858
|
+
|
|
859
|
+
side_effects: dict[str, int] = {"counter": 0}
|
|
860
|
+
|
|
861
|
+
def increment_counter(amount: int) -> None:
|
|
862
|
+
side_effects["counter"] += amount
|
|
863
|
+
|
|
864
|
+
callable_executor(
|
|
865
|
+
callable_obj=increment_counter,
|
|
866
|
+
args=[5],
|
|
867
|
+
kwargs={},
|
|
868
|
+
retry=None
|
|
869
|
+
)
|
|
870
|
+
|
|
871
|
+
assert side_effects["counter"] == 5
|
|
872
|
+
|
|
873
|
+
@patch('taskqueue.cmanager.logger')
|
|
874
|
+
@patch.object(CManager, '_send_task')
|
|
875
|
+
def test_cmanager_enqueue_given_use_legacy_executor_false_expect_callable_executor(self, mock_send_task: Any, mock_logger: Any) -> None:
|
|
876
|
+
cm = CManager()
|
|
877
|
+
cm.enqueue(test_function, 1, 2, key='value',
|
|
878
|
+
use_legacy_executor=False)
|
|
879
|
+
|
|
880
|
+
mock_send_task.assert_called_once()
|
|
881
|
+
call_args = mock_send_task.call_args
|
|
882
|
+
assert call_args[0][0] == "taskqueue.cmanager.callable_executor"
|
|
883
|
+
|
|
884
|
+
task_kwargs = call_args[0][2]
|
|
885
|
+
assert 'callable_obj' in task_kwargs
|
|
886
|
+
assert task_kwargs['callable_obj'] == test_function
|
|
887
|
+
assert task_kwargs['args'] == [1, 2]
|
|
888
|
+
assert task_kwargs['kwargs'] == {'key': 'value'}
|
|
889
|
+
|
|
890
|
+
@patch('taskqueue.cmanager.logger')
|
|
891
|
+
@patch.object(CManager, '_send_task')
|
|
892
|
+
def test_cmanager_enqueue_given_use_legacy_executor_true_expect_dynamic_executor(self, mock_send_task: Any, mock_logger: Any) -> None:
|
|
893
|
+
cm = CManager()
|
|
894
|
+
cm.enqueue(test_function, 1, 2, key='value',
|
|
895
|
+
use_legacy_executor=True)
|
|
896
|
+
|
|
897
|
+
mock_send_task.assert_called_once()
|
|
898
|
+
call_args = mock_send_task.call_args
|
|
899
|
+
assert call_args[0][0] == "taskqueue.cmanager.dynamic_function_executor"
|
|
900
|
+
|
|
901
|
+
task_kwargs = call_args[0][2]
|
|
902
|
+
assert 'module_path' in task_kwargs
|
|
903
|
+
assert 'function_name' in task_kwargs
|
|
904
|
+
|
|
905
|
+
@patch('taskqueue.cmanager.logger')
|
|
906
|
+
@patch.object(CManager, '_send_task')
|
|
907
|
+
def test_cmanager_enqueue_given_no_use_legacy_executor_expect_default_to_true(self, mock_send_task: Any, mock_logger: Any) -> None:
|
|
908
|
+
cm = CManager()
|
|
909
|
+
cm.enqueue(test_function, 1, 2)
|
|
910
|
+
|
|
911
|
+
mock_send_task.assert_called_once()
|
|
912
|
+
call_args = mock_send_task.call_args
|
|
913
|
+
assert call_args[0][0] == "taskqueue.cmanager.dynamic_function_executor"
|
|
914
|
+
|
|
915
|
+
@patch('taskqueue.cmanager.logger')
|
|
916
|
+
@patch.object(CManager, '_send_task')
|
|
917
|
+
def test_cmanager_enqueue_given_bound_method_with_callable_executor_expect_success(self, mock_send_task: Any, mock_logger: Any) -> None:
|
|
918
|
+
cm = CManager()
|
|
919
|
+
instance = SampleClassWithInit("Test", age=20)
|
|
920
|
+
|
|
921
|
+
cm.enqueue(instance.process, use_legacy_executor=False)
|
|
922
|
+
|
|
923
|
+
mock_send_task.assert_called_once()
|
|
924
|
+
call_args = mock_send_task.call_args
|
|
925
|
+
assert call_args[0][0] == "taskqueue.cmanager.callable_executor"
|
|
926
|
+
|
|
927
|
+
task_kwargs = call_args[0][2]
|
|
928
|
+
assert task_kwargs['callable_obj'] == instance.process
|
|
@@ -140,6 +140,86 @@ class TestHelperTest:
|
|
|
140
140
|
assert result[0]['kwargs']['class_name'] == 'MyClass'
|
|
141
141
|
assert result[0]['kwargs']['method_name'] == 'my_method'
|
|
142
142
|
|
|
143
|
+
@patch('taskqueue.libs.helper_test.current_app')
|
|
144
|
+
@patch('taskqueue.libs.helper_test.loads')
|
|
145
|
+
def test_get_queued_tasks_given_callable_executor_function_expect_correct_parsing(self, mock_loads, mock_current_app):
|
|
146
|
+
# Use a real function instead of mocking
|
|
147
|
+
def my_function():
|
|
148
|
+
pass
|
|
149
|
+
|
|
150
|
+
mock_message = MagicMock()
|
|
151
|
+
mock_message.headers = {
|
|
152
|
+
'task': 'taskqueue.cmanager.callable_executor'}
|
|
153
|
+
mock_message.body = b'mock_body'
|
|
154
|
+
mock_message.content_type = 'application/json'
|
|
155
|
+
mock_message.content_encoding = 'utf-8'
|
|
156
|
+
|
|
157
|
+
mock_loads.return_value = [[], {
|
|
158
|
+
'callable_obj': my_function,
|
|
159
|
+
'args': [],
|
|
160
|
+
'kwargs': {}
|
|
161
|
+
}]
|
|
162
|
+
|
|
163
|
+
mock_queue = MagicMock()
|
|
164
|
+
mock_queue.get.side_effect = [mock_message, None]
|
|
165
|
+
mock_current_app.amqp.queues = {
|
|
166
|
+
'default': MagicMock(return_value=mock_queue)}
|
|
167
|
+
|
|
168
|
+
mock_conn = MagicMock()
|
|
169
|
+
mock_chan = MagicMock()
|
|
170
|
+
mock_current_app.connection_for_read.return_value.__enter__.return_value = mock_conn
|
|
171
|
+
mock_conn.channel.return_value.__enter__.return_value = mock_chan
|
|
172
|
+
|
|
173
|
+
result = get_queued_tasks('default')
|
|
174
|
+
|
|
175
|
+
assert len(result) == 1
|
|
176
|
+
assert result[0]['task_name'] == 'taskqueue.cmanager.callable_executor'
|
|
177
|
+
assert result[0]['full_func_name'] == 'tests.test_helper_test_functions.my_function'
|
|
178
|
+
assert result[0]['args'] == []
|
|
179
|
+
assert result[0]['kwargs']['callable_obj'] == my_function
|
|
180
|
+
|
|
181
|
+
@patch('taskqueue.libs.helper_test.current_app')
|
|
182
|
+
@patch('taskqueue.libs.helper_test.loads')
|
|
183
|
+
def test_get_queued_tasks_given_callable_executor_method_expect_correct_parsing(self, mock_loads, mock_current_app):
|
|
184
|
+
# Use a real class and method instead of mocking
|
|
185
|
+
class MyClass:
|
|
186
|
+
def my_method(self):
|
|
187
|
+
pass
|
|
188
|
+
|
|
189
|
+
instance = MyClass()
|
|
190
|
+
bound_method = instance.my_method
|
|
191
|
+
|
|
192
|
+
mock_message = MagicMock()
|
|
193
|
+
mock_message.headers = {
|
|
194
|
+
'task': 'taskqueue.cmanager.callable_executor'}
|
|
195
|
+
mock_message.body = b'mock_body'
|
|
196
|
+
mock_message.content_type = 'application/json'
|
|
197
|
+
mock_message.content_encoding = 'utf-8'
|
|
198
|
+
|
|
199
|
+
mock_loads.return_value = [[], {
|
|
200
|
+
'callable_obj': bound_method,
|
|
201
|
+
'args': [],
|
|
202
|
+
'kwargs': {}
|
|
203
|
+
}]
|
|
204
|
+
|
|
205
|
+
mock_queue = MagicMock()
|
|
206
|
+
mock_queue.get.side_effect = [mock_message, None]
|
|
207
|
+
mock_current_app.amqp.queues = {
|
|
208
|
+
'default': MagicMock(return_value=mock_queue)}
|
|
209
|
+
|
|
210
|
+
mock_conn = MagicMock()
|
|
211
|
+
mock_chan = MagicMock()
|
|
212
|
+
mock_current_app.connection_for_read.return_value.__enter__.return_value = mock_conn
|
|
213
|
+
mock_conn.channel.return_value.__enter__.return_value = mock_chan
|
|
214
|
+
|
|
215
|
+
result = get_queued_tasks('default')
|
|
216
|
+
|
|
217
|
+
assert len(result) == 1
|
|
218
|
+
assert result[0]['task_name'] == 'taskqueue.cmanager.callable_executor'
|
|
219
|
+
assert result[0]['full_func_name'] == 'tests.test_helper_test_functions.MyClass.my_method'
|
|
220
|
+
assert result[0]['args'] == []
|
|
221
|
+
assert result[0]['kwargs']['callable_obj'] == bound_method
|
|
222
|
+
|
|
143
223
|
@patch('taskqueue.libs.helper_test.get_queued_tasks')
|
|
144
224
|
def test_is_task_in_queue_given_task_exists_expect_true(self, mock_get_queued_tasks):
|
|
145
225
|
mock_get_queued_tasks.return_value = [
|
|
@@ -88,7 +88,8 @@ class TestCeleryWorkerBurst:
|
|
|
88
88
|
mock_message.ack.assert_called_once()
|
|
89
89
|
mock_task.apply.assert_called_once_with(
|
|
90
90
|
args=[],
|
|
91
|
-
kwargs={'module_path': 'module.submodule',
|
|
91
|
+
kwargs={'module_path': 'module.submodule',
|
|
92
|
+
'function_name': 'test_function', 'args': [], 'kwargs': {}}
|
|
92
93
|
)
|
|
93
94
|
|
|
94
95
|
@patch('taskqueue.libs.helper_test.current_app')
|
|
@@ -130,7 +131,8 @@ class TestCeleryWorkerBurst:
|
|
|
130
131
|
mock_message.ack.assert_called_once()
|
|
131
132
|
mock_task.apply.assert_called_once_with(
|
|
132
133
|
args=[],
|
|
133
|
-
kwargs={'module_path': 'module.submodule', 'class_name': 'TestClass',
|
|
134
|
+
kwargs={'module_path': 'module.submodule', 'class_name': 'TestClass',
|
|
135
|
+
'method_name': 'test_method', 'args': [], 'kwargs': {}, 'init_args': [], 'init_kwargs': {}}
|
|
134
136
|
)
|
|
135
137
|
|
|
136
138
|
@patch('taskqueue.libs.helper_test.current_app')
|
|
@@ -292,3 +294,100 @@ class TestCeleryWorkerBurst:
|
|
|
292
294
|
"Failed to process task taskqueue.cmanager.dynamic_function_executor: Exception: Task execution failed"
|
|
293
295
|
)
|
|
294
296
|
mock_message.ack.assert_called_once()
|
|
297
|
+
|
|
298
|
+
@patch('taskqueue.libs.helper_test.current_app')
|
|
299
|
+
@patch('taskqueue.libs.helper_test.loads')
|
|
300
|
+
def test_celery_worker_burst_given_callable_executor_function_expect_execution(self, mock_loads, mock_current_app):
|
|
301
|
+
def test_function():
|
|
302
|
+
pass
|
|
303
|
+
|
|
304
|
+
mock_task = MagicMock()
|
|
305
|
+
mock_current_app.tasks = {
|
|
306
|
+
'taskqueue.cmanager.callable_executor': mock_task}
|
|
307
|
+
|
|
308
|
+
mock_message = MagicMock()
|
|
309
|
+
mock_message.headers = {
|
|
310
|
+
'task': 'taskqueue.cmanager.callable_executor'}
|
|
311
|
+
mock_message.body = b'mock_body'
|
|
312
|
+
mock_message.content_type = 'application/json'
|
|
313
|
+
mock_message.content_encoding = 'utf-8'
|
|
314
|
+
mock_message.acknowledged = False
|
|
315
|
+
|
|
316
|
+
mock_loads.return_value = [
|
|
317
|
+
[], {'callable_obj': test_function, 'args': [], 'kwargs': {}}]
|
|
318
|
+
|
|
319
|
+
mock_queue = MagicMock()
|
|
320
|
+
mock_queue.get.side_effect = [mock_message, None]
|
|
321
|
+
mock_queue_factory = MagicMock(return_value=mock_queue)
|
|
322
|
+
mock_current_app.amqp.queues = {'default': mock_queue_factory}
|
|
323
|
+
|
|
324
|
+
mock_conn = MagicMock()
|
|
325
|
+
mock_chan = MagicMock()
|
|
326
|
+
mock_current_app.connection_for_read.return_value.__enter__.return_value = mock_conn
|
|
327
|
+
mock_conn.channel.return_value.__enter__.return_value = mock_chan
|
|
328
|
+
|
|
329
|
+
with patch('taskqueue.libs.helper_test.logger') as mock_logger:
|
|
330
|
+
celery_worker_burst(
|
|
331
|
+
['tests.test_test_utils.test_function'])
|
|
332
|
+
|
|
333
|
+
mock_logger.info.assert_any_call(
|
|
334
|
+
"Executing task: tests.test_test_utils.test_function")
|
|
335
|
+
mock_logger.info.assert_any_call(
|
|
336
|
+
"Successfully executed task: tests.test_test_utils.test_function")
|
|
337
|
+
|
|
338
|
+
mock_message.ack.assert_called_once()
|
|
339
|
+
mock_task.apply.assert_called_once_with(
|
|
340
|
+
args=[],
|
|
341
|
+
kwargs={'callable_obj': test_function,
|
|
342
|
+
'args': [], 'kwargs': {}}
|
|
343
|
+
)
|
|
344
|
+
|
|
345
|
+
@patch('taskqueue.libs.helper_test.current_app')
|
|
346
|
+
@patch('taskqueue.libs.helper_test.loads')
|
|
347
|
+
def test_celery_worker_burst_given_callable_executor_method_expect_execution(self, mock_loads, mock_current_app):
|
|
348
|
+
class TestClass:
|
|
349
|
+
def test_method(self):
|
|
350
|
+
pass
|
|
351
|
+
|
|
352
|
+
instance = TestClass()
|
|
353
|
+
bound_method = instance.test_method
|
|
354
|
+
|
|
355
|
+
mock_task = MagicMock()
|
|
356
|
+
mock_current_app.tasks = {
|
|
357
|
+
'taskqueue.cmanager.callable_executor': mock_task}
|
|
358
|
+
|
|
359
|
+
mock_message = MagicMock()
|
|
360
|
+
mock_message.headers = {
|
|
361
|
+
'task': 'taskqueue.cmanager.callable_executor'}
|
|
362
|
+
mock_message.body = b'mock_body'
|
|
363
|
+
mock_message.content_type = 'application/json'
|
|
364
|
+
mock_message.content_encoding = 'utf-8'
|
|
365
|
+
mock_message.acknowledged = False
|
|
366
|
+
|
|
367
|
+
mock_loads.return_value = [
|
|
368
|
+
[], {'callable_obj': bound_method, 'args': [], 'kwargs': {}}]
|
|
369
|
+
|
|
370
|
+
mock_queue = MagicMock()
|
|
371
|
+
mock_queue.get.side_effect = [mock_message, None]
|
|
372
|
+
mock_queue_factory = MagicMock(return_value=mock_queue)
|
|
373
|
+
mock_current_app.amqp.queues = {'default': mock_queue_factory}
|
|
374
|
+
|
|
375
|
+
mock_conn = MagicMock()
|
|
376
|
+
mock_chan = MagicMock()
|
|
377
|
+
mock_current_app.connection_for_read.return_value.__enter__.return_value = mock_conn
|
|
378
|
+
mock_conn.channel.return_value.__enter__.return_value = mock_chan
|
|
379
|
+
|
|
380
|
+
with patch('taskqueue.libs.helper_test.logger') as mock_logger:
|
|
381
|
+
celery_worker_burst(
|
|
382
|
+
['tests.test_test_utils.TestClass.test_method'])
|
|
383
|
+
|
|
384
|
+
mock_logger.info.assert_any_call(
|
|
385
|
+
"Executing task: tests.test_test_utils.TestClass.test_method")
|
|
386
|
+
mock_logger.info.assert_any_call(
|
|
387
|
+
"Successfully executed task: tests.test_test_utils.TestClass.test_method")
|
|
388
|
+
|
|
389
|
+
mock_message.ack.assert_called_once()
|
|
390
|
+
mock_task.apply.assert_called_once_with(
|
|
391
|
+
args=[],
|
|
392
|
+
kwargs={'callable_obj': bound_method, 'args': [], 'kwargs': {}}
|
|
393
|
+
)
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|