p1-taskqueue 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of p1-taskqueue might be problematic. Click here for more details.
- p1_taskqueue-0.1.0/PKG-INFO +41 -0
- p1_taskqueue-0.1.0/README.md +13 -0
- p1_taskqueue-0.1.0/pyproject.toml +45 -0
- p1_taskqueue-0.1.0/setup.cfg +4 -0
- p1_taskqueue-0.1.0/src/p1_taskqueue.egg-info/PKG-INFO +41 -0
- p1_taskqueue-0.1.0/src/p1_taskqueue.egg-info/SOURCES.txt +13 -0
- p1_taskqueue-0.1.0/src/p1_taskqueue.egg-info/dependency_links.txt +1 -0
- p1_taskqueue-0.1.0/src/p1_taskqueue.egg-info/requires.txt +13 -0
- p1_taskqueue-0.1.0/src/p1_taskqueue.egg-info/top_level.txt +1 -0
- p1_taskqueue-0.1.0/src/taskqueue/__init__.py +12 -0
- p1_taskqueue-0.1.0/src/taskqueue/celery_app.py +92 -0
- p1_taskqueue-0.1.0/src/taskqueue/cmanager.py +252 -0
- p1_taskqueue-0.1.0/tests/test_celery_app.py +186 -0
- p1_taskqueue-0.1.0/tests/test_cmanager.py +308 -0
- p1_taskqueue-0.1.0/tests/test_test_utils.py +294 -0
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: p1-taskqueue
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: A Task Queue Wrapper for Dekoruma Backend
|
|
5
|
+
Author-email: Chalvin <engineering@dekoruma.com>
|
|
6
|
+
Project-URL: Homepage, https://github.com/Dekoruma/p1-taskqueue
|
|
7
|
+
Project-URL: Repository, https://github.com/Dekoruma/p1-taskqueue.git
|
|
8
|
+
Project-URL: Issues, https://github.com/Dekoruma/p1-taskqueue/issues
|
|
9
|
+
Classifier: Development Status :: 3 - Alpha
|
|
10
|
+
Classifier: Intended Audience :: Developers
|
|
11
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
12
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
13
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
14
|
+
Requires-Python: >=3.11
|
|
15
|
+
Description-Content-Type: text/markdown
|
|
16
|
+
Requires-Dist: celery>=5.4.0
|
|
17
|
+
Requires-Dist: flower>=2.0.1
|
|
18
|
+
Requires-Dist: redis>=6.2.0
|
|
19
|
+
Requires-Dist: kombu>=5.3.4
|
|
20
|
+
Requires-Dist: django>=4.0.0
|
|
21
|
+
Provides-Extra: dev
|
|
22
|
+
Requires-Dist: pytest>=7.0.0; extra == "dev"
|
|
23
|
+
Requires-Dist: pytest-cov>=4.0.0; extra == "dev"
|
|
24
|
+
Requires-Dist: black>=23.0.0; extra == "dev"
|
|
25
|
+
Requires-Dist: flake8>=6.0.0; extra == "dev"
|
|
26
|
+
Requires-Dist: mypy>=1.0.0; extra == "dev"
|
|
27
|
+
Requires-Dist: pre-commit>=3.0.0; extra == "dev"
|
|
28
|
+
|
|
29
|
+
# TaskQueue
|
|
30
|
+
|
|
31
|
+
A Task Queue Wrapper for Dekoruma Backend
|
|
32
|
+
|
|
33
|
+
## Description
|
|
34
|
+
|
|
35
|
+
TaskQueue is a Python package that provides a wrapper around Celery for task queue management. It includes automatic queue setup, Dead Letter Queue (DLQ) routing, and dynamic task execution capabilities.
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
## Deploy
|
|
39
|
+
- Push changes to main
|
|
40
|
+
- Create new TAG with version name (i.e. 0.1.3)
|
|
41
|
+
- Check status on the Actions page on Github
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
# TaskQueue
|
|
2
|
+
|
|
3
|
+
A Task Queue Wrapper for Dekoruma Backend
|
|
4
|
+
|
|
5
|
+
## Description
|
|
6
|
+
|
|
7
|
+
TaskQueue is a Python package that provides a wrapper around Celery for task queue management. It includes automatic queue setup, Dead Letter Queue (DLQ) routing, and dynamic task execution capabilities.
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
## Deploy
|
|
11
|
+
- Push changes to main
|
|
12
|
+
- Create new TAG with version name (i.e. 0.1.3)
|
|
13
|
+
- Check status on the Actions page on Github
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
[build-system]
|
|
2
|
+
requires = ["setuptools>=61.0", "wheel"]
|
|
3
|
+
build-backend = "setuptools.build_meta"
|
|
4
|
+
|
|
5
|
+
[project]
|
|
6
|
+
name = "p1-taskqueue"
|
|
7
|
+
version = "0.1.0"
|
|
8
|
+
description = "A Task Queue Wrapper for Dekoruma Backend"
|
|
9
|
+
authors = [
|
|
10
|
+
{name = "Chalvin", email = "engineering@dekoruma.com"}
|
|
11
|
+
]
|
|
12
|
+
readme = "README.md"
|
|
13
|
+
requires-python = ">=3.11"
|
|
14
|
+
classifiers = [
|
|
15
|
+
"Development Status :: 3 - Alpha",
|
|
16
|
+
"Intended Audience :: Developers",
|
|
17
|
+
"License :: OSI Approved :: MIT License",
|
|
18
|
+
"Programming Language :: Python :: 3.11",
|
|
19
|
+
"Programming Language :: Python :: 3.12",
|
|
20
|
+
]
|
|
21
|
+
dependencies = [
|
|
22
|
+
"celery>=5.4.0",
|
|
23
|
+
"flower>=2.0.1",
|
|
24
|
+
"redis>=6.2.0",
|
|
25
|
+
"kombu>=5.3.4",
|
|
26
|
+
"django>=4.0.0",
|
|
27
|
+
]
|
|
28
|
+
|
|
29
|
+
[project.optional-dependencies]
|
|
30
|
+
dev = [
|
|
31
|
+
"pytest>=7.0.0",
|
|
32
|
+
"pytest-cov>=4.0.0",
|
|
33
|
+
"black>=23.0.0",
|
|
34
|
+
"flake8>=6.0.0",
|
|
35
|
+
"mypy>=1.0.0",
|
|
36
|
+
"pre-commit>=3.0.0",
|
|
37
|
+
]
|
|
38
|
+
|
|
39
|
+
[project.urls]
|
|
40
|
+
Homepage = "https://github.com/Dekoruma/p1-taskqueue"
|
|
41
|
+
Repository = "https://github.com/Dekoruma/p1-taskqueue.git"
|
|
42
|
+
Issues = "https://github.com/Dekoruma/p1-taskqueue/issues"
|
|
43
|
+
|
|
44
|
+
[tool.setuptools.packages.find]
|
|
45
|
+
where = ["src"]
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: p1-taskqueue
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: A Task Queue Wrapper for Dekoruma Backend
|
|
5
|
+
Author-email: Chalvin <engineering@dekoruma.com>
|
|
6
|
+
Project-URL: Homepage, https://github.com/Dekoruma/p1-taskqueue
|
|
7
|
+
Project-URL: Repository, https://github.com/Dekoruma/p1-taskqueue.git
|
|
8
|
+
Project-URL: Issues, https://github.com/Dekoruma/p1-taskqueue/issues
|
|
9
|
+
Classifier: Development Status :: 3 - Alpha
|
|
10
|
+
Classifier: Intended Audience :: Developers
|
|
11
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
12
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
13
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
14
|
+
Requires-Python: >=3.11
|
|
15
|
+
Description-Content-Type: text/markdown
|
|
16
|
+
Requires-Dist: celery>=5.4.0
|
|
17
|
+
Requires-Dist: flower>=2.0.1
|
|
18
|
+
Requires-Dist: redis>=6.2.0
|
|
19
|
+
Requires-Dist: kombu>=5.3.4
|
|
20
|
+
Requires-Dist: django>=4.0.0
|
|
21
|
+
Provides-Extra: dev
|
|
22
|
+
Requires-Dist: pytest>=7.0.0; extra == "dev"
|
|
23
|
+
Requires-Dist: pytest-cov>=4.0.0; extra == "dev"
|
|
24
|
+
Requires-Dist: black>=23.0.0; extra == "dev"
|
|
25
|
+
Requires-Dist: flake8>=6.0.0; extra == "dev"
|
|
26
|
+
Requires-Dist: mypy>=1.0.0; extra == "dev"
|
|
27
|
+
Requires-Dist: pre-commit>=3.0.0; extra == "dev"
|
|
28
|
+
|
|
29
|
+
# TaskQueue
|
|
30
|
+
|
|
31
|
+
A Task Queue Wrapper for Dekoruma Backend
|
|
32
|
+
|
|
33
|
+
## Description
|
|
34
|
+
|
|
35
|
+
TaskQueue is a Python package that provides a wrapper around Celery for task queue management. It includes automatic queue setup, Dead Letter Queue (DLQ) routing, and dynamic task execution capabilities.
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
## Deploy
|
|
39
|
+
- Push changes to main
|
|
40
|
+
- Create new TAG with version name (i.e. 0.1.3)
|
|
41
|
+
- Check status on the Actions page on Github
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
README.md
|
|
2
|
+
pyproject.toml
|
|
3
|
+
src/p1_taskqueue.egg-info/PKG-INFO
|
|
4
|
+
src/p1_taskqueue.egg-info/SOURCES.txt
|
|
5
|
+
src/p1_taskqueue.egg-info/dependency_links.txt
|
|
6
|
+
src/p1_taskqueue.egg-info/requires.txt
|
|
7
|
+
src/p1_taskqueue.egg-info/top_level.txt
|
|
8
|
+
src/taskqueue/__init__.py
|
|
9
|
+
src/taskqueue/celery_app.py
|
|
10
|
+
src/taskqueue/cmanager.py
|
|
11
|
+
tests/test_celery_app.py
|
|
12
|
+
tests/test_cmanager.py
|
|
13
|
+
tests/test_test_utils.py
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
taskqueue
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
"""
|
|
2
|
+
TaskQueue - A Task Queue Wrapper for Dekoruma Backend.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
__version__ = "0.1.0"
|
|
6
|
+
__author__ = "Chalvin"
|
|
7
|
+
__email__ = "engineering@dekoruma.com"
|
|
8
|
+
|
|
9
|
+
from .cmanager import cm
|
|
10
|
+
from .celery_app import celery_app
|
|
11
|
+
|
|
12
|
+
__all__ = ["cm", "celery_app"]
|
|
@@ -0,0 +1,92 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Celery application setup for TaskQueue.
|
|
3
|
+
Reads configuration from Django settings and auto-configures queues with DLQ.
|
|
4
|
+
"""
|
|
5
|
+
from celery import Celery
|
|
6
|
+
from kombu import Exchange
|
|
7
|
+
from kombu import Queue
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
def get_django_settings():
    """Return the Django settings object, failing fast when Django is absent.

    Returns:
        django.conf.LazySettings: the (possibly not-yet-configured) settings.

    Raises:
        ImportError: when Django itself cannot be imported; the original
            import error is chained so the real cause stays visible.
    """
    try:
        from django.conf import settings
    except ImportError as exc:
        # Chain the underlying failure instead of swallowing it — a missing
        # transitive dependency would otherwise be indistinguishable from
        # Django simply not being installed.
        raise ImportError("[TaskQueue] Django settings not found.") from exc
    return settings
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def create_celery_app():
    """Create and configure the Celery application from Django settings.

    Reads CELERY_* / TASKQUEUE_* values (with defaults), wires the queue and
    dead-letter-queue topology via setup_queues(), applies the combined
    config, and autodiscovers tasks under the 'taskqueue' package.

    Returns:
        Celery: the configured application instance.
    """
    settings = get_django_settings()

    app_name = getattr(settings, 'TASKQUEUE_APP_NAME', 'taskqueue')
    app = Celery(app_name)

    celery_config = {
        'broker_url': getattr(settings, 'CELERY_BROKER_URL', 'amqp://localhost:5672//'),
        'result_backend': getattr(settings, 'CELERY_RESULT_BACKEND', 'rpc://localhost:5672//'),
        # NOTE: pickle serialization accepts arbitrary objects but is unsafe
        # against untrusted message producers; keep the broker private.
        'task_serializer': getattr(settings, 'CELERY_TASK_SERIALIZER', 'pickle'),
        'result_serializer': getattr(settings, 'CELERY_RESULT_SERIALIZER', 'pickle'),
        'accept_content': getattr(settings, 'CELERY_ACCEPT_CONTENT', ['pickle']),
        # Fix: the previous default 'UTC+7' is not a valid IANA zone name and
        # is rejected by Celery's timezone handling; 'Asia/Jakarta' is the
        # equivalent UTC+7 zone. Explicit CELERY_TIMEZONE still wins.
        'timezone': getattr(settings, 'CELERY_TIMEZONE', 'Asia/Jakarta'),
        'task_track_started': getattr(settings, 'CELERY_TASK_TRACK_STARTED', True),
        'task_time_limit': getattr(settings, 'CELERY_TASK_TIME_LIMIT', 30 * 60),
        'task_soft_time_limit': getattr(settings, 'CELERY_TASK_SOFT_TIME_LIMIT', 25 * 60),
        'task_always_eager': getattr(settings, 'CELERY_TASK_ALWAYS_EAGER', False),
        'task_eager_propagates': getattr(settings, 'CELERY_TASK_EAGER_PROPAGATES', True),
        # Reliability posture: ack only after the task ran, re-deliver work
        # lost to a dying worker, and keep prefetch small for fair dispatch.
        'task_acks_late': True,
        'task_reject_on_worker_lost': True,
        'worker_prefetch_multiplier': 1,
        'worker_max_tasks_per_child': 1000,
    }

    # setup_queues mutates celery_config in place before it is applied.
    setup_queues(app, settings, celery_config)
    app.conf.update(celery_config)
    app.autodiscover_tasks(['taskqueue'])

    return app
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
def setup_queues(app, settings, celery_config):
    """Populate *celery_config* with the queue / dead-letter-queue topology.

    Each worker queue is bound to a direct exchange named after the app and
    carries x-dead-letter-* arguments pointing at a companion DLQ on the
    '<app>.dlx' exchange. Mutates *celery_config* in place.
    """
    app_name = getattr(settings, 'TASKQUEUE_APP_NAME', 'taskqueue')
    queue_names = getattr(settings, 'TASKQUEUE_QUEUES',
                          ['default', 'high', 'low'])
    # An explicit None in settings also falls back to the default set.
    if queue_names is None:
        queue_names = ['default', 'high', 'low']
    dlq_name_prefix = getattr(settings, 'TASKQUEUE_DLQ_NAME_PREFIX', 'dlq')

    primary_exchange = Exchange(app_name, type='direct')
    dead_letter_exchange = Exchange(f'{app_name}.dlx', type='direct')

    # Worker queues first, each dead-lettering into its companion DLQ.
    primary_queues = [
        Queue(
            name,
            primary_exchange,
            routing_key=name,
            queue_arguments={
                'x-dead-letter-exchange': f'{app_name}.dlx',
                'x-dead-letter-routing-key': f'{dlq_name_prefix}.{name}',
            },
        )
        for name in queue_names
    ]

    # Then the DLQs themselves, bound to the dead-letter exchange.
    dead_letter_queues = [
        Queue(f'{dlq_name_prefix}.{name}', dead_letter_exchange,
              routing_key=f'{dlq_name_prefix}.{name}')
        for name in queue_names
    ]

    celery_config.update({
        'task_default_queue': 'default',
        'task_default_exchange': app_name,
        'task_default_exchange_type': 'direct',
        'task_queues': tuple(primary_queues + dead_letter_queues),
    })
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
# Module-level singleton created at import time; importing this module
# therefore requires Django settings to be importable.
celery_app = create_celery_app()
|
|
@@ -0,0 +1,252 @@
|
|
|
1
|
+
import importlib
|
|
2
|
+
import inspect
|
|
3
|
+
import logging
|
|
4
|
+
from datetime import datetime
|
|
5
|
+
from datetime import timedelta
|
|
6
|
+
from typing import Any
|
|
7
|
+
from typing import Dict
|
|
8
|
+
from typing import Tuple
|
|
9
|
+
|
|
10
|
+
from celery import shared_task
|
|
11
|
+
from celery.exceptions import Reject
|
|
12
|
+
|
|
13
|
+
# Module logger; handlers/levels are inherited from the host app's config.
logger = logging.getLogger(__name__)

# Enqueue operation type constants (values double as log labels).
K_ENQUEUE_OP_TYPE_ENQUEUE = 'enqueue'
K_ENQUEUE_OP_TYPE_ENQUEUE_AT = 'enqueue_at'
K_ENQUEUE_OP_TYPE_ENQUEUE_IN = 'enqueue_in'

# Default retry policy used when the caller does not pass `retry=...`.
K_MAX_RETRY_COUNT = 3
K_DEFAULT_RETRY_COUNTDOWN = 10
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def _is_class_method(func: Any) -> bool:
|
|
26
|
+
return inspect.ismethod(func) or (
|
|
27
|
+
hasattr(func, "__self__") and getattr(func, "__self__") is not None
|
|
28
|
+
)
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
def _split_function_and_queue_kwargs(kwargs: Dict[str, Any]) -> Tuple[Dict[str, Any], Dict[str, Any]]:
|
|
32
|
+
# To prevent confusion whether a kwargs is for function or queue kwargs(i.e celery options and on_commit),
|
|
33
|
+
# ignore confusing kwargs while give warning
|
|
34
|
+
supported_queue_keys = {"channel", "retry", "on_commit", "job_timeout"}
|
|
35
|
+
ignored_non_function_keys = {
|
|
36
|
+
"queue", "countdown", "eta", "expires", "priority", "task_id", "routing_key",
|
|
37
|
+
"serializer", "compression", "headers", "link", "link_error", "retry_policy",
|
|
38
|
+
"shadow", "time_limit", "soft_time_limit", "reply_to", "group_id", "chord", "chain",
|
|
39
|
+
"result_ttl", "failure_ttl", "ttl", "depends_on", "at_front", "meta", "retry_count",
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
queue_kwargs: Dict[str, Any] = {}
|
|
43
|
+
func_kwargs: Dict[str, Any] = {}
|
|
44
|
+
|
|
45
|
+
for key, value in kwargs.items():
|
|
46
|
+
if key in supported_queue_keys:
|
|
47
|
+
queue_kwargs[key] = value
|
|
48
|
+
elif key in ignored_non_function_keys:
|
|
49
|
+
logger.warning(
|
|
50
|
+
f"[CManager] Unsupported celery args detected: {key}. Ignored.")
|
|
51
|
+
continue
|
|
52
|
+
else:
|
|
53
|
+
func_kwargs[key] = value
|
|
54
|
+
|
|
55
|
+
return func_kwargs, queue_kwargs
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def _build_dynamic_task_call(func: Any, *args: Any, **func_kwargs: Any) -> Tuple[str, list, dict]:
    """Map *func* onto one of the dynamic executor tasks and its payload.

    Bound methods are serialized as (module, class, method) so the worker can
    re-import the class and re-bind; plain callables as (module, name).

    Returns:
        tuple: ``(task_name, task_args, task_kwargs)`` ready for send_task.

    Raises:
        ValueError: for callables lacking ``__module__``/``__name__``
            (e.g. partials or arbitrary callable objects).
    """
    if _is_class_method(func):
        owner = func.__self__.__class__
        payload = [owner.__module__, owner.__name__,
                   func.__name__, list(args), dict(func_kwargs)]
        return "taskqueue.cmanager.dynamic_class_method_executor", payload, {}

    module_path = getattr(func, "__module__", None)
    function_name = getattr(func, "__name__", None)
    if not (module_path and function_name):
        raise ValueError(
            "Unsupported callable type for Celery enqueue. Provide a module-level function or a class method.")

    payload = [module_path, function_name, list(args), dict(func_kwargs)]
    return "taskqueue.cmanager.dynamic_function_executor", payload, {}
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
class CManager:
    """Thin enqueue facade over Celery's send_task.

    Callables are serialized by import path (see _build_dynamic_task_call)
    and executed by the dynamic_*_executor shared tasks, so arbitrary
    module-level functions and bound methods can be queued without
    registering per-function Celery tasks.
    """

    def __init__(self) -> None:
        pass

    def _get_celery_app(self):
        """Get the auto-configured Celery app instance."""
        # Use taskqueue's built-in auto-configured celery app
        from .celery_app import celery_app
        return celery_app

    def enqueue(self, *args: Any, **kwargs: Any) -> None:
        """Enqueue immediately: enqueue(func, *func_args, **kwargs)."""
        self._enqueue_op(
            *args, enqueue_op_type=K_ENQUEUE_OP_TYPE_ENQUEUE, **kwargs)

    def enqueue_at(self, *args: Any, **kwargs: Any) -> None:
        """Enqueue for a fixed time: enqueue_at(eta_datetime, func, *func_args)."""
        self._enqueue_op(
            *args, enqueue_op_type=K_ENQUEUE_OP_TYPE_ENQUEUE_AT, **kwargs)

    def enqueue_in(self, *args: Any, **kwargs: Any) -> None:
        """Enqueue after a delay: enqueue_in(countdown_timedelta, func, *func_args)."""
        self._enqueue_op(
            *args, enqueue_op_type=K_ENQUEUE_OP_TYPE_ENQUEUE_IN, **kwargs)

    def _enqueue_op(self, *args: Any, **kwargs: Any) -> None:
        """Dispatch either immediately or after the current DB transaction.

        When `on_commit=True` is passed, submission is deferred via
        django.db.transaction.on_commit so tasks never run against
        uncommitted data.
        """
        on_commit = kwargs.pop('on_commit', False)
        if on_commit:
            try:
                from django.db import transaction
                # Late binding: the lambda captures args/kwargs and submits
                # only once the surrounding transaction commits.
                transaction.on_commit(
                    lambda: self._enqueue_op_base(*args, **kwargs))
            except ImportError:
                raise RuntimeError(
                    "Django is not installed. Please install Django to use on_commit.")
        else:
            self._enqueue_op_base(*args, **kwargs)

    def _enqueue_op_base(self, *args: Any, **kwargs: Any) -> None:
        """Validate positional layout per op type, then build and send the task.

        Raises:
            ValueError: on a missing callable / scheduling argument or an
                unknown op type. Any failure is logged before re-raising.
        """
        enqueue_op_type = kwargs.pop(
            'enqueue_op_type', K_ENQUEUE_OP_TYPE_ENQUEUE)

        try:
            if enqueue_op_type == K_ENQUEUE_OP_TYPE_ENQUEUE:
                if not args:
                    raise ValueError(
                        "enqueue requires a callable as the first positional argument")
                func = args[0]
                func_args = args[1:]

            elif enqueue_op_type == K_ENQUEUE_OP_TYPE_ENQUEUE_AT:
                if len(args) < 2:
                    raise ValueError(
                        "enqueue_at requires (eta_datetime, func, *func_args)")
                eta: datetime = args[0]
                func = args[1]
                func_args = args[2:]

            elif enqueue_op_type == K_ENQUEUE_OP_TYPE_ENQUEUE_IN:
                if len(args) < 2:
                    raise ValueError(
                        "enqueue_in requires (countdown_delta, func, *func_args)")
                delta: timedelta = args[0]
                func = args[1]
                func_args = args[2:]
            else:
                raise ValueError(
                    f"Unknown enqueue operation type: {enqueue_op_type}")

            func_kwargs, queue_kwargs = _split_function_and_queue_kwargs(
                kwargs)

            # Translate the scheduling argument into Celery's eta/countdown.
            if enqueue_op_type == K_ENQUEUE_OP_TYPE_ENQUEUE_AT:
                queue_kwargs = dict(queue_kwargs)
                queue_kwargs["eta"] = eta
            elif enqueue_op_type == K_ENQUEUE_OP_TYPE_ENQUEUE_IN:
                queue_kwargs = dict(queue_kwargs)
                # Celery countdown is seconds; sub-second precision is dropped.
                queue_kwargs["countdown"] = int(delta.total_seconds())

            task_name, task_args, task_kwargs = _build_dynamic_task_call(
                func, *func_args, **func_kwargs)

            self._send_task(task_name, task_args, task_kwargs, queue_kwargs)

            logger.info('[_enqueue_op_base %s] Submit Celery Task SUCCESS, task_name: %s args: %s, kwargs: %s' % (
                enqueue_op_type, task_name, task_args, task_kwargs))

        except Exception as e:
            logger.exception('[_enqueue_op_base %s] Submit Celery Task FAILED, error: %s, args: %s, kwargs: %s' % (
                enqueue_op_type, str(e), args, kwargs))
            raise e

    def _send_task(self, task_name: str, task_args: list, task_kwargs: dict, queue_kwargs: Dict[str, Any]) -> None:
        """Translate wrapper options to send_task options and submit.

        `channel` maps to the Celery queue name, `job_timeout` to time_limit;
        the retry policy rides along inside the task kwargs where the
        dynamic executors read it.
        """
        celery_app = self._get_celery_app()

        # NOTE: pops mutate the caller's dict — callers pass a throwaway copy.
        queue_name = queue_kwargs.pop("channel", None)
        job_timeout = queue_kwargs.pop("job_timeout", None)
        retry_policy = queue_kwargs.pop("retry", None)

        send_opts: Dict[str, Any] = {}
        if queue_name:
            send_opts["queue"] = queue_name
        if job_timeout is not None:
            send_opts["time_limit"] = job_timeout
        if "countdown" in queue_kwargs:
            send_opts["countdown"] = queue_kwargs["countdown"]
        if "eta" in queue_kwargs:
            send_opts["eta"] = queue_kwargs["eta"]

        # The retry policy is delivered as a task kwarg (not a Celery option)
        # because the dynamic executors implement retries themselves.
        task_kwargs_with_retry = dict(task_kwargs)
        if retry_policy is None:
            task_kwargs_with_retry["retry"] = {
                "max_retries": K_MAX_RETRY_COUNT, "countdown": K_DEFAULT_RETRY_COUNTDOWN}
        else:
            task_kwargs_with_retry["retry"] = retry_policy

        celery_app.send_task(task_name, args=task_args,
                             kwargs=task_kwargs_with_retry, **send_opts)
|
|
199
|
+
|
|
200
|
+
|
|
201
|
+
# Module-level singleton; client code imports it as `from taskqueue import cm`.
cm = CManager()


# Dynamic task executors - handle function and class method execution
|
|
205
|
+
@shared_task(bind=True, max_retries=K_MAX_RETRY_COUNT)
def dynamic_function_executor(self, module_path, function_name, args=None, kwargs=None, retry=None):
    """Import `module_path` and call `function_name(*args, **kwargs)`.

    `retry` may be a dict whose 'max_retries' / 'countdown' keys override the
    module defaults. When retries are exhausted the message is rejected
    without requeue; with the DLX arguments set up in celery_app the broker
    then dead-letters it.
    """
    try:
        module = importlib.import_module(module_path)
        function = getattr(module, function_name)
        args = args or []
        kwargs = kwargs or {}
        return function(*args, **kwargs)
    except Exception as e:
        # retries counts previous attempts of this delivery; `or 0` guards a
        # missing/None value on the request.
        current_retries = getattr(self.request, 'retries', 0) or 0
        max_retries = self.max_retries or K_MAX_RETRY_COUNT
        if isinstance(retry, dict) and 'max_retries' in retry:
            max_retries = retry['max_retries']

        if current_retries >= max_retries:
            # Give up: reject without requeue instead of raising Retry again.
            raise Reject(str(e), requeue=False)

        countdown = K_DEFAULT_RETRY_COUNTDOWN
        if isinstance(retry, dict) and 'countdown' in retry:
            countdown = retry['countdown']

        # self.retry raises celery.exceptions.Retry; `raise` keeps that explicit.
        raise self.retry(exc=e, countdown=countdown, max_retries=max_retries)
|
|
227
|
+
|
|
228
|
+
|
|
229
|
+
@shared_task(bind=True, max_retries=K_MAX_RETRY_COUNT)
def dynamic_class_method_executor(self, module_path, class_name, method_name, args=None, kwargs=None, retry=None):
    """Instantiate `module_path.class_name` and call its `method_name`.

    The class is constructed with no arguments, so only classes with a
    zero-arg constructor are supported. Retry/rejection behavior mirrors
    dynamic_function_executor: `retry` dict keys 'max_retries'/'countdown'
    override defaults; exhausted retries reject without requeue.
    """
    try:
        module = importlib.import_module(module_path)
        class_obj = getattr(module, class_name)
        # A fresh instance per execution — no state survives between runs.
        instance = class_obj()
        method = getattr(instance, method_name)
        args = args or []
        kwargs = kwargs or {}
        return method(*args, **kwargs)
    except Exception as e:
        # retries counts previous attempts of this delivery; `or 0` guards a
        # missing/None value on the request.
        current_retries = getattr(self.request, 'retries', 0) or 0
        max_retries = self.max_retries or K_MAX_RETRY_COUNT
        if isinstance(retry, dict) and 'max_retries' in retry:
            max_retries = retry['max_retries']

        if current_retries >= max_retries:
            # Give up: reject without requeue instead of raising Retry again.
            raise Reject(str(e), requeue=False)

        countdown = K_DEFAULT_RETRY_COUNTDOWN
        if isinstance(retry, dict) and 'countdown' in retry:
            countdown = retry['countdown']

        raise self.retry(exc=e, countdown=countdown, max_retries=max_retries)
|
|
@@ -0,0 +1,186 @@
|
|
|
1
|
+
"""Tests for the celery_app module."""
|
|
2
|
+
from unittest.mock import MagicMock
|
|
3
|
+
from unittest.mock import patch
|
|
4
|
+
|
|
5
|
+
import pytest
|
|
6
|
+
from taskqueue.celery_app import create_celery_app
|
|
7
|
+
from taskqueue.celery_app import get_django_settings
|
|
8
|
+
from taskqueue.celery_app import setup_queues
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class TestGetDjangoSettings:
    """Tests for get_django_settings()."""

    def test_get_django_settings_given_django_available_expect_return_settings(self):
        # Patching django.conf.settings swaps the lazy settings object for a mock.
        with patch('django.conf.settings') as mock_settings:
            result = get_django_settings()
            assert result == mock_settings

    def test_get_django_settings_given_django_not_available_expect_raise_import_error(self):
        # Forcing __import__ to fail simulates Django being uninstalled.
        with patch('builtins.__import__', side_effect=ImportError("No module named 'django'")):
            with pytest.raises(ImportError, match="\\[TaskQueue\\] Django settings not found\\."):
                get_django_settings()
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class TestCreateCeleryApp:
    """Tests for create_celery_app() with its collaborators patched out."""

    # Decorators apply bottom-up: Celery -> get_django_settings -> setup_queues.
    @patch('taskqueue.celery_app.setup_queues')
    @patch('taskqueue.celery_app.get_django_settings')
    @patch('taskqueue.celery_app.Celery')
    def test_create_celery_app_given_valid_settings_expect_celery_app_created(
        self, mock_celery_class, mock_get_settings, mock_setup_queues
    ):
        mock_settings = MagicMock()
        mock_settings.TASKQUEUE_APP_NAME = 'testapp'
        mock_settings.CELERY_BROKER_URL = 'redis://localhost:6379/0'
        mock_settings.CELERY_RESULT_BACKEND = 'redis://localhost:6379/0'
        mock_settings.CELERY_TASK_SERIALIZER = 'pickle'
        mock_settings.CELERY_RESULT_SERIALIZER = 'pickle'
        mock_settings.CELERY_ACCEPT_CONTENT = ['pickle']
        mock_settings.CELERY_TIMEZONE = 'UTC'
        mock_settings.CELERY_TASK_TRACK_STARTED = True
        mock_settings.CELERY_TASK_TIME_LIMIT = 1800
        mock_settings.CELERY_TASK_SOFT_TIME_LIMIT = 1500
        mock_settings.CELERY_TASK_ALWAYS_EAGER = False
        mock_settings.CELERY_TASK_EAGER_PROPAGATES = True

        mock_get_settings.return_value = mock_settings
        mock_app = MagicMock()
        mock_celery_class.return_value = mock_app

        result = create_celery_app()

        mock_celery_class.assert_called_once_with('testapp')
        mock_setup_queues.assert_called_once()
        mock_app.conf.update.assert_called_once()
        mock_app.autodiscover_tasks.assert_called_once_with(['taskqueue'])
        assert result == mock_app

    @patch('taskqueue.celery_app.setup_queues')
    @patch('taskqueue.celery_app.get_django_settings')
    @patch('taskqueue.celery_app.Celery')
    def test_create_celery_app_given_missing_settings_expect_defaults_used(
        self, mock_celery_class, mock_get_settings, mock_setup_queues
    ):
        # Deleting the attributes makes getattr(settings, ..., default) fall back.
        mock_settings = MagicMock()
        del mock_settings.TASKQUEUE_APP_NAME
        del mock_settings.CELERY_BROKER_URL
        del mock_settings.CELERY_RESULT_BACKEND
        del mock_settings.CELERY_TASK_SERIALIZER
        del mock_settings.CELERY_RESULT_SERIALIZER
        del mock_settings.CELERY_ACCEPT_CONTENT
        del mock_settings.CELERY_TIMEZONE
        del mock_settings.CELERY_TASK_TRACK_STARTED
        del mock_settings.CELERY_TASK_TIME_LIMIT
        del mock_settings.CELERY_TASK_SOFT_TIME_LIMIT
        del mock_settings.CELERY_TASK_ALWAYS_EAGER
        del mock_settings.CELERY_TASK_EAGER_PROPAGATES

        mock_get_settings.return_value = mock_settings
        mock_app = MagicMock()
        mock_celery_class.return_value = mock_app

        result = create_celery_app()

        mock_celery_class.assert_called_once_with('taskqueue')
        assert result == mock_app
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
class TestSetupQueues:
    """Test setup_queues function."""

    def test_setup_queues_given_valid_settings_expect_queues_configured(self):
        """Test that setup_queues configures queues and DLQs correctly."""
        mock_app = MagicMock()
        mock_settings = MagicMock()
        mock_settings.TASKQUEUE_APP_NAME = 'testapp'
        mock_settings.TASKQUEUE_QUEUES = ['default', 'high', 'low']
        mock_settings.TASKQUEUE_DLQ_NAME_PREFIX = 'dlq'

        celery_config = {}

        setup_queues(mock_app, mock_settings, celery_config)

        # Assertions
        assert celery_config['task_default_queue'] == 'default'
        assert celery_config['task_default_exchange'] == 'testapp'
        assert celery_config['task_default_exchange_type'] == 'direct'
        # 3 worker queues + 3 companion DLQs.
        assert len(celery_config['task_queues']) == 6

        main_queues = [q for q in celery_config['task_queues']
                       if not q.name.startswith('dlq.')]
        assert len(main_queues) == 3
        assert any(q.name == 'default' for q in main_queues)
        assert any(q.name == 'high' for q in main_queues)
        assert any(q.name == 'low' for q in main_queues)

        dlq_queues = [q for q in celery_config['task_queues']
                      if q.name.startswith('dlq.')]
        assert len(dlq_queues) == 3
        assert any(q.name == 'dlq.default' for q in dlq_queues)
        assert any(q.name == 'dlq.high' for q in dlq_queues)
        assert any(q.name == 'dlq.low' for q in dlq_queues)

    def test_setup_queues_given_missing_settings_expect_defaults_used(self):
        # Deleting attributes exercises the getattr defaults path.
        mock_app = MagicMock()
        mock_settings = MagicMock()
        del mock_settings.TASKQUEUE_APP_NAME
        del mock_settings.TASKQUEUE_QUEUES
        del mock_settings.TASKQUEUE_DLQ_NAME_PREFIX

        celery_config = {}

        setup_queues(mock_app, mock_settings, celery_config)

        assert celery_config['task_default_queue'] == 'default'
        assert celery_config['task_default_exchange'] == 'taskqueue'
        assert celery_config['task_default_exchange_type'] == 'direct'
        assert len(celery_config['task_queues']) == 6

    def test_setup_queues_given_single_queue_expect_correct_configuration(self):
        # Custom app name and DLQ prefix must show up in exchange/routing keys.
        mock_app = MagicMock()
        mock_settings = MagicMock()
        mock_settings.TASKQUEUE_APP_NAME = 'singleapp'
        mock_settings.TASKQUEUE_QUEUES = ['single']
        mock_settings.TASKQUEUE_DLQ_NAME_PREFIX = 'dead'

        celery_config = {}

        setup_queues(mock_app, mock_settings, celery_config)

        assert len(celery_config['task_queues']) == 2

        main_queue = next(
            q for q in celery_config['task_queues'] if q.name == 'single')
        assert main_queue.queue_arguments['x-dead-letter-exchange'] == 'singleapp.dlx'
        assert main_queue.queue_arguments['x-dead-letter-routing-key'] == 'dead.single'

        dlq = next(
            q for q in celery_config['task_queues'] if q.name == 'dead.single')
        assert dlq.exchange.name == 'singleapp.dlx'
|
|
160
|
+
|
|
161
|
+
|
|
162
|
+
class TestCeleryAppIntegration:
    """Integration tests for the celery_app module."""

    @patch('taskqueue.celery_app.get_django_settings')
    def test_celery_app_import_given_django_configured_expect_app_created(self, mock_get_settings):
        mock_settings = MagicMock()
        mock_settings.TASKQUEUE_APP_NAME = 'testapp'
        mock_settings.CELERY_BROKER_URL = 'redis://localhost:6379/0'
        mock_settings.CELERY_RESULT_BACKEND = 'redis://localhost:6379/0'
        mock_settings.CELERY_TASK_SERIALIZER = 'pickle'
        mock_settings.CELERY_RESULT_SERIALIZER = 'pickle'
        mock_settings.CELERY_ACCEPT_CONTENT = ['pickle']
        mock_settings.CELERY_TIMEZONE = 'UTC'
        mock_settings.CELERY_TASK_TRACK_STARTED = True
        mock_settings.CELERY_TASK_TIME_LIMIT = 1800
        mock_settings.CELERY_TASK_SOFT_TIME_LIMIT = 1500
        mock_settings.CELERY_TASK_ALWAYS_EAGER = False
        mock_settings.CELERY_TASK_EAGER_PROPAGATES = True
        mock_settings.TASKQUEUE_QUEUES = ['default']
        mock_settings.TASKQUEUE_DLQ_NAME_PREFIX = 'dlq'

        mock_get_settings.return_value = mock_settings

        # NOTE(review): celery_app is built at the module's first import, so
        # the patch above only takes effect if taskqueue.celery_app has not
        # been imported earlier in the test session — confirm test ordering.
        from taskqueue.celery_app import celery_app
        assert celery_app is not None
|
|
@@ -0,0 +1,308 @@
|
|
|
1
|
+
from datetime import datetime
|
|
2
|
+
from datetime import timedelta
|
|
3
|
+
from unittest.mock import Mock
|
|
4
|
+
from unittest.mock import patch
|
|
5
|
+
|
|
6
|
+
import pytest
|
|
7
|
+
from taskqueue.cmanager import _build_dynamic_task_call
|
|
8
|
+
from taskqueue.cmanager import _is_class_method
|
|
9
|
+
from taskqueue.cmanager import _split_function_and_queue_kwargs
|
|
10
|
+
from taskqueue.cmanager import CManager
|
|
11
|
+
from taskqueue.cmanager import K_DEFAULT_RETRY_COUNTDOWN
|
|
12
|
+
from taskqueue.cmanager import K_MAX_RETRY_COUNT
|
|
13
|
+
# Import the functions and classes to test
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class TestClass:
    """Helper target class for enqueue/introspection tests — not a test case.

    Its name matches pytest's ``Test*`` collection pattern, so without the
    opt-out below pytest would collect it and run ``test_method`` as an
    empty, trivially-passing test.
    """

    # Opt out of pytest collection: this is a fixture-like helper.
    __test__ = False

    def test_method(self):
        """Plain instance method; used as a bound-method enqueue target."""
        pass

    @classmethod
    def class_method(cls):
        """Classmethod target for _is_class_method detection tests."""
        pass

    @staticmethod
    def static_method():
        """Staticmethod target for _is_class_method detection tests."""
        pass
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
def test_function():
    """Module-level helper used as a plain-function enqueue target."""


# Opt the helper out of pytest collection: its name matches test_* but it is
# an enqueue target, not a test case (it would otherwise run as an empty test).
test_function.__test__ = False
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
class TestIsClassMethod:
    """Tests for the _is_class_method bound/unbound-callable detection helper."""

    def test__is_class_method_given_function_expect_return_false(self):
        """A plain module-level function is not treated as a class method."""
        assert _is_class_method(test_function) is False

    def test__is_class_method_given_class_method_expect_return_true(self):
        """A method accessed through an instance is detected as a class method."""
        obj = TestClass()
        assert _is_class_method(obj.test_method) is True

    def test__is_class_method_given_classmethod_decorator_expect_return_true(self):
        """A @classmethod-decorated callable is detected as a class method."""
        assert _is_class_method(TestClass.class_method) is True

    def test__is_class_method_given_staticmethod_decorator_expect_return_false(self):
        """A @staticmethod callable behaves like a plain function."""
        assert _is_class_method(TestClass.static_method) is False

    def test__is_class_method_given_unbound_method_expect_return_false(self):
        """A method accessed through the class (unbound) is not a class method."""
        assert _is_class_method(TestClass.test_method) is False
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
class TestSplitFunctionAndQueueKwargs:
    """Tests for _split_function_and_queue_kwargs kwarg partitioning."""

    def test__split_function_and_queue_kwargs_given_mixed_kwargs_expect_correct_split(self):
        """Queue-control keys land in queue kwargs; everything else goes to the function."""
        mixed = {
            'channel': 'high',
            'retry': {'max_retries': 5},
            'on_commit': True,
            'job_timeout': 300,
            'user_id': 123,
            'data': {'key': 'value'},
        }

        fn_kwargs, q_kwargs = _split_function_and_queue_kwargs(mixed)

        assert fn_kwargs == {'user_id': 123, 'data': {'key': 'value'}}
        assert q_kwargs == {
            'channel': 'high',
            'retry': {'max_retries': 5},
            'on_commit': True,
            'job_timeout': 300,
        }

    def test__split_function_and_queue_kwargs_given_only_function_kwargs_expect_empty_queue_kwargs(self):
        """With no queue-control keys present, queue kwargs come back empty."""
        fn_kwargs, q_kwargs = _split_function_and_queue_kwargs(
            {'user_id': 123, 'data': 'test'})

        assert fn_kwargs == {'user_id': 123, 'data': 'test'}
        assert q_kwargs == {}

    def test__split_function_and_queue_kwargs_given_only_queue_kwargs_expect_empty_func_kwargs(self):
        """With only queue-control keys present, function kwargs come back empty."""
        fn_kwargs, q_kwargs = _split_function_and_queue_kwargs(
            {'channel': 'default', 'retry': {'max_retries': 3}})

        assert fn_kwargs == {}
        assert q_kwargs == {'channel': 'default', 'retry': {'max_retries': 3}}

    def test__split_function_and_queue_kwargs_given_ignored_celery_keys_expect_they_are_ignored(self):
        """Raw Celery option keys (queue/countdown/eta/priority) are dropped entirely."""
        mixed = {
            'queue': 'default',
            'countdown': 10,
            'eta': datetime.now(),
            'priority': 1,
            'user_id': 123,
        }

        fn_kwargs, q_kwargs = _split_function_and_queue_kwargs(mixed)

        assert fn_kwargs == {'user_id': 123}
        assert q_kwargs == {}
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
class TestBuildDynamicTaskCall:
    """Tests for _build_dynamic_task_call routing of callables to executor tasks."""

    def test__build_dynamic_task_call_given_function_expect_function_executor_task(self):
        """A plain function routes to the dynamic_function_executor task."""
        name, args, kwargs = _build_dynamic_task_call(
            test_function, 1, 2, key='value')

        assert name == "taskqueue.cmanager.dynamic_function_executor"
        assert args == [
            'tests.test_cmanager', 'test_function', [1, 2], {'key': 'value'}]
        assert kwargs == {}

    def test__build_dynamic_task_call_given_bound_method_expect_class_method_executor_task(self):
        """A bound method routes to the dynamic_class_method_executor task."""
        obj = TestClass()
        name, args, kwargs = _build_dynamic_task_call(
            obj.test_method, 1, 2, key='value')

        assert name == "taskqueue.cmanager.dynamic_class_method_executor"
        assert args == [
            'tests.test_cmanager', 'TestClass', 'test_method',
            [1, 2], {'key': 'value'}]
        assert kwargs == {}

    def test__build_dynamic_task_call_given_function_without_module_expect_raise_value_error(self):
        """A callable with no __module__ cannot be re-imported and is rejected."""
        broken = Mock()
        broken.__module__ = None
        broken.__name__ = 'test_func'

        with pytest.raises(ValueError, match="Unsupported callable type for Celery enqueue"):
            _build_dynamic_task_call(broken)

    def test__build_dynamic_task_call_given_function_without_name_expect_raise_value_error(self):
        """A callable with no __name__ cannot be re-imported and is rejected."""
        broken = Mock()
        broken.__module__ = 'test_module'
        broken.__name__ = None

        with pytest.raises(ValueError, match="Unsupported callable type for Celery enqueue"):
            _build_dynamic_task_call(broken)
|
|
150
|
+
|
|
151
|
+
|
|
152
|
+
class TestCManager:
    """Unit tests for CManager's enqueue entry points and _send_task plumbing.

    NOTE: @patch decorators apply bottom-up, so the mock for the innermost
    decorator arrives first in the test-method argument list.
    """

    @patch('taskqueue.cmanager.logger')
    @patch.object(CManager, '_send_task')
    def test_cmanager_enqueue_given_function_expect_send_task_called(self, mock_send_task, mock_logger):
        """enqueue() with a plain function dispatches the function-executor task."""
        cm = CManager()
        cm.enqueue(test_function, 1, 2, key='value')

        mock_send_task.assert_called_once()
        # First positional argument of _send_task is the Celery task name.
        call_args = mock_send_task.call_args
        assert call_args[0][0] == "taskqueue.cmanager.dynamic_function_executor"

    @patch('taskqueue.cmanager.logger')
    @patch.object(CManager, '_send_task')
    def test_cmanager_enqueue_at_given_datetime_and_function_expect_send_task_called_with_eta(self, mock_send_task, mock_logger):
        """enqueue_at() with an eta datetime still routes through _send_task."""
        cm = CManager()
        eta = datetime.now()
        cm.enqueue_at(eta, test_function, 1, 2)

        mock_send_task.assert_called_once()
        call_args = mock_send_task.call_args
        assert call_args[0][0] == "taskqueue.cmanager.dynamic_function_executor"

    @patch('taskqueue.cmanager.logger')
    @patch.object(CManager, '_send_task')
    def test_cmanager_enqueue_in_given_timedelta_and_function_expect_send_task_called_with_countdown(self, mock_send_task, mock_logger):
        """enqueue_in() with a countdown timedelta still routes through _send_task."""
        cm = CManager()
        delta = timedelta(seconds=60)
        cm.enqueue_in(delta, test_function, 1, 2)

        mock_send_task.assert_called_once()
        call_args = mock_send_task.call_args
        assert call_args[0][0] == "taskqueue.cmanager.dynamic_function_executor"

    def test_cmanager_enqueue_given_no_args_expect_raise_value_error(self):
        """enqueue() without a callable raises a descriptive ValueError."""
        cm = CManager()
        with pytest.raises(ValueError, match="enqueue requires a callable as the first positional argument"):
            cm.enqueue()

    def test_cmanager_enqueue_at_given_insufficient_args_expect_raise_value_error(self):
        """enqueue_at() with only an eta (no callable) raises ValueError."""
        cm = CManager()
        with pytest.raises(ValueError, match="enqueue_at requires \\(eta_datetime, func, \\*func_args\\)"):
            cm.enqueue_at(datetime.now())

    def test_cmanager_enqueue_in_given_insufficient_args_expect_raise_value_error(self):
        """enqueue_in() with only a countdown (no callable) raises ValueError."""
        cm = CManager()
        with pytest.raises(ValueError, match="enqueue_in requires \\(countdown_delta, func, \\*func_args\\)"):
            cm.enqueue_in(timedelta(seconds=10))

    def test_cmanager_enqueue_op_given_unknown_type_expect_raise_value_error(self):
        """_enqueue_op_base rejects an unrecognized enqueue_op_type value."""
        cm = CManager()
        with pytest.raises(ValueError, match="Unknown enqueue operation type: invalid"):
            cm._enqueue_op_base(test_function, enqueue_op_type='invalid')

    @patch('django.db.transaction.on_commit')
    @patch.object(CManager, '_enqueue_op_base')
    def test_cmanager_enqueue_op_given_on_commit_true_expect_transaction_on_commit_called(self, mock_enqueue_op_base, mock_on_commit):
        """on_commit=True defers enqueueing via django.db.transaction.on_commit."""
        cm = CManager()
        cm._enqueue_op(test_function, on_commit=True)

        mock_on_commit.assert_called_once()

    @patch('django.db.transaction.on_commit')
    @patch.object(CManager, '_enqueue_op_base')
    def test_cmanager_enqueue_op_given_on_commit_false_expect_enqueue_op_base_called_directly(self, mock_enqueue_op_base, mock_on_commit):
        """on_commit=False enqueues immediately, bypassing transaction.on_commit."""
        cm = CManager()
        cm._enqueue_op(test_function, on_commit=False)

        mock_enqueue_op_base.assert_called_once()
        mock_on_commit.assert_not_called()

    @patch('taskqueue.celery_app.celery_app')
    def test_cmanager__send_task_given_task_args_expect_celery_app_send_task_called(self, mock_celery_app):
        """_send_task forwards name/args/kwargs/queue to celery_app.send_task."""
        cm = CManager()
        cm._send_task("test.task", [1, 2], {
            "key": "value"}, {"channel": "high"})

        mock_celery_app.send_task.assert_called_once()
        call_args = mock_celery_app.send_task.call_args
        # send_task is called with keyword arguments
        args, kwargs = call_args
        assert args[0] == "test.task"
        assert kwargs["args"] == [1, 2]
        # The retry policy is added automatically, so we need to check for both
        expected_kwargs = {"key": "value", "retry": {
            "max_retries": K_MAX_RETRY_COUNT, "countdown": K_DEFAULT_RETRY_COUNTDOWN}}
        assert kwargs["kwargs"] == expected_kwargs
        assert kwargs["queue"] == "high"

    @patch('taskqueue.celery_app.celery_app')
    def test_cmanager__send_task_given_no_retry_policy_expect_default_retry_policy_applied(self, mock_celery_app):
        """With no retry in queue kwargs, the default retry policy is injected."""
        cm = CManager()
        cm._send_task("test.task", [], {}, {})

        mock_celery_app.send_task.assert_called_once()
        call_args = mock_celery_app.send_task.call_args
        args, kwargs = call_args
        assert kwargs["kwargs"]["retry"] == {
            "max_retries": K_MAX_RETRY_COUNT,
            "countdown": K_DEFAULT_RETRY_COUNTDOWN
        }

    @patch('taskqueue.celery_app.celery_app')
    def test_cmanager__send_task_given_custom_retry_policy_expect_custom_policy_used(self, mock_celery_app):
        """An explicit retry policy in queue kwargs overrides the default."""
        cm = CManager()
        custom_retry = {"max_retries": 5, "countdown": 20}
        cm._send_task("test.task", [], {}, {"retry": custom_retry})

        mock_celery_app.send_task.assert_called_once()
        call_args = mock_celery_app.send_task.call_args
        args, kwargs = call_args
        assert kwargs["kwargs"]["retry"] == custom_retry
|
|
264
|
+
|
|
265
|
+
|
|
266
|
+
class TestDynamicTaskExecutors:
    """Smoke tests for the dynamic executor Celery tasks in taskqueue.cmanager."""

    def test_dynamic_function_executor_given_valid_module_and_function_expect_function_executed(self):
        """Test that dynamic_function_executor is properly decorated."""
        from taskqueue.cmanager import dynamic_function_executor

        # Just verify the function exists and is decorated
        # (delay/apply_async are the Celery task-proxy entry points).
        assert hasattr(dynamic_function_executor, 'delay')
        assert hasattr(dynamic_function_executor, 'apply_async')

    def test_dynamic_class_method_executor_given_valid_class_and_method_expect_method_executed(self):
        """Test that dynamic_class_method_executor is properly decorated."""
        from taskqueue.cmanager import dynamic_class_method_executor

        # Just verify the function exists and is decorated
        assert hasattr(dynamic_class_method_executor, 'delay')
        assert hasattr(dynamic_class_method_executor, 'apply_async')

    @patch('taskqueue.cmanager.importlib.import_module')
    def test_dynamic_function_executor_given_import_error_expect_retry_raised(self, mock_import_module):
        """An ImportError inside the task body surfaces as a raised exception."""
        mock_import_module.side_effect = ImportError("Module not found")

        from taskqueue.cmanager import dynamic_function_executor

        with pytest.raises(Exception):  # retry is raised
            dynamic_function_executor("invalid_module", "test_function")

    @patch('taskqueue.cmanager.importlib.import_module')
    def test_dynamic_function_executor_given_max_retries_reached_expect_reject_raised(self, mock_import_module):
        """At the retry ceiling the task is expected to raise (Reject) rather than retry."""
        mock_import_module.side_effect = ImportError("Module not found")

        from taskqueue.cmanager import dynamic_function_executor

        # Mock the request object to simulate max retries reached
        # (a bound task reads self.request.retries; mock_self stands in for self).
        mock_self = Mock()
        mock_self.request.retries = K_MAX_RETRY_COUNT
        mock_self.max_retries = K_MAX_RETRY_COUNT

        with pytest.raises(Exception):  # Reject should be raised
            dynamic_function_executor(
                mock_self, "invalid_module", "test_function",
                retry={"max_retries": K_MAX_RETRY_COUNT}
            )
|
|
@@ -0,0 +1,294 @@
|
|
|
1
|
+
from unittest.mock import MagicMock
|
|
2
|
+
from unittest.mock import patch
|
|
3
|
+
|
|
4
|
+
from libs.test_utils import celery_worker_burst
|
|
5
|
+
from libs.test_utils import clear_all_celery_queues
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class TestClearAllCeleryQueues:
    """Tests for clear_all_celery_queues purging every configured queue."""

    @patch('libs.test_utils.current_app')
    def test_clear_all_celery_queues_given_multiple_queues_expect_all_purged(self, mock_current_app):
        """Each queue factory is bound to the open channel and its queue purged."""
        mock_queue1 = MagicMock()
        mock_queue2 = MagicMock()
        # amqp.queues values are factories: calling one with a channel yields a bound queue.
        mock_queue_factory1 = MagicMock(return_value=mock_queue1)
        mock_queue_factory2 = MagicMock(return_value=mock_queue2)

        # NOTE(review): this keys.return_value is configured on the auto-created
        # mock and then discarded by the reassignment of amqp.queues just below.
        mock_current_app.amqp.queues.keys.return_value = ['queue1', 'queue2']
        mock_current_app.amqp.queues = {
            'queue1': mock_queue_factory1,
            'queue2': mock_queue_factory2
        }

        # connection_for_read()/channel() are used as context managers in the
        # code under test, hence the __enter__ return-value wiring.
        mock_conn = MagicMock()
        mock_chan = MagicMock()
        mock_current_app.connection_for_read.return_value.__enter__.return_value = mock_conn
        mock_conn.channel.return_value.__enter__.return_value = mock_chan

        clear_all_celery_queues()

        mock_queue_factory1.assert_called_once_with(mock_chan)
        mock_queue_factory2.assert_called_once_with(mock_chan)
        mock_queue1.purge.assert_called_once()
        mock_queue2.purge.assert_called_once()

    @patch('libs.test_utils.current_app')
    def test_clear_all_celery_queues_given_empty_queues_expect_no_purge_calls(self, mock_current_app):
        """With no configured queues, only the channel is opened; nothing is purged."""
        mock_current_app.amqp.queues.keys.return_value = []
        mock_current_app.amqp.queues = {}

        mock_conn = MagicMock()
        mock_chan = MagicMock()
        mock_current_app.connection_for_read.return_value.__enter__.return_value = mock_conn
        mock_conn.channel.return_value.__enter__.return_value = mock_chan

        clear_all_celery_queues()

        mock_conn.channel.assert_called_once()
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
class TestCeleryWorkerBurst:
    """Tests for celery_worker_burst: drain a queue, executing only whitelisted targets.

    Common fixture shape: messages are drained via queue.get() until None;
    amqp.queues maps channel name -> factory(channel) -> bound queue;
    connection_for_read()/channel() are context managers.
    """

    @patch('libs.test_utils.current_app')
    @patch('libs.test_utils.loads')
    def test_celery_worker_burst_given_matching_function_executor_expect_execution(self, mock_loads, mock_current_app):
        """A function-executor message whose target is whitelisted gets applied and acked."""
        mock_task = MagicMock()
        mock_current_app.tasks = {
            'taskqueue.cmanager.dynamic_function_executor': mock_task}

        mock_message = MagicMock()
        mock_message.headers = {
            'task': 'taskqueue.cmanager.dynamic_function_executor'}
        mock_message.body = b'mock_body'
        mock_message.content_type = 'application/json'
        mock_message.content_encoding = 'utf-8'
        mock_message.acknowledged = False

        # loads() is patched to return the decoded payload: ([module, func, args, kwargs], {})
        mock_loads.return_value = [
            ['module.submodule', 'test_function', [], {}], {}]

        # Queue yields one message, then None to terminate the burst loop.
        mock_queue = MagicMock()
        mock_queue.get.side_effect = [mock_message, None]
        mock_queue_factory = MagicMock(return_value=mock_queue)
        mock_current_app.amqp.queues = {'default': mock_queue_factory}

        mock_conn = MagicMock()
        mock_chan = MagicMock()
        mock_current_app.connection_for_read.return_value.__enter__.return_value = mock_conn
        mock_conn.channel.return_value.__enter__.return_value = mock_chan

        with patch('libs.test_utils.logger') as mock_logger:
            celery_worker_burst(['module.submodule.test_function'])

            mock_logger.info.assert_any_call(
                "Executing task: module.submodule.test_function")
            mock_logger.info.assert_any_call(
                "Successfully executed task: module.submodule.test_function")

        mock_message.ack.assert_called_once()
        mock_task.apply.assert_called_once_with(
            args=['module.submodule', 'test_function', [], {}],
            kwargs={}
        )

    @patch('libs.test_utils.current_app')
    @patch('libs.test_utils.loads')
    def test_celery_worker_burst_given_matching_class_method_executor_expect_execution(self, mock_loads, mock_current_app):
        """A class-method-executor message (module.Class.method target) is applied and acked."""
        mock_task = MagicMock()
        mock_current_app.tasks = {
            'taskqueue.cmanager.dynamic_class_method_executor': mock_task}

        mock_message = MagicMock()
        mock_message.headers = {
            'task': 'taskqueue.cmanager.dynamic_class_method_executor'}
        mock_message.body = b'mock_body'
        mock_message.content_type = 'application/json'
        mock_message.content_encoding = 'utf-8'
        mock_message.acknowledged = False

        # Class-method payload carries an extra class-name element.
        mock_loads.return_value = [
            ['module.submodule', 'TestClass', 'test_method', [], {}], {}]

        mock_queue = MagicMock()
        mock_queue.get.side_effect = [mock_message, None]
        mock_queue_factory = MagicMock(return_value=mock_queue)
        mock_current_app.amqp.queues = {'default': mock_queue_factory}

        mock_conn = MagicMock()
        mock_chan = MagicMock()
        mock_current_app.connection_for_read.return_value.__enter__.return_value = mock_conn
        mock_conn.channel.return_value.__enter__.return_value = mock_chan

        with patch('libs.test_utils.logger') as mock_logger:
            celery_worker_burst(['module.submodule.TestClass.test_method'])

            mock_logger.info.assert_any_call(
                "Executing task: module.submodule.TestClass.test_method")
            mock_logger.info.assert_any_call(
                "Successfully executed task: module.submodule.TestClass.test_method")

        mock_message.ack.assert_called_once()
        mock_task.apply.assert_called_once_with(
            args=['module.submodule', 'TestClass', 'test_method', [], {}],
            kwargs={}
        )

    @patch('libs.test_utils.current_app')
    @patch('libs.test_utils.loads')
    def test_celery_worker_burst_given_non_matching_function_expect_skip(self, mock_loads, mock_current_app):
        """A message whose target is not whitelisted is acked but never applied."""
        mock_task = MagicMock()
        mock_current_app.tasks = {
            'taskqueue.cmanager.dynamic_function_executor': mock_task}

        mock_message = MagicMock()
        mock_message.headers = {
            'task': 'taskqueue.cmanager.dynamic_function_executor'}
        mock_message.body = b'mock_body'
        mock_message.content_type = 'application/json'
        mock_message.content_encoding = 'utf-8'
        mock_message.acknowledged = False

        # Payload targets other_function, which is not in the burst whitelist below.
        mock_loads.return_value = [
            ['module.submodule', 'other_function', [], {}], {}]

        mock_queue = MagicMock()
        mock_queue.get.side_effect = [mock_message, None]
        mock_queue_factory = MagicMock(return_value=mock_queue)
        mock_current_app.amqp.queues = {'default': mock_queue_factory}

        mock_conn = MagicMock()
        mock_chan = MagicMock()
        mock_current_app.connection_for_read.return_value.__enter__.return_value = mock_conn
        mock_conn.channel.return_value.__enter__.return_value = mock_chan

        celery_worker_burst(['module.submodule.test_function'])

        mock_message.ack.assert_called_once()
        mock_task.apply.assert_not_called()

    @patch('libs.test_utils.current_app')
    def test_celery_worker_burst_given_invalid_task_name_expect_warning_and_skip(self, mock_current_app):
        """An unknown task header is logged as a warning and the message is acked."""
        mock_current_app.tasks = {}

        mock_message = MagicMock()
        mock_message.headers = {'task': 'invalid.task.name'}
        mock_message.body = b'mock_body'
        mock_message.content_type = 'application/json'
        mock_message.content_encoding = 'utf-8'
        mock_message.acknowledged = False

        mock_queue = MagicMock()
        mock_queue.get.side_effect = [mock_message, None]
        mock_queue_factory = MagicMock(return_value=mock_queue)
        mock_current_app.amqp.queues = {'default': mock_queue_factory}

        mock_conn = MagicMock()
        mock_chan = MagicMock()
        mock_current_app.connection_for_read.return_value.__enter__.return_value = mock_conn
        mock_conn.channel.return_value.__enter__.return_value = mock_chan

        with patch('libs.test_utils.logger') as mock_logger:
            celery_worker_burst(['some.function'])

            mock_logger.warning.assert_called_once_with(
                "Invalid task 'invalid.task.name'. Skipping.")
            mock_message.ack.assert_called_once()

    @patch('libs.test_utils.current_app')
    def test_celery_worker_burst_given_no_messages_expect_no_processing(self, mock_current_app):
        """An empty queue terminates the burst after a single no_ack=False get()."""
        mock_queue = MagicMock()
        mock_queue.get.return_value = None
        mock_queue_factory = MagicMock(return_value=mock_queue)
        mock_current_app.amqp.queues = {'default': mock_queue_factory}

        mock_conn = MagicMock()
        mock_chan = MagicMock()
        mock_current_app.connection_for_read.return_value.__enter__.return_value = mock_conn
        mock_conn.channel.return_value.__enter__.return_value = mock_chan

        celery_worker_burst(['some.function'])

        mock_queue.get.assert_called_once_with(no_ack=False)

    @patch('libs.test_utils.current_app')
    @patch('libs.test_utils.loads')
    def test_celery_worker_burst_given_custom_channel_expect_correct_queue_used(self, mock_loads, mock_current_app):
        """channel= selects which amqp.queues factory is bound and drained."""
        mock_task = MagicMock()
        mock_current_app.tasks = {
            'taskqueue.cmanager.dynamic_function_executor': mock_task}

        mock_message = MagicMock()
        mock_message.headers = {
            'task': 'taskqueue.cmanager.dynamic_function_executor'}
        mock_message.body = b'mock_body'
        mock_message.content_type = 'application/json'
        mock_message.content_encoding = 'utf-8'
        mock_message.acknowledged = False

        mock_loads.return_value = [
            ['module.submodule', 'test_function', [], {}], {}]

        mock_queue = MagicMock()
        mock_queue.get.side_effect = [mock_message, None]
        mock_queue_factory = MagicMock(return_value=mock_queue)
        # Only 'custom_queue' is registered, so the default channel would miss.
        mock_current_app.amqp.queues = {'custom_queue': mock_queue_factory}

        mock_conn = MagicMock()
        mock_chan = MagicMock()
        mock_current_app.connection_for_read.return_value.__enter__.return_value = mock_conn
        mock_conn.channel.return_value.__enter__.return_value = mock_chan

        with patch('libs.test_utils.logger') as mock_logger:
            celery_worker_burst(
                ['module.submodule.test_function'], channel='custom_queue')

            mock_logger.info.assert_any_call(
                "Executing task: module.submodule.test_function")
            mock_logger.info.assert_any_call(
                "Successfully executed task: module.submodule.test_function")

        mock_queue_factory.assert_called_once_with(mock_chan)

    @patch('libs.test_utils.current_app')
    @patch('libs.test_utils.loads')
    def test_celery_worker_burst_given_task_processing_error_expect_error_logged_and_ack(self, mock_loads, mock_current_app):
        """A failing task.apply is logged as an error and the message is still acked."""
        mock_task = MagicMock()
        mock_task.apply.side_effect = Exception("Task execution failed")
        mock_current_app.tasks = {
            'taskqueue.cmanager.dynamic_function_executor': mock_task}

        mock_message = MagicMock()
        mock_message.headers = {
            'task': 'taskqueue.cmanager.dynamic_function_executor'}
        mock_message.body = b'mock_body'
        mock_message.content_type = 'application/json'
        mock_message.content_encoding = 'utf-8'

        # Mock the acknowledged property to simulate ack behavior
        # (acknowledged must flip to True once ack() has been called).
        def get_acknowledged():
            return mock_message.ack.called
        type(mock_message).acknowledged = property(
            lambda self: get_acknowledged())

        mock_loads.return_value = [
            ['module.submodule', 'test_function', [], {}], {}]

        mock_queue = MagicMock()
        mock_queue.get.side_effect = [mock_message, None]
        mock_queue_factory = MagicMock(return_value=mock_queue)
        mock_current_app.amqp.queues = {'default': mock_queue_factory}

        mock_conn = MagicMock()
        mock_chan = MagicMock()
        mock_current_app.connection_for_read.return_value.__enter__.return_value = mock_conn
        mock_conn.channel.return_value.__enter__.return_value = mock_chan

        with patch('libs.test_utils.logger') as mock_logger:
            celery_worker_burst(['module.submodule.test_function'])

            mock_logger.info.assert_any_call(
                "Executing task: module.submodule.test_function")
            mock_logger.error.assert_called_once_with(
                "Failed to process task taskqueue.cmanager.dynamic_function_executor: Exception: Task execution failed"
            )
            mock_message.ack.assert_called_once()
|