p1-taskqueue 0.1.32__tar.gz → 0.2.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {p1_taskqueue-0.1.32 → p1_taskqueue-0.2.1}/PKG-INFO +1 -1
- {p1_taskqueue-0.1.32 → p1_taskqueue-0.2.1}/pyproject.toml +1 -1
- {p1_taskqueue-0.1.32 → p1_taskqueue-0.2.1}/src/p1_taskqueue.egg-info/PKG-INFO +1 -1
- {p1_taskqueue-0.1.32 → p1_taskqueue-0.2.1}/src/p1_taskqueue.egg-info/SOURCES.txt +7 -0
- {p1_taskqueue-0.1.32 → p1_taskqueue-0.2.1}/src/taskqueue/celery_app.py +64 -54
- p1_taskqueue-0.2.1/src/taskqueue/dlq.py +192 -0
- p1_taskqueue-0.2.1/src/taskqueue/management/commands/__init__.py +0 -0
- p1_taskqueue-0.2.1/src/taskqueue/management/commands/retry_dlq_messages.py +33 -0
- p1_taskqueue-0.2.1/src/taskqueue/management/commands/setup_taskqueue.py +14 -0
- p1_taskqueue-0.2.1/src/taskqueue/migrations/__init__.py +0 -0
- p1_taskqueue-0.2.1/tests/test_celery_app.py +225 -0
- p1_taskqueue-0.2.1/tests/test_dlq.py +240 -0
- p1_taskqueue-0.2.1/tests/test_management_command.py +19 -0
- p1_taskqueue-0.1.32/tests/test_celery_app.py +0 -186
- {p1_taskqueue-0.1.32 → p1_taskqueue-0.2.1}/README.md +0 -0
- {p1_taskqueue-0.1.32 → p1_taskqueue-0.2.1}/setup.cfg +0 -0
- {p1_taskqueue-0.1.32 → p1_taskqueue-0.2.1}/src/p1_taskqueue.egg-info/dependency_links.txt +0 -0
- {p1_taskqueue-0.1.32 → p1_taskqueue-0.2.1}/src/p1_taskqueue.egg-info/requires.txt +0 -0
- {p1_taskqueue-0.1.32 → p1_taskqueue-0.2.1}/src/p1_taskqueue.egg-info/top_level.txt +0 -0
- {p1_taskqueue-0.1.32 → p1_taskqueue-0.2.1}/src/taskqueue/__init__.py +0 -0
- {p1_taskqueue-0.1.32 → p1_taskqueue-0.2.1}/src/taskqueue/apps.py +0 -0
- {p1_taskqueue-0.1.32 → p1_taskqueue-0.2.1}/src/taskqueue/cmanager.py +0 -0
- {p1_taskqueue-0.1.32 → p1_taskqueue-0.2.1}/src/taskqueue/libs/__init__.py +0 -0
- {p1_taskqueue-0.1.32 → p1_taskqueue-0.2.1}/src/taskqueue/libs/helper_test.py +0 -0
- {p1_taskqueue-0.1.32/src/taskqueue/migrations → p1_taskqueue-0.2.1/src/taskqueue/management}/__init__.py +0 -0
- {p1_taskqueue-0.1.32 → p1_taskqueue-0.2.1}/src/taskqueue/migrations/0001_initial.py +0 -0
- {p1_taskqueue-0.1.32 → p1_taskqueue-0.2.1}/src/taskqueue/migrations/0002_add_channel_to_taskreconstruction.py +0 -0
- {p1_taskqueue-0.1.32 → p1_taskqueue-0.2.1}/src/taskqueue/migrations/0003_alter_taskreconstruction_id.py +0 -0
- {p1_taskqueue-0.1.32 → p1_taskqueue-0.2.1}/src/taskqueue/migrations/0004_add_callable_name_meta_to_taskreconstruction.py +0 -0
- {p1_taskqueue-0.1.32 → p1_taskqueue-0.2.1}/src/taskqueue/models.py +0 -0
- {p1_taskqueue-0.1.32 → p1_taskqueue-0.2.1}/src/taskqueue/slack_notifier.py +0 -0
- {p1_taskqueue-0.1.32 → p1_taskqueue-0.2.1}/tests/test_cmanager.py +0 -0
- {p1_taskqueue-0.1.32 → p1_taskqueue-0.2.1}/tests/test_helper_test_functions.py +0 -0
- {p1_taskqueue-0.1.32 → p1_taskqueue-0.2.1}/tests/test_test_utils.py +0 -0
|
@@ -5,7 +5,7 @@ build-backend = "setuptools.build_meta"
|
|
|
5
5
|
[project]
|
|
6
6
|
name = "p1-taskqueue"
|
|
7
7
|
# DO NOT CHANGE THIS VERSION - it gets automatically replaced by CI/CD with the git tag version
|
|
8
|
-
version = "0.1
|
|
8
|
+
version = "0.2.1"
|
|
9
9
|
description = "A Task Queue Wrapper for Dekoruma Backend"
|
|
10
10
|
authors = [
|
|
11
11
|
{name = "Chalvin", email = "engineering@dekoruma.com"}
|
|
@@ -9,10 +9,15 @@ src/taskqueue/__init__.py
|
|
|
9
9
|
src/taskqueue/apps.py
|
|
10
10
|
src/taskqueue/celery_app.py
|
|
11
11
|
src/taskqueue/cmanager.py
|
|
12
|
+
src/taskqueue/dlq.py
|
|
12
13
|
src/taskqueue/models.py
|
|
13
14
|
src/taskqueue/slack_notifier.py
|
|
14
15
|
src/taskqueue/libs/__init__.py
|
|
15
16
|
src/taskqueue/libs/helper_test.py
|
|
17
|
+
src/taskqueue/management/__init__.py
|
|
18
|
+
src/taskqueue/management/commands/__init__.py
|
|
19
|
+
src/taskqueue/management/commands/retry_dlq_messages.py
|
|
20
|
+
src/taskqueue/management/commands/setup_taskqueue.py
|
|
16
21
|
src/taskqueue/migrations/0001_initial.py
|
|
17
22
|
src/taskqueue/migrations/0002_add_channel_to_taskreconstruction.py
|
|
18
23
|
src/taskqueue/migrations/0003_alter_taskreconstruction_id.py
|
|
@@ -20,5 +25,7 @@ src/taskqueue/migrations/0004_add_callable_name_meta_to_taskreconstruction.py
|
|
|
20
25
|
src/taskqueue/migrations/__init__.py
|
|
21
26
|
tests/test_celery_app.py
|
|
22
27
|
tests/test_cmanager.py
|
|
28
|
+
tests/test_dlq.py
|
|
23
29
|
tests/test_helper_test_functions.py
|
|
30
|
+
tests/test_management_command.py
|
|
24
31
|
tests/test_test_utils.py
|
|
@@ -36,8 +36,10 @@ def create_celery_app():
|
|
|
36
36
|
'result_serializer': getattr(settings, 'CELERY_RESULT_SERIALIZER', 'pickle'),
|
|
37
37
|
'accept_content': getattr(settings, 'CELERY_ACCEPT_CONTENT', ['pickle']),
|
|
38
38
|
'timezone': getattr(settings, 'CELERY_TIMEZONE', 'UTC+7'),
|
|
39
|
-
'task_time_limit': getattr(settings, 'CELERY_TASK_TIME_LIMIT',
|
|
40
|
-
'task_soft_time_limit': getattr(
|
|
39
|
+
'task_time_limit': getattr(settings, 'CELERY_TASK_TIME_LIMIT', 10 * 60),
|
|
40
|
+
'task_soft_time_limit': getattr(
|
|
41
|
+
settings, 'CELERY_TASK_SOFT_TIME_LIMIT', 8 * 60
|
|
42
|
+
),
|
|
41
43
|
# 14 days
|
|
42
44
|
'result_expires': getattr(settings, 'CELERY_RESULT_EXPIRES', 14 * 24 * 60 * 60),
|
|
43
45
|
'task_track_started': True,
|
|
@@ -54,24 +56,19 @@ def create_celery_app():
|
|
|
54
56
|
'broker_pool_limit': 2,
|
|
55
57
|
}
|
|
56
58
|
|
|
57
|
-
|
|
59
|
+
configure_queues(settings, celery_config)
|
|
58
60
|
app.conf.update(celery_config)
|
|
59
61
|
app.autodiscover_tasks(['taskqueue'])
|
|
60
62
|
|
|
61
63
|
return app
|
|
62
64
|
|
|
63
65
|
|
|
64
|
-
def
|
|
66
|
+
def _build_queue_setup(settings):
|
|
65
67
|
app_name = getattr(settings, 'TASKQUEUE_APP_NAME', 'taskqueue')
|
|
66
68
|
queue_names = getattr(settings, 'TASKQUEUE_QUEUES',
|
|
67
69
|
['default', 'high', 'low'])
|
|
68
|
-
if queue_names is None:
|
|
69
|
-
queue_names = ['default', 'high', 'low']
|
|
70
70
|
dlq_name_prefix = getattr(settings, 'TASKQUEUE_DLQ_NAME_PREFIX', 'dlq')
|
|
71
71
|
|
|
72
|
-
logger.info(
|
|
73
|
-
f"[TaskQueue] Configuring app: {app_name}, queues: {queue_names}")
|
|
74
|
-
|
|
75
72
|
main_exchange = Exchange(app_name, type='direct')
|
|
76
73
|
dlx_exchange = Exchange(f'{app_name}.dlx', type='direct')
|
|
77
74
|
|
|
@@ -79,68 +76,81 @@ def setup_queues(app, settings, celery_config):
|
|
|
79
76
|
|
|
80
77
|
for queue_name in queue_names:
|
|
81
78
|
dlq_name = f'{dlq_name_prefix}.{queue_name}'
|
|
82
|
-
dlx_name = f'{app_name}.dlx'
|
|
83
|
-
|
|
84
|
-
queue_args = {
|
|
85
|
-
'x-dead-letter-exchange': dlx_name,
|
|
86
|
-
'x-dead-letter-routing-key': dlq_name
|
|
87
|
-
}
|
|
88
79
|
|
|
89
80
|
queue = Queue(
|
|
90
81
|
queue_name,
|
|
91
82
|
main_exchange,
|
|
92
83
|
routing_key=queue_name,
|
|
93
|
-
queue_arguments=
|
|
84
|
+
queue_arguments={
|
|
85
|
+
'x-dead-letter-exchange': dlx_exchange.name,
|
|
86
|
+
'x-dead-letter-routing-key': dlq_name,
|
|
87
|
+
},
|
|
94
88
|
)
|
|
95
89
|
queues.append(queue)
|
|
96
|
-
logger.info(
|
|
97
|
-
f"[TaskQueue] Queue '{queue_name}' configured with DLX: {dlx_name}, DLQ routing key: {dlq_name}")
|
|
98
90
|
|
|
99
|
-
for queue_name in queue_names:
|
|
100
|
-
dlq_name = f'{dlq_name_prefix}.{queue_name}'
|
|
101
91
|
dlq = Queue(dlq_name, dlx_exchange, routing_key=dlq_name)
|
|
102
92
|
queues.append(dlq)
|
|
103
|
-
logger.info(f"[TaskQueue] DLQ '{dlq_name}' configured")
|
|
104
93
|
|
|
105
|
-
|
|
106
|
-
'task_default_queue': 'default',
|
|
107
|
-
'task_default_exchange': app_name,
|
|
108
|
-
'task_default_exchange_type': 'direct',
|
|
109
|
-
'task_queues': tuple(queues),
|
|
110
|
-
})
|
|
94
|
+
return app_name, queues, main_exchange, dlx_exchange
|
|
111
95
|
|
|
112
|
-
try:
|
|
113
|
-
with app.connection_or_acquire() as conn:
|
|
114
|
-
channel = conn.default_channel
|
|
115
96
|
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
|
|
97
|
+
def configure_queues(settings, celery_config):
|
|
98
|
+
app_name, queues, _, _ = _build_queue_setup(settings)
|
|
99
|
+
celery_config.update(
|
|
100
|
+
{
|
|
101
|
+
'task_default_queue': 'default',
|
|
102
|
+
'task_default_exchange': app_name,
|
|
103
|
+
'task_default_exchange_type': 'direct',
|
|
104
|
+
'task_queues': queues,
|
|
105
|
+
}
|
|
106
|
+
)
|
|
121
107
|
|
|
108
|
+
|
|
109
|
+
def setup_queues(app=None, settings=None):
|
|
110
|
+
if settings is None:
|
|
111
|
+
settings = get_django_settings()
|
|
112
|
+
if app is None:
|
|
113
|
+
app = celery_app
|
|
114
|
+
|
|
115
|
+
app_name, queues, main_exchange, dlx_exchange = _build_queue_setup(
|
|
116
|
+
settings)
|
|
117
|
+
logger.info(
|
|
118
|
+
f"[TaskQueue] Declaring app: {app_name}, queues: {[queue.name for queue in queues]}")
|
|
119
|
+
app.conf.update(
|
|
120
|
+
{
|
|
121
|
+
'task_default_queue': 'default',
|
|
122
|
+
'task_default_exchange': app_name,
|
|
123
|
+
'task_default_exchange_type': 'direct',
|
|
124
|
+
'task_queues': queues,
|
|
125
|
+
}
|
|
126
|
+
)
|
|
127
|
+
|
|
128
|
+
with app.connection_or_acquire() as conn:
|
|
129
|
+
channel = conn.default_channel
|
|
130
|
+
|
|
131
|
+
try:
|
|
132
|
+
main_exchange.declare(channel=channel)
|
|
133
|
+
logger.info(f"[TaskQueue] Exchange declared: {app_name}")
|
|
134
|
+
except PreconditionFailed:
|
|
135
|
+
logger.info(f"[TaskQueue] Exchange already exists: {app_name}")
|
|
136
|
+
|
|
137
|
+
try:
|
|
138
|
+
dlx_exchange.declare(channel=channel)
|
|
139
|
+
logger.info(f"[TaskQueue] DLX Exchange declared: {app_name}.dlx")
|
|
140
|
+
except PreconditionFailed:
|
|
141
|
+
logger.info(
|
|
142
|
+
f"[TaskQueue] DLX Exchange already exists: {app_name}.dlx")
|
|
143
|
+
|
|
144
|
+
for queue in queues:
|
|
122
145
|
try:
|
|
123
|
-
|
|
124
|
-
logger.info(
|
|
125
|
-
f"[TaskQueue] DLX Exchange declared: {app_name}.dlx")
|
|
146
|
+
queue.declare(channel=channel)
|
|
147
|
+
logger.info(f"[TaskQueue] Queue declared: {queue.name}")
|
|
126
148
|
except PreconditionFailed:
|
|
127
149
|
logger.info(
|
|
128
|
-
f"[TaskQueue]
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
queue.declare(channel=channel)
|
|
133
|
-
logger.info(f"[TaskQueue] Queue declared: {queue.name}")
|
|
134
|
-
except PreconditionFailed:
|
|
135
|
-
logger.info(
|
|
136
|
-
f"[TaskQueue] Queue already exists with different config: {queue.name}. Using existing queue.")
|
|
137
|
-
except Exception as e:
|
|
138
|
-
logger.warning(
|
|
139
|
-
f"[TaskQueue] Failed to declare queue {queue.name}: {e}")
|
|
140
|
-
|
|
141
|
-
except Exception as e:
|
|
142
|
-
logger.warning(
|
|
143
|
-
f"[TaskQueue] Failed to setup queues: {str(e.__class__.__name__)} {e}")
|
|
150
|
+
f"[TaskQueue] Queue already exists with different config: {queue.name}. Using existing queue."
|
|
151
|
+
)
|
|
152
|
+
|
|
153
|
+
return queues
|
|
144
154
|
|
|
145
155
|
|
|
146
156
|
celery_app = create_celery_app()
|
|
@@ -0,0 +1,192 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
from datetime import datetime
|
|
3
|
+
from datetime import timezone
|
|
4
|
+
from typing import Any
|
|
5
|
+
from typing import Dict
|
|
6
|
+
from typing import Optional
|
|
7
|
+
from typing import Tuple
|
|
8
|
+
|
|
9
|
+
from kombu import Producer
|
|
10
|
+
from taskqueue.cmanager import K_TASK_STATUS_REPUBLISHED
|
|
11
|
+
|
|
12
|
+
logger = logging.getLogger(__name__)
|
|
13
|
+
|
|
14
|
+
K_DEFAULT_MAX_ETA_DAYS = 3
|
|
15
|
+
K_DEFAULT_MAX_RETENTION_DAYS = 14
|
|
16
|
+
K_SUPPORTED_PUBLISH_PROPERTIES = (
|
|
17
|
+
"correlation_id",
|
|
18
|
+
"reply_to",
|
|
19
|
+
"expiration",
|
|
20
|
+
"message_id",
|
|
21
|
+
"timestamp",
|
|
22
|
+
"type",
|
|
23
|
+
"user_id",
|
|
24
|
+
"app_id",
|
|
25
|
+
"cluster_id",
|
|
26
|
+
"delivery_mode",
|
|
27
|
+
"priority",
|
|
28
|
+
)
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
def _get_celery_app() -> Any:
|
|
32
|
+
from taskqueue.celery_app import celery_app
|
|
33
|
+
|
|
34
|
+
return celery_app
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
def _get_task_result_model():
|
|
38
|
+
from django_celery_results.models import TaskResult
|
|
39
|
+
|
|
40
|
+
return TaskResult
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
def _get_retry_limits() -> Tuple[int, int]:
|
|
44
|
+
from django.conf import settings
|
|
45
|
+
|
|
46
|
+
max_eta_days = getattr(
|
|
47
|
+
settings, "TASKQUEUE_DLQ_RETRY_MAX_ETA_DAYS", K_DEFAULT_MAX_ETA_DAYS
|
|
48
|
+
)
|
|
49
|
+
max_retention_days = getattr(
|
|
50
|
+
settings, "TASKQUEUE_DLQ_MAX_RETENTION_DAYS", K_DEFAULT_MAX_RETENTION_DAYS
|
|
51
|
+
)
|
|
52
|
+
return max_eta_days, max_retention_days
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
def _extract_publish_properties(message: Any) -> Dict[str, Any]:
|
|
56
|
+
properties = getattr(message, "properties", {}) or {}
|
|
57
|
+
return {
|
|
58
|
+
key: properties[key]
|
|
59
|
+
for key in K_SUPPORTED_PUBLISH_PROPERTIES
|
|
60
|
+
if key in properties
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
def retry_dlq_messages(
|
|
65
|
+
dlq_name: str, main_queue_name: str, app: Any = None
|
|
66
|
+
) -> Dict[str, int]:
|
|
67
|
+
app = app or _get_celery_app()
|
|
68
|
+
|
|
69
|
+
results = {
|
|
70
|
+
"moved": 0,
|
|
71
|
+
"skipped": 0,
|
|
72
|
+
"discarded": 0,
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
with app.connection_or_acquire() as conn:
|
|
76
|
+
channel = conn.default_channel
|
|
77
|
+
|
|
78
|
+
dlq_factory = app.amqp.queues.get(dlq_name)
|
|
79
|
+
if dlq_factory is None:
|
|
80
|
+
raise ValueError(f"DLQ '{dlq_name}' is not configured")
|
|
81
|
+
|
|
82
|
+
main_queue_factory = app.amqp.queues.get(main_queue_name)
|
|
83
|
+
if main_queue_factory is None:
|
|
84
|
+
raise ValueError(f"Queue '{main_queue_name}' is not configured")
|
|
85
|
+
|
|
86
|
+
dlq_queue = dlq_factory(channel)
|
|
87
|
+
main_queue = main_queue_factory(channel)
|
|
88
|
+
producer = Producer(channel)
|
|
89
|
+
|
|
90
|
+
while True:
|
|
91
|
+
message = dlq_queue.get(no_ack=False)
|
|
92
|
+
if not message:
|
|
93
|
+
break
|
|
94
|
+
|
|
95
|
+
headers = message.headers or {}
|
|
96
|
+
|
|
97
|
+
discard_info = get_discard_info(headers)
|
|
98
|
+
if discard_info:
|
|
99
|
+
task_name, task_id = discard_info
|
|
100
|
+
logger.warning(
|
|
101
|
+
f"[DLQRetry] Discarding task: {task_name} (ID: {task_id})"
|
|
102
|
+
)
|
|
103
|
+
message.ack()
|
|
104
|
+
results["discarded"] += 1
|
|
105
|
+
continue
|
|
106
|
+
|
|
107
|
+
if not should_retry_message(headers):
|
|
108
|
+
logger.warning(
|
|
109
|
+
f"[DLQRetry] Message from {dlq_name} is not eligible for retry"
|
|
110
|
+
)
|
|
111
|
+
message.ack()
|
|
112
|
+
results["skipped"] += 1
|
|
113
|
+
continue
|
|
114
|
+
|
|
115
|
+
producer.publish(
|
|
116
|
+
message.body,
|
|
117
|
+
exchange=main_queue.exchange,
|
|
118
|
+
routing_key=main_queue.routing_key,
|
|
119
|
+
headers=headers,
|
|
120
|
+
content_type=message.content_type,
|
|
121
|
+
content_encoding=message.content_encoding,
|
|
122
|
+
declare=[main_queue],
|
|
123
|
+
retry=True,
|
|
124
|
+
**_extract_publish_properties(message),
|
|
125
|
+
)
|
|
126
|
+
message.ack()
|
|
127
|
+
results["moved"] += 1
|
|
128
|
+
|
|
129
|
+
logger.info(
|
|
130
|
+
f"[DLQRetry] Moved {results['moved']} messages from {dlq_name} to {main_queue_name}"
|
|
131
|
+
)
|
|
132
|
+
return results
|
|
133
|
+
|
|
134
|
+
|
|
135
|
+
def should_retry_message(headers: Dict[str, Any]) -> bool:
|
|
136
|
+
try:
|
|
137
|
+
task_id = headers.get("id")
|
|
138
|
+
|
|
139
|
+
if task_id:
|
|
140
|
+
try:
|
|
141
|
+
TaskResult = _get_task_result_model()
|
|
142
|
+
task_result = TaskResult.objects.filter(
|
|
143
|
+
task_id=task_id).first()
|
|
144
|
+
if task_result and task_result.status == K_TASK_STATUS_REPUBLISHED:
|
|
145
|
+
logger.info(
|
|
146
|
+
f"[DLQRetry] Task {task_id} has status '{K_TASK_STATUS_REPUBLISHED}', skipping retry"
|
|
147
|
+
)
|
|
148
|
+
return False
|
|
149
|
+
except Exception as e:
|
|
150
|
+
logger.warning(
|
|
151
|
+
f"[DLQRetry] Error checking TaskResult for task {task_id}: {e.__class__.__name__}: {e}"
|
|
152
|
+
)
|
|
153
|
+
|
|
154
|
+
eta = headers.get("eta")
|
|
155
|
+
if not eta:
|
|
156
|
+
logger.warning(
|
|
157
|
+
f"[DLQRetry] Message {task_id} has no ETA, skipping retry")
|
|
158
|
+
return False
|
|
159
|
+
|
|
160
|
+
eta_datetime = datetime.fromisoformat(eta.replace("Z", "+00:00"))
|
|
161
|
+
current_datetime = datetime.now(timezone.utc)
|
|
162
|
+
max_eta_days, _ = _get_retry_limits()
|
|
163
|
+
days_difference = (current_datetime - eta_datetime).days
|
|
164
|
+
|
|
165
|
+
return days_difference <= max_eta_days
|
|
166
|
+
except Exception as e:
|
|
167
|
+
logger.error(
|
|
168
|
+
f"[DLQRetry] Error checking ETA: {e.__class__.__name__}: {e}")
|
|
169
|
+
return False
|
|
170
|
+
|
|
171
|
+
|
|
172
|
+
def get_discard_info(headers: Dict[str, Any]) -> Optional[Tuple[str, str]]:
|
|
173
|
+
try:
|
|
174
|
+
eta = headers.get("eta")
|
|
175
|
+
if not eta:
|
|
176
|
+
return None
|
|
177
|
+
|
|
178
|
+
eta_datetime = datetime.fromisoformat(eta.replace("Z", "+00:00"))
|
|
179
|
+
current_datetime = datetime.now(timezone.utc)
|
|
180
|
+
_, max_retention_days = _get_retry_limits()
|
|
181
|
+
days_difference = (current_datetime - eta_datetime).days
|
|
182
|
+
|
|
183
|
+
if days_difference > max_retention_days:
|
|
184
|
+
task_name = headers.get("task", "unknown")
|
|
185
|
+
task_id = headers.get("id", "unknown")
|
|
186
|
+
return task_name, task_id
|
|
187
|
+
|
|
188
|
+
return None
|
|
189
|
+
except Exception as e:
|
|
190
|
+
logger.error(
|
|
191
|
+
f"[DLQRetry] Error checking ETA: {e.__class__.__name__}: {e}")
|
|
192
|
+
return None
|
|
File without changes
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
|
|
3
|
+
from django.conf import settings
|
|
4
|
+
from django.core.management.base import BaseCommand
|
|
5
|
+
from taskqueue.dlq import retry_dlq_messages
|
|
6
|
+
|
|
7
|
+
logger = logging.getLogger(__name__)
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class Command(BaseCommand):
|
|
11
|
+
help = "Retry messages from TaskQueue dead letter queues back to the main queues."
|
|
12
|
+
|
|
13
|
+
def handle(self, *args, **options):
|
|
14
|
+
logger.info("[DLQRetry] Starting DLQ retry process")
|
|
15
|
+
|
|
16
|
+
queue_names = getattr(settings, "TASKQUEUE_QUEUES", [
|
|
17
|
+
"default", "high", "low"])
|
|
18
|
+
dlq_prefix = getattr(settings, "TASKQUEUE_DLQ_NAME_PREFIX", "dlq")
|
|
19
|
+
|
|
20
|
+
total_moved = 0
|
|
21
|
+
total_skipped = 0
|
|
22
|
+
total_discarded = 0
|
|
23
|
+
|
|
24
|
+
for queue_name in queue_names:
|
|
25
|
+
dlq_name = f"{dlq_prefix}.{queue_name}"
|
|
26
|
+
result = retry_dlq_messages(dlq_name, queue_name)
|
|
27
|
+
total_moved += result["moved"]
|
|
28
|
+
total_skipped += result["skipped"]
|
|
29
|
+
total_discarded += result["discarded"]
|
|
30
|
+
|
|
31
|
+
logger.info("[DLQRetry] Finished DLQ retry process")
|
|
32
|
+
logger.info(
|
|
33
|
+
f'Retried {total_moved} messages, skipped {total_skipped}, discarded {total_discarded}.')
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
|
|
3
|
+
from django.core.management.base import BaseCommand
|
|
4
|
+
from taskqueue.celery_app import setup_queues
|
|
5
|
+
|
|
6
|
+
logger = logging.getLogger(__name__)
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class Command(BaseCommand):
|
|
10
|
+
help = "Declare TaskQueue exchanges and queues on the configured broker."
|
|
11
|
+
|
|
12
|
+
def handle(self, *args, **options):
|
|
13
|
+
queues = setup_queues()
|
|
14
|
+
logger.info(f"Declared {len(queues)} TaskQueue queues successfully.")
|
|
File without changes
|
|
@@ -0,0 +1,225 @@
|
|
|
1
|
+
"""Tests for the celery_app module."""
|
|
2
|
+
from unittest.mock import MagicMock
|
|
3
|
+
from unittest.mock import patch
|
|
4
|
+
|
|
5
|
+
import pytest
|
|
6
|
+
from taskqueue.celery_app import configure_queues
|
|
7
|
+
from taskqueue.celery_app import create_celery_app
|
|
8
|
+
from taskqueue.celery_app import get_django_settings
|
|
9
|
+
from taskqueue.celery_app import setup_queues
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class TestGetDjangoSettings:
|
|
13
|
+
def test_get_django_settings_given_django_available_expect_return_settings(self):
|
|
14
|
+
with patch("django.conf.settings") as mock_settings:
|
|
15
|
+
result = get_django_settings()
|
|
16
|
+
assert result == mock_settings
|
|
17
|
+
|
|
18
|
+
def test_get_django_settings_given_django_not_available_expect_raise_import_error(
|
|
19
|
+
self,
|
|
20
|
+
):
|
|
21
|
+
with patch(
|
|
22
|
+
"builtins.__import__", side_effect=ImportError("No module named 'django'")
|
|
23
|
+
):
|
|
24
|
+
with pytest.raises(
|
|
25
|
+
ImportError, match="\\[TaskQueue\\] Django settings not found\\."
|
|
26
|
+
):
|
|
27
|
+
get_django_settings()
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
class TestCreateCeleryApp:
|
|
31
|
+
@patch("taskqueue.celery_app.configure_queues")
|
|
32
|
+
@patch("taskqueue.celery_app.setup_queues")
|
|
33
|
+
@patch("taskqueue.celery_app.get_django_settings")
|
|
34
|
+
@patch("taskqueue.celery_app.Celery")
|
|
35
|
+
def test_create_celery_app_given_valid_settings_expect_celery_app_created(
|
|
36
|
+
self,
|
|
37
|
+
mock_celery_class,
|
|
38
|
+
mock_get_settings,
|
|
39
|
+
mock_setup_queues,
|
|
40
|
+
mock_configure_queues,
|
|
41
|
+
):
|
|
42
|
+
mock_settings = MagicMock()
|
|
43
|
+
mock_settings.TASKQUEUE_APP_NAME = "testapp"
|
|
44
|
+
mock_settings.CELERY_BROKER_URL = "redis://localhost:6379/0"
|
|
45
|
+
mock_settings.CELERY_RESULT_BACKEND = "redis://localhost:6379/0"
|
|
46
|
+
mock_settings.CELERY_TASK_SERIALIZER = "pickle"
|
|
47
|
+
mock_settings.CELERY_RESULT_SERIALIZER = "pickle"
|
|
48
|
+
mock_settings.CELERY_ACCEPT_CONTENT = ["pickle"]
|
|
49
|
+
mock_settings.CELERY_TIMEZONE = "UTC"
|
|
50
|
+
mock_settings.CELERY_TASK_TRACK_STARTED = True
|
|
51
|
+
mock_settings.CELERY_TASK_TIME_LIMIT = 1800
|
|
52
|
+
mock_settings.CELERY_TASK_SOFT_TIME_LIMIT = 1500
|
|
53
|
+
mock_settings.CELERY_TASK_ALWAYS_EAGER = False
|
|
54
|
+
mock_settings.CELERY_TASK_EAGER_PROPAGATES = True
|
|
55
|
+
|
|
56
|
+
mock_get_settings.return_value = mock_settings
|
|
57
|
+
mock_app = MagicMock()
|
|
58
|
+
mock_celery_class.return_value = mock_app
|
|
59
|
+
|
|
60
|
+
result = create_celery_app()
|
|
61
|
+
|
|
62
|
+
mock_celery_class.assert_called_once_with("testapp")
|
|
63
|
+
mock_configure_queues.assert_called_once()
|
|
64
|
+
mock_setup_queues.assert_not_called()
|
|
65
|
+
mock_app.conf.update.assert_called_once()
|
|
66
|
+
mock_app.autodiscover_tasks.assert_called_once_with(["taskqueue"])
|
|
67
|
+
assert result == mock_app
|
|
68
|
+
|
|
69
|
+
@patch("taskqueue.celery_app.configure_queues")
|
|
70
|
+
@patch("taskqueue.celery_app.setup_queues")
|
|
71
|
+
@patch("taskqueue.celery_app.get_django_settings")
|
|
72
|
+
@patch("taskqueue.celery_app.Celery")
|
|
73
|
+
def test_create_celery_app_given_missing_settings_expect_defaults_used(
|
|
74
|
+
self,
|
|
75
|
+
mock_celery_class,
|
|
76
|
+
mock_get_settings,
|
|
77
|
+
mock_setup_queues,
|
|
78
|
+
mock_configure_queues,
|
|
79
|
+
):
|
|
80
|
+
mock_settings = MagicMock()
|
|
81
|
+
del mock_settings.TASKQUEUE_APP_NAME
|
|
82
|
+
del mock_settings.CELERY_BROKER_URL
|
|
83
|
+
del mock_settings.CELERY_RESULT_BACKEND
|
|
84
|
+
del mock_settings.CELERY_TASK_SERIALIZER
|
|
85
|
+
del mock_settings.CELERY_RESULT_SERIALIZER
|
|
86
|
+
del mock_settings.CELERY_ACCEPT_CONTENT
|
|
87
|
+
del mock_settings.CELERY_TIMEZONE
|
|
88
|
+
del mock_settings.CELERY_TASK_TRACK_STARTED
|
|
89
|
+
del mock_settings.CELERY_TASK_TIME_LIMIT
|
|
90
|
+
del mock_settings.CELERY_TASK_SOFT_TIME_LIMIT
|
|
91
|
+
del mock_settings.CELERY_TASK_ALWAYS_EAGER
|
|
92
|
+
del mock_settings.CELERY_TASK_EAGER_PROPAGATES
|
|
93
|
+
|
|
94
|
+
mock_get_settings.return_value = mock_settings
|
|
95
|
+
mock_app = MagicMock()
|
|
96
|
+
mock_celery_class.return_value = mock_app
|
|
97
|
+
|
|
98
|
+
result = create_celery_app()
|
|
99
|
+
|
|
100
|
+
mock_celery_class.assert_called_once_with("taskqueue")
|
|
101
|
+
mock_configure_queues.assert_called_once()
|
|
102
|
+
mock_setup_queues.assert_not_called()
|
|
103
|
+
assert result == mock_app
|
|
104
|
+
|
|
105
|
+
|
|
106
|
+
class TestConfigureQueues:
|
|
107
|
+
"""Test configure_queues function."""
|
|
108
|
+
|
|
109
|
+
def test_configure_queues_given_valid_settings_expect_queues_configured(self):
|
|
110
|
+
mock_settings = MagicMock()
|
|
111
|
+
mock_settings.TASKQUEUE_APP_NAME = "testapp"
|
|
112
|
+
mock_settings.TASKQUEUE_QUEUES = ["default", "high", "low"]
|
|
113
|
+
mock_settings.TASKQUEUE_DLQ_NAME_PREFIX = "dlq"
|
|
114
|
+
|
|
115
|
+
celery_config = {}
|
|
116
|
+
|
|
117
|
+
configure_queues(mock_settings, celery_config)
|
|
118
|
+
|
|
119
|
+
assert celery_config["task_default_queue"] == "default"
|
|
120
|
+
assert celery_config["task_default_exchange"] == "testapp"
|
|
121
|
+
assert celery_config["task_default_exchange_type"] == "direct"
|
|
122
|
+
assert len(celery_config["task_queues"]) == 6
|
|
123
|
+
|
|
124
|
+
main_queues = [
|
|
125
|
+
q for q in celery_config["task_queues"] if not q.name.startswith("dlq.")
|
|
126
|
+
]
|
|
127
|
+
assert len(main_queues) == 3
|
|
128
|
+
assert any(q.name == "default" for q in main_queues)
|
|
129
|
+
assert any(q.name == "high" for q in main_queues)
|
|
130
|
+
assert any(q.name == "low" for q in main_queues)
|
|
131
|
+
|
|
132
|
+
dlq_queues = [
|
|
133
|
+
q for q in celery_config["task_queues"] if q.name.startswith("dlq.")
|
|
134
|
+
]
|
|
135
|
+
assert len(dlq_queues) == 3
|
|
136
|
+
assert any(q.name == "dlq.default" for q in dlq_queues)
|
|
137
|
+
assert any(q.name == "dlq.high" for q in dlq_queues)
|
|
138
|
+
assert any(q.name == "dlq.low" for q in dlq_queues)
|
|
139
|
+
|
|
140
|
+
def test_configure_queues_given_missing_settings_expect_defaults_used(self):
|
|
141
|
+
mock_settings = MagicMock()
|
|
142
|
+
del mock_settings.TASKQUEUE_APP_NAME
|
|
143
|
+
del mock_settings.TASKQUEUE_QUEUES
|
|
144
|
+
del mock_settings.TASKQUEUE_DLQ_NAME_PREFIX
|
|
145
|
+
|
|
146
|
+
celery_config = {}
|
|
147
|
+
|
|
148
|
+
configure_queues(mock_settings, celery_config)
|
|
149
|
+
|
|
150
|
+
assert celery_config["task_default_queue"] == "default"
|
|
151
|
+
assert celery_config["task_default_exchange"] == "taskqueue"
|
|
152
|
+
assert celery_config["task_default_exchange_type"] == "direct"
|
|
153
|
+
assert len(celery_config["task_queues"]) == 6
|
|
154
|
+
|
|
155
|
+
def test_configure_queues_given_single_queue_expect_correct_configuration(self):
|
|
156
|
+
mock_settings = MagicMock()
|
|
157
|
+
mock_settings.TASKQUEUE_APP_NAME = "singleapp"
|
|
158
|
+
mock_settings.TASKQUEUE_QUEUES = ["single"]
|
|
159
|
+
mock_settings.TASKQUEUE_DLQ_NAME_PREFIX = "dead"
|
|
160
|
+
|
|
161
|
+
celery_config = {}
|
|
162
|
+
|
|
163
|
+
configure_queues(mock_settings, celery_config)
|
|
164
|
+
|
|
165
|
+
assert len(celery_config["task_queues"]) == 2
|
|
166
|
+
|
|
167
|
+
main_queue = next(
|
|
168
|
+
q for q in celery_config["task_queues"] if q.name == "single")
|
|
169
|
+
assert main_queue.queue_arguments["x-dead-letter-exchange"] == "singleapp.dlx"
|
|
170
|
+
assert main_queue.queue_arguments["x-dead-letter-routing-key"] == "dead.single"
|
|
171
|
+
|
|
172
|
+
dlq = next(
|
|
173
|
+
q for q in celery_config["task_queues"] if q.name == "dead.single")
|
|
174
|
+
assert dlq.exchange.name == "singleapp.dlx"
|
|
175
|
+
|
|
176
|
+
|
|
177
|
+
class TestSetupQueues:
|
|
178
|
+
@patch("taskqueue.celery_app.Queue.declare")
|
|
179
|
+
@patch("taskqueue.celery_app.Exchange.declare")
|
|
180
|
+
def test_setup_queues_given_valid_settings_expect_broker_declaration_runs(
|
|
181
|
+
self, mock_exchange_declare, mock_queue_declare
|
|
182
|
+
):
|
|
183
|
+
mock_app = MagicMock()
|
|
184
|
+
mock_settings = MagicMock()
|
|
185
|
+
mock_settings.TASKQUEUE_APP_NAME = "testapp"
|
|
186
|
+
mock_settings.TASKQUEUE_QUEUES = ["default", "high", "low"]
|
|
187
|
+
mock_settings.TASKQUEUE_DLQ_NAME_PREFIX = "dlq"
|
|
188
|
+
|
|
189
|
+
queues = setup_queues(mock_app, mock_settings)
|
|
190
|
+
|
|
191
|
+
mock_app.connection_or_acquire.assert_called_once()
|
|
192
|
+
mock_app.conf.update.assert_called_once()
|
|
193
|
+
assert len(queues) == 6
|
|
194
|
+
assert mock_exchange_declare.call_count == 2
|
|
195
|
+
assert mock_queue_declare.call_count == 6
|
|
196
|
+
|
|
197
|
+
|
|
198
|
+
class TestCeleryAppIntegration:
|
|
199
|
+
"""Integration tests for the celery_app module."""
|
|
200
|
+
|
|
201
|
+
@patch("taskqueue.celery_app.get_django_settings")
|
|
202
|
+
def test_celery_app_import_given_django_configured_expect_app_created(
|
|
203
|
+
self, mock_get_settings
|
|
204
|
+
):
|
|
205
|
+
mock_settings = MagicMock()
|
|
206
|
+
mock_settings.TASKQUEUE_APP_NAME = "testapp"
|
|
207
|
+
mock_settings.CELERY_BROKER_URL = "redis://localhost:6379/0"
|
|
208
|
+
mock_settings.CELERY_RESULT_BACKEND = "redis://localhost:6379/0"
|
|
209
|
+
mock_settings.CELERY_TASK_SERIALIZER = "pickle"
|
|
210
|
+
mock_settings.CELERY_RESULT_SERIALIZER = "pickle"
|
|
211
|
+
mock_settings.CELERY_ACCEPT_CONTENT = ["pickle"]
|
|
212
|
+
mock_settings.CELERY_TIMEZONE = "UTC"
|
|
213
|
+
mock_settings.CELERY_TASK_TRACK_STARTED = True
|
|
214
|
+
mock_settings.CELERY_TASK_TIME_LIMIT = 1800
|
|
215
|
+
mock_settings.CELERY_TASK_SOFT_TIME_LIMIT = 1500
|
|
216
|
+
mock_settings.CELERY_TASK_ALWAYS_EAGER = False
|
|
217
|
+
mock_settings.CELERY_TASK_EAGER_PROPAGATES = True
|
|
218
|
+
mock_settings.TASKQUEUE_QUEUES = ["default"]
|
|
219
|
+
mock_settings.TASKQUEUE_DLQ_NAME_PREFIX = "dlq"
|
|
220
|
+
|
|
221
|
+
mock_get_settings.return_value = mock_settings
|
|
222
|
+
|
|
223
|
+
from taskqueue.celery_app import celery_app
|
|
224
|
+
|
|
225
|
+
assert celery_app is not None
|
|
@@ -0,0 +1,240 @@
|
|
|
1
|
+
from datetime import datetime
|
|
2
|
+
from datetime import timedelta
|
|
3
|
+
from datetime import timezone
|
|
4
|
+
from unittest.mock import MagicMock
|
|
5
|
+
from unittest.mock import patch
|
|
6
|
+
|
|
7
|
+
from taskqueue.cmanager import K_TASK_STATUS_REPUBLISHED
|
|
8
|
+
from taskqueue.dlq import get_discard_info
|
|
9
|
+
from taskqueue.dlq import retry_dlq_messages
|
|
10
|
+
from taskqueue.dlq import should_retry_message
|
|
11
|
+
from taskqueue.management.commands.retry_dlq_messages import Command
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class TestShouldRetryMessage:
    """Tests for should_retry_message(): a message is retryable only when
    its task has not already been republished and its eta is recent enough."""

    @staticmethod
    def _wire_task_result(mock_getter, first_result):
        # Wire the patched _get_task_result_model so that
        # objects.filter(...).first() returns ``first_result``.
        model = MagicMock()
        model.objects.filter.return_value.first.return_value = first_result
        mock_getter.return_value = model

    @patch("taskqueue.dlq._get_task_result_model")
    def test_should_retry_message_given_republished_task_expect_false(
        self, mock_get_task_result_model
    ):
        republished = MagicMock(status=K_TASK_STATUS_REPUBLISHED)
        self._wire_task_result(mock_get_task_result_model, republished)
        stamp = (datetime.now(timezone.utc) - timedelta(days=1)).isoformat()

        assert should_retry_message({"id": "task-1", "eta": stamp}) is False

    @patch("taskqueue.dlq._get_task_result_model")
    def test_should_retry_message_given_eta_within_limit_expect_true(
        self, mock_get_task_result_model
    ):
        self._wire_task_result(mock_get_task_result_model, None)
        stamp = (datetime.now(timezone.utc) - timedelta(days=2)).isoformat()

        assert should_retry_message({"id": "task-1", "eta": stamp}) is True

    @patch("taskqueue.dlq._get_task_result_model")
    def test_should_retry_message_given_eta_older_than_limit_expect_false(
        self, mock_get_task_result_model
    ):
        self._wire_task_result(mock_get_task_result_model, None)
        stamp = (datetime.now(timezone.utc) - timedelta(days=8)).isoformat()

        assert should_retry_message({"id": "task-1", "eta": stamp}) is False

    @patch("taskqueue.dlq._get_task_result_model")
    def test_should_retry_message_given_missing_eta_expect_false(
        self, mock_get_task_result_model
    ):
        # No "eta" header at all: the message must not be retried.
        self._wire_task_result(mock_get_task_result_model, None)

        assert should_retry_message({"id": "task-1"}) is False
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
class TestGetDiscardInfo:
    """Tests for get_discard_info(): messages whose eta is beyond the
    retention window are reported as (task_name, task_id), else None."""

    @staticmethod
    def _headers(days_old):
        # Build DLQ message headers with an eta ``days_old`` days in the past.
        stamp = (datetime.now(timezone.utc) - timedelta(days=days_old)).isoformat()
        return {"id": "task-1", "task": "my.task", "eta": stamp}

    def test_get_discard_info_given_eta_beyond_retention_expect_discard_info(self):
        assert get_discard_info(self._headers(20)) == ("my.task", "task-1")

    def test_get_discard_info_given_eta_within_retention_expect_none(self):
        assert get_discard_info(self._headers(5)) is None
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
class TestRetryDlqMessages:
    """Tests for retry_dlq_messages(): drain a DLQ and, per message, either
    republish it to the main queue, skip it, or discard it, acking in all
    three cases and returning a moved/skipped/discarded summary."""

    @patch("taskqueue.dlq.Producer")
    @patch("taskqueue.dlq.get_discard_info", return_value=None)
    @patch("taskqueue.dlq.should_retry_message", return_value=True)
    def test_retry_dlq_messages_given_retryable_message_expect_republished(
        self,
        mock_should_retry_message,
        mock_get_discard_info,
        mock_producer_class,
    ):
        # Retryable path: not discarded, should_retry_message says True.
        eta = (datetime.now(timezone.utc) - timedelta(days=1)).isoformat()
        mock_message = MagicMock()
        mock_message.headers = {"id": "task-1", "eta": eta}
        mock_message.body = b"raw-payload"
        mock_message.content_type = "application/json"
        mock_message.content_encoding = "utf-8"
        mock_message.properties = {"priority": 5}

        # DLQ yields one message, then None to stop the drain loop.
        mock_dlq_queue = MagicMock()
        mock_dlq_queue.get.side_effect = [mock_message, None]

        # Main queue supplies the republish target exchange/routing key.
        mock_main_queue = MagicMock()
        mock_main_queue.exchange = "test_taskqueue"
        mock_main_queue.routing_key = "default"

        # app.amqp.queues maps queue name -> factory returning a bound queue.
        mock_app = MagicMock()
        mock_app.amqp.queues = {
            "dlq.default": MagicMock(return_value=mock_dlq_queue),
            "default": MagicMock(return_value=mock_main_queue),
        }
        mock_app.connection_or_acquire.return_value.__enter__.return_value.default_channel = "channel"

        result = retry_dlq_messages("dlq.default", "default", app=mock_app)

        mock_should_retry_message.assert_called_once_with(mock_message.headers)
        mock_get_discard_info.assert_called_once_with(mock_message.headers)
        # The raw body is republished unchanged, with the original message's
        # content metadata and priority preserved.
        mock_producer_class.return_value.publish.assert_called_once_with(
            b"raw-payload",
            exchange="test_taskqueue",
            routing_key="default",
            headers=mock_message.headers,
            content_type="application/json",
            content_encoding="utf-8",
            declare=[mock_main_queue],
            retry=True,
            priority=5,
        )
        mock_message.ack.assert_called_once_with()
        assert result == {"moved": 1, "skipped": 0, "discarded": 0}

    @patch("taskqueue.dlq.Producer")
    @patch("taskqueue.dlq.get_discard_info", return_value=("my.task", "task-1"))
    def test_retry_dlq_messages_given_discarded_message_expect_ack_without_publish(
        self,
        mock_get_discard_info,
        mock_producer_class,
    ):
        # Discard path: get_discard_info reports the message as expired,
        # so it is acked (dropped) without any republish.
        eta = (datetime.now(timezone.utc) - timedelta(days=20)).isoformat()
        mock_message = MagicMock()
        mock_message.headers = {
            "id": "task-1",
            "task": "my.task",
            "eta": eta,
        }

        mock_dlq_queue = MagicMock()
        mock_dlq_queue.get.side_effect = [mock_message, None]

        mock_main_queue = MagicMock()
        mock_app = MagicMock()
        mock_app.amqp.queues = {
            "dlq.default": MagicMock(return_value=mock_dlq_queue),
            "default": MagicMock(return_value=mock_main_queue),
        }
        mock_app.connection_or_acquire.return_value.__enter__.return_value.default_channel = "channel"

        result = retry_dlq_messages("dlq.default", "default", app=mock_app)

        mock_get_discard_info.assert_called_once_with(mock_message.headers)
        mock_producer_class.return_value.publish.assert_not_called()
        mock_message.ack.assert_called_once_with()
        assert result == {"moved": 0, "skipped": 0, "discarded": 1}

    @patch("taskqueue.dlq.Producer")
    @patch("taskqueue.dlq.get_discard_info", return_value=None)
    @patch("taskqueue.dlq.should_retry_message", return_value=False)
    def test_retry_dlq_messages_given_skipped_message_expect_ack_without_publish(
        self,
        mock_should_retry_message,
        mock_get_discard_info,
        mock_producer_class,
    ):
        # Skip path: not yet discardable, but should_retry_message says False;
        # the message is acked without being republished.
        eta = (datetime.now(timezone.utc) - timedelta(days=8)).isoformat()
        mock_message = MagicMock()
        mock_message.headers = {"id": "task-1", "eta": eta}

        mock_dlq_queue = MagicMock()
        mock_dlq_queue.get.side_effect = [mock_message, None]

        mock_main_queue = MagicMock()
        mock_app = MagicMock()
        mock_app.amqp.queues = {
            "dlq.default": MagicMock(return_value=mock_dlq_queue),
            "default": MagicMock(return_value=mock_main_queue),
        }
        mock_app.connection_or_acquire.return_value.__enter__.return_value.default_channel = "channel"

        result = retry_dlq_messages("dlq.default", "default", app=mock_app)

        mock_get_discard_info.assert_called_once_with(mock_message.headers)
        mock_should_retry_message.assert_called_once_with(mock_message.headers)
        mock_producer_class.return_value.publish.assert_not_called()
        mock_message.ack.assert_called_once_with()
        assert result == {"moved": 0, "skipped": 1, "discarded": 0}
|
|
211
|
+
|
|
212
|
+
|
|
213
|
+
class TestRetryDlqMessagesCommand:
    """Tests for the retry_dlq_messages management command."""

    @patch("taskqueue.management.commands.retry_dlq_messages.retry_dlq_messages")
    def test_command_given_multiple_queues_expect_each_dlq_processed(
        self, mock_retry_dlq_messages, caplog
    ):
        # One result dict per configured queue, in configuration order.
        per_queue_results = [
            {"moved": 1, "skipped": 0, "discarded": 0},
            {"moved": 2, "skipped": 1, "discarded": 1},
        ]
        mock_retry_dlq_messages.side_effect = per_queue_results

        settings_path = "taskqueue.management.commands.retry_dlq_messages.settings"
        with patch(settings_path) as fake_settings, caplog.at_level("INFO"):
            fake_settings.TASKQUEUE_QUEUES = ["default", "high"]
            fake_settings.TASKQUEUE_DLQ_NAME_PREFIX = "dlq"
            Command().handle()

        observed_calls = [c.args for c in mock_retry_dlq_messages.call_args_list]
        assert observed_calls[0] == ("dlq.default", "default")
        assert observed_calls[1] == ("dlq.high", "high")
        # Totals are summed across queues in the final log line.
        assert "Retried 3 messages, skipped 1, discarded 1." in caplog.text
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
from unittest.mock import patch
|
|
2
|
+
|
|
3
|
+
from taskqueue.management.commands.setup_taskqueue import Command
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class TestSetupTaskqueueCommand:
    """Tests for the setup_taskqueue management command."""

    @patch("taskqueue.management.commands.setup_taskqueue.setup_queues")
    def test_setup_taskqueue_command_given_success_expect_queues_declared(
        self, mock_setup_queues, caplog
    ):
        # Pretend the helper declared two queues.
        mock_setup_queues.return_value = ["default", "high"]

        with caplog.at_level("INFO"):
            Command().handle()

        mock_setup_queues.assert_called_once_with()
        assert "Declared 2 TaskQueue queues successfully." in caplog.text
|
|
@@ -1,186 +0,0 @@
|
|
|
1
|
-
"""Tests for the celery_app module."""
|
|
2
|
-
from unittest.mock import MagicMock
|
|
3
|
-
from unittest.mock import patch
|
|
4
|
-
|
|
5
|
-
import pytest
|
|
6
|
-
from taskqueue.celery_app import create_celery_app
|
|
7
|
-
from taskqueue.celery_app import get_django_settings
|
|
8
|
-
from taskqueue.celery_app import setup_queues
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
class TestGetDjangoSettings:
    """Tests for get_django_settings()."""

    def test_get_django_settings_given_django_available_expect_return_settings(self):
        # When django.conf.settings is importable, the helper returns it as-is.
        with patch('django.conf.settings') as mock_settings:
            result = get_django_settings()
            assert result == mock_settings

    def test_get_django_settings_given_django_not_available_expect_raise_import_error(self):
        # Simulate Django being absent; the helper must re-raise with its own
        # "[TaskQueue]" message.
        # NOTE(review): patching builtins.__import__ fails *every* import inside
        # the block, not just django — confirm that is the intended simulation.
        with patch('builtins.__import__', side_effect=ImportError("No module named 'django'")):
            with pytest.raises(ImportError, match="\\[TaskQueue\\] Django settings not found\\."):
                get_django_settings()
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
class TestCreateCeleryApp:
    """Tests for create_celery_app() with fully-specified and missing settings."""

    @patch('taskqueue.celery_app.setup_queues')
    @patch('taskqueue.celery_app.get_django_settings')
    @patch('taskqueue.celery_app.Celery')
    def test_create_celery_app_given_valid_settings_expect_celery_app_created(
        self, mock_celery_class, mock_get_settings, mock_setup_queues
    ):
        # Fully populated Django settings: every CELERY_* value is explicit.
        mock_settings = MagicMock()
        mock_settings.TASKQUEUE_APP_NAME = 'testapp'
        mock_settings.CELERY_BROKER_URL = 'redis://localhost:6379/0'
        mock_settings.CELERY_RESULT_BACKEND = 'redis://localhost:6379/0'
        mock_settings.CELERY_TASK_SERIALIZER = 'pickle'
        mock_settings.CELERY_RESULT_SERIALIZER = 'pickle'
        mock_settings.CELERY_ACCEPT_CONTENT = ['pickle']
        mock_settings.CELERY_TIMEZONE = 'UTC'
        mock_settings.CELERY_TASK_TRACK_STARTED = True
        mock_settings.CELERY_TASK_TIME_LIMIT = 1800
        mock_settings.CELERY_TASK_SOFT_TIME_LIMIT = 1500
        mock_settings.CELERY_TASK_ALWAYS_EAGER = False
        mock_settings.CELERY_TASK_EAGER_PROPAGATES = True

        mock_get_settings.return_value = mock_settings
        mock_app = MagicMock()
        mock_celery_class.return_value = mock_app

        result = create_celery_app()

        # The app is named from TASKQUEUE_APP_NAME, configured once, and
        # autodiscovers tasks from the 'taskqueue' package.
        mock_celery_class.assert_called_once_with('testapp')
        # mock_setup_queues.assert_called_once()
        mock_app.conf.update.assert_called_once()
        mock_app.autodiscover_tasks.assert_called_once_with(['taskqueue'])
        assert result == mock_app

    @patch('taskqueue.celery_app.setup_queues')
    @patch('taskqueue.celery_app.get_django_settings')
    @patch('taskqueue.celery_app.Celery')
    def test_create_celery_app_given_missing_settings_expect_defaults_used(
        self, mock_celery_class, mock_get_settings, mock_setup_queues
    ):
        # Deleting an attribute from a MagicMock makes hasattr()/getattr fail,
        # which forces create_celery_app() onto its built-in defaults.
        mock_settings = MagicMock()
        del mock_settings.TASKQUEUE_APP_NAME
        del mock_settings.CELERY_BROKER_URL
        del mock_settings.CELERY_RESULT_BACKEND
        del mock_settings.CELERY_TASK_SERIALIZER
        del mock_settings.CELERY_RESULT_SERIALIZER
        del mock_settings.CELERY_ACCEPT_CONTENT
        del mock_settings.CELERY_TIMEZONE
        del mock_settings.CELERY_TASK_TRACK_STARTED
        del mock_settings.CELERY_TASK_TIME_LIMIT
        del mock_settings.CELERY_TASK_SOFT_TIME_LIMIT
        del mock_settings.CELERY_TASK_ALWAYS_EAGER
        del mock_settings.CELERY_TASK_EAGER_PROPAGATES

        mock_get_settings.return_value = mock_settings
        mock_app = MagicMock()
        mock_celery_class.return_value = mock_app

        result = create_celery_app()

        # Default app name is 'taskqueue' when TASKQUEUE_APP_NAME is unset.
        mock_celery_class.assert_called_once_with('taskqueue')
        assert result == mock_app
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
class TestSetupQueues:
    """Test setup_queues function."""

    def test_setup_queues_given_valid_settings_expect_queues_configured(self):
        """Test that setup_queues configures queues and DLQs correctly."""
        mock_app = MagicMock()
        mock_settings = MagicMock()
        mock_settings.TASKQUEUE_APP_NAME = 'testapp'
        mock_settings.TASKQUEUE_QUEUES = ['default', 'high', 'low']
        mock_settings.TASKQUEUE_DLQ_NAME_PREFIX = 'dlq'

        celery_config = {}

        setup_queues(mock_app, mock_settings, celery_config)

        # Assertions
        assert celery_config['task_default_queue'] == 'default'
        assert celery_config['task_default_exchange'] == 'testapp'
        assert celery_config['task_default_exchange_type'] == 'direct'
        # 3 configured queues + 1 DLQ each = 6 total.
        assert len(celery_config['task_queues']) == 6

        # One main queue per configured name.
        main_queues = [q for q in celery_config['task_queues']
                       if not q.name.startswith('dlq.')]
        assert len(main_queues) == 3
        assert any(q.name == 'default' for q in main_queues)
        assert any(q.name == 'high' for q in main_queues)
        assert any(q.name == 'low' for q in main_queues)

        # One DLQ per main queue, named with the configured prefix.
        dlq_queues = [q for q in celery_config['task_queues']
                      if q.name.startswith('dlq.')]
        assert len(dlq_queues) == 3
        assert any(q.name == 'dlq.default' for q in dlq_queues)
        assert any(q.name == 'dlq.high' for q in dlq_queues)
        assert any(q.name == 'dlq.low' for q in dlq_queues)

    def test_setup_queues_given_missing_settings_expect_defaults_used(self):
        # With no TASKQUEUE_* attributes, setup_queues falls back to
        # app name 'taskqueue', queues ['default', ...] and prefix 'dlq'.
        mock_app = MagicMock()
        mock_settings = MagicMock()
        del mock_settings.TASKQUEUE_APP_NAME
        del mock_settings.TASKQUEUE_QUEUES
        del mock_settings.TASKQUEUE_DLQ_NAME_PREFIX

        celery_config = {}

        setup_queues(mock_app, mock_settings, celery_config)

        assert celery_config['task_default_queue'] == 'default'
        assert celery_config['task_default_exchange'] == 'taskqueue'
        assert celery_config['task_default_exchange_type'] == 'direct'
        assert len(celery_config['task_queues']) == 6

    def test_setup_queues_given_single_queue_expect_correct_configuration(self):
        # Single queue with a custom DLQ prefix: verify the dead-letter
        # wiring between the main queue and its DLQ.
        mock_app = MagicMock()
        mock_settings = MagicMock()
        mock_settings.TASKQUEUE_APP_NAME = 'singleapp'
        mock_settings.TASKQUEUE_QUEUES = ['single']
        mock_settings.TASKQUEUE_DLQ_NAME_PREFIX = 'dead'

        celery_config = {}

        setup_queues(mock_app, mock_settings, celery_config)

        assert len(celery_config['task_queues']) == 2

        # The main queue dead-letters into '<app>.dlx' with routing key
        # '<prefix>.<queue>'.
        main_queue = next(
            q for q in celery_config['task_queues'] if q.name == 'single')
        assert main_queue.queue_arguments['x-dead-letter-exchange'] == 'singleapp.dlx'
        assert main_queue.queue_arguments['x-dead-letter-routing-key'] == 'dead.single'

        # The DLQ is bound to that dead-letter exchange.
        dlq = next(
            q for q in celery_config['task_queues'] if q.name == 'dead.single')
        assert dlq.exchange.name == 'singleapp.dlx'
|
|
160
|
-
|
|
161
|
-
|
|
162
|
-
class TestCeleryAppIntegration:
    """Integration tests for the celery_app module."""

    @patch('taskqueue.celery_app.get_django_settings')
    def test_celery_app_import_given_django_configured_expect_app_created(self, mock_get_settings):
        # Provide a complete settings object so importing the module-level
        # celery_app does not hit real Django configuration.
        mock_settings = MagicMock()
        mock_settings.TASKQUEUE_APP_NAME = 'testapp'
        mock_settings.CELERY_BROKER_URL = 'redis://localhost:6379/0'
        mock_settings.CELERY_RESULT_BACKEND = 'redis://localhost:6379/0'
        mock_settings.CELERY_TASK_SERIALIZER = 'pickle'
        mock_settings.CELERY_RESULT_SERIALIZER = 'pickle'
        mock_settings.CELERY_ACCEPT_CONTENT = ['pickle']
        mock_settings.CELERY_TIMEZONE = 'UTC'
        mock_settings.CELERY_TASK_TRACK_STARTED = True
        mock_settings.CELERY_TASK_TIME_LIMIT = 1800
        mock_settings.CELERY_TASK_SOFT_TIME_LIMIT = 1500
        mock_settings.CELERY_TASK_ALWAYS_EAGER = False
        mock_settings.CELERY_TASK_EAGER_PROPAGATES = True
        mock_settings.TASKQUEUE_QUEUES = ['default']
        mock_settings.TASKQUEUE_DLQ_NAME_PREFIX = 'dlq'

        mock_get_settings.return_value = mock_settings

        # Import inside the test so the patch above is active when the
        # module-level app is (re)used.
        # NOTE(review): if taskqueue.celery_app was already imported by an
        # earlier test, this re-import returns the cached module — confirm
        # the patch still matters in that case.
        from taskqueue.celery_app import celery_app
        assert celery_app is not None
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|