p1-taskqueue 0.1.10__tar.gz → 0.1.19__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of p1-taskqueue has been flagged as potentially problematic.

Files changed (21)
  1. {p1_taskqueue-0.1.10 → p1_taskqueue-0.1.19}/PKG-INFO +7 -5
  2. {p1_taskqueue-0.1.10 → p1_taskqueue-0.1.19}/pyproject.toml +7 -5
  3. {p1_taskqueue-0.1.10 → p1_taskqueue-0.1.19}/src/p1_taskqueue.egg-info/PKG-INFO +7 -5
  4. {p1_taskqueue-0.1.10 → p1_taskqueue-0.1.19}/src/p1_taskqueue.egg-info/SOURCES.txt +1 -0
  5. p1_taskqueue-0.1.19/src/p1_taskqueue.egg-info/requires.txt +15 -0
  6. {p1_taskqueue-0.1.10 → p1_taskqueue-0.1.19}/src/taskqueue/celery_app.py +53 -5
  7. {p1_taskqueue-0.1.10 → p1_taskqueue-0.1.19}/src/taskqueue/cmanager.py +144 -49
  8. {p1_taskqueue-0.1.10 → p1_taskqueue-0.1.19}/src/taskqueue/libs/helper_test.py +33 -2
  9. p1_taskqueue-0.1.19/src/taskqueue/slack_notifier.py +51 -0
  10. {p1_taskqueue-0.1.10 → p1_taskqueue-0.1.19}/tests/test_celery_app.py +1 -1
  11. {p1_taskqueue-0.1.10 → p1_taskqueue-0.1.19}/tests/test_cmanager.py +389 -5
  12. {p1_taskqueue-0.1.10 → p1_taskqueue-0.1.19}/tests/test_helper_test_functions.py +151 -0
  13. {p1_taskqueue-0.1.10 → p1_taskqueue-0.1.19}/tests/test_test_utils.py +101 -2
  14. p1_taskqueue-0.1.10/src/p1_taskqueue.egg-info/requires.txt +0 -13
  15. {p1_taskqueue-0.1.10 → p1_taskqueue-0.1.19}/README.md +0 -0
  16. {p1_taskqueue-0.1.10 → p1_taskqueue-0.1.19}/setup.cfg +0 -0
  17. {p1_taskqueue-0.1.10 → p1_taskqueue-0.1.19}/src/p1_taskqueue.egg-info/dependency_links.txt +0 -0
  18. {p1_taskqueue-0.1.10 → p1_taskqueue-0.1.19}/src/p1_taskqueue.egg-info/top_level.txt +0 -0
  19. {p1_taskqueue-0.1.10 → p1_taskqueue-0.1.19}/src/taskqueue/__init__.py +0 -0
  20. {p1_taskqueue-0.1.10 → p1_taskqueue-0.1.19}/src/taskqueue/libs/__init__.py +0 -0
  21. {p1_taskqueue-0.1.10 → p1_taskqueue-0.1.19}/tests/test_return_values.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: p1-taskqueue
- Version: 0.1.10
+ Version: 0.1.19
  Summary: A Task Queue Wrapper for Dekoruma Backend
  Author-email: Chalvin <engineering@dekoruma.com>
  Project-URL: Homepage, https://github.com/Dekoruma/p1-taskqueue
@@ -13,11 +13,13 @@ Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
  Requires-Python: >=3.11
  Description-Content-Type: text/markdown
- Requires-Dist: celery>=5.4.0
- Requires-Dist: flower>=2.0.1
- Requires-Dist: redis>=6.2.0
- Requires-Dist: kombu>=5.3.4
+ Requires-Dist: celery>=5.5.3
+ Requires-Dist: redis>=6.4.0
+ Requires-Dist: kombu>=5.5.4
  Requires-Dist: django>=4.0.0
+ Requires-Dist: django-celery-results>=2.6.0
+ Requires-Dist: django-celery-beat>=2.8.1
+ Requires-Dist: requests>=2.32.3
  Provides-Extra: dev
  Requires-Dist: pytest>=7.0.0; extra == "dev"
  Requires-Dist: pytest-cov>=4.0.0; extra == "dev"
@@ -5,7 +5,7 @@ build-backend = "setuptools.build_meta"
  [project]
  name = "p1-taskqueue"
  # DO NOT CHANGE THIS VERSION - it gets automatically replaced by CI/CD with the git tag version
- version = "0.1.10"
+ version = "0.1.19"
  description = "A Task Queue Wrapper for Dekoruma Backend"
  authors = [
      {name = "Chalvin", email = "engineering@dekoruma.com"}
@@ -20,11 +20,13 @@ classifiers = [
      "Programming Language :: Python :: 3.12",
  ]
  dependencies = [
-     "celery>=5.4.0",
-     "flower>=2.0.1",
-     "redis>=6.2.0",
-     "kombu>=5.3.4",
+     "celery>=5.5.3",
+     "redis>=6.4.0",
+     "kombu>=5.5.4",
      "django>=4.0.0",
+     "django-celery-results>=2.6.0",
+     "django-celery-beat>=2.8.1",
+     "requests>=2.32.3",
  ]

  [project.optional-dependencies]
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: p1-taskqueue
- Version: 0.1.10
+ Version: 0.1.19
  Summary: A Task Queue Wrapper for Dekoruma Backend
  Author-email: Chalvin <engineering@dekoruma.com>
  Project-URL: Homepage, https://github.com/Dekoruma/p1-taskqueue
@@ -13,11 +13,13 @@ Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
  Requires-Python: >=3.11
  Description-Content-Type: text/markdown
- Requires-Dist: celery>=5.4.0
- Requires-Dist: flower>=2.0.1
- Requires-Dist: redis>=6.2.0
- Requires-Dist: kombu>=5.3.4
+ Requires-Dist: celery>=5.5.3
+ Requires-Dist: redis>=6.4.0
+ Requires-Dist: kombu>=5.5.4
  Requires-Dist: django>=4.0.0
+ Requires-Dist: django-celery-results>=2.6.0
+ Requires-Dist: django-celery-beat>=2.8.1
+ Requires-Dist: requests>=2.32.3
  Provides-Extra: dev
  Requires-Dist: pytest>=7.0.0; extra == "dev"
  Requires-Dist: pytest-cov>=4.0.0; extra == "dev"
@@ -8,6 +8,7 @@ src/p1_taskqueue.egg-info/top_level.txt
  src/taskqueue/__init__.py
  src/taskqueue/celery_app.py
  src/taskqueue/cmanager.py
+ src/taskqueue/slack_notifier.py
  src/taskqueue/libs/__init__.py
  src/taskqueue/libs/helper_test.py
  tests/test_celery_app.py
@@ -0,0 +1,15 @@
+ celery>=5.5.3
+ redis>=6.4.0
+ kombu>=5.5.4
+ django>=4.0.0
+ django-celery-results>=2.6.0
+ django-celery-beat>=2.8.1
+ requests>=2.32.3
+
+ [dev]
+ pytest>=7.0.0
+ pytest-cov>=4.0.0
+ black>=23.0.0
+ flake8>=6.0.0
+ mypy>=1.0.0
+ pre-commit>=3.0.0
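The dependency floor moves from celery 5.4/redis 6.2/kombu 5.3 to celery 5.5.3/redis 6.4/kombu 5.5.4, flower is dropped, and django-celery-results, django-celery-beat, and requests (used by the new Slack notifier) are added. The two django-celery packages are ordinary Django apps, so a consuming project normally has to register them before their database-backed result store or beat scheduler can be used. A minimal, illustrative settings fragment; only the two app labels are standard, and the result-backend line is an assumption, since this diff does not show how the backend is wired:

    # settings.py of a consuming Django project (illustrative sketch, not part of this package)
    INSTALLED_APPS = [
        # ... existing apps ...
        "django_celery_results",   # database-backed task results
        "django_celery_beat",      # database-backed periodic task schedule
    ]
    CELERY_RESULT_BACKEND = "django-db"  # assumption: backend wiring is not shown in this diff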
@@ -2,10 +2,15 @@
  Celery application setup for TaskQueue.
  Reads configuration from Django settings and auto-configures queues with DLQ.
  """
+ import logging
+
+ from amqp.exceptions import PreconditionFailed
  from celery import Celery
  from kombu import Exchange
  from kombu import Queue

+ logger = logging.getLogger(__name__)
+

  def get_django_settings():
      """Get Django settings, fail fast if not properly configured."""
@@ -33,6 +38,8 @@ def create_celery_app():
          'timezone': getattr(settings, 'CELERY_TIMEZONE', 'UTC+7'),
          'task_time_limit': getattr(settings, 'CELERY_TASK_TIME_LIMIT', 30 * 60),
          'task_soft_time_limit': getattr(settings, 'CELERY_TASK_SOFT_TIME_LIMIT', 25 * 60),
+         # 14 days
+         'result_expires': getattr(settings, 'CELERY_RESULT_EXPIRES', 14 * 24 * 60 * 60),
          'task_track_started': True,
          'task_always_eager': False,
          'task_eager_propagates': True,
@@ -61,7 +68,9 @@ def setup_queues(app, settings, celery_config):
      queue_names = ['default', 'high', 'low']
      dlq_name_prefix = getattr(settings, 'TASKQUEUE_DLQ_NAME_PREFIX', 'dlq')

-     # Create exchanges
+     logger.info(
+         f"[TaskQueue] Configuring app: {app_name}, queues: {queue_names}")
+
      main_exchange = Exchange(app_name, type='direct')
      dlx_exchange = Exchange(f'{app_name}.dlx', type='direct')

@@ -69,22 +78,28 @@ def setup_queues(app, settings, celery_config):

      for queue_name in queue_names:
          dlq_name = f'{dlq_name_prefix}.{queue_name}'
+         dlx_name = f'{app_name}.dlx'
+
+         queue_args = {
+             'x-dead-letter-exchange': dlx_name,
+             'x-dead-letter-routing-key': dlq_name
+         }

          queue = Queue(
              queue_name,
              main_exchange,
              routing_key=queue_name,
-             queue_arguments={
-                 'x-dead-letter-exchange': f'{app_name}.dlx',
-                 'x-dead-letter-routing-key': dlq_name
-             }
+             queue_arguments=queue_args
          )
          queues.append(queue)
+         logger.info(
+             f"[TaskQueue] Queue '{queue_name}' configured with DLX: {dlx_name}, DLQ routing key: {dlq_name}")

      for queue_name in queue_names:
          dlq_name = f'{dlq_name_prefix}.{queue_name}'
          dlq = Queue(dlq_name, dlx_exchange, routing_key=dlq_name)
          queues.append(dlq)
+         logger.info(f"[TaskQueue] DLQ '{dlq_name}' configured")

      celery_config.update({
          'task_default_queue': 'default',
@@ -93,5 +108,38 @@ def setup_queues(app, settings, celery_config):
          'task_queues': tuple(queues),
      })

+     try:
+         with app.connection_or_acquire() as conn:
+             channel = conn.default_channel
+
+             try:
+                 main_exchange.declare(channel=channel)
+                 logger.info(f"[TaskQueue] Exchange declared: {app_name}")
+             except PreconditionFailed:
+                 logger.info(f"[TaskQueue] Exchange already exists: {app_name}")
+
+             try:
+                 dlx_exchange.declare(channel=channel)
+                 logger.info(
+                     f"[TaskQueue] DLX Exchange declared: {app_name}.dlx")
+             except PreconditionFailed:
+                 logger.info(
+                     f"[TaskQueue] DLX Exchange already exists: {app_name}.dlx")
+
+             for queue in queues:
+                 try:
+                     queue.declare(channel=channel)
+                     logger.info(f"[TaskQueue] Queue declared: {queue.name}")
+                 except PreconditionFailed:
+                     logger.info(
+                         f"[TaskQueue] Queue already exists with different config: {queue.name}. Using existing queue.")
+                 except Exception as e:
+                     logger.warning(
+                         f"[TaskQueue] Failed to declare queue {queue.name}: {e}")
+
+     except Exception as e:
+         logger.warning(
+             f"[TaskQueue] Failed to setup queues: {str(e.__class__.__name__)} {e}")
+

  celery_app = create_celery_app()
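create_celery_app() and setup_queues() pull every tunable from the host project's Django settings through getattr fallbacks, so the defaults above apply without any configuration. A sketch of the settings this diff actually references, with illustrative values (only the names are taken from the code; result_expires support is the new addition in this release):

    # Django settings read by taskqueue.celery_app (values shown are examples only)
    CELERY_TIMEZONE = 'UTC+7'                    # code default
    CELERY_TASK_TIME_LIMIT = 30 * 60             # hard time limit, seconds
    CELERY_TASK_SOFT_TIME_LIMIT = 25 * 60        # soft limit raised five minutes earlier
    CELERY_RESULT_EXPIRES = 14 * 24 * 60 * 60    # new in 0.1.19: keep results for 14 days
    TASKQUEUE_DLQ_NAME_PREFIX = 'dlq'            # DLQs become dlq.default, dlq.high, dlq.low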
@@ -1,14 +1,13 @@
  import importlib
  import inspect
  import logging
- from datetime import datetime
- from datetime import timedelta
  from typing import Any
  from typing import Dict
  from typing import Tuple

  from celery import shared_task
  from celery.exceptions import Reject
+ from taskqueue.slack_notifier import SlackbotManager

  # Setup logger
  logger = logging.getLogger(__name__)
@@ -31,12 +30,12 @@ def _is_class_method(func: Any) -> bool:
  def taskqueue_class(cls):
      """Decorator to automatically capture init arguments for taskqueue."""
      original_init = cls.__init__
-
+
      def wrapped_init(self, *args, **kwargs):
          self._taskqueue_init_args = list(args)
          self._taskqueue_init_kwargs = dict(kwargs)
          original_init(self, *args, **kwargs)
-
+
      cls.__init__ = wrapped_init
      return cls

@@ -51,7 +50,8 @@ def _extract_init_args_from_instance(instance: Any) -> Tuple[list, dict]:
  def _split_function_and_queue_kwargs(kwargs: Dict[str, Any]) -> Tuple[Dict[str, Any], Dict[str, Any]]:
      # To prevent confusion whether a kwargs is for function or queue kwargs(i.e celery options and on_commit),
      # ignore confusing kwargs while give warning
-     supported_queue_keys = {"channel", "retry", "on_commit", "job_timeout"}
+     supported_queue_keys = {"channel", "retry",
+                             "on_commit", "job_timeout", "use_legacy_executor"}
      ignored_non_function_keys = {
          "queue", "countdown", "eta", "expires", "priority", "task_id", "routing_key",
          "serializer", "compression", "headers", "link", "link_error", "retry_policy",
@@ -75,6 +75,17 @@ def _split_function_and_queue_kwargs(kwargs: Dict[str, Any]) -> Tuple[Dict[str,
      return func_kwargs, queue_kwargs


+ def _build_callable_task_call(func: Any, func_args: tuple, func_kwargs: dict) -> Tuple[str, list, dict]:
+     task_name = "taskqueue.cmanager.callable_executor"
+     task_args = []
+     task_kwargs = {
+         "callable_obj": func,
+         "args": list(func_args),
+         "kwargs": dict(func_kwargs),
+     }
+     return task_name, task_args, task_kwargs
+
+
  def _build_dynamic_task_call(func: Any, *args: Any, **func_kwargs: Any) -> Tuple[str, list, dict]:
      if _is_class_method(func):
          instance = getattr(func, "__self__")
@@ -82,9 +93,9 @@ def _build_dynamic_task_call(func: Any, *args: Any, **func_kwargs: Any) -> Tuple
          module_path = klass.__module__
          class_name = klass.__name__
          method_name = func.__name__
-
+
          init_args, init_kwargs = _extract_init_args_from_instance(instance)
-
+
          task_name = "taskqueue.cmanager.dynamic_class_method_executor"
          task_args = []
          task_kwargs: Dict[str, Any] = {
@@ -156,47 +167,21 @@ class CManager:
              'enqueue_op_type', K_ENQUEUE_OP_TYPE_ENQUEUE)

          try:
-             if enqueue_op_type == K_ENQUEUE_OP_TYPE_ENQUEUE:
-                 if not args:
-                     raise ValueError(
-                         "enqueue requires a callable as the first positional argument")
-                 func = args[0]
-                 func_args = args[1:]
-
-             elif enqueue_op_type == K_ENQUEUE_OP_TYPE_ENQUEUE_AT:
-                 if len(args) < 2:
-                     raise ValueError(
-                         "enqueue_at requires (eta_datetime, func, *func_args)")
-                 eta: datetime = args[0]
-                 func = args[1]
-                 func_args = args[2:]
-
-             elif enqueue_op_type == K_ENQUEUE_OP_TYPE_ENQUEUE_IN:
-                 if len(args) < 2:
-                     raise ValueError(
-                         "enqueue_in requires (countdown_delta, func, *func_args)")
-                 delta: timedelta = args[0]
-                 func = args[1]
-                 func_args = args[2:]
-             else:
-                 raise ValueError(
-                     f"Unknown enqueue operation type: {enqueue_op_type}")
-
-             func_kwargs, queue_kwargs = _split_function_and_queue_kwargs(
-                 kwargs)
+             func, func_args, func_kwargs, queue_options = self._parse_enqueue_args(
+                 enqueue_op_type, args, kwargs)

-             if enqueue_op_type == K_ENQUEUE_OP_TYPE_ENQUEUE_AT:
-                 queue_kwargs = dict(queue_kwargs)
-                 queue_kwargs["eta"] = eta
-             elif enqueue_op_type == K_ENQUEUE_OP_TYPE_ENQUEUE_IN:
-                 queue_kwargs = dict(queue_kwargs)
-                 queue_kwargs["countdown"] = int(delta.total_seconds())
+             use_legacy_executor = queue_options.pop(
+                 'use_legacy_executor', False)

-             task_name, task_args, task_kwargs = _build_dynamic_task_call(
-                 func, *func_args, **func_kwargs)
+             if use_legacy_executor:
+                 task_name, task_args, task_kwargs = _build_dynamic_task_call(
+                     func, *func_args, **func_kwargs)
+             else:
+                 task_name, task_args, task_kwargs = _build_callable_task_call(
+                     func, func_args, func_kwargs)

              task_id = self._send_task(task_name, task_args,
-                                       task_kwargs, queue_kwargs)
+                                       task_kwargs, queue_options)

              logger.info('[_enqueue_op_base %s] Submit Celery Task SUCCESS, task_name: %s args: %s, kwargs: %s, task_id: %s' % (
                  enqueue_op_type, task_name, task_args, task_kwargs, task_id))
@@ -206,6 +191,46 @@ class CManager:
                  enqueue_op_type, str(e), args, kwargs))
              raise e

+     def _parse_enqueue_args(self, enqueue_op_type: str, args: tuple, kwargs: dict) -> Tuple[Any, tuple, dict, dict]:
+         """Parse enqueue arguments and return func, func_args, func_kwargs, and queue_options."""
+         if enqueue_op_type == K_ENQUEUE_OP_TYPE_ENQUEUE:
+             if not args:
+                 raise ValueError(
+                     "enqueue requires a callable as the first positional argument")
+             func = args[0]
+             func_args = args[1:]
+             eta, delta = None, None
+
+         elif enqueue_op_type == K_ENQUEUE_OP_TYPE_ENQUEUE_AT:
+             if len(args) < 2:
+                 raise ValueError(
+                     "enqueue_at requires (eta_datetime, func, *func_args)")
+             eta = args[0]
+             func = args[1]
+             func_args = args[2:]
+             delta = None
+
+         elif enqueue_op_type == K_ENQUEUE_OP_TYPE_ENQUEUE_IN:
+             if len(args) < 2:
+                 raise ValueError(
+                     "enqueue_in requires (countdown_delta, func, *func_args)")
+             delta = args[0]
+             func = args[1]
+             func_args = args[2:]
+             eta = None
+         else:
+             raise ValueError(
+                 f"Unknown enqueue operation type: {enqueue_op_type}")
+
+         func_kwargs, queue_options = _split_function_and_queue_kwargs(kwargs)
+
+         if eta is not None:
+             queue_options["eta"] = eta
+         elif delta is not None:
+             queue_options["countdown"] = int(delta.total_seconds())
+
+         return func, func_args, func_kwargs, queue_options
+
      def _send_task(self, task_name: str, task_args: list, task_kwargs: dict, queue_kwargs: Dict[str, Any]) -> str:
          celery_app = self._get_celery_app()

@@ -238,8 +263,54 @@ class CManager:
  cm = CManager()


- # Dynamic task executors - handle function and class method execution
- @shared_task(bind=True, max_retries=K_MAX_RETRY_COUNT)
+ @shared_task(bind=True, max_retries=K_MAX_RETRY_COUNT, acks_late=True, reject_on_worker_lost=True)
+ def callable_executor(self, callable_obj=None, args=None, kwargs=None, retry=None):
+     job_id = self.request.id
+     try:
+         args = args or []
+         kwargs = kwargs or {}
+         callable_name = getattr(callable_obj, '__name__', str(callable_obj))
+
+         logger.info(
+             f"[TaskQueue] Executing callable: {callable_name} with args: {args} and kwargs: {kwargs}, job_id: {job_id}")
+
+         callable_obj(*args, **kwargs)
+
+         logger.info(
+             f"[TaskQueue] Callable execution completed successfully, callable: {callable_name}, args: {args}, kwargs: {kwargs}, job_id: {job_id}")
+         return None
+     except Exception as e:
+         logger.exception(
+             f"[TaskQueue] Error executing callable: {callable_name}, args: {args}, kwargs: {kwargs}, error_class: {e.__class__.__name__}, error: {e}, job_id: {job_id}")
+
+         current_retries = getattr(self.request, 'retries', 0) or 0
+         max_retries = self.max_retries or K_MAX_RETRY_COUNT
+         if isinstance(retry, dict) and 'max_retries' in retry:
+             max_retries = retry['max_retries']
+
+         if current_retries >= max_retries:
+             logger.error(
+                 f"[TaskQueue] Max retries ({max_retries}) reached for callable: {callable_name}, job_id: {job_id}")
+             self.update_state(state='FAILURE', meta={
+                 'exc_type': type(e).__name__, 'exc_message': str(e)})
+
+             SlackbotManager.send_message(
+                 f"Job Failed Too Many Times - Moving back to dlq.\n"
+                 f"function name: {callable_name}\n"
+                 f"args: {args}\n"
+                 f"kwargs: {kwargs}"
+             )
+
+             raise Reject(reason=str(e), requeue=False)
+
+         countdown = K_DEFAULT_RETRY_COUNTDOWN
+         if isinstance(retry, dict) and 'countdown' in retry:
+             countdown = retry['countdown']
+
+         raise self.retry(exc=e, countdown=countdown, max_retries=max_retries)
+
+
+ @shared_task(bind=True, max_retries=K_MAX_RETRY_COUNT, acks_late=True, reject_on_worker_lost=True)
  def dynamic_function_executor(self, module_path=None, function_name=None, args=None, kwargs=None, retry=None):
      job_id = self.request.id
      try:
@@ -262,7 +333,19 @@ def dynamic_function_executor(self, module_path=None, function_name=None, args=N
              max_retries = retry['max_retries']

          if current_retries >= max_retries:
-             raise Reject(str(e), requeue=False)
+             logger.error(
+                 f"[TaskQueue] Max retries ({max_retries}) reached for function: {function_name}, job_id: {job_id}")
+             self.update_state(state='FAILURE', meta={
+                 'exc_type': type(e).__name__, 'exc_message': str(e)})
+
+             SlackbotManager.send_message(
+                 f"Job Failed Too Many Times - Moving back to dlq.\n"
+                 f"function name: {function_name}\n"
+                 f"args: {args}\n"
+                 f"kwargs: {kwargs}"
+             )
+
+             raise Reject(reason=str(e), requeue=False)

          countdown = K_DEFAULT_RETRY_COUNTDOWN
          if isinstance(retry, dict) and 'countdown' in retry:
@@ -271,7 +354,7 @@ def dynamic_function_executor(self, module_path=None, function_name=None, args=N
          raise self.retry(exc=e, countdown=countdown, max_retries=max_retries)


- @shared_task(bind=True, max_retries=K_MAX_RETRY_COUNT)
+ @shared_task(bind=True, max_retries=K_MAX_RETRY_COUNT, acks_late=True, reject_on_worker_lost=True)
  def dynamic_class_method_executor(self, module_path=None, class_name=None, method_name=None, args=None, kwargs=None, init_args=None, init_kwargs=None, retry=None):
      job_id = self.request.id
      try:
@@ -298,7 +381,19 @@ def dynamic_class_method_executor(self, module_path=None, class_name=None, metho
              max_retries = retry['max_retries']

          if current_retries >= max_retries:
-             raise Reject(str(e), requeue=False)
+             logger.error(
+                 f"[TaskQueue] Max retries ({max_retries}) reached for method: {method_name}, job_id: {job_id}")
+             self.update_state(state='FAILURE', meta={
+                 'exc_type': type(e).__name__, 'exc_message': str(e)})
+
+             SlackbotManager.send_message(
+                 f"Job Failed Too Many Times - Moving back to dlq.\n"
+                 f"function name: {class_name}.{method_name}\n"
+                 f"args: {args}\n"
+                 f"kwargs: {kwargs}"
+             )
+
+             raise Reject(reason=str(e), requeue=False)

          countdown = K_DEFAULT_RETRY_COUNTDOWN
          if isinstance(retry, dict) and 'countdown' in retry:
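Taken together, the cmanager.py changes mean a plain enqueue now ships the callable itself to the new callable_executor task, the eta/countdown handling lives in _parse_enqueue_args, and the old module-path executors stay reachable through the use_legacy_executor queue kwarg. A hedged usage sketch: the public cm.enqueue/cm.enqueue_in wrappers and the send_report function are assumed for illustration, while the kwarg names channel, retry, and use_legacy_executor come straight from this diff:

    from datetime import timedelta

    from taskqueue.cmanager import cm
    from myapp.reports import send_report  # hypothetical function, for illustration only

    # Default path in 0.1.19: the callable object itself is handed to callable_executor.
    cm.enqueue(send_report, 42, channel="high",
               retry={"max_retries": 3, "countdown": 60})

    # Opt back into the legacy module-path/class-name executors.
    cm.enqueue_in(timedelta(minutes=5), send_report, 42,
                  use_legacy_executor=True)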
@@ -70,6 +70,19 @@ def celery_worker_burst(include_func_names: List[str], channel: str = "default")
              method_name = task_kwargs.get('method_name', '')
              if module_path and class_name and method_name:
                  full_func_name = f"{module_path}.{class_name}.{method_name}"
+         elif task_name.endswith("callable_executor"):
+             callable_obj = task_kwargs.get('callable_obj')
+             if callable_obj:
+                 module_path = getattr(
+                     callable_obj, '__module__', '')
+                 func_name = getattr(
+                     callable_obj, '__name__', '')
+                 if hasattr(callable_obj, '__self__'):
+                     class_name = callable_obj.__self__.__class__.__name__
+                 if module_path and class_name and func_name:
+                     full_func_name = f"{module_path}.{class_name}.{func_name}"
+                 elif module_path and func_name:
+                     full_func_name = f"{module_path}.{func_name}"

          should_execute = full_func_name in included_set if full_func_name else False

@@ -99,6 +112,7 @@ def celery_worker_burst(include_func_names: List[str], channel: str = "default")
  def get_queued_tasks(channel: str = "default"):
      app = current_app
      queued_tasks = []
+     messages = []

      try:
          with app.connection_for_read() as conn:
@@ -106,10 +120,11 @@ def get_queued_tasks(channel: str = "default"):
              queue = app.amqp.queues[channel](chan)

              while True:
-                 message = queue.get(no_ack=True)
+                 message = queue.get(no_ack=False)
                  if not message:
                      break

+                 messages.append(message)
                  task_name = message.headers.get("task")

                  try:
@@ -136,6 +151,19 @@ def get_queued_tasks(channel: str = "default"):
                          method_name = task_kwargs.get('method_name', '')
                          if module_path and class_name and method_name:
                              full_func_name = f"{module_path}.{class_name}.{method_name}"
+                     elif task_name and task_name.endswith("callable_executor"):
+                         callable_obj = task_kwargs.get('callable_obj')
+                         if callable_obj:
+                             module_path = getattr(
+                                 callable_obj, '__module__', '')
+                             func_name = getattr(
+                                 callable_obj, '__name__', '')
+                             if hasattr(callable_obj, '__self__'):
+                                 class_name = callable_obj.__self__.__class__.__name__
+                             if module_path and class_name and func_name:
+                                 full_func_name = f"{module_path}.{class_name}.{func_name}"
+                             elif module_path and func_name:
+                                 full_func_name = f"{module_path}.{func_name}"

                      queued_tasks.append({
                          'task_name': task_name,
@@ -147,7 +175,10 @@ def get_queued_tasks(channel: str = "default"):


                  except Exception as e:
                      logger.warning(f"Failed to decode message: {e}")
-                     continue
+
+             for message in messages:
+                 if message and not message.acknowledged:
+                     message.reject(requeue=True)
      except Exception as e:
          logger.error(
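Both helpers in helper_test.py now recognize the new callable_executor payload: they derive a full function name from callable_obj's __module__ and __name__ (plus __self__ for bound methods), and get_queued_tasks reads with no_ack=False and rejects every unacknowledged message back onto the queue, so inspecting a queue no longer drains it. A hedged sketch of how a test might combine them; the enqueue call and assertion shape reuse the assumed names from the sketch above, while the helper signatures come from this diff:

    from taskqueue.libs.helper_test import celery_worker_burst, get_queued_tasks

    def test_send_report_is_enqueued_and_executed():
        cm.enqueue(send_report, 42)  # cm and send_report as assumed earlier

        # Peek without draining: unacked messages are requeued via reject(requeue=True).
        queued = get_queued_tasks(channel="default")
        assert any(t["task_name"].endswith("callable_executor") for t in queued)

        # Execute only the listed functions from the given channel.
        celery_worker_burst(["myapp.reports.send_report"], channel="default")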
@@ -0,0 +1,51 @@
+ """
+ Slack notification for TaskQueue.
+ """
+ import json
+ import logging
+
+ import requests
+
+ logger = logging.getLogger(__name__)
+
+
+ class SlackbotManager:
+
+     @classmethod
+     def send_message(cls, message: str) -> None:
+         try:
+             from django.conf import settings
+         except ImportError:
+             return
+
+         if not getattr(settings, 'TASKQUEUE_SLACK_ENABLED', False):
+             return
+
+         hook_url = getattr(settings, 'TASKQUEUE_SLACK_HOOK_URL', None)
+         if not hook_url:
+             return
+
+         channel = getattr(
+             settings, 'TASKQUEUE_SLACK_CHANNEL_NAME', '#tech-automation')
+         username = getattr(
+             settings, 'TASKQUEUE_SLACK_USERNAME', 'TaskQueueBot')
+         icon_emoji = getattr(
+             settings, 'TASKQUEUE_SLACK_ICON_EMOJI', ':robot_face:')
+
+         is_staging = getattr(settings, 'IS_RUN_IN_STAGING_ENV', False)
+         if is_staging:
+             message = '[STAGING] ' + message
+
+         try:
+             requests.post(
+                 hook_url,
+                 data=json.dumps({
+                     'channel': channel,
+                     'username': username,
+                     'text': message,
+                     'icon_emoji': icon_emoji,
+                 }),
+                 headers={"Content-Type": "application/json"}
+             )
+         except Exception as e:
+             logger.exception('[TaskQueue Slack] Error: %s', str(e))
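slack_notifier.py is what the executors call once a job has exhausted its retries. It is a silent no-op unless Slack is explicitly enabled in the host project's Django settings; every setting name below appears in the module, and the values are only examples:

    # Django settings read by taskqueue.slack_notifier.SlackbotManager (example values)
    TASKQUEUE_SLACK_ENABLED = True
    TASKQUEUE_SLACK_HOOK_URL = "https://hooks.slack.com/services/..."  # nothing is sent without this
    TASKQUEUE_SLACK_CHANNEL_NAME = "#tech-automation"   # code default
    TASKQUEUE_SLACK_USERNAME = "TaskQueueBot"           # code default
    TASKQUEUE_SLACK_ICON_EMOJI = ":robot_face:"         # code default
    IS_RUN_IN_STAGING_ENV = False                       # True prefixes every message with [STAGING]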
@@ -50,7 +50,7 @@ class TestCreateCeleryApp:
          result = create_celery_app()

          mock_celery_class.assert_called_once_with('testapp')
-         mock_setup_queues.assert_called_once()
+         # mock_setup_queues.assert_called_once()
          mock_app.conf.update.assert_called_once()
          mock_app.autodiscover_tasks.assert_called_once_with(['taskqueue'])
          assert result == mock_app