p1_taskqueue-0.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of p1-taskqueue might be problematic.

p1_taskqueue-0.1.0.dist-info/METADATA ADDED
@@ -0,0 +1,41 @@
+ Metadata-Version: 2.4
+ Name: p1-taskqueue
+ Version: 0.1.0
+ Summary: A Task Queue Wrapper for Dekoruma Backend
+ Author-email: Chalvin <engineering@dekoruma.com>
+ Project-URL: Homepage, https://github.com/Dekoruma/p1-taskqueue
+ Project-URL: Repository, https://github.com/Dekoruma/p1-taskqueue.git
+ Project-URL: Issues, https://github.com/Dekoruma/p1-taskqueue/issues
+ Classifier: Development Status :: 3 - Alpha
+ Classifier: Intended Audience :: Developers
+ Classifier: License :: OSI Approved :: MIT License
+ Classifier: Programming Language :: Python :: 3.11
+ Classifier: Programming Language :: Python :: 3.12
+ Requires-Python: >=3.11
+ Description-Content-Type: text/markdown
+ Requires-Dist: celery>=5.4.0
+ Requires-Dist: flower>=2.0.1
+ Requires-Dist: redis>=6.2.0
+ Requires-Dist: kombu>=5.3.4
+ Requires-Dist: django>=4.0.0
+ Provides-Extra: dev
+ Requires-Dist: pytest>=7.0.0; extra == "dev"
+ Requires-Dist: pytest-cov>=4.0.0; extra == "dev"
+ Requires-Dist: black>=23.0.0; extra == "dev"
+ Requires-Dist: flake8>=6.0.0; extra == "dev"
+ Requires-Dist: mypy>=1.0.0; extra == "dev"
+ Requires-Dist: pre-commit>=3.0.0; extra == "dev"
+
+ # TaskQueue
+
+ A Task Queue Wrapper for Dekoruma Backend
+
+ ## Description
+
+ TaskQueue is a Python package that provides a wrapper around Celery for task queue management. It includes automatic queue setup, Dead Letter Queue (DLQ) routing, and dynamic task execution capabilities.
+
+
+ ## Deploy
+ - Push changes to main
+ - Create a new tag with the version name (e.g. 0.1.3)
+ - Check the status on the Actions page on GitHub
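
The README stops short of a usage example. As a minimal quickstart sketch, inferred from the `CManager` API in `taskqueue/cmanager.py` further down this diff (the `myapp.emails.send_welcome_email` import is a hypothetical stand-in):

```python
from taskqueue import cm  # CManager singleton exported by taskqueue/__init__.py

from myapp.emails import send_welcome_email  # hypothetical module-level function

# Enqueue on the default queue; unrecognized kwargs are forwarded to the function.
cm.enqueue(send_welcome_email, user_id=42)

# Route to a specific queue ("channel") and cap the task's runtime in seconds.
cm.enqueue(send_welcome_email, user_id=42, channel='high', job_timeout=60)
```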
p1_taskqueue-0.1.0.dist-info/RECORD ADDED
@@ -0,0 +1,7 @@
+ taskqueue/__init__.py,sha256=gVDUAurwUijthE9_36FmhAQTBf7veGgjnew-amrTrmg,241
+ taskqueue/celery_app.py,sha256=dUT-7XzsSQbr8vKrLv7f_6iYxTCUEJZHEt9fL-KIQ5U,3302
+ taskqueue/cmanager.py,sha256=Ec9Z6JgreJWR4p56qO0cAGAjO7d4UJmZ8vVmSOAetms,9795
+ p1_taskqueue-0.1.0.dist-info/METADATA,sha256=mawaq14uvvViLiFIa2p5_AEuEdcoYLKm-ba0REFdPiU,1508
+ p1_taskqueue-0.1.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ p1_taskqueue-0.1.0.dist-info/top_level.txt,sha256=hA3SM1ik2K8iPqtlt_-_nJ4TAePwHPN3vsOc4EiynqU,10
+ p1_taskqueue-0.1.0.dist-info/RECORD,,
p1_taskqueue-0.1.0.dist-info/WHEEL ADDED
@@ -0,0 +1,5 @@
+ Wheel-Version: 1.0
+ Generator: setuptools (80.9.0)
+ Root-Is-Purelib: true
+ Tag: py3-none-any
+
p1_taskqueue-0.1.0.dist-info/top_level.txt ADDED
@@ -0,0 +1 @@
+ taskqueue
taskqueue/__init__.py ADDED
@@ -0,0 +1,12 @@
+ """
+ TaskQueue - A Task Queue Wrapper for Dekoruma Backend.
+ """
+
+ __version__ = "0.1.0"
+ __author__ = "Chalvin"
+ __email__ = "engineering@dekoruma.com"
+
+ from .cmanager import cm
+ from .celery_app import celery_app
+
+ __all__ = ["cm", "celery_app"]
taskqueue/celery_app.py ADDED
@@ -0,0 +1,98 @@
+ """
+ Celery application setup for TaskQueue.
+ Reads configuration from Django settings and auto-configures queues with DLQs.
+ """
+ from celery import Celery
+ from kombu import Exchange
+ from kombu import Queue
+
+
+ def get_django_settings():
+     """Get Django settings, failing fast if Django is missing or unconfigured."""
+     try:
+         from django.conf import settings
+     except ImportError:
+         raise ImportError("[TaskQueue] Django is not installed.")
+     if not settings.configured:
+         raise RuntimeError("[TaskQueue] Django settings are not configured.")
+     return settings
+
+
+ def create_celery_app():
+     """Create and configure the Celery application."""
+     settings = get_django_settings()
+
+     app_name = getattr(settings, 'TASKQUEUE_APP_NAME', 'taskqueue')
+     app = Celery(app_name)
+
+     celery_config = {
+         'broker_url': getattr(settings, 'CELERY_BROKER_URL', 'amqp://localhost:5672//'),
+         'result_backend': getattr(settings, 'CELERY_RESULT_BACKEND', 'rpc://'),
+         'task_serializer': getattr(settings, 'CELERY_TASK_SERIALIZER', 'pickle'),
+         'result_serializer': getattr(settings, 'CELERY_RESULT_SERIALIZER', 'pickle'),
+         'accept_content': getattr(settings, 'CELERY_ACCEPT_CONTENT', ['pickle']),
+         # 'UTC+7' is not a valid zone name; default to the IANA zone for UTC+7.
+         'timezone': getattr(settings, 'CELERY_TIMEZONE', 'Asia/Jakarta'),
+         'task_track_started': getattr(settings, 'CELERY_TASK_TRACK_STARTED', True),
+         'task_time_limit': getattr(settings, 'CELERY_TASK_TIME_LIMIT', 30 * 60),
+         'task_soft_time_limit': getattr(settings, 'CELERY_TASK_SOFT_TIME_LIMIT', 25 * 60),
+         'task_always_eager': getattr(settings, 'CELERY_TASK_ALWAYS_EAGER', False),
+         'task_eager_propagates': getattr(settings, 'CELERY_TASK_EAGER_PROPAGATES', True),
+         'task_acks_late': True,
+         'task_reject_on_worker_lost': True,
+         'worker_prefetch_multiplier': 1,
+         'worker_max_tasks_per_child': 1000,
+     }
+
+     setup_queues(app, settings, celery_config)
+     app.conf.update(celery_config)
+     app.autodiscover_tasks(['taskqueue'])
+
+     return app
+
+
+ def setup_queues(app, settings, celery_config):
+     """Declare each work queue together with a matching dead-letter queue (DLQ)."""
+     app_name = getattr(settings, 'TASKQUEUE_APP_NAME', 'taskqueue')
+     queue_names = getattr(settings, 'TASKQUEUE_QUEUES',
+                           ['default', 'high', 'low'])
+     if queue_names is None:
+         queue_names = ['default', 'high', 'low']
+     dlq_name_prefix = getattr(settings, 'TASKQUEUE_DLQ_NAME_PREFIX', 'dlq')
+
+     # Create exchanges: one for normal routing, one dead-letter exchange (DLX).
+     main_exchange = Exchange(app_name, type='direct')
+     dlx_exchange = Exchange(f'{app_name}.dlx', type='direct')
+
+     queues = []
+
+     # Work queues: rejected or expired messages are dead-lettered to the DLX.
+     for queue_name in queue_names:
+         dlq_name = f'{dlq_name_prefix}.{queue_name}'
+
+         queue = Queue(
+             queue_name,
+             main_exchange,
+             routing_key=queue_name,
+             queue_arguments={
+                 'x-dead-letter-exchange': f'{app_name}.dlx',
+                 'x-dead-letter-routing-key': dlq_name
+             }
+         )
+         queues.append(queue)
+
+     # Dead-letter queues, bound to the DLX with matching routing keys.
+     for queue_name in queue_names:
+         dlq_name = f'{dlq_name_prefix}.{queue_name}'
+         dlq = Queue(dlq_name, dlx_exchange, routing_key=dlq_name)
+         queues.append(dlq)
+
+     celery_config.update({
+         'task_default_queue': 'default',
+         'task_default_exchange': app_name,
+         'task_default_exchange_type': 'direct',
+         'task_queues': tuple(queues),
+     })
+
+
+ celery_app = create_celery_app()
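
For reference, a sketch of the Django settings this module reads; every name is optional, and the values shown mirror the `getattr()` fallbacks above (the broker and timezone values are illustrative):

```python
# settings.py (illustrative; all of these are optional)
TASKQUEUE_APP_NAME = 'taskqueue'               # Celery app name and main exchange
TASKQUEUE_QUEUES = ['default', 'high', 'low']  # each queue gets a matching DLQ
TASKQUEUE_DLQ_NAME_PREFIX = 'dlq'              # DLQs: dlq.default, dlq.high, dlq.low

CELERY_BROKER_URL = 'amqp://localhost:5672//'
CELERY_RESULT_BACKEND = 'rpc://'
CELERY_TIMEZONE = 'Asia/Jakarta'
CELERY_TASK_TIME_LIMIT = 30 * 60       # hard limit, seconds
CELERY_TASK_SOFT_TIME_LIMIT = 25 * 60  # soft limit, seconds
```

Note that the `x-dead-letter-exchange`/`x-dead-letter-routing-key` queue arguments are RabbitMQ extensions, so the DLQ behavior assumes an AMQP broker such as RabbitMQ.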
taskqueue/cmanager.py ADDED
@@ -0,0 +1,259 @@
+ import importlib
+ import inspect
+ import logging
+ from datetime import datetime
+ from datetime import timedelta
+ from typing import Any
+ from typing import Dict
+ from typing import Tuple
+
+ from celery import shared_task
+ from celery.exceptions import Reject
+
+ # Setup logger
+ logger = logging.getLogger(__name__)
+
+ # Enqueue operation type constants
+ K_ENQUEUE_OP_TYPE_ENQUEUE = 'enqueue'
+ K_ENQUEUE_OP_TYPE_ENQUEUE_AT = 'enqueue_at'
+ K_ENQUEUE_OP_TYPE_ENQUEUE_IN = 'enqueue_in'
+
+ K_MAX_RETRY_COUNT = 3
+ K_DEFAULT_RETRY_COUNTDOWN = 10
+
+
+ def _is_class_method(func: Any) -> bool:
+     return inspect.ismethod(func) or (
+         getattr(func, "__self__", None) is not None
+     )
+
+
+ def _split_function_and_queue_kwargs(kwargs: Dict[str, Any]) -> Tuple[Dict[str, Any], Dict[str, Any]]:
+     # To avoid ambiguity about whether a kwarg is meant for the wrapped function
+     # or for the queue (i.e. Celery options and on_commit), ignore ambiguous
+     # Celery kwargs and log a warning instead of passing them through.
+     supported_queue_keys = {"channel", "retry", "on_commit", "job_timeout"}
+     ignored_non_function_keys = {
+         "queue", "countdown", "eta", "expires", "priority", "task_id", "routing_key",
+         "serializer", "compression", "headers", "link", "link_error", "retry_policy",
+         "shadow", "time_limit", "soft_time_limit", "reply_to", "group_id", "chord", "chain",
+         "result_ttl", "failure_ttl", "ttl", "depends_on", "at_front", "meta", "retry_count",
+     }
+
+     queue_kwargs: Dict[str, Any] = {}
+     func_kwargs: Dict[str, Any] = {}
+
+     for key, value in kwargs.items():
+         if key in supported_queue_keys:
+             queue_kwargs[key] = value
+         elif key in ignored_non_function_keys:
+             logger.warning(
+                 f"[CManager] Unsupported Celery kwarg detected: {key}. Ignored.")
+             continue
+         else:
+             func_kwargs[key] = value
+
+     return func_kwargs, queue_kwargs
+
+
+ def _build_dynamic_task_call(func: Any, *args: Any, **func_kwargs: Any) -> Tuple[str, list, dict]:
+     # Bound methods are re-imported and re-instantiated on the worker, so only
+     # the module path, class name, and method name need to be serialized.
+     if _is_class_method(func):
+         instance = getattr(func, "__self__")
+         klass = instance.__class__
+         module_path = klass.__module__
+         class_name = klass.__name__
+         method_name = func.__name__
+         task_name = "taskqueue.cmanager.dynamic_class_method_executor"
+         task_args = [module_path, class_name,
+                      method_name, list(args), dict(func_kwargs)]
+         task_kwargs: Dict[str, Any] = {}
+         return task_name, task_args, task_kwargs
+
+     module_path = getattr(func, "__module__", None)
+     function_name = getattr(func, "__name__", None)
+     if not module_path or not function_name:
+         raise ValueError(
+             "Unsupported callable type for Celery enqueue. Provide a module-level function or a class method.")
+
+     task_name = "taskqueue.cmanager.dynamic_function_executor"
+     task_args = [module_path, function_name, list(args), dict(func_kwargs)]
+     task_kwargs = {}
+     return task_name, task_args, task_kwargs
+
+
+ class CManager:
+
+     def __init__(self) -> None:
+         pass
+
+     def _get_celery_app(self):
+         """Get the auto-configured Celery app instance."""
+         # Use taskqueue's built-in auto-configured celery app
+         from .celery_app import celery_app
+         return celery_app
+
+     def enqueue(self, *args: Any, **kwargs: Any) -> None:
+         self._enqueue_op(
+             *args, enqueue_op_type=K_ENQUEUE_OP_TYPE_ENQUEUE, **kwargs)
+
+     def enqueue_at(self, *args: Any, **kwargs: Any) -> None:
+         self._enqueue_op(
+             *args, enqueue_op_type=K_ENQUEUE_OP_TYPE_ENQUEUE_AT, **kwargs)
+
+     def enqueue_in(self, *args: Any, **kwargs: Any) -> None:
+         self._enqueue_op(
+             *args, enqueue_op_type=K_ENQUEUE_OP_TYPE_ENQUEUE_IN, **kwargs)
+
+     def _enqueue_op(self, *args: Any, **kwargs: Any) -> None:
+         on_commit = kwargs.pop('on_commit', False)
+         if on_commit:
+             try:
+                 from django.db import transaction
+             except ImportError:
+                 raise RuntimeError(
+                     "Django is not installed. Please install Django to use on_commit.")
+             transaction.on_commit(
+                 lambda: self._enqueue_op_base(*args, **kwargs))
+         else:
+             self._enqueue_op_base(*args, **kwargs)
+
+     def _enqueue_op_base(self, *args: Any, **kwargs: Any) -> None:
+         enqueue_op_type = kwargs.pop(
+             'enqueue_op_type', K_ENQUEUE_OP_TYPE_ENQUEUE)
+
+         try:
+             if enqueue_op_type == K_ENQUEUE_OP_TYPE_ENQUEUE:
+                 if not args:
+                     raise ValueError(
+                         "enqueue requires a callable as the first positional argument")
+                 func = args[0]
+                 func_args = args[1:]
+
+             elif enqueue_op_type == K_ENQUEUE_OP_TYPE_ENQUEUE_AT:
+                 if len(args) < 2:
+                     raise ValueError(
+                         "enqueue_at requires (eta_datetime, func, *func_args)")
+                 eta: datetime = args[0]
+                 func = args[1]
+                 func_args = args[2:]
+
+             elif enqueue_op_type == K_ENQUEUE_OP_TYPE_ENQUEUE_IN:
+                 if len(args) < 2:
+                     raise ValueError(
+                         "enqueue_in requires (countdown_delta, func, *func_args)")
+                 delta: timedelta = args[0]
+                 func = args[1]
+                 func_args = args[2:]
+             else:
+                 raise ValueError(
+                     f"Unknown enqueue operation type: {enqueue_op_type}")
+
+             func_kwargs, queue_kwargs = _split_function_and_queue_kwargs(
+                 kwargs)
+
+             if enqueue_op_type == K_ENQUEUE_OP_TYPE_ENQUEUE_AT:
+                 queue_kwargs = dict(queue_kwargs)
+                 queue_kwargs["eta"] = eta
+             elif enqueue_op_type == K_ENQUEUE_OP_TYPE_ENQUEUE_IN:
+                 queue_kwargs = dict(queue_kwargs)
+                 queue_kwargs["countdown"] = int(delta.total_seconds())
+
+             task_name, task_args, task_kwargs = _build_dynamic_task_call(
+                 func, *func_args, **func_kwargs)
+
+             self._send_task(task_name, task_args, task_kwargs, queue_kwargs)
+
+             logger.info('[_enqueue_op_base %s] Submit Celery Task SUCCESS, task_name: %s, args: %s, kwargs: %s' % (
+                 enqueue_op_type, task_name, task_args, task_kwargs))
+
+         except Exception as e:
+             logger.exception('[_enqueue_op_base %s] Submit Celery Task FAILED, error: %s, args: %s, kwargs: %s' % (
+                 enqueue_op_type, str(e), args, kwargs))
+             raise
+
+     def _send_task(self, task_name: str, task_args: list, task_kwargs: dict, queue_kwargs: Dict[str, Any]) -> None:
+         celery_app = self._get_celery_app()
+
+         queue_name = queue_kwargs.pop("channel", None)
+         job_timeout = queue_kwargs.pop("job_timeout", None)
+         retry_policy = queue_kwargs.pop("retry", None)
+
+         send_opts: Dict[str, Any] = {}
+         if queue_name:
+             send_opts["queue"] = queue_name
+         if job_timeout is not None:
+             send_opts["time_limit"] = job_timeout
+         if "countdown" in queue_kwargs:
+             send_opts["countdown"] = queue_kwargs["countdown"]
+         if "eta" in queue_kwargs:
+             send_opts["eta"] = queue_kwargs["eta"]
+
+         # The retry policy rides along as a task kwarg so the executor tasks
+         # can honor per-call max_retries and countdown overrides.
+         task_kwargs_with_retry = dict(task_kwargs)
+         if retry_policy is None:
+             task_kwargs_with_retry["retry"] = {
+                 "max_retries": K_MAX_RETRY_COUNT, "countdown": K_DEFAULT_RETRY_COUNTDOWN}
+         else:
+             task_kwargs_with_retry["retry"] = retry_policy
+
+         celery_app.send_task(task_name, args=task_args,
+                              kwargs=task_kwargs_with_retry, **send_opts)
+
+
+ cm = CManager()
+
+
+ # Dynamic task executors - handle function and class method execution
+ @shared_task(bind=True, max_retries=K_MAX_RETRY_COUNT)
+ def dynamic_function_executor(self, module_path, function_name, args=None, kwargs=None, retry=None):
+     try:
+         module = importlib.import_module(module_path)
+         function = getattr(module, function_name)
+         args = args or []
+         kwargs = kwargs or {}
+         return function(*args, **kwargs)
+     except Exception as e:
+         current_retries = getattr(self.request, 'retries', 0) or 0
+         max_retries = self.max_retries or K_MAX_RETRY_COUNT
+         if isinstance(retry, dict) and 'max_retries' in retry:
+             max_retries = retry['max_retries']
+
+         # Reject without requeue so the broker dead-letters the message to the DLQ.
+         if current_retries >= max_retries:
+             raise Reject(str(e), requeue=False)
+
+         countdown = K_DEFAULT_RETRY_COUNTDOWN
+         if isinstance(retry, dict) and 'countdown' in retry:
+             countdown = retry['countdown']
+
+         raise self.retry(exc=e, countdown=countdown, max_retries=max_retries)
+
+
+ @shared_task(bind=True, max_retries=K_MAX_RETRY_COUNT)
+ def dynamic_class_method_executor(self, module_path, class_name, method_name, args=None, kwargs=None, retry=None):
+     try:
+         module = importlib.import_module(module_path)
+         class_obj = getattr(module, class_name)
+         instance = class_obj()
+         method = getattr(instance, method_name)
+         args = args or []
+         kwargs = kwargs or {}
+         return method(*args, **kwargs)
+     except Exception as e:
+         current_retries = getattr(self.request, 'retries', 0) or 0
+         max_retries = self.max_retries or K_MAX_RETRY_COUNT
+         if isinstance(retry, dict) and 'max_retries' in retry:
+             max_retries = retry['max_retries']
+
+         # Reject without requeue so the broker dead-letters the message to the DLQ.
+         if current_retries >= max_retries:
+             raise Reject(str(e), requeue=False)
+
+         countdown = K_DEFAULT_RETRY_COUNTDOWN
+         if isinstance(retry, dict) and 'countdown' in retry:
+             countdown = retry['countdown']
+
+         raise self.retry(exc=e, countdown=countdown, max_retries=max_retries)
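
The three enqueue variants differ only in scheduling, and the `retry` policy travels to the worker as a task kwarg (see `_send_task()` and the executors above). A usage sketch of the scheduled and transactional paths, again with a hypothetical callable:

```python
from datetime import datetime, timedelta, timezone

from taskqueue import cm

from myapp.reports import build_report  # hypothetical module-level function

# Run at an absolute time (becomes Celery's eta) ...
cm.enqueue_at(datetime(2026, 1, 1, 8, 0, tzinfo=timezone.utc), build_report, report_id=7)

# ... or after a relative delay (becomes Celery's countdown).
cm.enqueue_in(timedelta(minutes=5), build_report, report_id=7)

# Override the default retry policy (3 retries, 10 s apart) and defer the
# enqueue until the surrounding database transaction commits.
cm.enqueue(build_report, report_id=7,
           retry={'max_retries': 5, 'countdown': 30},
           on_commit=True)
```

Once a task exhausts its retries, the executor raises `Reject(..., requeue=False)`; with the queue arguments declared in `celery_app.py`, the broker then dead-letters the message to the matching `dlq.<queue>` queue.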