p1-taskqueue 0.1.21__tar.gz → 0.1.23__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (24) hide show
  1. {p1_taskqueue-0.1.21 → p1_taskqueue-0.1.23}/PKG-INFO +1 -1
  2. {p1_taskqueue-0.1.21 → p1_taskqueue-0.1.23}/pyproject.toml +1 -1
  3. {p1_taskqueue-0.1.21 → p1_taskqueue-0.1.23}/src/p1_taskqueue.egg-info/PKG-INFO +1 -1
  4. {p1_taskqueue-0.1.21 → p1_taskqueue-0.1.23}/src/p1_taskqueue.egg-info/SOURCES.txt +4 -0
  5. p1_taskqueue-0.1.23/src/taskqueue/apps.py +8 -0
  6. {p1_taskqueue-0.1.21 → p1_taskqueue-0.1.23}/src/taskqueue/celery_app.py +1 -1
  7. {p1_taskqueue-0.1.21 → p1_taskqueue-0.1.23}/src/taskqueue/cmanager.py +141 -7
  8. p1_taskqueue-0.1.23/src/taskqueue/migrations/0001_initial.py +27 -0
  9. p1_taskqueue-0.1.23/src/taskqueue/migrations/__init__.py +0 -0
  10. p1_taskqueue-0.1.23/src/taskqueue/models.py +17 -0
  11. {p1_taskqueue-0.1.21 → p1_taskqueue-0.1.23}/README.md +0 -0
  12. {p1_taskqueue-0.1.21 → p1_taskqueue-0.1.23}/setup.cfg +0 -0
  13. {p1_taskqueue-0.1.21 → p1_taskqueue-0.1.23}/src/p1_taskqueue.egg-info/dependency_links.txt +0 -0
  14. {p1_taskqueue-0.1.21 → p1_taskqueue-0.1.23}/src/p1_taskqueue.egg-info/requires.txt +0 -0
  15. {p1_taskqueue-0.1.21 → p1_taskqueue-0.1.23}/src/p1_taskqueue.egg-info/top_level.txt +0 -0
  16. {p1_taskqueue-0.1.21 → p1_taskqueue-0.1.23}/src/taskqueue/__init__.py +0 -0
  17. {p1_taskqueue-0.1.21 → p1_taskqueue-0.1.23}/src/taskqueue/libs/__init__.py +0 -0
  18. {p1_taskqueue-0.1.21 → p1_taskqueue-0.1.23}/src/taskqueue/libs/helper_test.py +0 -0
  19. {p1_taskqueue-0.1.21 → p1_taskqueue-0.1.23}/src/taskqueue/slack_notifier.py +0 -0
  20. {p1_taskqueue-0.1.21 → p1_taskqueue-0.1.23}/tests/test_celery_app.py +0 -0
  21. {p1_taskqueue-0.1.21 → p1_taskqueue-0.1.23}/tests/test_cmanager.py +0 -0
  22. {p1_taskqueue-0.1.21 → p1_taskqueue-0.1.23}/tests/test_helper_test_functions.py +0 -0
  23. {p1_taskqueue-0.1.21 → p1_taskqueue-0.1.23}/tests/test_return_values.py +0 -0
  24. {p1_taskqueue-0.1.21 → p1_taskqueue-0.1.23}/tests/test_test_utils.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: p1-taskqueue
3
- Version: 0.1.21
3
+ Version: 0.1.23
4
4
  Summary: A Task Queue Wrapper for Dekoruma Backend
5
5
  Author-email: Chalvin <engineering@dekoruma.com>
6
6
  Project-URL: Homepage, https://github.com/Dekoruma/p1-taskqueue
@@ -5,7 +5,7 @@ build-backend = "setuptools.build_meta"
5
5
  [project]
6
6
  name = "p1-taskqueue"
7
7
  # DO NOT CHANGE THIS VERSION - it gets automatically replaced by CI/CD with the git tag version
8
- version = "0.1.21"
8
+ version = "0.1.23"
9
9
  description = "A Task Queue Wrapper for Dekoruma Backend"
10
10
  authors = [
11
11
  {name = "Chalvin", email = "engineering@dekoruma.com"}
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: p1-taskqueue
3
- Version: 0.1.21
3
+ Version: 0.1.23
4
4
  Summary: A Task Queue Wrapper for Dekoruma Backend
5
5
  Author-email: Chalvin <engineering@dekoruma.com>
6
6
  Project-URL: Homepage, https://github.com/Dekoruma/p1-taskqueue
@@ -6,11 +6,15 @@ src/p1_taskqueue.egg-info/dependency_links.txt
6
6
  src/p1_taskqueue.egg-info/requires.txt
7
7
  src/p1_taskqueue.egg-info/top_level.txt
8
8
  src/taskqueue/__init__.py
9
+ src/taskqueue/apps.py
9
10
  src/taskqueue/celery_app.py
10
11
  src/taskqueue/cmanager.py
12
+ src/taskqueue/models.py
11
13
  src/taskqueue/slack_notifier.py
12
14
  src/taskqueue/libs/__init__.py
13
15
  src/taskqueue/libs/helper_test.py
16
+ src/taskqueue/migrations/0001_initial.py
17
+ src/taskqueue/migrations/__init__.py
14
18
  tests/test_celery_app.py
15
19
  tests/test_cmanager.py
16
20
  tests/test_helper_test_functions.py
@@ -0,0 +1,8 @@
1
+ from django.apps import AppConfig
2
+
3
+
4
class TaskqueueConfig(AppConfig):
    """Django AppConfig that registers the taskqueue app.

    Needed so Django discovers taskqueue.models (TaskReconstruction) and
    applies the app's migrations.
    """

    default_auto_field = 'django.db.models.BigAutoField'
    name = 'taskqueue'
    verbose_name = 'TaskQueue'
@@ -31,7 +31,7 @@ def create_celery_app():
31
31
  # https://docs.celeryq.dev/en/latest/userguide/configuration.html
32
32
  celery_config = {
33
33
  'broker_url': getattr(settings, 'CELERY_BROKER_URL', 'amqp://localhost:5672//'),
34
- 'result_backend': getattr(settings, 'CELERY_RESULT_BACKEND', 'rpc://localhost:5672//'),
34
+ 'result_backend': getattr(settings, 'CELERY_RESULT_BACKEND', 'django-db'),
35
35
  'task_serializer': getattr(settings, 'CELERY_TASK_SERIALIZER', 'pickle'),
36
36
  'result_serializer': getattr(settings, 'CELERY_RESULT_SERIALIZER', 'pickle'),
37
37
  'accept_content': getattr(settings, 'CELERY_ACCEPT_CONTENT', ['pickle']),
@@ -1,6 +1,9 @@
1
+ import base64
1
2
  import importlib
2
3
  import inspect
3
4
  import logging
5
+ import pickle
6
+ import uuid
4
7
  from datetime import datetime
5
8
  from typing import Any
6
9
  from typing import Dict
@@ -18,8 +21,10 @@ K_ENQUEUE_OP_TYPE_ENQUEUE = 'enqueue'
18
21
  K_ENQUEUE_OP_TYPE_ENQUEUE_AT = 'enqueue_at'
19
22
  K_ENQUEUE_OP_TYPE_ENQUEUE_IN = 'enqueue_in'
20
23
 
21
- K_MAX_RETRY_COUNT = 3
24
+ K_MAX_RETRY_COUNT = 2
22
25
  K_DEFAULT_RETRY_COUNTDOWN = 3600
26
+ K_TASK_STATUS_REPUBLISHED = 'FAILURE - REPUBLISHED'
27
+ K_TASK_STATUS_FAILURE = 'FAILURE'
23
28
 
24
29
 
25
30
  def _is_class_method(func: Any) -> bool:
@@ -138,6 +143,63 @@ class CManager:
138
143
  from .celery_app import celery_app
139
144
  return celery_app
140
145
 
146
    def republish_task(self, task_id: str) -> str:
        """Re-send a previously failed task to the broker under a new task id.

        Flow: load the stored TaskReconstruction row for *task_id*, verify the
        matching django_celery_results TaskResult exists and is in FAILURE
        state, mark the old result as republished, then create a fresh
        TaskResult/TaskReconstruction pair and re-send the task.

        Args:
            task_id: Celery task id of the failed task to republish.

        Returns:
            The task id of the newly submitted task, as a string.

        Raises:
            ImportError: if taskqueue.models or django_celery_results is
                unavailable.
            ValueError: if no row exists for *task_id*, or the old task is not
                in FAILURE state.
        """
        try:
            from taskqueue.models import TaskReconstruction
        except ImportError:
            raise ImportError(
                "taskqueue.models is required for republishing tasks")

        try:
            task_recon = TaskReconstruction.objects.get(task_id=task_id)
        except TaskReconstruction.DoesNotExist:
            raise ValueError(f"Task with ID {task_id} not found")

        try:
            from django_celery_results.models import TaskResult
            from django.utils import timezone
            task_result = TaskResult.objects.filter(task_id=task_id).first()
            if not task_result:
                raise ValueError(
                    f"Task with ID {task_id} not found in TaskResult. Cannot republish task without TaskResult.")
            # Only FAILURE tasks may be republished; anything else is rejected.
            if task_result.status != K_TASK_STATUS_FAILURE:
                raise ValueError(
                    f"Task with ID {task_id} cannot be republished. Only tasks with status '{K_TASK_STATUS_FAILURE}' can be republished. Current status: {task_result.status}")

            # Flip the old row to REPUBLISHED so it cannot be replayed twice.
            TaskResult.objects.filter(task_id=task_id).update(
                status=K_TASK_STATUS_REPUBLISHED,
                date_done=timezone.now(),
            )
        except ImportError:
            raise ImportError(
                "django_celery_results is required for republishing tasks")
        except ValueError:
            # Re-raise validation errors untouched (don't mask them below).
            raise
        except Exception as e:
            logger.warning(f"[CManager] Failed to update old task status: {e}")
            raise

        task_name = task_recon.task_name
        # args/kwargs were stored as base64-encoded pickles; restore them.
        task_args = self._unpickle_data(task_recon.task_args)
        task_kwargs = self._unpickle_data(task_recon.task_kwargs)
        queue_options = task_recon.queue_options

        # eta was serialized to an ISO-8601 string for the JSON column;
        # convert back to a datetime ('Z' suffix normalized to +00:00).
        eta = queue_options.get("eta")
        if eta and isinstance(eta, str):
            from datetime import datetime as dt
            queue_options["eta"] = dt.fromisoformat(eta.replace('Z', '+00:00'))
            eta = queue_options["eta"]

        queue_name = queue_options.get("channel")
        job_timeout = queue_options.get("job_timeout")
        retry_policy = queue_options.get("retry")
        countdown = queue_options.get("countdown")

        # New uuid so the republished run gets its own tracking rows.
        new_task_id = self._create_task_result(task_name, task_args, task_kwargs,
                                               queue_name, job_timeout, retry_policy, countdown, eta)

        return self._send_task(task_name, task_args, task_kwargs, queue_options, new_task_id)
141
203
  def enqueue(self, *args: Any, **kwargs: Any) -> None:
142
204
  self._enqueue_op(
143
205
  *args, enqueue_op_type=K_ENQUEUE_OP_TYPE_ENQUEUE, **kwargs)
@@ -181,8 +243,17 @@ class CManager:
181
243
  task_name, task_args, task_kwargs = _build_callable_task_call(
182
244
  func, func_args, func_kwargs)
183
245
 
246
+ queue_name = queue_options.get("channel")
247
+ job_timeout = queue_options.get("job_timeout")
248
+ retry_policy = queue_options.get("retry")
249
+ countdown = queue_options.get("countdown")
250
+ eta = queue_options.get("eta")
251
+
252
+ task_id = self._create_task_result(task_name, task_args, task_kwargs,
253
+ queue_name, job_timeout, retry_policy, countdown, eta)
254
+
184
255
  task_id = self._send_task(task_name, task_args,
185
- task_kwargs, queue_options)
256
+ task_kwargs, queue_options, task_id)
186
257
 
187
258
  logger.info('[_enqueue_op_base %s] Submit Celery Task SUCCESS, task_name: %s args: %s, kwargs: %s, task_id: %s' % (
188
259
  enqueue_op_type, task_name, task_args, task_kwargs, task_id))
@@ -232,12 +303,71 @@ class CManager:
232
303
 
233
304
  return func, func_args, func_kwargs, queue_options
234
305
 
235
- def _send_task(self, task_name: str, task_args: list, task_kwargs: dict, queue_kwargs: Dict[str, Any]) -> str:
306
+ def _pickle_data(self, data: Any) -> str:
307
+ """Pickle data and return as base64-encoded string."""
308
+ pickled = pickle.dumps(data)
309
+ return base64.b64encode(pickled).decode('utf-8')
310
+
311
+ def _unpickle_data(self, data: str) -> Any:
312
+ """Unpickle base64-encoded pickled string."""
313
+ pickled = base64.b64decode(data.encode('utf-8'))
314
+ return pickle.loads(pickled)
315
+
316
+ def _create_task_result(self, task_name: str, task_args: list, task_kwargs: dict,
317
+ queue_name: str, job_timeout: int, retry_policy: dict, countdown: int, eta: Any) -> str:
318
+ task_id = str(uuid.uuid4())
319
+
320
+ try:
321
+ from django_celery_results.models import TaskResult
322
+ from django.utils import timezone
323
+
324
+ TaskResult.objects.create(
325
+ task_id=task_id,
326
+ task_name=task_name,
327
+ status='PENDING',
328
+ date_created=timezone.now(),
329
+ date_done=timezone.now(),
330
+ )
331
+ except ImportError:
332
+ pass
333
+ except Exception as e:
334
+ logger.warning(f"[CManager] Failed to create TaskResult: {e}")
335
+
336
+ try:
337
+ from taskqueue.models import TaskReconstruction
338
+
339
+ pickled_task_args = self._pickle_data(task_args)
340
+ pickled_task_kwargs = self._pickle_data(task_kwargs)
341
+
342
+ TaskReconstruction.objects.create(
343
+ task_id=task_id,
344
+ task_name=task_name,
345
+ task_args=pickled_task_args,
346
+ task_kwargs=pickled_task_kwargs,
347
+ queue_options={
348
+ "channel": queue_name,
349
+ "job_timeout": job_timeout,
350
+ "retry": retry_policy,
351
+ "countdown": countdown,
352
+ "eta": eta.isoformat() if eta else None,
353
+ },
354
+ )
355
+ except ImportError:
356
+ pass
357
+ except Exception as e:
358
+ logger.warning(
359
+ f"[CManager] Failed to create TaskReconstruction: {e}")
360
+
361
+ return task_id
362
+
363
+ def _send_task(self, task_name: str, task_args: list, task_kwargs: dict, queue_kwargs: Dict[str, Any], task_id: str = None) -> str:
236
364
  celery_app = self._get_celery_app()
237
365
 
238
366
  queue_name = queue_kwargs.pop("channel", None)
239
367
  job_timeout = queue_kwargs.pop("job_timeout", None)
240
368
  retry_policy = queue_kwargs.pop("retry", None)
369
+ countdown = queue_kwargs.pop("countdown", None)
370
+ eta = queue_kwargs.pop("eta", None)
241
371
 
242
372
  created_at = datetime.now().isoformat()
243
373
 
@@ -250,10 +380,10 @@ class CManager:
250
380
  send_opts["queue"] = queue_name
251
381
  if job_timeout is not None:
252
382
  send_opts["time_limit"] = job_timeout
253
- if "countdown" in queue_kwargs:
254
- send_opts["countdown"] = queue_kwargs["countdown"]
255
- if "eta" in queue_kwargs:
256
- send_opts["eta"] = queue_kwargs["eta"]
383
+ if countdown is not None:
384
+ send_opts["countdown"] = countdown
385
+ if eta is not None:
386
+ send_opts["eta"] = eta
257
387
 
258
388
  task_kwargs_with_retry = dict(task_kwargs)
259
389
  if retry_policy is None:
@@ -262,8 +392,12 @@ class CManager:
262
392
  else:
263
393
  task_kwargs_with_retry["retry"] = retry_policy
264
394
 
395
+ if task_id:
396
+ send_opts["task_id"] = task_id
397
+
265
398
  task = celery_app.send_task(task_name, args=task_args,
266
399
  kwargs=task_kwargs_with_retry, **send_opts)
400
+
267
401
  return str(task.id)
268
402
 
269
403
 
@@ -0,0 +1,27 @@
1
+ from django.db import migrations
2
+ from django.db import models
3
+
4
+
5
class Migration(migrations.Migration):
    """Initial schema for the taskqueue app.

    Creates the taskqueue_task_reconstruction table, which stores enough
    information (pickled args/kwargs plus queue options) to re-send a task.
    """

    # First migration for this app; no prior state to depend on.
    initial = True

    dependencies = []

    operations = [
        migrations.CreateModel(
            name='TaskReconstruction',
            fields=[
                # Celery task id doubles as the primary key: one row per task.
                ('task_id', models.CharField(
                    max_length=255, primary_key=True, serialize=False)),
                ('task_name', models.CharField(max_length=255)),
                # Base64-encoded pickled positional arguments.
                ('task_args', models.TextField()),
                # Base64-encoded pickled keyword arguments.
                ('task_kwargs', models.TextField()),
                # Routing options: channel, job_timeout, retry, countdown, eta.
                ('queue_options', models.JSONField(default=dict)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
            ],
            options={
                'db_table': 'taskqueue_task_reconstruction',
            },
        ),
    ]
@@ -0,0 +1,17 @@
1
+ from django.db import models
2
+
3
+
4
class TaskReconstruction(models.Model):
    """Stores everything needed to rebuild and re-send a Celery task.

    Written by CManager._create_task_result at enqueue time and read back by
    CManager.republish_task when a failed task is re-submitted.
    """

    # Celery task id; also the primary key, so there is one row per task.
    task_id = models.CharField(max_length=255, primary_key=True)
    task_name = models.CharField(max_length=255)
    # Base64-encoded pickled positional arguments.
    task_args = models.TextField()
    # Base64-encoded pickled keyword arguments.
    task_kwargs = models.TextField()
    # Routing/scheduling options: channel, job_timeout, retry, countdown, eta.
    queue_options = models.JSONField(default=dict)
    created_at = models.DateTimeField(auto_now_add=True)

    class Meta:
        db_table = 'taskqueue_task_reconstruction'
        # Explicit app_label so the model resolves even when the package is
        # imported outside a standard Django app layout.
        app_label = 'taskqueue'

    def __str__(self):
        return f"TaskReconstruction({self.task_id})"
File without changes
File without changes