p1-taskqueue 0.1.9.tar.gz → 0.1.11.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of p1-taskqueue has been flagged as potentially problematic; consult the registry's advisory page for details.

Files changed (19)
  1. {p1_taskqueue-0.1.9 → p1_taskqueue-0.1.11}/PKG-INFO +1 -1
  2. {p1_taskqueue-0.1.9 → p1_taskqueue-0.1.11}/pyproject.toml +1 -1
  3. {p1_taskqueue-0.1.9 → p1_taskqueue-0.1.11}/src/p1_taskqueue.egg-info/PKG-INFO +1 -1
  4. {p1_taskqueue-0.1.9 → p1_taskqueue-0.1.11}/src/taskqueue/celery_app.py +10 -3
  5. {p1_taskqueue-0.1.9 → p1_taskqueue-0.1.11}/src/taskqueue/cmanager.py +10 -8
  6. {p1_taskqueue-0.1.9 → p1_taskqueue-0.1.11}/README.md +0 -0
  7. {p1_taskqueue-0.1.9 → p1_taskqueue-0.1.11}/setup.cfg +0 -0
  8. {p1_taskqueue-0.1.9 → p1_taskqueue-0.1.11}/src/p1_taskqueue.egg-info/SOURCES.txt +0 -0
  9. {p1_taskqueue-0.1.9 → p1_taskqueue-0.1.11}/src/p1_taskqueue.egg-info/dependency_links.txt +0 -0
  10. {p1_taskqueue-0.1.9 → p1_taskqueue-0.1.11}/src/p1_taskqueue.egg-info/requires.txt +0 -0
  11. {p1_taskqueue-0.1.9 → p1_taskqueue-0.1.11}/src/p1_taskqueue.egg-info/top_level.txt +0 -0
  12. {p1_taskqueue-0.1.9 → p1_taskqueue-0.1.11}/src/taskqueue/__init__.py +0 -0
  13. {p1_taskqueue-0.1.9 → p1_taskqueue-0.1.11}/src/taskqueue/libs/__init__.py +0 -0
  14. {p1_taskqueue-0.1.9 → p1_taskqueue-0.1.11}/src/taskqueue/libs/helper_test.py +0 -0
  15. {p1_taskqueue-0.1.9 → p1_taskqueue-0.1.11}/tests/test_celery_app.py +0 -0
  16. {p1_taskqueue-0.1.9 → p1_taskqueue-0.1.11}/tests/test_cmanager.py +0 -0
  17. {p1_taskqueue-0.1.9 → p1_taskqueue-0.1.11}/tests/test_helper_test_functions.py +0 -0
  18. {p1_taskqueue-0.1.9 → p1_taskqueue-0.1.11}/tests/test_return_values.py +0 -0
  19. {p1_taskqueue-0.1.9 → p1_taskqueue-0.1.11}/tests/test_test_utils.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: p1-taskqueue
3
- Version: 0.1.9
3
+ Version: 0.1.11
4
4
  Summary: A Task Queue Wrapper for Dekoruma Backend
5
5
  Author-email: Chalvin <engineering@dekoruma.com>
6
6
  Project-URL: Homepage, https://github.com/Dekoruma/p1-taskqueue
@@ -5,7 +5,7 @@ build-backend = "setuptools.build_meta"
5
5
  [project]
6
6
  name = "p1-taskqueue"
7
7
  # DO NOT CHANGE THIS VERSION - it gets automatically replaced by CI/CD with the git tag version
8
- version = "0.1.9"
8
+ version = "0.1.11"
9
9
  description = "A Task Queue Wrapper for Dekoruma Backend"
10
10
  authors = [
11
11
  {name = "Chalvin", email = "engineering@dekoruma.com"}
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: p1-taskqueue
3
- Version: 0.1.9
3
+ Version: 0.1.11
4
4
  Summary: A Task Queue Wrapper for Dekoruma Backend
5
5
  Author-email: Chalvin <engineering@dekoruma.com>
6
6
  Project-URL: Homepage, https://github.com/Dekoruma/p1-taskqueue
@@ -23,6 +23,7 @@ def create_celery_app():
23
23
  app_name = getattr(settings, 'TASKQUEUE_APP_NAME', 'taskqueue')
24
24
  app = Celery(app_name)
25
25
 
26
+ # https://docs.celeryq.dev/en/latest/userguide/configuration.html
26
27
  celery_config = {
27
28
  'broker_url': getattr(settings, 'CELERY_BROKER_URL', 'amqp://localhost:5672//'),
28
29
  'result_backend': getattr(settings, 'CELERY_RESULT_BACKEND', 'rpc://localhost:5672//'),
@@ -30,12 +31,18 @@ def create_celery_app():
30
31
  'result_serializer': getattr(settings, 'CELERY_RESULT_SERIALIZER', 'pickle'),
31
32
  'accept_content': getattr(settings, 'CELERY_ACCEPT_CONTENT', ['pickle']),
32
33
  'timezone': getattr(settings, 'CELERY_TIMEZONE', 'UTC+7'),
33
- 'task_track_started': getattr(settings, 'CELERY_TASK_TRACK_STARTED', True),
34
34
  'task_time_limit': getattr(settings, 'CELERY_TASK_TIME_LIMIT', 30 * 60),
35
35
  'task_soft_time_limit': getattr(settings, 'CELERY_TASK_SOFT_TIME_LIMIT', 25 * 60),
36
- 'task_always_eager': getattr(settings, 'CELERY_TASK_ALWAYS_EAGER', False),
37
- 'task_eager_propagates': getattr(settings, 'CELERY_TASK_EAGER_PROPAGATES', True),
36
+ # 14 days
37
+ 'result_expires': getattr(settings, 'CELERY_RESULT_EXPIRES', 14 * 24 * 60 * 60),
38
+ 'task_track_started': True,
39
+ 'task_always_eager': False,
40
+ 'task_eager_propagates': True,
38
41
  'task_acks_late': True,
42
+ 'result_extended': True,
43
+ 'task_ignore_result': False,
44
+ 'task_send_sent_event': True,
45
+ 'worker_send_task_events': True,
39
46
  'task_reject_on_worker_lost': True,
40
47
  'worker_prefetch_multiplier': 1,
41
48
  'worker_max_tasks_per_child': 1000,
@@ -8,7 +8,6 @@ from typing import Dict
8
8
  from typing import Tuple
9
9
 
10
10
  from celery import shared_task
11
- from celery.exceptions import Reject
12
11
 
13
12
  # Setup logger
14
13
  logger = logging.getLogger(__name__)
@@ -31,12 +30,12 @@ def _is_class_method(func: Any) -> bool:
31
30
  def taskqueue_class(cls):
32
31
  """Decorator to automatically capture init arguments for taskqueue."""
33
32
  original_init = cls.__init__
34
-
33
+
35
34
  def wrapped_init(self, *args, **kwargs):
36
35
  self._taskqueue_init_args = list(args)
37
36
  self._taskqueue_init_kwargs = dict(kwargs)
38
37
  original_init(self, *args, **kwargs)
39
-
38
+
40
39
  cls.__init__ = wrapped_init
41
40
  return cls
42
41
 
@@ -82,9 +81,9 @@ def _build_dynamic_task_call(func: Any, *args: Any, **func_kwargs: Any) -> Tuple
82
81
  module_path = klass.__module__
83
82
  class_name = klass.__name__
84
83
  method_name = func.__name__
85
-
84
+
86
85
  init_args, init_kwargs = _extract_init_args_from_instance(instance)
87
-
86
+
88
87
  task_name = "taskqueue.cmanager.dynamic_class_method_executor"
89
88
  task_args = []
90
89
  task_kwargs: Dict[str, Any] = {
@@ -238,7 +237,6 @@ class CManager:
238
237
  cm = CManager()
239
238
 
240
239
 
241
- # Dynamic task executors - handle function and class method execution
242
240
  @shared_task(bind=True, max_retries=K_MAX_RETRY_COUNT)
243
241
  def dynamic_function_executor(self, module_path=None, function_name=None, args=None, kwargs=None, retry=None):
244
242
  job_id = self.request.id
@@ -262,7 +260,9 @@ def dynamic_function_executor(self, module_path=None, function_name=None, args=N
262
260
  max_retries = retry['max_retries']
263
261
 
264
262
  if current_retries >= max_retries:
265
- raise Reject(str(e), requeue=False)
263
+ logger.error(
264
+ f"[TaskQueue] Max retries ({max_retries}) reached for function: {function_name}, marking task as FAILED, job_id: {job_id}")
265
+ raise
266
266
 
267
267
  countdown = K_DEFAULT_RETRY_COUNTDOWN
268
268
  if isinstance(retry, dict) and 'countdown' in retry:
@@ -298,7 +298,9 @@ def dynamic_class_method_executor(self, module_path=None, class_name=None, metho
298
298
  max_retries = retry['max_retries']
299
299
 
300
300
  if current_retries >= max_retries:
301
- raise Reject(str(e), requeue=False)
301
+ logger.error(
302
+ f"[TaskQueue] Max retries ({max_retries}) reached for method: {method_name}, marking task as FAILED, job_id: {job_id}")
303
+ raise
302
304
 
303
305
  countdown = K_DEFAULT_RETRY_COUNTDOWN
304
306
  if isinstance(retry, dict) and 'countdown' in retry:
File without changes
File without changes