hawkapi-celery 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,54 @@
1
+ """hawkapi-celery — Celery integration for HawkAPI.
2
+
3
+ Async tasks, beat scheduler, broker/worker healthchecks, request-context
4
+ propagation between HTTP handlers and Celery workers, and eager-mode fixtures
5
+ for testing.
6
+ """
7
+
8
+ from __future__ import annotations
9
+
10
+ from ._app import CeleryConfig, create_celery
11
+ from ._beat import Periodic, add_periodic, crontab, every, every_seconds
12
+ from ._context import (
13
+ attach_context_signals,
14
+ bind_context,
15
+ current_context,
16
+ reset_context,
17
+ set_context,
18
+ )
19
+ from ._health import HealthReport, check_broker, check_workers, healthcheck
20
+ from ._plugin import get_celery, get_task_result, init_celery, resolve_celery
21
+ from ._tasks import compute_backoff, task
22
+ from ._testing import CapturedTask, TaskRecorder, eager_mode, record_tasks
23
+
24
+ __version__ = "0.1.0"
25
+
26
+ __all__ = [
27
+ "CapturedTask",
28
+ "CeleryConfig",
29
+ "HealthReport",
30
+ "Periodic",
31
+ "TaskRecorder",
32
+ "__version__",
33
+ "add_periodic",
34
+ "attach_context_signals",
35
+ "bind_context",
36
+ "check_broker",
37
+ "check_workers",
38
+ "compute_backoff",
39
+ "create_celery",
40
+ "crontab",
41
+ "current_context",
42
+ "eager_mode",
43
+ "every",
44
+ "every_seconds",
45
+ "get_celery",
46
+ "get_task_result",
47
+ "healthcheck",
48
+ "init_celery",
49
+ "record_tasks",
50
+ "reset_context",
51
+ "resolve_celery",
52
+ "set_context",
53
+ "task",
54
+ ]
hawkapi_celery/_app.py ADDED
@@ -0,0 +1,62 @@
1
+ """Celery app construction + sensible defaults."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from dataclasses import dataclass, field
6
+ from typing import Any
7
+
8
+ from celery import Celery
9
+
10
+
11
@dataclass(slots=True)
class CeleryConfig:
    """Declarative Celery configuration consumed by :func:`create_celery`.

    Field names mirror Celery's own setting names so the instance can be
    forwarded to ``app.conf.update`` almost verbatim.
    """

    # Connection targets — defaults assume a local Redis on db 0.
    broker_url: str = "redis://localhost:6379/0"
    result_backend: str = "redis://localhost:6379/0"
    # Serialization: JSON-only by default (safer than pickle).
    task_serializer: str = "json"
    result_serializer: str = "json"
    accept_content: list[str] = field(default_factory=lambda: ["json"])
    # Time handling.
    timezone: str = "UTC"
    enable_utc: bool = True
    # Execution limits: report STARTED state; hard kill at 600s, soft signal at 540s.
    task_track_started: bool = True
    task_time_limit: int = 600
    task_soft_time_limit: int = 540
    # Worker tuning: prefetch one task at a time; recycle child after 1000 tasks.
    worker_prefetch_multiplier: int = 1
    worker_max_tasks_per_child: int = 1000
    # Routing defaults.
    task_default_queue: str = "default"
    task_default_priority: int = 5
    # Eager mode (run tasks inline) — normally only flipped on in tests.
    task_always_eager: bool = False
    task_eager_propagates: bool = False
    # Beat schedule entries and module paths for task autodiscovery.
    beat_schedule: dict[str, Any] = field(default_factory=dict)
    include: list[str] = field(default_factory=list)
    # Escape hatch: extra settings forwarded verbatim to ``conf.update``.
    extra_kwargs: dict[str, Any] = field(default_factory=dict)
32
+
33
+
34
def create_celery(name: str = "hawkapi", *, config: CeleryConfig | None = None) -> Celery:
    """Construct a :class:`Celery` app from ``config`` (library defaults if omitted)."""
    cfg = config if config is not None else CeleryConfig()
    celery_app = Celery(name, broker=cfg.broker_url, backend=cfg.result_backend)
    # Collect every setting into one mapping, then apply in a single update call.
    settings: dict[str, Any] = {
        "broker_url": cfg.broker_url,
        "result_backend": cfg.result_backend,
        "task_serializer": cfg.task_serializer,
        "result_serializer": cfg.result_serializer,
        "accept_content": list(cfg.accept_content),
        "timezone": cfg.timezone,
        "enable_utc": cfg.enable_utc,
        "task_track_started": cfg.task_track_started,
        "task_time_limit": cfg.task_time_limit,
        "task_soft_time_limit": cfg.task_soft_time_limit,
        "worker_prefetch_multiplier": cfg.worker_prefetch_multiplier,
        "worker_max_tasks_per_child": cfg.worker_max_tasks_per_child,
        "task_default_queue": cfg.task_default_queue,
        "task_default_priority": cfg.task_default_priority,
        "task_always_eager": cfg.task_always_eager,
        "task_eager_propagates": cfg.task_eager_propagates,
        "beat_schedule": dict(cfg.beat_schedule),
        "include": list(cfg.include),
    }
    # extra_kwargs wins on key collisions, same as the keyword-splat form.
    settings.update(cfg.extra_kwargs)
    celery_app.conf.update(**settings)
    return celery_app


__all__ = ["CeleryConfig", "create_celery"]
@@ -0,0 +1,49 @@
1
+ """Periodic-task helpers — declarative schedule entries."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from dataclasses import dataclass, field
6
+ from datetime import timedelta
7
+ from typing import Any
8
+
9
+ from celery import Celery
10
+ from celery.schedules import crontab
11
+
12
+
13
+ @dataclass(slots=True)
14
+ class Periodic:
15
+ """One periodic-task entry. Use :func:`add_periodic` to register."""
16
+
17
+ task: str
18
+ schedule: float | timedelta | crontab
19
+ args: tuple[Any, ...] = ()
20
+ kwargs: dict[str, Any] = field(default_factory=dict)
21
+ options: dict[str, Any] = field(default_factory=dict)
22
+
23
+ def to_dict(self) -> dict[str, Any]:
24
+ entry: dict[str, Any] = {"task": self.task, "schedule": self.schedule}
25
+ if self.args:
26
+ entry["args"] = list(self.args)
27
+ if self.kwargs:
28
+ entry["kwargs"] = dict(self.kwargs)
29
+ if self.options:
30
+ entry["options"] = dict(self.options)
31
+ return entry
32
+
33
+
34
def add_periodic(celery_app: Celery, name: str, periodic: Periodic) -> None:
    """Register ``periodic`` in ``celery_app``'s beat schedule under ``name``.

    The schedule mapping is rebuilt rather than mutated in place, so a shared
    or default conf value is never modified as a side effect.
    """
    existing = celery_app.conf.beat_schedule or {}
    celery_app.conf.beat_schedule = {**existing, name: periodic.to_dict()}
39
+
40
+
41
def every_seconds(seconds: float) -> float:
    """Normalize ``seconds`` into the float interval form beat accepts."""
    return float(seconds)


def every(timedelta_value: timedelta) -> timedelta:
    """Identity helper so timedelta schedules read declaratively at call sites."""
    return timedelta_value
47
+
48
+
49
+ __all__ = ["Periodic", "add_periodic", "crontab", "every", "every_seconds"]
@@ -0,0 +1,106 @@
1
+ """Propagate request context (request_id, user_id, etc.) from HTTP handlers to Celery workers.
2
+
3
+ Usage:
4
+
5
+ .. code-block:: python
6
+
7
+ from hawkapi_celery import bind_context, attach_context_signals
8
+
9
+ attach_context_signals(celery_app) # once, at startup
10
+
11
+ # In a handler:
12
+ with bind_context(request_id=request_id, user_id=user.id):
13
+ my_task.delay(...)
14
+
15
+ The context dict is shipped in the task's ``headers["hawkapi_context"]`` and
16
+ restored into a module-level :class:`ContextVar` on the worker side. Read it
17
+ back inside the task with :func:`current_context`.
18
+ """
19
+
20
+ from __future__ import annotations
21
+
22
+ import contextvars
23
+ from collections.abc import Generator
24
+ from contextlib import contextmanager
25
+ from typing import Any
26
+
27
+ from celery import Celery
28
+ from celery.signals import before_task_publish, task_postrun, task_prerun
29
+
30
# Per-execution-context storage for the propagated request context.
_CONTEXT_VAR: contextvars.ContextVar[dict[str, Any] | None] = contextvars.ContextVar(
    "hawkapi_celery_context", default=None
)
# Message-header key under which the context dict travels to the worker.
_HEADER_KEY = "hawkapi_context"


def current_context() -> dict[str, Any]:
    """Return a copy of the active context dict; empty dict when unset."""
    active = _CONTEXT_VAR.get()
    return dict(active) if active else {}


@contextmanager
def bind_context(**values: Any) -> Generator[None, None, None]:
    """Layer ``values`` on top of the current context for the block's duration."""
    base = _CONTEXT_VAR.get() or {}
    token = _CONTEXT_VAR.set({**base, **values})
    try:
        yield
    finally:
        # Restore whatever was active before the block.
        _CONTEXT_VAR.reset(token)


def set_context(values: dict[str, Any]) -> contextvars.Token[dict[str, Any] | None]:
    """Replace the active context with a copy of ``values``; returns a reset token."""
    return _CONTEXT_VAR.set(dict(values))


def reset_context(token: contextvars.Token[dict[str, Any] | None]) -> None:
    """Undo a prior :func:`set_context` using its token."""
    _CONTEXT_VAR.reset(token)
59
+
60
+
61
def attach_context_signals(celery_app: Celery) -> None:
    """Wire the publisher → worker context propagation. Idempotent per app.

    Connects three Celery signal handlers:

    * ``before_task_publish`` — copies the active context dict into the
      outgoing message's ``headers["hawkapi_context"]``.
    * ``task_prerun`` — on the worker, restores that dict into the
      module-level ContextVar and stashes the reset token on the request.
    * ``task_postrun`` — resets the ContextVar using the stashed token.

    NOTE(review): the handlers are connected process-globally (no ``sender``
    filter, ``weak=False``); the per-app flag only prevents connecting them
    more than once per app object.
    """
    flag = "_hawkapi_context_signals"
    if getattr(celery_app, flag, False):
        return  # already wired for this app
    setattr(celery_app, flag, True)

    @before_task_publish.connect(weak=False)
    def _publish(headers: dict[str, Any] | None = None, **_: Any) -> None:
        # Publisher side: ship the current context (if any) in the headers.
        if headers is None:
            return
        ctx = _CONTEXT_VAR.get()
        if ctx:
            headers[_HEADER_KEY] = dict(ctx)

    @task_prerun.connect(weak=False)
    def _prerun(task: Any = None, **_: Any) -> None:
        # Worker side: restore the shipped context before the task body runs.
        if task is None:
            return
        try:
            headers = task.request.headers or {}
        except AttributeError:
            # Request object without a headers attribute — nothing to restore.
            return
        ctx = headers.get(_HEADER_KEY) if isinstance(headers, dict) else None
        if isinstance(ctx, dict):
            # Stash the reset token on the request so _postrun can undo this.
            task.request._hawkapi_token = _CONTEXT_VAR.set(dict(ctx))

    @task_postrun.connect(weak=False)
    def _postrun(task: Any = None, **_: Any) -> None:
        # Worker side: clear the restored context after the task finishes.
        if task is None:
            return
        token = getattr(task.request, "_hawkapi_token", None)
        if token is not None:
            import contextlib

            # The token may belong to a different contextvars context (e.g.
            # when postrun fires on another thread) — swallow that instead of
            # failing the task teardown.
            with contextlib.suppress(ValueError, LookupError):
                _CONTEXT_VAR.reset(token)
98
+
99
+
100
+ __all__ = [
101
+ "attach_context_signals",
102
+ "bind_context",
103
+ "current_context",
104
+ "reset_context",
105
+ "set_context",
106
+ ]
@@ -0,0 +1,56 @@
1
+ """Healthcheck helpers — broker ping + worker discovery."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from dataclasses import dataclass, field
6
+ from typing import Any
7
+
8
+ from celery import Celery
9
+
10
+
11
@dataclass(slots=True)
class HealthReport:
    """Result of :func:`healthcheck` — broker reachability plus worker census."""

    # True when a broker connection could be established.
    broker_ok: bool
    # Number of workers that answered the ping.
    workers_alive: int
    # Mapping of worker name -> raw ping reply payload.
    workers: dict[str, Any] = field(default_factory=dict)
    # Human-readable message when the probe itself raised.
    error: str = ""
17
+
18
+
19
def check_broker(celery_app: Celery, *, timeout: float = 5.0) -> bool:
    """Probe the broker: True when a connection can be established within ``timeout``."""
    try:
        with celery_app.connection_for_read() as conn:  # type: ignore[attr-defined]
            conn.ensure_connection(max_retries=1, interval_start=0, timeout=timeout)
    except Exception:
        # Any failure (DNS, refused, auth, timeout) counts as "not healthy".
        return False
    return True
27
+
28
+
29
def check_workers(celery_app: Celery, *, timeout: float = 5.0) -> dict[str, Any]:
    """Ping all workers; return worker-name -> reply payload (empty dict if none)."""
    try:
        replies = celery_app.control.ping(timeout=timeout)
    except Exception:
        # Control channel unusable — report "no workers" rather than raising.
        return {}
    merged: dict[str, Any] = {}
    # Each reply is a single-entry {worker_name: payload} mapping.
    for reply in replies or []:
        merged.update(reply)
    return merged
40
+
41
+
42
def healthcheck(celery_app: Celery, *, timeout: float = 5.0) -> HealthReport:
    """Combined broker + worker probe; never raises."""
    try:
        broker_ok = check_broker(celery_app, timeout=timeout)
        # Skip the worker ping entirely when the broker is already down.
        workers: dict[str, Any] = (
            check_workers(celery_app, timeout=timeout) if broker_ok else {}
        )
        return HealthReport(
            broker_ok=broker_ok,
            workers_alive=len(workers),
            workers=workers,
        )
    except Exception as exc:
        # Defensive: the sub-probes swallow their own errors, but keep the
        # combined probe crash-proof regardless.
        return HealthReport(broker_ok=False, workers_alive=0, error=str(exc))
54
+
55
+
56
+ __all__ = ["HealthReport", "check_broker", "check_workers", "healthcheck"]
@@ -0,0 +1,74 @@
1
+ """Plugin entry point + DI helpers."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from typing import Any
6
+
7
+ from celery import Celery
8
+ from celery.result import AsyncResult
9
+ from hawkapi import HTTPException, Request
10
+
11
+ from ._app import CeleryConfig, create_celery
12
+ from ._context import attach_context_signals
13
+
14
+
15
class _StateNamespace:
    """Minimal stand-in for ``app.state`` when the host app has none."""

    # The attached Celery instance (assigned by init_celery).
    celery: Any


# id(app) -> Celery registry used by resolve_celery for fast lookup.
# NOTE(review): entries are never removed, and an id() can be reused after
# the app object is garbage-collected — acceptable for a few long-lived
# apps; worth confirming for test suites that create many apps.
_ACTIVE_APPS: dict[int, Celery] = {}
# Most recently initialized Celery app — last-resort fallback for lookups.
_LAST_APP: list[Celery | None] = [None]
21
+
22
+
23
def init_celery(
    app: Any,
    *,
    celery_app: Celery | None = None,
    config: CeleryConfig | None = None,
    name: str = "hawkapi",
    propagate_context: bool = True,
) -> Celery:
    """Attach a Celery app to ``app.state.celery`` and register it for DI lookup.

    Pass an existing ``celery_app=`` to wire up a Celery instance you built yourself;
    otherwise pass a ``config=`` (or nothing for defaults) and we will build one for you.

    Args:
        app: Host application object; gains a ``state.celery`` attribute.
        celery_app: Pre-built Celery instance to attach instead of building one.
        config: Settings for a newly built app (ignored when ``celery_app`` is given).
        name: Name for a newly built Celery app.
        propagate_context: When True (default), wire the request-context signals.

    Returns:
        The attached Celery instance.
    """
    if celery_app is None:
        celery_app = create_celery(name, config=config or CeleryConfig())

    if propagate_context:
        attach_context_signals(celery_app)

    # Give the host app a state namespace if it doesn't already have one.
    if getattr(app, "state", None) is None:
        app.state = _StateNamespace()
    app.state.celery = celery_app
    # Register for DI lookup by app identity, plus a last-resort fallback.
    _ACTIVE_APPS[id(app)] = celery_app
    _LAST_APP[0] = celery_app
    return celery_app
48
+
49
+
50
def resolve_celery(app: Any) -> Celery | None:
    """Find the Celery instance for ``app``, falling back to the last one initialized."""
    if app is None:
        return _LAST_APP[0]
    # Fast path: registry populated by init_celery.
    registered = _ACTIVE_APPS.get(id(app))
    if registered is not None:
        return registered
    # Slow path: the app carries its own state.celery attribute.
    state = getattr(app, "state", None)
    if state is not None and hasattr(state, "celery"):
        return state.celery  # type: ignore[no-any-return]
    return _LAST_APP[0]
60
+
61
+
62
def get_celery(request: Request) -> Celery:
    """DI provider: return the Celery app attached to the request's application.

    Raises:
        HTTPException: 500 when no Celery app was attached via ``init_celery``.
    """
    found = resolve_celery(request.scope.get("app"))
    if found is None:
        raise HTTPException(500, detail="Celery not configured — call init_celery(app, ...) first")
    return found
67
+
68
+
69
def get_task_result(task_id: str, request: Request) -> AsyncResult:
    """Resolve an ``AsyncResult`` for ``task_id`` against the configured Celery app."""
    celery_app = get_celery(request)
    return AsyncResult(task_id, app=celery_app)
72
+
73
+
74
+ __all__ = ["get_celery", "get_task_result", "init_celery", "resolve_celery"]
@@ -0,0 +1,93 @@
1
+ """Task decorators — sync, async-coroutine, and retry helpers."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import asyncio
6
+ import functools
7
+ import inspect
8
+ import random
9
+ from collections.abc import Callable
10
+ from typing import Any, ParamSpec, TypeVar
11
+
12
+ from celery import Celery
13
+
14
+ P = ParamSpec("P")
15
+ R = TypeVar("R")
16
+
17
+
18
def task(
    celery_app: Celery,
    *,
    name: str | None = None,
    queue: str | None = None,
    bind: bool = False,
    autoretry_for: tuple[type[BaseException], ...] = (),
    retry_backoff: bool | int | float = False,
    retry_backoff_max: int = 600,
    retry_jitter: bool = True,
    max_retries: int = 3,
    **task_kwargs: Any,
) -> Callable[[Callable[P, R]], Any]:
    """Register ``fn`` as a Celery task with sensible retry defaults.

    The wrapper auto-detects ``async def`` and runs the coroutine on a private
    event loop for each invocation. For sync functions it forwards as-is.

    Args:
        celery_app: App to register the task on.
        name: Task name; defaults to ``module.qualname`` of the function.
        queue: Destination queue (omitted from registration when None).
        bind: Pass the task instance as the first argument.
        autoretry_for: Exception types that trigger automatic retries.
        retry_backoff / retry_backoff_max / retry_jitter / max_retries:
            Celery's standard retry-tuning options, forwarded verbatim.
        **task_kwargs: Any further options forwarded to ``celery_app.task``.
    """

    def decorator(fn: Callable[P, R]) -> Any:
        register_kwargs: dict[str, Any] = {
            "name": name or f"{fn.__module__}.{fn.__qualname__}",
            "queue": queue,
            "bind": bind,
            "autoretry_for": autoretry_for,
            "retry_backoff": retry_backoff,
            "retry_backoff_max": retry_backoff_max,
            "retry_jitter": retry_jitter,
            "max_retries": max_retries,
            **task_kwargs,
        }
        # Drop None-valued options (e.g. queue) so Celery's own defaults apply.
        register_kwargs = {k: v for k, v in register_kwargs.items() if v is not None}

        if inspect.iscoroutinefunction(fn):
            # async def: each call drives the coroutine to completion on a
            # private event loop (worker task bodies are synchronous).
            @functools.wraps(fn)
            def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
                return _run_coroutine(fn, *args, **kwargs)  # type: ignore[arg-type]

            return celery_app.task(**register_kwargs)(wrapper)
        return celery_app.task(**register_kwargs)(fn)

    return decorator
61
+
62
+
63
+ def _run_coroutine(coro_fn: Callable[..., Any], *args: Any, **kwargs: Any) -> Any:
64
+ """Run ``coro_fn(*args, **kwargs)`` on a private event loop."""
65
+ try:
66
+ loop = asyncio.get_event_loop()
67
+ if loop.is_running():
68
+ # Nested call — bail rather than block the outer loop.
69
+ raise RuntimeError("cannot run coroutine task inside an active event loop")
70
+ except RuntimeError:
71
+ loop = asyncio.new_event_loop()
72
+ asyncio.set_event_loop(loop)
73
+ try:
74
+ return loop.run_until_complete(coro_fn(*args, **kwargs))
75
+ finally:
76
+ loop.close()
77
+ asyncio.set_event_loop(None)
78
+
79
+
80
def compute_backoff(
    attempt: int, *, base: float = 1.0, cap: float = 60.0, jitter: bool = True
) -> float:
    """Compute exponential backoff with optional decorrelated jitter.

    Useful when scheduling manual retries via ``task.retry(countdown=...)``.

    Args:
        attempt: 1-based retry attempt number (values <= 1 yield ``base``).
        base: First-attempt delay and jitter floor, in seconds.
        cap: Upper bound on the un-jittered delay.
        jitter: Draw the delay uniformly from [base, delay] when True.
    """
    exponent = max(0, attempt - 1)
    delay = min(cap, base * (2**exponent))
    if not jitter:
        return delay
    return random.uniform(base, max(base, delay))  # noqa: S311 - not crypto
91
+
92
+
93
+ __all__ = ["compute_backoff", "task"]
@@ -0,0 +1,86 @@
1
+ """Test helpers — eager-mode and task capture."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from collections.abc import Generator
6
+ from contextlib import contextmanager
7
+ from dataclasses import dataclass, field
8
+ from typing import Any
9
+
10
+ from celery import Celery
11
+ from celery.signals import before_task_publish
12
+
13
+
14
@dataclass(slots=True)
class CapturedTask:
    """One intercepted task publish, as collected by :class:`TaskRecorder`."""

    # Task name (the publish signal's ``sender``).
    name: str
    # Positional arguments pulled from the message body.
    args: tuple[Any, ...] = ()
    # Keyword arguments pulled from the message body.
    kwargs: dict[str, Any] = field(default_factory=dict)
    # Message headers (includes any propagated request context).
    headers: dict[str, Any] = field(default_factory=dict)
20
+
21
+
22
class TaskRecorder:
    """Collects every task publish seen on a Celery app — useful in tests."""

    def __init__(self) -> None:
        # Chronological list of intercepted publishes.
        self.captured: list[CapturedTask] = []

    def __len__(self) -> int:
        """Number of publishes captured so far."""
        return len(self.captured)

    def clear(self) -> None:
        """Forget everything captured so far."""
        del self.captured[:]
33
+
34
+
35
@contextmanager
def record_tasks(celery_app: Celery) -> Generator[TaskRecorder, None, None]:
    """Capture every task publish for the duration of the block.

    Connects a temporary ``before_task_publish`` handler and yields a
    :class:`TaskRecorder` that accumulates one :class:`CapturedTask` per
    publish.

    NOTE(review): the handler is connected with ``sender=None``, so publishes
    from *any* Celery app in the process are captured — not only
    ``celery_app``; confirm that is acceptable for multi-app test suites.
    """
    recorder = TaskRecorder()

    def _capture(
        sender: str | None = None,
        body: Any = None,
        headers: dict[str, Any] | None = None,
        **_: Any,
    ) -> None:
        # body is expected to look like (args, kwargs, ...); only unpack when
        # the shape matches, otherwise record with empty args/kwargs.
        args: tuple[Any, ...] = ()
        kwargs: dict[str, Any] = {}
        if isinstance(body, tuple) and len(body) >= 2:
            a, k = body[0], body[1]
            if isinstance(a, (list, tuple)):
                args = tuple(a)
            if isinstance(k, dict):
                kwargs = dict(k)
        recorder.captured.append(
            CapturedTask(
                name=sender or "",
                args=args,
                kwargs=kwargs,
                headers=dict(headers or {}),
            )
        )

    before_task_publish.connect(_capture, weak=False, sender=None)
    try:
        yield recorder
    finally:
        # Always detach the handler, even if the block raised.
        before_task_publish.disconnect(_capture)
        # Reference to celery_app keeps signature stable / lets us extend later.
        _ = celery_app
70
+
71
+
72
@contextmanager
def eager_mode(celery_app: Celery, *, propagate: bool = True) -> Generator[Celery, None, None]:
    """Run all tasks synchronously in-process — drop-in for tests."""
    # Snapshot the caller's settings so they can be restored exactly.
    saved = (
        celery_app.conf.task_always_eager,
        celery_app.conf.task_eager_propagates,
    )
    celery_app.conf.task_always_eager = True
    celery_app.conf.task_eager_propagates = propagate
    try:
        yield celery_app
    finally:
        celery_app.conf.task_always_eager = saved[0]
        celery_app.conf.task_eager_propagates = saved[1]
84
+
85
+
86
+ __all__ = ["CapturedTask", "TaskRecorder", "eager_mode", "record_tasks"]
File without changes
@@ -0,0 +1,224 @@
1
+ Metadata-Version: 2.4
2
+ Name: hawkapi-celery
3
+ Version: 0.1.0
4
+ Summary: Celery integration for HawkAPI — async tasks, beat scheduler, context propagation, healthchecks, eager-mode fixtures
5
+ Project-URL: Homepage, https://pypi.org/project/hawkapi-celery/
6
+ Project-URL: Repository, https://github.com/ashimov/hawkapi-celery
7
+ Project-URL: Issues, https://github.com/ashimov/hawkapi-celery/issues
8
+ Author-email: HawkAPI Contributors <hawkapi@users.noreply.github.com>
9
+ License: MIT License
10
+
11
+ Copyright (c) 2026 HawkAPI Contributors
12
+
13
+ Permission is hereby granted, free of charge, to any person obtaining a copy
14
+ of this software and associated documentation files (the "Software"), to deal
15
+ in the Software without restriction, including without limitation the rights
16
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
17
+ copies of the Software, and to permit persons to whom the Software is
18
+ furnished to do so, subject to the following conditions:
19
+
20
+ The above copyright notice and this permission notice shall be included in all
21
+ copies or substantial portions of the Software.
22
+
23
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
24
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
25
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
26
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
27
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
28
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
29
+ SOFTWARE.
30
+ License-File: LICENSE
31
+ Keywords: async,background,beat,celery,hawkapi,queue,tasks
32
+ Classifier: Development Status :: 5 - Production/Stable
33
+ Classifier: Framework :: AsyncIO
34
+ Classifier: Framework :: Celery
35
+ Classifier: Intended Audience :: Developers
36
+ Classifier: License :: OSI Approved :: MIT License
37
+ Classifier: Programming Language :: Python :: 3
38
+ Classifier: Programming Language :: Python :: 3.12
39
+ Classifier: Programming Language :: Python :: 3.13
40
+ Classifier: Topic :: System :: Distributed Computing
41
+ Classifier: Typing :: Typed
42
+ Requires-Python: >=3.12
43
+ Requires-Dist: celery>=5.4
44
+ Requires-Dist: hawkapi>=0.1.7
45
+ Provides-Extra: dev
46
+ Requires-Dist: pyright>=1.1; extra == 'dev'
47
+ Requires-Dist: pytest-asyncio>=0.24; extra == 'dev'
48
+ Requires-Dist: pytest>=8.0; extra == 'dev'
49
+ Requires-Dist: redis>=5.0; extra == 'dev'
50
+ Requires-Dist: ruff>=0.8; extra == 'dev'
51
+ Provides-Extra: redis
52
+ Requires-Dist: redis>=5.0; extra == 'redis'
53
+ Description-Content-Type: text/markdown
54
+
55
+ # hawkapi-celery
56
+
57
+ Celery integration for [HawkAPI](https://github.com/ashimov/HawkAPI). Async tasks, beat scheduler, request-context propagation, broker/worker healthchecks, and eager-mode fixtures for tests.
58
+
59
+ ## Install
60
+
61
+ ```bash
62
+ pip install hawkapi-celery
63
+ pip install 'hawkapi-celery[redis]' # adds redis client
64
+ ```
65
+
66
+ ## Quickstart
67
+
68
+ ```python
69
+ from hawkapi import Depends, HawkAPI
70
+ from celery import Celery
71
+ from hawkapi_celery import (
72
+ CeleryConfig, bind_context, get_celery, init_celery, task,
73
+ )
74
+
75
+
76
+ celery_app: Celery # populated below
77
+
78
+
79
+ def make_app() -> HawkAPI:
80
+ app = HawkAPI()
81
+ global celery_app
82
+ celery_app = init_celery(
83
+ app,
84
+ config=CeleryConfig(
85
+ broker_url="redis://localhost:6379/0",
86
+ result_backend="redis://localhost:6379/0",
87
+ ),
88
+ )
89
+
90
+
91
+ @task(celery_app, name="emails.send")
92
+ async def send_email(to: str, subject: str, body: str) -> None:
93
+ ... # any await-able send logic
94
+
95
+
96
+ @app.post("/notify")
97
+ async def notify(email: str, c: Celery = Depends(get_celery)):
98
+ with bind_context(request_id="…"):
99
+ send_email.delay(email, "Welcome", "Hello!")
100
+ return {"ok": True}
101
+
102
+ return app
103
+ ```
104
+
105
+ ## Tasks
106
+
107
+ ```python
108
+ from hawkapi_celery import task
109
+
110
+ @task(celery_app, name="myapp.work", queue="default",
111
+ autoretry_for=(ConnectionError,), retry_backoff=True, max_retries=5)
112
+ async def work(x: int) -> int: # async def — runs on a private event loop
113
+ ...
114
+ return x * 2
115
+
116
+
117
+ @task(celery_app, bind=True)
118
+ def slow_work(self, payload): # sync — bound `self` for retry handling
119
+ try:
120
+ do_thing(payload)
121
+ except TransientError as exc:
122
+ raise self.retry(exc=exc, countdown=compute_backoff(self.request.retries))
123
+ ```
124
+
125
+ ## Beat (periodic tasks)
126
+
127
+ ```python
128
+ from datetime import timedelta
129
+ from hawkapi_celery import Periodic, add_periodic, crontab, every
130
+
131
+ add_periodic(celery_app, "cleanup",
132
+ Periodic(task="myapp.cleanup", schedule=every(timedelta(hours=1))))
133
+
134
+ add_periodic(celery_app, "nightly_report",
135
+ Periodic(task="myapp.report", schedule=crontab(hour=2, minute=0),
136
+ kwargs={"date": "yesterday"}))
137
+ ```
138
+
139
+ ## Context propagation
140
+
141
+ `bind_context()` carries a dict from the HTTP handler to the worker process via the task headers. Inside the task call `current_context()` to read it back.
142
+
143
+ ```python
144
+ from hawkapi_celery import bind_context, current_context
145
+
146
+ @task(celery_app, name="log.event")
147
+ def log_event(payload: dict) -> None:
148
+ ctx = current_context() # {"request_id": "…", "user_id": "…"}
149
+ log.info("event", **ctx, **payload)
150
+
151
+
152
+ @app.post("/event")
153
+ async def post_event(p: Payload):
154
+ with bind_context(request_id=p.request_id, user_id=p.user_id):
155
+ log_event.delay(p.model_dump())
156
+ ```
157
+
158
+ Wired automatically by `init_celery(..., propagate_context=True)` (default).
159
+
160
+ ## Healthchecks
161
+
162
+ ```python
163
+ from hawkapi_celery import healthcheck
164
+
165
+
166
+ @app.get("/healthz")
167
+ async def healthz():
168
+ report = healthcheck(celery_app, timeout=2.0)
169
+ return {
170
+ "broker": report.broker_ok,
171
+ "workers_alive": report.workers_alive,
172
+ "workers": list(report.workers),
173
+ }
174
+ ```
175
+
176
+ ## Testing
177
+
178
+ ```python
179
+ from hawkapi_celery import eager_mode, record_tasks
180
+
181
+
182
+ def test_signup_enqueues_welcome_email(client, celery_app):
183
+ with record_tasks(celery_app) as recorder:
184
+ client.post("/signup", json={"email": "x@y.z"})
185
+ assert any(t.name == "emails.send" for t in recorder.captured)
186
+
187
+
188
+ def test_signup_runs_welcome_email_inline(client, celery_app):
189
+ with eager_mode(celery_app):
190
+ client.post("/signup", json={"email": "x@y.z"})
191
+ # All tasks executed synchronously in-process — assert their side-effects directly.
192
+ ```
193
+
194
+ ## CeleryConfig
195
+
196
+ ```python
197
+ CeleryConfig(
198
+ broker_url="redis://localhost:6379/0",
199
+ result_backend="redis://localhost:6379/0",
200
+ task_serializer="json",
201
+ timezone="UTC",
202
+ task_time_limit=600,
203
+ task_soft_time_limit=540,
204
+ worker_prefetch_multiplier=1,
205
+ worker_max_tasks_per_child=1000,
206
+ task_default_queue="default",
207
+ extra_kwargs={...}, # forwarded to celery.conf.update
208
+ )
209
+ ```
210
+
211
+ ## Development
212
+
213
+ ```bash
214
+ git clone https://github.com/ashimov/hawkapi-celery.git
215
+ cd hawkapi-celery
216
+ uv sync --extra dev
217
+ uv run pytest -q
218
+ uv run ruff check . && uv run ruff format --check .
219
+ uv run pyright src/
220
+ ```
221
+
222
+ ## License
223
+
224
+ MIT.
@@ -0,0 +1,13 @@
1
+ hawkapi_celery/__init__.py,sha256=gtBjDMMZwVP2gxCMk_Jiz9fLKABRaXiz7EpWZ98rfrc,1350
2
+ hawkapi_celery/_app.py,sha256=T7K6p7_1LraqtIHqvZ3e9ECjSCm0w1jcKsGY4hVhmG8,2371
3
+ hawkapi_celery/_beat.py,sha256=pZK7CCMLkvqDCiBWLSrYXt7_KyyDnOWzx1mBLN-TnlY,1469
4
+ hawkapi_celery/_context.py,sha256=R-Q4NjUZP8LgKC7oDYbmT4xlKRPA6evsXPCNK58K6Go,3224
5
+ hawkapi_celery/_health.py,sha256=BCfXICrUWjXKkaKJH7J5-ve5bc6ABJG6AbcqVZCou8s,1777
6
+ hawkapi_celery/_plugin.py,sha256=7RILxG0zwubXbxiN8QBnA5bBHvYohhvzJ18rFKCJxSY,2153
7
+ hawkapi_celery/_tasks.py,sha256=kLDt2TLNGb8HacjZ5GQ1uHB_THpZluVtbsVwZob1CG4,2962
8
+ hawkapi_celery/_testing.py,sha256=77IrAUw5UgCPaqT8KfgRpgaW8X-6RAGzEZji_dQcZwQ,2586
9
+ hawkapi_celery/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
10
+ hawkapi_celery-0.1.0.dist-info/METADATA,sha256=7u7B8Zb-M9uYeOueZ-A1oHbBBjBUuyc6dRniUcVw5Qo,7066
11
+ hawkapi_celery-0.1.0.dist-info/WHEEL,sha256=QccIxa26bgl1E6uMy58deGWi-0aeIkkangHcxk2kWfw,87
12
+ hawkapi_celery-0.1.0.dist-info/licenses/LICENSE,sha256=_RpjhvsfLqqeG_gv2cRatjIxCTGXTpXhKU9jqLZXYa4,1077
13
+ hawkapi_celery-0.1.0.dist-info/RECORD,,
@@ -0,0 +1,4 @@
1
+ Wheel-Version: 1.0
2
+ Generator: hatchling 1.29.0
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 HawkAPI Contributors
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.