ltq 0.1.2__py3-none-any.whl → 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ltq/__init__.py CHANGED
@@ -1,11 +1,17 @@
+from .app import App
+from .utils import dispatch
 from .task import Task
 from .worker import Worker
+from .scheduler import Scheduler
 from .logger import get_logger
 from .errors import RejectMessage, RetryMessage
 
 __all__ = [
+    "App",
     "Worker",
+    "Scheduler",
     "Task",
+    "dispatch",
     "get_logger",
     "RejectMessage",
     "RetryMessage",
ltq/app.py ADDED
@@ -0,0 +1,14 @@
+import asyncio
+
+from .worker import Worker
+
+
+class App:
+    def __init__(self) -> None:
+        self.workers: set[Worker] = set()
+
+    def register_worker(self, worker: Worker) -> None:
+        self.workers.add(worker)
+
+    async def run(self) -> None:
+        await asyncio.gather(*(w.run() for w in self.workers))
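App is a thin container that runs every registered worker under one asyncio.gather. A minimal usage sketch, assuming a Worker has already been constructed (Worker's constructor arguments are not shown in this diff, and the example:app module path is illustrative):

# example.py -- hypothetical wiring; only App.register_worker()/run() come from this diff
import asyncio

from ltq import App, Worker

worker: Worker = ...  # construct your Worker here; its arguments are not shown in this diff
app = App()
app.register_worker(worker)

if __name__ == "__main__":
    asyncio.run(app.run())  # runs every registered worker concurrently

With the CLI changes below, the same instance could also be started as `ltq --app example:app`.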
ltq/cli.py CHANGED
@@ -7,6 +7,7 @@ from pathlib import Path
 import argparse
 
 from .logger import setup_logging, get_logger
+from .app import App
 from .worker import Worker
 
 logger = get_logger()
@@ -46,7 +47,12 @@ def main():
         epilog="Example:\n ltq example:worker --concurrency 100",
     )
 
-    parser.add_argument("worker", help="Worker import string (module:attribute)")
+    parser.add_argument(
+        "worker", nargs="?", help="Worker import string (module:attribute)"
+    )
+    parser.add_argument(
+        "--app", dest="app", help="App import string (module:attribute)"
+    )
     parser.add_argument("--concurrency", type=int, help="Override worker concurrency")
     parser.add_argument("--poll-sleep", type=float, help="Override worker poll sleep")
     parser.add_argument(
@@ -58,27 +64,47 @@ def main():
     )
     args = parser.parse_args()
 
+    if not args.worker and not args.app:
+        parser.error("either worker or --app is required")
+    if args.worker and args.app:
+        parser.error("cannot specify both worker and --app")
+
     # Setup colored logging for CLI
     setup_logging(level=args.log_level)
-
-    worker: Worker = import_from_string(args.worker)
-
-    # Apply overrides
-    if args.concurrency:
-        worker.concurrency = args.concurrency
-    if args.poll_sleep:
-        worker.poll_sleep = args.poll_sleep
     if args.log_level:
         logger.setLevel(args.log_level)
 
-    # Print startup info
-    logger.info("Starting ltq worker")
-    logger.info("Worker: %s", args.worker)
-
-    try:
-        asyncio.run(worker.run())
-    except KeyboardInterrupt:
-        logger.info("Shutting down...")
+    if args.app:
+        app: App = import_from_string(args.app)
+
+        for w in app.workers:
+            if args.concurrency:
+                w.concurrency = args.concurrency
+            if args.poll_sleep:
+                w.poll_sleep = args.poll_sleep
+
+        logger.info("Starting ltq app")
+        logger.info("App: %s (%d workers)", args.app, len(app.workers))
+
+        try:
+            asyncio.run(app.run())
+        except KeyboardInterrupt:
+            logger.info("Shutting down...")
+    else:
+        worker: Worker = import_from_string(args.worker)
+
+        if args.concurrency:
+            worker.concurrency = args.concurrency
+        if args.poll_sleep:
+            worker.poll_sleep = args.poll_sleep
+
+        logger.info("Starting ltq worker")
+        logger.info("Worker: %s", args.worker)
+
+        try:
+            asyncio.run(worker.run())
+        except KeyboardInterrupt:
+            logger.info("Shutting down...")
 
 
 if __name__ == "__main__":
ltq/message.py CHANGED
@@ -3,21 +3,25 @@ from __future__ import annotations
 import json
 import uuid
 from dataclasses import dataclass, field
-from typing import Any
+from typing import Any, TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from .task import Task
 
 
 @dataclass
 class Message:
     args: tuple[Any, ...]
     kwargs: dict[str, Any]
-    task: str
+    task_name: str
+    task: Task | None = None  # only set when Message created with Task.message
     ctx: dict[str, Any] = field(default_factory=dict)
     id: str = field(default_factory=lambda: uuid.uuid4().hex)
 
     def to_json(self) -> str:
         return json.dumps(
             {
-                "task": self.task,
+                "task_name": self.task_name,
                 "id": self.id,
                 "args": self.args,
                 "kwargs": self.kwargs,
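The wire format now carries task_name, while the new task field is an in-memory back-reference set by Task.message and is not among the JSON keys shown above. For illustration only, constructing a Message directly (values are made up):

from ltq.message import Message

msg = Message(args=(1, 2), kwargs={}, task_name="add")
print(msg.task)       # None -- only set when the message comes from Task.message
print(msg.to_json())  # JSON containing "task_name", "id", "args", "kwargs" (see to_json above)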
ltq/middleware.py CHANGED
@@ -95,13 +95,13 @@ class Sentry(Middleware):
 
     async def handle(self, message: Message, next_handler: Handler) -> Any:
         with self.sentry.push_scope() as scope:
-            scope.set_tag("task", message.task)
+            scope.set_tag("task", message.task_name)
             scope.set_tag("message_id", message.id)
             scope.set_context(
                 "message",
                 {
                     "id": message.id,
-                    "task": message.task,
+                    "task": message.task_name,
                     "args": message.args,
                     "kwargs": message.kwargs,
                     "ctx": message.ctx,
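Any middleware sees the same handle(message, next_handler) contract used by Sentry above. A sketch of a custom timing middleware, assuming Middleware and Handler are importable from ltq.middleware (their definitions are not part of this diff):

import time
from typing import Any

from ltq import get_logger
from ltq.message import Message
from ltq.middleware import Handler, Middleware  # assumed import path

logger = get_logger()


class Timing(Middleware):
    async def handle(self, message: Message, next_handler: Handler) -> Any:
        start = time.monotonic()
        try:
            return await next_handler(message)
        finally:
            logger.info("%s took %.3fs", message.task_name, time.monotonic() - start)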
ltq/q.py CHANGED
@@ -14,12 +14,12 @@ class Queue:
         local items = {}
         for i = 1, ARGV[1] do
             local item = redis.call("RPOP", KEYS[1])
-            if item then
-                table.insert(items, item)
-            end
+            if not item then break end
+            table.insert(items, item)
         end
-        if #items > 0 then
-            redis.call("SADD", KEYS[2], unpack(items))
+        for i = 1, #items, 5000 do
+            local chunk = {unpack(items, i, math.min(i + 4999, #items))}
+            redis.call("SADD", KEYS[2], unpack(chunk))
         end
         return items
     """
ltq/scheduler.py ADDED
@@ -0,0 +1,81 @@
+from __future__ import annotations
+
+import asyncio
+import time
+from dataclasses import dataclass, field
+from datetime import datetime
+from typing import TYPE_CHECKING
+
+from .message import Message
+
+try:
+    from croniter import croniter  # type: ignore[import-not-found]
+except ModuleNotFoundError as exc:
+    raise ModuleNotFoundError(
+        "Scheduler requires optional dependency 'croniter'. "
+        "Install with 'ltq[scheduler]'."
+    ) from exc
+
+from .utils import dispatch
+from .logger import get_logger
+
+if TYPE_CHECKING:
+    from .task import Task
+
+
+@dataclass
+class ScheduledJob:
+    task: Task
+    msg: Message
+    expr: str
+    _cron: croniter = field(init=False, repr=False)
+    next_run: datetime = field(init=False)
+
+    def __post_init__(self):
+        self._cron = croniter(self.expr, datetime.now())
+        self.advance()
+
+    def advance(self) -> None:
+        self.next_run = self._cron.get_next(datetime)
+
+
+class Scheduler:
+    def __init__(self, poll_interval: float = 10.0) -> None:
+        self.poll_interval = poll_interval
+        self.jobs: list[ScheduledJob] = []
+        self.logger = get_logger("ltq.scheduler")
+        self._running = False
+
+    def cron(self, expr: str, msg: Message) -> None:
+        if msg.task is None:
+            raise ValueError("Message must have a task assigned to use with scheduler")
+        self.jobs.append(ScheduledJob(msg.task, msg, expr))
+
+    def run(self) -> None:
+        self._running = True
+        self.logger.info("Starting scheduler")
+        for job in self.jobs:
+            self.logger.info(
+                f"{job.task.name} [{job.expr}] next={job.next_run:%H:%M:%S}"
+            )
+
+        loop = asyncio.new_event_loop()
+        while self._running:
+            now = datetime.now()
+            due = [job for job in self.jobs if now >= job.next_run]
+            if due:
+                try:
+                    loop.run_until_complete(dispatch([job.msg for job in due]))
+                    for job in due:
+                        self.logger.info(
+                            f"Enqueued {job.task.name} scheduled={job.next_run:%H:%M:%S}"
+                        )
+                except Exception:
+                    self.logger.exception("Failed to dispatch scheduled jobs")
+                for job in due:
+                    job.advance()
+            time.sleep(self.poll_interval)
+        loop.close()
+
+    def stop(self) -> None:
+        self._running = False
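The scheduler polls in a plain blocking loop and pushes due messages through dispatch. A minimal usage sketch, assuming a task created with the Worker.task decorator (the worker wiring, task name, and cron expression are illustrative, not taken from this diff):

# Hypothetical wiring; Scheduler.cron()/run()/stop() and Task.message() are from this diff
from ltq import Scheduler, Worker

worker: Worker = ...  # construct your Worker here; arguments not shown in this diff

@worker.task()
async def cleanup() -> None:
    ...

scheduler = Scheduler(poll_interval=5.0)
scheduler.cron("*/5 * * * *", cleanup.message())  # enqueue cleanup every five minutes
scheduler.run()  # blocking; call scheduler.stop() from another thread to exit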
ltq/task.py CHANGED
@@ -1,6 +1,5 @@
 from __future__ import annotations
 
-from functools import update_wrapper
 from typing import Awaitable, Callable, Generic, ParamSpec, TypeVar
 
 from .message import Message
@@ -27,7 +26,8 @@ class Task(Generic[P, R]):
         return Message(
             args=args,
             kwargs=kwargs,
-            task=self.name,
+            task=self,
+            task_name=self.name,
         )
 
     async def send(self, *args: P.args, **kwargs: P.kwargs) -> str:
@@ -35,9 +35,5 @@ class Task(Generic[P, R]):
         await self.queue.put([message], ttl=self.ttl)
         return message.id
 
-    async def send_bulk(self, messages: list[Message]) -> list[str]:
-        await self.queue.put(messages, ttl=self.ttl)
-        return [message.id for message in messages]
-
     async def __call__(self, *args: P.args, **kwargs: P.kwargs) -> R:
         return await self.fn(*args, **kwargs)
ltq/utils.py ADDED
@@ -0,0 +1,19 @@
+from __future__ import annotations
+
+from collections import defaultdict
+
+from .message import Message
+from .q import Queue
+
+
+async def dispatch(messages: list[Message]) -> list[str]:
+    by_queue: defaultdict[Queue, list[Message]] = defaultdict(list)
+    for msg in messages:
+        if msg.task is None:
+            raise ValueError(f"Message {msg.id} has no task assigned")
+        by_queue[msg.task.queue].append(msg)
+
+    for queue, batch in by_queue.items():
+        await queue.put(batch)
+
+    return [msg.id for msg in messages]
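dispatch groups messages by their task's queue and enqueues each group with a single queue.put, which is what replaces the Task.send_bulk method removed from task.py above; note that, unlike Task.send, it does not pass a ttl. A minimal sketch, assuming resize and cleanup are Worker.task-decorated tasks (illustrative names):

from ltq import dispatch


async def enqueue_batch(urls: list[str]) -> list[str]:
    # resize/cleanup: hypothetical Task objects created via @worker.task();
    # messages for different tasks (and therefore queues) can be mixed in one call
    msgs = [resize.message(u) for u in urls] + [cleanup.message()]
    return await dispatch(msgs)  # returns one message id per enqueued message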
ltq/worker.py CHANGED
@@ -38,6 +38,7 @@ class Worker:
         self.concurrency: int = concurrency
         self.poll_sleep: float = poll_sleep
 
+
     def task(
         self,
         queue_name: str | None = None,
@@ -58,7 +59,7 @@ class Worker:
 
         return decorator
 
-    async def worker(self, task: Task):
+    async def processor(self, task: Task):
         async def base(message: Message) -> Any:
             return await task.fn(*message.args, **message.kwargs)
 
@@ -66,15 +67,11 @@ class Worker:
         for middleware in reversed(self.middlewares):
             handler = partial(middleware.handle, next_handler=handler)
 
-        while True:
-            messages = await task.queue.get(self.concurrency)
-            if not messages:
-                await asyncio.sleep(self.poll_sleep)
-                continue
-
-            logger.debug(f"Processing {len(messages)} messages for {task.name}")
+        sem = asyncio.Semaphore(self.concurrency)
+        pending: dict[asyncio.Task, Message] = {}
 
-            async def process(msg: Message) -> None:
+        async def process(msg: Message) -> None:
+            async with sem:
                 try:
                     await handler(msg)
                 except RetryMessage as e:
@@ -86,12 +83,26 @@ class Worker:
                         exc_info=True,
                     )
 
-            await asyncio.gather(*(process(m) for m in messages))
-            await task.queue.ack(messages)
+        while True:
+            messages = await task.queue.get(self.concurrency)
+            if not messages:
+                await asyncio.sleep(self.poll_sleep)
+                continue
+
+            logger.debug(f"Processing {len(messages)} messages for {task.name}")
+
+            for msg in messages:
+                t = asyncio.create_task(process(msg))
+                pending[t] = msg
+
+            done, _ = await asyncio.wait(pending, return_when=asyncio.FIRST_COMPLETED)
+            await task.queue.ack([pending.pop(t) for t in done])
 
     async def run(self) -> None:
         try:
-            workers = (self.worker(task) for task in self.tasks)
-            await asyncio.gather(*workers)
+            processors = (self.processor(task) for task in self.tasks)
+            await asyncio.gather(*processors)
+        except asyncio.CancelledError:
+            logger.info("Worker shutting down...")
         finally:
             await self.client.aclose()
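The processor no longer waits for a whole batch with gather: messages become asyncio tasks bounded by a semaphore, and acks go out as soon as the first tasks finish (asyncio.wait with FIRST_COMPLETED). A standalone sketch of that pattern outside ltq, with fetch/handle/ack as stand-ins for queue.get, the middleware chain, and queue.ack:

import asyncio


async def bounded_consumer(fetch, handle, ack, concurrency: int = 10, poll_sleep: float = 1.0) -> None:
    # Generic semaphore + FIRST_COMPLETED consumption loop, mirroring processor() above
    sem = asyncio.Semaphore(concurrency)
    pending: dict[asyncio.Task, object] = {}

    async def process(item) -> None:
        async with sem:  # at most `concurrency` handlers run at once
            await handle(item)

    while True:
        items = await fetch(concurrency)
        if not items and not pending:
            await asyncio.sleep(poll_sleep)
            continue
        for item in items:
            pending[asyncio.create_task(process(item))] = item
        done, _ = await asyncio.wait(pending, return_when=asyncio.FIRST_COMPLETED)
        await ack([pending.pop(t) for t in done])  # ack only what actually finished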
ltq-0.1.2.dist-info/METADATA → ltq-0.2.0.dist-info/METADATA CHANGED
@@ -1,12 +1,14 @@
 Metadata-Version: 2.3
 Name: ltq
-Version: 0.1.2
+Version: 0.2.0
 Summary: Add your description here
 Author: Tom Clesius
 Author-email: Tom Clesius <tomclesius@gmail.com>
 Requires-Dist: redis>=7.1.0
+Requires-Dist: croniter>=6.0.0 ; extra == 'scheduler'
 Requires-Dist: sentry-sdk>=2.0.0 ; extra == 'sentry'
 Requires-Python: >=3.13
+Provides-Extra: scheduler
 Provides-Extra: sentry
 Description-Content-Type: text/markdown
 
ltq-0.2.0.dist-info/RECORD ADDED
@@ -0,0 +1,16 @@
+ltq/__init__.py,sha256=yZ65BhEtoQApFCOPWCL5_5b2rAIfBkw6XW5ppSpurW0,355
+ltq/app.py,sha256=PI9Zti37qpgYVzLRIbRoAMmhJ3wLdIY05bTQCkUzehA,316
+ltq/cli.py,sha256=JD_iHStkP2WuVlOz0L90Sako-oHPafTzV08INDJsHio,3219
+ltq/errors.py,sha256=i9kXtSVMpam_0VpyL5eaSfBwkGF6dK7xouAcLdc_eNc,324
+ltq/logger.py,sha256=HPClhDt3ecwZqE0Vq2oYF8Nr9jj-xrsSX9tM6enVgkA,1791
+ltq/message.py,sha256=hY_oPNMicSICV9-_o8Rma_kJO3DGJu_PFxyt1xZKMJE,871
+ltq/middleware.py,sha256=xy4VXy8uXp0vBMhdD6aSOQdzwCdbUkcuFw7FwPsqabU,3641
+ltq/q.py,sha256=3jRlPZKO4Zd5IYnb24jO_oRHtHBbG1la5SCZPI6QLI4,2537
+ltq/scheduler.py,sha256=EjXmmcyDv9Bp473Y40cTZAW6UVvkwZ_0MDE1XOjm5nI,2451
+ltq/task.py,sha256=ZKKag4u9k9hQ-lNdIWZygmGCc01DRt8a6kMQlQr_yaw,1000
+ltq/utils.py,sha256=M7EWJ-n9J3MMwofWwtxVkwgHmGeP66eoWuWDhjbx67k,536
+ltq/worker.py,sha256=gHA9xfDx1plhrC53nfO2mmapWex1JbFVNxGGSIV_ow8,3423
+ltq-0.2.0.dist-info/WHEEL,sha256=e_m4S054HL0hyR3CpOk-b7Q7fDX6BuFkgL5OjAExXas,80
+ltq-0.2.0.dist-info/entry_points.txt,sha256=OogYaOJ_RORrWtrLlEL_gTN9Vx5tkgawl8BO7G9FKcg,38
+ltq-0.2.0.dist-info/METADATA,sha256=mf5XFhk1D41Oimz_f34fiNh7quyOhXBRQu5qwEt2tnE,2396
+ltq-0.2.0.dist-info/RECORD,,
ltq-0.1.2.dist-info/WHEEL → ltq-0.2.0.dist-info/WHEEL CHANGED
@@ -1,4 +1,4 @@
 Wheel-Version: 1.0
-Generator: uv 0.9.26
+Generator: uv 0.9.27
 Root-Is-Purelib: true
 Tag: py3-none-any
ltq-0.1.2.dist-info/RECORD DELETED
@@ -1,13 +0,0 @@
-ltq/__init__.py,sha256=sN88QG1vEsxFfeV7Lr381UFbLjz9oxLO04BPLY7Ae6k,229
-ltq/cli.py,sha256=sDC19BjAzUDSNY_oSsXdV5Q2HosQCi8Htq_dOmT4AXA,2370
-ltq/errors.py,sha256=i9kXtSVMpam_0VpyL5eaSfBwkGF6dK7xouAcLdc_eNc,324
-ltq/logger.py,sha256=HPClhDt3ecwZqE0Vq2oYF8Nr9jj-xrsSX9tM6enVgkA,1791
-ltq/message.py,sha256=C6gJR6KuRrnIb-l9Jna7e-XIu_aFHBhnZICUqE1_2MU,715
-ltq/middleware.py,sha256=NMRI7UiJiUTqAHOB18Gz686mQGszSu-6hJiyTDPCKdE,3631
-ltq/q.py,sha256=LddedNdb9uYD9qAjkzN0inBhvQ-mp6uJLgguEhv_TeE,2462
-ltq/task.py,sha256=JOeyulGJ-jSM_wzYyKPTPElRGx7Ncu6n1dRdn9NGwMI,1184
-ltq/worker.py,sha256=PAMW8hvTuWCUALw41TnVBmcCxXytJbgVsxALAY9qmFY,3031
-ltq-0.1.2.dist-info/WHEEL,sha256=XV0cjMrO7zXhVAIyyc8aFf1VjZ33Fen4IiJk5zFlC3g,80
-ltq-0.1.2.dist-info/entry_points.txt,sha256=OogYaOJ_RORrWtrLlEL_gTN9Vx5tkgawl8BO7G9FKcg,38
-ltq-0.1.2.dist-info/METADATA,sha256=7Mvz05MwsRJ5ddbegQofl9iLDHgELxwY2JEsSnJGK5Q,2316
-ltq-0.1.2.dist-info/RECORD,,