taskiq-beat 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- taskiq_beat-0.1.0/LICENSE +21 -0
- taskiq_beat-0.1.0/MANIFEST.in +6 -0
- taskiq_beat-0.1.0/PKG-INFO +368 -0
- taskiq_beat-0.1.0/README.md +330 -0
- taskiq_beat-0.1.0/README.ru.md +331 -0
- taskiq_beat-0.1.0/pyproject.toml +73 -0
- taskiq_beat-0.1.0/release_guide +12 -0
- taskiq_beat-0.1.0/scripts/README.md +166 -0
- taskiq_beat-0.1.0/scripts/scheduler_load_test.py +430 -0
- taskiq_beat-0.1.0/setup.cfg +4 -0
- taskiq_beat-0.1.0/taskiq_beat/__init__.py +21 -0
- taskiq_beat-0.1.0/taskiq_beat/_version.py +3 -0
- taskiq_beat-0.1.0/taskiq_beat/app.py +88 -0
- taskiq_beat-0.1.0/taskiq_beat/config.py +28 -0
- taskiq_beat-0.1.0/taskiq_beat/engine.py +339 -0
- taskiq_beat-0.1.0/taskiq_beat/models.py +49 -0
- taskiq_beat-0.1.0/taskiq_beat/py.typed +1 -0
- taskiq_beat-0.1.0/taskiq_beat/registry.py +47 -0
- taskiq_beat-0.1.0/taskiq_beat/repositories.py +49 -0
- taskiq_beat-0.1.0/taskiq_beat/scheduler.py +79 -0
- taskiq_beat-0.1.0/taskiq_beat/triggers.py +322 -0
- taskiq_beat-0.1.0/taskiq_beat.egg-info/PKG-INFO +368 -0
- taskiq_beat-0.1.0/taskiq_beat.egg-info/SOURCES.txt +27 -0
- taskiq_beat-0.1.0/taskiq_beat.egg-info/dependency_links.txt +1 -0
- taskiq_beat-0.1.0/taskiq_beat.egg-info/requires.txt +14 -0
- taskiq_beat-0.1.0/taskiq_beat.egg-info/top_level.txt +1 -0
- taskiq_beat-0.1.0/tests/test_engine.py +160 -0
- taskiq_beat-0.1.0/tests/test_scheduler.py +102 -0
- taskiq_beat-0.1.0/tests/test_triggers.py +83 -0
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
|
@@ -0,0 +1,368 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: taskiq-beat
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: A database-backed scheduler and beat engine for Taskiq.
|
|
5
|
+
Author: xlartas
|
|
6
|
+
License-Expression: MIT
|
|
7
|
+
Project-URL: Homepage, https://github.com/Artasov/taskiq-beat
|
|
8
|
+
Project-URL: Repository, https://github.com/Artasov/taskiq-beat
|
|
9
|
+
Project-URL: Issues, https://github.com/Artasov/taskiq-beat/issues
|
|
10
|
+
Keywords: taskiq,beat,scheduler,cron,crontab,interval,sqlalchemy
|
|
11
|
+
Classifier: Development Status :: 3 - Alpha
|
|
12
|
+
Classifier: Intended Audience :: Developers
|
|
13
|
+
Classifier: Operating System :: OS Independent
|
|
14
|
+
Classifier: Programming Language :: Python :: 3
|
|
15
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
16
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
17
|
+
Classifier: Programming Language :: Python :: 3.14
|
|
18
|
+
Classifier: Programming Language :: Python :: Implementation :: CPython
|
|
19
|
+
Classifier: Typing :: Typed
|
|
20
|
+
Classifier: Topic :: Software Development :: Libraries
|
|
21
|
+
Classifier: Topic :: System :: Distributed Computing
|
|
22
|
+
Requires-Python: >=3.12
|
|
23
|
+
Description-Content-Type: text/markdown
|
|
24
|
+
License-File: LICENSE
|
|
25
|
+
Requires-Dist: sqlalchemy>=2.0.0
|
|
26
|
+
Requires-Dist: taskiq>=0.12.0
|
|
27
|
+
Provides-Extra: test
|
|
28
|
+
Requires-Dist: aiosqlite>=0.20.0; extra == "test"
|
|
29
|
+
Requires-Dist: pytest>=8.0.0; extra == "test"
|
|
30
|
+
Requires-Dist: pytest-asyncio>=1.0.0; extra == "test"
|
|
31
|
+
Provides-Extra: dev
|
|
32
|
+
Requires-Dist: aiosqlite>=0.20.0; extra == "dev"
|
|
33
|
+
Requires-Dist: build>=1.2.2; extra == "dev"
|
|
34
|
+
Requires-Dist: pytest>=8.0.0; extra == "dev"
|
|
35
|
+
Requires-Dist: pytest-asyncio>=1.0.0; extra == "dev"
|
|
36
|
+
Requires-Dist: twine>=6.1.0; extra == "dev"
|
|
37
|
+
Dynamic: license-file
|
|
38
|
+
|
|
39
|
+
<div align="center">
|
|
40
|
+
<h1>taskiq-beat</h1>
|
|
41
|
+
</div>
|
|
42
|
+
|
|
43
|
+
<div align="center">
|
|
44
|
+
<a href="./README.md">
|
|
45
|
+
<img src="https://img.shields.io/badge/English-blue?style=for-the-badge" alt="English">
|
|
46
|
+
</a>
|
|
47
|
+
<a href="./README.ru.md">
|
|
48
|
+
<img src="https://img.shields.io/badge/Русский-red?style=for-the-badge" alt="Русский">
|
|
49
|
+
</a>
|
|
50
|
+
</div>
|
|
51
|
+
|
|
52
|
+
### Scheduler for Taskiq with schedules and run history stored in the database
|
|
53
|
+
|
|
54
|
+
## Navigation
|
|
55
|
+
|
|
56
|
+
- [Quick start](#quick-start)
|
|
57
|
+
- [How to run it](#how-to-run-it)
|
|
58
|
+
- [Run with FastAPI](#run-with-fastapi)
|
|
59
|
+
- [Create jobs](#create-jobs)
|
|
60
|
+
- [Manage jobs](#manage-jobs)
|
|
61
|
+
- [Alembic](#alembic)
|
|
62
|
+
- [Default configuration](#default-configuration)
|
|
63
|
+
- [Create tables manually](#create-tables-manually)
|
|
64
|
+
- [Load testing](#load-testing)
|
|
65
|
+
|
|
66
|
+
## Installation
|
|
67
|
+
|
|
68
|
+
```bash
|
|
69
|
+
pip install taskiq-beat
|
|
70
|
+
```
|
|
71
|
+
|
|
72
|
+
> `pip install "taskiq-beat[test]"`<br>
|
|
73
|
+
> `pip install "taskiq-beat[dev]"`
|
|
74
|
+
|
|
75
|
+
`taskiq-beat` does not provide a network broker on its own. For real multi-process usage you usually also need a broker
|
|
76
|
+
backend from the Taskiq ecosystem.
|
|
77
|
+
|
|
78
|
+
## Quick start
|
|
79
|
+
|
|
80
|
+
```python
|
|
81
|
+
from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine
|
|
82
|
+
from taskiq import InMemoryBroker
|
|
83
|
+
|
|
84
|
+
from taskiq_beat import (
|
|
85
|
+
IntervalTrigger,
|
|
86
|
+
PeriodicSchedule,
|
|
87
|
+
SchedulerApp,
|
|
88
|
+
SchedulerConfig,
|
|
89
|
+
)
|
|
90
|
+
|
|
91
|
+
# SQLAlchemy engine used to connect to the database.
|
|
92
|
+
engine = create_async_engine("sqlite+aiosqlite:///scheduler.sqlite3")
|
|
93
|
+
|
|
94
|
+
# Factory that creates AsyncSession objects.
|
|
95
|
+
# Scheduler uses these sessions to work with the database.
|
|
96
|
+
session_factory = async_sessionmaker(engine, expire_on_commit=False)
|
|
97
|
+
|
|
98
|
+
# Taskiq broker for API demonstration.
|
|
99
|
+
# InMemoryBroker is not suitable for a separate worker process setup.
|
|
100
|
+
broker = InMemoryBroker()
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
@broker.task(task_name="demo.heartbeat")
|
|
104
|
+
async def heartbeat_task() -> None:
|
|
105
|
+
print("tick")
|
|
106
|
+
|
|
107
|
+
|
|
108
|
+
# Main taskiq-beat entry point.
|
|
109
|
+
scheduler_app = SchedulerApp(
|
|
110
|
+
broker=broker,
|
|
111
|
+
session_factory=session_factory,
|
|
112
|
+
config=SchedulerConfig(
|
|
113
|
+
# How often the scheduler reloads active jobs from the database.
|
|
114
|
+
sync_interval_seconds=1.0,
|
|
115
|
+
# Minimum pause between scheduler loop iterations.
|
|
116
|
+
idle_sleep_seconds=0.2,
|
|
117
|
+
# How many seconds to wait before retrying dispatch after task.kiq(...) fails.
|
|
118
|
+
dispatch_retry_seconds=5,
|
|
119
|
+
# How many jobs can be dispatched concurrently inside one batch.
|
|
120
|
+
dispatch_concurrency=32,
|
|
121
|
+
# Maximum number of due jobs taken from the heap in one batch.
|
|
122
|
+
dispatch_batch_size=256,
|
|
123
|
+
# Whether to write history rows into SchedulerRun.
|
|
124
|
+
record_runs=True,
|
|
125
|
+
# Base timezone for helper APIs if another timezone is not specified explicitly.
|
|
126
|
+
default_timezone="UTC",
|
|
127
|
+
),
|
|
128
|
+
)
|
|
129
|
+
```
|
|
130
|
+
|
|
131
|
+
This example only shows how to assemble `broker`, `session_factory`, and `scheduler_app`.
|
|
132
|
+
It does not mean tasks will already start executing in a separate process.
|
|
133
|
+
|
|
134
|
+
## How to run it
|
|
135
|
+
|
|
136
|
+
The example above only creates Python objects. That is still not enough for the system to start working by itself.
|
|
137
|
+
|
|
138
|
+
What a real application needs:
|
|
139
|
+
|
|
140
|
+
1. Start a process with `scheduler_app.start()`.
|
|
141
|
+
2. Start a Taskiq worker process that will consume tasks from the broker.
|
|
142
|
+
3. Create jobs through `scheduler_app.create_scheduler(...).schedule(session)`.
|
|
143
|
+
|
|
144
|
+
Important:
|
|
145
|
+
|
|
146
|
+
- `InMemoryBroker` in the examples is only suitable for API demos, tests, and local experiments inside one process.
|
|
147
|
+
- If you want a separate worker in another terminal or service, you need a real broker backend.
|
|
148
|
+
- A worker is usually started in a separate terminal or a separate service/process.
|
|
149
|
+
- The command `python -m taskiq worker app.main:broker` means: import the `broker` object from the `app.main` module and
|
|
150
|
+
listen to its queue.
|
|
151
|
+
|
|
152
|
+
Typical scenario:
|
|
153
|
+
|
|
154
|
+
- terminal 1: FastAPI with `scheduler_app.start()` inside lifespan
|
|
155
|
+
- terminal 2: Taskiq worker
|
|
156
|
+
- terminal 3: API requests or a separate script that creates jobs
|
|
157
|
+
|
|
158
|
+
The most common FastAPI setup looks like this:
|
|
159
|
+
|
|
160
|
+
1. `app/main.py` contains `broker`, `scheduler_app`, `app = FastAPI(...)`, and task functions.
|
|
161
|
+
2. In the first terminal, start FastAPI:
|
|
162
|
+
|
|
163
|
+
```bash
|
|
164
|
+
uvicorn app.main:app --reload
|
|
165
|
+
```
|
|
166
|
+
|
|
167
|
+
3. In the second terminal, start the worker:
|
|
168
|
+
|
|
169
|
+
```bash
|
|
170
|
+
python -m taskiq worker app.main:broker
|
|
171
|
+
```
|
|
172
|
+
|
|
173
|
+
4. After that, create jobs through an API, script, or Python shell.
|
|
174
|
+
|
|
175
|
+
What happens after startup:
|
|
176
|
+
|
|
177
|
+
- the scheduler watches the schedule and calls `task.kiq(...)` at the required moment
|
|
178
|
+
- the broker publishes the task
|
|
179
|
+
- the worker takes the task and executes it
|
|
180
|
+
|
|
181
|
+
If the worker is not running, the scheduler can publish tasks into the broker, but nothing will execute them.
|
|
182
|
+
|
|
183
|
+
If you want to run the scheduler separately instead of inside FastAPI, that is also possible.
|
|
184
|
+
Then one process keeps `scheduler_app.start()`, and another process still remains the Taskiq worker.
|
|
185
|
+
|
|
186
|
+
## Run with FastAPI
|
|
187
|
+
|
|
188
|
+
```python
|
|
189
|
+
from contextlib import asynccontextmanager
|
|
190
|
+
from fastapi import FastAPI
|
|
191
|
+
from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine
|
|
192
|
+
from taskiq import InMemoryBroker
|
|
193
|
+
|
|
194
|
+
from taskiq_beat import SchedulerApp, SchedulerConfig
|
|
195
|
+
|
|
196
|
+
engine = create_async_engine("sqlite+aiosqlite:///scheduler.sqlite3")
|
|
197
|
+
session_factory = async_sessionmaker(engine, expire_on_commit=False)
|
|
198
|
+
broker = InMemoryBroker()
|
|
199
|
+
scheduler_app = SchedulerApp(
|
|
200
|
+
broker=broker,
|
|
201
|
+
session_factory=session_factory,
|
|
202
|
+
config=SchedulerConfig(), # Configuration was shown above
|
|
203
|
+
)
|
|
204
|
+
|
|
205
|
+
|
|
206
|
+
@asynccontextmanager
|
|
207
|
+
async def lifespan(app: FastAPI):
|
|
208
|
+
await broker.startup()
|
|
209
|
+
await scheduler_app.start()
|
|
210
|
+
try:
|
|
211
|
+
yield
|
|
212
|
+
finally:
|
|
213
|
+
await scheduler_app.stop()
|
|
214
|
+
await broker.shutdown()
|
|
215
|
+
|
|
216
|
+
|
|
217
|
+
app = FastAPI(lifespan=lifespan)
|
|
218
|
+
```
|
|
219
|
+
|
|
220
|
+
### Scheduler can run:
|
|
221
|
+
|
|
222
|
+
- #### Inside FastAPI
|
|
223
|
+
As in the example above
|
|
224
|
+
|
|
225
|
+
- #### Separately
|
|
226
|
+
- terminal/container 1:
|
|
227
|
+
`uvicorn app.main:app --reload`
|
|
228
|
+
- terminal/container 2:
|
|
229
|
+
`python -m app.run_scheduler`
|
|
230
|
+
- terminal/container 3:
|
|
231
|
+
`python -m taskiq worker app.broker:broker`
|
|
232
|
+
|
|
233
|
+
Both approaches are valid. The main rule is: do not run several scheduler processes against the same database.
|
|
234
|
+
|
|
235
|
+
## Create jobs
|
|
236
|
+
|
|
237
|
+
### Interval job
|
|
238
|
+
|
|
239
|
+
```python
|
|
240
|
+
from taskiq_beat import IntervalTrigger, PeriodicSchedule
|
|
241
|
+
|
|
242
|
+
async with session_factory() as session:
|
|
243
|
+
scheduler = scheduler_app.create_scheduler(
|
|
244
|
+
task=heartbeat_task,
|
|
245
|
+
trigger=PeriodicSchedule(interval=IntervalTrigger(seconds=5)),
|
|
246
|
+
name="Heartbeat every 5 seconds",
|
|
247
|
+
)
|
|
248
|
+
job = await scheduler.schedule(session)
|
|
249
|
+
|
|
250
|
+
print(job.id) # Example: "6c6342d8-6d74-4d16-8f7a-5d4f1b3a0b13"
|
|
251
|
+
```
|
|
252
|
+
|
|
253
|
+
`await scheduler.schedule(session)` returns `SchedulerJob`.
|
|
254
|
+
This is a SQLAlchemy model instance with fields such as:
|
|
255
|
+
|
|
256
|
+
- `job.id`
|
|
257
|
+
- `job.task_name`
|
|
258
|
+
- `job.kind`
|
|
259
|
+
- `job.strategy`
|
|
260
|
+
- `job.next_run_at`
|
|
261
|
+
- `job.is_enabled`
|
|
262
|
+
|
|
263
|
+
Most of the time you use `job.id` later for pause, resume, run-now, and delete.
|
|
264
|
+
|
|
265
|
+
### One-off job
|
|
266
|
+
|
|
267
|
+
```python
|
|
268
|
+
from datetime import UTC, datetime, timedelta
|
|
269
|
+
|
|
270
|
+
from taskiq_beat import OneOffSchedule
|
|
271
|
+
|
|
272
|
+
async with session_factory() as session:
|
|
273
|
+
scheduler = scheduler_app.create_scheduler(
|
|
274
|
+
task=heartbeat_task,
|
|
275
|
+
trigger=OneOffSchedule(run_at=datetime.now(UTC) + timedelta(minutes=10)),
|
|
276
|
+
name="Delayed heartbeat",
|
|
277
|
+
)
|
|
278
|
+
job = await scheduler.schedule(session)
|
|
279
|
+
```
|
|
280
|
+
|
|
281
|
+
### Crontab job
|
|
282
|
+
|
|
283
|
+
```python
|
|
284
|
+
from taskiq_beat import CrontabTrigger, PeriodicSchedule
|
|
285
|
+
|
|
286
|
+
async with session_factory() as session:
|
|
287
|
+
scheduler = scheduler_app.create_scheduler(
|
|
288
|
+
task=heartbeat_task,
|
|
289
|
+
trigger=PeriodicSchedule(
|
|
290
|
+
crontab=CrontabTrigger(second="0", minute="*/5", hour="*"),
|
|
291
|
+
),
|
|
292
|
+
name="Every 5 minutes",
|
|
293
|
+
)
|
|
294
|
+
job = await scheduler.schedule(session)
|
|
295
|
+
```
|
|
296
|
+
|
|
297
|
+
## Manage jobs
|
|
298
|
+
|
|
299
|
+
What you need:
|
|
300
|
+
|
|
301
|
+
- `session_factory()` to open an `AsyncSession`
|
|
302
|
+
- `job.id`, which is usually taken from the result of `await scheduler.schedule(session)`
|
|
303
|
+
|
|
304
|
+
```python
|
|
305
|
+
async with session_factory() as session:
|
|
306
|
+
await scheduler_app.pause(session, job.id)
|
|
307
|
+
await scheduler_app.resume(session, job.id)
|
|
308
|
+
await scheduler_app.run_now(session, job.id)
|
|
309
|
+
await scheduler_app.delete(session, job.id)
|
|
310
|
+
```
|
|
311
|
+
|
|
312
|
+
## Alembic
|
|
313
|
+
|
|
314
|
+
To make Alembic detect scheduler tables, add `SchedulerBase.metadata` to `target_metadata`.
|
|
315
|
+
|
|
316
|
+
`alembic/env.py`:
|
|
317
|
+
|
|
318
|
+
```python
|
|
319
|
+
from myapp.db import Base
|
|
320
|
+
from taskiq_beat import SchedulerBase
|
|
321
|
+
|
|
322
|
+
target_metadata = [
|
|
323
|
+
# Your main ORM metadata.
|
|
324
|
+
Base.metadata,
|
|
325
|
+
# Metadata from taskiq-beat.
|
|
326
|
+
SchedulerBase.metadata,
|
|
327
|
+
]
|
|
328
|
+
```
|
|
329
|
+
|
|
330
|
+
Commands you can copy directly:
|
|
331
|
+
|
|
332
|
+
```bash
|
|
333
|
+
pip install alembic
|
|
334
|
+
alembic init alembic
|
|
335
|
+
alembic revision --autogenerate -m "add taskiq beat tables"
|
|
336
|
+
alembic upgrade head
|
|
337
|
+
```
|
|
338
|
+
|
|
339
|
+
Alembic should detect:
|
|
340
|
+
|
|
341
|
+
- `scheduler_job`
|
|
342
|
+
- `scheduler_run`
|
|
343
|
+
|
|
344
|
+
## Create tables manually
|
|
345
|
+
|
|
346
|
+
Use this only for local runs, tests, and quick experiments.
|
|
347
|
+
|
|
348
|
+
```python
|
|
349
|
+
from taskiq_beat import SchedulerBase
|
|
350
|
+
|
|
351
|
+
async with engine.begin() as connection:
|
|
352
|
+
await connection.run_sync(SchedulerBase.metadata.create_all)
|
|
353
|
+
```
|
|
354
|
+
|
|
355
|
+
These tables will be created:
|
|
356
|
+
|
|
357
|
+
- `scheduler_job`
|
|
358
|
+
- `scheduler_run`
|
|
359
|
+
|
|
360
|
+
## Load testing
|
|
361
|
+
|
|
362
|
+
See [scripts/README.md](./scripts/README.md).
|
|
363
|
+
|
|
364
|
+
## Testing
|
|
365
|
+
|
|
366
|
+
```bash
|
|
367
|
+
pytest
|
|
368
|
+
```
|