oban 0.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- oban/__init__.py +22 -0
- oban/__main__.py +12 -0
- oban/_backoff.py +87 -0
- oban/_config.py +171 -0
- oban/_executor.py +188 -0
- oban/_extensions.py +16 -0
- oban/_leader.py +118 -0
- oban/_lifeline.py +77 -0
- oban/_notifier.py +324 -0
- oban/_producer.py +334 -0
- oban/_pruner.py +93 -0
- oban/_query.py +409 -0
- oban/_recorded.py +34 -0
- oban/_refresher.py +88 -0
- oban/_scheduler.py +359 -0
- oban/_stager.py +115 -0
- oban/_worker.py +78 -0
- oban/cli.py +436 -0
- oban/decorators.py +218 -0
- oban/job.py +315 -0
- oban/oban.py +1084 -0
- oban/py.typed +0 -0
- oban/queries/__init__.py +0 -0
- oban/queries/ack_job.sql +11 -0
- oban/queries/all_jobs.sql +25 -0
- oban/queries/cancel_many_jobs.sql +37 -0
- oban/queries/cleanup_expired_leaders.sql +4 -0
- oban/queries/cleanup_expired_producers.sql +2 -0
- oban/queries/delete_many_jobs.sql +5 -0
- oban/queries/delete_producer.sql +2 -0
- oban/queries/elect_leader.sql +10 -0
- oban/queries/fetch_jobs.sql +44 -0
- oban/queries/get_job.sql +23 -0
- oban/queries/insert_job.sql +28 -0
- oban/queries/insert_producer.sql +2 -0
- oban/queries/install.sql +113 -0
- oban/queries/prune_jobs.sql +18 -0
- oban/queries/reelect_leader.sql +12 -0
- oban/queries/refresh_producers.sql +3 -0
- oban/queries/rescue_jobs.sql +18 -0
- oban/queries/reset.sql +5 -0
- oban/queries/resign_leader.sql +4 -0
- oban/queries/retry_many_jobs.sql +13 -0
- oban/queries/stage_jobs.sql +34 -0
- oban/queries/uninstall.sql +4 -0
- oban/queries/update_job.sql +54 -0
- oban/queries/update_producer.sql +3 -0
- oban/queries/verify_structure.sql +9 -0
- oban/schema.py +115 -0
- oban/telemetry/__init__.py +10 -0
- oban/telemetry/core.py +170 -0
- oban/telemetry/logger.py +147 -0
- oban/testing.py +439 -0
- oban-0.5.0.dist-info/METADATA +290 -0
- oban-0.5.0.dist-info/RECORD +59 -0
- oban-0.5.0.dist-info/WHEEL +5 -0
- oban-0.5.0.dist-info/entry_points.txt +2 -0
- oban-0.5.0.dist-info/licenses/LICENSE.txt +201 -0
- oban-0.5.0.dist-info/top_level.txt +1 -0
oban/cli.py
ADDED
|
@@ -0,0 +1,436 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
import importlib
|
|
5
|
+
import logging
|
|
6
|
+
import os
|
|
7
|
+
import signal
|
|
8
|
+
import socket
|
|
9
|
+
import subprocess
|
|
10
|
+
import sys
|
|
11
|
+
from contextlib import asynccontextmanager
|
|
12
|
+
from pathlib import Path
|
|
13
|
+
from typing import Any, AsyncIterator
|
|
14
|
+
|
|
15
|
+
import click
|
|
16
|
+
import orjson
|
|
17
|
+
from psycopg.types.json import set_json_dumps, set_json_loads
|
|
18
|
+
from psycopg_pool import AsyncConnectionPool
|
|
19
|
+
|
|
20
|
+
from oban import __version__
|
|
21
|
+
from oban._config import Config
|
|
22
|
+
from oban.schema import (
|
|
23
|
+
install as install_schema,
|
|
24
|
+
uninstall as uninstall_schema,
|
|
25
|
+
)
|
|
26
|
+
from oban.telemetry import logger as telemetry_logger
|
|
27
|
+
|
|
28
|
+
try:
|
|
29
|
+
from uvloop import run as asyncio_run
|
|
30
|
+
except ImportError:
|
|
31
|
+
from asyncio import run as asyncio_run
|
|
32
|
+
|
|
33
|
+
# Configure root logging once at import time so all CLI output carries a
# timestamp and level; `start` later adjusts the level from --log-level.
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s [%(levelname)s] %(name)s: %(message)s",
    datefmt="%Y-%m-%d %H:%M:%S",
)

# Module-level logger for all CLI commands.
logger = logging.getLogger("oban.cli")
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
def _file_to_module(file_path: str) -> str | None:
|
|
43
|
+
root = Path(os.getcwd())
|
|
44
|
+
path = Path(file_path)
|
|
45
|
+
|
|
46
|
+
try:
|
|
47
|
+
rel_path = path.relative_to(root)
|
|
48
|
+
except ValueError:
|
|
49
|
+
return None
|
|
50
|
+
|
|
51
|
+
parts = list(rel_path.parts)
|
|
52
|
+
|
|
53
|
+
if parts[-1].endswith(".py"):
|
|
54
|
+
parts[-1] = parts[-1][:-3]
|
|
55
|
+
|
|
56
|
+
if parts[-1] == "__init__":
|
|
57
|
+
parts.pop()
|
|
58
|
+
|
|
59
|
+
return ".".join(parts)
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
def _import_cron_modules(module_paths: list[str]) -> int:
    """Import every module path, logging failures, and return the success count."""

    imported = 0

    for path in module_paths:
        try:
            importlib.import_module(path)
        except Exception as error:
            # A broken module is logged and skipped; the rest still load.
            logger.error(f"Failed to import cron module at {path}: {error}")
        else:
            imported += 1

    return imported
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
def _import_cron_paths(paths: list[str]) -> list[str]:
    """Find Python files under *paths* that mention cron decorators.

    Shells out to ``grep -rl`` for each directory, deduplicates the matching
    files, and converts each one into a dotted module path relative to the
    current working directory.
    """
    root = Path(os.getcwd())
    grep = ["grep", "-rl", "--include=*.py", r"@worker.*cron\|@job.*cron"]

    matches: list[str] = []
    for path in paths:
        target = str(root / path)

        outcome = subprocess.run(
            [*grep, target],
            capture_output=True,
            text=True,
            check=False,
        )

        # grep exits 0 only when at least one file matched the pattern.
        if outcome.returncode == 0:
            matches.extend(
                line.strip()
                for line in outcome.stdout.strip().split("\n")
                if line.strip()
            )

    # Resolve paths so symlinked/relative duplicates collapse to one entry.
    unique_files = {str(Path(file).resolve()) for file in matches}

    return [mod for file in unique_files if (mod := _file_to_module(file))]
|
|
99
|
+
|
|
100
|
+
|
|
101
|
+
def _split_csv(value: str | None) -> list[str] | None:
|
|
102
|
+
return [item.strip() for item in value.split(",")] if value else None
|
|
103
|
+
|
|
104
|
+
|
|
105
|
+
def _find_and_load_cron_modules(
    cron_modules: list[str] | None = None, cron_paths: list[str] | None = None
) -> None:
    """Resolve cron worker modules (explicit, by path, or auto) and import them."""
    if cron_modules:
        logger.info(f"Importing {len(cron_modules)} cron modules...")
    else:
        # No explicit modules: discover them by scanning directories.
        if cron_paths:
            logger.info(f"Discovering cron workers in {', '.join(cron_paths)}...")
            search_paths = cron_paths
        else:
            logger.info("Auto-discovering cron workers in current directory...")
            search_paths = [os.getcwd()]

        cron_modules = _import_cron_paths(search_paths)

    import_count = _import_cron_modules(cron_modules)

    logger.info(
        f"Successfully imported {import_count}/{len(cron_modules)} cron modules"
    )
|
|
125
|
+
|
|
126
|
+
|
|
127
|
+
def print_banner(version: str) -> None:
    """Print the ANSI-colored Oban startup banner with the given version."""
    # NOTE: the bracket sequences below are raw ANSI truecolor escape codes
    # (fading teal gradient); [0m resets the terminal color.
    banner = f"""
[38;2;153;183;183m ██████╗ ██████╗ ████╗ ███╗ ██╗
[38;2;143;175;175m██╔═══██╗ ██╔══██╗ ██╔═██╗ ████╗ ██║
[38;2;133;167;167m██║ ██║ ██████╔╝ ████████╗ ██╔██╗ ██║
[38;2;123;159;159m██║ ██║ ██╔══██╗ ██╔═══██║ ██║╚██╗██║
[38;2;113;151;151m ██████╔╝ ██████╔╝ ██║ ██║ ██║ ╚████║
[38;2;103;143;143m ╚═════╝ ╚═════╝ ╚═╝ ╚═╝ ╚═╝ ╚═══╝
[0m
Job orchestration framework for Python, backed by PostgreSQL

v{version} | [38;2;100;149;237mhttps://oban.pro[0m
"""
    print(banner)
|
|
142
|
+
|
|
143
|
+
|
|
144
|
+
@asynccontextmanager
async def schema_pool(dsn: str) -> AsyncIterator[AsyncConnectionPool]:
    """Yield a single-connection pool for schema management, closing it on exit.

    Raises:
        click.UsageError: when no DSN was supplied.
    """
    if not dsn:
        raise click.UsageError("--dsn is required (or set OBAN_DSN)")

    # One connection is plenty for one-off DDL statements.
    pool = await Config(dsn=dsn, pool_min_size=1, pool_max_size=1).create_pool()

    try:
        yield pool
    finally:
        await pool.close()
|
|
156
|
+
|
|
157
|
+
|
|
158
|
+
async def _start_pool(conf: Config) -> AsyncConnectionPool:
    """Create the connection pool from *conf*, exiting the process on failure."""
    try:
        pool = await conf.create_pool()
    except Exception as error:
        logger.error(f"Failed to connect to database: {error!r}")
        sys.exit(1)
    else:
        logger.info(
            f"Connected to database (pool: {conf.pool_min_size}-{conf.pool_max_size})"
        )
        return pool
|
|
168
|
+
|
|
169
|
+
|
|
170
|
+
def handle_signals() -> asyncio.Event:
    """Install SIGTERM/SIGINT handlers and return an event set on shutdown.

    SIGTERM and the first SIGINT request a graceful shutdown by setting the
    returned event; a second SIGINT forces the process to exit immediately.
    """
    shutdown_event = asyncio.Event()
    sigint_count = 0

    def on_signal(signum: int) -> None:
        nonlocal sigint_count

        shutdown_event.set()

        if signum == signal.SIGTERM:
            logger.info("Received SIGTERM, initiating graceful shutdown...")
        elif signum == signal.SIGINT:
            sigint_count += 1

            if sigint_count > 1:
                logger.warning("Forcing exit...")
                sys.exit(1)

            logger.info("Received SIGINT, initiating graceful shutdown...")
            logger.info("Send another SIGINT to force exit")

    loop = asyncio.get_running_loop()
    for signum in (signal.SIGTERM, signal.SIGINT):
        loop.add_signal_handler(signum, on_signal, signum)

    return shutdown_event
|
|
196
|
+
|
|
197
|
+
|
|
198
|
+
@click.group(context_settings={"help_option_names": ["-h", "--help"]})
@click.version_option(package_name="oban")
def main() -> None:
    """Oban - Job orchestration framework for Python, backed by PostgreSQL."""
|
|
207
|
+
|
|
208
|
+
|
|
209
|
+
@main.command()
def version() -> None:
    # Plain `oban version` echo; complements the `--version` option that
    # @click.version_option installs on the group.
    click.echo(f"oban {__version__}")
|
|
212
|
+
|
|
213
|
+
|
|
214
|
+
@main.command()
@click.option(
    "--config",
    type=click.Path(exists=True, dir_okay=False, path_type=str),
    help="Path to TOML configuration file (default: searches for oban.toml)",
)
@click.option(
    "--dsn",
    envvar="OBAN_DSN",
    help="PostgreSQL connection string",
)
@click.option(
    "--prefix",
    envvar="OBAN_PREFIX",
    help="PostgreSQL schema name (default: public)",
)
def install(config: str | None, dsn: str | None, prefix: str | None) -> None:
    """Install the Oban database schema."""

    async def run() -> None:
        # CLI flags/env vars override values from the TOML config file.
        conf = _load_conf(config, {"dsn": dsn, "prefix": prefix})
        schema_prefix = conf.prefix or "public"

        logger.info(f"Installing Oban schema in '{schema_prefix}'...")

        try:
            # schema_pool provides a short-lived single-connection pool.
            async with schema_pool(conf.dsn) as pool:
                await install_schema(pool, prefix=schema_prefix)
                logger.info("Schema installed successfully")
        except Exception as error:
            logger.error(f"Failed to install schema: {error!r}", exc_info=True)
            sys.exit(1)

    asyncio_run(run())
|
|
248
|
+
|
|
249
|
+
|
|
250
|
+
@main.command()
@click.option(
    "--config",
    type=click.Path(exists=True, dir_okay=False, path_type=str),
    help="Path to TOML configuration file (default: searches for oban.toml)",
)
@click.option(
    "--dsn",
    envvar="OBAN_DSN",
    help="PostgreSQL connection string",
)
@click.option(
    "--prefix",
    envvar="OBAN_PREFIX",
    help="PostgreSQL schema name (default: public)",
)
def uninstall(config: str | None, dsn: str | None, prefix: str | None) -> None:
    """Uninstall the Oban database schema."""

    async def run() -> None:
        # CLI flags/env vars override values from the TOML config file.
        conf = _load_conf(config, {"dsn": dsn, "prefix": prefix})
        schema_prefix = conf.prefix or "public"

        logger.info(f"Uninstalling Oban schema from '{schema_prefix}' schema...")

        try:
            async with schema_pool(conf.dsn) as pool:
                await uninstall_schema(pool, prefix=schema_prefix)
                logger.info("Schema uninstalled successfully")
        except Exception as error:
            # Consistency fix: use the same exception name and `!r` formatting
            # as `install` (was `as e` / `{e}`), so log output is uniform.
            logger.error(f"Failed to uninstall schema: {error!r}", exc_info=True)
            sys.exit(1)

    asyncio_run(run())
|
|
284
|
+
|
|
285
|
+
|
|
286
|
+
@main.command()
@click.option(
    "--config",
    type=click.Path(exists=True, dir_okay=False, path_type=str),
    help="Path to TOML configuration file (default: searches for oban.toml)",
)
@click.option(
    "--dsn",
    envvar="OBAN_DSN",
    help="PostgreSQL connection string",
)
@click.option(
    "--queues",
    envvar="OBAN_QUEUES",
    help="Comma-separated queue:limit pairs (e.g., 'default:10,mailers:5')",
)
@click.option(
    "--prefix",
    envvar="OBAN_PREFIX",
    help="PostgreSQL schema name (default: public)",
)
@click.option(
    "--node",
    envvar="OBAN_NODE",
    help="Node identifier (default: hostname)",
)
@click.option(
    "--pool-min-size",
    envvar="OBAN_POOL_MIN_SIZE",
    type=int,
    help="Minimum connection pool size (default: 1)",
)
@click.option(
    "--pool-max-size",
    envvar="OBAN_POOL_MAX_SIZE",
    type=int,
    help="Maximum connection pool size (default: 10)",
)
@click.option(
    "--log-level",
    type=click.Choice(
        ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"], case_sensitive=False
    ),
    default="INFO",
    help="Logging level (default: INFO)",
)
@click.option(
    "--cron-modules",
    envvar="OBAN_CRON_MODULES",
    help="Comma-separated list of module paths with cron workers (e.g., 'myapp.workers,myapp.jobs')",
)
@click.option(
    "--cron-paths",
    envvar="OBAN_CRON_PATHS",
    help="Comma-separated list of directories to search for cron workers (e.g., 'myapp/workers')",
)
@click.option(
    "--dry-run",
    is_flag=True,
    help="Validate configuration and load cron modules without starting Oban",
)
def start(
    log_level: str,
    config: str | None,
    cron_modules: str | None,
    cron_paths: str | None,
    dry_run: bool,
    **params: Any,
) -> None:
    """Start the Oban worker process.

    This command starts an Oban instance that processes jobs from the configured queues.
    The process will run until terminated by a signal.

    Signal handling:
    - SIGTERM: Graceful shutdown (finish running jobs, then exit)
    - SIGINT (Ctrl+C): Graceful shutdown on first signal, force exit on second

    Examples:

        # Start with queues
        oban start --dsn postgresql://localhost/mydb --queues default:10,mailers:5

        # Use environment variables
        export OBAN_DSN=postgresql://localhost/mydb
        export OBAN_QUEUES=default:10,mailers:5
        oban start
    """
    logging.getLogger().setLevel(getattr(logging, log_level.upper()))

    # Route psycopg's JSON (de)serialization through orjson for speed.
    set_json_dumps(orjson.dumps)
    set_json_loads(orjson.loads)

    # Remaining click options (dsn, prefix, node, pool sizes, queues) arrive
    # via **params and are merged over the TOML config.
    conf = _load_conf(config, params)
    node = conf.node or socket.gethostname()

    async def run() -> None:
        print_banner(__version__)

        logger.info(f"Starting Oban v{__version__} on node {node}...")

        # Import cron worker modules before starting so their schedules are
        # registered by the @worker/@job decorators at import time.
        _find_and_load_cron_modules(
            cron_modules=_split_csv(cron_modules),
            cron_paths=_split_csv(cron_paths),
        )

        # Dry run stops after config validation and cron module loading.
        if dry_run:
            logger.info("Dry run complete-configuration is valid!")
            sys.exit(0)

        pool = await _start_pool(conf)
        oban = await conf.create_oban(pool)

        telemetry_logger.attach()
        # Register signal handlers before entering the oban context so a
        # signal during startup still triggers a graceful shutdown.
        shutdown_event = handle_signals()

        try:
            async with oban:
                logger.info("Oban started, press Ctrl+C to stop")

                # Block here until SIGTERM/SIGINT sets the event.
                await shutdown_event.wait()

                logger.info("Shutting down gracefully...")
        except Exception as error:
            logger.error(f"Error during operation: {error!r}", exc_info=True)
            sys.exit(1)
        finally:
            # Always detach telemetry and close the pool, even on failure.
            telemetry_logger.detach()
            await pool.close()
            logger.info("Shutdown complete")

    asyncio_run(run())
|
|
418
|
+
|
|
419
|
+
|
|
420
|
+
def _load_conf(conf_path: str | None, params: dict[str, Any]) -> Config:
    """Load a Config from an optional TOML file plus CLI/env overrides.

    Raises:
        click.UsageError: when the config file is missing or no DSN is set.
    """
    # Defensive re-check; click.Path(exists=True) already validates this when
    # the value comes from the command line.
    if conf_path and not Path(conf_path).exists():
        raise click.UsageError(f"--config file '{conf_path}' doesn't exist")

    # NOTE: mutates the caller's dict — pops the raw "queues" string and
    # replaces it with the parsed queue:limit mapping.
    if queues := params.pop("queues", None):
        params["queues"] = Config._parse_queues(queues)

    conf = Config.load(conf_path, **params)

    if not conf.dsn:
        raise click.UsageError("--dsn, OBAN_DSN, or dsn in oban.toml required")

    return conf
|
|
433
|
+
|
|
434
|
+
|
|
435
|
+
# Allow running this module directly (e.g. `python -m oban.cli`).
if __name__ == "__main__":
    main()
|
oban/decorators.py
ADDED
|
@@ -0,0 +1,218 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Decorators for creating Oban workers and jobs.
|
|
3
|
+
|
|
4
|
+
This module provides two decorators for making your code enqueueable:
|
|
5
|
+
|
|
6
|
+
- `@worker` For classes with a `process` method
|
|
7
|
+
- `@job` For wrapping functions as jobs
|
|
8
|
+
"""
|
|
9
|
+
|
|
10
|
+
import inspect
|
|
11
|
+
from functools import wraps
|
|
12
|
+
from typing import Any, Callable
|
|
13
|
+
|
|
14
|
+
from ._extensions import use_ext
|
|
15
|
+
from ._worker import register_worker, worker_name
|
|
16
|
+
from ._scheduler import register_scheduled
|
|
17
|
+
from .job import Job, Result
|
|
18
|
+
|
|
19
|
+
# Keyword fields accepted by Job.new(): every Job dataclass field except the
# catch-all "extra", plus the "schedule_in" scheduling convenience option.
JOB_FIELDS = set(Job.__dataclass_fields__.keys()) - {"extra"} | {"schedule_in"}
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def worker(*, oban: str = "oban", cron: str | dict | None = None, **overrides):
    """Decorate a class to make it a viable worker.

    The decorator adds worker functionality to a class, including job creation
    and enqueueing methods. The decorated class must implement a `process` method.

    For simpler function-based jobs, consider using @job instead.

    Args:
        oban: Name of the Oban instance to use (default: "oban")
        cron: Optional cron configuration for periodic execution. Can be:
            - A string expression (e.g., "0 0 \\* \\* \\*" or "@daily")
            - A dict with "expr" and optional "timezone" keys (timezone as string)
        **overrides: Configuration options for the worker (queue, priority, etc.)

    Returns:
        A decorator function that can be applied to worker classes

    Example:
        >>> from oban import Oban, worker
        >>>
        >>> # Create an Oban instance with a specific name
        >>> oban_instance = Oban(name="oban", queues={"default": 10, "mailers": 5})
        >>>
        >>> @worker(queue="mailers", priority=1)
        ... class EmailWorker:
        ...     async def process(self, job):
        ...         print(f"Sending email: {job.args}")
        ...         return None
        >>>
        >>> # Create a job without enqueueing
        >>> job = EmailWorker.new({"to": "user@example.com", "subject": "Hello"})
        >>> print(job.queue) # "mailers"
        >>> print(job.priority) # 1
        >>>
        >>> # Create and enqueue a job
        >>> job = EmailWorker.enqueue(
        ...     {"to": "admin@example.com", "subject": "Alert"},
        ...     priority=5 # Override default priority
        ... )
        >>> print(job.priority) # 5
        >>>
        >>> # Schedule a job to run in 5 minutes using a timedelta
        >>> from datetime import timedelta
        >>> job = EmailWorker.enqueue(..., schedule_in=timedelta(minutes=5))
        >>>
        >>> # Schedule a job to run in 60 seconds
        >>> job = EmailWorker.enqueue(...,schedule_in=60)
        >>>
        >>> # Periodic worker that runs daily at midnight
        >>> @worker(queue="cleanup", cron="@daily")
        ... class DailyCleanup:
        ...     async def process(self, job):
        ...         print("Running daily cleanup")
        ...         return None
        >>>
        >>> # Periodic worker with timezone
        >>> @worker(queue="reports", cron={"expr": "0 9 \\* \\* MON-FRI", "timezone": "America/New_York"})
        ... class BusinessHoursReport:
        ...     async def process(self, job):
        ...         print("Running during NY business hours")
        ...         return None
        >>>
        >>> # Workers can also be created without args
        >>> job = DailyCleanup.new() # args defaults to {}
        >>>
        >>> # Custom backoff for retries
        >>> @worker(queue="default")
        ... class CustomBackoffWorker:
        ...     async def process(self, job):
        ...         return None
        ...
        ...     def backoff(self, job):
        ...         # Simple linear backoff at 2x the attempt number
        ...         return 2 * job.attempt

    Note:
        The worker class must implement a ``process(self, job: Job) -> Result[Any]`` method.
        If not implemented, a NotImplementedError will be raised when called.

        Optionally implement a ``backoff(self, job: Job) -> int`` method to customize
        retry delays. If not provided, uses Oban's default jittery clamped backoff.
    """

    def decorate(cls: type) -> type:
        # Install a failing stub so a worker that forgot `process` raises a
        # clear NotImplementedError at execution time, not an AttributeError.
        if not hasattr(cls, "process"):

            async def process(self, job: Job) -> Result[Any]:
                raise NotImplementedError("Worker must implement process method")

            setattr(cls, "process", process)

        @classmethod
        def new(cls, args: dict[str, Any] | None = None, /, **params) -> Job:
            # Per-call params take precedence over decorator-level defaults.
            merged = {**cls._opts, **params}
            # Anything that is not a recognized Job field is collected into
            # the job's "extra" payload instead of being rejected.
            extras = {
                key: merged.pop(key) for key in list(merged) if key not in JOB_FIELDS
            }

            if extras:
                merged["extra"] = extras

            return Job.new(worker=worker_name(cls), args=args or {}, **merged)

        @classmethod
        async def enqueue(
            cls, args: dict[str, Any] | None = None, /, **overrides
        ) -> Job:
            # Imported lazily to avoid a circular import with oban.oban.
            from .oban import get_instance

            job = cls.new(args, **overrides)

            return await get_instance(cls._oban_name).enqueue(job)

        # Attach decorator state and the generated classmethods to the class.
        setattr(cls, "_opts", overrides)
        setattr(cls, "_oban_name", oban)
        setattr(cls, "new", new)
        setattr(cls, "enqueue", enqueue)

        register_worker(cls)

        # Extension hook; the provided callback is a no-op default.
        use_ext("worker.after_register", lambda _cls: None, cls)

        if cron:
            register_scheduled(cron, cls)

        return cls

    return decorate
|
|
151
|
+
|
|
152
|
+
|
|
153
|
+
def job(*, oban: str = "oban", cron: str | dict | None = None, **overrides):
    """Decorate a function to make it an Oban job.

    The decorated function's signature is preserved for new() and enqueue().

    Use @job for simple function-based tasks where you don't need access to
    job metadata such as the attempt, past errors.

    Args:
        oban: Name of the Oban instance to use (default: "oban")
        cron: Optional cron configuration for periodic execution. Can be:
            - A string expression (e.g., "0 0 \\* \\* \\*" or "@daily")
            - A dict with "expr" and optional "timezone" keys (timezone as string)
        **overrides: Configuration options (queue, priority, etc.)

    Example:
        >>> from oban import job
        >>>
        >>> @job(queue="mailers", priority=1)
        ... def send_email(to: str, subject: str, body: str):
        ...     print(f"Sending to {to}: {subject}")
        >>>
        >>> send_email.enqueue("user@example.com", "Hello", "World")
        >>>
        >>> # Periodic job that runs weekly at midnight
        >>> @job(queue="reports", cron="@weekly")
        ... def generate_weekly_report():
        ...     print("Generating weekly report")
        ...     return {"status": "complete"}
    """

    def decorate(func: Callable[..., Any]) -> type:
        sig = inspect.signature(func)

        class FunctionWorker:
            async def process(self, job: Job):
                # Fix: previously an `async def` decorated with @job returned
                # an un-awaited coroutine from process. Await awaitable
                # results so sync and async functions behave the same.
                result = func(**job.args)

                if inspect.isawaitable(result):
                    result = await result

                return result

        # Impersonate the wrapped function so worker registration and
        # debugging show the original name, module, and docs.
        FunctionWorker.__name__ = func.__name__  # type: ignore[attr-defined]
        FunctionWorker.__module__ = func.__module__  # type: ignore[attr-defined]
        FunctionWorker.__qualname__ = func.__qualname__  # type: ignore[attr-defined]
        FunctionWorker.__doc__ = func.__doc__

        worker_cls = worker(oban=oban, cron=cron, **overrides)(FunctionWorker)

        original_new = worker_cls.new
        original_enq = worker_cls.enqueue

        @wraps(func)
        def new_with_sig(*args, **kwargs):
            # Bind call args against the original signature so they are
            # stored as a plain {param_name: value} dict on the job.
            bound = sig.bind(*args, **kwargs)
            bound.apply_defaults()
            return original_new(dict(bound.arguments))

        @wraps(func)
        async def enq_with_sig(*args, **kwargs):
            bound = sig.bind(*args, **kwargs)
            bound.apply_defaults()
            return await original_enq(dict(bound.arguments))

        # Replace the classmethods with wrappers carrying func's signature.
        worker_cls.new = staticmethod(new_with_sig)
        worker_cls.enqueue = staticmethod(enq_with_sig)

        return worker_cls

    return decorate
|