fairchild 0.0.1__py3-none-any.whl → 0.0.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fairchild/__init__.py +11 -0
- fairchild/cli.py +386 -0
- fairchild/context.py +54 -0
- fairchild/db/__init__.py +0 -0
- fairchild/db/migrations.py +69 -0
- fairchild/fairchild.py +166 -0
- fairchild/future.py +78 -0
- fairchild/job.py +123 -0
- fairchild/record.py +22 -0
- fairchild/task.py +225 -0
- fairchild/templates/dashboard.html +1650 -0
- fairchild/templates/job.html +1245 -0
- fairchild/ui.py +560 -0
- fairchild/worker.py +495 -0
- fairchild-0.0.3.dist-info/METADATA +483 -0
- fairchild-0.0.3.dist-info/RECORD +20 -0
- fairchild-0.0.3.dist-info/entry_points.txt +2 -0
- fairchild-0.0.3.dist-info/licenses/LICENSE +21 -0
- fairchild-0.0.3.dist-info/top_level.txt +1 -0
- fairchild-0.0.1.dist-info/METADATA +0 -6
- fairchild-0.0.1.dist-info/RECORD +0 -5
- fairchild-0.0.1.dist-info/top_level.txt +0 -1
- main.py +0 -6
- {fairchild-0.0.1.dist-info → fairchild-0.0.3.dist-info}/WHEEL +0 -0
fairchild/__init__.py
ADDED
fairchild/cli.py
ADDED
|
@@ -0,0 +1,386 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import importlib
|
|
3
|
+
import click
|
|
4
|
+
import os
|
|
5
|
+
|
|
6
|
+
from fairchild.fairchild import Fairchild
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
def import_module(module_path: str) -> None:
    """Import a module by its dotted path.

    Importing a module is how tasks get registered with Fairchild: the act
    of importing runs the module's decorators.

    Args:
        module_path: Dotted module path, e.g. "myapp.tasks".

    Raises:
        click.ClickException: If the module cannot be imported.
    """
    try:
        importlib.import_module(module_path)
    except ImportError as e:
        # Chain the original ImportError so tracebacks keep the real cause.
        raise click.ClickException(f"Failed to import '{module_path}': {e}") from e
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
def get_database_url() -> str:
    """Return the database URL from the FAIRCHILD_DATABASE_URL env var.

    Raises:
        click.ClickException: If the variable is unset or empty.
    """
    database_url = os.environ.get("FAIRCHILD_DATABASE_URL")
    if database_url:
        return database_url
    raise click.ClickException(
        "FAIRCHILD_DATABASE_URL environment variable is required"
    )
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
@click.group()
def cli():
    """Fairchild - PostgreSQL-backed job queue and workflow engine."""
    # Group body intentionally empty: subcommands attach via @cli.command().
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
@cli.command()
@click.option(
    "--force",
    is_flag=True,
    help="Drop all tables and reinstall from scratch (DESTRUCTIVE)",
)
def install(force):
    """Install Fairchild schema (runs all migrations on a fresh database)."""
    # Thin sync wrapper: click entry points are synchronous, so the async
    # implementation is driven with asyncio.run().
    asyncio.run(_install(force))
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
async def _install(force: bool = False):
    """Create the Fairchild schema, optionally wiping an existing install.

    Args:
        force: When True, drop all Fairchild tables first and re-run the
            migrations; when False and the schema already exists, print a
            hint and do nothing.
    """
    from fairchild.db.migrations import migrate, drop_all

    url = get_database_url()
    fairchild = Fairchild(url)
    await fairchild.connect()

    try:
        # Check if table exists — presence of fairchild_jobs is used as the
        # "already installed" sentinel.
        # NOTE(review): the lookup is not schema-qualified, so it would match
        # a fairchild_jobs table in any schema — confirm intended.
        row = await fairchild._pool.fetchrow("""
            SELECT EXISTS (
                SELECT FROM information_schema.tables
                WHERE table_name = 'fairchild_jobs'
            )
        """)

        if row["exists"]:
            if not force:
                click.echo(
                    "Fairchild is already installed. Use 'migrate' to update schema, or --force to reinstall."
                )
                return

            click.echo("Dropping all Fairchild tables...")
            await drop_all(fairchild._pool)

        await migrate(fairchild._pool)
        click.echo("Fairchild installed successfully.")
    finally:
        # Always release the pool, even if install fails partway.
        await fairchild.disconnect()
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
@cli.command()
def migrate():
    """Run database migrations."""
    # Thin sync wrapper over the async implementation.
    asyncio.run(_migrate())
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
async def _migrate():
    """Connect to the database, apply any pending migrations, disconnect."""
    from fairchild.db.migrations import migrate

    fairchild = Fairchild(get_database_url())
    await fairchild.connect()
    try:
        await migrate(fairchild._pool)
        click.echo("Migrations complete.")
    finally:
        # Release the pool whether or not the migrations succeeded.
        await fairchild.disconnect()
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
@cli.command()
@click.option(
    "--queues",
    "-q",
    default="default:1",
    help='Queue configuration, e.g., "default:2,processing:4"',
)
@click.option(
    "--import",
    "-i",
    "imports",
    multiple=True,
    help='Module to import for task registration, e.g., "myapp.tasks"',
)
def worker(queues: str, imports: tuple[str, ...]):
    """Start workers to process jobs.

    Queue format: "queue_name:num_workers,queue_name:num_workers"

    Examples:

        fairchild worker --import myapp.tasks --queues "default:2"

        fairchild worker -i myapp.tasks -i myapp.workflows -q "default:2,processing:4"
    """
    # Importing the user-supplied modules registers their tasks.
    for module_path in imports:
        import_module(module_path)
        click.echo(f"Imported {module_path}")

    config = parse_queue_config(queues)
    click.echo(f"Starting workers: {config}")
    asyncio.run(_run_workers(config))
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
def parse_queue_config(config: str) -> dict[str, int]:
    """Parse a queue configuration string into a name -> worker-count map.

    Format: "name:count,name:count". A bare name (no colon) defaults to one
    worker. Empty segments (e.g. trailing commas) are ignored.

    Args:
        config: Configuration string such as "default:2,processing:4".

    Returns:
        Mapping of queue name to number of workers.

    Raises:
        click.ClickException: If a worker count is not a positive integer.
    """
    result: dict[str, int] = {}
    for part in config.split(","):
        part = part.strip()
        if not part:
            # Tolerate trailing/duplicate commas instead of creating a
            # bogus ""-named queue.
            continue
        if ":" in part:
            queue, count = part.rsplit(":", 1)
            try:
                workers = int(count)
            except ValueError:
                # Original code let int() raise a bare ValueError; surface a
                # proper CLI error instead.
                raise click.ClickException(
                    f"Invalid worker count in '{part}': expected an integer"
                )
            if workers < 1:
                raise click.ClickException(
                    f"Invalid worker count in '{part}': must be >= 1"
                )
            result[queue.strip()] = workers
        else:
            result[part] = 1
    return result
|
|
142
|
+
|
|
143
|
+
|
|
144
|
+
async def _run_workers(queue_config: dict[str, int]):
    """Spin up a WorkerPool on the configured queues and run until shutdown."""
    from fairchild.worker import WorkerPool

    fairchild = Fairchild(get_database_url())
    await fairchild.connect()

    pool = WorkerPool(fairchild, queue_config)
    try:
        await pool.run()
    except KeyboardInterrupt:
        click.echo("\nShutting down workers...")
    finally:
        # Stop workers before tearing down the database connection they use.
        await pool.shutdown()
        await fairchild.disconnect()
|
|
160
|
+
|
|
161
|
+
|
|
162
|
+
@cli.command()
@click.option("--host", "-h", default="127.0.0.1", help="Host to bind to")
@click.option("--port", "-p", default=4000, help="Port to bind to")
@click.option(
    "--import",
    "-i",
    "imports",
    multiple=True,
    help='Module to import for task registration, e.g., "myapp.tasks"',
)
def ui(host: str, port: int, imports: tuple[str, ...]):
    """Start the web UI dashboard."""
    # Importing the user-supplied modules registers their tasks.
    for module_path in imports:
        import_module(module_path)

    click.echo(f"Starting Fairchild UI at http://{host}:{port}")
    asyncio.run(_run_ui(host, port))
|
|
180
|
+
|
|
181
|
+
|
|
182
|
+
async def _run_ui(host: str, port: int):
    """Serve the aiohttp dashboard until the process is interrupted.

    Connects to the database, mounts the UI app on an AppRunner/TCPSite,
    and then parks in a sleep loop.
    """
    from fairchild.ui import create_app
    from aiohttp import web

    url = get_database_url()
    fairchild = Fairchild(url)
    await fairchild.connect()

    app = create_app(fairchild)

    runner = web.AppRunner(app)
    await runner.setup()
    site = web.TCPSite(runner, host, port)

    try:
        await site.start()
        # Keep running until interrupted
        while True:
            await asyncio.sleep(3600)
    except KeyboardInterrupt:
        # NOTE(review): under asyncio.run, Ctrl-C typically surfaces outside
        # the coroutine (the task is cancelled instead), so this handler may
        # never fire; cleanup still happens via finally — confirm.
        pass
    finally:
        await runner.cleanup()
        await fairchild.disconnect()
|
|
206
|
+
|
|
207
|
+
|
|
208
|
+
@cli.command()
@click.argument("task_name")
@click.option(
    "--import",
    "-i",
    "imports",
    multiple=True,
    help='Module to import for task registration, e.g., "myapp.tasks"',
)
@click.option(
    "--arg",
    "-a",
    "args",
    multiple=True,
    help="Task argument as key=value, e.g., -a name=World -a count=5",
)
@click.option(
    "--in",
    "delay",
    default=None,
    help='Delay before running, e.g., "5m", "1h", "30s"',
)
def enqueue(
    task_name: str, imports: tuple[str, ...], args: tuple[str, ...], delay: str | None
):
    """Enqueue a task for execution.

    Examples:

        fairchild enqueue -i myapp.tasks myapp.tasks.hello -a name=World

        fairchild enqueue -i myapp.tasks myapp.tasks.process --in 5m -a id=123
    """
    # Hoisted out of the per-argument loop (original re-imported json on
    # every iteration, inside the try block).
    import json

    # Import task modules to register them
    for module_path in imports:
        import_module(module_path)

    # Parse key=value arguments; values are decoded as JSON when possible so
    # numbers, booleans, and null arrive typed rather than as strings.
    parsed_args = {}
    for arg in args:
        if "=" not in arg:
            raise click.ClickException(f"Invalid argument format: {arg}. Use key=value")
        key, value = arg.split("=", 1)
        try:
            parsed_args[key] = json.loads(value)
        except json.JSONDecodeError:
            parsed_args[key] = value

    # Optional delay like "5m" becomes seconds; 0 means run immediately.
    delay_seconds = parse_delay(delay) if delay else 0

    asyncio.run(_enqueue(task_name, parsed_args, delay_seconds))
|
|
265
|
+
|
|
266
|
+
|
|
267
|
+
def parse_delay(delay: str) -> int:
    """Parse a delay string like '5m', '1h', '30s' into seconds."""
    import re

    parsed = re.fullmatch(r"(\d+)([smhd])", delay.lower())
    if parsed is None:
        raise click.ClickException(
            f"Invalid delay format: {delay}. Use e.g., '5m', '1h', '30s'"
        )

    seconds_per_unit = {"s": 1, "m": 60, "h": 3600, "d": 86400}
    return int(parsed.group(1)) * seconds_per_unit[parsed.group(2)]
|
|
282
|
+
|
|
283
|
+
|
|
284
|
+
async def _enqueue(task_name: str, args: dict, delay_seconds: int):
    """Look up a registered task and insert a job, optionally in the future."""
    from datetime import datetime, timedelta, timezone
    from fairchild.task import get_task

    fairchild = Fairchild(get_database_url())
    await fairchild.connect()

    try:
        task = get_task(task_name)

        if delay_seconds <= 0:
            job = await fairchild.enqueue(task=task, args=args)
            click.echo(f"Enqueued job {job.id}")
        else:
            run_at = datetime.now(timezone.utc) + timedelta(seconds=delay_seconds)
            job = await fairchild.enqueue(task=task, args=args, scheduled_at=run_at)
            click.echo(f"Enqueued job {job.id} (scheduled in {delay_seconds}s)")
    finally:
        await fairchild.disconnect()
|
|
308
|
+
|
|
309
|
+
|
|
310
|
+
@cli.command()
@click.argument("task_name")
@click.option(
    "--import",
    "-i",
    "imports",
    multiple=True,
    help='Module to import for task registration, e.g., "myapp.tasks"',
)
@click.option(
    "--arg",
    "-a",
    "args",
    multiple=True,
    help="Task argument as key=value, e.g., -a name=World -a count=5",
)
def run(task_name: str, imports: tuple[str, ...], args: tuple[str, ...]):
    """Run a task locally for testing (does not enqueue).

    Runs the task function directly in the current process and prints the result.
    Useful for testing tasks without involving the database or workers.

    Examples:

        fairchild run -i myapp.tasks myapp.tasks.hello -a name=World

        fairchild run -i myapp.tasks myapp.tasks.add -a a=2 -a b=3
    """
    # Hoisted out of the per-argument loop (original re-imported json on
    # every iteration, inside the try block).
    import json

    # Import task modules to register them
    for module_path in imports:
        import_module(module_path)

    # Parse key=value arguments; values are decoded as JSON when possible so
    # numbers, booleans, and null arrive typed rather than as strings.
    parsed_args = {}
    for arg in args:
        if "=" not in arg:
            raise click.ClickException(f"Invalid argument format: {arg}. Use key=value")
        key, value = arg.split("=", 1)
        try:
            parsed_args[key] = json.loads(value)
        except json.JSONDecodeError:
            parsed_args[key] = value

    asyncio.run(_invoke(task_name, parsed_args))
|
|
357
|
+
|
|
358
|
+
|
|
359
|
+
async def _invoke(task_name: str, args: dict):
    """Execute a registered task's function in-process and print the result."""
    import inspect
    import traceback
    from fairchild.task import get_task

    task = get_task(task_name)
    click.echo(f"Invoking {task_name}...")

    try:
        # Call the raw function, skipping the worker-context machinery.
        outcome = task.fn(**args)
        if inspect.isawaitable(outcome):
            # The task was an async function — await its coroutine.
            outcome = await outcome
        click.echo(f"Result: {outcome}")
    except Exception as e:
        # Show the full traceback on stderr, then fail the CLI cleanly.
        click.echo(traceback.format_exc(), err=True)
        raise click.ClickException(f"Task failed: {e}")
|
|
379
|
+
|
|
380
|
+
|
|
381
|
+
def main():
    """Console-script entry point; dispatches to the click command group."""
    cli()


if __name__ == "__main__":
    main()
|
fairchild/context.py
ADDED
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
"""Execution context for tracking the currently running job.
|
|
2
|
+
|
|
3
|
+
This module provides a way to track which job is currently being executed
|
|
4
|
+
by a worker. When a task calls another task, we check this context to
|
|
5
|
+
determine whether to spawn a child job (if inside a worker) or execute
|
|
6
|
+
directly (if called from outside).
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
from typing import TYPE_CHECKING
|
|
10
|
+
|
|
11
|
+
if TYPE_CHECKING:
|
|
12
|
+
from fairchild.job import Job
|
|
13
|
+
from fairchild.fairchild import Fairchild
|
|
14
|
+
|
|
15
|
+
# The currently executing job (set by worker during task execution)
|
|
16
|
+
_current_job: "Job | None" = None
|
|
17
|
+
_current_fairchild: "Fairchild | None" = None
|
|
18
|
+
_pending_children: list["Job"] = []
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def set_current_job(job: "Job | None", fairchild: "Fairchild | None" = None) -> None:
    """Record the job (and Fairchild instance) now executing. Called by worker.

    Passing None clears the context. Either way, any children queued by a
    previous job are discarded.
    """
    global _current_job, _current_fairchild, _pending_children
    _current_job = job
    _current_fairchild = fairchild
    _pending_children = []
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def get_current_job() -> "Job | None":
    """Get the currently executing job, or None if not in a worker.

    Reads the module-level state written by set_current_job().
    """
    return _current_job
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def get_current_fairchild() -> "Fairchild | None":
    """Get the Fairchild instance for the current context.

    None when no job is executing (set_current_job was not called, or was
    called without a fairchild argument).
    """
    return _current_fairchild
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def is_inside_task() -> bool:
    """Check if code is currently running inside a task (in a worker).

    True exactly when set_current_job() has installed a non-None job.
    """
    return _current_job is not None
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
def add_pending_child(job: "Job") -> None:
    """Add a child job to be inserted after the current task completes.

    Children accumulate until get_pending_children() drains them (or
    set_current_job() discards them when a new job starts).
    """
    _pending_children.append(job)
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
def get_pending_children() -> list["Job"]:
    """Drain and return the list of pending child jobs.

    The module-level list is replaced with a fresh empty one, so subsequent
    calls return [] until more children are queued.
    """
    global _pending_children
    drained, _pending_children = _pending_children, []
    return drained
|
fairchild/db/__init__.py
ADDED
|
File without changes
|
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
"""Database migration system for Fairchild.
|
|
2
|
+
|
|
3
|
+
Migrations are stored as numbered SQL files in the migrations/ directory:
|
|
4
|
+
- 001_initial.sql
|
|
5
|
+
- 002_add_parent_id.sql
|
|
6
|
+
- etc.
|
|
7
|
+
|
|
8
|
+
Each migration runs exactly once, tracked in the fairchild_migrations table.
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
from pathlib import Path
|
|
12
|
+
|
|
13
|
+
MIGRATIONS_DIR = Path(__file__).parent / "migrations"
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
async def migrate(pool) -> None:
    """Run all pending database migrations in order.

    Usage:
        fairchild = Fairchild("postgresql://localhost/myapp")
        await fairchild.connect()
        await migrate(fairchild._pool)
    """
    async with pool.acquire() as conn:
        # Make sure the bookkeeping table exists before consulting it.
        await conn.execute("""
            CREATE TABLE IF NOT EXISTS fairchild_migrations (
                id SERIAL PRIMARY KEY,
                name VARCHAR(255) NOT NULL UNIQUE,
                applied_at TIMESTAMPTZ NOT NULL DEFAULT now()
            )
        """)

        # Everything already applied, by file name.
        rows = await conn.fetch("SELECT name FROM fairchild_migrations")
        applied = {row["name"] for row in rows}

        # Lexicographic sort matches the numeric 00N_ prefixes.
        for migration_file in sorted(MIGRATIONS_DIR.glob("*.sql")):
            name = migration_file.name
            if name in applied:
                continue

            print(f"Running migration: {name}")
            sql = migration_file.read_text()

            # Apply the migration and record it atomically — a failed
            # migration is neither applied nor marked as applied.
            async with conn.transaction():
                await conn.execute(sql)
                await conn.execute(
                    "INSERT INTO fairchild_migrations (name) VALUES ($1)",
                    name,
                )

            print(f"  Applied: {name}")

    print("Migrations complete.")
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
async def drop_all(pool) -> None:
    """Drop all Fairchild tables. Use with caution!"""
    # Fixed internal table names — not user input — so f-string SQL is safe.
    tables = ("fairchild_workers", "fairchild_jobs", "fairchild_migrations")
    async with pool.acquire() as conn:
        for table in tables:
            await conn.execute(f"DROP TABLE IF EXISTS {table} CASCADE")
|
fairchild/fairchild.py
ADDED
|
@@ -0,0 +1,166 @@
|
|
|
1
|
+
from datetime import datetime, timezone
|
|
2
|
+
from typing import Any, TYPE_CHECKING
|
|
3
|
+
from uuid import UUID
|
|
4
|
+
import json
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
def utcnow() -> datetime:
    """Return the current moment as a timezone-aware UTC datetime."""
    return datetime.now(tz=timezone.utc)
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
if TYPE_CHECKING:
    from fairchild.job import Job
    from fairchild.task import Task

# Global Fairchild instance
# NOTE(review): set as a side effect of Fairchild.__init__; constructing a
# second Fairchild silently replaces this — confirm that is intended.
_instance: "Fairchild | None" = None
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def get_fairchild() -> "Fairchild":
    """Get the global Fairchild instance.

    Raises:
        RuntimeError: If no Fairchild has been constructed in this process.
    """
    if _instance is None:
        raise RuntimeError(
            "Fairchild not initialized. Call Fairchild(database_url) first."
        )
    return _instance
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class Fairchild:
    """Main entry point for Fairchild job queue.

    Usage:
        fairchild = Fairchild("postgresql://localhost/myapp")

        # Enqueue jobs
        my_task.enqueue(item_id=42)

        # Or use directly
        fairchild.enqueue(my_task, args={"item_id": 42})
    """

    def __init__(self, database_url: str):
        global _instance

        # The asyncpg pool is created lazily by connect()/_ensure_connected().
        self.database_url = database_url
        self._pool = None

        # Set as global instance
        # NOTE(review): every construction replaces the module-global used by
        # get_fairchild() — confirm intended.
        _instance = self

    async def connect(self) -> None:
        """Initialize the database connection pool."""
        import asyncpg

        self._pool = await asyncpg.create_pool(self.database_url)

    async def disconnect(self) -> None:
        """Close the database connection pool (no-op if not connected)."""
        if self._pool:
            await self._pool.close()
            self._pool = None

    async def _ensure_connected(self) -> None:
        """Ensure we have a database connection, connecting lazily if needed."""
        if self._pool is None:
            await self.connect()

    async def enqueue(
        self,
        task: "Task",
        args: dict[str, Any],
        scheduled_at: datetime | None = None,
        priority: int | None = None,
    ) -> "Job":
        """Enqueue a job for execution.

        Args:
            task: The registered Task to run; supplies queue, max_attempts,
                tags, and the default priority.
            args: Keyword arguments stored with the job (JSON-encoded).
            scheduled_at: Optional future run time; None means run now.
            priority: Overrides the task's default priority when given.

        Returns the created Job.
        """
        from fairchild.job import Job, JobState

        await self._ensure_connected()

        # A job with an explicit timestamp starts SCHEDULED; otherwise it is
        # immediately AVAILABLE with scheduled_at = now.
        job = Job(
            task_name=task.name,
            queue=task.queue,
            args=args,
            priority=priority if priority is not None else task.priority,
            max_attempts=task.max_attempts,
            tags=task.tags,
            scheduled_at=scheduled_at or utcnow(),
            state=JobState.AVAILABLE if scheduled_at is None else JobState.SCHEDULED,
        )

        await self._insert_job(job)
        return job

    async def _insert_job(self, job: "Job") -> None:
        """Insert a job into the database."""
        # Column order here must match the positional $1..$15 arguments below.
        query = """
            INSERT INTO fairchild_jobs (
                id, task_name, queue, args,
                parent_id, deps,
                state, priority, scheduled_at,
                attempt, max_attempts, tags, meta,
                inserted_at, updated_at
            ) VALUES (
                $1, $2, $3, $4,
                $5, $6,
                $7, $8, $9,
                $10, $11, $12, $13,
                $14, $15
            )
        """

        # args/meta are serialized to JSON text; state is stored by enum value.
        await self._pool.execute(
            query,
            job.id,
            job.task_name,
            job.queue,
            json.dumps(job.args),
            job.parent_id,
            job.deps,
            job.state.value,
            job.priority,
            job.scheduled_at,
            job.attempt,
            job.max_attempts,
            job.tags,
            json.dumps(job.meta),
            job.inserted_at,
            job.updated_at,
        )

    async def get_job(self, job_id: UUID) -> "Job | None":
        """Get a job by ID.

        Returns None when no row matches.
        """
        from fairchild.job import Job

        await self._ensure_connected()

        query = """
            SELECT * FROM fairchild_jobs WHERE id = $1
        """
        row = await self._pool.fetchrow(query, job_id)
        if row is None:
            return None

        return Job.from_row(dict(row))

    async def get_recorded(self, job_id: UUID) -> Any:
        """Get the recorded value from a completed job.

        Returns None both when the job does not exist and when it has no
        recorded value — callers cannot distinguish the two cases.
        """
        await self._ensure_connected()

        query = """
            SELECT recorded FROM fairchild_jobs
            WHERE id = $1
        """

        row = await self._pool.fetchrow(query, job_id)
        if row is None:
            return None

        recorded = row["recorded"]
        if recorded is None:
            return None

        # The column may come back as JSON text or as an already-decoded
        # value depending on the column type/codec; handle both.
        return json.loads(recorded) if isinstance(recorded, str) else recorded
|