fairchild-0.0.1-py3-none-any.whl → fairchild-0.0.3-py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- fairchild/__init__.py +11 -0
- fairchild/cli.py +386 -0
- fairchild/context.py +54 -0
- fairchild/db/__init__.py +0 -0
- fairchild/db/migrations.py +69 -0
- fairchild/fairchild.py +166 -0
- fairchild/future.py +78 -0
- fairchild/job.py +123 -0
- fairchild/record.py +22 -0
- fairchild/task.py +225 -0
- fairchild/templates/dashboard.html +1650 -0
- fairchild/templates/job.html +1245 -0
- fairchild/ui.py +560 -0
- fairchild/worker.py +495 -0
- fairchild-0.0.3.dist-info/METADATA +483 -0
- fairchild-0.0.3.dist-info/RECORD +20 -0
- fairchild-0.0.3.dist-info/entry_points.txt +2 -0
- fairchild-0.0.3.dist-info/licenses/LICENSE +21 -0
- fairchild-0.0.3.dist-info/top_level.txt +1 -0
- fairchild-0.0.1.dist-info/METADATA +0 -6
- fairchild-0.0.1.dist-info/RECORD +0 -5
- fairchild-0.0.1.dist-info/top_level.txt +0 -1
- main.py +0 -6
- {fairchild-0.0.1.dist-info → fairchild-0.0.3.dist-info}/WHEEL +0 -0
fairchild/future.py
ADDED
@@ -0,0 +1,78 @@
"""Future represents a pending task result."""

from uuid import UUID
from typing import Any, TYPE_CHECKING

if TYPE_CHECKING:
    from fairchild.job import Job


class Future:
    """Represents the pending result of a spawned task.

    When a task calls another task from within a worker, it gets back
    a Future instead of the actual result. The Future:

    1. Tracks the child job that was spawned
    2. Can be passed to other tasks to establish dependencies
    3. Resolves to the actual result when the child job completes

    Usage:
        @task
        def parent_task():
            # This returns a Future, not the actual result
            result = child_task(arg1, arg2)

            # Pass the future to another task - creates a dependency
            another_task(result)
    """

    def __init__(self, job_id: UUID, job_key: str | None = None):
        self.job_id = job_id
        self.job_key = job_key
        self._result: Any = None
        self._resolved = False

    def __repr__(self) -> str:
        if self._resolved:
            return f"Future({self.job_key or self.job_id}, resolved={self._result!r})"
        return f"Future({self.job_key or self.job_id}, pending)"

    def resolve(self, result: Any) -> None:
        """Set the resolved value of this future."""
        self._result = result
        self._resolved = True

    @property
    def result(self) -> Any:
        """Get the resolved result. Raises if not yet resolved."""
        if not self._resolved:
            raise RuntimeError(f"Future {self.job_id} has not been resolved yet")
        return self._result

    @property
    def is_resolved(self) -> bool:
        return self._resolved


def is_future(obj: Any) -> bool:
    """Check if an object is a Future."""
    return isinstance(obj, Future)


def extract_futures(args: dict) -> list[Future]:
    """Extract all Future objects from a dict of arguments (including nested)."""
    futures = []

    def _extract(obj: Any) -> None:
        if isinstance(obj, Future):
            futures.append(obj)
        elif isinstance(obj, dict):
            for v in obj.values():
                _extract(v)
        elif isinstance(obj, (list, tuple)):
            for item in obj:
                _extract(item)

    _extract(args)
    return futures
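The helpers at the bottom of future.py can be exercised without a worker or database. A minimal sketch, assuming nothing beyond this module (the job key and the nesting of the arguments are made up):

from uuid import uuid4
from fairchild.future import Future, extract_futures, is_future

parent = Future(job_id=uuid4(), job_key="fetch-user")
print(is_future(parent))   # True
print(parent)              # Future(fetch-user, pending)

# Futures hiding inside nested dicts, lists, and tuples are all collected.
args = {"user": parent, "batch": [Future(job_id=uuid4()), {"inner": parent}]}
found = extract_futures(args)
print(len(found))          # 3 (no deduplication, so the same Future can appear twice)

parent.resolve({"id": 42})
print(parent.result)       # {'id': 42}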
fairchild/job.py
ADDED
@@ -0,0 +1,123 @@
from dataclasses import dataclass, field
from datetime import datetime, timezone
from enum import Enum
from typing import Any
from uuid import UUID, uuid4
import json


def _utcnow() -> datetime:
    return datetime.now(timezone.utc)


def _parse_json(value: Any) -> Any:
    """Parse JSON string if needed, otherwise return as-is."""
    if isinstance(value, str):
        return json.loads(value)
    return value


class JobState(str, Enum):
    AVAILABLE = "available"
    SCHEDULED = "scheduled"
    RUNNING = "running"
    COMPLETED = "completed"
    FAILED = "failed"
    CANCELLED = "cancelled"
    DISCARDED = "discarded"


@dataclass
class Job:
    """Represents a job in the queue."""

    # Task identification
    task_name: str
    queue: str = "default"
    args: dict[str, Any] = field(default_factory=dict)

    # Identity
    id: UUID = field(default_factory=uuid4)

    # Parent-child relationship (for spawned tasks)
    parent_id: UUID | None = None
    deps: list[str] = field(default_factory=list)  # Job IDs this job depends on

    # State
    state: JobState = JobState.AVAILABLE

    # Scheduling & priority
    priority: int = 5  # 0-9, lower = higher priority
    scheduled_at: datetime = field(default_factory=_utcnow)

    # Execution tracking
    attempted_at: datetime | None = None
    completed_at: datetime | None = None
    attempt: int = 0
    max_attempts: int = 3

    # Results & errors
    recorded: Any | None = None
    errors: list[dict[str, Any]] = field(default_factory=list)

    # Metadata
    tags: list[str] = field(default_factory=list)
    meta: dict[str, Any] = field(default_factory=dict)

    # Timestamps
    inserted_at: datetime = field(default_factory=_utcnow)
    updated_at: datetime = field(default_factory=_utcnow)

    def __post_init__(self):
        # Ensure state is a JobState enum
        if isinstance(self.state, str):
            self.state = JobState(self.state)

    @classmethod
    def from_row(cls, row: dict[str, Any]) -> "Job":
        """Create a Job instance from a database row."""
        return cls(
            id=row["id"],
            task_name=row["task_name"],
            queue=row["queue"],
            args=_parse_json(row["args"]) or {},
            parent_id=row.get("parent_id"),
            deps=row.get("deps") or [],
            state=JobState(row["state"]),
            priority=row["priority"],
            scheduled_at=row["scheduled_at"],
            attempted_at=row["attempted_at"],
            completed_at=row["completed_at"],
            attempt=row["attempt"],
            max_attempts=row["max_attempts"],
            recorded=_parse_json(row["recorded"]),
            errors=_parse_json(row["errors"]) or [],
            tags=row.get("tags") or [],
            meta=_parse_json(row.get("meta")) or {},
            inserted_at=row["inserted_at"],
            updated_at=row["updated_at"],
        )

    def to_dict(self) -> dict[str, Any]:
        """Convert to dictionary for database insertion."""
        return {
            "id": self.id,
            "task_name": self.task_name,
            "queue": self.queue,
            "args": self.args,
            "parent_id": self.parent_id,
            "deps": self.deps,
            "state": self.state.value,
            "priority": self.priority,
            "scheduled_at": self.scheduled_at,
            "attempted_at": self.attempted_at,
            "completed_at": self.completed_at,
            "attempt": self.attempt,
            "max_attempts": self.max_attempts,
            "recorded": self.recorded,
            "errors": self.errors,
            "tags": self.tags,
            "meta": self.meta,
            "inserted_at": self.inserted_at,
            "updated_at": self.updated_at,
        }
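A minimal sketch of the Job round trip, assuming nothing beyond this module; the hand-built dict below stands in for a real database row:

from fairchild.job import Job, JobState

job = Job(task_name="reports.build", args={"report_id": 7}, state="available")
print(job.state is JobState.AVAILABLE)   # True -- __post_init__ coerces the string

row = job.to_dict()                      # enum serialized as its string value
print(row["state"])                      # "available"

# from_row accepts JSON columns either as parsed objects or as raw strings,
# because _parse_json only decodes when it sees a str.
row["args"] = '{"report_id": 7}'
restored = Job.from_row(row)
print(restored.args)                     # {'report_id': 7}
print(restored.id == job.id)             # True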
fairchild/record.py
ADDED
@@ -0,0 +1,22 @@
from dataclasses import dataclass
from typing import Any


@dataclass(frozen=True)
class Record:
    """Wrapper to indicate a task's return value should be persisted.

    Usage:
        @task(queue="default")
        def my_task(item_id: int):
            result = process(item_id)
            return Record({"item_id": item_id, "result": result})

    The recorded value will be stored in the job's `recorded` column
    and can be retrieved by downstream jobs in a workflow.
    """

    value: Any

    def __repr__(self) -> str:
        return f"Record({self.value!r})"
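Record does nothing on its own; per its docstring the wrapped value is what a worker writes to the job's `recorded` column, and per Task.__call__ below a direct (non-worker) call simply unwraps it. A minimal sketch of the wrapper itself:

from fairchild.record import Record

rec = Record({"item_id": 3, "result": "processed"})
print(rec)          # Record({'item_id': 3, 'result': 'processed'})
print(rec.value)    # the payload that would land in the `recorded` column

# frozen=True makes the wrapper immutable
try:
    rec.value = None
except Exception as exc:
    print(type(exc).__name__)   # FrozenInstanceError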
fairchild/task.py
ADDED
@@ -0,0 +1,225 @@
from datetime import datetime, timedelta, timezone
from functools import wraps
from typing import Any, Callable
from uuid import uuid4
import inspect

# Global task registry
_task_registry: dict[str, "Task"] = {}


def get_task(name: str) -> "Task":
    """Get a registered task by name."""
    if name not in _task_registry:
        raise ValueError(f"Unknown task: {name}")
    return _task_registry[name]


def task(
    queue: str = "default",
    max_attempts: int = 3,
    priority: int = 5,
    unique: bool = False,
    unique_period: timedelta | None = None,
    tags: list[str] | None = None,
) -> Callable[[Callable], "Task"]:
    """Decorator to define a task.

    Usage:
        @task(queue="default")
        def my_task(item_id: int):
            return Record({"result": item_id * 2})

        # Enqueue
        my_task.enqueue(item_id=42)

        # Schedule for later
        my_task.enqueue_in(minutes=30, item_id=42)

    Args:
        queue: Queue name for this task
        max_attempts: Maximum retry attempts
        priority: 0-9, lower = higher priority
        unique: If True, prevent duplicate jobs with same args
        unique_period: Time window for uniqueness check
        tags: Tags for categorizing/filtering jobs
    """

    def decorator(fn: Callable) -> "Task":
        task_obj = Task(
            fn=fn,
            queue=queue,
            max_attempts=max_attempts,
            priority=priority,
            unique=unique,
            unique_period=unique_period,
            tags=tags or [],
        )
        _task_registry[task_obj.name] = task_obj
        return task_obj

    return decorator


class Task:
    """A registered task that can be enqueued."""

    def __init__(
        self,
        fn: Callable,
        queue: str,
        max_attempts: int,
        priority: int,
        unique: bool,
        unique_period: timedelta | None,
        tags: list[str],
    ):
        self.fn = fn
        self.queue = queue
        self.max_attempts = max_attempts
        self.priority = priority
        self.unique = unique
        self.unique_period = unique_period
        self.tags = tags

        # Derive task name from module and function name
        self.name = f"{fn.__module__}.{fn.__qualname__}"

        # Check if function accepts a 'job' parameter
        sig = inspect.signature(fn)
        self._accepts_job = "job" in sig.parameters

        # Preserve function metadata
        wraps(fn)(self)

    def __call__(self, *args: Any, **kwargs: Any) -> Any:
        """Call the task - either spawn as child job or execute directly.

        If called from inside a running task (in a worker), this spawns
        a child job and returns a Future. Otherwise, it executes the
        function directly.
        """
        from fairchild.context import (
            is_inside_task,
            get_current_job,
            add_pending_child,
        )
        from fairchild.future import Future, extract_futures
        from fairchild.job import Job, JobState

        # If we're inside a worker executing a task, spawn a child job
        if is_inside_task():
            parent_job = get_current_job()

            # Convert positional args to kwargs using function signature
            if args:
                sig = inspect.signature(self.fn)
                param_names = [
                    p for p in sig.parameters.keys() if p not in ("job", "workflow")
                ]
                for i, arg in enumerate(args):
                    if i < len(param_names):
                        kwargs[param_names[i]] = arg

            # Extract any futures from the args - these become dependencies
            futures_in_args = extract_futures(kwargs)
            deps = [str(f.job_id) for f in futures_in_args]

            # Determine initial state based on dependencies
            has_deps = len(deps) > 0
            state = JobState.SCHEDULED if has_deps else JobState.AVAILABLE

            # Create the child job
            job_id = uuid4()
            child_job = Job(
                id=job_id,
                task_name=self.name,
                queue=self.queue,
                args=self._serialize_args(kwargs),
                priority=self.priority,
                max_attempts=self.max_attempts,
                tags=self.tags,
                parent_id=parent_job.id,
                deps=deps,
                state=state,
            )

            # Queue the child job to be inserted after the task completes
            add_pending_child(child_job)

            # Return a future representing this job's result
            return Future(job_id=job_id)

        # Not inside a task - execute directly
        from fairchild.record import Record

        result = self.fn(*args, **kwargs)
        # Unwrap Record so local runs behave like resolved futures
        if isinstance(result, Record):
            return result.value
        return result

    def _serialize_args(self, kwargs: dict[str, Any]) -> dict[str, Any]:
        """Serialize arguments, converting Futures to their job IDs."""
        from fairchild.future import Future

        def _convert(obj: Any) -> Any:
            if isinstance(obj, Future):
                # Store as a reference that can be resolved later
                return {"__future__": str(obj.job_id)}
            elif isinstance(obj, dict):
                return {k: _convert(v) for k, v in obj.items()}
            elif isinstance(obj, (list, tuple)):
                # Tuples are serialized as lists (JSON has no tuple type)
                return [_convert(item) for item in obj]
            return obj

        return _convert(kwargs)

    def _get_fairchild(self) -> Any:
        """Get the Fairchild instance."""
        from fairchild.fairchild import get_fairchild

        return get_fairchild()

    def enqueue(self, **kwargs: Any) -> Any:
        """Enqueue this task for immediate execution.

        Returns the created Job.
        """
        return self._get_fairchild().enqueue(
            task=self,
            args=kwargs,
        )

    def enqueue_at(self, at: datetime, **kwargs: Any) -> Any:
        """Enqueue this task to run at a specific time.

        Returns the created Job.
        """
        return self._get_fairchild().enqueue(
            task=self,
            args=kwargs,
            scheduled_at=at,
        )

    def enqueue_in(
        self,
        *,
        seconds: int = 0,
        minutes: int = 0,
        hours: int = 0,
        days: int = 0,
        **kwargs: Any,
    ) -> Any:
        """Enqueue this task to run after a delay.

        Returns the created Job.
        """
        delay = timedelta(seconds=seconds, minutes=minutes, hours=hours, days=days)
        # Use an aware UTC datetime so this matches Job.scheduled_at
        # (datetime.utcnow() is naive and deprecated)
        scheduled_at = datetime.now(timezone.utc) + delay
        return self.enqueue_at(at=scheduled_at, **kwargs)

    def __repr__(self) -> str:
        return f"Task({self.name!r}, queue={self.queue!r})"
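A minimal sketch of defining and calling a task outside a worker. It assumes fairchild.context.is_inside_task() returns False when no worker is running (as the docstring above describes); enqueue() is omitted because it needs a configured Fairchild instance and a database, and the function below is made up for illustration:

from fairchild.task import task, get_task
from fairchild.record import Record

@task(queue="reports", priority=3, tags=["demo"])
def double(item_id: int):
    return Record({"result": item_id * 2})

# The registry key is derived from the function's module and qualname.
print(double.name)                      # e.g. "__main__.double"
print(get_task(double.name) is double)  # True

# Outside a worker there is no current job, so the call runs the function
# directly and unwraps the Record to its value.
print(double(21))                       # {'result': 42}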