stabilize 0.9.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- stabilize/__init__.py +29 -0
- stabilize/cli.py +1193 -0
- stabilize/context/__init__.py +7 -0
- stabilize/context/stage_context.py +170 -0
- stabilize/dag/__init__.py +15 -0
- stabilize/dag/graph.py +215 -0
- stabilize/dag/topological.py +199 -0
- stabilize/examples/__init__.py +1 -0
- stabilize/examples/docker-example.py +759 -0
- stabilize/examples/golden-standard-expected-result.txt +1 -0
- stabilize/examples/golden-standard.py +488 -0
- stabilize/examples/http-example.py +606 -0
- stabilize/examples/llama-example.py +662 -0
- stabilize/examples/python-example.py +731 -0
- stabilize/examples/shell-example.py +399 -0
- stabilize/examples/ssh-example.py +603 -0
- stabilize/handlers/__init__.py +53 -0
- stabilize/handlers/base.py +226 -0
- stabilize/handlers/complete_stage.py +209 -0
- stabilize/handlers/complete_task.py +75 -0
- stabilize/handlers/complete_workflow.py +150 -0
- stabilize/handlers/run_task.py +369 -0
- stabilize/handlers/start_stage.py +262 -0
- stabilize/handlers/start_task.py +74 -0
- stabilize/handlers/start_workflow.py +136 -0
- stabilize/launcher.py +307 -0
- stabilize/migrations/01KDQ4N9QPJ6Q4MCV3V9GHWPV4_initial_schema.sql +97 -0
- stabilize/migrations/01KDRK3TXW4R2GERC1WBCQYJGG_rag_embeddings.sql +25 -0
- stabilize/migrations/__init__.py +1 -0
- stabilize/models/__init__.py +15 -0
- stabilize/models/stage.py +389 -0
- stabilize/models/status.py +146 -0
- stabilize/models/task.py +125 -0
- stabilize/models/workflow.py +317 -0
- stabilize/orchestrator.py +113 -0
- stabilize/persistence/__init__.py +28 -0
- stabilize/persistence/connection.py +185 -0
- stabilize/persistence/factory.py +136 -0
- stabilize/persistence/memory.py +214 -0
- stabilize/persistence/postgres.py +655 -0
- stabilize/persistence/sqlite.py +674 -0
- stabilize/persistence/store.py +235 -0
- stabilize/queue/__init__.py +59 -0
- stabilize/queue/messages.py +377 -0
- stabilize/queue/processor.py +312 -0
- stabilize/queue/queue.py +526 -0
- stabilize/queue/sqlite_queue.py +354 -0
- stabilize/rag/__init__.py +19 -0
- stabilize/rag/assistant.py +459 -0
- stabilize/rag/cache.py +294 -0
- stabilize/stages/__init__.py +11 -0
- stabilize/stages/builder.py +253 -0
- stabilize/tasks/__init__.py +19 -0
- stabilize/tasks/interface.py +335 -0
- stabilize/tasks/registry.py +255 -0
- stabilize/tasks/result.py +283 -0
- stabilize-0.9.2.dist-info/METADATA +301 -0
- stabilize-0.9.2.dist-info/RECORD +61 -0
- stabilize-0.9.2.dist-info/WHEEL +4 -0
- stabilize-0.9.2.dist-info/entry_points.txt +2 -0
- stabilize-0.9.2.dist-info/licenses/LICENSE +201 -0
stabilize/rag/cache.py
ADDED
@@ -0,0 +1,294 @@

"""Embedding cache implementations for RAG."""

from __future__ import annotations

import json
import os
import sqlite3
from abc import ABC, abstractmethod
from dataclasses import dataclass
from pathlib import Path
from typing import TYPE_CHECKING, Any

if TYPE_CHECKING:
    import psycopg


@dataclass
class CachedEmbedding:
    """A cached embedding with its metadata."""

    doc_id: str
    content: str
    embedding: list[float]
    embedding_model: str
    chunk_index: int


class EmbeddingCache(ABC):
    """Abstract interface for embedding cache."""

    @abstractmethod
    def store(self, embeddings: list[CachedEmbedding]) -> None:
        """Store embeddings in cache."""
        ...

    @abstractmethod
    def load(self, embedding_model: str) -> list[CachedEmbedding]:
        """Load embeddings from cache for a specific model."""
        ...

    @abstractmethod
    def is_initialized(self, embedding_model: str) -> bool:
        """Check if cache has embeddings for the given model."""
        ...

    @abstractmethod
    def clear(self) -> None:
        """Clear all cached embeddings."""
        ...


class SqliteEmbeddingCache(EmbeddingCache):
    """Store embeddings in SQLite database.

    Default location: ~/.stabilize/embeddings.db
    """

    def __init__(self, db_path: str | None = None):
        if db_path is None:
            cache_dir = Path.home() / ".stabilize"
            cache_dir.mkdir(parents=True, exist_ok=True)
            db_path = str(cache_dir / "embeddings.db")

        self.db_path = db_path
        self._init_schema()

    def _get_connection(self) -> sqlite3.Connection:
        """Get a database connection."""
        conn = sqlite3.connect(self.db_path)
        conn.row_factory = sqlite3.Row
        return conn

    def _init_schema(self) -> None:
        """Initialize the database schema."""
        with self._get_connection() as conn:
            conn.execute(
                """
                CREATE TABLE IF NOT EXISTS rag_embeddings (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    doc_id TEXT NOT NULL,
                    content TEXT NOT NULL,
                    embedding TEXT NOT NULL,
                    embedding_model TEXT NOT NULL,
                    chunk_index INTEGER DEFAULT 0,
                    created_at TEXT DEFAULT (datetime('now')),
                    UNIQUE(doc_id, chunk_index, embedding_model)
                )
                """
            )
            conn.execute(
                """
                CREATE INDEX IF NOT EXISTS idx_rag_embeddings_model
                ON rag_embeddings(embedding_model)
                """
            )
            conn.commit()

    def store(self, embeddings: list[CachedEmbedding]) -> None:
        """Store embeddings in SQLite."""
        if not embeddings:
            return

        with self._get_connection() as conn:
            # Clear existing embeddings for this model
            model = embeddings[0].embedding_model
            conn.execute(
                "DELETE FROM rag_embeddings WHERE embedding_model = ?",
                (model,),
            )

            # Insert new embeddings
            for emb in embeddings:
                conn.execute(
                    """
                    INSERT INTO rag_embeddings
                        (doc_id, content, embedding, embedding_model, chunk_index)
                    VALUES (?, ?, ?, ?, ?)
                    """,
                    (
                        emb.doc_id,
                        emb.content,
                        json.dumps(emb.embedding),
                        emb.embedding_model,
                        emb.chunk_index,
                    ),
                )
            conn.commit()

    def load(self, embedding_model: str) -> list[CachedEmbedding]:
        """Load embeddings from SQLite."""
        with self._get_connection() as conn:
            cursor = conn.execute(
                """
                SELECT doc_id, content, embedding, embedding_model, chunk_index
                FROM rag_embeddings
                WHERE embedding_model = ?
                ORDER BY doc_id, chunk_index
                """,
                (embedding_model,),
            )
            results = []
            for row in cursor:
                results.append(
                    CachedEmbedding(
                        doc_id=row["doc_id"],
                        content=row["content"],
                        embedding=json.loads(row["embedding"]),
                        embedding_model=row["embedding_model"],
                        chunk_index=row["chunk_index"],
                    )
                )
            return results

    def is_initialized(self, embedding_model: str) -> bool:
        """Check if cache has embeddings for the given model."""
        with self._get_connection() as conn:
            cursor = conn.execute(
                "SELECT COUNT(*) FROM rag_embeddings WHERE embedding_model = ?",
                (embedding_model,),
            )
            row = cursor.fetchone()
            count: int = row[0] if row else 0
            return count > 0

    def clear(self) -> None:
        """Clear all cached embeddings."""
        with self._get_connection() as conn:
            conn.execute("DELETE FROM rag_embeddings")
            conn.commit()


class PostgresEmbeddingCache(EmbeddingCache):
    """Store embeddings in PostgreSQL database.

    Uses the rag_embeddings table created by migration.
    """

    def __init__(self, connection_string: str):
        self.connection_string = connection_string
        self._conn: psycopg.Connection[tuple[Any, ...]] | None = None

    def _get_connection(self) -> psycopg.Connection[tuple[Any, ...]]:
        """Get a database connection."""
        try:
            import psycopg as psycopg_module
        except ImportError as e:
            raise ImportError("PostgreSQL support requires: pip install stabilize[postgres]") from e

        if self._conn is None or self._conn.closed:
            self._conn = psycopg_module.connect(self.connection_string)
        return self._conn

    def store(self, embeddings: list[CachedEmbedding]) -> None:
        """Store embeddings in PostgreSQL."""
        if not embeddings:
            return

        conn = self._get_connection()
        with conn.cursor() as cur:
            # Clear existing embeddings for this model
            model = embeddings[0].embedding_model
            cur.execute(
                "DELETE FROM rag_embeddings WHERE embedding_model = %s",
                (model,),
            )

            # Insert new embeddings
            for emb in embeddings:
                cur.execute(
                    """
                    INSERT INTO rag_embeddings
                        (doc_id, content, embedding, embedding_model, chunk_index)
                    VALUES (%s, %s, %s, %s, %s)
                    """,
                    (
                        emb.doc_id,
                        emb.content,
                        json.dumps(emb.embedding),
                        emb.embedding_model,
                        emb.chunk_index,
                    ),
                )
        conn.commit()

    def load(self, embedding_model: str) -> list[CachedEmbedding]:
        """Load embeddings from PostgreSQL."""
        conn = self._get_connection()
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT doc_id, content, embedding, embedding_model, chunk_index
                FROM rag_embeddings
                WHERE embedding_model = %s
                ORDER BY doc_id, chunk_index
                """,
                (embedding_model,),
            )
            results = []
            for row in cur.fetchall():
                embedding_data = row[2]
                if isinstance(embedding_data, str):
                    embedding_data = json.loads(embedding_data)
                results.append(
                    CachedEmbedding(
                        doc_id=row[0],
                        content=row[1],
                        embedding=embedding_data,
                        embedding_model=row[3],
                        chunk_index=row[4],
                    )
                )
            return results

    def is_initialized(self, embedding_model: str) -> bool:
        """Check if cache has embeddings for the given model."""
        conn = self._get_connection()
        with conn.cursor() as cur:
            cur.execute(
                "SELECT COUNT(*) FROM rag_embeddings WHERE embedding_model = %s",
                (embedding_model,),
            )
            row = cur.fetchone()
            count: int = row[0] if row else 0
            return count > 0

    def clear(self) -> None:
        """Clear all cached embeddings."""
        conn = self._get_connection()
        with conn.cursor() as cur:
            cur.execute("DELETE FROM rag_embeddings")
        conn.commit()


def get_cache(db_url: str | None = None) -> EmbeddingCache:
    """Get appropriate cache based on configuration.

    Priority:
    1. Explicit db_url parameter
    2. MG_DATABASE_URL environment variable (if postgres)
    3. Default SQLite in ~/.stabilize/embeddings.db
    """
    if db_url:
        if db_url.startswith("postgres"):
            return PostgresEmbeddingCache(db_url)
        else:
            return SqliteEmbeddingCache(db_url)

    # Check environment
    env_url = os.environ.get("MG_DATABASE_URL")
    if env_url and env_url.startswith("postgres"):
        return PostgresEmbeddingCache(env_url)

    # Default: SQLite
    return SqliteEmbeddingCache()
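For orientation, a minimal usage sketch of the cache API above. This is not part of the package; the model name, doc_id, and embedding vector are hypothetical placeholders for whatever the RAG assistant produces.

    from stabilize.rag.cache import CachedEmbedding, get_cache

    # Resolves to SQLite at ~/.stabilize/embeddings.db unless a postgres URL
    # is passed in or MG_DATABASE_URL points at PostgreSQL.
    cache = get_cache()

    if not cache.is_initialized("all-MiniLM-L6-v2"):  # hypothetical model name
        # store() replaces any existing rows for this embedding_model
        cache.store(
            [
                CachedEmbedding(
                    doc_id="docs/quickstart.md",        # hypothetical document id
                    content="How to run a workflow",
                    embedding=[0.12, -0.34, 0.56],      # placeholder vector
                    embedding_model="all-MiniLM-L6-v2",
                    chunk_index=0,
                )
            ]
        )

    for emb in cache.load("all-MiniLM-L6-v2"):
        print(emb.doc_id, emb.chunk_index, len(emb.embedding))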
stabilize/stages/builder.py
ADDED
@@ -0,0 +1,253 @@

"""
Stage definition builders.

Stage definition builders are responsible for:
1. Building tasks for a stage
2. Building before stages (setup, validation)
3. Building after stages (cleanup, notification)
4. Building on-failure stages (rollback, alerts)
"""

from __future__ import annotations

from abc import ABC
from typing import TYPE_CHECKING

from stabilize.dag.graph import StageGraphBuilder
from stabilize.models.task import TaskExecution

if TYPE_CHECKING:
    from stabilize.models.stage import StageExecution


class StageDefinitionBuilder(ABC):
    """
    Abstract base class for stage definition builders.

    Each stage type has a corresponding builder that defines:
    - What tasks the stage should execute
    - What synthetic stages run before/after
    - What happens on failure

    Example:
        class DeployStageBuilder(StageDefinitionBuilder):
            @property
            def type(self) -> str:
                return "deploy"

            def build_tasks(self, stage: StageExecution) -> list[TaskExecution]:
                return [
                    TaskExecution.create(
                        name="Determine Target Server Group",
                        implementing_class="DetermineTargetServerGroupTask",
                        stage_start=True,
                    ),
                    TaskExecution.create(
                        name="Deploy Server Group",
                        implementing_class="DeployServerGroupTask",
                        stage_end=True,
                    ),
                ]

            def before_stages(
                self,
                stage: StageExecution,
                graph: StageGraphBuilder,
            ) -> None:
                # Add validation stage before deploy
                validation = StageExecution.create_synthetic(
                    type="validation",
                    name="Validate Deploy Configuration",
                    parent=stage,
                    owner=SyntheticStageOwner.STAGE_BEFORE,
                )
                graph.add(validation)
    """

    @property
    def type(self) -> str:
        """
        Get the stage type this builder handles.

        Defaults to lowercase class name without "StageBuilder" suffix.
        """
        name = self.__class__.__name__
        if name.endswith("StageBuilder"):
            name = name[:-12]
        return name.lower()

    @property
    def aliases(self) -> list[str]:
        """Get alternative names for this stage type."""
        return []

    def build_tasks(self, stage: StageExecution) -> list[TaskExecution]:
        """
        Build the tasks for this stage.

        Override to define what tasks the stage should execute.

        Args:
            stage: The stage being built

        Returns:
            List of tasks to execute
        """
        return []

    def before_stages(
        self,
        stage: StageExecution,
        graph: StageGraphBuilder,
    ) -> None:
        """
        Build synthetic stages that run before this stage's tasks.

        Override to add setup, validation, or other pre-requisite stages.

        Args:
            stage: The parent stage
            graph: Builder for adding synthetic stages
        """
        pass

    def after_stages(
        self,
        stage: StageExecution,
        graph: StageGraphBuilder,
    ) -> None:
        """
        Build synthetic stages that run after this stage completes.

        Override to add cleanup, notification, or other post-processing stages.

        Args:
            stage: The parent stage
            graph: Builder for adding synthetic stages
        """
        pass

    def on_failure_stages(
        self,
        stage: StageExecution,
        graph: StageGraphBuilder,
    ) -> None:
        """
        Build synthetic stages that run when this stage fails.

        Override to add rollback, alerting, or other failure-handling stages.

        Args:
            stage: The failed stage
            graph: Builder for adding synthetic stages
        """
        pass

    def add_context_flags(self, stage: StageExecution) -> None:
        """
        Add any required context flags to the stage.

        Called before task execution to set up stage context.

        Args:
            stage: The stage to modify
        """
        pass


class NoOpStageBuilder(StageDefinitionBuilder):
    """A stage builder that does nothing."""

    @property
    def type(self) -> str:
        return "noop"


class WaitStageBuilder(StageDefinitionBuilder):
    """Builder for wait stages."""

    @property
    def type(self) -> str:
        return "wait"

    def build_tasks(self, stage: StageExecution) -> list[TaskExecution]:
        return [
            TaskExecution.create(
                name="Wait",
                implementing_class="WaitTask",
                stage_start=True,
                stage_end=True,
            ),
        ]


class StageDefinitionBuilderFactory:
    """Factory for resolving stage definition builders."""

    def __init__(self) -> None:
        self._builders: dict[str, StageDefinitionBuilder] = {}
        self._default_builder = NoOpStageBuilder()

        # Register built-in builders
        self.register(NoOpStageBuilder())
        self.register(WaitStageBuilder())

    def register(self, builder: StageDefinitionBuilder) -> None:
        """
        Register a stage definition builder.

        Args:
            builder: The builder to register
        """
        self._builders[builder.type] = builder
        for alias in builder.aliases:
            self._builders[alias] = builder

    def register_class(
        self,
        builder_class: type[StageDefinitionBuilder],
    ) -> None:
        """Register a builder by class."""
        self.register(builder_class())

    def get(self, stage_type: str) -> StageDefinitionBuilder:
        """
        Get the builder for a stage type.

        Args:
            stage_type: The stage type

        Returns:
            The builder (or default if not found)
        """
        return self._builders.get(stage_type, self._default_builder)

    def has(self, stage_type: str) -> bool:
        """Check if a builder is registered for a stage type."""
        return stage_type in self._builders

    def list_types(self) -> list[str]:
        """Get all registered stage types."""
        return list(self._builders.keys())


# Global factory instance
_default_factory: StageDefinitionBuilderFactory | None = None


def get_default_factory() -> StageDefinitionBuilderFactory:
    """Get the default global stage definition builder factory."""
    global _default_factory
    if _default_factory is None:
        _default_factory = StageDefinitionBuilderFactory()
    return _default_factory


def register_builder(builder: StageDefinitionBuilder) -> None:
    """Register a builder in the default factory."""
    get_default_factory().register(builder)


def get_builder(stage_type: str) -> StageDefinitionBuilder:
    """Get a builder from the default factory."""
    return get_default_factory().get(stage_type)
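A minimal sketch of how a custom builder might plug into the factory above. This is illustrative only; BakeStageBuilder, BakeImageTask, and the "bake_image" alias are hypothetical names, not part of the package.

    from stabilize.models.task import TaskExecution
    from stabilize.stages.builder import (
        StageDefinitionBuilder,
        get_builder,
        register_builder,
    )


    class BakeStageBuilder(StageDefinitionBuilder):
        """Hypothetical builder; `type` defaults to "bake" from the class name."""

        @property
        def aliases(self) -> list[str]:
            return ["bake_image"]  # also resolvable under this alias

        def build_tasks(self, stage) -> list[TaskExecution]:
            return [
                TaskExecution.create(
                    name="Bake Image",
                    implementing_class="BakeImageTask",  # hypothetical task class
                    stage_start=True,
                    stage_end=True,
                ),
            ]


    # Registers in the global default factory under "bake" and "bake_image".
    register_builder(BakeStageBuilder())
    assert get_builder("bake_image").type == "bake"
    assert get_builder("unknown").type == "noop"  # falls back to NoOpStageBuilder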
stabilize/tasks/__init__.py
ADDED
@@ -0,0 +1,19 @@

"""Task system for pipeline execution."""

from stabilize.tasks.interface import (
    CallableTask,
    RetryableTask,
    SkippableTask,
    Task,
)
from stabilize.tasks.registry import TaskRegistry
from stabilize.tasks.result import TaskResult

__all__ = [
    "TaskResult",
    "Task",
    "RetryableTask",
    "CallableTask",
    "SkippableTask",
    "TaskRegistry",
]