stabilize-0.9.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. stabilize/__init__.py +29 -0
  2. stabilize/cli.py +1193 -0
  3. stabilize/context/__init__.py +7 -0
  4. stabilize/context/stage_context.py +170 -0
  5. stabilize/dag/__init__.py +15 -0
  6. stabilize/dag/graph.py +215 -0
  7. stabilize/dag/topological.py +199 -0
  8. stabilize/examples/__init__.py +1 -0
  9. stabilize/examples/docker-example.py +759 -0
  10. stabilize/examples/golden-standard-expected-result.txt +1 -0
  11. stabilize/examples/golden-standard.py +488 -0
  12. stabilize/examples/http-example.py +606 -0
  13. stabilize/examples/llama-example.py +662 -0
  14. stabilize/examples/python-example.py +731 -0
  15. stabilize/examples/shell-example.py +399 -0
  16. stabilize/examples/ssh-example.py +603 -0
  17. stabilize/handlers/__init__.py +53 -0
  18. stabilize/handlers/base.py +226 -0
  19. stabilize/handlers/complete_stage.py +209 -0
  20. stabilize/handlers/complete_task.py +75 -0
  21. stabilize/handlers/complete_workflow.py +150 -0
  22. stabilize/handlers/run_task.py +369 -0
  23. stabilize/handlers/start_stage.py +262 -0
  24. stabilize/handlers/start_task.py +74 -0
  25. stabilize/handlers/start_workflow.py +136 -0
  26. stabilize/launcher.py +307 -0
  27. stabilize/migrations/01KDQ4N9QPJ6Q4MCV3V9GHWPV4_initial_schema.sql +97 -0
  28. stabilize/migrations/01KDRK3TXW4R2GERC1WBCQYJGG_rag_embeddings.sql +25 -0
  29. stabilize/migrations/__init__.py +1 -0
  30. stabilize/models/__init__.py +15 -0
  31. stabilize/models/stage.py +389 -0
  32. stabilize/models/status.py +146 -0
  33. stabilize/models/task.py +125 -0
  34. stabilize/models/workflow.py +317 -0
  35. stabilize/orchestrator.py +113 -0
  36. stabilize/persistence/__init__.py +28 -0
  37. stabilize/persistence/connection.py +185 -0
  38. stabilize/persistence/factory.py +136 -0
  39. stabilize/persistence/memory.py +214 -0
  40. stabilize/persistence/postgres.py +655 -0
  41. stabilize/persistence/sqlite.py +674 -0
  42. stabilize/persistence/store.py +235 -0
  43. stabilize/queue/__init__.py +59 -0
  44. stabilize/queue/messages.py +377 -0
  45. stabilize/queue/processor.py +312 -0
  46. stabilize/queue/queue.py +526 -0
  47. stabilize/queue/sqlite_queue.py +354 -0
  48. stabilize/rag/__init__.py +19 -0
  49. stabilize/rag/assistant.py +459 -0
  50. stabilize/rag/cache.py +294 -0
  51. stabilize/stages/__init__.py +11 -0
  52. stabilize/stages/builder.py +253 -0
  53. stabilize/tasks/__init__.py +19 -0
  54. stabilize/tasks/interface.py +335 -0
  55. stabilize/tasks/registry.py +255 -0
  56. stabilize/tasks/result.py +283 -0
  57. stabilize-0.9.2.dist-info/METADATA +301 -0
  58. stabilize-0.9.2.dist-info/RECORD +61 -0
  59. stabilize-0.9.2.dist-info/WHEEL +4 -0
  60. stabilize-0.9.2.dist-info/entry_points.txt +2 -0
  61. stabilize-0.9.2.dist-info/licenses/LICENSE +201 -0
stabilize/persistence/factory.py
@@ -0,0 +1,136 @@
+"""
+Factory functions for creating database backends.
+
+Automatically selects PostgreSQL or SQLite based on connection string.
+"""
+
+from __future__ import annotations
+
+from datetime import timedelta
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from stabilize.persistence.store import WorkflowStore
+    from stabilize.queue.queue import Queue
+
+
+def detect_backend(connection_string: str) -> str:
+    """
+    Detect the database backend type from a connection string.
+
+    Args:
+        connection_string: Database connection URL
+
+    Returns:
+        "sqlite" or "postgresql"
+
+    Examples:
+        >>> detect_backend("sqlite:///./db.sqlite")
+        'sqlite'
+        >>> detect_backend("sqlite:///:memory:")
+        'sqlite'
+        >>> detect_backend("postgresql+psycopg://user:pass@localhost/db")
+        'postgresql'
+    """
+    if connection_string.startswith("sqlite"):
+        return "sqlite"
+    return "postgresql"
+
+
+def create_repository(
+    connection_string: str,
+    create_tables: bool = False,
+) -> WorkflowStore:
+    """
+    Create an execution repository based on the connection string.
+
+    Automatically detects whether to use PostgreSQL or SQLite based
+    on the connection string prefix.
+
+    Args:
+        connection_string: Database connection URL
+        create_tables: Whether to create tables if they don't exist
+
+    Returns:
+        WorkflowStore: PostgreSQL or SQLite repository instance
+
+    Examples:
+        # PostgreSQL
+        repo = create_repository(
+            "postgresql+psycopg://user:pass@localhost/stabilize",
+            create_tables=True
+        )
+
+        # SQLite file-based
+        repo = create_repository(
+            "sqlite:///./stabilize.db",
+            create_tables=True
+        )
+
+        # SQLite in-memory (for testing)
+        repo = create_repository(
+            "sqlite:///:memory:",
+            create_tables=True
+        )
+    """
+    backend = detect_backend(connection_string)
+
+    if backend == "sqlite":
+        from stabilize.persistence.sqlite import SqliteWorkflowStore
+
+        return SqliteWorkflowStore(connection_string, create_tables)
+    else:
+        from stabilize.persistence.postgres import PostgresWorkflowStore
+
+        return PostgresWorkflowStore(connection_string, create_tables)
+
+
+def create_queue(
+    connection_string: str,
+    table_name: str = "queue_messages",
+    lock_duration: timedelta | None = None,
+    max_attempts: int = 10,
+) -> Queue:
+    """
+    Create a message queue based on the connection string.
+
+    Automatically detects whether to use PostgreSQL or SQLite based
+    on the connection string prefix.
+
+    Args:
+        connection_string: Database connection URL
+        table_name: Name of the queue table
+        lock_duration: How long to lock messages during processing
+        max_attempts: Maximum retry attempts before dropping message
+
+    Returns:
+        Queue: PostgreSQL or SQLite queue instance
+
+    Examples:
+        # PostgreSQL (uses FOR UPDATE SKIP LOCKED)
+        queue = create_queue("postgresql+psycopg://user:pass@localhost/db")
+
+        # SQLite (uses optimistic locking)
+        queue = create_queue("sqlite:///./stabilize.db")
+    """
+    backend = detect_backend(connection_string)
+    lock_duration = lock_duration or timedelta(minutes=5)
+
+    if backend == "sqlite":
+        from stabilize.queue.sqlite_queue import SqliteQueue
+
+        return SqliteQueue(
+            connection_string,
+            table_name=table_name,
+            lock_duration=lock_duration,
+            max_attempts=max_attempts,
+        )
+    else:
+        from stabilize.queue.queue import PostgresQueue
+
+        return PostgresQueue(
+            connection_string,
+            table_name=table_name,
+            lock_duration=lock_duration,
+            max_attempts=max_attempts,
+        )
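For orientation, here is a minimal usage sketch (not part of the package) showing how the two factories from factory.py above might be wired together at startup. Only create_repository, create_queue, and their parameters come from the code in this diff; the variable names and the choice of connection string are illustrative assumptions.

from datetime import timedelta

from stabilize.persistence.factory import create_queue, create_repository

# Hypothetical configuration value: use a postgresql+psycopg:// URL for a
# PostgreSQL deployment, or a sqlite:/// URL for local and test runs.
connection_string = "sqlite:///./stabilize.db"

# Both helpers inspect the URL prefix via detect_backend(), so the store and
# the queue always land on the same backend.
store = create_repository(connection_string, create_tables=True)
queue = create_queue(
    connection_string,
    table_name="queue_messages",
    lock_duration=timedelta(minutes=5),  # same value the factory defaults to when None is passed
    max_attempts=10,
)
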
stabilize/persistence/memory.py
@@ -0,0 +1,214 @@
+"""
+In-memory execution repository.
+
+Useful for testing and development.
+"""
+
+from __future__ import annotations
+
+import copy
+import threading
+import time
+from collections.abc import Iterator
+
+from stabilize.models.stage import StageExecution
+from stabilize.models.status import WorkflowStatus
+from stabilize.models.workflow import PausedDetails, Workflow
+from stabilize.persistence.store import (
+    WorkflowCriteria,
+    WorkflowNotFoundError,
+    WorkflowStore,
+)
+
+
+class InMemoryWorkflowStore(WorkflowStore):
+    """
+    In-memory implementation of WorkflowStore.
+
+    Thread-safe storage for testing and single-process execution.
+    """
+
+    def __init__(self) -> None:
+        self._executions: dict[str, Workflow] = {}
+        self._lock = threading.Lock()
+
+    def store(self, execution: Workflow) -> None:
+        """Store a complete execution."""
+        with self._lock:
+            # Deep copy to prevent external modifications
+            self._executions[execution.id] = copy.deepcopy(execution)
+
+    def retrieve(self, execution_id: str) -> Workflow:
+        """Retrieve an execution by ID."""
+        with self._lock:
+            if execution_id not in self._executions:
+                raise WorkflowNotFoundError(execution_id)
+            # Return a deep copy to prevent external modifications
+            return copy.deepcopy(self._executions[execution_id])
+
+    def update_status(self, execution: Workflow) -> None:
+        """Update execution status."""
+        with self._lock:
+            if execution.id not in self._executions:
+                raise WorkflowNotFoundError(execution.id)
+
+            stored = self._executions[execution.id]
+            stored.status = execution.status
+            stored.start_time = execution.start_time
+            stored.end_time = execution.end_time
+            stored.is_canceled = execution.is_canceled
+            stored.canceled_by = execution.canceled_by
+            stored.cancellation_reason = execution.cancellation_reason
+            stored.paused = execution.paused
+
+    def delete(self, execution_id: str) -> None:
+        """Delete an execution."""
+        with self._lock:
+            if execution_id in self._executions:
+                del self._executions[execution_id]
+
+    def store_stage(self, stage: StageExecution) -> None:
+        """Store or update a stage."""
+        with self._lock:
+            execution_id = stage.execution.id
+            if execution_id not in self._executions:
+                raise WorkflowNotFoundError(execution_id)
+
+            execution = self._executions[execution_id]
+
+            # Find and update or add
+            for i, s in enumerate(execution.stages):
+                if s.id == stage.id:
+                    # Update existing stage
+                    execution.stages[i] = copy.deepcopy(stage)
+                    execution.stages[i]._execution = execution
+                    return
+
+            # Add new stage
+            new_stage = copy.deepcopy(stage)
+            new_stage._execution = execution
+            execution.stages.append(new_stage)
+
+    def add_stage(self, stage: StageExecution) -> None:
+        """Add a new stage."""
+        self.store_stage(stage)
+
+    def remove_stage(
+        self,
+        execution: Workflow,
+        stage_id: str,
+    ) -> None:
+        """Remove a stage."""
+        with self._lock:
+            if execution.id not in self._executions:
+                raise WorkflowNotFoundError(execution.id)
+
+            stored = self._executions[execution.id]
+            stored.stages = [s for s in stored.stages if s.id != stage_id]
+
+    def retrieve_by_pipeline_config_id(
+        self,
+        pipeline_config_id: str,
+        criteria: WorkflowCriteria | None = None,
+    ) -> Iterator[Workflow]:
+        """Retrieve executions by pipeline config ID."""
+        with self._lock:
+            executions = [
+                copy.deepcopy(e) for e in self._executions.values() if e.pipeline_config_id == pipeline_config_id
+            ]
+
+        # Apply criteria
+        executions = self._apply_criteria(executions, criteria)
+
+        yield from executions
+
+    def retrieve_by_application(
+        self,
+        application: str,
+        criteria: WorkflowCriteria | None = None,
+    ) -> Iterator[Workflow]:
+        """Retrieve executions by application."""
+        with self._lock:
+            executions = [copy.deepcopy(e) for e in self._executions.values() if e.application == application]
+
+        # Apply criteria
+        executions = self._apply_criteria(executions, criteria)
+
+        yield from executions
+
+    def _apply_criteria(
+        self,
+        executions: list[Workflow],
+        criteria: WorkflowCriteria | None,
+    ) -> list[Workflow]:
+        """Apply query criteria to executions."""
+        if criteria is None:
+            return executions
+
+        # Filter by status
+        if criteria.statuses:
+            executions = [e for e in executions if e.status in criteria.statuses]
+
+        # Filter by start time
+        if criteria.start_time_before:
+            executions = [e for e in executions if e.start_time and e.start_time < criteria.start_time_before]
+
+        if criteria.start_time_after:
+            executions = [e for e in executions if e.start_time and e.start_time > criteria.start_time_after]
+
+        # Sort by start time (newest first) and limit
+        executions.sort(key=lambda e: e.start_time or 0, reverse=True)
+        return executions[: criteria.page_size]
+
+    def pause(self, execution_id: str, paused_by: str) -> None:
+        """Pause an execution."""
+        with self._lock:
+            if execution_id not in self._executions:
+                raise WorkflowNotFoundError(execution_id)
+
+            execution = self._executions[execution_id]
+            execution.paused = PausedDetails(
+                paused_by=paused_by,
+                pause_time=int(time.time() * 1000),
+            )
+            execution.status = WorkflowStatus.PAUSED
+
+    def resume(self, execution_id: str) -> None:
+        """Resume a paused execution."""
+        with self._lock:
+            if execution_id not in self._executions:
+                raise WorkflowNotFoundError(execution_id)
+
+            execution = self._executions[execution_id]
+            if execution.paused:
+                current_time = int(time.time() * 1000)
+                execution.paused.resume_time = current_time
+                if execution.paused.pause_time:
+                    execution.paused.paused_ms = current_time - execution.paused.pause_time
+            execution.status = WorkflowStatus.RUNNING
+
+    def cancel(
+        self,
+        execution_id: str,
+        canceled_by: str,
+        reason: str,
+    ) -> None:
+        """Cancel an execution."""
+        with self._lock:
+            if execution_id not in self._executions:
+                raise WorkflowNotFoundError(execution_id)
+
+            execution = self._executions[execution_id]
+            execution.is_canceled = True
+            execution.canceled_by = canceled_by
+            execution.cancellation_reason = reason
+
+    def clear(self) -> None:
+        """Clear all executions."""
+        with self._lock:
+            self._executions.clear()
+
+    def count(self) -> int:
+        """Get total number of executions."""
+        with self._lock:
+            return len(self._executions)
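
As a complement, a short test-style sketch of InMemoryWorkflowStore from memory.py above. The store, retrieve, pause, resume, cancel, count, and clear calls are taken directly from the class; how a Workflow instance is constructed is not shown in this diff, so make_workflow() below is a hypothetical test helper standing in for whatever stabilize/models/workflow.py exposes.

from stabilize.persistence.memory import InMemoryWorkflowStore

store = InMemoryWorkflowStore()
workflow = make_workflow()  # hypothetical helper; real construction lives in stabilize.models.workflow

store.store(workflow)       # stored as a deep copy
assert store.count() == 1

retrieved = store.retrieve(workflow.id)  # also a deep copy, so mutating it leaves the store untouched

store.pause(workflow.id, paused_by="tester")
store.resume(workflow.id)
store.cancel(workflow.id, canceled_by="tester", reason="shutting down")

store.clear()
assert store.count() == 0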