dbos 1.13.0a3__py3-none-any.whl → 1.13.0a6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dbos/_app_db.py +215 -80
- dbos/_client.py +30 -15
- dbos/_core.py +4 -8
- dbos/_dbos.py +4 -12
- dbos/_dbos_config.py +118 -50
- dbos/_migration.py +89 -0
- dbos/_sys_db.py +121 -169
- dbos/_sys_db_postgres.py +173 -0
- dbos/_sys_db_sqlite.py +182 -0
- dbos/_utils.py +10 -1
- dbos/cli/cli.py +203 -94
- dbos/cli/migration.py +2 -2
- dbos/dbos-config.schema.json +4 -0
- {dbos-1.13.0a3.dist-info → dbos-1.13.0a6.dist-info}/METADATA +1 -1
- {dbos-1.13.0a3.dist-info → dbos-1.13.0a6.dist-info}/RECORD +18 -17
- dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -67
- {dbos-1.13.0a3.dist-info → dbos-1.13.0a6.dist-info}/WHEEL +0 -0
- {dbos-1.13.0a3.dist-info → dbos-1.13.0a6.dist-info}/entry_points.txt +0 -0
- {dbos-1.13.0a3.dist-info → dbos-1.13.0a6.dist-info}/licenses/LICENSE +0 -0
dbos/_sys_db_sqlite.py
ADDED
|
@@ -0,0 +1,182 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import time
|
|
3
|
+
from typing import Any, Dict, Optional, Tuple
|
|
4
|
+
|
|
5
|
+
import sqlalchemy as sa
|
|
6
|
+
from sqlalchemy.exc import DBAPIError
|
|
7
|
+
|
|
8
|
+
from dbos._migration import sqlite_migrations
|
|
9
|
+
from dbos._schemas.system_database import SystemSchema
|
|
10
|
+
|
|
11
|
+
from ._logger import dbos_logger
|
|
12
|
+
from ._sys_db import SystemDatabase
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class SQLiteSystemDatabase(SystemDatabase):
    """SQLite-specific implementation of SystemDatabase.

    SQLite has no LISTEN/NOTIFY mechanism, so notifications and workflow
    events are detected by polling the tables once per second
    (see _notification_listener) instead of listening on a channel.
    """

    def _create_engine(
        self, system_database_url: str, engine_kwargs: Dict[str, Any]
    ) -> sa.Engine:
        """Create a SQLite engine for the system database.

        NOTE(review): engine_kwargs is not applied to the SQLite engine —
        presumably because it carries Postgres pool options; confirm this
        is intended.
        """
        # TODO: Make the schema dynamic so this isn't needed
        # SQLite has no schemas/namespaces, so strip the schema qualifier
        # from every system table before the engine is created.
        SystemSchema.workflow_status.schema = None
        SystemSchema.operation_outputs.schema = None
        SystemSchema.notifications.schema = None
        SystemSchema.workflow_events.schema = None
        SystemSchema.streams.schema = None
        return sa.create_engine(system_database_url)

    def run_migrations(self) -> None:
        """Run SQLite-specific migrations.

        Tracks progress in a single-row ``dbos_migrations`` table holding the
        last applied migration version, then applies any pending entries from
        ``sqlite_migrations`` in order, all inside one transaction.
        """
        if self._debug_mode:
            dbos_logger.warning("System database migrations are skipped in debug mode.")
            return

        with self.engine.begin() as conn:
            # Enable foreign keys for SQLite.
            # NOTE(review): in SQLite this pragma is per-connection, so it
            # only affects this migration connection — other pooled
            # connections will not enforce FKs; confirm that is acceptable.
            conn.execute(sa.text("PRAGMA foreign_keys = ON"))

            # Check if migrations table exists
            result = conn.execute(
                sa.text(
                    "SELECT name FROM sqlite_master WHERE type='table' AND name='dbos_migrations'"
                )
            ).fetchone()

            if result is None:
                # First run: create the version-tracking table
                conn.execute(
                    sa.text(
                        "CREATE TABLE dbos_migrations (version INTEGER NOT NULL PRIMARY KEY)"
                    )
                )
                last_applied = 0
            else:
                # Get current migration version (single row, may be absent)
                version_result = conn.execute(
                    sa.text("SELECT version FROM dbos_migrations")
                ).fetchone()
                last_applied = version_result[0] if version_result else 0

            # Apply migrations starting from the next version
            for i, migration_sql in enumerate(sqlite_migrations, 1):
                if i <= last_applied:
                    continue

                # Execute the migration
                dbos_logger.info(
                    f"Applying DBOS SQLite system database schema migration {i}"
                )

                # SQLite only allows one statement at a time, so split by semicolon
                statements = [
                    stmt.strip() for stmt in migration_sql.split(";") if stmt.strip()
                ]
                for statement in statements:
                    conn.execute(sa.text(statement))

                # Update the single row with the new version: insert the row
                # the first time any migration is recorded, update after that.
                if last_applied == 0:
                    conn.execute(
                        sa.text(
                            "INSERT INTO dbos_migrations (version) VALUES (:version)"
                        ),
                        {"version": i},
                    )
                else:
                    conn.execute(
                        sa.text("UPDATE dbos_migrations SET version = :version"),
                        {"version": i},
                    )
                last_applied = i

    def _cleanup_connections(self) -> None:
        # SQLite doesn't require special connection cleanup
        pass

    def _is_unique_constraint_violation(self, dbapi_error: DBAPIError) -> bool:
        """Check if the error is a unique constraint violation in SQLite."""
        # SQLite reports this as an IntegrityError whose message contains
        # the literal text below; there is no error-code attribute to check.
        return "UNIQUE constraint failed" in str(dbapi_error.orig)

    def _is_foreign_key_violation(self, dbapi_error: DBAPIError) -> bool:
        """Check if the error is a foreign key violation in SQLite."""
        return "FOREIGN KEY constraint failed" in str(dbapi_error.orig)

    @staticmethod
    def _reset_system_database(database_url: str) -> None:
        """Reset the SQLite system database by deleting the database file.

        Raises:
            ValueError: if the URL carries no database path.
            OSError: if the file exists but cannot be removed.
        """

        # Parse the SQLite database URL to get the file path
        url = sa.make_url(database_url)
        db_path = url.database

        if db_path is None:
            raise ValueError(f"System database path not found in URL {url}")

        try:
            if os.path.exists(db_path):
                os.remove(db_path)
                dbos_logger.info(f"Deleted SQLite database file: {db_path}")
            else:
                dbos_logger.info(f"SQLite database file does not exist: {db_path}")
        except OSError as e:
            dbos_logger.error(
                f"Error deleting SQLite database file {db_path}: {str(e)}"
            )
            # Bare raise preserves the original traceback without adding
            # an extra frame (was `raise e`).
            raise

    def _notification_listener(self) -> None:
        """Poll for notifications and workflow events in SQLite.

        Runs until self._run_background_processes is cleared. Once per second
        it checks each payload currently waited on in notifications_map and
        workflow_events_map against the corresponding table and wakes the
        waiting condition variable when a matching row exists.
        """

        def split_payload(payload: str) -> Tuple[str, Optional[str]]:
            """Split payload into components (first::second format)."""
            # NOTE(review): if the payload has no "::", the second component
            # is None, and the SQL `topic = :topic` / `key = :key` comparison
            # below never matches NULL — verify payloads always contain "::".
            if "::" in payload:
                parts = payload.split("::", 1)
                return parts[0], parts[1]
            return payload, None

        def signal_condition(condition_map: Any, payload: str) -> None:
            """Signal a condition variable if one is registered for payload."""
            condition = condition_map.get(payload)
            if condition:
                # Use the context manager so the lock is released even if
                # notify_all raises (was manual acquire()/release()).
                with condition:
                    condition.notify_all()
                dbos_logger.debug(f"Signaled condition for {payload}")

        while self._run_background_processes:
            try:
                # Poll every second
                time.sleep(1)

                # Check all payloads in the notifications_map
                for payload in list(self.notifications_map._dict.keys()):
                    dest_uuid, topic = split_payload(payload)
                    with self.engine.begin() as conn:
                        result = conn.execute(
                            sa.text(
                                "SELECT 1 FROM notifications WHERE destination_uuid = :dest_uuid AND topic = :topic LIMIT 1"
                            ),
                            {"dest_uuid": dest_uuid, "topic": topic},
                        )
                        if result.fetchone():
                            signal_condition(self.notifications_map, payload)

                # Check all payloads in the workflow_events_map
                for payload in list(self.workflow_events_map._dict.keys()):
                    workflow_uuid, key = split_payload(payload)
                    with self.engine.begin() as conn:
                        result = conn.execute(
                            sa.text(
                                "SELECT 1 FROM workflow_events WHERE workflow_uuid = :workflow_uuid AND key = :key LIMIT 1"
                            ),
                            {"workflow_uuid": workflow_uuid, "key": key},
                        )
                        if result.fetchone():
                            signal_condition(self.workflow_events_map, payload)

            except Exception as e:
                # Only warn if we are still supposed to be running; during
                # shutdown a failed poll is expected noise.
                if self._run_background_processes:
                    dbos_logger.warning(f"SQLite notification poller error: {e}")
                    time.sleep(1)
dbos/_utils.py
CHANGED
|
@@ -20,7 +20,9 @@ class GlobalParams:
|
|
|
20
20
|
dbos_version = "unknown"
|
|
21
21
|
|
|
22
22
|
|
|
23
|
-
def retriable_postgres_exception(e:
|
|
23
|
+
def retriable_postgres_exception(e: Exception) -> bool:
|
|
24
|
+
if not isinstance(e, DBAPIError):
|
|
25
|
+
return False
|
|
24
26
|
if e.connection_invalidated:
|
|
25
27
|
return True
|
|
26
28
|
if isinstance(e.orig, psycopg.OperationalError):
|
|
@@ -48,3 +50,10 @@ def retriable_postgres_exception(e: DBAPIError) -> bool:
|
|
|
48
50
|
return False
|
|
49
51
|
else:
|
|
50
52
|
return False
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
def retriable_sqlite_exception(e: Exception) -> bool:
    """Return True if *e* is a transient SQLite error worth retrying.

    SQLite surfaces lock contention only through its message text
    ("database is locked"), so the check is a substring match.
    """
    # Collapsed `if X: return True else: return False` to `return X`.
    return "database is locked" in str(e)
|