dbos 1.12.0a2__py3-none-any.whl → 1.13.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (40)
  1. dbos/_alembic_migrations/versions/471b60d64126_dbos_migrations.py +35 -0
  2. dbos/_app_db.py +215 -80
  3. dbos/_client.py +30 -15
  4. dbos/_context.py +4 -0
  5. dbos/_core.py +7 -8
  6. dbos/_dbos.py +28 -18
  7. dbos/_dbos_config.py +124 -50
  8. dbos/_fastapi.py +3 -1
  9. dbos/_logger.py +3 -1
  10. dbos/_migration.py +322 -0
  11. dbos/_sys_db.py +122 -200
  12. dbos/_sys_db_postgres.py +173 -0
  13. dbos/_sys_db_sqlite.py +182 -0
  14. dbos/_tracer.py +5 -1
  15. dbos/_utils.py +10 -1
  16. dbos/cli/cli.py +238 -100
  17. dbos/cli/migration.py +2 -2
  18. dbos/dbos-config.schema.json +4 -0
  19. {dbos-1.12.0a2.dist-info → dbos-1.13.0.dist-info}/METADATA +1 -1
  20. dbos-1.13.0.dist-info/RECORD +78 -0
  21. dbos-1.12.0a2.dist-info/RECORD +0 -74
  22. /dbos/{_migrations → _alembic_migrations}/env.py +0 -0
  23. /dbos/{_migrations → _alembic_migrations}/script.py.mako +0 -0
  24. /dbos/{_migrations → _alembic_migrations}/versions/01ce9f07bd10_streaming.py +0 -0
  25. /dbos/{_migrations → _alembic_migrations}/versions/04ca4f231047_workflow_queues_executor_id.py +0 -0
  26. /dbos/{_migrations → _alembic_migrations}/versions/27ac6900c6ad_add_queue_dedup.py +0 -0
  27. /dbos/{_migrations → _alembic_migrations}/versions/50f3227f0b4b_fix_job_queue.py +0 -0
  28. /dbos/{_migrations → _alembic_migrations}/versions/5c361fc04708_added_system_tables.py +0 -0
  29. /dbos/{_migrations → _alembic_migrations}/versions/66478e1b95e5_consolidate_queues.py +0 -0
  30. /dbos/{_migrations → _alembic_migrations}/versions/83f3732ae8e7_workflow_timeout.py +0 -0
  31. /dbos/{_migrations → _alembic_migrations}/versions/933e86bdac6a_add_queue_priority.py +0 -0
  32. /dbos/{_migrations → _alembic_migrations}/versions/a3b18ad34abe_added_triggers.py +0 -0
  33. /dbos/{_migrations → _alembic_migrations}/versions/d76646551a6b_job_queue_limiter.py +0 -0
  34. /dbos/{_migrations → _alembic_migrations}/versions/d76646551a6c_workflow_queue.py +0 -0
  35. /dbos/{_migrations → _alembic_migrations}/versions/d994145b47b6_consolidate_inputs.py +0 -0
  36. /dbos/{_migrations → _alembic_migrations}/versions/eab0cc1d9a14_job_queue.py +0 -0
  37. /dbos/{_migrations → _alembic_migrations}/versions/f4b9b32ba814_functionname_childid_op_outputs.py +0 -0
  38. {dbos-1.12.0a2.dist-info → dbos-1.13.0.dist-info}/WHEEL +0 -0
  39. {dbos-1.12.0a2.dist-info → dbos-1.13.0.dist-info}/entry_points.txt +0 -0
  40. {dbos-1.12.0a2.dist-info → dbos-1.13.0.dist-info}/licenses/LICENSE +0 -0
dbos/_sys_db_postgres.py ADDED
@@ -0,0 +1,173 @@
+ import time
+ from typing import Any, Dict, Optional
+
+ import psycopg
+ import sqlalchemy as sa
+ from sqlalchemy.exc import DBAPIError
+
+ from dbos._migration import (
+     ensure_dbos_schema,
+     run_alembic_migrations,
+     run_dbos_migrations,
+ )
+ from dbos._schemas.system_database import SystemSchema
+
+ from ._logger import dbos_logger
+ from ._sys_db import SystemDatabase
+
+
+ class PostgresSystemDatabase(SystemDatabase):
+     """PostgreSQL-specific implementation of SystemDatabase."""
+
+     def __init__(
+         self,
+         *,
+         system_database_url: str,
+         engine_kwargs: Dict[str, Any],
+         debug_mode: bool = False,
+     ):
+         super().__init__(
+             system_database_url=system_database_url,
+             engine_kwargs=engine_kwargs,
+             debug_mode=debug_mode,
+         )
+         self.notification_conn: Optional[psycopg.connection.Connection] = None
+
+     def _create_engine(
+         self, system_database_url: str, engine_kwargs: Dict[str, Any]
+     ) -> sa.Engine:
+         # TODO: Make the schema dynamic so this isn't needed
+         SystemSchema.workflow_status.schema = "dbos"
+         SystemSchema.operation_outputs.schema = "dbos"
+         SystemSchema.notifications.schema = "dbos"
+         SystemSchema.workflow_events.schema = "dbos"
+         SystemSchema.streams.schema = "dbos"
+         url = sa.make_url(system_database_url).set(drivername="postgresql+psycopg")
+         return sa.create_engine(url, **engine_kwargs)
+
+     def run_migrations(self) -> None:
+         """Run PostgreSQL-specific migrations."""
+         if self._debug_mode:
+             dbos_logger.warning("System database migrations are skipped in debug mode.")
+             return
+         system_db_url = self.engine.url
+         sysdb_name = system_db_url.database
+         # If the system database does not already exist, create it
+         engine = sa.create_engine(
+             system_db_url.set(database="postgres"), **self._engine_kwargs
+         )
+         with engine.connect() as conn:
+             conn.execution_options(isolation_level="AUTOCOMMIT")
+             if not conn.execute(
+                 sa.text("SELECT 1 FROM pg_database WHERE datname=:db_name"),
+                 parameters={"db_name": sysdb_name},
+             ).scalar():
+                 dbos_logger.info(f"Creating system database {sysdb_name}")
+                 conn.execute(sa.text(f"CREATE DATABASE {sysdb_name}"))
+         engine.dispose()
+
+         using_dbos_migrations = ensure_dbos_schema(self.engine)
+         if not using_dbos_migrations:
+             # Complete the Alembic migrations, create the dbos_migrations table
+             run_alembic_migrations(self.engine)
+         run_dbos_migrations(self.engine)
+
+     def _cleanup_connections(self) -> None:
+         """Clean up PostgreSQL-specific connections."""
+         if self.notification_conn is not None:
+             self.notification_conn.close()
+
+     def _is_unique_constraint_violation(self, dbapi_error: DBAPIError) -> bool:
+         """Check if the error is a unique constraint violation in PostgreSQL."""
+         return dbapi_error.orig.sqlstate == "23505"  # type: ignore
+
+     def _is_foreign_key_violation(self, dbapi_error: DBAPIError) -> bool:
+         """Check if the error is a foreign key violation in PostgreSQL."""
+         return dbapi_error.orig.sqlstate == "23503"  # type: ignore
+
+     @staticmethod
+     def _reset_system_database(database_url: str) -> None:
+         """Reset the PostgreSQL system database by dropping it."""
+         system_db_url = sa.make_url(database_url)
+         sysdb_name = system_db_url.database
+
+         if sysdb_name is None:
+             raise ValueError(f"System database name not found in URL {system_db_url}")
+
+         try:
+             # Connect to postgres default database
+             engine = sa.create_engine(
+                 system_db_url.set(database="postgres", drivername="postgresql+psycopg"),
+                 connect_args={"connect_timeout": 10},
+             )
+
+             with engine.connect() as conn:
+                 # Set autocommit required for database dropping
+                 conn.execution_options(isolation_level="AUTOCOMMIT")
+
+                 # Drop the database
+                 conn.execute(
+                     sa.text(f"DROP DATABASE IF EXISTS {sysdb_name} WITH (FORCE)")
+                 )
+             engine.dispose()
+         except Exception as e:
+             dbos_logger.error(f"Error resetting PostgreSQL system database: {str(e)}")
+             raise e
+
+     def _notification_listener(self) -> None:
+         """Listen for PostgreSQL notifications using psycopg."""
+         while self._run_background_processes:
+             try:
+                 # since we're using the psycopg connection directly, we need a url without the "+psycopg" suffix
+                 url = sa.URL.create(
+                     "postgresql", **self.engine.url.translate_connect_args()
+                 )
+                 # Listen to notifications
+                 self.notification_conn = psycopg.connect(
+                     url.render_as_string(hide_password=False), autocommit=True
+                 )
+
+                 self.notification_conn.execute("LISTEN dbos_notifications_channel")
+                 self.notification_conn.execute("LISTEN dbos_workflow_events_channel")
+
+                 while self._run_background_processes:
+                     gen = self.notification_conn.notifies()
+                     for notify in gen:
+                         channel = notify.channel
+                         dbos_logger.debug(
+                             f"Received notification on channel: {channel}, payload: {notify.payload}"
+                         )
+                         if channel == "dbos_notifications_channel":
+                             if notify.payload:
+                                 condition = self.notifications_map.get(notify.payload)
+                                 if condition is None:
+                                     # No condition found for this payload
+                                     continue
+                                 condition.acquire()
+                                 condition.notify_all()
+                                 condition.release()
+                                 dbos_logger.debug(
+                                     f"Signaled notifications condition for {notify.payload}"
+                                 )
+                         elif channel == "dbos_workflow_events_channel":
+                             if notify.payload:
+                                 condition = self.workflow_events_map.get(notify.payload)
+                                 if condition is None:
+                                     # No condition found for this payload
+                                     continue
+                                 condition.acquire()
+                                 condition.notify_all()
+                                 condition.release()
+                                 dbos_logger.debug(
+                                     f"Signaled workflow_events condition for {notify.payload}"
+                                 )
+                         else:
+                             dbos_logger.error(f"Unknown channel: {channel}")
+             except Exception as e:
+                 if self._run_background_processes:
+                     dbos_logger.warning(f"Notification listener error: {e}")
+                     time.sleep(1)
+                     # Then the loop will try to reconnect and restart the listener
+             finally:
+                 if self.notification_conn is not None:
+                     self.notification_conn.close()
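
The `_notification_listener` above is built on Postgres LISTEN/NOTIFY, with psycopg 3's `notifies()` generator blocking until a NOTIFY arrives. A minimal standalone sketch of that pattern for readers unfamiliar with the psycopg 3 API; the DSN is a placeholder, and only the channel name comes from the diff:

    # Minimal sketch of the LISTEN/NOTIFY loop used above (psycopg 3).
    import psycopg

    def listen_once(dsn: str = "postgresql://localhost/dbos_system") -> None:
        # autocommit is required so LISTEN takes effect immediately,
        # outside any enclosing transaction
        with psycopg.connect(dsn, autocommit=True) as conn:
            conn.execute("LISTEN dbos_notifications_channel")
            for notify in conn.notifies():  # blocks until a NOTIFY arrives
                print(f"channel={notify.channel} payload={notify.payload}")
                break  # handle a single notification, then exit

Another session can trigger the loop with `SELECT pg_notify('dbos_notifications_channel', 'some-payload')`.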
dbos/_sys_db_sqlite.py ADDED
@@ -0,0 +1,182 @@
+ import os
+ import time
+ from typing import Any, Dict, Optional, Tuple
+
+ import sqlalchemy as sa
+ from sqlalchemy.exc import DBAPIError
+
+ from dbos._migration import sqlite_migrations
+ from dbos._schemas.system_database import SystemSchema
+
+ from ._logger import dbos_logger
+ from ._sys_db import SystemDatabase
+
+
+ class SQLiteSystemDatabase(SystemDatabase):
+     """SQLite-specific implementation of SystemDatabase."""
+
+     def _create_engine(
+         self, system_database_url: str, engine_kwargs: Dict[str, Any]
+     ) -> sa.Engine:
+         """Create a SQLite engine."""
+         # TODO: Make the schema dynamic so this isn't needed
+         SystemSchema.workflow_status.schema = None
+         SystemSchema.operation_outputs.schema = None
+         SystemSchema.notifications.schema = None
+         SystemSchema.workflow_events.schema = None
+         SystemSchema.streams.schema = None
+         return sa.create_engine(system_database_url)
+
+     def run_migrations(self) -> None:
+         """Run SQLite-specific migrations."""
+         if self._debug_mode:
+             dbos_logger.warning("System database migrations are skipped in debug mode.")
+             return
+
+         with self.engine.begin() as conn:
+             # Enable foreign keys for SQLite
+             conn.execute(sa.text("PRAGMA foreign_keys = ON"))
+
+             # Check if migrations table exists
+             result = conn.execute(
+                 sa.text(
+                     "SELECT name FROM sqlite_master WHERE type='table' AND name='dbos_migrations'"
+                 )
+             ).fetchone()
+
+             if result is None:
+                 # Create migrations table
+                 conn.execute(
+                     sa.text(
+                         "CREATE TABLE dbos_migrations (version INTEGER NOT NULL PRIMARY KEY)"
+                     )
+                 )
+                 last_applied = 0
+             else:
+                 # Get current migration version
+                 version_result = conn.execute(
+                     sa.text("SELECT version FROM dbos_migrations")
+                 ).fetchone()
+                 last_applied = version_result[0] if version_result else 0
+
+             # Apply migrations starting from the next version
+             for i, migration_sql in enumerate(sqlite_migrations, 1):
+                 if i <= last_applied:
+                     continue
+
+                 # Execute the migration
+                 dbos_logger.info(
+                     f"Applying DBOS SQLite system database schema migration {i}"
+                 )
+
+                 # SQLite only allows one statement at a time, so split by semicolon
+                 statements = [
+                     stmt.strip() for stmt in migration_sql.split(";") if stmt.strip()
+                 ]
+                 for statement in statements:
+                     conn.execute(sa.text(statement))
+
+                 # Update the single row with the new version
+                 if last_applied == 0:
+                     conn.execute(
+                         sa.text(
+                             "INSERT INTO dbos_migrations (version) VALUES (:version)"
+                         ),
+                         {"version": i},
+                     )
+                 else:
+                     conn.execute(
+                         sa.text("UPDATE dbos_migrations SET version = :version"),
+                         {"version": i},
+                     )
+                 last_applied = i
+
+     def _cleanup_connections(self) -> None:
+         # SQLite doesn't require special connection cleanup
+         pass
+
+     def _is_unique_constraint_violation(self, dbapi_error: DBAPIError) -> bool:
+         """Check if the error is a unique constraint violation in SQLite."""
+         return "UNIQUE constraint failed" in str(dbapi_error.orig)
+
+     def _is_foreign_key_violation(self, dbapi_error: DBAPIError) -> bool:
+         """Check if the error is a foreign key violation in SQLite."""
+         return "FOREIGN KEY constraint failed" in str(dbapi_error.orig)
+
+     @staticmethod
+     def _reset_system_database(database_url: str) -> None:
+         """Reset the SQLite system database by deleting the database file."""
+
+         # Parse the SQLite database URL to get the file path
+         url = sa.make_url(database_url)
+         db_path = url.database
+
+         if db_path is None:
+             raise ValueError(f"System database path not found in URL {url}")
+
+         try:
+             if os.path.exists(db_path):
+                 os.remove(db_path)
+                 dbos_logger.info(f"Deleted SQLite database file: {db_path}")
+             else:
+                 dbos_logger.info(f"SQLite database file does not exist: {db_path}")
+         except OSError as e:
+             dbos_logger.error(
+                 f"Error deleting SQLite database file {db_path}: {str(e)}"
+             )
+             raise e
+
+     def _notification_listener(self) -> None:
+         """Poll for notifications and workflow events in SQLite."""
+
+         def split_payload(payload: str) -> Tuple[str, Optional[str]]:
+             """Split payload into components (first::second format)."""
+             if "::" in payload:
+                 parts = payload.split("::", 1)
+                 return parts[0], parts[1]
+             return payload, None
+
+         def signal_condition(condition_map: Any, payload: str) -> None:
+             """Signal a condition variable if it exists."""
+             condition = condition_map.get(payload)
+             if condition:
+                 condition.acquire()
+                 condition.notify_all()
+                 condition.release()
+                 dbos_logger.debug(f"Signaled condition for {payload}")
+
+         while self._run_background_processes:
+             try:
+                 # Poll every second
+                 time.sleep(1)
+
+                 # Check all payloads in the notifications_map
+                 for payload in list(self.notifications_map._dict.keys()):
+                     dest_uuid, topic = split_payload(payload)
+                     with self.engine.begin() as conn:
+                         result = conn.execute(
+                             sa.text(
+                                 "SELECT 1 FROM notifications WHERE destination_uuid = :dest_uuid AND topic = :topic LIMIT 1"
+                             ),
+                             {"dest_uuid": dest_uuid, "topic": topic},
+                         )
+                         if result.fetchone():
+                             signal_condition(self.notifications_map, payload)
+
+                 # Check all payloads in the workflow_events_map
+                 for payload in list(self.workflow_events_map._dict.keys()):
+                     workflow_uuid, key = split_payload(payload)
+                     with self.engine.begin() as conn:
+                         result = conn.execute(
+                             sa.text(
+                                 "SELECT 1 FROM workflow_events WHERE workflow_uuid = :workflow_uuid AND key = :key LIMIT 1"
+                             ),
+                             {"workflow_uuid": workflow_uuid, "key": key},
+                         )
+                         if result.fetchone():
+                             signal_condition(self.workflow_events_map, payload)
+
+             except Exception as e:
+                 if self._run_background_processes:
+                     dbos_logger.warning(f"SQLite notification poller error: {e}")
+                     time.sleep(1)
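
Since SQLite has no LISTEN/NOTIFY, the poller above checks the tables once a second and wakes any thread waiting on a matching payload through a condition variable. A sketch of that wait/signal handoff, assuming a plain dict of payload to Condition (the real map is a wrapper class, hence the `_dict` access above):

    # Sketch of the poll-and-signal handoff; the plain-dict map is an assumption.
    import threading
    from typing import Dict, Optional

    notifications_map: Dict[str, threading.Condition] = {}

    def wait_for(payload: str, timeout: Optional[float] = None) -> None:
        # A receiver registers interest in a payload, then blocks
        condition = notifications_map.setdefault(payload, threading.Condition())
        with condition:
            condition.wait(timeout)  # woken by the poller when a matching row appears

    def signal(payload: str) -> None:
        # The poller signals any waiter registered for this payload
        condition = notifications_map.get(payload)
        if condition is not None:
            with condition:
                condition.notify_all()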
dbos/_tracer.py CHANGED
@@ -24,10 +24,14 @@ class DBOSTracer:
      def __init__(self) -> None:
          self.app_id = os.environ.get("DBOS__APPID", None)
          self.provider: Optional[TracerProvider] = None
+         self.disable_otlp: bool = False

      def config(self, config: ConfigFile) -> None:
          self.otlp_attributes = config.get("telemetry", {}).get("otlp_attributes", {})  # type: ignore
-         if not isinstance(trace.get_tracer_provider(), TracerProvider):
+         self.disable_otlp = config.get("telemetry", {}).get("disable_otlp", False)  # type: ignore
+         if not self.disable_otlp and not isinstance(
+             trace.get_tracer_provider(), TracerProvider
+         ):
              resource = Resource(
                  attributes={
                      ResourceAttributes.SERVICE_NAME: config["name"],
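
The new `disable_otlp` flag (also added to dbos-config.schema.json in this release) skips OTLP tracer-provider setup when set. A hedged sketch of a config dict that exercises it; `name`, `telemetry`, `otlp_attributes`, and `disable_otlp` come from the diff, while the overall ConfigFile shape beyond those keys is an assumption:

    # With disable_otlp set, the guard above leaves the global
    # TracerProvider untouched
    config = {
        "name": "my-app",
        "telemetry": {
            "otlp_attributes": {},
            "disable_otlp": True,
        },
    }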
dbos/_utils.py CHANGED
@@ -20,7 +20,9 @@ class GlobalParams:
      dbos_version = "unknown"


- def retriable_postgres_exception(e: DBAPIError) -> bool:
+ def retriable_postgres_exception(e: Exception) -> bool:
+     if not isinstance(e, DBAPIError):
+         return False
      if e.connection_invalidated:
          return True
      if isinstance(e.orig, psycopg.OperationalError):
@@ -48,3 +50,10 @@ def retriable_postgres_exception(e: DBAPIError) -> bool:
              return False
      else:
          return False
+
+
+ def retriable_sqlite_exception(e: Exception) -> bool:
+     if "database is locked" in str(e):
+         return True
+     else:
+         return False
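
Both helpers now accept any `Exception` and classify it as transient or not, so callers can decide whether to retry. A sketch of how they might gate a retry loop; the `with_retries` wrapper and its backoff policy are illustrative, not part of DBOS:

    # Illustrative retry wrapper; only the predicates come from dbos._utils.
    import time
    from typing import Callable, TypeVar

    T = TypeVar("T")

    def with_retries(
        fn: Callable[[], T],
        is_retriable: Callable[[Exception], bool],
        attempts: int = 3,
    ) -> T:
        for attempt in range(attempts):
            try:
                return fn()
            except Exception as e:
                # Re-raise immediately on the last attempt or a permanent error
                if attempt == attempts - 1 or not is_retriable(e):
                    raise
                time.sleep(2**attempt)  # simple exponential backoff
        raise AssertionError("unreachable")

    # e.g. with_retries(lambda: conn.execute(stmt), retriable_sqlite_exception)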