dbos 1.13.0a3__py3-none-any.whl → 1.13.0a5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dbos might be problematic.

dbos/_app_db.py CHANGED
@@ -1,11 +1,14 @@
+from abc import ABC, abstractmethod
 from typing import Any, Dict, List, Optional, TypedDict
 
+import psycopg
 import sqlalchemy as sa
-import sqlalchemy.dialects.postgresql as pg
 from sqlalchemy import inspect, text
 from sqlalchemy.exc import DBAPIError
 from sqlalchemy.orm import Session, sessionmaker
 
+from dbos._migration import get_sqlite_timestamp_expr
+
 from . import _serialization
 from ._error import DBOSUnexpectedStepError, DBOSWorkflowConflictIDError
 from ._logger import dbos_logger
@@ -29,7 +32,7 @@ class RecordedResult(TypedDict):
     error: Optional[str]  # JSON (jsonpickle)
 
 
-class ApplicationDatabase:
+class ApplicationDatabase(ABC):
 
     def __init__(
         self,
@@ -38,95 +41,37 @@ class ApplicationDatabase:
         engine_kwargs: Dict[str, Any],
         debug_mode: bool = False,
     ):
-        app_db_url = sa.make_url(database_url).set(drivername="postgresql+psycopg")
-
-        if engine_kwargs is None:
-            engine_kwargs = {}
-
-        self.engine = sa.create_engine(
-            app_db_url,
-            **engine_kwargs,
-        )
+        self.engine = self._create_engine(database_url, engine_kwargs)
         self._engine_kwargs = engine_kwargs
         self.sessionmaker = sessionmaker(bind=self.engine)
         self.debug_mode = debug_mode
 
-    def run_migrations(self) -> None:
-        if self.debug_mode:
-            dbos_logger.warning(
-                "Application database migrations are skipped in debug mode."
-            )
-            return
-        # Check if the database exists
-        app_db_url = self.engine.url
-        postgres_db_engine = sa.create_engine(
-            app_db_url.set(database="postgres"),
-            **self._engine_kwargs,
-        )
-        with postgres_db_engine.connect() as conn:
-            conn.execution_options(isolation_level="AUTOCOMMIT")
-            if not conn.execute(
-                sa.text("SELECT 1 FROM pg_database WHERE datname=:db_name"),
-                parameters={"db_name": app_db_url.database},
-            ).scalar():
-                conn.execute(sa.text(f"CREATE DATABASE {app_db_url.database}"))
-        postgres_db_engine.dispose()
-
-        # Create the dbos schema and transaction_outputs table in the application database
-        with self.engine.begin() as conn:
-            # Check if schema exists first
-            schema_exists = conn.execute(
-                sa.text(
-                    "SELECT 1 FROM information_schema.schemata WHERE schema_name = :schema_name"
-                ),
-                parameters={"schema_name": ApplicationSchema.schema},
-            ).scalar()
-
-            if not schema_exists:
-                schema_creation_query = sa.text(
-                    f"CREATE SCHEMA {ApplicationSchema.schema}"
-                )
-                conn.execute(schema_creation_query)
-
-        inspector = inspect(self.engine)
-        if not inspector.has_table(
-            "transaction_outputs", schema=ApplicationSchema.schema
-        ):
-            ApplicationSchema.metadata_obj.create_all(self.engine)
-        else:
-            columns = inspector.get_columns(
-                "transaction_outputs", schema=ApplicationSchema.schema
-            )
-            column_names = [col["name"] for col in columns]
+    @abstractmethod
+    def _create_engine(
+        self, database_url: str, engine_kwargs: Dict[str, Any]
+    ) -> sa.Engine:
+        """Create a database engine specific to the database type."""
+        pass
 
-            if "function_name" not in column_names:
-                # Column missing, alter table to add it
-                with self.engine.connect() as conn:
-                    conn.execute(
-                        text(
-                            f"""
-                            ALTER TABLE {ApplicationSchema.schema}.transaction_outputs
-                            ADD COLUMN function_name TEXT NOT NULL DEFAULT '';
-                            """
-                        )
-                    )
-                    conn.commit()
+    @abstractmethod
+    def run_migrations(self) -> None:
+        """Run database migrations specific to the database type."""
+        pass
 
     def destroy(self) -> None:
         self.engine.dispose()
 
-    @staticmethod
     def record_transaction_output(
-        session: Session, output: TransactionResultInternal
+        self, session: Session, output: TransactionResultInternal
     ) -> None:
         try:
             session.execute(
-                pg.insert(ApplicationSchema.transaction_outputs).values(
+                sa.insert(ApplicationSchema.transaction_outputs).values(
                     workflow_uuid=output["workflow_uuid"],
                     function_id=output["function_id"],
                     output=output["output"],
                     error=None,
-                    txn_id=sa.text("(select pg_current_xact_id_if_assigned()::text)"),
+                    txn_id="",
                     txn_snapshot=output["txn_snapshot"],
                     executor_id=(
                         output["executor_id"] if output["executor_id"] else None
@@ -135,7 +80,7 @@ class ApplicationDatabase:
                 )
             )
         except DBAPIError as dbapi_error:
-            if dbapi_error.orig.sqlstate == "23505":  # type: ignore
+            if self._is_unique_constraint_violation(dbapi_error):
                 raise DBOSWorkflowConflictIDError(output["workflow_uuid"])
             raise
 
@@ -145,14 +90,12 @@ class ApplicationDatabase:
         try:
             with self.engine.begin() as conn:
                 conn.execute(
-                    pg.insert(ApplicationSchema.transaction_outputs).values(
+                    sa.insert(ApplicationSchema.transaction_outputs).values(
                         workflow_uuid=output["workflow_uuid"],
                         function_id=output["function_id"],
                         output=None,
                         error=output["error"],
-                        txn_id=sa.text(
-                            "(select pg_current_xact_id_if_assigned()::text)"
-                        ),
+                        txn_id="",
                         txn_snapshot=output["txn_snapshot"],
                         executor_id=(
                             output["executor_id"] if output["executor_id"] else None
@@ -161,7 +104,7 @@ class ApplicationDatabase:
                     )
                 )
         except DBAPIError as dbapi_error:
-            if dbapi_error.orig.sqlstate == "23505":  # type: ignore
+            if self._is_unique_constraint_violation(dbapi_error):
                 raise DBOSWorkflowConflictIDError(output["workflow_uuid"])
             raise
 
@@ -283,3 +226,197 @@ class ApplicationDatabase:
             )
 
             c.execute(delete_query)
+
+    @abstractmethod
+    def _is_unique_constraint_violation(self, dbapi_error: DBAPIError) -> bool:
+        """Check if the error is a unique constraint violation."""
+        pass
+
+    @abstractmethod
+    def _is_serialization_error(self, dbapi_error: DBAPIError) -> bool:
+        """Check if the error is a serialization/concurrency error."""
+        pass
+
+    @staticmethod
+    def create(
+        database_url: str,
+        engine_kwargs: Dict[str, Any],
+        debug_mode: bool = False,
+    ) -> "ApplicationDatabase":
+        """Factory method to create the appropriate ApplicationDatabase implementation based on URL."""
+        if database_url.startswith("sqlite"):
+            return SQLiteApplicationDatabase(
+                database_url=database_url,
+                engine_kwargs=engine_kwargs,
+                debug_mode=debug_mode,
+            )
+        else:
+            # Default to PostgreSQL for postgresql://, postgres://, or other URLs
+            return PostgresApplicationDatabase(
+                database_url=database_url,
+                engine_kwargs=engine_kwargs,
+                debug_mode=debug_mode,
+            )
+
+
+class PostgresApplicationDatabase(ApplicationDatabase):
+    """PostgreSQL-specific implementation of ApplicationDatabase."""
+
+    def _create_engine(
+        self, database_url: str, engine_kwargs: Dict[str, Any]
+    ) -> sa.Engine:
+        """Create a PostgreSQL engine."""
+        app_db_url = sa.make_url(database_url).set(drivername="postgresql+psycopg")
+
+        if engine_kwargs is None:
+            engine_kwargs = {}
+
+        # TODO: Make the schema dynamic so this isn't needed
+        ApplicationSchema.transaction_outputs.schema = "dbos"
+
+        return sa.create_engine(
+            app_db_url,
+            **engine_kwargs,
+        )
+
+    def run_migrations(self) -> None:
+        if self.debug_mode:
+            dbos_logger.warning(
+                "Application database migrations are skipped in debug mode."
+            )
+            return
+        # Check if the database exists
+        app_db_url = self.engine.url
+        postgres_db_engine = sa.create_engine(
+            app_db_url.set(database="postgres"),
+            **self._engine_kwargs,
+        )
+        with postgres_db_engine.connect() as conn:
+            conn.execution_options(isolation_level="AUTOCOMMIT")
+            if not conn.execute(
+                sa.text("SELECT 1 FROM pg_database WHERE datname=:db_name"),
+                parameters={"db_name": app_db_url.database},
+            ).scalar():
+                conn.execute(sa.text(f"CREATE DATABASE {app_db_url.database}"))
+        postgres_db_engine.dispose()
+
+        # Create the dbos schema and transaction_outputs table in the application database
+        with self.engine.begin() as conn:
+            # Check if schema exists first
+            schema_exists = conn.execute(
+                sa.text(
+                    "SELECT 1 FROM information_schema.schemata WHERE schema_name = :schema_name"
+                ),
+                parameters={"schema_name": ApplicationSchema.schema},
+            ).scalar()
+
+            if not schema_exists:
+                schema_creation_query = sa.text(
+                    f"CREATE SCHEMA {ApplicationSchema.schema}"
+                )
+                conn.execute(schema_creation_query)
+
+        inspector = inspect(self.engine)
+        if not inspector.has_table(
+            "transaction_outputs", schema=ApplicationSchema.schema
+        ):
+            ApplicationSchema.metadata_obj.create_all(self.engine)
+        else:
+            columns = inspector.get_columns(
+                "transaction_outputs", schema=ApplicationSchema.schema
+            )
+            column_names = [col["name"] for col in columns]
+
+            if "function_name" not in column_names:
+                # Column missing, alter table to add it
+                with self.engine.connect() as conn:
+                    conn.execute(
+                        text(
+                            f"""
+                            ALTER TABLE {ApplicationSchema.schema}.transaction_outputs
+                            ADD COLUMN function_name TEXT NOT NULL DEFAULT '';
+                            """
+                        )
+                    )
+                    conn.commit()
+
+    def _is_unique_constraint_violation(self, dbapi_error: DBAPIError) -> bool:
+        """Check if the error is a unique constraint violation in PostgreSQL."""
+        return dbapi_error.orig.sqlstate == "23505"  # type: ignore
+
+    def _is_serialization_error(self, dbapi_error: DBAPIError) -> bool:
+        """Check if the error is a serialization/concurrency error in PostgreSQL."""
+        # 40001: serialization_failure (MVCC conflict)
+        # 40P01: deadlock_detected
+        driver_error = dbapi_error.orig
+        return (
+            driver_error is not None
+            and isinstance(driver_error, psycopg.OperationalError)
+            and driver_error.sqlstate in ("40001", "40P01")
+        )
+
+
+class SQLiteApplicationDatabase(ApplicationDatabase):
+    """SQLite-specific implementation of ApplicationDatabase."""
+
+    def _create_engine(
+        self, database_url: str, engine_kwargs: Dict[str, Any]
+    ) -> sa.Engine:
+        """Create a SQLite engine."""
+        # TODO: Make the schema dynamic so this isn't needed
+        ApplicationSchema.transaction_outputs.schema = None
+        return sa.create_engine(database_url)
+
+    def run_migrations(self) -> None:
+        if self.debug_mode:
+            dbos_logger.warning(
+                "Application database migrations are skipped in debug mode."
+            )
+            return
+
+        with self.engine.begin() as conn:
+            # Check if table exists
+            result = conn.execute(
+                sa.text(
+                    "SELECT name FROM sqlite_master WHERE type='table' AND name='transaction_outputs'"
+                )
+            ).fetchone()
+
+            if result is None:
+                # Create the table with proper SQLite syntax
+                conn.execute(
+                    sa.text(
+                        f"""
+                        CREATE TABLE transaction_outputs (
+                            workflow_uuid TEXT NOT NULL,
+                            function_id INTEGER NOT NULL,
+                            output TEXT,
+                            error TEXT,
+                            txn_id TEXT,
+                            txn_snapshot TEXT NOT NULL,
+                            executor_id TEXT,
+                            function_name TEXT NOT NULL DEFAULT '',
+                            created_at BIGINT NOT NULL DEFAULT {get_sqlite_timestamp_expr()},
+                            PRIMARY KEY (workflow_uuid, function_id)
+                        )
+                        """
+                    )
+                )
+                # Create the index
+                conn.execute(
+                    sa.text(
+                        "CREATE INDEX transaction_outputs_created_at_index ON transaction_outputs (created_at)"
+                    )
+                )
+
+    def _is_unique_constraint_violation(self, dbapi_error: DBAPIError) -> bool:
+        """Check if the error is a unique constraint violation in SQLite."""
+        return "UNIQUE constraint failed" in str(dbapi_error.orig)
+
+    def _is_serialization_error(self, dbapi_error: DBAPIError) -> bool:
+        """Check if the error is a serialization/concurrency error in SQLite."""
+        # SQLite database is locked or busy errors
+        error_msg = str(dbapi_error.orig).lower()
+        return (
+            "database is locked" in error_msg or "database table is locked" in error_msg
+        )
dbos/_client.py CHANGED
@@ -16,6 +16,7 @@ from typing import (
 
 from dbos._app_db import ApplicationDatabase
 from dbos._context import MaxPriority, MinPriority
+from dbos._sys_db import SystemDatabase
 
 if sys.version_info < (3, 11):
     from typing_extensions import NotRequired
@@ -119,7 +120,7 @@ class DBOSClient:
     ):
         assert is_valid_database_url(database_url)
         # We only create database connections but do not run migrations
-        self._sys_db = SystemDatabase(
+        self._sys_db = SystemDatabase.create(
            system_database_url=get_system_database_url(
                {
                    "system_database_url": system_database_url,
@@ -134,7 +135,7 @@ class DBOSClient:
            },
        )
        self._sys_db.check_connection()
-        self._app_db = ApplicationDatabase(
+        self._app_db = ApplicationDatabase.create(
            database_url=database_url,
            engine_kwargs={
                "pool_timeout": 30,
dbos/_core.py CHANGED
@@ -950,18 +950,14 @@ def decorate_transaction(
             assert (
                 ctx.sql_session is not None
             ), "Cannot find a database connection"
-            ApplicationDatabase.record_transaction_output(
+            dbos._app_db.record_transaction_output(
                 ctx.sql_session, txn_output
             )
             break
         except DBAPIError as dbapi_error:
-            driver_error = cast(
-                Optional[psycopg.OperationalError], dbapi_error.orig
-            )
-            if retriable_postgres_exception(dbapi_error) or (
-                driver_error is not None
-                and driver_error.sqlstate == "40001"
-            ):
+            if retriable_postgres_exception(
+                dbapi_error
+            ) or dbos._app_db._is_serialization_error(dbapi_error):
                 # Retry on serialization failure
                 span = ctx.get_current_span()
                 if span:
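
Reviewer note: the retry predicate that used to hard-code psycopg's `40001` SQLSTATE now delegates to the backend's `_is_serialization_error` hook, so one transaction retry loop covers both Postgres serialization failures and SQLite lock contention. A rough sketch of the loop shape this enables (`max_retries` and the backoff policy are illustrative, not the library's actual values):

    import time
    from sqlalchemy.exc import DBAPIError

    def run_with_retries(app_db, attempt_txn, max_retries=10):
        """Retry a callable when the backend reports a serialization conflict."""
        for attempt in range(max_retries):
            try:
                return attempt_txn()
            except DBAPIError as dbapi_error:
                # Postgres: SQLSTATE 40001/40P01; SQLite: "database is locked"
                if app_db._is_serialization_error(dbapi_error):
                    time.sleep(0.1 * (attempt + 1))  # linear backoff, illustrative
                    continue
                raise
        raise RuntimeError("transaction did not commit after retries")
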
dbos/_dbos.py CHANGED
@@ -32,7 +32,7 @@ from opentelemetry.trace import Span
 from rich import print
 
 from dbos._conductor.conductor import ConductorWebsocket
-from dbos._sys_db import WorkflowStatus
+from dbos._sys_db import SystemDatabase, WorkflowStatus
 from dbos._utils import INTERNAL_QUEUE_NAME, GlobalParams
 from dbos._workflow_commands import fork_workflow, list_queued_workflows, list_workflows
 
@@ -70,7 +70,6 @@ from ._sys_db import (
     SystemDatabase,
     WorkflowStatus,
     _dbos_stream_closed_sentinel,
-    reset_system_database,
     workflow_is_active,
 )
 from ._tracer import DBOSTracer, dbos_tracer
@@ -80,7 +79,6 @@ if TYPE_CHECKING:
     from ._kafka import _KafkaConsumerWorkflow
     from flask import Flask
 
-    from sqlalchemy import make_url
     from sqlalchemy.orm import Session
 
 if sys.version_info < (3, 10):
@@ -457,13 +455,13 @@ class DBOS:
         self._background_event_loop.start()
         assert self._config["database_url"] is not None
         assert self._config["database"]["sys_db_engine_kwargs"] is not None
-        self._sys_db_field = SystemDatabase(
+        self._sys_db_field = SystemDatabase.create(
             system_database_url=get_system_database_url(self._config),
             engine_kwargs=self._config["database"]["sys_db_engine_kwargs"],
             debug_mode=debug_mode,
         )
         assert self._config["database"]["db_engine_kwargs"] is not None
-        self._app_db_field = ApplicationDatabase(
+        self._app_db_field = ApplicationDatabase.create(
             database_url=self._config["database_url"],
             engine_kwargs=self._config["database"]["db_engine_kwargs"],
             debug_mode=debug_mode,
@@ -589,13 +587,7 @@ class DBOS:
             not self._launched
         ), "The system database cannot be reset after DBOS is launched. Resetting the system database is a destructive operation that should only be used in a test environment."
 
-        sysdb_name = self._config["database"]["sys_db_name"]
-        assert sysdb_name is not None
-
-        assert self._config["database_url"] is not None
-        pg_db_url = make_url(self._config["database_url"]).set(database="postgres")
-
-        reset_system_database(pg_db_url, sysdb_name)
+        SystemDatabase.reset_system_database(get_system_database_url(self._config))
 
     def _destroy(self, *, workflow_completion_timeout_sec: int) -> None:
         self._initialized = False
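
Reviewer note: resetting the system database now funnels through `SystemDatabase.reset_system_database`, which takes the full system database URL instead of a Postgres admin URL plus database name, so the same destructive test helper works for either backend. The call shape, with illustrative URLs:

    from dbos._sys_db import SystemDatabase

    # Destructive; intended for test environments only
    SystemDatabase.reset_system_database("sqlite:///my_app.sqlite")
    SystemDatabase.reset_system_database(
        "postgresql://postgres:dbos@localhost:5432/my_app_dbos_sys"
    )
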
dbos/_dbos_config.py CHANGED
@@ -296,19 +296,12 @@ def process_config(
     """
     If a database_url is provided, pass it as is in the config.
 
-    Else, build a database_url from defaults.
+    Else, default to SQLite.
 
     Also build SQL Alchemy "kwargs" base on user input + defaults.
     Specifically, db_engine_kwargs takes precedence over app_db_pool_size
 
     In debug mode, apply overrides from DBOS_DBHOST, DBOS_DBPORT, DBOS_DBUSER, and DBOS_DBPASSWORD.
-
-    Default configuration:
-    - Hostname: localhost
-    - Port: 5432
-    - Username: postgres
-    - Password: $PGPASSWORD
-    - Database name: transformed application name.
     """
 
     if "name" not in data:
@@ -350,9 +343,14 @@ def process_config(
 
         url = make_url(data["database_url"])
 
-        if not data["database"].get("sys_db_name"):
+        if data["database_url"].startswith("sqlite"):
+            data["system_database_url"] = data["database_url"]
+        else:
             assert url.database is not None
-            data["database"]["sys_db_name"] = url.database + SystemSchema.sysdb_suffix
+            if not data["database"].get("sys_db_name"):
+                data["database"]["sys_db_name"] = (
+                    url.database + SystemSchema.sysdb_suffix
+                )
 
         # Gather connect_timeout from the URL if provided. It should be used in engine kwargs if not provided there (instead of our default)
         connect_timeout_str = url.query.get("connect_timeout")
@@ -380,23 +378,24 @@ def process_config(
         ).render_as_string(hide_password=False)
     else:
         _app_db_name = _app_name_to_db_name(data["name"])
-        _password = os.environ.get("PGPASSWORD", "dbos")
-        data["database_url"] = (
-            f"postgres://postgres:{_password}@localhost:5432/{_app_db_name}?connect_timeout=10&sslmode=prefer"
-        )
-        if not data["database"].get("sys_db_name"):
-            data["database"]["sys_db_name"] = _app_db_name + SystemSchema.sysdb_suffix
-    assert data["database_url"] is not None
+        data["database_url"] = f"sqlite:///{_app_db_name}.sqlite"
+        data["system_database_url"] = data["database_url"]
 
     configure_db_engine_parameters(data["database"], connect_timeout=connect_timeout)
 
     # Pretty-print where we've loaded database connection information from, respecting the log level
+    assert data["database_url"] is not None
     if not silent and logs["logLevel"] == "INFO" or logs["logLevel"] == "DEBUG":
         log_url = make_url(data["database_url"]).render_as_string(hide_password=True)
         print(f"[bold blue]Using database connection string: {log_url}[/bold blue]")
-        print(
-            f"[bold blue]Database engine parameters: {data['database']['db_engine_kwargs']}[/bold blue]"
-        )
+        if data["database_url"].startswith("sqlite"):
+            print(
+                f"[bold blue]Using SQLite as a system database. The SQLite system database is for development and testing. PostgreSQL is recommended for production use.[/bold blue]"
+            )
+        else:
+            print(
+                f"[bold blue]Database engine parameters: {data['database']['db_engine_kwargs']}[/bold blue]"
+            )
 
     # Return data as ConfigFile type
     return data
@@ -445,6 +444,8 @@ def configure_db_engine_parameters(
 
 
 def is_valid_database_url(database_url: str) -> bool:
+    if database_url.startswith("sqlite"):
+        return True
     url = make_url(database_url)
     required_fields = [
         ("username", "Username must be specified in the connection URL"),
@@ -559,6 +560,8 @@ def get_system_database_url(config: ConfigFile) -> str:
         return config["system_database_url"]
     else:
         assert config["database_url"] is not None
+        if config["database_url"].startswith("sqlite"):
+            return config["database_url"]
         app_db_url = make_url(config["database_url"])
         if config["database"].get("sys_db_name") is not None:
             sys_db_name = config["database"]["sys_db_name"]
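
Reviewer note: the net effect on configuration is that when no `database_url` is supplied, DBOS now defaults to a per-application SQLite file (previously a local Postgres URL built from `PGPASSWORD` defaults), and a SQLite URL doubles as the system database URL. A hedged sketch of the resulting behavior (the bare-dict input is simplified; `process_config` actually takes a `ConfigFile`):

    from dbos._dbos_config import get_system_database_url, process_config

    config = process_config(data={"name": "my-app"})  # no database_url provided
    config["database_url"]           # sqlite:///... (name transformed by _app_name_to_db_name)
    get_system_database_url(config)  # the same SQLite URL serves as the system DB
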
dbos/_migration.py CHANGED
@@ -1,6 +1,7 @@
 import logging
 import os
 import re
+import sys
 
 import sqlalchemy as sa
 from alembic import command
@@ -230,4 +231,92 @@ CREATE TABLE dbos.event_dispatch_kv (
 );
 """
 
+
+def get_sqlite_timestamp_expr() -> str:
+    """Get SQLite timestamp expression with millisecond precision for Python >= 3.12."""
+    if sys.version_info >= (3, 12):
+        return "(unixepoch('subsec') * 1000)"
+    else:
+        return "(strftime('%s','now') * 1000)"
+
+
+sqlite_migration_one = f"""
+CREATE TABLE workflow_status (
+    workflow_uuid TEXT PRIMARY KEY,
+    status TEXT,
+    name TEXT,
+    authenticated_user TEXT,
+    assumed_role TEXT,
+    authenticated_roles TEXT,
+    request TEXT,
+    output TEXT,
+    error TEXT,
+    executor_id TEXT,
+    created_at INTEGER NOT NULL DEFAULT {get_sqlite_timestamp_expr()},
+    updated_at INTEGER NOT NULL DEFAULT {get_sqlite_timestamp_expr()},
+    application_version TEXT,
+    application_id TEXT,
+    class_name TEXT DEFAULT NULL,
+    config_name TEXT DEFAULT NULL,
+    recovery_attempts INTEGER DEFAULT 0,
+    queue_name TEXT,
+    workflow_timeout_ms INTEGER,
+    workflow_deadline_epoch_ms INTEGER,
+    inputs TEXT,
+    started_at_epoch_ms INTEGER,
+    deduplication_id TEXT,
+    priority INTEGER NOT NULL DEFAULT 0
+);
+
+CREATE INDEX workflow_status_created_at_index ON workflow_status (created_at);
+CREATE INDEX workflow_status_executor_id_index ON workflow_status (executor_id);
+CREATE INDEX workflow_status_status_index ON workflow_status (status);
+
+CREATE UNIQUE INDEX uq_workflow_status_queue_name_dedup_id
+    ON workflow_status (queue_name, deduplication_id);
+
+CREATE TABLE operation_outputs (
+    workflow_uuid TEXT NOT NULL,
+    function_id INTEGER NOT NULL,
+    function_name TEXT NOT NULL DEFAULT '',
+    output TEXT,
+    error TEXT,
+    child_workflow_id TEXT,
+    PRIMARY KEY (workflow_uuid, function_id),
+    FOREIGN KEY (workflow_uuid) REFERENCES workflow_status(workflow_uuid)
+        ON UPDATE CASCADE ON DELETE CASCADE
+);
+
+CREATE TABLE notifications (
+    destination_uuid TEXT NOT NULL,
+    topic TEXT,
+    message TEXT NOT NULL,
+    created_at_epoch_ms INTEGER NOT NULL DEFAULT {get_sqlite_timestamp_expr()},
+    message_uuid TEXT NOT NULL DEFAULT (hex(randomblob(16))),
+    FOREIGN KEY (destination_uuid) REFERENCES workflow_status(workflow_uuid)
+        ON UPDATE CASCADE ON DELETE CASCADE
+);
+CREATE INDEX idx_workflow_topic ON notifications (destination_uuid, topic);
+
+CREATE TABLE workflow_events (
+    workflow_uuid TEXT NOT NULL,
+    key TEXT NOT NULL,
+    value TEXT NOT NULL,
+    PRIMARY KEY (workflow_uuid, key),
+    FOREIGN KEY (workflow_uuid) REFERENCES workflow_status(workflow_uuid)
+        ON UPDATE CASCADE ON DELETE CASCADE
+);
+
+CREATE TABLE streams (
+    workflow_uuid TEXT NOT NULL,
+    key TEXT NOT NULL,
+    value TEXT NOT NULL,
+    "offset" INTEGER NOT NULL,
+    PRIMARY KEY (workflow_uuid, key, "offset"),
+    FOREIGN KEY (workflow_uuid) REFERENCES workflow_status(workflow_uuid)
+        ON UPDATE CASCADE ON DELETE CASCADE
+);
+"""
+
 dbos_migrations = [dbos_migration_one]
+sqlite_migrations = [sqlite_migration_one]
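
Reviewer note: one subtlety in `get_sqlite_timestamp_expr` — `unixepoch('subsec')` needs a recent SQLite (the `'subsec'` modifier arrived in SQLite 3.42), which the code assumes ships with Python 3.12+; older interpreters fall back to whole-second `strftime('%s','now')` scaled to milliseconds, losing sub-second precision. A quick illustrative check that both variants evaluate to epoch milliseconds:

    import sqlite3
    from dbos._migration import get_sqlite_timestamp_expr

    expr = get_sqlite_timestamp_expr()  # e.g. "(unixepoch('subsec') * 1000)"
    conn = sqlite3.connect(":memory:")
    (ms,) = conn.execute(f"SELECT {expr}").fetchone()
    print(int(ms))  # current Unix time in milliseconds
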