dbos 1.15.0a5__tar.gz → 1.15.0a7__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


Files changed (99)
  1. {dbos-1.15.0a5 → dbos-1.15.0a7}/PKG-INFO +1 -1
  2. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_client.py +4 -0
  3. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_dbos.py +3 -0
  4. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_dbos_config.py +9 -14
  5. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_sys_db.py +21 -2
  6. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_sys_db_postgres.py +37 -51
  7. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_sys_db_sqlite.py +0 -2
  8. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/cli/migration.py +1 -0
  9. {dbos-1.15.0a5 → dbos-1.15.0a7}/pyproject.toml +1 -1
  10. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/conftest.py +1 -1
  11. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/test_dbos.py +63 -0
  12. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/test_failures.py +3 -5
  13. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/test_schema_migration.py +2 -1
  14. {dbos-1.15.0a5 → dbos-1.15.0a7}/LICENSE +0 -0
  15. {dbos-1.15.0a5 → dbos-1.15.0a7}/README.md +0 -0
  16. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/__init__.py +0 -0
  17. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/__main__.py +0 -0
  18. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_admin_server.py +0 -0
  19. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_app_db.py +0 -0
  20. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_classproperty.py +0 -0
  21. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_conductor/conductor.py +0 -0
  22. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_conductor/protocol.py +0 -0
  23. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_context.py +0 -0
  24. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_core.py +0 -0
  25. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_croniter.py +0 -0
  26. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_debouncer.py +0 -0
  27. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_debug.py +0 -0
  28. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_docker_pg_helper.py +0 -0
  29. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_error.py +0 -0
  30. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_event_loop.py +0 -0
  31. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_fastapi.py +0 -0
  32. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_flask.py +0 -0
  33. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_kafka.py +0 -0
  34. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_kafka_message.py +0 -0
  35. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_logger.py +0 -0
  36. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_migration.py +0 -0
  37. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_outcome.py +0 -0
  38. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_queue.py +0 -0
  39. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_recovery.py +0 -0
  40. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_registrations.py +0 -0
  41. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_roles.py +0 -0
  42. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_scheduler.py +0 -0
  43. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_schemas/__init__.py +0 -0
  44. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_schemas/application_database.py +0 -0
  45. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_schemas/system_database.py +0 -0
  46. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_serialization.py +0 -0
  47. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_templates/dbos-db-starter/README.md +0 -0
  48. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_templates/dbos-db-starter/__package/__init__.py +0 -0
  49. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_templates/dbos-db-starter/__package/main.py.dbos +0 -0
  50. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_templates/dbos-db-starter/__package/schema.py +0 -0
  51. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +0 -0
  52. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_templates/dbos-db-starter/migrations/create_table.py.dbos +0 -0
  53. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -0
  54. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_tracer.py +0 -0
  55. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_utils.py +0 -0
  56. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/_workflow_commands.py +0 -0
  57. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/cli/_github_init.py +0 -0
  58. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/cli/_template_init.py +0 -0
  59. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/cli/cli.py +0 -0
  60. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/dbos-config.schema.json +0 -0
  61. {dbos-1.15.0a5 → dbos-1.15.0a7}/dbos/py.typed +0 -0
  62. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/__init__.py +0 -0
  63. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/atexit_no_ctor.py +0 -0
  64. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/atexit_no_launch.py +0 -0
  65. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/classdefs.py +0 -0
  66. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/client_collateral.py +0 -0
  67. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/client_worker.py +0 -0
  68. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/dupname_classdefs1.py +0 -0
  69. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/dupname_classdefsa.py +0 -0
  70. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/more_classdefs.py +0 -0
  71. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/queuedworkflow.py +0 -0
  72. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/script_without_fastapi.py +0 -0
  73. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/test_admin_server.py +0 -0
  74. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/test_async.py +0 -0
  75. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/test_async_workflow_management.py +0 -0
  76. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/test_classdecorators.py +0 -0
  77. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/test_cli.py +0 -0
  78. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/test_client.py +0 -0
  79. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/test_concurrency.py +0 -0
  80. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/test_config.py +0 -0
  81. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/test_croniter.py +0 -0
  82. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/test_debouncer.py +0 -0
  83. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/test_debug.py +0 -0
  84. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/test_docker_secrets.py +0 -0
  85. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/test_fastapi.py +0 -0
  86. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/test_fastapi_roles.py +0 -0
  87. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/test_flask.py +0 -0
  88. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/test_kafka.py +0 -0
  89. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/test_outcome.py +0 -0
  90. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/test_package.py +0 -0
  91. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/test_queue.py +0 -0
  92. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/test_scheduler.py +0 -0
  93. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/test_singleton.py +0 -0
  94. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/test_spans.py +0 -0
  95. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/test_sqlalchemy.py +0 -0
  96. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/test_streaming.py +0 -0
  97. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/test_workflow_introspection.py +0 -0
  98. {dbos-1.15.0a5 → dbos-1.15.0a7}/tests/test_workflow_management.py +0 -0
  99. {dbos-1.15.0a5 → dbos-1.15.0a7}/version/__init__.py +0 -0

--- dbos-1.15.0a5/PKG-INFO
+++ dbos-1.15.0a7/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dbos
-Version: 1.15.0a5
+Version: 1.15.0a7
 Summary: Ultra-lightweight durable execution in Python
 Author-Email: "DBOS, Inc." <contact@dbos.dev>
 License: MIT

--- dbos-1.15.0a5/dbos/_client.py
+++ dbos-1.15.0a7/dbos/_client.py
@@ -14,6 +14,8 @@ from typing import (
     Union,
 )
 
+import sqlalchemy as sa
+
 from dbos import _serialization
 from dbos._app_db import ApplicationDatabase
 from dbos._context import MaxPriority, MinPriority
@@ -122,6 +124,7 @@ class DBOSClient:
         database_url: Optional[str] = None,  # DEPRECATED
         *,
         system_database_url: Optional[str] = None,
+        system_database_engine: Optional[sa.Engine] = None,
         application_database_url: Optional[str] = None,
         dbos_system_schema: Optional[str] = "dbos",
     ):
@@ -145,6 +148,7 @@ class DBOSClient:
                 "max_overflow": 0,
                 "pool_size": 2,
             },
+            engine=system_database_engine,
             schema=dbos_system_schema,
         )
         self._sys_db.check_connection()
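
The new system_database_engine parameter lets a caller hand DBOSClient an existing SQLAlchemy engine instead of having the client build its own pool. A minimal usage sketch, not taken from the package: the URL is a placeholder and it assumes SQLAlchemy 2.x with a reachable Postgres system database.

import sqlalchemy as sa

from dbos import DBOSClient

# Placeholder URL: point this at an existing DBOS system database.
url = "postgresql+psycopg://postgres:dbos@localhost:5432/my_app_dbos_sys"

# An engine owned by the application; the client reuses it rather than
# opening its own small connection pool.
engine = sa.create_engine(url, pool_size=2, max_overflow=0)

client = DBOSClient(
    system_database_url=url,
    system_database_engine=engine,
)
print(client.list_workflows())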

--- dbos-1.15.0a5/dbos/_dbos.py
+++ dbos-1.15.0a7/dbos/_dbos.py
@@ -335,6 +335,8 @@ class DBOS:
         self._background_threads: List[threading.Thread] = []
         self.conductor_url: Optional[str] = conductor_url
         self.conductor_key: Optional[str] = conductor_key
+        if config.get("conductor_key"):
+            self.conductor_key = config.get("conductor_key")
         self.conductor_websocket: Optional[ConductorWebsocket] = None
         self._background_event_loop: BackgroundEventLoop = BackgroundEventLoop()
         self._active_workflows_set: set[str] = set()
@@ -449,6 +451,7 @@ class DBOS:
         self._sys_db_field = SystemDatabase.create(
             system_database_url=get_system_database_url(self._config),
             engine_kwargs=self._config["database"]["sys_db_engine_kwargs"],
+            engine=self._config["system_database_engine"],
             debug_mode=debug_mode,
             schema=schema,
         )

--- dbos-1.15.0a5/dbos/_dbos_config.py
+++ dbos-1.15.0a7/dbos/_dbos_config.py
@@ -3,6 +3,7 @@ import re
 from importlib import resources
 from typing import Any, Dict, List, Optional, TypedDict, cast
 
+import sqlalchemy as sa
 import yaml
 from sqlalchemy import make_url
 
@@ -34,6 +35,8 @@ class DBOSConfig(TypedDict, total=False):
         executor_id (str): Executor ID, used to identify the application instance in distributed environments
         dbos_system_schema (str): Schema name for DBOS system tables. Defaults to "dbos".
         enable_otlp (bool): If True, enable built-in DBOS OTLP tracing and logging.
+        system_database_engine (sa.Engine): A custom system database engine. If provided, DBOS will not create an engine but use this instead.
+        conductor_key (str): An API key for DBOS Conductor. Pass this in to connect your process to Conductor.
     """
 
     name: str
@@ -52,6 +55,8 @@ class DBOSConfig(TypedDict, total=False):
     executor_id: Optional[str]
     dbos_system_schema: Optional[str]
     enable_otlp: Optional[bool]
+    system_database_engine: Optional[sa.Engine]
+    conductor_key: Optional[str]
 
 
 class RuntimeConfig(TypedDict, total=False):
@@ -97,20 +102,7 @@ class TelemetryConfig(TypedDict, total=False):
 class ConfigFile(TypedDict, total=False):
     """
     Data structure containing the DBOS Configuration.
-
-    This configuration data is typically loaded from `dbos-config.yaml`.
-    See `https://docs.dbos.dev/python/reference/configuration#dbos-configuration-file`
-
-    Attributes:
-        name (str): Application name
-        runtimeConfig (RuntimeConfig): Configuration for DBOS Cloud
-        database (DatabaseConfig): Configure pool sizes, migrate commands
-        database_url (str): Application database URL
-        system_database_url (str): System database URL
-        telemetry (TelemetryConfig): Configuration for tracing / logging
-        env (Dict[str,str]): Environment variables
-        dbos_system_schema (str): Schema name for DBOS system tables. Defaults to "dbos".
-
+    The DBOSConfig object is parsed into this.
     """
 
     name: str
@@ -120,6 +112,7 @@ class ConfigFile(TypedDict, total=False):
     system_database_url: Optional[str]
    telemetry: Optional[TelemetryConfig]
     env: Dict[str, str]
+    system_database_engine: Optional[sa.Engine]
     dbos_system_schema: Optional[str]
 
 
@@ -188,6 +181,8 @@ def translate_dbos_config_to_config_file(config: DBOSConfig) -> ConfigFile:
     if telemetry:
         translated_config["telemetry"] = telemetry
 
+    translated_config["system_database_engine"] = config.get("system_database_engine")
+
     return translated_config
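
Together, the configuration changes above add two optional DBOSConfig fields: system_database_engine and conductor_key. A hedged sketch of how an application might set them (the app name, URL, and key are placeholders; per the new test further down, DBOS does not create the system database when an engine is supplied, so it must already exist):

import sqlalchemy as sa

from dbos import DBOS, DBOSConfig

# Placeholder URL for an already-existing system database.
url = "postgresql+psycopg://postgres:dbos@localhost:5432/my_app_dbos_sys"

config: DBOSConfig = {
    "name": "my-app",
    "system_database_url": url,
    # New in this release: reuse an application-managed engine instead of
    # letting DBOS create one from the URL.
    "system_database_engine": sa.create_engine(url),
    # New in this release: connect this process to DBOS Conductor.
    "conductor_key": "<your-conductor-api-key>",
}

DBOS(config=config)
DBOS.launch()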

--- dbos-1.15.0a5/dbos/_sys_db.py
+++ dbos-1.15.0a7/dbos/_sys_db.py
@@ -346,17 +346,32 @@ class SystemDatabase(ABC):
         *,
         system_database_url: str,
         engine_kwargs: Dict[str, Any],
+        engine: Optional[sa.Engine],
+        schema: Optional[str],
         debug_mode: bool = False,
     ):
         import sqlalchemy.dialects.postgresql as pg
         import sqlalchemy.dialects.sqlite as sq
 
         self.dialect = sq if system_database_url.startswith("sqlite") else pg
-        self.engine = self._create_engine(system_database_url, engine_kwargs)
+
+        if system_database_url.startswith("sqlite"):
+            self.schema = None
+        else:
+            self.schema = schema if schema else "dbos"
+            SystemSchema.set_schema(self.schema)
+
+        if engine:
+            self.engine = engine
+            self.created_engine = False
+        else:
+            self.engine = self._create_engine(system_database_url, engine_kwargs)
+            self.created_engine = True
         self._engine_kwargs = engine_kwargs
 
         self.notifications_map = ThreadSafeConditionDict()
         self.workflow_events_map = ThreadSafeConditionDict()
+        self._listener_thread_lock = threading.Lock()
 
         # Now we can run background processes
         self._run_background_processes = True
@@ -1443,6 +1458,7 @@ class SystemDatabase(ABC):
     def create(
         system_database_url: str,
         engine_kwargs: Dict[str, Any],
+        engine: Optional[sa.Engine],
        schema: Optional[str],
         debug_mode: bool = False,
     ) -> "SystemDatabase":
@@ -1453,6 +1469,8 @@ class SystemDatabase(ABC):
             return SQLiteSystemDatabase(
                 system_database_url=system_database_url,
                 engine_kwargs=engine_kwargs,
+                engine=engine,
+                schema=schema,
                 debug_mode=debug_mode,
             )
         else:
@@ -1461,8 +1479,9 @@ class SystemDatabase(ABC):
             return PostgresSystemDatabase(
                 system_database_url=system_database_url,
                 engine_kwargs=engine_kwargs,
-                debug_mode=debug_mode,
+                engine=engine,
                 schema=schema,
+                debug_mode=debug_mode,
             )
 
     @db_retry()

--- dbos-1.15.0a5/dbos/_sys_db_postgres.py
+++ dbos-1.15.0a7/dbos/_sys_db_postgres.py
@@ -1,12 +1,11 @@
 import time
-from typing import Any, Dict, Optional
+from typing import Any, Dict, Optional, cast
 
 import psycopg
 import sqlalchemy as sa
 from sqlalchemy.exc import DBAPIError
 
 from dbos._migration import ensure_dbos_schema, run_dbos_migrations
-from dbos._schemas.system_database import SystemSchema
 
 from ._logger import dbos_logger
 from ._sys_db import SystemDatabase
@@ -15,25 +14,7 @@ from ._sys_db import SystemDatabase
 class PostgresSystemDatabase(SystemDatabase):
     """PostgreSQL-specific implementation of SystemDatabase."""
 
-    def __init__(
-        self,
-        *,
-        system_database_url: str,
-        engine_kwargs: Dict[str, Any],
-        schema: Optional[str],
-        debug_mode: bool = False,
-    ):
-        super().__init__(
-            system_database_url=system_database_url,
-            engine_kwargs=engine_kwargs,
-            debug_mode=debug_mode,
-        )
-        if schema is None:
-            self.schema = "dbos"
-        else:
-            self.schema = schema
-        SystemSchema.set_schema(self.schema)
-        self.notification_conn: Optional[psycopg.connection.Connection] = None
+    notification_conn: Optional[sa.PoolProxiedConnection] = None
 
     def _create_engine(
         self, system_database_url: str, engine_kwargs: Dict[str, Any]
@@ -48,27 +29,35 @@ class PostgresSystemDatabase(SystemDatabase):
             return
         system_db_url = self.engine.url
         sysdb_name = system_db_url.database
-        # If the system database does not already exist, create it
-        engine = sa.create_engine(
-            system_db_url.set(database="postgres"), **self._engine_kwargs
-        )
-        with engine.connect() as conn:
-            conn.execution_options(isolation_level="AUTOCOMMIT")
-            if not conn.execute(
-                sa.text("SELECT 1 FROM pg_database WHERE datname=:db_name"),
-                parameters={"db_name": sysdb_name},
-            ).scalar():
-                dbos_logger.info(f"Creating system database {sysdb_name}")
-                conn.execute(sa.text(f"CREATE DATABASE {sysdb_name}"))
-        engine.dispose()
+        # Unless we were provided an engine, if the system database does not already exist, create it
+        if self.created_engine:
+            engine = sa.create_engine(
+                system_db_url.set(database="postgres"), **self._engine_kwargs
+            )
+            with engine.connect() as conn:
+                conn.execution_options(isolation_level="AUTOCOMMIT")
+                if not conn.execute(
+                    sa.text("SELECT 1 FROM pg_database WHERE datname=:db_name"),
+                    parameters={"db_name": sysdb_name},
+                ).scalar():
+                    dbos_logger.info(f"Creating system database {sysdb_name}")
+                    conn.execute(sa.text(f"CREATE DATABASE {sysdb_name}"))
+            engine.dispose()
+        else:
+            # If we were provided an engine, validate it can connect
+            with self.engine.connect() as conn:
+                conn.execute(sa.text("SELECT 1"))
 
+        assert self.schema
         ensure_dbos_schema(self.engine, self.schema)
         run_dbos_migrations(self.engine, self.schema)
 
     def _cleanup_connections(self) -> None:
         """Clean up PostgreSQL-specific connections."""
-        if self.notification_conn is not None:
-            self.notification_conn.close()
+        with self._listener_thread_lock:
+            if self.notification_conn and self.notification_conn.dbapi_connection:
+                self.notification_conn.dbapi_connection.close()
+                self.notification_conn.invalidate()
 
     def _is_unique_constraint_violation(self, dbapi_error: DBAPIError) -> bool:
         """Check if the error is a unique constraint violation in PostgreSQL."""
@@ -111,20 +100,18 @@ class PostgresSystemDatabase(SystemDatabase):
         """Listen for PostgreSQL notifications using psycopg."""
         while self._run_background_processes:
             try:
-                # since we're using the psycopg connection directly, we need a url without the "+psycopg" suffix
-                url = sa.URL.create(
-                    "postgresql", **self.engine.url.translate_connect_args()
-                )
-                # Listen to notifications
-                self.notification_conn = psycopg.connect(
-                    url.render_as_string(hide_password=False), autocommit=True
-                )
-
-                self.notification_conn.execute("LISTEN dbos_notifications_channel")
-                self.notification_conn.execute("LISTEN dbos_workflow_events_channel")
-
+                with self._listener_thread_lock:
+                    self.notification_conn = self.engine.raw_connection()
+                    self.notification_conn.detach()
+                    psycopg_conn = cast(
+                        psycopg.connection.Connection, self.notification_conn
+                    )
+                    psycopg_conn.set_autocommit(True)
+
+                psycopg_conn.execute("LISTEN dbos_notifications_channel")
+                psycopg_conn.execute("LISTEN dbos_workflow_events_channel")
                 while self._run_background_processes:
-                    gen = self.notification_conn.notifies()
+                    gen = psycopg_conn.notifies()
                     for notify in gen:
                         channel = notify.channel
                         dbos_logger.debug(
@@ -162,5 +149,4 @@ class PostgresSystemDatabase(SystemDatabase):
                 time.sleep(1)
                 # Then the loop will try to reconnect and restart the listener
             finally:
-                if self.notification_conn is not None:
-                    self.notification_conn.close()
+                self._cleanup_connections()
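
The listener above now borrows a connection from the engine's pool and detaches it, instead of opening a separate psycopg connection from a rebuilt URL; this is what lets a user-supplied engine drive LISTEN/NOTIFY as well. A standalone sketch of that pattern (channel name and URL are placeholders), assuming SQLAlchemy 2.x with the psycopg 3 driver:

import psycopg
import sqlalchemy as sa

# Placeholder URL; any postgresql+psycopg engine works the same way.
engine = sa.create_engine("postgresql+psycopg://postgres:dbos@localhost:5432/my_app_dbos_sys")

# Borrow a DBAPI connection from the pool and detach it so the pool no
# longer manages or recycles it.
proxied = engine.raw_connection()
proxied.detach()

# With the psycopg driver, the underlying DBAPI connection is a psycopg 3 Connection.
pg_conn = proxied.dbapi_connection
assert isinstance(pg_conn, psycopg.Connection)
pg_conn.set_autocommit(True)  # deliver notifications outside an open transaction

pg_conn.execute("LISTEN my_channel")  # placeholder channel name
for notify in pg_conn.notifies():  # blocks, yielding notifications as they arrive
    print(notify.channel, notify.payload)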

--- dbos-1.15.0a5/dbos/_sys_db_sqlite.py
+++ dbos-1.15.0a7/dbos/_sys_db_sqlite.py
@@ -6,7 +6,6 @@ import sqlalchemy as sa
 from sqlalchemy.exc import DBAPIError
 
 from dbos._migration import sqlite_migrations
-from dbos._schemas.system_database import SystemSchema
 
 from ._logger import dbos_logger
 from ._sys_db import SystemDatabase
@@ -19,7 +18,6 @@ class SQLiteSystemDatabase(SystemDatabase):
         self, system_database_url: str, engine_kwargs: Dict[str, Any]
     ) -> sa.Engine:
         """Create a SQLite engine."""
-        SystemSchema.set_schema(None)
         return sa.create_engine(system_database_url)
 
     def run_migrations(self) -> None:

--- dbos-1.15.0a5/dbos/cli/migration.py
+++ dbos-1.15.0a7/dbos/cli/migration.py
@@ -20,6 +20,7 @@ def migrate_dbos_databases(
            "max_overflow": 0,
            "pool_size": 2,
        },
+        engine=None,
        schema=schema,
    )
    sys_db.run_migrations()

--- dbos-1.15.0a5/pyproject.toml
+++ dbos-1.15.0a7/pyproject.toml
@@ -15,7 +15,7 @@ dependencies = [
 ]
 requires-python = ">=3.10"
 readme = "README.md"
-version = "1.15.0a5"
+version = "1.15.0a7"
 
 [project.license]
 text = "MIT"

--- dbos-1.15.0a5/tests/conftest.py
+++ dbos-1.15.0a7/tests/conftest.py
@@ -52,7 +52,7 @@ def default_config() -> DBOSConfig:
         "system_database_url": (
             "sqlite:///test.sqlite"
             if using_sqlite()
-            else f"postgresql://postgres:{quote(os.environ.get('PGPASSWORD', 'dbos'), safe='')}@localhost:5432/dbostestpy_dbos_sys"
+            else f"postgresql+psycopg://postgres:{quote(os.environ.get('PGPASSWORD', 'dbos'), safe='')}@localhost:5432/dbostestpy_dbos_sys"
         ),
         "enable_otlp": True,
     }

--- dbos-1.15.0a5/tests/test_dbos.py
+++ dbos-1.15.0a7/tests/test_dbos.py
@@ -11,6 +11,7 @@ from typing import Any, Optional
 
 import pytest
 import sqlalchemy as sa
+from sqlalchemy.exc import OperationalError
 
 # Public API
 from dbos import (
@@ -1827,3 +1828,65 @@ def test_custom_schema(
     steps = client.list_workflow_steps(handle.workflow_id)
     assert len(steps) == 4
     assert "transaction" in steps[0]["function_name"]
+
+
+def test_custom_engine(
+    config: DBOSConfig,
+    cleanup_test_databases: None,
+    db_engine: sa.Engine,
+    skip_with_sqlite: None,
+) -> None:
+    DBOS.destroy(destroy_registry=True)
+    assert config["system_database_url"]
+    config["application_database_url"] = None
+    system_database_url = config["system_database_url"]
+
+    # Create a custom engine
+    engine = sa.create_engine(system_database_url)
+    config["system_database_engine"] = engine
+
+    # Launch DBOS with the engine. It should fail because the database does not exist.
+    dbos = DBOS(config=config)
+    with pytest.raises(OperationalError):
+        DBOS.launch()
+    DBOS.destroy(destroy_registry=True)
+
+    # Create the database
+    with db_engine.connect() as c:
+        c.execution_options(isolation_level="AUTOCOMMIT")
+        sysdb_name = sa.make_url(config["system_database_url"]).database
+        c.execute(sa.text(f"CREATE DATABASE {sysdb_name}"))
+
+    # Launch DBOS again using the custom pool. It should succeed despite the bogus URL.
+    config["system_database_url"] = "postgresql://bogus:url@not:42/fake"
+    dbos = DBOS(config=config)
+    DBOS.launch()
+
+    key = "key"
+    val = "val"
+
+    @DBOS.workflow()
+    def recv_workflow() -> Any:
+        DBOS.set_event(key, val)
+        return DBOS.recv()
+
+    assert dbos._sys_db.engine == engine
+    handle = DBOS.start_workflow(recv_workflow)
+    assert DBOS.get_event(handle.workflow_id, key) == val
+    DBOS.send(handle.workflow_id, val)
+    assert handle.get_result() == val
+    assert len(DBOS.list_workflows()) == 2
+    steps = DBOS.list_workflow_steps(handle.workflow_id)
+    assert len(steps) == 3
+    assert "setEvent" in steps[0]["function_name"]
+    DBOS.destroy(destroy_registry=True)
+
+    # Test custom engine with client
+    client = DBOSClient(
+        system_database_url=config["system_database_url"],
+        system_database_engine=config["system_database_engine"],
+    )
+    assert len(client.list_workflows()) == 2
+    steps = client.list_workflow_steps(handle.workflow_id)
+    assert len(steps) == 3
+    assert "setEvent" in steps[0]["function_name"]

--- dbos-1.15.0a5/tests/test_failures.py
+++ dbos-1.15.0a7/tests/test_failures.py
@@ -136,12 +136,10 @@ def test_notification_errors(dbos: DBOS, skip_with_sqlite: None) -> None:
     system_database = cast(PostgresSystemDatabase, dbos._sys_db)
     while system_database.notification_conn is None:
         time.sleep(1)
-    system_database.notification_conn.close()
-    assert system_database.notification_conn.closed == 1
+    system_database._cleanup_connections()
 
-    # Wait for the connection to be re-established
-    while system_database.notification_conn.closed != 0:
-        time.sleep(1)
+    # Wait for the connection to re-establish
+    time.sleep(3)
 
     dest_uuid = str("sruuid1")
     with SetWorkflowID(dest_uuid):

--- dbos-1.15.0a5/tests/test_schema_migration.py
+++ dbos-1.15.0a7/tests/test_schema_migration.py
@@ -132,8 +132,9 @@ def test_sqlite_systemdb_migration() -> None:
     sys_db = SystemDatabase.create(
         system_database_url=sqlite_url,
         engine_kwargs={},
-        debug_mode=False,
+        engine=None,
         schema=None,
+        debug_mode=False,
     )
 
     # Run migrations