dbos 1.15.0a1__py3-none-any.whl → 1.15.0a3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dbos might be problematic.

dbos/_app_db.py CHANGED
@@ -241,6 +241,7 @@ class ApplicationDatabase(ABC):
     def create(
         database_url: str,
         engine_kwargs: Dict[str, Any],
+        schema: Optional[str],
         debug_mode: bool = False,
     ) -> "ApplicationDatabase":
         """Factory method to create the appropriate ApplicationDatabase implementation based on URL."""
@@ -256,12 +257,32 @@ class ApplicationDatabase(ABC):
             database_url=database_url,
             engine_kwargs=engine_kwargs,
             debug_mode=debug_mode,
+            schema=schema,
         )


 class PostgresApplicationDatabase(ApplicationDatabase):
     """PostgreSQL-specific implementation of ApplicationDatabase."""

+    def __init__(
+        self,
+        *,
+        database_url: str,
+        engine_kwargs: Dict[str, Any],
+        schema: Optional[str],
+        debug_mode: bool = False,
+    ):
+        super().__init__(
+            database_url=database_url,
+            engine_kwargs=engine_kwargs,
+            debug_mode=debug_mode,
+        )
+        if schema is None:
+            self.schema = "dbos"
+        else:
+            self.schema = schema
+            ApplicationSchema.transaction_outputs.schema = schema
+
     def _create_engine(
         self, database_url: str, engine_kwargs: Dict[str, Any]
     ) -> sa.Engine:
@@ -271,9 +292,6 @@ class PostgresApplicationDatabase(ApplicationDatabase):
         if engine_kwargs is None:
             engine_kwargs = {}

-        # TODO: Make the schema dynamic so this isn't needed
-        ApplicationSchema.transaction_outputs.schema = "dbos"
-
         return sa.create_engine(
             app_db_url,
             **engine_kwargs,
@@ -307,24 +325,18 @@ class PostgresApplicationDatabase(ApplicationDatabase):
                 sa.text(
                     "SELECT 1 FROM information_schema.schemata WHERE schema_name = :schema_name"
                 ),
-                parameters={"schema_name": ApplicationSchema.schema},
+                parameters={"schema_name": self.schema},
             ).scalar()

             if not schema_exists:
-                schema_creation_query = sa.text(
-                    f"CREATE SCHEMA {ApplicationSchema.schema}"
-                )
+                schema_creation_query = sa.text(f'CREATE SCHEMA "{self.schema}"')
                 conn.execute(schema_creation_query)

         inspector = inspect(self.engine)
-        if not inspector.has_table(
-            "transaction_outputs", schema=ApplicationSchema.schema
-        ):
+        if not inspector.has_table("transaction_outputs", schema=self.schema):
             ApplicationSchema.metadata_obj.create_all(self.engine)
         else:
-            columns = inspector.get_columns(
-                "transaction_outputs", schema=ApplicationSchema.schema
-            )
+            columns = inspector.get_columns("transaction_outputs", schema=self.schema)
             column_names = [col["name"] for col in columns]

             if "function_name" not in column_names:
@@ -333,7 +345,7 @@ class PostgresApplicationDatabase(ApplicationDatabase):
                     conn.execute(
                         text(
                             f"""
-                            ALTER TABLE {ApplicationSchema.schema}.transaction_outputs
+                            ALTER TABLE \"{self.schema}\".transaction_outputs
                             ADD COLUMN function_name TEXT NOT NULL DEFAULT '';
                             """
                         )
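Taken together, these hunks thread an explicit schema through the application-database factory: PostgresApplicationDatabase now carries a self.schema (defaulting to "dbos") instead of hard-coding the schema at engine-creation time. A minimal usage sketch; the connection URL and pool settings below are placeholders, not values taken from this release:

    from dbos._app_db import ApplicationDatabase

    app_db = ApplicationDatabase.create(
        database_url="postgresql://user:pass@localhost:5432/app_db",  # placeholder URL
        engine_kwargs={"pool_timeout": 30, "max_overflow": 0, "pool_size": 2},
        schema="dbos",  # None also falls back to "dbos" for Postgres
        debug_mode=False,
    )
    app_db.run_migrations()  # creates the schema and transaction_outputs table if missing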
dbos/_client.py CHANGED
@@ -22,11 +22,7 @@ from dbos._sys_db import SystemDatabase
 if TYPE_CHECKING:
     from dbos._dbos import WorkflowHandle, WorkflowHandleAsync

-from dbos._dbos_config import (
-    get_application_database_url,
-    get_system_database_url,
-    is_valid_database_url,
-)
+from dbos._dbos_config import get_system_database_url, is_valid_database_url
 from dbos._error import DBOSException, DBOSNonExistentWorkflowError
 from dbos._registrations import DEFAULT_MAX_RECOVERY_ATTEMPTS
 from dbos._serialization import WorkflowInputs
@@ -118,21 +114,20 @@ class WorkflowHandleClientAsyncPolling(Generic[R]):


 class DBOSClient:
+
+    _app_db: ApplicationDatabase | None = None
+
     def __init__(
         self,
         database_url: Optional[str] = None,  # DEPRECATED
         *,
         system_database_url: Optional[str] = None,
         application_database_url: Optional[str] = None,
+        dbos_system_schema: Optional[str] = "dbos",
         system_database: Optional[str] = None,  # DEPRECATED
     ):
-        application_database_url = get_application_database_url(
-            {
-                "system_database_url": system_database_url,
-                "database_url": (
-                    database_url if database_url else application_database_url
-                ),
-            }
+        application_database_url = (
+            database_url if database_url else application_database_url
         )
         system_database_url = get_system_database_url(
             {
@@ -142,7 +137,8 @@ class DBOSClient:
             }
         )
         assert is_valid_database_url(system_database_url)
-        assert is_valid_database_url(application_database_url)
+        if application_database_url:
+            assert is_valid_database_url(application_database_url)
         # We only create database connections but do not run migrations
         self._sys_db = SystemDatabase.create(
             system_database_url=system_database_url,
@@ -151,16 +147,19 @@ class DBOSClient:
                 "max_overflow": 0,
                 "pool_size": 2,
             },
+            schema=dbos_system_schema,
         )
         self._sys_db.check_connection()
-        self._app_db = ApplicationDatabase.create(
-            database_url=application_database_url,
-            engine_kwargs={
-                "pool_timeout": 30,
-                "max_overflow": 0,
-                "pool_size": 2,
-            },
-        )
+        if application_database_url:
+            self._app_db = ApplicationDatabase.create(
+                database_url=application_database_url,
+                engine_kwargs={
+                    "pool_timeout": 30,
+                    "max_overflow": 0,
+                    "pool_size": 2,
+                },
+                schema=dbos_system_schema,
+            )

     def destroy(self) -> None:
         self._sys_db.destroy()
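The client now treats the application database as optional and accepts the system-table schema directly. A hedged usage sketch (the URL and workflow ID are placeholders; omitting application_database_url simply leaves _app_db as None):

    from dbos import DBOSClient

    client = DBOSClient(
        system_database_url="postgresql://user:pass@localhost:5432/app_dbos_sys",  # placeholder
        dbos_system_schema="dbos",
    )
    status = client.retrieve_workflow(workflow_id="example-workflow-id").get_status()
    print(status)
    client.destroy()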
dbos/_core.py CHANGED
@@ -896,7 +896,9 @@ def decorate_transaction(
                 raise DBOSWorkflowCancelledError(
                     f"Workflow {ctx.workflow_id} is cancelled. Aborting transaction {transaction_name}."
                 )
-
+            assert (
+                dbos._app_db
+            ), "Transactions can only be used if DBOS is configured with an application_database_url"
             with dbos._app_db.sessionmaker() as session:
                 attributes: TracedAttributes = {
                     "name": transaction_name,
dbos/_dbos.py CHANGED
@@ -409,13 +409,8 @@ class DBOS:
         return rv

     @property
-    def _app_db(self) -> ApplicationDatabase:
-        if self._app_db_field is None:
-            raise DBOSException(
-                "Application database accessed before DBOS was launched"
-            )
-        rv: ApplicationDatabase = self._app_db_field
-        return rv
+    def _app_db(self) -> ApplicationDatabase | None:
+        return self._app_db_field

     @property
     def _admin_server(self) -> AdminServer:
@@ -448,26 +443,31 @@ class DBOS:
         dbos_logger.info(f"Application version: {GlobalParams.app_version}")
         self._executor_field = ThreadPoolExecutor(max_workers=sys.maxsize)
         self._background_event_loop.start()
-        assert self._config["database_url"] is not None
         assert self._config["database"]["sys_db_engine_kwargs"] is not None
+        # Get the schema configuration, use "dbos" as default
+        schema = self._config.get("dbos_system_schema", "dbos")
         self._sys_db_field = SystemDatabase.create(
             system_database_url=get_system_database_url(self._config),
             engine_kwargs=self._config["database"]["sys_db_engine_kwargs"],
             debug_mode=debug_mode,
+            schema=schema,
         )
         assert self._config["database"]["db_engine_kwargs"] is not None
-        self._app_db_field = ApplicationDatabase.create(
-            database_url=self._config["database_url"],
-            engine_kwargs=self._config["database"]["db_engine_kwargs"],
-            debug_mode=debug_mode,
-        )
+        if self._config["database_url"]:
+            self._app_db_field = ApplicationDatabase.create(
+                database_url=self._config["database_url"],
+                engine_kwargs=self._config["database"]["db_engine_kwargs"],
+                debug_mode=debug_mode,
+                schema=schema,
+            )

         if debug_mode:
             return

         # Run migrations for the system and application databases
         self._sys_db.run_migrations()
-        self._app_db.run_migrations()
+        if self._app_db:
+            self._app_db.run_migrations()

         admin_port = self._config.get("runtimeConfig", {}).get("admin_port")
         if admin_port is None:
dbos/_dbos_config.py CHANGED
@@ -1,4 +1,3 @@
-import json
 import os
 import re
 from importlib import resources
@@ -34,6 +33,7 @@ class DBOSConfig(TypedDict, total=False):
         otlp_attributes (dict[str, str]): A set of custom attributes to apply OTLP-exported logs and traces
         application_version (str): Application version
         executor_id (str): Executor ID, used to identify the application instance in distributed environments
+        dbos_system_schema (str): Schema name for DBOS system tables. Defaults to "dbos".
         enable_otlp (bool): If True, enable built-in DBOS OTLP tracing and logging.
     """

@@ -52,6 +52,7 @@ class DBOSConfig(TypedDict, total=False):
     otlp_attributes: Optional[dict[str, str]]
     application_version: Optional[str]
     executor_id: Optional[str]
+    dbos_system_schema: Optional[str]
     enable_otlp: Optional[bool]


@@ -70,6 +71,7 @@ class DatabaseConfig(TypedDict, total=False):
         sys_db_pool_size (int): System database pool size
         db_engine_kwargs (Dict[str, Any]): SQLAlchemy engine kwargs
         migrate (List[str]): Migration commands to run on startup
+        dbos_system_schema (str): Schema name for DBOS system tables. Defaults to "dbos".
     """

     sys_db_name: Optional[str]
@@ -113,6 +115,7 @@ class ConfigFile(TypedDict, total=False):
         system_database_url (str): System database URL
         telemetry (TelemetryConfig): Configuration for tracing / logging
         env (Dict[str,str]): Environment variables
+        dbos_system_schema (str): Schema name for DBOS system tables. Defaults to "dbos".

     """

@@ -123,6 +126,7 @@ class ConfigFile(TypedDict, total=False):
     system_database_url: Optional[str]
     telemetry: Optional[TelemetryConfig]
     env: Dict[str, str]
+    dbos_system_schema: Optional[str]


 def translate_dbos_config_to_config_file(config: DBOSConfig) -> ConfigFile:
@@ -153,6 +157,9 @@ def translate_dbos_config_to_config_file(config: DBOSConfig) -> ConfigFile:
     if "system_database_url" in config:
         translated_config["system_database_url"] = config.get("system_database_url")

+    if "dbos_system_schema" in config:
+        translated_config["dbos_system_schema"] = config.get("dbos_system_schema")
+
     # Runtime config
     translated_config["runtimeConfig"] = {"run_admin_server": True}
     if "admin_port" in config:
@@ -408,22 +415,20 @@ def process_config(
         url = url.set(database=f"{url.database}{SystemSchema.sysdb_suffix}")
         data["system_database_url"] = url.render_as_string(hide_password=False)

-    # If a system database URL is provided but not an application database URL, set the
-    # application database URL to the system database URL.
+    # If a system database URL is provided but not an application database URL,
+    # do not create an application database.
     if data.get("system_database_url") and not data.get("database_url"):
         assert data["system_database_url"]
-        data["database_url"] = data["system_database_url"]
+        data["database_url"] = None

-    # If neither URL is provided, use a default SQLite database URL.
+    # If neither URL is provided, use a default SQLite system database URL.
     if not data.get("database_url") and not data.get("system_database_url"):
         _app_db_name = _app_name_to_db_name(data["name"])
-        data["system_database_url"] = data["database_url"] = (
-            f"sqlite:///{_app_db_name}.sqlite"
-        )
+        data["system_database_url"] = f"sqlite:///{_app_db_name}.sqlite"
+        data["database_url"] = None

     configure_db_engine_parameters(data["database"], connect_timeout=connect_timeout)

-    assert data["database_url"] is not None
     assert data["system_database_url"] is not None
     # Pretty-print connection information, respecting log level
     if not silent and logs["logLevel"] == "INFO" or logs["logLevel"] == "DEBUG":
@@ -431,7 +436,12 @@ def process_config(
             hide_password=True
         )
         print(f"DBOS system database URL: {printable_sys_db_url}")
-        if data["database_url"].startswith("sqlite"):
+        if data["database_url"]:
+            printable_app_db_url = make_url(data["database_url"]).render_as_string(
+                hide_password=True
+            )
+            print(f"DBOS application database URL: {printable_app_db_url}")
+        if data["system_database_url"].startswith("sqlite"):
             print(
                 f"Using SQLite as a system database. The SQLite system database is for development and testing. PostgreSQL is recommended for production use."
             )
@@ -543,6 +553,8 @@ def overwrite_config(provided_config: ConfigFile) -> ConfigFile:
             "DBOS_SYSTEM_DATABASE_URL environment variable is not set. This is required to connect to the database."
         )
     provided_config["system_database_url"] = system_db_url
+    # Always use the "dbos" schema when deploying to DBOS Cloud
+    provided_config["dbos_system_schema"] = "dbos"

     # Telemetry config
     if "telemetry" not in provided_config or provided_config["telemetry"] is None:
@@ -615,12 +627,11 @@ def get_system_database_url(config: ConfigFile) -> str:
     )


-def get_application_database_url(config: ConfigFile) -> str:
+def get_application_database_url(config: ConfigFile) -> str | None:
     # For backwards compatibility, the application database URL is "database_url"
     if config.get("database_url"):
         assert config["database_url"]
         return config["database_url"]
     else:
-        # If the application database URL is not specified, set it to the system database URL
-        assert config["system_database_url"]
-        return config["system_database_url"]
+        # If the application database URL is not specified, return None
+        return None
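With these configuration changes, an application database is created only when database_url is provided, and the system-table schema is selected via dbos_system_schema (falling back to "dbos"). A hedged configuration sketch; the application name, URLs, and schema name are placeholders, and keeping database_url set is what allows @DBOS.transaction functions to pass the new assertion in _core.py:

    from dbos import DBOS, DBOSConfig

    config: DBOSConfig = {
        "name": "my-app",  # placeholder
        "system_database_url": "postgresql://user:pass@localhost:5432/app_dbos_sys",  # placeholder
        "database_url": "postgresql://user:pass@localhost:5432/app_db",  # omit to skip the app DB
        "dbos_system_schema": "dbos_custom",  # system tables land here instead of "dbos"
    }
    DBOS(config=config)
    DBOS.launch()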
dbos/_migration.py CHANGED
@@ -5,7 +5,7 @@ import sqlalchemy as sa
 from ._logger import dbos_logger


-def ensure_dbos_schema(engine: sa.Engine) -> None:
+def ensure_dbos_schema(engine: sa.Engine, schema: str) -> None:
     """
     True if using DBOS migrations (DBOS schema and migrations table already exist or were created)
     False if using Alembic migrations (DBOS schema exists, but dbos_migrations table doesn't)
@@ -14,41 +14,46 @@ def ensure_dbos_schema(engine: sa.Engine) -> None:
         # Check if dbos schema exists
         schema_result = conn.execute(
             sa.text(
-                "SELECT schema_name FROM information_schema.schemata WHERE schema_name = 'dbos'"
-            )
+                "SELECT schema_name FROM information_schema.schemata WHERE schema_name = :schema"
+            ),
+            {"schema": schema},
         )
         schema_exists = schema_result.fetchone() is not None

         # Create schema if it doesn't exist
         if not schema_exists:
-            conn.execute(sa.text("CREATE SCHEMA dbos"))
+            conn.execute(sa.text(f'CREATE SCHEMA "{schema}"'))

         # Check if dbos_migrations table exists
         table_result = conn.execute(
             sa.text(
-                "SELECT table_name FROM information_schema.tables WHERE table_schema = 'dbos' AND table_name = 'dbos_migrations'"
-            )
+                "SELECT table_name FROM information_schema.tables WHERE table_schema = :schema AND table_name = 'dbos_migrations'"
+            ),
+            {"schema": schema},
         )
         table_exists = table_result.fetchone() is not None

         if not table_exists:
             conn.execute(
                 sa.text(
-                    "CREATE TABLE dbos.dbos_migrations (version BIGINT NOT NULL PRIMARY KEY)"
+                    f'CREATE TABLE "{schema}".dbos_migrations (version BIGINT NOT NULL PRIMARY KEY)'
                 )
             )


-def run_dbos_migrations(engine: sa.Engine) -> None:
+def run_dbos_migrations(engine: sa.Engine, schema: str) -> None:
     """Run DBOS-managed migrations by executing each SQL command in dbos_migrations."""
     with engine.begin() as conn:
         # Get current migration version
-        result = conn.execute(sa.text("SELECT version FROM dbos.dbos_migrations"))
+        result = conn.execute(
+            sa.text(f'SELECT version FROM "{schema}".dbos_migrations')
+        )
         current_version = result.fetchone()
         last_applied = current_version[0] if current_version else 0

         # Apply migrations starting from the next version
-        for i, migration_sql in enumerate(dbos_migrations, 1):
+        migrations = get_dbos_migrations(schema)
+        for i, migration_sql in enumerate(migrations, 1):
             if i <= last_applied:
                 continue

@@ -60,23 +65,26 @@ def run_dbos_migrations(engine: sa.Engine) -> None:
             if last_applied == 0:
                 conn.execute(
                     sa.text(
-                        "INSERT INTO dbos.dbos_migrations (version) VALUES (:version)"
+                        f'INSERT INTO "{schema}".dbos_migrations (version) VALUES (:version)'
                     ),
                     {"version": i},
                 )
             else:
                 conn.execute(
-                    sa.text("UPDATE dbos.dbos_migrations SET version = :version"),
+                    sa.text(
+                        f'UPDATE "{schema}".dbos_migrations SET version = :version'
+                    ),
                     {"version": i},
                 )
             last_applied = i


-dbos_migration_one = """
+def get_dbos_migration_one(schema: str) -> str:
+    return f"""
 -- Enable uuid extension for generating UUIDs
 CREATE EXTENSION IF NOT EXISTS "uuid-ossp";

-CREATE TABLE dbos.workflow_status (
+CREATE TABLE \"{schema}\".workflow_status (
     workflow_uuid TEXT PRIMARY KEY,
     status TEXT,
     name TEXT,
@@ -103,15 +111,15 @@ CREATE TABLE dbos.workflow_status (
     priority INTEGER NOT NULL DEFAULT 0
 );

-CREATE INDEX workflow_status_created_at_index ON dbos.workflow_status (created_at);
-CREATE INDEX workflow_status_executor_id_index ON dbos.workflow_status (executor_id);
-CREATE INDEX workflow_status_status_index ON dbos.workflow_status (status);
+CREATE INDEX workflow_status_created_at_index ON \"{schema}\".workflow_status (created_at);
+CREATE INDEX workflow_status_executor_id_index ON \"{schema}\".workflow_status (executor_id);
+CREATE INDEX workflow_status_status_index ON \"{schema}\".workflow_status (status);

-ALTER TABLE dbos.workflow_status
+ALTER TABLE \"{schema}\".workflow_status
 ADD CONSTRAINT uq_workflow_status_queue_name_dedup_id
 UNIQUE (queue_name, deduplication_id);

-CREATE TABLE dbos.operation_outputs (
+CREATE TABLE \"{schema}\".operation_outputs (
     workflow_uuid TEXT NOT NULL,
     function_id INTEGER NOT NULL,
     function_name TEXT NOT NULL DEFAULT '',
@@ -119,23 +127,23 @@ CREATE TABLE dbos.operation_outputs (
     error TEXT,
     child_workflow_id TEXT,
     PRIMARY KEY (workflow_uuid, function_id),
-    FOREIGN KEY (workflow_uuid) REFERENCES dbos.workflow_status(workflow_uuid)
+    FOREIGN KEY (workflow_uuid) REFERENCES \"{schema}\".workflow_status(workflow_uuid)
         ON UPDATE CASCADE ON DELETE CASCADE
 );

-CREATE TABLE dbos.notifications (
+CREATE TABLE \"{schema}\".notifications (
     destination_uuid TEXT NOT NULL,
     topic TEXT,
     message TEXT NOT NULL,
     created_at_epoch_ms BIGINT NOT NULL DEFAULT (EXTRACT(epoch FROM now()) * 1000::numeric)::bigint,
     message_uuid TEXT NOT NULL DEFAULT gen_random_uuid(), -- Built-in function
-    FOREIGN KEY (destination_uuid) REFERENCES dbos.workflow_status(workflow_uuid)
+    FOREIGN KEY (destination_uuid) REFERENCES \"{schema}\".workflow_status(workflow_uuid)
         ON UPDATE CASCADE ON DELETE CASCADE
 );
-CREATE INDEX idx_workflow_topic ON dbos.notifications (destination_uuid, topic);
+CREATE INDEX idx_workflow_topic ON \"{schema}\".notifications (destination_uuid, topic);

 -- Create notification function
-CREATE OR REPLACE FUNCTION dbos.notifications_function() RETURNS TRIGGER AS $$
+CREATE OR REPLACE FUNCTION \"{schema}\".notifications_function() RETURNS TRIGGER AS $$
 DECLARE
     payload text := NEW.destination_uuid || '::' || NEW.topic;
 BEGIN
@@ -146,20 +154,20 @@ $$ LANGUAGE plpgsql;

 -- Create notification trigger
 CREATE TRIGGER dbos_notifications_trigger
-AFTER INSERT ON dbos.notifications
-FOR EACH ROW EXECUTE FUNCTION dbos.notifications_function();
+AFTER INSERT ON \"{schema}\".notifications
+FOR EACH ROW EXECUTE FUNCTION \"{schema}\".notifications_function();

-CREATE TABLE dbos.workflow_events (
+CREATE TABLE \"{schema}\".workflow_events (
     workflow_uuid TEXT NOT NULL,
     key TEXT NOT NULL,
     value TEXT NOT NULL,
     PRIMARY KEY (workflow_uuid, key),
-    FOREIGN KEY (workflow_uuid) REFERENCES dbos.workflow_status(workflow_uuid)
+    FOREIGN KEY (workflow_uuid) REFERENCES \"{schema}\".workflow_status(workflow_uuid)
         ON UPDATE CASCADE ON DELETE CASCADE
 );

 -- Create events function
-CREATE OR REPLACE FUNCTION dbos.workflow_events_function() RETURNS TRIGGER AS $$
+CREATE OR REPLACE FUNCTION \"{schema}\".workflow_events_function() RETURNS TRIGGER AS $$
 DECLARE
     payload text := NEW.workflow_uuid || '::' || NEW.key;
 BEGIN
@@ -170,20 +178,20 @@ $$ LANGUAGE plpgsql;

 -- Create events trigger
 CREATE TRIGGER dbos_workflow_events_trigger
-AFTER INSERT ON dbos.workflow_events
-FOR EACH ROW EXECUTE FUNCTION dbos.workflow_events_function();
+AFTER INSERT ON \"{schema}\".workflow_events
+FOR EACH ROW EXECUTE FUNCTION \"{schema}\".workflow_events_function();

-CREATE TABLE dbos.streams (
+CREATE TABLE \"{schema}\".streams (
     workflow_uuid TEXT NOT NULL,
     key TEXT NOT NULL,
     value TEXT NOT NULL,
     "offset" INTEGER NOT NULL,
     PRIMARY KEY (workflow_uuid, key, "offset"),
-    FOREIGN KEY (workflow_uuid) REFERENCES dbos.workflow_status(workflow_uuid)
+    FOREIGN KEY (workflow_uuid) REFERENCES \"{schema}\".workflow_status(workflow_uuid)
         ON UPDATE CASCADE ON DELETE CASCADE
 );

-CREATE TABLE dbos.event_dispatch_kv (
+CREATE TABLE \"{schema}\".event_dispatch_kv (
     service_name TEXT NOT NULL,
     workflow_fn_name TEXT NOT NULL,
     key TEXT NOT NULL,
@@ -195,6 +203,10 @@ CREATE TABLE dbos.event_dispatch_kv (
 """


+def get_dbos_migrations(schema: str) -> list[str]:
+    return [get_dbos_migration_one(schema)]
+
+
 def get_sqlite_timestamp_expr() -> str:
     """Get SQLite timestamp expression with millisecond precision for Python >= 3.12."""
     if sys.version_info >= (3, 12):
@@ -281,5 +293,4 @@ CREATE TABLE streams (
 );
 """

-dbos_migrations = [dbos_migration_one]
 sqlite_migrations = [sqlite_migration_one]
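The Postgres bootstrap migration is now rendered per schema instead of being a module-level constant. A small sketch of the generated SQL for an arbitrary schema name, assuming the module path dbos._migration shown in this diff:

    from dbos._migration import get_dbos_migrations

    migrations = get_dbos_migrations("analytics")  # "analytics" is an arbitrary example
    assert 'CREATE TABLE "analytics".workflow_status' in migrations[0]
    assert 'CREATE INDEX idx_workflow_topic ON "analytics".notifications' in migrations[0]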
dbos/_schemas/system_database.py CHANGED
@@ -1,3 +1,5 @@
+from typing import Optional
+
 from sqlalchemy import (
     BigInteger,
     Column,
@@ -19,6 +21,21 @@ class SystemSchema:
     metadata_obj = MetaData(schema="dbos")
     sysdb_suffix = "_dbos_sys"

+    @classmethod
+    def set_schema(cls, schema_name: Optional[str]) -> None:
+        """
+        Set the schema for all DBOS system tables.
+
+        Args:
+            schema_name: The name of the schema to use for system tables
+        """
+        cls.metadata_obj.schema = schema_name
+        cls.workflow_status.schema = schema_name
+        cls.operation_outputs.schema = schema_name
+        cls.notifications.schema = schema_name
+        cls.workflow_events.schema = schema_name
+        cls.streams.schema = schema_name
+
     workflow_status = Table(
         "workflow_status",
         metadata_obj,
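SystemSchema.set_schema replaces the per-table assignments that the Postgres and SQLite engines previously did by hand. A quick sketch of the intended usage; "analytics" is an arbitrary example name:

    from dbos._schemas.system_database import SystemSchema

    SystemSchema.set_schema("analytics")
    assert SystemSchema.workflow_status.schema == "analytics"
    SystemSchema.set_schema(None)  # what the SQLite engine setup now does
    assert SystemSchema.workflow_status.schema is None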
dbos/_sys_db.py CHANGED
@@ -1443,6 +1443,7 @@ class SystemDatabase(ABC):
     def create(
         system_database_url: str,
         engine_kwargs: Dict[str, Any],
+        schema: Optional[str],
         debug_mode: bool = False,
     ) -> "SystemDatabase":
         """Factory method to create the appropriate SystemDatabase implementation based on URL."""
@@ -1461,6 +1462,7 @@ class SystemDatabase(ABC):
             system_database_url=system_database_url,
             engine_kwargs=engine_kwargs,
             debug_mode=debug_mode,
+            schema=schema,
         )

     @db_retry()
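The same schema parameter flows through the system-database factory. A hedged sketch mirroring how DBOSClient now constructs it; the URL and pool settings are placeholders:

    from dbos._sys_db import SystemDatabase

    sys_db = SystemDatabase.create(
        system_database_url="postgresql://user:pass@localhost:5432/app_dbos_sys",  # placeholder
        engine_kwargs={"pool_timeout": 30, "max_overflow": 0, "pool_size": 2},
        schema="dbos",
    )
    sys_db.run_migrations()  # ensure_dbos_schema plus the schema-qualified migrations
    sys_db.destroy()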
dbos/_sys_db_postgres.py CHANGED
@@ -20,6 +20,7 @@ class PostgresSystemDatabase(SystemDatabase):
         *,
         system_database_url: str,
         engine_kwargs: Dict[str, Any],
+        schema: Optional[str],
         debug_mode: bool = False,
     ):
         super().__init__(
@@ -27,17 +28,16 @@ class PostgresSystemDatabase(SystemDatabase):
             engine_kwargs=engine_kwargs,
             debug_mode=debug_mode,
         )
+        if schema is None:
+            self.schema = "dbos"
+        else:
+            self.schema = schema
+        SystemSchema.set_schema(self.schema)
         self.notification_conn: Optional[psycopg.connection.Connection] = None

     def _create_engine(
         self, system_database_url: str, engine_kwargs: Dict[str, Any]
     ) -> sa.Engine:
-        # TODO: Make the schema dynamic so this isn't needed
-        SystemSchema.workflow_status.schema = "dbos"
-        SystemSchema.operation_outputs.schema = "dbos"
-        SystemSchema.notifications.schema = "dbos"
-        SystemSchema.workflow_events.schema = "dbos"
-        SystemSchema.streams.schema = "dbos"
         url = sa.make_url(system_database_url).set(drivername="postgresql+psycopg")
         return sa.create_engine(url, **engine_kwargs)

@@ -62,8 +62,8 @@ class PostgresSystemDatabase(SystemDatabase):
                 conn.execute(sa.text(f"CREATE DATABASE {sysdb_name}"))
             engine.dispose()

-        ensure_dbos_schema(self.engine)
-        run_dbos_migrations(self.engine)
+        ensure_dbos_schema(self.engine, self.schema)
+        run_dbos_migrations(self.engine, self.schema)

     def _cleanup_connections(self) -> None:
         """Clean up PostgreSQL-specific connections."""
dbos/_sys_db_sqlite.py CHANGED
@@ -19,12 +19,7 @@ class SQLiteSystemDatabase(SystemDatabase):
         self, system_database_url: str, engine_kwargs: Dict[str, Any]
     ) -> sa.Engine:
         """Create a SQLite engine."""
-        # TODO: Make the schema dynamic so this isn't needed
-        SystemSchema.workflow_status.schema = None
-        SystemSchema.operation_outputs.schema = None
-        SystemSchema.notifications.schema = None
-        SystemSchema.workflow_events.schema = None
-        SystemSchema.streams.schema = None
+        SystemSchema.set_schema(None)
         return sa.create_engine(system_database_url)

     def run_migrations(self) -> None:
dbos/_workflow_commands.py CHANGED
@@ -98,10 +98,10 @@ def get_workflow(sys_db: SystemDatabase, workflow_id: str) -> Optional[WorkflowS


 def list_workflow_steps(
-    sys_db: SystemDatabase, app_db: ApplicationDatabase, workflow_id: str
+    sys_db: SystemDatabase, app_db: Optional[ApplicationDatabase], workflow_id: str
 ) -> List[StepInfo]:
     steps = sys_db.get_workflow_steps(workflow_id)
-    transactions = app_db.get_transactions(workflow_id)
+    transactions = app_db.get_transactions(workflow_id) if app_db else []
     merged_steps = steps + transactions
     merged_steps.sort(key=lambda step: step["function_id"])
     return merged_steps
@@ -109,7 +109,7 @@ def list_workflow_steps(

 def fork_workflow(
     sys_db: SystemDatabase,
-    app_db: ApplicationDatabase,
+    app_db: Optional[ApplicationDatabase],
     workflow_id: str,
     start_step: int,
     *,
@@ -122,7 +122,8 @@ def fork_workflow(
         ctx.id_assigned_for_next_workflow = ""
     else:
         forked_workflow_id = str(uuid.uuid4())
-    app_db.clone_workflow_transactions(workflow_id, forked_workflow_id, start_step)
+    if app_db:
+        app_db.clone_workflow_transactions(workflow_id, forked_workflow_id, start_step)
     sys_db.fork_workflow(
         workflow_id,
         forked_workflow_id,
@@ -145,7 +146,10 @@ def garbage_collect(
     )
     if result is not None:
         cutoff_epoch_timestamp_ms, pending_workflow_ids = result
-        dbos._app_db.garbage_collect(cutoff_epoch_timestamp_ms, pending_workflow_ids)
+        if dbos._app_db:
+            dbos._app_db.garbage_collect(
+                cutoff_epoch_timestamp_ms, pending_workflow_ids
+            )


 def global_timeout(dbos: "DBOS", cutoff_epoch_timestamp_ms: int) -> None:
dbos/cli/cli.py CHANGED
@@ -38,7 +38,7 @@ class DefaultEncoder(json.JSONEncoder):

 def _get_db_url(
     *, system_database_url: Optional[str], application_database_url: Optional[str]
-) -> Tuple[str, str]:
+) -> Tuple[str, str | None]:
     """
     Get the database URL to use for the DBOS application.
     Order of precedence:
@@ -287,6 +287,13 @@ def migrate(
             help="The role with which you will run your DBOS application",
         ),
     ] = None,
+    schema: Annotated[
+        typing.Optional[str],
+        typer.Option(
+            "--schema",
+            help='Schema name for DBOS system tables. Defaults to "dbos".',
+        ),
+    ] = "dbos",
 ) -> None:
     system_database_url, application_database_url = _get_db_url(
         system_database_url=system_database_url,
@@ -294,22 +301,30 @@
     )

     typer.echo(f"Starting DBOS migrations")
-    typer.echo(f"Application database: {sa.make_url(application_database_url)}")
+    if application_database_url:
+        typer.echo(f"Application database: {sa.make_url(application_database_url)}")
     typer.echo(f"System database: {sa.make_url(system_database_url)}")
+    if schema is None:
+        schema = "dbos"
+    typer.echo(f"DBOS system schema: {schema}")

     # First, run DBOS migrations on the system database and the application database
     migrate_dbos_databases(
         app_database_url=application_database_url,
         system_database_url=system_database_url,
+        schema=schema,
     )

     # Next, assign permissions on the DBOS schema to the application role, if any
     if application_role:
+        if application_database_url:
+            grant_dbos_schema_permissions(
+                database_url=application_database_url,
+                role_name=application_role,
+                schema=schema,
+            )
         grant_dbos_schema_permissions(
-            database_url=application_database_url, role_name=application_role
-        )
-        grant_dbos_schema_permissions(
-            database_url=system_database_url, role_name=application_role
+            database_url=system_database_url, role_name=application_role, schema=schema
         )

     # Next, run any custom migration commands specified in the configuration
@@ -475,6 +490,13 @@ def list(
             help="Offset for pagination",
         ),
     ] = None,
+    schema: Annotated[
+        typing.Optional[str],
+        typer.Option(
+            "--schema",
+            help='Schema name for DBOS system tables. Defaults to "dbos".',
+        ),
+    ] = "dbos",
 ) -> None:
     system_database_url, application_database_url = _get_db_url(
         system_database_url=system_database_url,
@@ -483,6 +505,7 @@ def list(
     client = DBOSClient(
         application_database_url=application_database_url,
         system_database_url=system_database_url,
+        dbos_system_schema=schema,
     )
     workflows = client.list_workflows(
         limit=limit,
@@ -517,6 +540,13 @@ def get(
             help="Your DBOS system database URL",
         ),
     ] = None,
+    schema: Annotated[
+        typing.Optional[str],
+        typer.Option(
+            "--schema",
+            help='Schema name for DBOS system tables. Defaults to "dbos".',
+        ),
+    ] = "dbos",
 ) -> None:
     system_database_url, application_database_url = _get_db_url(
         system_database_url=system_database_url,
@@ -525,6 +555,7 @@ def get(
     client = DBOSClient(
         application_database_url=application_database_url,
         system_database_url=system_database_url,
+        dbos_system_schema=schema,
     )
     status = client.retrieve_workflow(workflow_id=workflow_id).get_status()
     print(json.dumps(status.__dict__, cls=DefaultEncoder))
@@ -549,6 +580,13 @@ def steps(
             help="Your DBOS system database URL",
         ),
     ] = None,
+    schema: Annotated[
+        typing.Optional[str],
+        typer.Option(
+            "--schema",
+            help='Schema name for DBOS system tables. Defaults to "dbos".',
+        ),
+    ] = "dbos",
 ) -> None:
     system_database_url, application_database_url = _get_db_url(
         system_database_url=system_database_url,
@@ -557,6 +595,7 @@ def steps(
     client = DBOSClient(
         application_database_url=application_database_url,
         system_database_url=system_database_url,
+        dbos_system_schema=schema,
     )
     steps = client.list_workflow_steps(workflow_id=workflow_id)
     print(json.dumps(steps, cls=DefaultEncoder))
@@ -583,6 +622,13 @@ def cancel(
             help="Your DBOS system database URL",
         ),
     ] = None,
+    schema: Annotated[
+        typing.Optional[str],
+        typer.Option(
+            "--schema",
+            help='Schema name for DBOS system tables. Defaults to "dbos".',
+        ),
+    ] = "dbos",
 ) -> None:
     system_database_url, application_database_url = _get_db_url(
         system_database_url=system_database_url,
@@ -591,6 +637,7 @@ def cancel(
     client = DBOSClient(
         application_database_url=application_database_url,
         system_database_url=system_database_url,
+        dbos_system_schema=schema,
     )
     client.cancel_workflow(workflow_id=workflow_id)

@@ -614,6 +661,13 @@ def resume(
             help="Your DBOS system database URL",
         ),
     ] = None,
+    schema: Annotated[
+        typing.Optional[str],
+        typer.Option(
+            "--schema",
+            help='Schema name for DBOS system tables. Defaults to "dbos".',
+        ),
+    ] = "dbos",
 ) -> None:
     system_database_url, application_database_url = _get_db_url(
         system_database_url=system_database_url,
@@ -622,6 +676,7 @@ def resume(
     client = DBOSClient(
         application_database_url=application_database_url,
         system_database_url=system_database_url,
+        dbos_system_schema=schema,
     )
     client.resume_workflow(workflow_id=workflow_id)

@@ -647,6 +702,13 @@ def restart(
             help="Your DBOS system database URL",
         ),
     ] = None,
+    schema: Annotated[
+        typing.Optional[str],
+        typer.Option(
+            "--schema",
+            help='Schema name for DBOS system tables. Defaults to "dbos".',
+        ),
+    ] = "dbos",
 ) -> None:
     system_database_url, application_database_url = _get_db_url(
         system_database_url=system_database_url,
@@ -655,6 +717,7 @@ def restart(
     client = DBOSClient(
         application_database_url=application_database_url,
         system_database_url=system_database_url,
+        dbos_system_schema=schema,
     )
     status = client.fork_workflow(workflow_id=workflow_id, start_step=1).get_status()
     print(json.dumps(status.__dict__, cls=DefaultEncoder))
@@ -705,6 +768,13 @@ def fork(
             help="Your DBOS system database URL",
         ),
     ] = None,
+    schema: Annotated[
+        typing.Optional[str],
+        typer.Option(
+            "--schema",
+            help='Schema name for DBOS system tables. Defaults to "dbos".',
+        ),
+    ] = "dbos",
 ) -> None:
     system_database_url, application_database_url = _get_db_url(
         system_database_url=system_database_url,
@@ -713,6 +783,7 @@ def fork(
     client = DBOSClient(
         application_database_url=application_database_url,
         system_database_url=system_database_url,
+        dbos_system_schema=schema,
     )

     if forked_workflow_id is not None:
@@ -809,6 +880,13 @@ def list_queue(
             help="Offset for pagination",
         ),
     ] = None,
+    schema: Annotated[
+        typing.Optional[str],
+        typer.Option(
+            "--schema",
+            help='Schema name for DBOS system tables. Defaults to "dbos".',
+        ),
+    ] = "dbos",
 ) -> None:
     system_database_url, application_database_url = _get_db_url(
         system_database_url=system_database_url,
@@ -817,6 +895,7 @@ def list_queue(
     client = DBOSClient(
         application_database_url=application_database_url,
         system_database_url=system_database_url,
+        dbos_system_schema=schema,
    )
     workflows = client.list_queued_workflows(
         limit=limit,
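Each of the workflow commands above now accepts a --schema option (default "dbos") that is forwarded to DBOSClient, and migrate forwards it to the migration helpers as well; for example, an invocation along the lines of dbos migrate --schema my_schema would create and migrate the DBOS system tables under a schema named my_schema (an arbitrary example name).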
dbos/cli/migration.py CHANGED
@@ -1,3 +1,5 @@
+from typing import Optional
+
 import sqlalchemy as sa
 import typer

@@ -5,7 +7,9 @@ from dbos._app_db import ApplicationDatabase
 from dbos._sys_db import SystemDatabase


-def migrate_dbos_databases(app_database_url: str, system_database_url: str) -> None:
+def migrate_dbos_databases(
+    app_database_url: Optional[str], system_database_url: str, schema: str
+) -> None:
     app_db = None
     sys_db = None
     try:
@@ -16,17 +20,20 @@ def migrate_dbos_databases(app_database_url: str, system_database_url: str) -> N
                 "max_overflow": 0,
                 "pool_size": 2,
             },
-        )
-        app_db = ApplicationDatabase.create(
-            database_url=app_database_url,
-            engine_kwargs={
-                "pool_timeout": 30,
-                "max_overflow": 0,
-                "pool_size": 2,
-            },
+            schema=schema,
         )
         sys_db.run_migrations()
-        app_db.run_migrations()
+        if app_database_url:
+            app_db = ApplicationDatabase.create(
+                database_url=app_database_url,
+                engine_kwargs={
+                    "pool_timeout": 30,
+                    "max_overflow": 0,
+                    "pool_size": 2,
+                },
+                schema=schema,
+            )
+            app_db.run_migrations()
     except Exception as e:
         typer.echo(f"DBOS migrations failed: {e}")
         raise typer.Exit(code=1)
@@ -37,12 +44,14 @@ def migrate_dbos_databases(app_database_url: str, system_database_url: str) -> N
             app_db.destroy()


-def grant_dbos_schema_permissions(database_url: str, role_name: str) -> None:
+def grant_dbos_schema_permissions(
+    database_url: str, role_name: str, schema: str
+) -> None:
     """
-    Grant all permissions on all entities in the dbos schema to the specified role.
+    Grant all permissions on all entities in the system schema to the specified role.
     """
     typer.echo(
-        f"Granting permissions for DBOS schema to {role_name} in database {sa.make_url(database_url)}"
+        f"Granting permissions for the {schema} schema to {role_name} in database {sa.make_url(database_url)}"
     )
     engine = None
     try:
@@ -52,38 +61,38 @@ def grant_dbos_schema_permissions(database_url: str, role_name: str) -> None:
         with engine.connect() as connection:
             connection.execution_options(isolation_level="AUTOCOMMIT")

-            # Grant usage on the dbos schema
-            sql = f'GRANT USAGE ON SCHEMA dbos TO "{role_name}"'
+            # Grant usage on the system schema
+            sql = f'GRANT USAGE ON SCHEMA "{schema}" TO "{role_name}"'
             typer.echo(sql)
             connection.execute(sa.text(sql))

-            # Grant all privileges on all existing tables in dbos schema (includes views)
-            sql = f'GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA dbos TO "{role_name}"'
+            # Grant all privileges on all existing tables in the system schema (includes views)
+            sql = f'GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA "{schema}" TO "{role_name}"'
             typer.echo(sql)
             connection.execute(sa.text(sql))

-            # Grant all privileges on all sequences in dbos schema
-            sql = (
-                f'GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA dbos TO "{role_name}"'
-            )
+            # Grant all privileges on all sequences in the system schema
+            sql = f'GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA "{schema}" TO "{role_name}"'
             typer.echo(sql)
             connection.execute(sa.text(sql))

-            # Grant execute on all functions and procedures in dbos schema
-            sql = f'GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA dbos TO "{role_name}"'
+            # Grant execute on all functions and procedures in the system schema
+            sql = (
+                f'GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA "{schema}" TO "{role_name}"'
+            )
             typer.echo(sql)
             connection.execute(sa.text(sql))

-            # Grant default privileges for future objects in dbos schema
-            sql = f'ALTER DEFAULT PRIVILEGES IN SCHEMA dbos GRANT ALL ON TABLES TO "{role_name}"'
+            # Grant default privileges for future objects in the system schema
+            sql = f'ALTER DEFAULT PRIVILEGES IN SCHEMA "{schema}" GRANT ALL ON TABLES TO "{role_name}"'
             typer.echo(sql)
             connection.execute(sa.text(sql))

-            sql = f'ALTER DEFAULT PRIVILEGES IN SCHEMA dbos GRANT ALL ON SEQUENCES TO "{role_name}"'
+            sql = f'ALTER DEFAULT PRIVILEGES IN SCHEMA "{schema}" GRANT ALL ON SEQUENCES TO "{role_name}"'
             typer.echo(sql)
             connection.execute(sa.text(sql))

-            sql = f'ALTER DEFAULT PRIVILEGES IN SCHEMA dbos GRANT EXECUTE ON FUNCTIONS TO "{role_name}"'
+            sql = f'ALTER DEFAULT PRIVILEGES IN SCHEMA "{schema}" GRANT EXECUTE ON FUNCTIONS TO "{role_name}"'
             typer.echo(sql)
             connection.execute(sa.text(sql))

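A hedged sketch of calling these helpers directly, roughly what the migrate command now does with its schema argument; the URLs, role name, and schema are placeholders:

    from dbos.cli.migration import grant_dbos_schema_permissions, migrate_dbos_databases

    migrate_dbos_databases(
        app_database_url=None,  # no application database configured
        system_database_url="postgresql://user:pass@localhost:5432/app_dbos_sys",  # placeholder
        schema="dbos",
    )
    grant_dbos_schema_permissions(
        database_url="postgresql://user:pass@localhost:5432/app_dbos_sys",  # placeholder
        role_name="app_role",  # placeholder role
        schema="dbos",
    )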
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dbos
-Version: 1.15.0a1
+Version: 1.15.0a3
 Summary: Ultra-lightweight durable execution in Python
 Author-Email: "DBOS, Inc." <contact@dbos.dev>
 License: MIT
@@ -1,20 +1,20 @@
-dbos-1.15.0a1.dist-info/METADATA,sha256=554w5m_z7LQ0LxzPVltRg_qEfahaA1NPkLDLv9g6GMY,13021
-dbos-1.15.0a1.dist-info/WHEEL,sha256=9P2ygRxDrTJz3gsagc0Z96ukrxjr-LFBGOgv3AuKlCA,90
-dbos-1.15.0a1.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
-dbos-1.15.0a1.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
+dbos-1.15.0a3.dist-info/METADATA,sha256=Ft-tggTIl0dxVo3Byd4BKytIRVdvegchZIKPnrcnq0w,13021
+dbos-1.15.0a3.dist-info/WHEEL,sha256=9P2ygRxDrTJz3gsagc0Z96ukrxjr-LFBGOgv3AuKlCA,90
+dbos-1.15.0a3.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
+dbos-1.15.0a3.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
 dbos/__init__.py,sha256=pT4BuNLDCrIQX27vQG8NlfxX6PZRU7r9miq4thJTszU,982
 dbos/__main__.py,sha256=G7Exn-MhGrVJVDbgNlpzhfh8WMX_72t3_oJaFT9Lmt8,653
 dbos/_admin_server.py,sha256=e8ELhcDWqR3_PNobnNgUvLGh5lzZq0yFSF6dvtzoQRI,16267
-dbos/_app_db.py,sha256=GsV-uYU0QsChWwQDxnrh8_iiZ_zMQB-bsP2jPGIe2aM,16094
+dbos/_app_db.py,sha256=WJwUdKsTpSZPCIWVeSF5FQNf5y1PF_lJ96tiaCjvck8,16385
 dbos/_classproperty.py,sha256=f0X-_BySzn3yFDRKB2JpCbLYQ9tLwt1XftfshvY7CBs,626
-dbos/_client.py,sha256=XW5EQltqkwRgTnHq3avhuBuOTYSZkERrk9GMpqX8kfM,18859
+dbos/_client.py,sha256=Rp1OiT5523QosHB8Qy0xe9eWNx9osZGhfQknO122hj8,18928
 dbos/_conductor/conductor.py,sha256=3E_hL3c9g9yWqKZkvI6KA0-ZzPMPRo06TOzT1esMiek,24114
 dbos/_conductor/protocol.py,sha256=q3rgLxINFtWFigdOONc-4gX4vn66UmMlJQD6Kj8LnL4,7420
 dbos/_context.py,sha256=cJDxVbswTLXKE5MV4Hmg6gpIX3Dd5mBTG-4lmofWP9E,27668
-dbos/_core.py,sha256=Eec_XWwewuwyaJiQwJMv7bWBWQoC3QWXDJsm2hzUVdI,50342
+dbos/_core.py,sha256=13DNN_fpSIs42NquV80XsHV7yKwY_adKP03h_xhXok4,50493
 dbos/_croniter.py,sha256=XHAyUyibs_59sJQfSNWkP7rqQY6_XrlfuuCxk4jYqek,47559
-dbos/_dbos.py,sha256=dMJvLCCPqF31oqxgVF7ydcOgFmy4jRI5ky8AdMReUCQ,57987
-dbos/_dbos_config.py,sha256=B1y1hrWJc8UIoWbCqqBRGnc3bainLPp-TC6x1phInk0,25320
+dbos/_dbos.py,sha256=ReujpyRseUNL_9FQN-uIhSFh6HJnAA2IAcVy6qDdEco,58036
+dbos/_dbos_config.py,sha256=pnFeWZFDsk_94DWDCqm3e-ppTrqBFKQQdD5TvQGZ8-Y,25963
 dbos/_debouncer.py,sha256=VmGq1_ZIQ79fnH14LEhdoqxKWp6rlEwzsUwumwAMgTQ,15095
 dbos/_debug.py,sha256=0MfgNqutCUhI4PEmmra9x7f3DiFE_0nscfUCHdLimEY,1415
 dbos/_docker_pg_helper.py,sha256=xySum4hTA8TVMBODoG19u4cXQAB1vCock-jwM2pnmSI,7791
@@ -25,7 +25,7 @@ dbos/_flask.py,sha256=Npnakt-a3W5OykONFRkDRnumaDhTQmA0NPdUCGRYKXE,1652
 dbos/_kafka.py,sha256=Gm4fHWl7gYb-i5BMvwNwm5Km3z8zQpseqdMgqgFjlGI,4252
 dbos/_kafka_message.py,sha256=NYvOXNG3Qn7bghn1pv3fg4Pbs86ILZGcK4IB-MLUNu0,409
 dbos/_logger.py,sha256=djnCp147QoQ1iG9Bt3Uz8RyGaXGmi6gebccXsrA6Cps,4660
-dbos/_migration.py,sha256=LgxWPtXqRRwjvS5CrSvQ81B_UzLvRNWd4fnQ_Wo-gek,9507
+dbos/_migration.py,sha256=VAQxZXWQISifW0JpIG78lowV1MTBJ5ZC4P0YIwqxQhM,10013
 dbos/_outcome.py,sha256=7HvosMfEHTh1U5P6xok7kFTGLwa2lPaul0YApb3UnN4,8191
 dbos/_queue.py,sha256=0kJTPwXy3nZ4Epzt-lHky9M9S4L31645drPGFR8fIJY,4854
 dbos/_recovery.py,sha256=K-wlFhdf4yGRm6cUzyhcTjQUS0xp2T5rdNMLiiBErYg,2882
@@ -34,11 +34,11 @@ dbos/_roles.py,sha256=kCuhhg8XLtrHCgKgm44I0abIRTGHltf88OwjEKAUggk,2317
 dbos/_scheduler.py,sha256=CWeGVfl9h51VXfxt80y5Da_5pE8SPty_AYkfpJkkMxQ,2117
 dbos/_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_schemas/application_database.py,sha256=SypAS9l9EsaBHFn9FR8jmnqt01M74d9AF1AMa4m2hhI,1040
-dbos/_schemas/system_database.py,sha256=-dAKk-_Y3vzbpLT4ei-sIrBQgFyQiwPj1enZb1TYc8I,4943
+dbos/_schemas/system_database.py,sha256=aEkjRQDh9xjdke0d9uFx_20-c9UjQtvuLtHZ24aOypA,5497
 dbos/_serialization.py,sha256=GLgWLtHpvk7nSHyXukVQLE1ASNA3CJBtfF8w6iflBDw,3590
-dbos/_sys_db.py,sha256=SspVk-wYmE6xZLuyYQUclwh_AMjnkDXcog5g5WmYn7c,83036
-dbos/_sys_db_postgres.py,sha256=CcvxWzoByEvCZ2P_P-KNBRcyJ_8vSpCjtHBRmc7l5hI,7324
-dbos/_sys_db_sqlite.py,sha256=xT9l-czMhLmfuu5UcnBzAyUxSFgzt3XtEWx9t_D8mZs,7361
+dbos/_sys_db.py,sha256=Trlbf99KvH8YRthQ3diJQMSi0eDmky7X8dPG1wuv5VI,83098
+dbos/_sys_db_postgres.py,sha256=V3RGj2cXdCZ8xNxiMO-ECfKuHXtGZLVVNtOXtFSVHBw,7215
+dbos/_sys_db_sqlite.py,sha256=lEmJjyUHXo09q1sMpNpasurh4JyrH6DsmYzLjDC5k6Y,7091
 dbos/_templates/dbos-db-starter/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
 dbos/_templates/dbos-db-starter/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_templates/dbos-db-starter/__package/main.py.dbos,sha256=aQnBPSSQpkB8ERfhf7gB7P9tsU6OPKhZscfeh0yiaD8,2702
@@ -48,12 +48,12 @@ dbos/_templates/dbos-db-starter/migrations/create_table.py.dbos,sha256=pVm2Q0Asx
 dbos/_templates/dbos-db-starter/start_postgres_docker.py,sha256=lQVLlYO5YkhGPEgPqwGc7Y8uDKse9HsWv5fynJEFJHM,1681
 dbos/_tracer.py,sha256=PHbD7iTEkHk7z4B9hc-wPgi2dPTeI1rhZgLI33TQEeM,3786
 dbos/_utils.py,sha256=ZdoM1MDbHnlJrh31zfhp3iX62bAxK1kyvMwXnltC_84,1779
-dbos/_workflow_commands.py,sha256=EmmAaQfRWeOZm_WPTznuU-O3he3jiSzzT9VpYrhxugE,4835
+dbos/_workflow_commands.py,sha256=k-i1bCfNrux43BHLT8wQ-l-MVZX3D6LGZLH7-uuiDRo,4951
 dbos/cli/_github_init.py,sha256=R_94Fnn40CAmPy-zM00lwHi0ndyfv57TmIooADjmag4,3378
 dbos/cli/_template_init.py,sha256=AltKk256VocgvxLpuTxpjJyACrdHFjbGoqYhHzeLae4,2649
-dbos/cli/cli.py,sha256=-yoFUGzwPiAJlDfCOWr2TfnVw5LvMGqDKDK-ri-HPzw,26631
-dbos/cli/migration.py,sha256=5GiyagLZkyVvDz3StYxtFdkFoKFCmh6eSXjzsIGhZ_A,3330
+dbos/cli/cli.py,sha256=K8Fz05qach61EMuv5reUMRgT-UltysGZ4XerOTR4KEg,29003
+dbos/cli/migration.py,sha256=1Y52EMc2rX7PJgJa3_KXV7oiQEO569k7aHXRofwYipo,3608
 dbos/dbos-config.schema.json,sha256=LyUT1DOTaAwOP6suxQGS5KemVIqXGPyu_q7Hbo0neA8,6192
 dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
 version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
-dbos-1.15.0a1.dist-info/RECORD,,
+dbos-1.15.0a3.dist-info/RECORD,,