dbos-1.15.0a2.tar.gz → dbos-1.15.0a4.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {dbos-1.15.0a2 → dbos-1.15.0a4}/PKG-INFO +1 -1
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_admin_server.py +1 -1
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_app_db.py +26 -14
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_client.py +3 -2
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_dbos.py +4 -24
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_dbos_config.py +14 -19
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_migration.py +46 -35
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_schemas/system_database.py +17 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_sys_db.py +2 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_sys_db_postgres.py +8 -8
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_sys_db_sqlite.py +1 -6
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/cli/cli.py +69 -34
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/cli/migration.py +22 -18
- dbos-1.15.0a4/dbos/dbos-config.schema.json +61 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/pyproject.toml +1 -1
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/test_async_workflow_management.py +0 -19
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/test_config.py +6 -60
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/test_dbos.py +53 -1
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/test_package.py +48 -18
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/test_schema_migration.py +135 -104
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/test_workflow_management.py +0 -56
- dbos-1.15.0a2/dbos/dbos-config.schema.json +0 -182
- {dbos-1.15.0a2 → dbos-1.15.0a4}/LICENSE +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/README.md +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/__init__.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/__main__.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_classproperty.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_conductor/conductor.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_conductor/protocol.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_context.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_core.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_croniter.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_debouncer.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_debug.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_docker_pg_helper.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_error.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_event_loop.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_fastapi.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_flask.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_kafka.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_kafka_message.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_logger.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_outcome.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_queue.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_recovery.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_registrations.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_roles.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_scheduler.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_schemas/__init__.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_schemas/application_database.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_serialization.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_templates/dbos-db-starter/README.md +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_templates/dbos-db-starter/__package/__init__.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_templates/dbos-db-starter/__package/main.py.dbos +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_templates/dbos-db-starter/__package/schema.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_templates/dbos-db-starter/migrations/create_table.py.dbos +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_tracer.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_utils.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/_workflow_commands.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/cli/_github_init.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/cli/_template_init.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/dbos/py.typed +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/__init__.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/atexit_no_ctor.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/atexit_no_launch.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/classdefs.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/client_collateral.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/client_worker.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/conftest.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/dupname_classdefs1.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/dupname_classdefsa.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/more_classdefs.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/queuedworkflow.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/script_without_fastapi.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/test_admin_server.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/test_async.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/test_classdecorators.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/test_cli.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/test_client.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/test_concurrency.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/test_croniter.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/test_debouncer.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/test_debug.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/test_docker_secrets.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/test_failures.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/test_fastapi.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/test_fastapi_roles.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/test_flask.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/test_kafka.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/test_outcome.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/test_queue.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/test_scheduler.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/test_singleton.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/test_spans.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/test_sqlalchemy.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/test_streaming.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/tests/test_workflow_introspection.py +0 -0
- {dbos-1.15.0a2 → dbos-1.15.0a4}/version/__init__.py +0 -0
dbos/_admin_server.py

@@ -244,7 +244,7 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
     def _handle_restart(self, workflow_id: str) -> None:
         try:
             print(f"Restarting workflow {workflow_id}")
-            handle = self.dbos.restart_workflow(workflow_id)
+            handle = self.dbos.fork_workflow(workflow_id, 1)
             response_body = json.dumps(
                 {
                     "workflow_id": handle.workflow_id,
dbos/_app_db.py

@@ -241,6 +241,7 @@ class ApplicationDatabase(ABC):
     def create(
         database_url: str,
         engine_kwargs: Dict[str, Any],
+        schema: Optional[str],
         debug_mode: bool = False,
     ) -> "ApplicationDatabase":
         """Factory method to create the appropriate ApplicationDatabase implementation based on URL."""
@@ -256,12 +257,32 @@ class ApplicationDatabase(ABC):
             database_url=database_url,
             engine_kwargs=engine_kwargs,
             debug_mode=debug_mode,
+            schema=schema,
         )


 class PostgresApplicationDatabase(ApplicationDatabase):
     """PostgreSQL-specific implementation of ApplicationDatabase."""

+    def __init__(
+        self,
+        *,
+        database_url: str,
+        engine_kwargs: Dict[str, Any],
+        schema: Optional[str],
+        debug_mode: bool = False,
+    ):
+        super().__init__(
+            database_url=database_url,
+            engine_kwargs=engine_kwargs,
+            debug_mode=debug_mode,
+        )
+        if schema is None:
+            self.schema = "dbos"
+        else:
+            self.schema = schema
+        ApplicationSchema.transaction_outputs.schema = schema
+
     def _create_engine(
         self, database_url: str, engine_kwargs: Dict[str, Any]
     ) -> sa.Engine:
@@ -271,9 +292,6 @@ class PostgresApplicationDatabase(ApplicationDatabase):
         if engine_kwargs is None:
             engine_kwargs = {}

-        # TODO: Make the schema dynamic so this isn't needed
-        ApplicationSchema.transaction_outputs.schema = "dbos"
-
         return sa.create_engine(
             app_db_url,
             **engine_kwargs,
@@ -307,24 +325,18 @@ class PostgresApplicationDatabase(ApplicationDatabase):
                 sa.text(
                     "SELECT 1 FROM information_schema.schemata WHERE schema_name = :schema_name"
                 ),
-                parameters={"schema_name": ApplicationSchema.schema},
+                parameters={"schema_name": self.schema},
             ).scalar()

             if not schema_exists:
-                schema_creation_query = sa.text(
-                    f"CREATE SCHEMA {ApplicationSchema.schema}"
-                )
+                schema_creation_query = sa.text(f'CREATE SCHEMA "{self.schema}"')
                 conn.execute(schema_creation_query)

         inspector = inspect(self.engine)
-        if not inspector.has_table(
-            "transaction_outputs", schema=ApplicationSchema.schema
-        ):
+        if not inspector.has_table("transaction_outputs", schema=self.schema):
             ApplicationSchema.metadata_obj.create_all(self.engine)
         else:
-            columns = inspector.get_columns(
-                "transaction_outputs", schema=ApplicationSchema.schema
-            )
+            columns = inspector.get_columns("transaction_outputs", schema=self.schema)
             column_names = [col["name"] for col in columns]

             if "function_name" not in column_names:
@@ -333,7 +345,7 @@ class PostgresApplicationDatabase(ApplicationDatabase):
                 conn.execute(
                     text(
                         f"""
-                        ALTER TABLE {ApplicationSchema.schema}.transaction_outputs
+                        ALTER TABLE \"{self.schema}\".transaction_outputs
                         ADD COLUMN function_name TEXT NOT NULL DEFAULT '';
                         """
                     )
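Note: ApplicationDatabase.create now threads a schema argument through to the Postgres implementation, which stores it as self.schema and falls back to "dbos" when None is passed. A minimal sketch of calling the internal factory; the URL and engine kwargs are hypothetical:

    from typing import Any, Dict

    from dbos._app_db import ApplicationDatabase

    engine_kwargs: Dict[str, Any] = {"pool_size": 2, "max_overflow": 0}  # hypothetical
    app_db = ApplicationDatabase.create(
        database_url="postgresql://postgres:dbos@localhost:5432/app",  # hypothetical
        engine_kwargs=engine_kwargs,
        schema="tenant_a",  # None falls back to the default "dbos" schema
    )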
dbos/_client.py

@@ -123,7 +123,7 @@ class DBOSClient:
         *,
         system_database_url: Optional[str] = None,
         application_database_url: Optional[str] = None,
-        system_database: Optional[str] = None,
+        dbos_system_schema: Optional[str] = "dbos",
     ):
         application_database_url = (
             database_url if database_url else application_database_url
@@ -132,7 +132,6 @@ class DBOSClient:
             {
                 "system_database_url": system_database_url,
                 "database_url": application_database_url,
-                "database": {"sys_db_name": system_database},
             }
         )
         assert is_valid_database_url(system_database_url)
@@ -146,6 +145,7 @@ class DBOSClient:
                 "max_overflow": 0,
                 "pool_size": 2,
             },
+            schema=dbos_system_schema,
         )
         self._sys_db.check_connection()
         if application_database_url:
@@ -156,6 +156,7 @@ class DBOSClient:
                 "max_overflow": 0,
                 "pool_size": 2,
             },
+            schema=dbos_system_schema,
         )

     def destroy(self) -> None:
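Note: the deprecated system_database name parameter is gone; DBOSClient instead accepts dbos_system_schema and forwards it as schema= to both database factories. A sketch with a hypothetical connection string:

    from dbos import DBOSClient

    client = DBOSClient(
        system_database_url="postgresql://postgres:dbos@localhost:5432/app_dbos_sys",  # hypothetical
        dbos_system_schema="tenant_a",  # omit to use the default "dbos" schema
    )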
dbos/_dbos.py

@@ -444,10 +444,13 @@ class DBOS:
         self._executor_field = ThreadPoolExecutor(max_workers=sys.maxsize)
         self._background_event_loop.start()
         assert self._config["database"]["sys_db_engine_kwargs"] is not None
+        # Get the schema configuration, use "dbos" as default
+        schema = self._config.get("dbos_system_schema", "dbos")
         self._sys_db_field = SystemDatabase.create(
             system_database_url=get_system_database_url(self._config),
             engine_kwargs=self._config["database"]["sys_db_engine_kwargs"],
             debug_mode=debug_mode,
+            schema=schema,
         )
         assert self._config["database"]["db_engine_kwargs"] is not None
         if self._config["database_url"]:
@@ -455,6 +458,7 @@ class DBOS:
                 database_url=self._config["database_url"],
                 engine_kwargs=self._config["database"]["db_engine_kwargs"],
                 debug_mode=debug_mode,
+                schema=schema,
             )

         if debug_mode:
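Note: this is the user-visible knob. Set dbos_system_schema in DBOSConfig and launch as usual; the value (default "dbos") is handed to both SystemDatabase.create and ApplicationDatabase.create. A sketch with hypothetical values:

    from dbos import DBOS, DBOSConfig

    config: DBOSConfig = {
        "name": "my-app",  # hypothetical
        "system_database_url": "postgresql://postgres:dbos@localhost:5432/my_app_dbos_sys",  # hypothetical
        "dbos_system_schema": "tenant_a",  # system tables are created in this schema
    }
    DBOS(config=config)
    DBOS.launch()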
@@ -1025,16 +1029,6 @@ class DBOS:
         await asyncio.to_thread(cls.resume_workflow, workflow_id)
         return await cls.retrieve_workflow_async(workflow_id)

-    @classmethod
-    def restart_workflow(cls, workflow_id: str) -> WorkflowHandle[Any]:
-        """Restart a workflow with a new workflow ID"""
-        return cls.fork_workflow(workflow_id, 1)
-
-    @classmethod
-    async def restart_workflow_async(cls, workflow_id: str) -> WorkflowHandleAsync[Any]:
-        """Restart a workflow with a new workflow ID"""
-        return await cls.fork_workflow_async(workflow_id, 1)
-
     @classmethod
     def fork_workflow(
         cls,
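Note: both removed methods were one-line wrappers, so callers can substitute the underlying call directly; the _admin_server.py change above does exactly this. A sketch, assuming workflow_id names an existing workflow:

    from dbos import DBOS

    # Before (1.15.0a2): handle = DBOS.restart_workflow(workflow_id)
    # After (1.15.0a4): fork from step 1, which is exactly what the wrapper did
    handle = DBOS.fork_workflow(workflow_id, 1)

    # Async callers (inside an async function):
    # handle = await DBOS.fork_workflow_async(workflow_id, 1)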
@@ -1270,20 +1264,6 @@ class DBOS:
         else:
             return None

-    @classproperty
-    def parent_workflow_id(cls) -> str:
-        """
-        This method is deprecated and should not be used.
-        """
-        dbos_logger.warning(
-            "DBOS.parent_workflow_id is deprecated and should not be used"
-        )
-        ctx = assert_current_dbos_context()
-        assert (
-            ctx.is_within_workflow()
-        ), "parent_workflow_id is only available within a workflow."
-        return ctx.parent_workflow_id
-
     @classproperty
     def span(cls) -> "Span":
         """Return the tracing `Span` associated with the current context."""
dbos/_dbos_config.py

@@ -1,4 +1,3 @@
-import json
 import os
 import re
 from importlib import resources
@@ -23,7 +22,6 @@ class DBOSConfig(TypedDict, total=False):
         system_database_url (str): Connection string for the DBOS system database. Defaults to sqlite:///{name} if not provided.
         application_database_url (str): Connection string for the DBOS application database, in which DBOS @Transaction functions run. Optional. Should be the same type of database (SQLite or Postgres) as the system database.
         database_url (str): (DEPRECATED) Database connection string
-        sys_db_name (str): (DEPRECATED) System database name
         sys_db_pool_size (int): System database pool size
         db_engine_kwargs (Dict[str, Any]): SQLAlchemy engine kwargs (See https://docs.sqlalchemy.org/en/20/core/engines.html#sqlalchemy.create_engine)
         log_level (str): Log level
@@ -34,6 +32,7 @@ class DBOSConfig(TypedDict, total=False):
         otlp_attributes (dict[str, str]): A set of custom attributes to apply OTLP-exported logs and traces
         application_version (str): Application version
         executor_id (str): Executor ID, used to identify the application instance in distributed environments
+        dbos_system_schema (str): Schema name for DBOS system tables. Defaults to "dbos".
         enable_otlp (bool): If True, enable built-in DBOS OTLP tracing and logging.
     """
@@ -41,7 +40,6 @@ class DBOSConfig(TypedDict, total=False):
     system_database_url: Optional[str]
     application_database_url: Optional[str]
     database_url: Optional[str]
-    sys_db_name: Optional[str]
     sys_db_pool_size: Optional[int]
     db_engine_kwargs: Optional[Dict[str, Any]]
     log_level: Optional[str]
@@ -52,6 +50,7 @@ class DBOSConfig(TypedDict, total=False):
     otlp_attributes: Optional[dict[str, str]]
     application_version: Optional[str]
     executor_id: Optional[str]
+    dbos_system_schema: Optional[str]
     enable_otlp: Optional[bool]
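Note: for users migrating off the removed sys_db_name, the system database name is no longer configurable (it is always the application database name plus the _dbos_sys suffix); the schema inside it now is. A before/after sketch with hypothetical values:

    from dbos import DBOSConfig

    # 1.15.0a2 and earlier (key now removed):
    # config: DBOSConfig = {"name": "my-app", "sys_db_name": "my_sys_db"}

    # 1.15.0a4: pick the schema instead; the database name is derived automatically
    config: DBOSConfig = {
        "name": "my-app",                  # hypothetical
        "dbos_system_schema": "tenant_a",  # defaults to "dbos" when omitted
    }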
@@ -70,16 +69,13 @@ class DatabaseConfig(TypedDict, total=False):
         sys_db_pool_size (int): System database pool size
         db_engine_kwargs (Dict[str, Any]): SQLAlchemy engine kwargs
         migrate (List[str]): Migration commands to run on startup
+        dbos_system_schema (str): Schema name for DBOS system tables. Defaults to "dbos".
     """

-    sys_db_name: Optional[str]  # For internal use, will be removed in a future version
-    sys_db_pool_size: Optional[
-        int
-    ]  # For internal use, will be removed in a future version
+    sys_db_pool_size: Optional[int]
     db_engine_kwargs: Optional[Dict[str, Any]]
     sys_db_engine_kwargs: Optional[Dict[str, Any]]
     migrate: Optional[List[str]]
-    rollback: Optional[List[str]]  # Will be removed in a future version


 class OTLPExporterConfig(TypedDict, total=False):
@@ -113,6 +109,7 @@ class ConfigFile(TypedDict, total=False):
         system_database_url (str): System database URL
         telemetry (TelemetryConfig): Configuration for tracing / logging
         env (Dict[str,str]): Environment variables
+        dbos_system_schema (str): Schema name for DBOS system tables. Defaults to "dbos".

     """
@@ -123,6 +120,7 @@ class ConfigFile(TypedDict, total=False):
     system_database_url: Optional[str]
     telemetry: Optional[TelemetryConfig]
     env: Dict[str, str]
+    dbos_system_schema: Optional[str]
@@ -135,8 +133,6 @@ def translate_dbos_config_to_config_file(config: DBOSConfig) -> ConfigFile:

     # Database config
     db_config: DatabaseConfig = {}
-    if "sys_db_name" in config:
-        db_config["sys_db_name"] = config.get("sys_db_name")
     if "sys_db_pool_size" in config:
         db_config["sys_db_pool_size"] = config.get("sys_db_pool_size")
     if "db_engine_kwargs" in config:
@@ -153,6 +149,9 @@ def translate_dbos_config_to_config_file(config: DBOSConfig) -> ConfigFile:
     if "system_database_url" in config:
         translated_config["system_database_url"] = config.get("system_database_url")

+    if "dbos_system_schema" in config:
+        translated_config["dbos_system_schema"] = config.get("dbos_system_schema")
+
     # Runtime config
     translated_config["runtimeConfig"] = {"run_admin_server": True}
     if "admin_port" in config:
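Note: the translation layer simply carries the new key through when present. A sketch of the expected behavior (internal API, hypothetical values):

    from dbos._dbos_config import translate_dbos_config_to_config_file

    cfg = translate_dbos_config_to_config_file(
        {"name": "my-app", "dbos_system_schema": "tenant_a"}
    )
    assert cfg["dbos_system_schema"] == "tenant_a"  # copied through verbatim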
@@ -402,10 +401,7 @@ def process_config(
     else:
         url = make_url(data["database_url"])
         assert url.database
-        if data["database"].get("sys_db_name"):
-            url = url.set(database=data["database"]["sys_db_name"])
-        else:
-            url = url.set(database=f"{url.database}{SystemSchema.sysdb_suffix}")
+        url = url.set(database=f"{url.database}{SystemSchema.sysdb_suffix}")
         data["system_database_url"] = url.render_as_string(hide_password=False)

     # If a system database URL is provided but not an application database URL,
@@ -546,6 +542,8 @@ def overwrite_config(provided_config: ConfigFile) -> ConfigFile:
             "DBOS_SYSTEM_DATABASE_URL environment variable is not set. This is required to connect to the database."
         )
     provided_config["system_database_url"] = system_db_url
+    # Always use the "dbos" schema when deploying to DBOS Cloud
+    provided_config["dbos_system_schema"] = "dbos"

     # Telemetry config
     if "telemetry" not in provided_config or provided_config["telemetry"] is None:
@@ -608,11 +606,8 @@ def get_system_database_url(config: ConfigFile) -> str:
         if config["database_url"].startswith("sqlite"):
             return config["database_url"]
         app_db_url = make_url(config["database_url"])
-        if config.get("database") and config["database"].get("sys_db_name"):
-            sys_db_name = config["database"]["sys_db_name"]
-        else:
-            assert app_db_url.database is not None
-            sys_db_name = app_db_url.database + SystemSchema.sysdb_suffix
+        assert app_db_url.database is not None
+        sys_db_name = app_db_url.database + SystemSchema.sysdb_suffix
         return app_db_url.set(database=sys_db_name).render_as_string(
             hide_password=False
         )
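Note: with the sys_db_name branch gone, the system database name is always derived from the application database URL by appending SystemSchema.sysdb_suffix ("_dbos_sys"). A sketch with a hypothetical URL:

    from dbos._dbos_config import get_system_database_url

    config = {"name": "my-app", "database_url": "postgresql://localhost/my_app"}  # hypothetical
    print(get_system_database_url(config))
    # expected: postgresql://localhost/my_app_dbos_sys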
dbos/_migration.py

@@ -5,7 +5,7 @@ import sqlalchemy as sa
 from ._logger import dbos_logger


-def ensure_dbos_schema(engine: sa.Engine) -> None:
+def ensure_dbos_schema(engine: sa.Engine, schema: str) -> None:
     """
     True if using DBOS migrations (DBOS schema and migrations table already exist or were created)
     False if using Alembic migrations (DBOS schema exists, but dbos_migrations table doesn't)
@@ -14,41 +14,46 @@ def ensure_dbos_schema(engine: sa.Engine) -> None:
         # Check if dbos schema exists
         schema_result = conn.execute(
             sa.text(
-                "SELECT schema_name FROM information_schema.schemata WHERE schema_name = 'dbos'"
-            )
+                "SELECT schema_name FROM information_schema.schemata WHERE schema_name = :schema"
+            ),
+            {"schema": schema},
         )
         schema_exists = schema_result.fetchone() is not None

         # Create schema if it doesn't exist
         if not schema_exists:
-            conn.execute(sa.text("CREATE SCHEMA dbos"))
+            conn.execute(sa.text(f'CREATE SCHEMA "{schema}"'))

         # Check if dbos_migrations table exists
         table_result = conn.execute(
             sa.text(
-                "SELECT table_name FROM information_schema.tables WHERE table_schema = 'dbos' AND table_name = 'dbos_migrations'"
-            )
+                "SELECT table_name FROM information_schema.tables WHERE table_schema = :schema AND table_name = 'dbos_migrations'"
+            ),
+            {"schema": schema},
         )
         table_exists = table_result.fetchone() is not None

         if not table_exists:
             conn.execute(
                 sa.text(
-                    "CREATE TABLE dbos.dbos_migrations (version BIGINT NOT NULL PRIMARY KEY)"
+                    f'CREATE TABLE "{schema}".dbos_migrations (version BIGINT NOT NULL PRIMARY KEY)'
                 )
             )


-def run_dbos_migrations(engine: sa.Engine) -> None:
+def run_dbos_migrations(engine: sa.Engine, schema: str) -> None:
     """Run DBOS-managed migrations by executing each SQL command in dbos_migrations."""
     with engine.begin() as conn:
         # Get current migration version
-        result = conn.execute(sa.text("SELECT version FROM dbos.dbos_migrations"))
+        result = conn.execute(
+            sa.text(f'SELECT version FROM "{schema}".dbos_migrations')
+        )
         current_version = result.fetchone()
         last_applied = current_version[0] if current_version else 0

         # Apply migrations starting from the next version
-        for i, migration_sql in enumerate(dbos_migrations, 1):
+        migrations = get_dbos_migrations(schema)
+        for i, migration_sql in enumerate(migrations, 1):
             if i <= last_applied:
                 continue
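Note on the pattern: the information_schema lookups can pass the schema as a bind parameter because it is compared as a value, but DDL such as CREATE SCHEMA names an identifier, which bind parameters cannot supply; hence the quoted f-string. A standalone sketch (hypothetical connection URL):

    import sqlalchemy as sa

    engine = sa.create_engine("postgresql+psycopg://localhost/app")  # hypothetical

    with engine.begin() as conn:
        # Value position: a bind parameter is safe and preferred.
        exists = conn.execute(
            sa.text("SELECT 1 FROM information_schema.schemata WHERE schema_name = :s"),
            {"s": "tenant_a"},
        ).scalar()
        if not exists:
            # Identifier position: must be interpolated; double quotes preserve
            # case and guard against reserved words.
            conn.execute(sa.text('CREATE SCHEMA "tenant_a"'))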
@@ -60,23 +65,26 @@ def run_dbos_migrations(engine: sa.Engine) -> None:
             if last_applied == 0:
                 conn.execute(
                     sa.text(
-                        "INSERT INTO dbos.dbos_migrations (version) VALUES (:version)"
+                        f'INSERT INTO "{schema}".dbos_migrations (version) VALUES (:version)'
                     ),
                     {"version": i},
                 )
             else:
                 conn.execute(
-                    sa.text("UPDATE dbos.dbos_migrations SET version = :version"),
+                    sa.text(
+                        f'UPDATE "{schema}".dbos_migrations SET version = :version'
+                    ),
                     {"version": i},
                 )
             last_applied = i


-dbos_migration_one = """
+def get_dbos_migration_one(schema: str) -> str:
+    return f"""
 -- Enable uuid extension for generating UUIDs
 CREATE EXTENSION IF NOT EXISTS "uuid-ossp";

-CREATE TABLE dbos.workflow_status (
+CREATE TABLE \"{schema}\".workflow_status (
     workflow_uuid TEXT PRIMARY KEY,
     status TEXT,
     name TEXT,
@@ -103,15 +111,15 @@ CREATE TABLE dbos.workflow_status (
     priority INTEGER NOT NULL DEFAULT 0
 );

-CREATE INDEX workflow_status_created_at_index ON dbos.workflow_status (created_at);
-CREATE INDEX workflow_status_executor_id_index ON dbos.workflow_status (executor_id);
-CREATE INDEX workflow_status_status_index ON dbos.workflow_status (status);
+CREATE INDEX workflow_status_created_at_index ON \"{schema}\".workflow_status (created_at);
+CREATE INDEX workflow_status_executor_id_index ON \"{schema}\".workflow_status (executor_id);
+CREATE INDEX workflow_status_status_index ON \"{schema}\".workflow_status (status);

-ALTER TABLE dbos.workflow_status
+ALTER TABLE \"{schema}\".workflow_status
     ADD CONSTRAINT uq_workflow_status_queue_name_dedup_id
     UNIQUE (queue_name, deduplication_id);

-CREATE TABLE dbos.operation_outputs (
+CREATE TABLE \"{schema}\".operation_outputs (
     workflow_uuid TEXT NOT NULL,
     function_id INTEGER NOT NULL,
     function_name TEXT NOT NULL DEFAULT '',
@@ -119,23 +127,23 @@ CREATE TABLE dbos.operation_outputs (
     error TEXT,
     child_workflow_id TEXT,
     PRIMARY KEY (workflow_uuid, function_id),
-    FOREIGN KEY (workflow_uuid) REFERENCES dbos.workflow_status(workflow_uuid)
+    FOREIGN KEY (workflow_uuid) REFERENCES \"{schema}\".workflow_status(workflow_uuid)
         ON UPDATE CASCADE ON DELETE CASCADE
 );

-CREATE TABLE dbos.notifications (
+CREATE TABLE \"{schema}\".notifications (
     destination_uuid TEXT NOT NULL,
     topic TEXT,
     message TEXT NOT NULL,
     created_at_epoch_ms BIGINT NOT NULL DEFAULT (EXTRACT(epoch FROM now()) * 1000::numeric)::bigint,
     message_uuid TEXT NOT NULL DEFAULT gen_random_uuid(), -- Built-in function
-    FOREIGN KEY (destination_uuid) REFERENCES dbos.workflow_status(workflow_uuid)
+    FOREIGN KEY (destination_uuid) REFERENCES \"{schema}\".workflow_status(workflow_uuid)
         ON UPDATE CASCADE ON DELETE CASCADE
 );
-CREATE INDEX idx_workflow_topic ON dbos.notifications (destination_uuid, topic);
+CREATE INDEX idx_workflow_topic ON \"{schema}\".notifications (destination_uuid, topic);

 -- Create notification function
-CREATE OR REPLACE FUNCTION dbos.notifications_function() RETURNS TRIGGER AS $$
+CREATE OR REPLACE FUNCTION \"{schema}\".notifications_function() RETURNS TRIGGER AS $$
 DECLARE
     payload text := NEW.destination_uuid || '::' || NEW.topic;
 BEGIN
@@ -146,20 +154,20 @@ $$ LANGUAGE plpgsql;

 -- Create notification trigger
 CREATE TRIGGER dbos_notifications_trigger
-    AFTER INSERT ON dbos.notifications
-    FOR EACH ROW EXECUTE FUNCTION dbos.notifications_function();
+    AFTER INSERT ON \"{schema}\".notifications
+    FOR EACH ROW EXECUTE FUNCTION \"{schema}\".notifications_function();

-CREATE TABLE dbos.workflow_events (
+CREATE TABLE \"{schema}\".workflow_events (
     workflow_uuid TEXT NOT NULL,
     key TEXT NOT NULL,
     value TEXT NOT NULL,
     PRIMARY KEY (workflow_uuid, key),
-    FOREIGN KEY (workflow_uuid) REFERENCES dbos.workflow_status(workflow_uuid)
+    FOREIGN KEY (workflow_uuid) REFERENCES \"{schema}\".workflow_status(workflow_uuid)
         ON UPDATE CASCADE ON DELETE CASCADE
 );

 -- Create events function
-CREATE OR REPLACE FUNCTION dbos.workflow_events_function() RETURNS TRIGGER AS $$
+CREATE OR REPLACE FUNCTION \"{schema}\".workflow_events_function() RETURNS TRIGGER AS $$
 DECLARE
     payload text := NEW.workflow_uuid || '::' || NEW.key;
 BEGIN
@@ -170,20 +178,20 @@ $$ LANGUAGE plpgsql;

 -- Create events trigger
 CREATE TRIGGER dbos_workflow_events_trigger
-    AFTER INSERT ON dbos.workflow_events
-    FOR EACH ROW EXECUTE FUNCTION dbos.workflow_events_function();
+    AFTER INSERT ON \"{schema}\".workflow_events
+    FOR EACH ROW EXECUTE FUNCTION \"{schema}\".workflow_events_function();

-CREATE TABLE dbos.streams (
+CREATE TABLE \"{schema}\".streams (
     workflow_uuid TEXT NOT NULL,
     key TEXT NOT NULL,
     value TEXT NOT NULL,
     "offset" INTEGER NOT NULL,
     PRIMARY KEY (workflow_uuid, key, "offset"),
-    FOREIGN KEY (workflow_uuid) REFERENCES dbos.workflow_status(workflow_uuid)
+    FOREIGN KEY (workflow_uuid) REFERENCES \"{schema}\".workflow_status(workflow_uuid)
         ON UPDATE CASCADE ON DELETE CASCADE
 );

-CREATE TABLE dbos.event_dispatch_kv (
+CREATE TABLE \"{schema}\".event_dispatch_kv (
     service_name TEXT NOT NULL,
     workflow_fn_name TEXT NOT NULL,
     key TEXT NOT NULL,
@@ -195,6 +203,10 @@ CREATE TABLE dbos.event_dispatch_kv (
 """


+def get_dbos_migrations(schema: str) -> list[str]:
+    return [get_dbos_migration_one(schema)]
+
+
 def get_sqlite_timestamp_expr() -> str:
     """Get SQLite timestamp expression with millisecond precision for Python >= 3.12."""
     if sys.version_info >= (3, 12):
@@ -281,5 +293,4 @@ CREATE TABLE streams (
 );
 """

-dbos_migrations = [dbos_migration_one]
 sqlite_migrations = [sqlite_migration_one]
dbos/_schemas/system_database.py

@@ -1,3 +1,5 @@
+from typing import Optional
+
 from sqlalchemy import (
     BigInteger,
     Column,
@@ -19,6 +21,21 @@ class SystemSchema:
     metadata_obj = MetaData(schema="dbos")
     sysdb_suffix = "_dbos_sys"

+    @classmethod
+    def set_schema(cls, schema_name: Optional[str]) -> None:
+        """
+        Set the schema for all DBOS system tables.
+
+        Args:
+            schema_name: The name of the schema to use for system tables
+        """
+        cls.metadata_obj.schema = schema_name
+        cls.workflow_status.schema = schema_name
+        cls.operation_outputs.schema = schema_name
+        cls.notifications.schema = schema_name
+        cls.workflow_events.schema = schema_name
+        cls.streams.schema = schema_name
+
     workflow_status = Table(
         "workflow_status",
         metadata_obj,
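Note: set_schema replaces the per-table assignments that the Postgres and SQLite engines previously did by hand (see the _sys_db_postgres.py and _sys_db_sqlite.py hunks below). A usage sketch:

    from dbos._schemas.system_database import SystemSchema

    SystemSchema.set_schema("tenant_a")  # Postgres: tables resolve as "tenant_a".workflow_status, etc.
    SystemSchema.set_schema(None)        # SQLite: no schema qualification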
dbos/_sys_db.py

@@ -1443,6 +1443,7 @@ class SystemDatabase(ABC):
     def create(
         system_database_url: str,
         engine_kwargs: Dict[str, Any],
+        schema: Optional[str],
         debug_mode: bool = False,
     ) -> "SystemDatabase":
         """Factory method to create the appropriate SystemDatabase implementation based on URL."""
@@ -1461,6 +1462,7 @@ class SystemDatabase(ABC):
             system_database_url=system_database_url,
             engine_kwargs=engine_kwargs,
             debug_mode=debug_mode,
+            schema=schema,
         )

     @db_retry()
dbos/_sys_db_postgres.py

@@ -20,6 +20,7 @@ class PostgresSystemDatabase(SystemDatabase):
         *,
         system_database_url: str,
         engine_kwargs: Dict[str, Any],
+        schema: Optional[str],
         debug_mode: bool = False,
     ):
         super().__init__(
@@ -27,17 +28,16 @@ class PostgresSystemDatabase(SystemDatabase):
             engine_kwargs=engine_kwargs,
             debug_mode=debug_mode,
         )
+        if schema is None:
+            self.schema = "dbos"
+        else:
+            self.schema = schema
+        SystemSchema.set_schema(self.schema)
         self.notification_conn: Optional[psycopg.connection.Connection] = None

     def _create_engine(
         self, system_database_url: str, engine_kwargs: Dict[str, Any]
     ) -> sa.Engine:
-        # TODO: Make the schema dynamic so this isn't needed
-        SystemSchema.workflow_status.schema = "dbos"
-        SystemSchema.operation_outputs.schema = "dbos"
-        SystemSchema.notifications.schema = "dbos"
-        SystemSchema.workflow_events.schema = "dbos"
-        SystemSchema.streams.schema = "dbos"
         url = sa.make_url(system_database_url).set(drivername="postgresql+psycopg")
         return sa.create_engine(url, **engine_kwargs)

@@ -62,8 +62,8 @@ class PostgresSystemDatabase(SystemDatabase):
             conn.execute(sa.text(f"CREATE DATABASE {sysdb_name}"))
         engine.dispose()

-        ensure_dbos_schema(self.engine)
-        run_dbos_migrations(self.engine)
+        ensure_dbos_schema(self.engine, self.schema)
+        run_dbos_migrations(self.engine, self.schema)

     def _cleanup_connections(self) -> None:
         """Clean up PostgreSQL-specific connections."""
dbos/_sys_db_sqlite.py

@@ -19,12 +19,7 @@ class SQLiteSystemDatabase(SystemDatabase):
         self, system_database_url: str, engine_kwargs: Dict[str, Any]
     ) -> sa.Engine:
         """Create a SQLite engine."""
-        # TODO: Make the schema dynamic so this isn't needed
-        SystemSchema.workflow_status.schema = None
-        SystemSchema.operation_outputs.schema = None
-        SystemSchema.notifications.schema = None
-        SystemSchema.workflow_events.schema = None
-        SystemSchema.streams.schema = None
+        SystemSchema.set_schema(None)
         return sa.create_engine(system_database_url)

     def run_migrations(self) -> None: