dbos 1.13.0a3__tar.gz → 2.8.0a6__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {dbos-1.13.0a3 → dbos-2.8.0a6}/PKG-INFO +31 -16
- {dbos-1.13.0a3 → dbos-2.8.0a6}/README.md +6 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/__init__.py +7 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/_admin_server.py +3 -1
- dbos-2.8.0a6/dbos/_app_db.py +353 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/_client.py +116 -59
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/_conductor/conductor.py +65 -32
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/_conductor/protocol.py +62 -1
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/_context.py +35 -9
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/_core.py +318 -168
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/_dbos.py +205 -114
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/_dbos_config.py +139 -130
- dbos-2.8.0a6/dbos/_debouncer.py +394 -0
- dbos-2.8.0a6/dbos/_debug_trigger.py +108 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/_docker_pg_helper.py +93 -51
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/_error.py +12 -4
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/_fastapi.py +9 -5
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/_flask.py +2 -3
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/_kafka.py +6 -4
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/_logger.py +49 -38
- dbos-2.8.0a6/dbos/_migration.py +398 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/_outcome.py +67 -13
- dbos-2.8.0a6/dbos/_queue.py +258 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/_recovery.py +1 -1
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/_scheduler.py +29 -16
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/_schemas/application_database.py +1 -1
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/_schemas/system_database.py +46 -9
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/_serialization.py +30 -44
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/_sys_db.py +894 -480
- dbos-2.8.0a6/dbos/_sys_db_postgres.py +157 -0
- dbos-2.8.0a6/dbos/_sys_db_sqlite.py +119 -0
- dbos-2.8.0a6/dbos/_templates/dbos-db-starter/migrations/create_table.py.dbos +34 -0
- dbos-2.8.0a6/dbos/_tracer.py +111 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/_utils.py +20 -1
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/_workflow_commands.py +29 -32
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/cli/_github_init.py +22 -16
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/cli/_template_init.py +5 -16
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/cli/cli.py +274 -177
- dbos-2.8.0a6/dbos/cli/migration.py +136 -0
- dbos-2.8.0a6/dbos/dbos-config.schema.json +61 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/pyproject.toml +36 -18
- {dbos-1.13.0a3 → dbos-2.8.0a6}/tests/classdefs.py +33 -0
- dbos-2.8.0a6/tests/conftest.py +269 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/tests/queuedworkflow.py +9 -11
- dbos-2.8.0a6/tests/script_without_fastapi.py +30 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/tests/test_admin_server.py +95 -32
- {dbos-1.13.0a3 → dbos-2.8.0a6}/tests/test_async.py +47 -6
- {dbos-1.13.0a3 → dbos-2.8.0a6}/tests/test_async_workflow_management.py +1 -20
- {dbos-1.13.0a3 → dbos-2.8.0a6}/tests/test_classdecorators.py +51 -55
- {dbos-1.13.0a3 → dbos-2.8.0a6}/tests/test_client.py +63 -24
- dbos-2.8.0a6/tests/test_cockroachdb.py +40 -0
- dbos-2.8.0a6/tests/test_concurrency.py +59 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/tests/test_config.py +150 -166
- {dbos-1.13.0a3 → dbos-2.8.0a6}/tests/test_dbos.py +692 -233
- dbos-2.8.0a6/tests/test_debouncer.py +316 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/tests/test_docker_secrets.py +5 -25
- {dbos-1.13.0a3 → dbos-2.8.0a6}/tests/test_failures.py +84 -61
- {dbos-1.13.0a3 → dbos-2.8.0a6}/tests/test_fastapi.py +2 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/tests/test_fastapi_roles.py +7 -133
- {dbos-1.13.0a3 → dbos-2.8.0a6}/tests/test_kafka.py +50 -17
- dbos-2.8.0a6/tests/test_metrics.py +52 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/tests/test_package.py +104 -57
- dbos-2.8.0a6/tests/test_patch.py +313 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/tests/test_queue.py +255 -53
- {dbos-1.13.0a3 → dbos-2.8.0a6}/tests/test_scheduler.py +23 -9
- dbos-2.8.0a6/tests/test_schema_migration.py +402 -0
- dbos-2.8.0a6/tests/test_singleexec.py +324 -0
- dbos-2.8.0a6/tests/test_singleexec_async.py +314 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/tests/test_singleton.py +27 -7
- {dbos-1.13.0a3 → dbos-2.8.0a6}/tests/test_spans.py +174 -96
- {dbos-1.13.0a3 → dbos-2.8.0a6}/tests/test_sqlalchemy.py +8 -4
- {dbos-1.13.0a3 → dbos-2.8.0a6}/tests/test_streaming.py +16 -9
- {dbos-1.13.0a3 → dbos-2.8.0a6}/tests/test_workflow_introspection.py +79 -15
- {dbos-1.13.0a3 → dbos-2.8.0a6}/tests/test_workflow_management.py +116 -65
- dbos-1.13.0a3/dbos/__main__.py +0 -29
- dbos-1.13.0a3/dbos/_alembic_migrations/env.py +0 -62
- dbos-1.13.0a3/dbos/_alembic_migrations/script.py.mako +0 -26
- dbos-1.13.0a3/dbos/_alembic_migrations/versions/01ce9f07bd10_streaming.py +0 -42
- dbos-1.13.0a3/dbos/_alembic_migrations/versions/04ca4f231047_workflow_queues_executor_id.py +0 -34
- dbos-1.13.0a3/dbos/_alembic_migrations/versions/27ac6900c6ad_add_queue_dedup.py +0 -45
- dbos-1.13.0a3/dbos/_alembic_migrations/versions/471b60d64126_dbos_migrations.py +0 -35
- dbos-1.13.0a3/dbos/_alembic_migrations/versions/50f3227f0b4b_fix_job_queue.py +0 -35
- dbos-1.13.0a3/dbos/_alembic_migrations/versions/5c361fc04708_added_system_tables.py +0 -193
- dbos-1.13.0a3/dbos/_alembic_migrations/versions/66478e1b95e5_consolidate_queues.py +0 -71
- dbos-1.13.0a3/dbos/_alembic_migrations/versions/83f3732ae8e7_workflow_timeout.py +0 -44
- dbos-1.13.0a3/dbos/_alembic_migrations/versions/933e86bdac6a_add_queue_priority.py +0 -35
- dbos-1.13.0a3/dbos/_alembic_migrations/versions/a3b18ad34abe_added_triggers.py +0 -72
- dbos-1.13.0a3/dbos/_alembic_migrations/versions/d76646551a6b_job_queue_limiter.py +0 -43
- dbos-1.13.0a3/dbos/_alembic_migrations/versions/d76646551a6c_workflow_queue.py +0 -28
- dbos-1.13.0a3/dbos/_alembic_migrations/versions/d994145b47b6_consolidate_inputs.py +0 -30
- dbos-1.13.0a3/dbos/_alembic_migrations/versions/eab0cc1d9a14_job_queue.py +0 -56
- dbos-1.13.0a3/dbos/_alembic_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py +0 -46
- dbos-1.13.0a3/dbos/_app_db.py +0 -285
- dbos-1.13.0a3/dbos/_debug.py +0 -51
- dbos-1.13.0a3/dbos/_migration.py +0 -233
- dbos-1.13.0a3/dbos/_queue.py +0 -132
- dbos-1.13.0a3/dbos/_templates/dbos-db-starter/alembic.ini +0 -116
- dbos-1.13.0a3/dbos/_templates/dbos-db-starter/migrations/env.py.dbos +0 -85
- dbos-1.13.0a3/dbos/_templates/dbos-db-starter/migrations/script.py.mako +0 -26
- dbos-1.13.0a3/dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py +0 -35
- dbos-1.13.0a3/dbos/_tracer.py +0 -81
- dbos-1.13.0a3/dbos/cli/migration.py +0 -95
- dbos-1.13.0a3/dbos/dbos-config.schema.json +0 -178
- dbos-1.13.0a3/tests/conftest.py +0 -199
- dbos-1.13.0a3/tests/test_concurrency.py +0 -167
- dbos-1.13.0a3/tests/test_debug.py +0 -147
- dbos-1.13.0a3/tests/test_migrate.py +0 -81
- dbos-1.13.0a3/tests/test_schema_migration.py +0 -147
- {dbos-1.13.0a3 → dbos-2.8.0a6}/LICENSE +0 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/_classproperty.py +0 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/_croniter.py +0 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/_event_loop.py +0 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/_kafka_message.py +0 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/_registrations.py +0 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/_roles.py +0 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/_schemas/__init__.py +0 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/_templates/dbos-db-starter/README.md +0 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/_templates/dbos-db-starter/__package/__init__.py +0 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/_templates/dbos-db-starter/__package/main.py.dbos +0 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/_templates/dbos-db-starter/__package/schema.py +0 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +0 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/py.typed +0 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/tests/__init__.py +0 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/tests/atexit_no_ctor.py +0 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/tests/atexit_no_launch.py +0 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/tests/client_collateral.py +0 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/tests/client_worker.py +0 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/tests/dupname_classdefs1.py +0 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/tests/dupname_classdefsa.py +0 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/tests/more_classdefs.py +0 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/tests/test_cli.py +0 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/tests/test_croniter.py +0 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/tests/test_flask.py +0 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/tests/test_outcome.py +0 -0
- {dbos-1.13.0a3 → dbos-2.8.0a6}/version/__init__.py +0 -0
{dbos-1.13.0a3 → dbos-2.8.0a6}/PKG-INFO
@@ -1,33 +1,48 @@
 Metadata-Version: 2.1
 Name: dbos
-Version: 1.13.0a3
+Version: 2.8.0a6
 Summary: Ultra-lightweight durable execution in Python
 Author-Email: "DBOS, Inc." <contact@dbos.dev>
 License: MIT
-
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Information Technology
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Topic :: Internet
+Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
+Classifier: Topic :: Database
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Framework :: AsyncIO
+Requires-Python: >=3.10
 Requires-Dist: pyyaml>=6.0.2
-Requires-Dist: jsonschema>=4.23.0
-Requires-Dist: alembic>=1.13.3
-Requires-Dist: typing-extensions>=4.12.2; python_version < "3.10"
-Requires-Dist: typer>=0.12.5
-Requires-Dist: jsonpickle>=3.3.0
-Requires-Dist: opentelemetry-api>=1.27.0
-Requires-Dist: opentelemetry-sdk>=1.27.0
-Requires-Dist: opentelemetry-exporter-otlp-proto-http>=1.27.0
 Requires-Dist: python-dateutil>=2.9.0.post0
-Requires-Dist: fastapi[standard]>=0.115.2
-Requires-Dist: tomlkit>=0.13.2
 Requires-Dist: psycopg[binary]>=3.1
-Requires-Dist: docker>=7.1.0
-Requires-Dist: cryptography>=43.0.3
-Requires-Dist: rich>=13.9.4
-Requires-Dist: pyjwt>=2.10.1
 Requires-Dist: websockets>=14.0
+Requires-Dist: typer-slim>=0.17.4
+Requires-Dist: sqlalchemy>=2.0.43
+Provides-Extra: otel
+Requires-Dist: opentelemetry-api>=1.37.0; extra == "otel"
+Requires-Dist: opentelemetry-sdk>=1.37.0; extra == "otel"
+Requires-Dist: opentelemetry-exporter-otlp-proto-http>=1.37.0; extra == "otel"
 Description-Content-Type: text/markdown
 
 
 <div align="center">
 
+[](https://github.com/dbos-inc/dbos-transact-py/actions/workflows/unit-test.yml)
+[](https://pypi.python.org/pypi/dbos)
+[](https://pypi.python.org/pypi/dbos)
+[](LICENSE)
+[](https://discord.com/invite/jsmC6pXGgX)
+
 # DBOS Transact: Lightweight Durable Workflows
 
 #### [Documentation](https://docs.dbos.dev/) • [Examples](https://docs.dbos.dev/examples) • [Github](https://github.com/dbos-inc) • [Discord](https://discord.com/invite/jsmC6pXGgX)
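In the metadata above, the runtime dependency set shrinks sharply: alembic, jsonschema, jsonpickle, fastapi, tomlkit, docker, cryptography, rich, and pyjwt are dropped; typer is replaced by typer-slim; sqlalchemy becomes an explicit dependency; and the OpenTelemetry packages move behind a new optional otel extra. By standard Python packaging semantics, tracing support is now opt-in, e.g. pip install "dbos[otel]".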
{dbos-1.13.0a3 → dbos-2.8.0a6}/README.md
@@ -1,6 +1,12 @@
 
 <div align="center">
 
+[](https://github.com/dbos-inc/dbos-transact-py/actions/workflows/unit-test.yml)
+[](https://pypi.python.org/pypi/dbos)
+[](https://pypi.python.org/pypi/dbos)
+[](LICENSE)
+[](https://discord.com/invite/jsmC6pXGgX)
+
 # DBOS Transact: Lightweight Durable Workflows
 
 #### [Documentation](https://docs.dbos.dev/) • [Examples](https://docs.dbos.dev/examples) • [Github](https://github.com/dbos-inc) • [Discord](https://discord.com/invite/jsmC6pXGgX)
{dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/__init__.py
@@ -9,9 +9,12 @@ from ._context import (
 )
 from ._dbos import DBOS, DBOSConfiguredInstance, WorkflowHandle, WorkflowHandleAsync
 from ._dbos_config import DBOSConfig
+from ._debouncer import Debouncer, DebouncerClient
 from ._kafka_message import KafkaMessage
 from ._queue import Queue
+from ._serialization import Serializer
 from ._sys_db import GetWorkflowsInput, WorkflowStatus, WorkflowStatusString
+from .cli.migration import run_dbos_database_migrations
 
 __all__ = [
     "DBOSConfig",
@@ -32,4 +35,8 @@ __all__ = [
     "WorkflowStatusString",
     "error",
     "Queue",
+    "Debouncer",
+    "DebouncerClient",
+    "Serializer",
+    "run_dbos_database_migrations",
 ]
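These exports make debouncing, pluggable serialization, and programmatic schema migrations part of the public API. A minimal import sketch using only the names exported above; their signatures and behavior are not shown in this hunk:

from dbos import (
    DBOS,
    Queue,
    Debouncer,
    DebouncerClient,
    Serializer,
    run_dbos_database_migrations,
)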
{dbos-1.13.0a3 → dbos-2.8.0a6}/dbos/_admin_server.py
@@ -244,7 +244,7 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
     def _handle_restart(self, workflow_id: str) -> None:
         try:
             print(f"Restarting workflow {workflow_id}")
-            handle = self.dbos.
+            handle = self.dbos.fork_workflow(workflow_id, 1)
             response_body = json.dumps(
                 {
                     "workflow_id": handle.workflow_id,
@@ -338,6 +338,7 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
             end_time=filters.get("end_time"),
             status=filters.get("status"),
             app_version=filters.get("application_version"),
+            forked_from=filters.get("forked_from"),
             name=filters.get("workflow_name"),
             limit=filters.get("limit"),
             offset=filters.get("offset"),
@@ -364,6 +365,7 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
             start_time=filters.get("start_time"),
             end_time=filters.get("end_time"),
             status=filters.get("status"),
+            forked_from=filters.get("forked_from"),
             name=filters.get("workflow_name"),
             limit=filters.get("limit"),
             offset=filters.get("offset"),
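The old truncated restart call is replaced by workflow forking, and both workflow-listing endpoints gain a forked_from filter. A hedged sketch of the equivalent restart from application code, assuming only what the handler shows: fork_workflow takes a workflow ID and a step number and returns a handle (dbos and workflow_id are hypothetical placeholders):

# Fork an existing workflow from step 1, as _handle_restart now does.
handle = dbos.fork_workflow(workflow_id, 1)
print(handle.workflow_id)  # the fork runs under a new workflow ID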
dbos-2.8.0a6/dbos/_app_db.py
@@ -0,0 +1,353 @@
+from abc import ABC, abstractmethod
+from typing import Any, Dict, List, Optional, TypedDict
+
+import psycopg
+import sqlalchemy as sa
+from sqlalchemy import inspect, text
+from sqlalchemy.exc import DBAPIError
+from sqlalchemy.orm import Session, sessionmaker
+
+from dbos._migration import get_sqlite_timestamp_expr
+from dbos._serialization import Serializer
+
+from ._error import DBOSUnexpectedStepError, DBOSWorkflowConflictIDError
+from ._logger import dbos_logger
+from ._schemas.application_database import ApplicationSchema
+from ._sys_db import StepInfo
+
+
+class TransactionResultInternal(TypedDict):
+    workflow_uuid: str
+    function_id: int
+    output: Optional[str]  # JSON (jsonpickle)
+    error: Optional[str]  # JSON (jsonpickle)
+    txn_id: Optional[str]
+    txn_snapshot: str
+    executor_id: Optional[str]
+    function_name: Optional[str]
+
+
+class RecordedResult(TypedDict):
+    output: Optional[str]  # JSON (jsonpickle)
+    error: Optional[str]  # JSON (jsonpickle)
+
+
+class ApplicationDatabase(ABC):
+
+    @staticmethod
+    def create(
+        database_url: str,
+        engine_kwargs: Dict[str, Any],
+        schema: Optional[str],
+        serializer: Serializer,
+    ) -> "ApplicationDatabase":
+        """Factory method to create the appropriate ApplicationDatabase implementation based on URL."""
+        if database_url.startswith("sqlite"):
+            return SQLiteApplicationDatabase(
+                database_url=database_url,
+                engine_kwargs=engine_kwargs,
+                schema=schema,
+                serializer=serializer,
+            )
+        else:
+            # Default to PostgreSQL for postgresql://, postgres://, or other URLs
+            return PostgresApplicationDatabase(
+                database_url=database_url,
+                engine_kwargs=engine_kwargs,
+                schema=schema,
+                serializer=serializer,
+            )
+
+    def __init__(
+        self,
+        *,
+        database_url: str,
+        engine_kwargs: Dict[str, Any],
+        serializer: Serializer,
+        schema: Optional[str],
+    ):
+        # Log application database connection information
+        printable_url = sa.make_url(database_url).render_as_string(hide_password=True)
+        dbos_logger.info(
+            f"Initializing DBOS application database with URL: {printable_url}"
+        )
+        if not database_url.startswith("sqlite"):
+            dbos_logger.info(
+                f"DBOS application database engine parameters: {engine_kwargs}"
+            )
+
+        # Configure and initialize the application database
+        if database_url.startswith("sqlite"):
+            self.schema = None
+        else:
+            self.schema = schema if schema else "dbos"
+            ApplicationSchema.transaction_outputs.schema = schema
+        self.engine = self._create_engine(database_url, engine_kwargs)
+        self._engine_kwargs = engine_kwargs
+        self.sessionmaker = sessionmaker(bind=self.engine)
+        self.serializer = serializer
+
+    @abstractmethod
+    def _create_engine(
+        self, database_url: str, engine_kwargs: Dict[str, Any]
+    ) -> sa.Engine:
+        """Create a database engine specific to the database type."""
+        pass
+
+    @abstractmethod
+    def run_migrations(self) -> None:
+        """Run database migrations specific to the database type."""
+        pass
+
+    def destroy(self) -> None:
+        self.engine.dispose()
+
+    def record_transaction_output(
+        self, session: Session, output: TransactionResultInternal
+    ) -> None:
+        try:
+            session.execute(
+                sa.insert(ApplicationSchema.transaction_outputs).values(
+                    workflow_uuid=output["workflow_uuid"],
+                    function_id=output["function_id"],
+                    output=output["output"],
+                    error=None,
+                    txn_id="",
+                    txn_snapshot=output["txn_snapshot"],
+                    executor_id=(
+                        output["executor_id"] if output["executor_id"] else None
+                    ),
+                    function_name=output["function_name"],
+                )
+            )
+        except DBAPIError as dbapi_error:
+            if self._is_unique_constraint_violation(dbapi_error):
+                raise DBOSWorkflowConflictIDError(output["workflow_uuid"])
+            raise
+
+    def record_transaction_error(self, output: TransactionResultInternal) -> None:
+        try:
+            with self.engine.begin() as conn:
+                conn.execute(
+                    sa.insert(ApplicationSchema.transaction_outputs).values(
+                        workflow_uuid=output["workflow_uuid"],
+                        function_id=output["function_id"],
+                        output=None,
+                        error=output["error"],
+                        txn_id="",
+                        txn_snapshot=output["txn_snapshot"],
+                        executor_id=(
+                            output["executor_id"] if output["executor_id"] else None
+                        ),
+                        function_name=output["function_name"],
+                    )
+                )
+        except DBAPIError as dbapi_error:
+            if self._is_unique_constraint_violation(dbapi_error):
+                raise DBOSWorkflowConflictIDError(output["workflow_uuid"])
+            raise
+
+    @staticmethod
+    def check_transaction_execution(
+        session: Session, workflow_id: str, function_id: int, function_name: str
+    ) -> Optional[RecordedResult]:
+        rows = session.execute(
+            sa.select(
+                ApplicationSchema.transaction_outputs.c.output,
+                ApplicationSchema.transaction_outputs.c.error,
+                ApplicationSchema.transaction_outputs.c.function_name,
+            ).where(
+                ApplicationSchema.transaction_outputs.c.workflow_uuid == workflow_id,
+                ApplicationSchema.transaction_outputs.c.function_id == function_id,
+            )
+        ).all()
+        if len(rows) == 0:
+            return None
+        output, error, recorded_function_name = rows[0][0], rows[0][1], rows[0][2]
+        if function_name != recorded_function_name:
+            raise DBOSUnexpectedStepError(
+                workflow_id=workflow_id,
+                step_id=function_id,
+                expected_name=function_name,
+                recorded_name=recorded_function_name,
+            )
+        result: RecordedResult = {
+            "output": output,
+            "error": error,
+        }
+        return result
+
+    def garbage_collect(
+        self, cutoff_epoch_timestamp_ms: int, pending_workflow_ids: list[str]
+    ) -> None:
+        with self.engine.begin() as c:
+            delete_query = sa.delete(ApplicationSchema.transaction_outputs).where(
+                ApplicationSchema.transaction_outputs.c.created_at
+                < cutoff_epoch_timestamp_ms
+            )
+
+            if len(pending_workflow_ids) > 0:
+                delete_query = delete_query.where(
+                    ~ApplicationSchema.transaction_outputs.c.workflow_uuid.in_(
+                        pending_workflow_ids
+                    )
+                )
+
+            c.execute(delete_query)
+
+    @abstractmethod
+    def _is_unique_constraint_violation(self, dbapi_error: DBAPIError) -> bool:
+        """Check if the error is a unique constraint violation."""
+        pass
+
+    @abstractmethod
+    def _is_serialization_error(self, dbapi_error: DBAPIError) -> bool:
+        """Check if the error is a serialization/concurrency error."""
+        pass
+
+
+class PostgresApplicationDatabase(ApplicationDatabase):
+    """PostgreSQL-specific implementation of ApplicationDatabase."""
+
+    def _create_engine(
+        self, database_url: str, engine_kwargs: Dict[str, Any]
+    ) -> sa.Engine:
+        """Create a PostgreSQL engine."""
+        app_db_url = sa.make_url(database_url).set(drivername="postgresql+psycopg")
+
+        if engine_kwargs is None:
+            engine_kwargs = {}
+
+        return sa.create_engine(
+            app_db_url,
+            **engine_kwargs,
+        )
+
+    def run_migrations(self) -> None:
+        # Check if the database exists
+        app_db_url = self.engine.url
+        try:
+            postgres_db_engine = sa.create_engine(
+                app_db_url.set(database="postgres"),
+                **self._engine_kwargs,
+            )
+            with postgres_db_engine.connect() as conn:
+                conn.execution_options(isolation_level="AUTOCOMMIT")
+                if not conn.execute(
+                    sa.text("SELECT 1 FROM pg_database WHERE datname=:db_name"),
+                    parameters={"db_name": app_db_url.database},
+                ).scalar():
+                    conn.execute(sa.text(f"CREATE DATABASE {app_db_url.database}"))
+        except Exception:
+            dbos_logger.warning(
+                f"Could not connect to postgres database to verify existence of {app_db_url.database}. Continuing..."
+            )
+        finally:
+            postgres_db_engine.dispose()
+
+        # Create the dbos schema and transaction_outputs table in the application database
+        with self.engine.begin() as conn:
+            # Check if schema exists first
+            schema_exists = conn.execute(
+                sa.text(
+                    "SELECT 1 FROM information_schema.schemata WHERE schema_name = :schema_name"
+                ),
+                parameters={"schema_name": self.schema},
+            ).scalar()
+
+            if not schema_exists:
+                schema_creation_query = sa.text(f'CREATE SCHEMA "{self.schema}"')
+                conn.execute(schema_creation_query)
+
+        inspector = inspect(self.engine)
+        if not inspector.has_table("transaction_outputs", schema=self.schema):
+            ApplicationSchema.metadata_obj.create_all(self.engine)
+        else:
+            columns = inspector.get_columns("transaction_outputs", schema=self.schema)
+            column_names = [col["name"] for col in columns]
+
+            if "function_name" not in column_names:
+                # Column missing, alter table to add it
+                with self.engine.connect() as conn:
+                    conn.execute(
+                        text(
+                            f"""
+                            ALTER TABLE \"{self.schema}\".transaction_outputs
+                            ADD COLUMN function_name TEXT NOT NULL DEFAULT '';
+                            """
+                        )
+                    )
+                    conn.commit()
+
+    def _is_unique_constraint_violation(self, dbapi_error: DBAPIError) -> bool:
+        """Check if the error is a unique constraint violation in PostgreSQL."""
+        return dbapi_error.orig.sqlstate == "23505"  # type: ignore
+
+    def _is_serialization_error(self, dbapi_error: DBAPIError) -> bool:
+        """Check if the error is a serialization/concurrency error in PostgreSQL."""
+        # 40001: serialization_failure (MVCC conflict)
+        # 40P01: deadlock_detected
+        driver_error = dbapi_error.orig
+        return (
+            driver_error is not None
+            and isinstance(driver_error, psycopg.OperationalError)
+            and driver_error.sqlstate in ("40001", "40P01")
+        )
+
+
+class SQLiteApplicationDatabase(ApplicationDatabase):
+    """SQLite-specific implementation of ApplicationDatabase."""
+
+    def _create_engine(
+        self, database_url: str, engine_kwargs: Dict[str, Any]
+    ) -> sa.Engine:
+        """Create a SQLite engine."""
+        # TODO: Make the schema dynamic so this isn't needed
+        ApplicationSchema.transaction_outputs.schema = None
+        return sa.create_engine(database_url)
+
+    def run_migrations(self) -> None:
+        with self.engine.begin() as conn:
+            # Check if table exists
+            result = conn.execute(
+                sa.text(
+                    "SELECT name FROM sqlite_master WHERE type='table' AND name='transaction_outputs'"
+                )
+            ).fetchone()
+
+            if result is None:
+                conn.execute(
+                    sa.text(
+                        f"""
+                        CREATE TABLE transaction_outputs (
+                            workflow_uuid TEXT NOT NULL,
+                            function_id INTEGER NOT NULL,
+                            output TEXT,
+                            error TEXT,
+                            txn_id TEXT,
+                            txn_snapshot TEXT NOT NULL,
+                            executor_id TEXT,
+                            function_name TEXT NOT NULL DEFAULT '',
+                            created_at BIGINT NOT NULL DEFAULT {get_sqlite_timestamp_expr()},
+                            PRIMARY KEY (workflow_uuid, function_id)
+                        )
+                        """
+                    )
+                )
+                conn.execute(
+                    sa.text(
+                        "CREATE INDEX transaction_outputs_created_at_index ON transaction_outputs (created_at)"
+                    )
+                )
+
+    def _is_unique_constraint_violation(self, dbapi_error: DBAPIError) -> bool:
+        """Check if the error is a unique constraint violation in SQLite."""
+        return "UNIQUE constraint failed" in str(dbapi_error.orig)
+
+    def _is_serialization_error(self, dbapi_error: DBAPIError) -> bool:
+        """Check if the error is a serialization/concurrency error in SQLite."""
+        # SQLite database is locked or busy errors
+        error_msg = str(dbapi_error.orig).lower()
+        return (
+            "database is locked" in error_msg or "database table is locked" in error_msg
+        )