dbos 1.14.0a9__tar.gz → 1.15.0a2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {dbos-1.14.0a9 → dbos-1.15.0a2}/PKG-INFO +8 -16
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_client.py +30 -35
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_context.py +12 -6
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_core.py +5 -8
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_dbos.py +15 -27
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_dbos_config.py +32 -42
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_debouncer.py +1 -7
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_debug.py +0 -8
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_docker_pg_helper.py +93 -51
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_fastapi.py +5 -1
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_logger.py +18 -21
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_migration.py +4 -41
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_serialization.py +19 -30
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_sys_db_postgres.py +2 -9
- dbos-1.15.0a2/dbos/_templates/dbos-db-starter/migrations/create_table.py.dbos +34 -0
- dbos-1.15.0a2/dbos/_tracer.py +99 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_workflow_commands.py +9 -5
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/cli/_github_init.py +22 -16
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/cli/_template_init.py +5 -16
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/cli/cli.py +27 -33
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/cli/migration.py +15 -10
- {dbos-1.14.0a9 → dbos-1.15.0a2}/pyproject.toml +13 -16
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/conftest.py +1 -0
- dbos-1.15.0a2/tests/script_without_fastapi.py +30 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/test_async.py +0 -85
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/test_config.py +11 -28
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/test_dbos.py +53 -32
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/test_debug.py +0 -7
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/test_docker_secrets.py +0 -20
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/test_failures.py +5 -4
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/test_fastapi_roles.py +1 -125
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/test_package.py +15 -3
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/test_scheduler.py +1 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/test_schema_migration.py +88 -114
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/test_spans.py +1 -1
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/test_workflow_management.py +1 -0
- dbos-1.14.0a9/dbos/_alembic_migrations/env.py +0 -62
- dbos-1.14.0a9/dbos/_alembic_migrations/script.py.mako +0 -26
- dbos-1.14.0a9/dbos/_alembic_migrations/versions/01ce9f07bd10_streaming.py +0 -42
- dbos-1.14.0a9/dbos/_alembic_migrations/versions/04ca4f231047_workflow_queues_executor_id.py +0 -34
- dbos-1.14.0a9/dbos/_alembic_migrations/versions/27ac6900c6ad_add_queue_dedup.py +0 -45
- dbos-1.14.0a9/dbos/_alembic_migrations/versions/471b60d64126_dbos_migrations.py +0 -35
- dbos-1.14.0a9/dbos/_alembic_migrations/versions/50f3227f0b4b_fix_job_queue.py +0 -35
- dbos-1.14.0a9/dbos/_alembic_migrations/versions/5c361fc04708_added_system_tables.py +0 -193
- dbos-1.14.0a9/dbos/_alembic_migrations/versions/66478e1b95e5_consolidate_queues.py +0 -71
- dbos-1.14.0a9/dbos/_alembic_migrations/versions/83f3732ae8e7_workflow_timeout.py +0 -44
- dbos-1.14.0a9/dbos/_alembic_migrations/versions/933e86bdac6a_add_queue_priority.py +0 -35
- dbos-1.14.0a9/dbos/_alembic_migrations/versions/a3b18ad34abe_added_triggers.py +0 -72
- dbos-1.14.0a9/dbos/_alembic_migrations/versions/d76646551a6b_job_queue_limiter.py +0 -43
- dbos-1.14.0a9/dbos/_alembic_migrations/versions/d76646551a6c_workflow_queue.py +0 -28
- dbos-1.14.0a9/dbos/_alembic_migrations/versions/d994145b47b6_consolidate_inputs.py +0 -30
- dbos-1.14.0a9/dbos/_alembic_migrations/versions/eab0cc1d9a14_job_queue.py +0 -56
- dbos-1.14.0a9/dbos/_alembic_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py +0 -46
- dbos-1.14.0a9/dbos/_templates/dbos-db-starter/alembic.ini +0 -116
- dbos-1.14.0a9/dbos/_templates/dbos-db-starter/migrations/env.py.dbos +0 -85
- dbos-1.14.0a9/dbos/_templates/dbos-db-starter/migrations/script.py.mako +0 -26
- dbos-1.14.0a9/dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py +0 -35
- dbos-1.14.0a9/dbos/_tracer.py +0 -88
- {dbos-1.14.0a9 → dbos-1.15.0a2}/LICENSE +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/README.md +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/__init__.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/__main__.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_admin_server.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_app_db.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_classproperty.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_conductor/conductor.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_conductor/protocol.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_croniter.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_error.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_event_loop.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_flask.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_kafka.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_kafka_message.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_outcome.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_queue.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_recovery.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_registrations.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_roles.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_scheduler.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_schemas/__init__.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_schemas/application_database.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_schemas/system_database.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_sys_db.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_sys_db_sqlite.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_templates/dbos-db-starter/README.md +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_templates/dbos-db-starter/__package/__init__.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_templates/dbos-db-starter/__package/main.py.dbos +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_templates/dbos-db-starter/__package/schema.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_utils.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/dbos-config.schema.json +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/py.typed +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/__init__.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/atexit_no_ctor.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/atexit_no_launch.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/classdefs.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/client_collateral.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/client_worker.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/dupname_classdefs1.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/dupname_classdefsa.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/more_classdefs.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/queuedworkflow.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/test_admin_server.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/test_async_workflow_management.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/test_classdecorators.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/test_cli.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/test_client.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/test_concurrency.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/test_croniter.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/test_debouncer.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/test_fastapi.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/test_flask.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/test_kafka.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/test_outcome.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/test_queue.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/test_singleton.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/test_sqlalchemy.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/test_streaming.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/tests/test_workflow_introspection.py +0 -0
- {dbos-1.14.0a9 → dbos-1.15.0a2}/version/__init__.py +0 -0

{dbos-1.14.0a9 → dbos-1.15.0a2}/PKG-INFO

@@ -1,28 +1,20 @@
 Metadata-Version: 2.1
 Name: dbos
-Version: 1.14.0a9
+Version: 1.15.0a2
 Summary: Ultra-lightweight durable execution in Python
 Author-Email: "DBOS, Inc." <contact@dbos.dev>
 License: MIT
-Requires-Python: >=3.
+Requires-Python: >=3.10
 Requires-Dist: pyyaml>=6.0.2
-Requires-Dist: jsonschema>=4.23.0
-Requires-Dist: alembic>=1.13.3
-Requires-Dist: typing-extensions>=4.12.2; python_version < "3.10"
-Requires-Dist: typer>=0.12.5
-Requires-Dist: jsonpickle>=3.3.0
-Requires-Dist: opentelemetry-api>=1.27.0
-Requires-Dist: opentelemetry-sdk>=1.27.0
-Requires-Dist: opentelemetry-exporter-otlp-proto-http>=1.27.0
 Requires-Dist: python-dateutil>=2.9.0.post0
-Requires-Dist: fastapi[standard]>=0.115.2
-Requires-Dist: tomlkit>=0.13.2
 Requires-Dist: psycopg[binary]>=3.1
-Requires-Dist: docker>=7.1.0
-Requires-Dist: cryptography>=43.0.3
-Requires-Dist: rich>=13.9.4
-Requires-Dist: pyjwt>=2.10.1
 Requires-Dist: websockets>=14.0
+Requires-Dist: typer-slim>=0.17.4
+Requires-Dist: sqlalchemy>=2.0.43
+Provides-Extra: otel
+Requires-Dist: opentelemetry-api>=1.37.0; extra == "otel"
+Requires-Dist: opentelemetry-sdk>=1.37.0; extra == "otel"
+Requires-Dist: opentelemetry-exporter-otlp-proto-http>=1.37.0; extra == "otel"
 Description-Content-Type: text/markdown

{dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_client.py

@@ -1,5 +1,4 @@
 import asyncio
-import sys
 import time
 import uuid
 from typing import (
@@ -15,25 +14,15 @@ from typing import (
     Union,
 )

+from dbos import _serialization
 from dbos._app_db import ApplicationDatabase
 from dbos._context import MaxPriority, MinPriority
 from dbos._sys_db import SystemDatabase

-if sys.version_info < (3, 11):
-    from typing_extensions import NotRequired
-else:
-    from typing import NotRequired
-
-from dbos import _serialization
-
 if TYPE_CHECKING:
     from dbos._dbos import WorkflowHandle, WorkflowHandleAsync

-from dbos._dbos_config import (
-    get_application_database_url,
-    get_system_database_url,
-    is_valid_database_url,
-)
+from dbos._dbos_config import get_system_database_url, is_valid_database_url
 from dbos._error import DBOSException, DBOSNonExistentWorkflowError
 from dbos._registrations import DEFAULT_MAX_RECOVERY_ATTEMPTS
 from dbos._serialization import WorkflowInputs
@@ -58,14 +47,20 @@ from dbos._workflow_commands import (
 R = TypeVar("R", covariant=True)  # A generic type for workflow return values


-class EnqueueOptions(TypedDict):
+# Required EnqueueOptions fields
+class _EnqueueOptionsRequired(TypedDict):
     workflow_name: str
     queue_name: str
-    workflow_id: NotRequired[str]
-    app_version: NotRequired[str]
-    workflow_timeout: NotRequired[float]
-    deduplication_id: NotRequired[str]
-    priority: NotRequired[int]
+
+
+# Optional EnqueueOptions fields
+class EnqueueOptions(_EnqueueOptionsRequired, total=False):
+    workflow_id: str
+    app_version: str
+    workflow_timeout: float
+    deduplication_id: str
+    priority: int
+    max_recovery_attempts: int


 def validate_enqueue_options(options: EnqueueOptions) -> None:
@@ -119,6 +114,9 @@ class WorkflowHandleClientAsyncPolling(Generic[R]):


 class DBOSClient:
+
+    _app_db: ApplicationDatabase | None = None
+
     def __init__(
         self,
         database_url: Optional[str] = None,  # DEPRECATED
@@ -127,13 +125,8 @@ class DBOSClient:
         application_database_url: Optional[str] = None,
         system_database: Optional[str] = None,  # DEPRECATED
     ):
-        application_database_url = get_application_database_url(
-            {
-                "system_database_url": system_database_url,
-                "database_url": (
-                    database_url if database_url else application_database_url
-                ),
-            }
+        application_database_url = (
+            database_url if database_url else application_database_url
         )
         system_database_url = get_system_database_url(
             {
@@ -143,7 +136,8 @@
             }
         )
         assert is_valid_database_url(system_database_url)
-        assert is_valid_database_url(application_database_url)
+        if application_database_url:
+            assert is_valid_database_url(application_database_url)
         # We only create database connections but do not run migrations
         self._sys_db = SystemDatabase.create(
             system_database_url=system_database_url,
@@ -154,14 +148,15 @@
             },
         )
         self._sys_db.check_connection()
-        self._app_db = ApplicationDatabase.create(
-            database_url=application_database_url,
-            engine_kwargs={
-                "pool_timeout": 30,
-                "max_overflow": 0,
-                "pool_size": 2,
-            },
-        )
+        if application_database_url:
+            self._app_db = ApplicationDatabase.create(
+                database_url=application_database_url,
+                engine_kwargs={
+                    "pool_timeout": 30,
+                    "max_overflow": 0,
+                    "pool_size": 2,
+                },
+            )

     def destroy(self) -> None:
         self._sys_db.destroy()

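Taken together, the _client.py changes split EnqueueOptions into required and optional keys (total=False instead of NotRequired) and make the application database optional: DBOSClient only creates an ApplicationDatabase when application_database_url is given. A minimal usage sketch under those assumptions follows; the system_database_url keyword is presumed from the constructor body above, and all URLs and names are placeholders, not values from the package.

from dbos._client import DBOSClient, EnqueueOptions

# Assumed keyword: the constructor body above references system_database_url.
client = DBOSClient(
    system_database_url="postgresql://user:pass@localhost:5432/my_app_dbos_sys",
    # application_database_url omitted: per the diff, no ApplicationDatabase
    # connection is created in that case.
)

# workflow_name and queue_name are required; everything else is optional (total=False).
options: EnqueueOptions = {
    "workflow_name": "process_order",
    "queue_name": "orders",
    "priority": 1,
    "max_recovery_attempts": 5,  # new optional field in this release
}
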
{dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_context.py

@@ -8,9 +8,11 @@ from contextvars import ContextVar
 from dataclasses import dataclass
 from enum import Enum
 from types import TracebackType
-from typing import List, Literal, Optional, Type, TypedDict
+from typing import TYPE_CHECKING, List, Literal, Optional, Type, TypedDict
+
+if TYPE_CHECKING:
+    from opentelemetry.trace import Span

-from opentelemetry.trace import Span, Status, StatusCode, use_span
 from sqlalchemy.orm import Session

 from dbos._utils import GlobalParams
@@ -78,8 +80,8 @@ class ContextSpan:
         context_manager: The context manager that is used to manage the span's lifecycle.
     """

-    span: Span
-    context_manager: AbstractContextManager[Span]
+    span: "Span"
+    context_manager: "AbstractContextManager[Span]"


 class DBOSContext:
@@ -217,19 +219,21 @@ class DBOSContext:

     """ Return the current DBOS span if any. It must be a span created by DBOS."""

-    def get_current_dbos_span(self) -> Optional[Span]:
+    def get_current_dbos_span(self) -> "Optional[Span]":
         if len(self.context_spans) > 0:
             return self.context_spans[-1].span
         return None

     """ Return the current active span if any. It might not be a DBOS span."""

-    def get_current_active_span(self) -> Optional[Span]:
+    def get_current_active_span(self) -> "Optional[Span]":
         return dbos_tracer.get_current_span()

     def _start_span(self, attributes: TracedAttributes) -> None:
         if dbos_tracer.disable_otlp:
             return
+        from opentelemetry.trace import use_span
+
         attributes["operationUUID"] = (
             self.workflow_id if len(self.workflow_id) > 0 else None
         )
@@ -257,6 +261,8 @@
     def _end_span(self, exc_value: Optional[BaseException]) -> None:
         if dbos_tracer.disable_otlp:
             return
+        from opentelemetry.trace import Status, StatusCode
+
         context_span = self.context_spans.pop()
         if exc_value is None:
             context_span.span.set_status(Status(StatusCode.OK))

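_context.py now defers every opentelemetry import either to TYPE_CHECKING or to the point where a span is actually started or ended, which is what lets OpenTelemetry become an optional extra. A minimal sketch of the same deferred-import pattern, with illustrative names (start_span here is not a DBOS API):

from typing import TYPE_CHECKING, Optional

if TYPE_CHECKING:
    # Seen only by type checkers; importing this module needs no opentelemetry at runtime.
    from opentelemetry.trace import Span


def start_span(name: str) -> "Optional[Span]":
    # The runtime import happens only when tracing is actually used.
    from opentelemetry.trace import get_tracer

    return get_tracer(__name__).start_span(name)
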
{dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_core.py

@@ -14,6 +14,7 @@ from typing import (
     Coroutine,
     Generic,
     Optional,
+    ParamSpec,
     TypeVar,
     Union,
     cast,
@@ -22,14 +23,8 @@
 from dbos._outcome import Immediate, NoResult, Outcome, Pending
 from dbos._utils import GlobalParams, retriable_postgres_exception

-from ._app_db import ApplicationDatabase, TransactionResultInternal
-
-if sys.version_info < (3, 10):
-    from typing_extensions import ParamSpec
-else:
-    from typing import ParamSpec
-
 from . import _serialization
+from ._app_db import ApplicationDatabase, TransactionResultInternal
 from ._context import (
     DBOSAssumeRole,
     DBOSContext,
@@ -901,7 +896,9 @@ def decorate_transaction(
             raise DBOSWorkflowCancelledError(
                 f"Workflow {ctx.workflow_id} is cancelled. Aborting transaction {transaction_name}."
             )
-
+        assert (
+            dbos._app_db
+        ), "Transactions can only be used if DBOS is configured with an application_database_url"
         with dbos._app_db.sessionmaker() as session:
             attributes: TracedAttributes = {
                 "name": transaction_name,

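The new assert in decorate_transaction means a @DBOS.transaction step only runs when an application database URL is configured; without one, the assert fires before a session is opened. A sketch of such a step, assuming the public @DBOS.transaction decorator and the DBOS.sql_session session handle (the table and values are placeholders):

from dbos import DBOS
from sqlalchemy import text


@DBOS.transaction()
def record_event(value: str) -> None:
    # Runs inside the application-database session opened by decorate_transaction.
    DBOS.sql_session.execute(
        text("INSERT INTO events (value) VALUES (:value)"),
        {"value": value},
    )
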
{dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_dbos.py

@@ -28,9 +28,6 @@ from typing import (
     Union,
 )

-from opentelemetry.trace import Span
-from rich import print
-
 from dbos._conductor.conductor import ConductorWebsocket
 from dbos._debouncer import debouncer_workflow
 from dbos._sys_db import SystemDatabase, WorkflowStatus
@@ -53,7 +50,6 @@ from ._core import (
     set_event,
     start_workflow,
     start_workflow_async,
-    workflow_wrapper,
 )
 from ._queue import Queue, queue_thread
 from ._recovery import recover_pending_workflows, startup_recovery_thread
@@ -62,8 +58,6 @@ from ._registrations import (
     DBOSClassInfo,
     _class_fqn,
     get_or_create_class_info,
-    set_dbos_func_name,
-    set_temp_workflow_type,
 )
 from ._roles import default_required_roles, required_roles
 from ._scheduler import ScheduledWorkflow, scheduled
@@ -80,13 +74,11 @@ if TYPE_CHECKING:
     from fastapi import FastAPI
     from ._kafka import _KafkaConsumerWorkflow
     from flask import Flask
+    from opentelemetry.trace import Span

-    from sqlalchemy.orm import Session
+    from typing import ParamSpec

-if sys.version_info < (3, 10):
-    from typing_extensions import ParamSpec
-else:
-    from typing import ParamSpec
+    from sqlalchemy.orm import Session

 from ._admin_server import AdminServer
 from ._app_db import ApplicationDatabase
@@ -417,13 +409,8 @@ class DBOS:
         return rv

     @property
-    def _app_db(self) -> ApplicationDatabase:
-        if self._app_db_field is None:
-            raise DBOSException(
-                "Application database accessed before DBOS was launched"
-            )
-        rv: ApplicationDatabase = self._app_db_field
-        return rv
+    def _app_db(self) -> ApplicationDatabase | None:
+        return self._app_db_field

     @property
     def _admin_server(self) -> AdminServer:
@@ -456,7 +443,6 @@
         dbos_logger.info(f"Application version: {GlobalParams.app_version}")
         self._executor_field = ThreadPoolExecutor(max_workers=sys.maxsize)
         self._background_event_loop.start()
-        assert self._config["database_url"] is not None
         assert self._config["database"]["sys_db_engine_kwargs"] is not None
         self._sys_db_field = SystemDatabase.create(
             system_database_url=get_system_database_url(self._config),
@@ -464,18 +450,20 @@
             debug_mode=debug_mode,
         )
         assert self._config["database"]["db_engine_kwargs"] is not None
-        self._app_db_field = ApplicationDatabase.create(
-            database_url=self._config["database_url"],
-            engine_kwargs=self._config["database"]["db_engine_kwargs"],
-            debug_mode=debug_mode,
-        )
+        if self._config["database_url"]:
+            self._app_db_field = ApplicationDatabase.create(
+                database_url=self._config["database_url"],
+                engine_kwargs=self._config["database"]["db_engine_kwargs"],
+                debug_mode=debug_mode,
+            )

         if debug_mode:
             return

         # Run migrations for the system and application databases
         self._sys_db.run_migrations()
-        self._app_db.run_migrations()
+        if self._app_db:
+            self._app_db.run_migrations()

         admin_port = self._config.get("runtimeConfig", {}).get("admin_port")
         if admin_port is None:
@@ -558,7 +546,7 @@
                     f"https://console.dbos.dev/self-host?appname={app_name}"
                 )
                 print(
-                    f"
+                    f"To view and manage workflows, connect to DBOS Conductor at:{conductor_registration_url}"
                 )

         # Flush handlers and add OTLP to all loggers if enabled
@@ -1297,7 +1285,7 @@
         return ctx.parent_workflow_id

     @classproperty
-    def span(cls) -> Span:
+    def span(cls) -> "Span":
         """Return the tracing `Span` associated with the current context."""
         ctx = assert_current_dbos_context()
         span = ctx.get_current_active_span()

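In _dbos.py the application database becomes optional end to end: _app_db_field is only populated when database_url is set, the _app_db property may now return None, and its migrations are skipped otherwise. A launch that exercises that path might look like the sketch below; DBOSConfig, DBOS(config=...), and DBOS.launch() are assumed from the dbos public API, and the URL is a placeholder.

from dbos import DBOS, DBOSConfig

# Only a system database is configured, so per the diff above no
# ApplicationDatabase is created and only system-database migrations run.
config: DBOSConfig = {
    "name": "my-app",
    "system_database_url": "postgresql://user:pass@localhost:5432/my_app_dbos_sys",
}
DBOS(config=config)
DBOS.launch()
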
{dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_dbos_config.py

@@ -5,8 +5,6 @@ from importlib import resources
 from typing import Any, Dict, List, Optional, TypedDict, cast

 import yaml
-from jsonschema import ValidationError, validate
-from rich import print
 from sqlalchemy import make_url

 from ._error import DBOSInitializationError
@@ -36,7 +34,7 @@ class DBOSConfig(TypedDict, total=False):
         otlp_attributes (dict[str, str]): A set of custom attributes to apply OTLP-exported logs and traces
         application_version (str): Application version
         executor_id (str): Executor ID, used to identify the application instance in distributed environments
-
+        enable_otlp (bool): If True, enable built-in DBOS OTLP tracing and logging.
     """

     name: str
@@ -54,7 +52,7 @@ class DBOSConfig(TypedDict, total=False):
     otlp_attributes: Optional[dict[str, str]]
     application_version: Optional[str]
     executor_id: Optional[str]
-
+    enable_otlp: Optional[bool]


 class RuntimeConfig(TypedDict, total=False):
@@ -97,7 +95,7 @@ class TelemetryConfig(TypedDict, total=False):
     logs: Optional[LoggerConfig]
     OTLPExporter: Optional[OTLPExporterConfig]
     otlp_attributes: Optional[dict[str, str]]
-    disable_otlp:
+    disable_otlp: bool


 class ConfigFile(TypedDict, total=False):
@@ -165,10 +163,12 @@ def translate_dbos_config_to_config_file(config: DBOSConfig) -> ConfigFile:
     ]

     # Telemetry config
+    enable_otlp = config.get("enable_otlp", None)
+    disable_otlp = True if enable_otlp is None else not enable_otlp
     telemetry: TelemetryConfig = {
         "OTLPExporter": {"tracesEndpoint": [], "logsEndpoint": []},
         "otlp_attributes": config.get("otlp_attributes", {}),
-        "disable_otlp":
+        "disable_otlp": disable_otlp,
     }
     # For mypy
     assert telemetry["OTLPExporter"] is not None
@@ -265,17 +265,6 @@ def load_config(
     )
     data = cast(Dict[str, Any], data)

-    # Load the JSON schema relative to the package root
-    schema_file = resources.files("dbos").joinpath("dbos-config.schema.json")
-    with schema_file.open("r") as f:
-        schema = json.load(f)
-
-    # Validate the data against the schema
-    try:
-        validate(instance=data, schema=schema)
-    except ValidationError as e:
-        raise DBOSInitializationError(f"Validation error: {e}")
-
     # Special case: convert logsEndpoint and tracesEndpoint from strings to lists of strings, if present
     if "telemetry" in data and "OTLPExporter" in data["telemetry"]:
         if "logsEndpoint" in data["telemetry"]["OTLPExporter"]:
@@ -419,39 +408,38 @@ def process_config(
         url = url.set(database=f"{url.database}{SystemSchema.sysdb_suffix}")
         data["system_database_url"] = url.render_as_string(hide_password=False)

-    # If a system database URL is provided but not an application database URL,
-    #
+    # If a system database URL is provided but not an application database URL,
+    # do not create an application database.
     if data.get("system_database_url") and not data.get("database_url"):
         assert data["system_database_url"]
-        data["database_url"] =
+        data["database_url"] = None

-    # If neither URL is provided, use a default SQLite database URL.
+    # If neither URL is provided, use a default SQLite system database URL.
     if not data.get("database_url") and not data.get("system_database_url"):
         _app_db_name = _app_name_to_db_name(data["name"])
-        data["system_database_url"] =
-
-        )
+        data["system_database_url"] = f"sqlite:///{_app_db_name}.sqlite"
+        data["database_url"] = None

     configure_db_engine_parameters(data["database"], connect_timeout=connect_timeout)

-    assert data["database_url"] is not None
     assert data["system_database_url"] is not None
     # Pretty-print connection information, respecting log level
     if not silent and logs["logLevel"] == "INFO" or logs["logLevel"] == "DEBUG":
         printable_sys_db_url = make_url(data["system_database_url"]).render_as_string(
             hide_password=True
         )
-        print(
-
-
-
-        print(
-            f"[bold blue]Using SQLite as a system database. The SQLite system database is for development and testing. PostgreSQL is recommended for production use.[/bold blue]"
+        print(f"DBOS system database URL: {printable_sys_db_url}")
+        if data["database_url"]:
+            printable_app_db_url = make_url(data["database_url"]).render_as_string(
+                hide_password=True
            )
-
+            print(f"DBOS application database URL: {printable_app_db_url}")
+        if data["system_database_url"].startswith("sqlite"):
            print(
-                f"
+                f"Using SQLite as a system database. The SQLite system database is for development and testing. PostgreSQL is recommended for production use."
            )
+        else:
+            print(f"Database engine parameters: {data['database']['db_engine_kwargs']}")

     # Return data as ConfigFile type
     return data
@@ -563,12 +551,15 @@ def overwrite_config(provided_config: ConfigFile) -> ConfigFile:
     if "telemetry" not in provided_config or provided_config["telemetry"] is None:
         provided_config["telemetry"] = {
             "OTLPExporter": {"tracesEndpoint": [], "logsEndpoint": []},
+            "disable_otlp": False,
         }
-
-    provided_config["telemetry"]["
-
-    "
-
+    else:
+        provided_config["telemetry"]["disable_otlp"] = False
+        if "OTLPExporter" not in provided_config["telemetry"]:
+            provided_config["telemetry"]["OTLPExporter"] = {
+                "tracesEndpoint": [],
+                "logsEndpoint": [],
+            }

     # This is a super messy from a typing perspective.
     # Some of ConfigFile keys are optional -- but in practice they'll always be present in hosted environments
@@ -627,12 +618,11 @@ def get_system_database_url(config: ConfigFile) -> str:
     )


-def get_application_database_url(config: ConfigFile) -> str:
+def get_application_database_url(config: ConfigFile) -> str | None:
     # For backwards compatibility, the application database URL is "database_url"
     if config.get("database_url"):
         assert config["database_url"]
         return config["database_url"]
     else:
-        # If the application database URL is not specified,
-
-        return config["system_database_url"]
+        # If the application database URL is not specified, return None
+        return None

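On the configuration side, OTLP export is now opt-in: DBOSConfig gains enable_otlp, and translate_dbos_config_to_config_file maps it onto the existing disable_otlp telemetry flag (unset means disabled). A sketch using only keys visible in this diff plus the assumed system_database_url key; values are placeholders.

from dbos import DBOS, DBOSConfig

config: DBOSConfig = {
    "name": "my-app",
    "system_database_url": "sqlite:///my_app.sqlite",  # SQLite default mirrors process_config above
    "enable_otlp": True,  # opt in; leaving it unset keeps disable_otlp=True
    "otlp_attributes": {"deployment.environment": "dev"},
}
DBOS(config=config)
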
{dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_debouncer.py

@@ -1,6 +1,5 @@
 import asyncio
 import math
-import sys
 import time
 import types
 import uuid
@@ -12,17 +11,12 @@ from typing import (
     Dict,
     Generic,
     Optional,
+    ParamSpec,
     Tuple,
     TypedDict,
     TypeVar,
-    Union,
 )

-if sys.version_info < (3, 10):
-    from typing_extensions import ParamSpec
-else:
-    from typing import ParamSpec
-
 from dbos._client import (
     DBOSClient,
     EnqueueOptions,

{dbos-1.14.0a9 → dbos-1.15.0a2}/dbos/_debug.py

@@ -4,8 +4,6 @@ import sys
 from pathlib import Path
 from typing import Union

-from fastapi_cli.discover import get_module_data_from_path
-
 from dbos import DBOS


@@ -34,12 +32,6 @@ def debug_workflow(workflow_id: str, entrypoint: Union[str, PythonModule]) -> None:


 def parse_start_command(command: str) -> Union[str, PythonModule]:
-    match = re.match(r"fastapi\s+run\s+(\.?[\w/]+\.py)", command)
-    if match:
-        # Mirror the logic in fastapi's run command by converting the path argument to a module
-        mod_data = get_module_data_from_path(Path(match.group(1)))
-        sys.path.insert(0, str(mod_data.extra_sys_path))
-        return PythonModule(mod_data.module_import_str)
     match = re.match(r"python3?\s+(\.?[\w/]+\.py)", command)
     if match:
         return match.group(1)