dbos 1.14.0a9__py3-none-any.whl → 1.15.0a1__py3-none-any.whl
- dbos/_client.py +13 -14
- dbos/_context.py +12 -6
- dbos/_core.py +2 -7
- dbos/_dbos.py +5 -13
- dbos/_dbos_config.py +17 -29
- dbos/_debouncer.py +1 -7
- dbos/_debug.py +0 -8
- dbos/_docker_pg_helper.py +93 -51
- dbos/_fastapi.py +5 -1
- dbos/_logger.py +18 -21
- dbos/_migration.py +4 -41
- dbos/_serialization.py +19 -30
- dbos/_sys_db_postgres.py +2 -9
- dbos/_templates/dbos-db-starter/migrations/create_table.py.dbos +34 -0
- dbos/_tracer.py +42 -31
- dbos/cli/_github_init.py +22 -16
- dbos/cli/_template_init.py +5 -16
- dbos/cli/cli.py +20 -28
- {dbos-1.14.0a9.dist-info → dbos-1.15.0a1.dist-info}/METADATA +8 -16
- dbos-1.15.0a1.dist-info/RECORD +59 -0
- dbos/_alembic_migrations/env.py +0 -62
- dbos/_alembic_migrations/script.py.mako +0 -26
- dbos/_alembic_migrations/versions/01ce9f07bd10_streaming.py +0 -42
- dbos/_alembic_migrations/versions/04ca4f231047_workflow_queues_executor_id.py +0 -34
- dbos/_alembic_migrations/versions/27ac6900c6ad_add_queue_dedup.py +0 -45
- dbos/_alembic_migrations/versions/471b60d64126_dbos_migrations.py +0 -35
- dbos/_alembic_migrations/versions/50f3227f0b4b_fix_job_queue.py +0 -35
- dbos/_alembic_migrations/versions/5c361fc04708_added_system_tables.py +0 -193
- dbos/_alembic_migrations/versions/66478e1b95e5_consolidate_queues.py +0 -71
- dbos/_alembic_migrations/versions/83f3732ae8e7_workflow_timeout.py +0 -44
- dbos/_alembic_migrations/versions/933e86bdac6a_add_queue_priority.py +0 -35
- dbos/_alembic_migrations/versions/a3b18ad34abe_added_triggers.py +0 -72
- dbos/_alembic_migrations/versions/d76646551a6b_job_queue_limiter.py +0 -43
- dbos/_alembic_migrations/versions/d76646551a6c_workflow_queue.py +0 -28
- dbos/_alembic_migrations/versions/d994145b47b6_consolidate_inputs.py +0 -30
- dbos/_alembic_migrations/versions/eab0cc1d9a14_job_queue.py +0 -56
- dbos/_alembic_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py +0 -46
- dbos/_templates/dbos-db-starter/alembic.ini +0 -116
- dbos/_templates/dbos-db-starter/migrations/env.py.dbos +0 -85
- dbos/_templates/dbos-db-starter/migrations/script.py.mako +0 -26
- dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py +0 -35
- dbos-1.14.0a9.dist-info/RECORD +0 -79
- {dbos-1.14.0a9.dist-info → dbos-1.15.0a1.dist-info}/WHEEL +0 -0
- {dbos-1.14.0a9.dist-info → dbos-1.15.0a1.dist-info}/entry_points.txt +0 -0
- {dbos-1.14.0a9.dist-info → dbos-1.15.0a1.dist-info}/licenses/LICENSE +0 -0
dbos/_client.py
CHANGED
@@ -1,5 +1,4 @@
 import asyncio
-import sys
 import time
 import uuid
 from typing import (
@@ -15,17 +14,11 @@ from typing import (
     Union,
 )
 
+from dbos import _serialization
 from dbos._app_db import ApplicationDatabase
 from dbos._context import MaxPriority, MinPriority
 from dbos._sys_db import SystemDatabase
 
-if sys.version_info < (3, 11):
-    from typing_extensions import NotRequired
-else:
-    from typing import NotRequired
-
-from dbos import _serialization
-
 if TYPE_CHECKING:
     from dbos._dbos import WorkflowHandle, WorkflowHandleAsync
 
@@ -58,14 +51,20 @@ from dbos._workflow_commands import (
 R = TypeVar("R", covariant=True)  # A generic type for workflow return values
 
 
-
+# Required EnqueueOptions fields
+class _EnqueueOptionsRequired(TypedDict):
     workflow_name: str
     queue_name: str
-
-
-
-
-
+
+
+# Optional EnqueueOptions fields
+class EnqueueOptions(_EnqueueOptionsRequired, total=False):
+    workflow_id: str
+    app_version: str
+    workflow_timeout: float
+    deduplication_id: str
+    priority: int
+    max_recovery_attempts: int
 
 
 def validate_enqueue_options(options: EnqueueOptions) -> None:
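The new `EnqueueOptions` layout splits required and optional keys across two `TypedDict`s, replacing the per-field `NotRequired` markers that needed a `typing_extensions` fallback on Python < 3.11. A minimal sketch of the pattern (the `opts` variable and the two example optional fields are illustrative):

```python
from typing import TypedDict


# Keys declared here are always required.
class _EnqueueOptionsRequired(TypedDict):
    workflow_name: str
    queue_name: str


# total=False makes every key declared in this subclass optional,
# while the keys inherited from the base stay required.
class EnqueueOptions(_EnqueueOptionsRequired, total=False):
    workflow_id: str
    priority: int


# Valid: optional keys may be omitted, required keys may not.
opts: EnqueueOptions = {"workflow_name": "my_workflow", "queue_name": "default"}
```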
dbos/_context.py
CHANGED
@@ -8,9 +8,11 @@ from contextvars import ContextVar
 from dataclasses import dataclass
 from enum import Enum
 from types import TracebackType
-from typing import List, Literal, Optional, Type, TypedDict
+from typing import TYPE_CHECKING, List, Literal, Optional, Type, TypedDict
+
+if TYPE_CHECKING:
+    from opentelemetry.trace import Span
 
-from opentelemetry.trace import Span, Status, StatusCode, use_span
 from sqlalchemy.orm import Session
 
 from dbos._utils import GlobalParams
@@ -78,8 +80,8 @@ class ContextSpan:
         context_manager: The context manager that is used to manage the span's lifecycle.
     """
 
-    span: Span
-    context_manager: AbstractContextManager[Span]
+    span: "Span"
+    context_manager: "AbstractContextManager[Span]"
 
 
 class DBOSContext:
@@ -217,19 +219,21 @@ class DBOSContext:
 
     """ Return the current DBOS span if any. It must be a span created by DBOS."""
 
-    def get_current_dbos_span(self) -> Optional[Span]:
+    def get_current_dbos_span(self) -> "Optional[Span]":
         if len(self.context_spans) > 0:
             return self.context_spans[-1].span
         return None
 
     """ Return the current active span if any. It might not be a DBOS span."""
 
-    def get_current_active_span(self) -> Optional[Span]:
+    def get_current_active_span(self) -> "Optional[Span]":
         return dbos_tracer.get_current_span()
 
     def _start_span(self, attributes: TracedAttributes) -> None:
         if dbos_tracer.disable_otlp:
             return
+        from opentelemetry.trace import use_span
+
         attributes["operationUUID"] = (
             self.workflow_id if len(self.workflow_id) > 0 else None
         )
@@ -257,6 +261,8 @@ class DBOSContext:
     def _end_span(self, exc_value: Optional[BaseException]) -> None:
         if dbos_tracer.disable_otlp:
             return
+        from opentelemetry.trace import Status, StatusCode
+
         context_span = self.context_spans.pop()
         if exc_value is None:
             context_span.span.set_status(Status(StatusCode.OK))
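Moving the OpenTelemetry imports under `TYPE_CHECKING` (for annotations, now written as string literals) and into method bodies behind the `disable_otlp` check means the module no longer imports `opentelemetry` at load time. A generic sketch of this lazy-import pattern, not the exact DBOS code:

```python
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Seen only by type checkers; never executed at runtime.
    from opentelemetry.trace import Span


def describe_current_span(disable_otlp: bool) -> "Span | None":
    if disable_otlp:
        return None
    # Imported only on the enabled path, so the dependency
    # is loaded lazily and only when tracing is actually used.
    from opentelemetry.trace import get_current_span

    return get_current_span()
```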
dbos/_core.py
CHANGED
@@ -14,6 +14,7 @@ from typing import (
     Coroutine,
     Generic,
     Optional,
+    ParamSpec,
     TypeVar,
     Union,
     cast,
@@ -22,14 +23,8 @@
 from dbos._outcome import Immediate, NoResult, Outcome, Pending
 from dbos._utils import GlobalParams, retriable_postgres_exception
 
-from ._app_db import ApplicationDatabase, TransactionResultInternal
-
-if sys.version_info < (3, 10):
-    from typing_extensions import ParamSpec
-else:
-    from typing import ParamSpec
-
 from . import _serialization
+from ._app_db import ApplicationDatabase, TransactionResultInternal
 from ._context import (
     DBOSAssumeRole,
     DBOSContext,
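With the Python < 3.10 fallback gone, `ParamSpec` is imported directly from `typing`, the standard way to type decorators that preserve a wrapped function's signature. A self-contained example of the idiom (the decorator itself is illustrative, not DBOS code):

```python
from typing import Callable, ParamSpec, TypeVar

P = ParamSpec("P")
R = TypeVar("R")


def log_calls(func: Callable[P, R]) -> Callable[P, R]:
    # The wrapper exposes exactly the same parameters as `func`,
    # so type checkers keep full signature information.
    def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
        print(f"calling {func.__name__}")
        return func(*args, **kwargs)

    return wrapper


@log_calls
def add(x: int, y: int) -> int:
    return x + y
```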
dbos/_dbos.py
CHANGED
@@ -28,9 +28,6 @@ from typing import (
     Union,
 )
 
-from opentelemetry.trace import Span
-from rich import print
-
 from dbos._conductor.conductor import ConductorWebsocket
 from dbos._debouncer import debouncer_workflow
 from dbos._sys_db import SystemDatabase, WorkflowStatus
@@ -53,7 +50,6 @@ from ._core import (
     set_event,
     start_workflow,
     start_workflow_async,
-    workflow_wrapper,
 )
 from ._queue import Queue, queue_thread
 from ._recovery import recover_pending_workflows, startup_recovery_thread
@@ -62,8 +58,6 @@ from ._registrations import (
     DBOSClassInfo,
     _class_fqn,
     get_or_create_class_info,
-    set_dbos_func_name,
-    set_temp_workflow_type,
 )
 from ._roles import default_required_roles, required_roles
 from ._scheduler import ScheduledWorkflow, scheduled
@@ -80,13 +74,11 @@ if TYPE_CHECKING:
     from fastapi import FastAPI
     from ._kafka import _KafkaConsumerWorkflow
     from flask import Flask
+    from opentelemetry.trace import Span
 
-    from
+    from typing import ParamSpec
 
-
-        from typing_extensions import ParamSpec
-    else:
-        from typing import ParamSpec
+    from sqlalchemy.orm import Session
 
 from ._admin_server import AdminServer
 from ._app_db import ApplicationDatabase
@@ -558,7 +550,7 @@
             f"https://console.dbos.dev/self-host?appname={app_name}"
         )
         print(
-            f"
+            f"To view and manage workflows, connect to DBOS Conductor at:{conductor_registration_url}"
         )
 
         # Flush handlers and add OTLP to all loggers if enabled
@@ -1297,7 +1289,7 @@
         return ctx.parent_workflow_id
 
     @classproperty
-    def span(cls) -> Span:
+    def span(cls) -> "Span":
         """Return the tracing `Span` associated with the current context."""
         ctx = assert_current_dbos_context()
         span = ctx.get_current_active_span()
dbos/_dbos_config.py
CHANGED
@@ -5,8 +5,6 @@ from importlib import resources
 from typing import Any, Dict, List, Optional, TypedDict, cast
 
 import yaml
-from jsonschema import ValidationError, validate
-from rich import print
 from sqlalchemy import make_url
 
 from ._error import DBOSInitializationError
@@ -36,7 +34,7 @@ class DBOSConfig(TypedDict, total=False):
         otlp_attributes (dict[str, str]): A set of custom attributes to apply OTLP-exported logs and traces
         application_version (str): Application version
         executor_id (str): Executor ID, used to identify the application instance in distributed environments
-
+        enable_otlp (bool): If True, enable built-in DBOS OTLP tracing and logging.
     """
 
     name: str
@@ -54,7 +52,7 @@ class DBOSConfig(TypedDict, total=False):
     otlp_attributes: Optional[dict[str, str]]
    application_version: Optional[str]
    executor_id: Optional[str]
-
+    enable_otlp: Optional[bool]
 
 
 class RuntimeConfig(TypedDict, total=False):
@@ -97,7 +95,7 @@ class TelemetryConfig(TypedDict, total=False):
     logs: Optional[LoggerConfig]
     OTLPExporter: Optional[OTLPExporterConfig]
     otlp_attributes: Optional[dict[str, str]]
-    disable_otlp:
+    disable_otlp: bool
 
 
 class ConfigFile(TypedDict, total=False):
@@ -165,10 +163,12 @@ def translate_dbos_config_to_config_file(config: DBOSConfig) -> ConfigFile:
     ]
 
     # Telemetry config
+    enable_otlp = config.get("enable_otlp", None)
+    disable_otlp = True if enable_otlp is None else not enable_otlp
     telemetry: TelemetryConfig = {
         "OTLPExporter": {"tracesEndpoint": [], "logsEndpoint": []},
         "otlp_attributes": config.get("otlp_attributes", {}),
-        "disable_otlp":
+        "disable_otlp": disable_otlp,
     }
     # For mypy
     assert telemetry["OTLPExporter"] is not None
@@ -265,17 +265,6 @@ def load_config(
     )
     data = cast(Dict[str, Any], data)
 
-    # Load the JSON schema relative to the package root
-    schema_file = resources.files("dbos").joinpath("dbos-config.schema.json")
-    with schema_file.open("r") as f:
-        schema = json.load(f)
-
-    # Validate the data against the schema
-    try:
-        validate(instance=data, schema=schema)
-    except ValidationError as e:
-        raise DBOSInitializationError(f"Validation error: {e}")
-
     # Special case: convert logsEndpoint and tracesEndpoint from strings to lists of strings, if present
     if "telemetry" in data and "OTLPExporter" in data["telemetry"]:
         if "logsEndpoint" in data["telemetry"]["OTLPExporter"]:
@@ -441,17 +430,13 @@ def process_config(
         printable_sys_db_url = make_url(data["system_database_url"]).render_as_string(
             hide_password=True
         )
-        print(
-            f"[bold blue]DBOS system database URL: {printable_sys_db_url}[/bold blue]"
-        )
+        print(f"DBOS system database URL: {printable_sys_db_url}")
     if data["database_url"].startswith("sqlite"):
         print(
-            f"
+            f"Using SQLite as a system database. The SQLite system database is for development and testing. PostgreSQL is recommended for production use."
         )
     else:
-        print(
-            f"[bold blue]Database engine parameters: {data['database']['db_engine_kwargs']}[/bold blue]"
-        )
+        print(f"Database engine parameters: {data['database']['db_engine_kwargs']}")
 
     # Return data as ConfigFile type
     return data
@@ -563,12 +548,15 @@ def overwrite_config(provided_config: ConfigFile) -> ConfigFile:
     if "telemetry" not in provided_config or provided_config["telemetry"] is None:
         provided_config["telemetry"] = {
             "OTLPExporter": {"tracesEndpoint": [], "logsEndpoint": []},
+            "disable_otlp": False,
         }
-
-    provided_config["telemetry"]["
-
-    "
-
+    else:
+        provided_config["telemetry"]["disable_otlp"] = False
+    if "OTLPExporter" not in provided_config["telemetry"]:
+        provided_config["telemetry"]["OTLPExporter"] = {
+            "tracesEndpoint": [],
+            "logsEndpoint": [],
+        }
 
     # This is a super messy from a typing perspective.
     # Some of ConfigFile keys are optional -- but in practice they'll always be present in hosted environments
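The public `enable_otlp` flag is inverted into the internal `disable_otlp` field, with OTLP off by default when the flag is unset. The translation distilled into a standalone function (the function name is illustrative):

```python
from typing import Optional


def to_disable_otlp(enable_otlp: Optional[bool]) -> bool:
    # Unset means disabled (the new default); otherwise invert the flag.
    return True if enable_otlp is None else not enable_otlp


assert to_disable_otlp(None) is True    # not configured: OTLP stays off
assert to_disable_otlp(False) is True   # explicitly disabled
assert to_disable_otlp(True) is False   # explicitly enabled
```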
dbos/_debouncer.py
CHANGED
@@ -1,6 +1,5 @@
 import asyncio
 import math
-import sys
 import time
 import types
 import uuid
@@ -12,17 +11,12 @@ from typing import (
     Dict,
     Generic,
     Optional,
+    ParamSpec,
     Tuple,
     TypedDict,
     TypeVar,
-    Union,
 )
 
-if sys.version_info < (3, 10):
-    from typing_extensions import ParamSpec
-else:
-    from typing import ParamSpec
-
 from dbos._client import (
     DBOSClient,
     EnqueueOptions,
dbos/_debug.py
CHANGED
@@ -4,8 +4,6 @@ import sys
 from pathlib import Path
 from typing import Union
 
-from fastapi_cli.discover import get_module_data_from_path
-
 from dbos import DBOS
 
 
@@ -34,12 +32,6 @@ def debug_workflow(workflow_id: str, entrypoint: Union[str, PythonModule]) -> None:
 
 
 def parse_start_command(command: str) -> Union[str, PythonModule]:
-    match = re.match(r"fastapi\s+run\s+(\.?[\w/]+\.py)", command)
-    if match:
-        # Mirror the logic in fastapi's run command by converting the path argument to a module
-        mod_data = get_module_data_from_path(Path(match.group(1)))
-        sys.path.insert(0, str(mod_data.extra_sys_path))
-        return PythonModule(mod_data.module_import_str)
     match = re.match(r"python3?\s+(\.?[\w/]+\.py)", command)
     if match:
         return match.group(1)
dbos/_docker_pg_helper.py
CHANGED
@@ -1,11 +1,10 @@
+import json
 import logging
 import os
 import subprocess
 import time
 
-import docker
 import psycopg
-from docker.errors import APIError, NotFound
 
 logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s")
 from typing import Any, Dict, Optional, Tuple
@@ -86,48 +85,71 @@ def start_docker_postgres(pool_config: Dict[str, Any]) -> bool:
     image_name = "pgvector/pgvector:pg16"
 
     try:
-        client = docker.from_env()
-
         # Check if the container already exists
         try:
-
-
-
-
-
-
-
-
-
-
-
-
+            result = subprocess.run(
+                f"docker inspect {container_name}",
+                shell=True,
+                text=True,
+                capture_output=True,
+            )
+
+            if result.returncode == 0:
+                # Container exists, check its status
+                container_info = json.loads(result.stdout)
+                status = container_info[0]["State"]["Status"]
+
+                if status == "running":
+                    logging.info(f"Container '{container_name}' is already running.")
+                    return True
+                elif status == "exited":
+                    subprocess.run(
+                        f"docker start {container_name}", shell=True, check=True
+                    )
+                    logging.info(
+                        f"Container '{container_name}' was stopped and has been restarted."
+                    )
+                    return True
+        except (
+            subprocess.CalledProcessError,
+            json.JSONDecodeError,
+            KeyError,
+            IndexError,
+        ):
+            # Container doesn't exist or error parsing, proceed with creation
            pass
 
-        #
-
-
+        # Check if the image exists locally
+        result = subprocess.run(
+            f"docker images -q {image_name}", shell=True, text=True, capture_output=True
+        )
+
+        if not result.stdout.strip():
            logging.info(f"Pulling Docker image {image_name}...")
-
+            subprocess.run(f"docker pull {image_name}", shell=True, check=True)
 
        # Create and start the container
-
-
-
-
-
-
-
-        },
-
-
-
+        cmd = [
+            "docker run",
+            "-d",
+            f"--name {container_name}",
+            f"-e POSTGRES_PASSWORD={pool_config['password']}",
+            f"-e PGDATA={pg_data}",
+            f"-p {pool_config['port']}:5432",
+            f"-v {pg_data}:{pg_data}",
+            "--rm",
+            image_name,
+        ]
+
+        result = subprocess.run(
+            " ".join(cmd), shell=True, text=True, capture_output=True, check=True
        )
 
-
+        container_id = result.stdout.strip()
+        logging.info(f"Created container: {container_id}")
 
-    except
-        raise Exception(f"Docker
+    except subprocess.CalledProcessError as e:
+        raise Exception(f"Docker command error: {e.stderr if e.stderr else str(e)}")
 
     # Wait for PostgreSQL to be ready
     attempts = 30
@@ -148,15 +170,16 @@ def start_docker_postgres(pool_config: Dict[str, Any]) -> bool:
 
 def check_docker_installed() -> bool:
     """
-    Check if Docker is installed and running using the
+    Check if Docker is installed and running using the Docker CLI.
 
     Returns:
         bool: True if Docker is installed and running, False otherwise.
     """
     try:
-
-
-
+        result = subprocess.run(
+            "docker version --format json", shell=True, capture_output=True, text=True
+        )
+        return result.returncode == 0
     except Exception:
         return False
 
@@ -176,22 +199,41 @@ def stop_docker_pg() -> None:
     try:
         logger.info(f"Stopping Docker Postgres container {container_name}...")
 
-
-
-
-
-
-
-
-            logger.info(
-                f"Successfully stopped Docker Postgres container {container_name}."
-            )
-        else:
-            logger.info(f"Container {container_name} exists but is not running.")
+        # Check if container exists
+        result = subprocess.run(
+            f"docker inspect {container_name}",
+            shell=True,
+            text=True,
+            capture_output=True,
+        )
 
-
+        if result.returncode == 0:
+            # Container exists, check its status
+            try:
+                container_info = json.loads(result.stdout)
+                status = container_info[0]["State"]["Status"]
+
+                if status == "running":
+                    subprocess.run(
+                        f"docker stop {container_name}", shell=True, check=True
+                    )
+                    logger.info(
+                        f"Successfully stopped Docker Postgres container {container_name}."
+                    )
+                else:
+                    logger.info(
+                        f"Container {container_name} exists but is not running."
+                    )
+            except (json.JSONDecodeError, KeyError, IndexError) as e:
+                logger.error(f"Error parsing container info: {e}")
+                raise
+        else:
             logger.info(f"Container {container_name} does not exist.")
 
+    except subprocess.CalledProcessError as error:
+        error_message = error.stderr if error.stderr else str(error)
+        logger.error(f"Failed to stop Docker Postgres container: {error_message}")
+        raise
     except Exception as error:
         error_message = str(error)
         logger.error(f"Failed to stop Docker Postgres container: {error_message}")
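The helper now drives Docker through the CLI with `subprocess` and parses `docker inspect` output as JSON instead of going through the `docker` Python SDK, dropping that dependency entirely. A standalone sketch of the status check (the function name is illustrative):

```python
import json
import subprocess
from typing import Optional


def container_status(name: str) -> Optional[str]:
    """Return a container's state ("running", "exited", ...) or None if absent."""
    result = subprocess.run(
        ["docker", "inspect", name], text=True, capture_output=True
    )
    if result.returncode != 0:
        # docker inspect exits nonzero when the container does not exist.
        return None
    try:
        return str(json.loads(result.stdout)[0]["State"]["Status"])
    except (json.JSONDecodeError, KeyError, IndexError):
        return None
```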
dbos/_fastapi.py
CHANGED
@@ -83,6 +83,10 @@ def setup_fastapi_middleware(app: FastAPI, dbos: DBOS) -> None:
             response = await call_next(request)
         else:
             response = await call_next(request)
-        if
+        if (
+            dbos._config["telemetry"]
+            and not dbos._config["telemetry"]["disable_otlp"]
+            and hasattr(response, "status_code")
+        ):
             DBOS.span.set_attribute("responseCode", response.status_code)
         return response
dbos/_logger.py
CHANGED
@@ -2,14 +2,6 @@ import logging
 import os
 from typing import TYPE_CHECKING, Any
 
-from opentelemetry._logs import set_logger_provider
-from opentelemetry.exporter.otlp.proto.http._log_exporter import OTLPLogExporter
-from opentelemetry.sdk._logs import LoggerProvider, LoggingHandler
-from opentelemetry.sdk._logs.export import BatchLogRecordProcessor
-from opentelemetry.sdk.resources import Resource
-from opentelemetry.semconv.resource import ResourceAttributes
-from opentelemetry.trace.span import format_trace_id
-
 from dbos._utils import GlobalParams
 
 if TYPE_CHECKING:
@@ -24,6 +16,7 @@ class DBOSLogTransformer(logging.Filter):
         super().__init__()
         self.app_id = os.environ.get("DBOS__APPID", "")
         self.otlp_attributes: dict[str, str] = config.get("telemetry", {}).get("otlp_attributes", {})  # type: ignore
+        self.disable_otlp = config.get("telemetry", {}).get("disable_otlp", True)  # type: ignore
 
     def filter(self, record: Any) -> bool:
         record.applicationID = self.app_id
@@ -39,19 +32,15 @@ class DBOSLogTransformer(logging.Filter):
         if ctx:
             if ctx.is_within_workflow():
                 record.operationUUID = ctx.workflow_id
-
-
-                trace_id = format_trace_id(span.get_span_context().trace_id)
-                record.traceId = trace_id
-
-        return True
+                if not self.disable_otlp:
+                    from opentelemetry.trace.span import format_trace_id
 
+                    span = ctx.get_current_active_span()
+                    if span:
+                        trace_id = format_trace_id(span.get_span_context().trace_id)
+                        record.traceId = trace_id
 
-
-# Reduce the force flush timeout
-class PatchedOTLPLoggerProvider(LoggerProvider):
-    def force_flush(self, timeout_millis: int = 5000) -> bool:
-        return super().force_flush(timeout_millis)
+        return True
 
 
 def init_logger() -> None:
@@ -80,10 +69,18 @@ def config_logger(config: "ConfigFile") -> None:
     disable_otlp = config.get("telemetry", {}).get("disable_otlp", False)  # type: ignore
 
     if not disable_otlp and otlp_logs_endpoints:
-
+
+        from opentelemetry._logs import set_logger_provider
+        from opentelemetry.exporter.otlp.proto.http._log_exporter import OTLPLogExporter
+        from opentelemetry.sdk._logs import LoggerProvider, LoggingHandler
+        from opentelemetry.sdk._logs.export import BatchLogRecordProcessor
+        from opentelemetry.sdk.resources import Resource
+        from opentelemetry.semconv.attributes.service_attributes import SERVICE_NAME
+
+        log_provider = LoggerProvider(
             Resource.create(
                 attributes={
-
+                    SERVICE_NAME: config["name"],
                 }
             )
         )
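As in the tracer and context modules, the OTLP logging imports now live inside `config_logger`, behind the `disable_otlp` and endpoint checks, so the `opentelemetry` SDK is only imported when log export is actually configured. A minimal sketch of the guard-then-import shape (the function is hypothetical, not the DBOS API):

```python
def maybe_enable_otlp_logging(disable_otlp: bool, endpoints: list[str]) -> None:
    # Nothing OTLP-related is imported unless export is enabled
    # and at least one logs endpoint is configured.
    if disable_otlp or not endpoints:
        return

    from opentelemetry._logs import set_logger_provider
    from opentelemetry.sdk._logs import LoggerProvider

    set_logger_provider(LoggerProvider())
```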