dbos 1.14.0a9.tar.gz → 1.15.0a1.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dbos might be problematic.

Files changed (121)
  1. {dbos-1.14.0a9 → dbos-1.15.0a1}/PKG-INFO +8 -16
  2. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_client.py +13 -14
  3. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_context.py +12 -6
  4. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_core.py +2 -7
  5. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_dbos.py +5 -13
  6. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_dbos_config.py +17 -29
  7. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_debouncer.py +1 -7
  8. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_debug.py +0 -8
  9. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_docker_pg_helper.py +93 -51
  10. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_fastapi.py +5 -1
  11. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_logger.py +18 -21
  12. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_migration.py +4 -41
  13. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_serialization.py +19 -30
  14. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_sys_db_postgres.py +2 -9
  15. dbos-1.15.0a1/dbos/_templates/dbos-db-starter/migrations/create_table.py.dbos +34 -0
  16. dbos-1.15.0a1/dbos/_tracer.py +99 -0
  17. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/cli/_github_init.py +22 -16
  18. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/cli/_template_init.py +5 -16
  19. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/cli/cli.py +20 -28
  20. {dbos-1.14.0a9 → dbos-1.15.0a1}/pyproject.toml +13 -16
  21. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/conftest.py +1 -0
  22. dbos-1.15.0a1/tests/script_without_fastapi.py +25 -0
  23. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/test_async.py +0 -85
  24. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/test_config.py +2 -19
  25. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/test_dbos.py +8 -32
  26. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/test_debug.py +0 -7
  27. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/test_docker_secrets.py +0 -20
  28. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/test_failures.py +5 -4
  29. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/test_fastapi_roles.py +1 -125
  30. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/test_package.py +13 -1
  31. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/test_schema_migration.py +1 -55
  32. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/test_spans.py +1 -1
  33. dbos-1.14.0a9/dbos/_alembic_migrations/env.py +0 -62
  34. dbos-1.14.0a9/dbos/_alembic_migrations/script.py.mako +0 -26
  35. dbos-1.14.0a9/dbos/_alembic_migrations/versions/01ce9f07bd10_streaming.py +0 -42
  36. dbos-1.14.0a9/dbos/_alembic_migrations/versions/04ca4f231047_workflow_queues_executor_id.py +0 -34
  37. dbos-1.14.0a9/dbos/_alembic_migrations/versions/27ac6900c6ad_add_queue_dedup.py +0 -45
  38. dbos-1.14.0a9/dbos/_alembic_migrations/versions/471b60d64126_dbos_migrations.py +0 -35
  39. dbos-1.14.0a9/dbos/_alembic_migrations/versions/50f3227f0b4b_fix_job_queue.py +0 -35
  40. dbos-1.14.0a9/dbos/_alembic_migrations/versions/5c361fc04708_added_system_tables.py +0 -193
  41. dbos-1.14.0a9/dbos/_alembic_migrations/versions/66478e1b95e5_consolidate_queues.py +0 -71
  42. dbos-1.14.0a9/dbos/_alembic_migrations/versions/83f3732ae8e7_workflow_timeout.py +0 -44
  43. dbos-1.14.0a9/dbos/_alembic_migrations/versions/933e86bdac6a_add_queue_priority.py +0 -35
  44. dbos-1.14.0a9/dbos/_alembic_migrations/versions/a3b18ad34abe_added_triggers.py +0 -72
  45. dbos-1.14.0a9/dbos/_alembic_migrations/versions/d76646551a6b_job_queue_limiter.py +0 -43
  46. dbos-1.14.0a9/dbos/_alembic_migrations/versions/d76646551a6c_workflow_queue.py +0 -28
  47. dbos-1.14.0a9/dbos/_alembic_migrations/versions/d994145b47b6_consolidate_inputs.py +0 -30
  48. dbos-1.14.0a9/dbos/_alembic_migrations/versions/eab0cc1d9a14_job_queue.py +0 -56
  49. dbos-1.14.0a9/dbos/_alembic_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py +0 -46
  50. dbos-1.14.0a9/dbos/_templates/dbos-db-starter/alembic.ini +0 -116
  51. dbos-1.14.0a9/dbos/_templates/dbos-db-starter/migrations/env.py.dbos +0 -85
  52. dbos-1.14.0a9/dbos/_templates/dbos-db-starter/migrations/script.py.mako +0 -26
  53. dbos-1.14.0a9/dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py +0 -35
  54. dbos-1.14.0a9/dbos/_tracer.py +0 -88
  55. {dbos-1.14.0a9 → dbos-1.15.0a1}/LICENSE +0 -0
  56. {dbos-1.14.0a9 → dbos-1.15.0a1}/README.md +0 -0
  57. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/__init__.py +0 -0
  58. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/__main__.py +0 -0
  59. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_admin_server.py +0 -0
  60. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_app_db.py +0 -0
  61. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_classproperty.py +0 -0
  62. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_conductor/conductor.py +0 -0
  63. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_conductor/protocol.py +0 -0
  64. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_croniter.py +0 -0
  65. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_error.py +0 -0
  66. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_event_loop.py +0 -0
  67. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_flask.py +0 -0
  68. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_kafka.py +0 -0
  69. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_kafka_message.py +0 -0
  70. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_outcome.py +0 -0
  71. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_queue.py +0 -0
  72. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_recovery.py +0 -0
  73. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_registrations.py +0 -0
  74. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_roles.py +0 -0
  75. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_scheduler.py +0 -0
  76. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_schemas/__init__.py +0 -0
  77. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_schemas/application_database.py +0 -0
  78. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_schemas/system_database.py +0 -0
  79. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_sys_db.py +0 -0
  80. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_sys_db_sqlite.py +0 -0
  81. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_templates/dbos-db-starter/README.md +0 -0
  82. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_templates/dbos-db-starter/__package/__init__.py +0 -0
  83. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_templates/dbos-db-starter/__package/main.py.dbos +0 -0
  84. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_templates/dbos-db-starter/__package/schema.py +0 -0
  85. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +0 -0
  86. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -0
  87. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_utils.py +0 -0
  88. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_workflow_commands.py +0 -0
  89. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/cli/migration.py +0 -0
  90. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/dbos-config.schema.json +0 -0
  91. {dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/py.typed +0 -0
  92. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/__init__.py +0 -0
  93. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/atexit_no_ctor.py +0 -0
  94. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/atexit_no_launch.py +0 -0
  95. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/classdefs.py +0 -0
  96. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/client_collateral.py +0 -0
  97. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/client_worker.py +0 -0
  98. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/dupname_classdefs1.py +0 -0
  99. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/dupname_classdefsa.py +0 -0
  100. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/more_classdefs.py +0 -0
  101. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/queuedworkflow.py +0 -0
  102. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/test_admin_server.py +0 -0
  103. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/test_async_workflow_management.py +0 -0
  104. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/test_classdecorators.py +0 -0
  105. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/test_cli.py +0 -0
  106. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/test_client.py +0 -0
  107. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/test_concurrency.py +0 -0
  108. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/test_croniter.py +0 -0
  109. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/test_debouncer.py +0 -0
  110. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/test_fastapi.py +0 -0
  111. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/test_flask.py +0 -0
  112. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/test_kafka.py +0 -0
  113. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/test_outcome.py +0 -0
  114. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/test_queue.py +0 -0
  115. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/test_scheduler.py +0 -0
  116. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/test_singleton.py +0 -0
  117. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/test_sqlalchemy.py +0 -0
  118. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/test_streaming.py +0 -0
  119. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/test_workflow_introspection.py +0 -0
  120. {dbos-1.14.0a9 → dbos-1.15.0a1}/tests/test_workflow_management.py +0 -0
  121. {dbos-1.14.0a9 → dbos-1.15.0a1}/version/__init__.py +0 -0
{dbos-1.14.0a9 → dbos-1.15.0a1}/PKG-INFO

@@ -1,28 +1,20 @@
  Metadata-Version: 2.1
  Name: dbos
- Version: 1.14.0a9
+ Version: 1.15.0a1
  Summary: Ultra-lightweight durable execution in Python
  Author-Email: "DBOS, Inc." <contact@dbos.dev>
  License: MIT
- Requires-Python: >=3.9
+ Requires-Python: >=3.10
  Requires-Dist: pyyaml>=6.0.2
- Requires-Dist: jsonschema>=4.23.0
- Requires-Dist: alembic>=1.13.3
- Requires-Dist: typing-extensions>=4.12.2; python_version < "3.10"
- Requires-Dist: typer>=0.12.5
- Requires-Dist: jsonpickle>=3.3.0
- Requires-Dist: opentelemetry-api>=1.27.0
- Requires-Dist: opentelemetry-sdk>=1.27.0
- Requires-Dist: opentelemetry-exporter-otlp-proto-http>=1.27.0
  Requires-Dist: python-dateutil>=2.9.0.post0
- Requires-Dist: fastapi[standard]>=0.115.2
- Requires-Dist: tomlkit>=0.13.2
  Requires-Dist: psycopg[binary]>=3.1
- Requires-Dist: docker>=7.1.0
- Requires-Dist: cryptography>=43.0.3
- Requires-Dist: rich>=13.9.4
- Requires-Dist: pyjwt>=2.10.1
  Requires-Dist: websockets>=14.0
+ Requires-Dist: typer-slim>=0.17.4
+ Requires-Dist: sqlalchemy>=2.0.43
+ Provides-Extra: otel
+ Requires-Dist: opentelemetry-api>=1.37.0; extra == "otel"
+ Requires-Dist: opentelemetry-sdk>=1.37.0; extra == "otel"
+ Requires-Dist: opentelemetry-exporter-otlp-proto-http>=1.37.0; extra == "otel"
  Description-Content-Type: text/markdown
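Note: the OpenTelemetry packages move from core requirements into the new optional `otel` extra, so a plain `pip install dbos` no longer pulls them in; they can be restored with the standard extras syntax, `pip install "dbos[otel]"`. The removal of `jsonschema`, `alembic`, `rich`, `docker`, `fastapi`, and the `typing-extensions` backport is what the per-file hunks below implement.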
{dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_client.py

@@ -1,5 +1,4 @@
  import asyncio
- import sys
  import time
  import uuid
  from typing import (
@@ -15,17 +14,11 @@ from typing import (
      Union,
  )

+ from dbos import _serialization
  from dbos._app_db import ApplicationDatabase
  from dbos._context import MaxPriority, MinPriority
  from dbos._sys_db import SystemDatabase

- if sys.version_info < (3, 11):
-     from typing_extensions import NotRequired
- else:
-     from typing import NotRequired
-
- from dbos import _serialization
-
  if TYPE_CHECKING:
      from dbos._dbos import WorkflowHandle, WorkflowHandleAsync

@@ -58,14 +51,20 @@ from dbos._workflow_commands import (
  R = TypeVar("R", covariant=True)  # A generic type for workflow return values


- class EnqueueOptions(TypedDict):
+ # Required EnqueueOptions fields
+ class _EnqueueOptionsRequired(TypedDict):
      workflow_name: str
      queue_name: str
-     workflow_id: NotRequired[str]
-     app_version: NotRequired[str]
-     workflow_timeout: NotRequired[float]
-     deduplication_id: NotRequired[str]
-     priority: NotRequired[int]
+
+
+ # Optional EnqueueOptions fields
+ class EnqueueOptions(_EnqueueOptionsRequired, total=False):
+     workflow_id: str
+     app_version: str
+     workflow_timeout: float
+     deduplication_id: str
+     priority: int
+     max_recovery_attempts: int


  def validate_enqueue_options(options: EnqueueOptions) -> None:
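The two-class split above is the usual way to mix required and optional keys in a TypedDict without `NotRequired`, which only joined `typing` in Python 3.11 (this release targets 3.10): required keys live in a base class, optional keys in a `total=False` subclass. A minimal sketch of the pattern, using hypothetical names:

    from typing import TypedDict

    class _JobRequired(TypedDict):
        name: str  # required: must be present in every Job

    class Job(_JobRequired, total=False):
        priority: int  # optional: may be omitted

    ok: Job = {"name": "a"}                      # valid: optional key omitted
    also_ok: Job = {"name": "a", "priority": 3}  # valid: optional key present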
{dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_context.py

@@ -8,9 +8,11 @@ from contextvars import ContextVar
  from dataclasses import dataclass
  from enum import Enum
  from types import TracebackType
- from typing import List, Literal, Optional, Type, TypedDict
+ from typing import TYPE_CHECKING, List, Literal, Optional, Type, TypedDict
+
+ if TYPE_CHECKING:
+     from opentelemetry.trace import Span

- from opentelemetry.trace import Span, Status, StatusCode, use_span
  from sqlalchemy.orm import Session

  from dbos._utils import GlobalParams
@@ -78,8 +80,8 @@ class ContextSpan:
      context_manager: The context manager that is used to manage the span's lifecycle.
      """

-     span: Span
-     context_manager: AbstractContextManager[Span]
+     span: "Span"
+     context_manager: "AbstractContextManager[Span]"


  class DBOSContext:
@@ -217,19 +219,21 @@ class DBOSContext:

      """ Return the current DBOS span if any. It must be a span created by DBOS."""

-     def get_current_dbos_span(self) -> Optional[Span]:
+     def get_current_dbos_span(self) -> "Optional[Span]":
          if len(self.context_spans) > 0:
              return self.context_spans[-1].span
          return None

      """ Return the current active span if any. It might not be a DBOS span."""

-     def get_current_active_span(self) -> Optional[Span]:
+     def get_current_active_span(self) -> "Optional[Span]":
          return dbos_tracer.get_current_span()

      def _start_span(self, attributes: TracedAttributes) -> None:
          if dbos_tracer.disable_otlp:
              return
+         from opentelemetry.trace import use_span
+
          attributes["operationUUID"] = (
              self.workflow_id if len(self.workflow_id) > 0 else None
          )
@@ -257,6 +261,8 @@
      def _end_span(self, exc_value: Optional[BaseException]) -> None:
          if dbos_tracer.disable_otlp:
              return
+         from opentelemetry.trace import Status, StatusCode
+
          context_span = self.context_spans.pop()
          if exc_value is None:
              context_span.span.set_status(Status(StatusCode.OK))
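These hunks all apply the same optional-dependency idiom that makes the new `otel` extra possible: module-level `opentelemetry` imports move under `TYPE_CHECKING` (with annotations quoted), and runtime imports are deferred into the functions that need them, behind the `disable_otlp` guard. A minimal sketch of the idiom, with hypothetical names:

    from typing import TYPE_CHECKING, Optional

    if TYPE_CHECKING:
        # Visible to type checkers only; never imported at runtime.
        from opentelemetry.trace import Span

    otlp_disabled = True  # hypothetical stand-in for dbos_tracer.disable_otlp

    def current_span() -> "Optional[Span]":
        if otlp_disabled:
            return None
        # Deferred import: executes only when OTLP is enabled,
        # so the base install never needs opentelemetry.
        from opentelemetry import trace
        return trace.get_current_span()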
{dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_core.py

@@ -14,6 +14,7 @@ from typing import (
      Coroutine,
      Generic,
      Optional,
+     ParamSpec,
      TypeVar,
      Union,
      cast,
@@ -22,14 +23,8 @@ from typing import (
  from dbos._outcome import Immediate, NoResult, Outcome, Pending
  from dbos._utils import GlobalParams, retriable_postgres_exception

- from ._app_db import ApplicationDatabase, TransactionResultInternal
-
- if sys.version_info < (3, 10):
-     from typing_extensions import ParamSpec
- else:
-     from typing import ParamSpec
-
  from . import _serialization
+ from ._app_db import ApplicationDatabase, TransactionResultInternal
  from ._context import (
      DBOSAssumeRole,
      DBOSContext,
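The same `sys.version_info` guard also disappears from _dbos.py and _debouncer.py below: with Requires-Python now >=3.10, `ParamSpec` can be imported from `typing` unconditionally. For reference, a minimal sketch of the kind of signature-preserving decorator `ParamSpec` enables:

    from typing import Callable, ParamSpec, TypeVar

    P = ParamSpec("P")
    R = TypeVar("R")

    def logged(func: Callable[P, R]) -> Callable[P, R]:
        # Preserves the wrapped function's full signature for type checkers.
        def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
            print(f"calling {func.__name__}")
            return func(*args, **kwargs)
        return wrapper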
{dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_dbos.py

@@ -28,9 +28,6 @@ from typing import (
      Union,
  )

- from opentelemetry.trace import Span
- from rich import print
-
  from dbos._conductor.conductor import ConductorWebsocket
  from dbos._debouncer import debouncer_workflow
  from dbos._sys_db import SystemDatabase, WorkflowStatus
@@ -53,7 +50,6 @@ from ._core import (
      set_event,
      start_workflow,
      start_workflow_async,
-     workflow_wrapper,
  )
  from ._queue import Queue, queue_thread
  from ._recovery import recover_pending_workflows, startup_recovery_thread
@@ -62,8 +58,6 @@ from ._registrations import (
      DBOSClassInfo,
      _class_fqn,
      get_or_create_class_info,
-     set_dbos_func_name,
-     set_temp_workflow_type,
  )
  from ._roles import default_required_roles, required_roles
  from ._scheduler import ScheduledWorkflow, scheduled
@@ -80,13 +74,11 @@ if TYPE_CHECKING:
      from fastapi import FastAPI
      from ._kafka import _KafkaConsumerWorkflow
      from flask import Flask
+     from opentelemetry.trace import Span

-     from sqlalchemy.orm import Session
+     from typing import ParamSpec

-     if sys.version_info < (3, 10):
-         from typing_extensions import ParamSpec
-     else:
-         from typing import ParamSpec
+     from sqlalchemy.orm import Session

      from ._admin_server import AdminServer
      from ._app_db import ApplicationDatabase
@@ -558,7 +550,7 @@ class DBOS:
                  f"https://console.dbos.dev/self-host?appname={app_name}"
              )
              print(
-                 f"[bold]To view and manage workflows, connect to DBOS Conductor at:[/bold] [bold blue]{conductor_registration_url}[/bold blue]"
+                 f"To view and manage workflows, connect to DBOS Conductor at:{conductor_registration_url}"
              )

          # Flush handlers and add OTLP to all loggers if enabled
@@ -1297,7 +1289,7 @@ class DBOS:
          return ctx.parent_workflow_id

      @classproperty
-     def span(cls) -> Span:
+     def span(cls) -> "Span":
          """Return the tracing `Span` associated with the current context."""
          ctx = assert_current_dbos_context()
          span = ctx.get_current_active_span()
{dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_dbos_config.py

@@ -5,8 +5,6 @@ from importlib import resources
  from typing import Any, Dict, List, Optional, TypedDict, cast

  import yaml
- from jsonschema import ValidationError, validate
- from rich import print
  from sqlalchemy import make_url

  from ._error import DBOSInitializationError
@@ -36,7 +34,7 @@ class DBOSConfig(TypedDict, total=False):
          otlp_attributes (dict[str, str]): A set of custom attributes to apply OTLP-exported logs and traces
          application_version (str): Application version
          executor_id (str): Executor ID, used to identify the application instance in distributed environments
-         disable_otlp (bool): If True, disables OTLP tracing and logging. Defaults to False.
+         enable_otlp (bool): If True, enable built-in DBOS OTLP tracing and logging.
      """

      name: str
@@ -54,7 +52,7 @@
      otlp_attributes: Optional[dict[str, str]]
      application_version: Optional[str]
      executor_id: Optional[str]
-     disable_otlp: Optional[bool]
+     enable_otlp: Optional[bool]


  class RuntimeConfig(TypedDict, total=False):
@@ -97,7 +95,7 @@ class TelemetryConfig(TypedDict, total=False):
      logs: Optional[LoggerConfig]
      OTLPExporter: Optional[OTLPExporterConfig]
      otlp_attributes: Optional[dict[str, str]]
-     disable_otlp: Optional[bool]
+     disable_otlp: bool


  class ConfigFile(TypedDict, total=False):
@@ -165,10 +163,12 @@ def translate_dbos_config_to_config_file(config: DBOSConfig) -> ConfigFile:
      ]

      # Telemetry config
+     enable_otlp = config.get("enable_otlp", None)
+     disable_otlp = True if enable_otlp is None else not enable_otlp
      telemetry: TelemetryConfig = {
          "OTLPExporter": {"tracesEndpoint": [], "logsEndpoint": []},
          "otlp_attributes": config.get("otlp_attributes", {}),
-         "disable_otlp": config.get("disable_otlp", False),
+         "disable_otlp": disable_otlp,
      }
      # For mypy
      assert telemetry["OTLPExporter"] is not None
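Note the inverted default: where `disable_otlp` previously defaulted to False (telemetry on), OTLP is now opt-in via `enable_otlp`. The mapping in the hunk above works out to:

    # enable_otlp unset (None) -> disable_otlp True   (OTLP off by default)
    # enable_otlp False        -> disable_otlp True
    # enable_otlp True         -> disable_otlp False
    for enable_otlp in (None, False, True):
        disable_otlp = True if enable_otlp is None else not enable_otlp
        print(enable_otlp, "->", disable_otlp)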
@@ -265,17 +265,6 @@
      )
      data = cast(Dict[str, Any], data)

-     # Load the JSON schema relative to the package root
-     schema_file = resources.files("dbos").joinpath("dbos-config.schema.json")
-     with schema_file.open("r") as f:
-         schema = json.load(f)
-
-     # Validate the data against the schema
-     try:
-         validate(instance=data, schema=schema)
-     except ValidationError as e:
-         raise DBOSInitializationError(f"Validation error: {e}")
-
      # Special case: convert logsEndpoint and tracesEndpoint from strings to lists of strings, if present
      if "telemetry" in data and "OTLPExporter" in data["telemetry"]:
          if "logsEndpoint" in data["telemetry"]["OTLPExporter"]:
@@ -441,17 +430,13 @@
          printable_sys_db_url = make_url(data["system_database_url"]).render_as_string(
              hide_password=True
          )
-         print(
-             f"[bold blue]DBOS system database URL: {printable_sys_db_url}[/bold blue]"
-         )
+         print(f"DBOS system database URL: {printable_sys_db_url}")
          if data["database_url"].startswith("sqlite"):
              print(
-                 f"[bold blue]Using SQLite as a system database. The SQLite system database is for development and testing. PostgreSQL is recommended for production use.[/bold blue]"
+                 f"Using SQLite as a system database. The SQLite system database is for development and testing. PostgreSQL is recommended for production use."
              )
          else:
-             print(
-                 f"[bold blue]Database engine parameters: {data['database']['db_engine_kwargs']}[/bold blue]"
-             )
+             print(f"Database engine parameters: {data['database']['db_engine_kwargs']}")

      # Return data as ConfigFile type
      return data
@@ -563,12 +548,15 @@ def overwrite_config(provided_config: ConfigFile) -> ConfigFile:
      if "telemetry" not in provided_config or provided_config["telemetry"] is None:
          provided_config["telemetry"] = {
              "OTLPExporter": {"tracesEndpoint": [], "logsEndpoint": []},
+             "disable_otlp": False,
          }
-     elif "OTLPExporter" not in provided_config["telemetry"]:
-         provided_config["telemetry"]["OTLPExporter"] = {
-             "tracesEndpoint": [],
-             "logsEndpoint": [],
-         }
+     else:
+         provided_config["telemetry"]["disable_otlp"] = False
+         if "OTLPExporter" not in provided_config["telemetry"]:
+             provided_config["telemetry"]["OTLPExporter"] = {
+                 "tracesEndpoint": [],
+                 "logsEndpoint": [],
+             }

      # This is a super messy from a typing perspective.
      # Some of ConfigFile keys are optional -- but in practice they'll always be present in hosted environments
{dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_debouncer.py

@@ -1,6 +1,5 @@
  import asyncio
  import math
- import sys
  import time
  import types
  import uuid
@@ -12,17 +11,12 @@ from typing import (
      Dict,
      Generic,
      Optional,
+     ParamSpec,
      Tuple,
      TypedDict,
      TypeVar,
-     Union,
  )

- if sys.version_info < (3, 10):
-     from typing_extensions import ParamSpec
- else:
-     from typing import ParamSpec
-
  from dbos._client import (
      DBOSClient,
      EnqueueOptions,
{dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_debug.py

@@ -4,8 +4,6 @@ import sys
  from pathlib import Path
  from typing import Union

- from fastapi_cli.discover import get_module_data_from_path
-
  from dbos import DBOS


@@ -34,12 +32,6 @@ def debug_workflow(workflow_id: str, entrypoint: Union[str, PythonModule]) -> None:


  def parse_start_command(command: str) -> Union[str, PythonModule]:
-     match = re.match(r"fastapi\s+run\s+(\.?[\w/]+\.py)", command)
-     if match:
-         # Mirror the logic in fastapi's run command by converting the path argument to a module
-         mod_data = get_module_data_from_path(Path(match.group(1)))
-         sys.path.insert(0, str(mod_data.extra_sys_path))
-         return PythonModule(mod_data.module_import_str)
      match = re.match(r"python3?\s+(\.?[\w/]+\.py)", command)
      if match:
          return match.group(1)
{dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_docker_pg_helper.py

@@ -1,11 +1,10 @@
+ import json
  import logging
  import os
  import subprocess
  import time

- import docker
  import psycopg
- from docker.errors import APIError, NotFound

  logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s")
  from typing import Any, Dict, Optional, Tuple
@@ -86,48 +85,71 @@ def start_docker_postgres(pool_config: Dict[str, Any]) -> bool:
      image_name = "pgvector/pgvector:pg16"

      try:
-         client = docker.from_env()
-
          # Check if the container already exists
          try:
-             container = client.containers.get(container_name)
-             if container.status == "running":
-                 logging.info(f"Container '{container_name}' is already running.")
-                 return True
-             elif container.status == "exited":
-                 container.start()
-                 logging.info(
-                     f"Container '{container_name}' was stopped and has been restarted."
-                 )
-                 return True
-         except NotFound:
-             # Container doesn't exist, proceed with creation
+             result = subprocess.run(
+                 f"docker inspect {container_name}",
+                 shell=True,
+                 text=True,
+                 capture_output=True,
+             )
+
+             if result.returncode == 0:
+                 # Container exists, check its status
+                 container_info = json.loads(result.stdout)
+                 status = container_info[0]["State"]["Status"]
+
+                 if status == "running":
+                     logging.info(f"Container '{container_name}' is already running.")
+                     return True
+                 elif status == "exited":
+                     subprocess.run(
+                         f"docker start {container_name}", shell=True, check=True
+                     )
+                     logging.info(
+                         f"Container '{container_name}' was stopped and has been restarted."
+                     )
+                     return True
+         except (
+             subprocess.CalledProcessError,
+             json.JSONDecodeError,
+             KeyError,
+             IndexError,
+         ):
+             # Container doesn't exist or error parsing, proceed with creation
              pass

-         # Pull the image if it doesn't exist
-         imgs = client.images.list(name=image_name)
-         if len(imgs) == 0:
+         # Check if the image exists locally
+         result = subprocess.run(
+             f"docker images -q {image_name}", shell=True, text=True, capture_output=True
+         )
+
+         if not result.stdout.strip():
              logging.info(f"Pulling Docker image {image_name}...")
-             client.images.pull(image_name)
+             subprocess.run(f"docker pull {image_name}", shell=True, check=True)

          # Create and start the container
-         container = client.containers.run(
-             image=image_name,
-             name=container_name,
-             detach=True,
-             environment={
-                 "POSTGRES_PASSWORD": pool_config["password"],
-                 "PGDATA": pg_data,
-             },
-             ports={"5432/tcp": pool_config["port"]},
-             volumes={pg_data: {"bind": pg_data, "mode": "rw"}},
-             remove=True,  # Equivalent to --rm
+         cmd = [
+             "docker run",
+             "-d",
+             f"--name {container_name}",
+             f"-e POSTGRES_PASSWORD={pool_config['password']}",
+             f"-e PGDATA={pg_data}",
+             f"-p {pool_config['port']}:5432",
+             f"-v {pg_data}:{pg_data}",
+             "--rm",
+             image_name,
+         ]
+
+         result = subprocess.run(
+             " ".join(cmd), shell=True, text=True, capture_output=True, check=True
          )

-         logging.info(f"Created container: {container.id}")
+         container_id = result.stdout.strip()
+         logging.info(f"Created container: {container_id}")

-     except APIError as e:
-         raise Exception(f"Docker API error: {str(e)}")
+     except subprocess.CalledProcessError as e:
+         raise Exception(f"Docker command error: {e.stderr if e.stderr else str(e)}")

      # Wait for PostgreSQL to be ready
      attempts = 30
@@ -148,15 +170,16 @@

  def check_docker_installed() -> bool:
      """
-     Check if Docker is installed and running using the docker library.
+     Check if Docker is installed and running using the Docker CLI.

      Returns:
          bool: True if Docker is installed and running, False otherwise.
      """
      try:
-         client = docker.from_env()
-         client.ping()  # type: ignore
-         return True
+         result = subprocess.run(
+             "docker version --format json", shell=True, capture_output=True, text=True
+         )
+         return result.returncode == 0
      except Exception:
          return False

@@ -176,22 +199,41 @@ def stop_docker_pg() -> None:
      try:
          logger.info(f"Stopping Docker Postgres container {container_name}...")

-         client = docker.from_env()
-
-         try:
-             container = client.containers.get(container_name)
-
-             if container.status == "running":
-                 container.stop()
-                 logger.info(
-                     f"Successfully stopped Docker Postgres container {container_name}."
-                 )
-             else:
-                 logger.info(f"Container {container_name} exists but is not running.")
+         # Check if container exists
+         result = subprocess.run(
+             f"docker inspect {container_name}",
+             shell=True,
+             text=True,
+             capture_output=True,
+         )

-         except docker.errors.NotFound:
+         if result.returncode == 0:
+             # Container exists, check its status
+             try:
+                 container_info = json.loads(result.stdout)
+                 status = container_info[0]["State"]["Status"]
+
+                 if status == "running":
+                     subprocess.run(
+                         f"docker stop {container_name}", shell=True, check=True
+                     )
+                     logger.info(
+                         f"Successfully stopped Docker Postgres container {container_name}."
+                     )
+                 else:
+                     logger.info(
+                         f"Container {container_name} exists but is not running."
+                     )
+             except (json.JSONDecodeError, KeyError, IndexError) as e:
+                 logger.error(f"Error parsing container info: {e}")
+                 raise
+         else:
              logger.info(f"Container {container_name} does not exist.")

+     except subprocess.CalledProcessError as error:
+         error_message = error.stderr if error.stderr else str(error)
+         logger.error(f"Failed to stop Docker Postgres container: {error_message}")
+         raise
      except Exception as error:
          error_message = str(error)
          logger.error(f"Failed to stop Docker Postgres container: {error_message}")
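Dropping the `docker` SDK in favor of plain `subprocess` calls repeats one idiom throughout this file: shell out to the Docker CLI, capture output, and parse the JSON that `docker inspect` emits. A condensed, hypothetical helper in the same spirit (not part of the package; it passes an argument list rather than a `shell=True` string, which sidesteps shell quoting):

    import json
    import subprocess
    from typing import Optional

    def get_container_status(name: str) -> Optional[str]:
        """Return a container's status ("running", "exited", ...) or None if absent."""
        result = subprocess.run(
            ["docker", "inspect", name], text=True, capture_output=True
        )
        if result.returncode != 0:
            return None  # docker inspect exits nonzero for unknown containers
        try:
            return json.loads(result.stdout)[0]["State"]["Status"]
        except (json.JSONDecodeError, KeyError, IndexError):
            return None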
{dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_fastapi.py

@@ -83,6 +83,10 @@ def setup_fastapi_middleware(app: FastAPI, dbos: DBOS) -> None:
              response = await call_next(request)
          else:
              response = await call_next(request)
-         if hasattr(response, "status_code"):
+         if (
+             dbos._config["telemetry"]
+             and not dbos._config["telemetry"]["disable_otlp"]
+             and hasattr(response, "status_code")
+         ):
              DBOS.span.set_attribute("responseCode", response.status_code)
          return response
{dbos-1.14.0a9 → dbos-1.15.0a1}/dbos/_logger.py

@@ -2,14 +2,6 @@ import logging
  import os
  from typing import TYPE_CHECKING, Any

- from opentelemetry._logs import set_logger_provider
- from opentelemetry.exporter.otlp.proto.http._log_exporter import OTLPLogExporter
- from opentelemetry.sdk._logs import LoggerProvider, LoggingHandler
- from opentelemetry.sdk._logs.export import BatchLogRecordProcessor
- from opentelemetry.sdk.resources import Resource
- from opentelemetry.semconv.resource import ResourceAttributes
- from opentelemetry.trace.span import format_trace_id
-
  from dbos._utils import GlobalParams

  if TYPE_CHECKING:
@@ -24,6 +16,7 @@ class DBOSLogTransformer(logging.Filter):
          super().__init__()
          self.app_id = os.environ.get("DBOS__APPID", "")
          self.otlp_attributes: dict[str, str] = config.get("telemetry", {}).get("otlp_attributes", {})  # type: ignore
+         self.disable_otlp = config.get("telemetry", {}).get("disable_otlp", True)  # type: ignore

      def filter(self, record: Any) -> bool:
          record.applicationID = self.app_id
@@ -39,19 +32,15 @@
          if ctx:
              if ctx.is_within_workflow():
                  record.operationUUID = ctx.workflow_id
-                 span = ctx.get_current_active_span()
-                 if span:
-                     trace_id = format_trace_id(span.get_span_context().trace_id)
-                     record.traceId = trace_id
-
-         return True
+             if not self.disable_otlp:
+                 from opentelemetry.trace.span import format_trace_id

+                 span = ctx.get_current_active_span()
+                 if span:
+                     trace_id = format_trace_id(span.get_span_context().trace_id)
+                     record.traceId = trace_id

- # Mitigation for https://github.com/open-telemetry/opentelemetry-python/issues/3193
- # Reduce the force flush timeout
- class PatchedOTLPLoggerProvider(LoggerProvider):
-     def force_flush(self, timeout_millis: int = 5000) -> bool:
-         return super().force_flush(timeout_millis)
+         return True


  def init_logger() -> None:
@@ -80,10 +69,18 @@ def config_logger(config: "ConfigFile") -> None:
      disable_otlp = config.get("telemetry", {}).get("disable_otlp", False)  # type: ignore

      if not disable_otlp and otlp_logs_endpoints:
-         log_provider = PatchedOTLPLoggerProvider(
+
+         from opentelemetry._logs import set_logger_provider
+         from opentelemetry.exporter.otlp.proto.http._log_exporter import OTLPLogExporter
+         from opentelemetry.sdk._logs import LoggerProvider, LoggingHandler
+         from opentelemetry.sdk._logs.export import BatchLogRecordProcessor
+         from opentelemetry.sdk.resources import Resource
+         from opentelemetry.semconv.attributes.service_attributes import SERVICE_NAME
+
+         log_provider = LoggerProvider(
              Resource.create(
                  attributes={
-                     ResourceAttributes.SERVICE_NAME: config["name"],
+                     SERVICE_NAME: config["name"],
                  }
              )
          )