dbos 0.25.0a14__tar.gz → 0.25.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (103)
  1. {dbos-0.25.0a14 → dbos-0.25.1}/PKG-INFO +1 -1
  2. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/__init__.py +3 -0
  3. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_app_db.py +14 -15
  4. dbos-0.25.1/dbos/_client.py +206 -0
  5. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_context.py +4 -2
  6. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_core.py +15 -11
  7. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_dbos.py +9 -3
  8. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_logger.py +14 -0
  9. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_queue.py +1 -1
  10. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_sys_db.py +40 -26
  11. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/cli/cli.py +6 -6
  12. {dbos-0.25.0a14 → dbos-0.25.1}/pyproject.toml +1 -1
  13. dbos-0.25.1/tests/client_collateral.py +38 -0
  14. dbos-0.25.1/tests/client_worker.py +30 -0
  15. {dbos-0.25.0a14 → dbos-0.25.1}/tests/conftest.py +14 -2
  16. dbos-0.25.1/tests/test_client.py +399 -0
  17. {dbos-0.25.0a14 → dbos-0.25.1}/tests/test_queue.py +3 -0
  18. {dbos-0.25.0a14 → dbos-0.25.1}/LICENSE +0 -0
  19. {dbos-0.25.0a14 → dbos-0.25.1}/README.md +0 -0
  20. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/__main__.py +0 -0
  21. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_admin_server.py +0 -0
  22. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_classproperty.py +0 -0
  23. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_cloudutils/authentication.py +0 -0
  24. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_cloudutils/cloudutils.py +0 -0
  25. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_cloudutils/databases.py +0 -0
  26. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_conductor/conductor.py +0 -0
  27. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_conductor/protocol.py +0 -0
  28. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_croniter.py +0 -0
  29. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_db_wizard.py +0 -0
  30. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_dbos_config.py +0 -0
  31. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_debug.py +0 -0
  32. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_error.py +0 -0
  33. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_fastapi.py +0 -0
  34. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_flask.py +0 -0
  35. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_kafka.py +0 -0
  36. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_kafka_message.py +0 -0
  37. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_migrations/env.py +0 -0
  38. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_migrations/script.py.mako +0 -0
  39. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_migrations/versions/04ca4f231047_workflow_queues_executor_id.py +0 -0
  40. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_migrations/versions/50f3227f0b4b_fix_job_queue.py +0 -0
  41. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_migrations/versions/5c361fc04708_added_system_tables.py +0 -0
  42. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_migrations/versions/a3b18ad34abe_added_triggers.py +0 -0
  43. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_migrations/versions/d76646551a6b_job_queue_limiter.py +0 -0
  44. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_migrations/versions/d76646551a6c_workflow_queue.py +0 -0
  45. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_migrations/versions/eab0cc1d9a14_job_queue.py +0 -0
  46. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py +0 -0
  47. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_outcome.py +0 -0
  48. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_recovery.py +0 -0
  49. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_registrations.py +0 -0
  50. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_request.py +0 -0
  51. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_roles.py +0 -0
  52. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_scheduler.py +0 -0
  53. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_schemas/__init__.py +0 -0
  54. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_schemas/application_database.py +0 -0
  55. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_schemas/system_database.py +0 -0
  56. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_serialization.py +0 -0
  57. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_templates/dbos-db-starter/README.md +0 -0
  58. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_templates/dbos-db-starter/__package/__init__.py +0 -0
  59. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_templates/dbos-db-starter/__package/main.py +0 -0
  60. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_templates/dbos-db-starter/__package/schema.py +0 -0
  61. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_templates/dbos-db-starter/alembic.ini +0 -0
  62. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +0 -0
  63. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_templates/dbos-db-starter/migrations/env.py.dbos +0 -0
  64. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_templates/dbos-db-starter/migrations/script.py.mako +0 -0
  65. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py +0 -0
  66. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -0
  67. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_tracer.py +0 -0
  68. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_utils.py +0 -0
  69. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/_workflow_commands.py +0 -0
  70. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/cli/_github_init.py +0 -0
  71. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/cli/_template_init.py +0 -0
  72. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/dbos-config.schema.json +0 -0
  73. {dbos-0.25.0a14 → dbos-0.25.1}/dbos/py.typed +0 -0
  74. {dbos-0.25.0a14 → dbos-0.25.1}/tests/__init__.py +0 -0
  75. {dbos-0.25.0a14 → dbos-0.25.1}/tests/atexit_no_ctor.py +0 -0
  76. {dbos-0.25.0a14 → dbos-0.25.1}/tests/atexit_no_launch.py +0 -0
  77. {dbos-0.25.0a14 → dbos-0.25.1}/tests/classdefs.py +0 -0
  78. {dbos-0.25.0a14 → dbos-0.25.1}/tests/more_classdefs.py +0 -0
  79. {dbos-0.25.0a14 → dbos-0.25.1}/tests/queuedworkflow.py +0 -0
  80. {dbos-0.25.0a14 → dbos-0.25.1}/tests/test_admin_server.py +0 -0
  81. {dbos-0.25.0a14 → dbos-0.25.1}/tests/test_async.py +0 -0
  82. {dbos-0.25.0a14 → dbos-0.25.1}/tests/test_classdecorators.py +0 -0
  83. {dbos-0.25.0a14 → dbos-0.25.1}/tests/test_concurrency.py +0 -0
  84. {dbos-0.25.0a14 → dbos-0.25.1}/tests/test_config.py +0 -0
  85. {dbos-0.25.0a14 → dbos-0.25.1}/tests/test_croniter.py +0 -0
  86. {dbos-0.25.0a14 → dbos-0.25.1}/tests/test_dbos.py +0 -0
  87. {dbos-0.25.0a14 → dbos-0.25.1}/tests/test_dbwizard.py +0 -0
  88. {dbos-0.25.0a14 → dbos-0.25.1}/tests/test_debug.py +0 -0
  89. {dbos-0.25.0a14 → dbos-0.25.1}/tests/test_failures.py +0 -0
  90. {dbos-0.25.0a14 → dbos-0.25.1}/tests/test_fastapi.py +0 -0
  91. {dbos-0.25.0a14 → dbos-0.25.1}/tests/test_fastapi_roles.py +0 -0
  92. {dbos-0.25.0a14 → dbos-0.25.1}/tests/test_flask.py +0 -0
  93. {dbos-0.25.0a14 → dbos-0.25.1}/tests/test_kafka.py +0 -0
  94. {dbos-0.25.0a14 → dbos-0.25.1}/tests/test_outcome.py +0 -0
  95. {dbos-0.25.0a14 → dbos-0.25.1}/tests/test_package.py +0 -0
  96. {dbos-0.25.0a14 → dbos-0.25.1}/tests/test_scheduler.py +0 -0
  97. {dbos-0.25.0a14 → dbos-0.25.1}/tests/test_schema_migration.py +0 -0
  98. {dbos-0.25.0a14 → dbos-0.25.1}/tests/test_singleton.py +0 -0
  99. {dbos-0.25.0a14 → dbos-0.25.1}/tests/test_spans.py +0 -0
  100. {dbos-0.25.0a14 → dbos-0.25.1}/tests/test_sqlalchemy.py +0 -0
  101. {dbos-0.25.0a14 → dbos-0.25.1}/tests/test_workflow_cancel.py +0 -0
  102. {dbos-0.25.0a14 → dbos-0.25.1}/tests/test_workflow_cmds.py +0 -0
  103. {dbos-0.25.0a14 → dbos-0.25.1}/version/__init__.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: dbos
- Version: 0.25.0a14
+ Version: 0.25.1
  Summary: Ultra-lightweight durable execution in Python
  Author-Email: "DBOS, Inc." <contact@dbos.dev>
  License: MIT
@@ -1,4 +1,5 @@
  from . import _error as error
+ from ._client import DBOSClient, EnqueueOptions
  from ._context import DBOSContextEnsure, DBOSContextSetAuth, SetWorkflowID
  from ._dbos import DBOS, DBOSConfiguredInstance, WorkflowHandle
  from ._dbos_config import ConfigFile, DBOSConfig, get_dbos_database_url, load_config
@@ -11,9 +12,11 @@ __all__ = [
      "ConfigFile",
      "DBOSConfig",
      "DBOS",
+     "DBOSClient",
      "DBOSConfiguredInstance",
      "DBOSContextEnsure",
      "DBOSContextSetAuth",
+     "EnqueueOptions",
      "GetWorkflowsInput",
      "KafkaMessage",
      "SetWorkflowID",
@@ -27,19 +27,18 @@ class RecordedResult(TypedDict):
  
  class ApplicationDatabase:
  
-     def __init__(self, config: ConfigFile, *, debug_mode: bool = False):
-         self.config = config
+     def __init__(self, database: DatabaseConfig, *, debug_mode: bool = False):
  
-         app_db_name = config["database"]["app_db_name"]
+         app_db_name = database["app_db_name"]
  
          # If the application database does not already exist, create it
          if not debug_mode:
              postgres_db_url = sa.URL.create(
                  "postgresql+psycopg",
-                 username=config["database"]["username"],
-                 password=config["database"]["password"],
-                 host=config["database"]["hostname"],
-                 port=config["database"]["port"],
+                 username=database["username"],
+                 password=database["password"],
+                 host=database["hostname"],
+                 port=database["port"],
                  database="postgres",
              )
              postgres_db_engine = sa.create_engine(postgres_db_url)
@@ -55,25 +54,25 @@ class ApplicationDatabase:
          # Create a connection pool for the application database
          app_db_url = sa.URL.create(
              "postgresql+psycopg",
-             username=config["database"]["username"],
-             password=config["database"]["password"],
-             host=config["database"]["hostname"],
-             port=config["database"]["port"],
+             username=database["username"],
+             password=database["password"],
+             host=database["hostname"],
+             port=database["port"],
              database=app_db_name,
          )
  
          connect_args = {}
          if (
-             "connectionTimeoutMillis" in config["database"]
-             and config["database"]["connectionTimeoutMillis"]
+             "connectionTimeoutMillis" in database
+             and database["connectionTimeoutMillis"]
          ):
              connect_args["connect_timeout"] = int(
-                 config["database"]["connectionTimeoutMillis"] / 1000
+                 database["connectionTimeoutMillis"] / 1000
              )
  
          self.engine = sa.create_engine(
              app_db_url,
-             pool_size=config["database"]["app_db_pool_size"],
+             pool_size=database["app_db_pool_size"],
              max_overflow=0,
              pool_timeout=30,
              connect_args=connect_args,
@@ -0,0 +1,206 @@
+ import asyncio
+ import sys
+ import uuid
+ from typing import Any, Generic, Optional, TypedDict, TypeVar
+
+ if sys.version_info < (3, 11):
+     from typing_extensions import NotRequired
+ else:
+     from typing import NotRequired
+
+ from dbos import _serialization
+ from dbos._dbos import WorkflowHandle, WorkflowHandleAsync
+ from dbos._dbos_config import parse_database_url_to_dbconfig
+ from dbos._error import DBOSNonExistentWorkflowError
+ from dbos._registrations import DEFAULT_MAX_RECOVERY_ATTEMPTS
+ from dbos._serialization import WorkflowInputs
+ from dbos._sys_db import SystemDatabase, WorkflowStatusInternal, WorkflowStatusString
+ from dbos._workflow_commands import WorkflowStatus, get_workflow
+
+ R = TypeVar("R", covariant=True)  # A generic type for workflow return values
+
+
+ class EnqueueOptions(TypedDict):
+     workflow_name: str
+     workflow_class_name: NotRequired[str]
+     queue_name: str
+     app_version: NotRequired[str]
+     workflow_id: NotRequired[str]
+
+
+ class WorkflowHandleClientPolling(Generic[R]):
+
+     def __init__(self, workflow_id: str, sys_db: SystemDatabase):
+         self.workflow_id = workflow_id
+         self._sys_db = sys_db
+
+     def get_workflow_id(self) -> str:
+         return self.workflow_id
+
+     def get_result(self) -> R:
+         res: R = self._sys_db.await_workflow_result(self.workflow_id)
+         return res
+
+     def get_status(self) -> "WorkflowStatus":
+         status = get_workflow(self._sys_db, self.workflow_id, True)
+         if status is None:
+             raise DBOSNonExistentWorkflowError(self.workflow_id)
+         return status
+
+
+ class WorkflowHandleClientAsyncPolling(Generic[R]):
+
+     def __init__(self, workflow_id: str, sys_db: SystemDatabase):
+         self.workflow_id = workflow_id
+         self._sys_db = sys_db
+
+     def get_workflow_id(self) -> str:
+         return self.workflow_id
+
+     async def get_result(self) -> R:
+         res: R = await asyncio.to_thread(
+             self._sys_db.await_workflow_result, self.workflow_id
+         )
+         return res
+
+     async def get_status(self) -> "WorkflowStatus":
+         status = await asyncio.to_thread(
+             get_workflow, self._sys_db, self.workflow_id, True
+         )
+         if status is None:
+             raise DBOSNonExistentWorkflowError(self.workflow_id)
+         return status
+
+
+ class DBOSClient:
+     def __init__(self, database_url: str, *, system_database: Optional[str] = None):
+         db_config = parse_database_url_to_dbconfig(database_url)
+         if system_database is not None:
+             db_config["sys_db_name"] = system_database
+         self._sys_db = SystemDatabase(db_config)
+
+     def destroy(self) -> None:
+         self._sys_db.destroy()
+
+     def _enqueue(self, options: EnqueueOptions, *args: Any, **kwargs: Any) -> str:
+         workflow_name = options["workflow_name"]
+         queue_name = options["queue_name"]
+
+         workflow_class_name = options.get("workflow_class_name")
+         app_version = options.get("app_version")
+         max_recovery_attempts = options.get("max_recovery_attempts")
+         if max_recovery_attempts is None:
+             max_recovery_attempts = DEFAULT_MAX_RECOVERY_ATTEMPTS
+         workflow_id = options.get("workflow_id")
+         if workflow_id is None:
+             workflow_id = str(uuid.uuid4())
+
+         status: WorkflowStatusInternal = {
+             "workflow_uuid": workflow_id,
+             "status": WorkflowStatusString.ENQUEUED.value,
+             "name": workflow_name,
+             "class_name": workflow_class_name,
+             "queue_name": queue_name,
+             "app_version": app_version,
+             "config_name": None,
+             "authenticated_user": None,
+             "assumed_role": None,
+             "authenticated_roles": None,
+             "request": None,
+             "output": None,
+             "error": None,
+             "created_at": None,
+             "updated_at": None,
+             "executor_id": None,
+             "recovery_attempts": None,
+             "app_id": None,
+         }
+
+         inputs: WorkflowInputs = {
+             "args": args,
+             "kwargs": kwargs,
+         }
+
+         wf_status = self._sys_db.insert_workflow_status(status)
+         self._sys_db.update_workflow_inputs(
+             workflow_id, _serialization.serialize_args(inputs)
+         )
+         if wf_status == WorkflowStatusString.ENQUEUED.value:
+             self._sys_db.enqueue(workflow_id, queue_name)
+         return workflow_id
+
+     def enqueue(
+         self, options: EnqueueOptions, *args: Any, **kwargs: Any
+     ) -> WorkflowHandle[R]:
+         workflow_id = self._enqueue(options, *args, **kwargs)
+         return WorkflowHandleClientPolling[R](workflow_id, self._sys_db)
+
+     async def enqueue_async(
+         self, options: EnqueueOptions, *args: Any, **kwargs: Any
+     ) -> WorkflowHandleAsync[R]:
+         workflow_id = await asyncio.to_thread(self._enqueue, options, *args, **kwargs)
+         return WorkflowHandleClientAsyncPolling[R](workflow_id, self._sys_db)
+
+     def retrieve_workflow(self, workflow_id: str) -> WorkflowHandle[R]:
+         status = get_workflow(self._sys_db, workflow_id, True)
+         if status is None:
+             raise DBOSNonExistentWorkflowError(workflow_id)
+         return WorkflowHandleClientPolling[R](workflow_id, self._sys_db)
+
+     async def retrieve_workflow_async(self, workflow_id: str) -> WorkflowHandleAsync[R]:
+         status = await asyncio.to_thread(get_workflow, self._sys_db, workflow_id, True)
+         if status is None:
+             raise DBOSNonExistentWorkflowError(workflow_id)
+         return WorkflowHandleClientAsyncPolling[R](workflow_id, self._sys_db)
+
+     def send(
+         self,
+         destination_id: str,
+         message: Any,
+         topic: Optional[str] = None,
+         idempotency_key: Optional[str] = None,
+     ) -> None:
+         idempotency_key = idempotency_key if idempotency_key else str(uuid.uuid4())
+         status: WorkflowStatusInternal = {
+             "workflow_uuid": f"{destination_id}-{idempotency_key}",
+             "status": WorkflowStatusString.SUCCESS.value,
+             "name": "temp_workflow-send-client",
+             "class_name": None,
+             "queue_name": None,
+             "config_name": None,
+             "authenticated_user": None,
+             "assumed_role": None,
+             "authenticated_roles": None,
+             "request": None,
+             "output": None,
+             "error": None,
+             "created_at": None,
+             "updated_at": None,
+             "executor_id": None,
+             "recovery_attempts": None,
+             "app_id": None,
+             "app_version": None,
+         }
+         self._sys_db.insert_workflow_status(status)
+         self._sys_db.send(status["workflow_uuid"], 0, destination_id, message, topic)
+
+     async def send_async(
+         self,
+         destination_id: str,
+         message: Any,
+         topic: Optional[str] = None,
+         idempotency_key: Optional[str] = None,
+     ) -> None:
+         return await asyncio.to_thread(
+             self.send, destination_id, message, topic, idempotency_key
+         )
+
+     def get_event(self, workflow_id: str, key: str, timeout_seconds: float = 60) -> Any:
+         return self._sys_db.get_event(workflow_id, key, timeout_seconds)
+
+     async def get_event_async(
+         self, workflow_id: str, key: str, timeout_seconds: float = 60
+     ) -> Any:
+         return await asyncio.to_thread(
+             self.get_event, workflow_id, key, timeout_seconds
+         )
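
The new dbos/_client.py adds DBOSClient, which lets an external process that never calls DBOS.launch() enqueue workflows and interact with them through the system database alone. A minimal usage sketch, assuming a reachable Postgres URL and a separate worker app that registers a greeting_workflow on example_queue (the URL, workflow name, and queue name are all hypothetical):

    from dbos import DBOSClient, EnqueueOptions

    client = DBOSClient("postgresql://postgres:dbos@localhost:5432/my_app")  # assumed URL

    options: EnqueueOptions = {
        "workflow_name": "greeting_workflow",  # hypothetical workflow defined by the worker
        "queue_name": "example_queue",         # hypothetical queue name
    }
    handle = client.enqueue(options, "Alice")  # inserts an ENQUEUED row and queues it
    print(handle.get_result())                 # polls the system DB until a worker completes it

    client.destroy()  # dispose of the underlying connection pool

enqueue_async, retrieve_workflow, send, and get_event provide the same operations for asyncio callers and for workflows that already exist.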
@@ -195,8 +195,10 @@ class DBOSContext:
      def end_handler(self, exc_value: Optional[BaseException]) -> None:
          self._end_span(exc_value)
  
-     def get_current_span(self) -> Span:
-         return self.spans[-1]
+     def get_current_span(self) -> Optional[Span]:
+         if len(self.spans):
+             return self.spans[-1]
+         return None
  
      def _start_span(self, attributes: TracedAttributes) -> None:
          attributes["operationUUID"] = (
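
get_current_span now returns Optional[Span], so callers must guard against None before recording events; that is the pattern the _core.py hunks below adopt. A minimal sketch of the guarded call, assuming it runs where a DBOS context is active:

    from dbos._context import assert_current_dbos_context

    ctx = assert_current_dbos_context()  # raises if no DBOS context is set
    span = ctx.get_current_span()
    if span is not None:
        span.add_event("my-event", {"detail": "recorded only when a span exists"})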
@@ -892,10 +892,12 @@ def decorate_transaction(
              except DBAPIError as dbapi_error:
                  if dbapi_error.orig.sqlstate == "40001":  # type: ignore
                      # Retry on serialization failure
-                     ctx.get_current_span().add_event(
-                         "Transaction Serialization Failure",
-                         {"retry_wait_seconds": retry_wait_seconds},
-                     )
+                     span = ctx.get_current_span()
+                     if span:
+                         span.add_event(
+                             "Transaction Serialization Failure",
+                             {"retry_wait_seconds": retry_wait_seconds},
+                         )
                      time.sleep(retry_wait_seconds)
                      retry_wait_seconds = min(
                          retry_wait_seconds * backoff_factor,
@@ -1004,13 +1006,15 @@ def decorate_step(
                  f"Step being automatically retried. (attempt {attempt + 1} of {attempts}). {traceback.format_exc()}"
              )
              ctx = assert_current_dbos_context()
-             ctx.get_current_span().add_event(
-                 f"Step attempt {attempt} failed",
-                 {
-                     "error": str(error),
-                     "retryIntervalSeconds": interval_seconds,
-                 },
-             )
+             span = ctx.get_current_span()
+             if span:
+                 span.add_event(
+                     f"Step attempt {attempt} failed",
+                     {
+                         "error": str(error),
+                         "retryIntervalSeconds": interval_seconds,
+                     },
+                 )
              return min(
                  interval_seconds * (backoff_rate**attempt),
                  max_retry_interval_seconds,
@@ -445,8 +445,12 @@ class DBOS:
          dbos_logger.info(f"Executor ID: {GlobalParams.executor_id}")
          dbos_logger.info(f"Application version: {GlobalParams.app_version}")
          self._executor_field = ThreadPoolExecutor(max_workers=64)
-         self._sys_db_field = SystemDatabase(self.config, debug_mode=debug_mode)
-         self._app_db_field = ApplicationDatabase(self.config, debug_mode=debug_mode)
+         self._sys_db_field = SystemDatabase(
+             self.config["database"], debug_mode=debug_mode
+         )
+         self._app_db_field = ApplicationDatabase(
+             self.config["database"], debug_mode=debug_mode
+         )
  
          if debug_mode:
              return
@@ -1079,7 +1083,9 @@ class DBOS:
      def span(cls) -> Span:
          """Return the tracing `Span` associated with the current context."""
          ctx = assert_current_dbos_context()
-         return ctx.get_current_span()
+         span = ctx.get_current_span()
+         assert span
+         return span
  
      @classproperty
      def request(cls) -> Optional["Request"]:
@@ -7,6 +7,7 @@ from opentelemetry.exporter.otlp.proto.http._log_exporter import OTLPLogExporter
  from opentelemetry.sdk._logs import LoggerProvider, LoggingHandler
  from opentelemetry.sdk._logs.export import BatchLogRecordProcessor
  from opentelemetry.sdk.resources import Resource
+ from opentelemetry.trace.span import format_trace_id
  
  from dbos._utils import GlobalParams
  
@@ -26,6 +27,19 @@ class DBOSLogTransformer(logging.Filter):
          record.applicationID = self.app_id
          record.applicationVersion = GlobalParams.app_version
          record.executorID = GlobalParams.executor_id
+
+         # If available, decorate the log entry with Workflow ID and Trace ID
+         from dbos._context import get_local_dbos_context
+
+         ctx = get_local_dbos_context()
+         if ctx:
+             if ctx.is_within_workflow():
+                 record.operationUUID = ctx.workflow_id
+             span = ctx.get_current_span()
+             if span:
+                 trace_id = format_trace_id(span.get_span_context().trace_id)
+                 record.traceId = trace_id
+
          return True
  
  
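
With this change, log records emitted inside a workflow carry operationUUID (the workflow ID) and traceId attributes. A hedged sketch of a formatter that surfaces them, using a hypothetical EnsureTraceFields filter so records logged outside any workflow (where the attributes are absent) do not break the format string:

    import logging

    class EnsureTraceFields(logging.Filter):
        # Hypothetical helper: fill in placeholders when a record was not
        # emitted from within a DBOS workflow context.
        def filter(self, record: logging.LogRecord) -> bool:
            for field in ("operationUUID", "traceId"):
                if not hasattr(record, field):
                    setattr(record, field, "-")
            return True

    handler = logging.StreamHandler()
    handler.addFilter(EnsureTraceFields())
    handler.setFormatter(
        logging.Formatter("%(asctime)s [wf=%(operationUUID)s trace=%(traceId)s] %(message)s")
    )
    logging.getLogger("dbos").addHandler(handler)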
@@ -85,7 +85,7 @@ def queue_thread(stop_event: threading.Event, dbos: "DBOS") -> None:
      for _, queue in dbos._registry.queue_info_map.items():
          try:
              wf_ids = dbos._sys_db.start_queued_workflows(
-                 queue, GlobalParams.executor_id
+                 queue, GlobalParams.executor_id, GlobalParams.app_version
              )
              for id in wf_ids:
                  execute_workflow_by_id(dbos, id)
@@ -29,7 +29,7 @@ from dbos._utils import GlobalParams
  
  from . import _serialization
  from ._context import get_local_dbos_context
- from ._dbos_config import ConfigFile
+ from ._dbos_config import ConfigFile, DatabaseConfig
  from ._error import (
      DBOSConflictingWorkflowError,
      DBOSDeadLetterQueueError,
@@ -170,23 +170,21 @@ _dbos_null_topic = "__null__topic__"
  
  class SystemDatabase:
  
-     def __init__(self, config: ConfigFile, *, debug_mode: bool = False):
-         self.config = config
- 
+     def __init__(self, database: DatabaseConfig, *, debug_mode: bool = False):
          sysdb_name = (
-             config["database"]["sys_db_name"]
-             if "sys_db_name" in config["database"] and config["database"]["sys_db_name"]
-             else config["database"]["app_db_name"] + SystemSchema.sysdb_suffix
+             database["sys_db_name"]
+             if "sys_db_name" in database and database["sys_db_name"]
+             else database["app_db_name"] + SystemSchema.sysdb_suffix
          )
  
          if not debug_mode:
              # If the system database does not already exist, create it
              postgres_db_url = sa.URL.create(
                  "postgresql+psycopg",
-                 username=config["database"]["username"],
-                 password=config["database"]["password"],
-                 host=config["database"]["hostname"],
-                 port=config["database"]["port"],
+                 username=database["username"],
+                 password=database["password"],
+                 host=database["hostname"],
+                 port=database["port"],
                  database="postgres",
                  # fills the "application_name" column in pg_stat_activity
                  query={"application_name": f"dbos_transact_{GlobalParams.executor_id}"},
@@ -203,19 +201,23 @@ class SystemDatabase:
  
          system_db_url = sa.URL.create(
              "postgresql+psycopg",
-             username=config["database"]["username"],
-             password=config["database"]["password"],
-             host=config["database"]["hostname"],
-             port=config["database"]["port"],
+             username=database["username"],
+             password=database["password"],
+             host=database["hostname"],
+             port=database["port"],
              database=sysdb_name,
              # fills the "application_name" column in pg_stat_activity
              query={"application_name": f"dbos_transact_{GlobalParams.executor_id}"},
          )
  
          # Create a connection pool for the system database
+         pool_size = database.get("sys_db_pool_size")
+         if pool_size is None:
+             pool_size = 20
+
          self.engine = sa.create_engine(
              system_db_url,
-             pool_size=config["database"]["sys_db_pool_size"],
+             pool_size=pool_size,
              max_overflow=0,
              pool_timeout=30,
              connect_args={"connect_timeout": 10},
@@ -1264,7 +1266,9 @@ class SystemDatabase:
              .on_conflict_do_nothing()
          )
  
-     def start_queued_workflows(self, queue: "Queue", executor_id: str) -> List[str]:
+     def start_queued_workflows(
+         self, queue: "Queue", executor_id: str, app_version: str
+     ) -> List[str]:
          if self._debug_mode:
              return []
  
@@ -1379,26 +1383,36 @@ class SystemDatabase:
                      break
  
                  # To start a function, first set its status to PENDING and update its executor ID
-                 c.execute(
+                 res = c.execute(
                      SystemSchema.workflow_status.update()
                      .where(SystemSchema.workflow_status.c.workflow_uuid == id)
                      .where(
                          SystemSchema.workflow_status.c.status
                          == WorkflowStatusString.ENQUEUED.value
                      )
+                     .where(
+                         sa.or_(
+                             SystemSchema.workflow_status.c.application_version
+                             == app_version,
+                             SystemSchema.workflow_status.c.application_version.is_(
+                                 None
+                             ),
+                         )
+                     )
                      .values(
                          status=WorkflowStatusString.PENDING.value,
+                         application_version=app_version,
                          executor_id=executor_id,
                      )
                  )
- 
-                 # Then give it a start time and assign the executor ID
-                 c.execute(
-                     SystemSchema.workflow_queue.update()
-                     .where(SystemSchema.workflow_queue.c.workflow_uuid == id)
-                     .values(started_at_epoch_ms=start_time_ms)
-                 )
-                 ret_ids.append(id)
+                 if res.rowcount > 0:
+                     # Then give it a start time and assign the executor ID
+                     c.execute(
+                         SystemSchema.workflow_queue.update()
+                         .where(SystemSchema.workflow_queue.c.workflow_uuid == id)
+                         .values(started_at_epoch_ms=start_time_ms)
+                     )
+                     ret_ids.append(id)
  
              # If we have a limiter, garbage-collect all completed functions started
              # before the period. If there's no limiter, there's no need--they were
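
Together with the _queue.py change above, this makes dequeueing version-aware: the UPDATE only claims an ENQUEUED row whose application_version matches the executor's version or is NULL (in which case the row is stamped with the claiming executor's version), and the rowcount check distinguishes a successful claim from a row owned by another version. A hedged sketch of pinning a version at enqueue time through the new client (the connection URL, workflow name, and queue name are assumptions):

    from dbos import DBOSClient, EnqueueOptions

    client = DBOSClient("postgresql://postgres:dbos@localhost:5432/my_app")  # assumed URL
    options: EnqueueOptions = {
        "workflow_name": "process_order",  # hypothetical workflow on the worker
        "queue_name": "orders",            # hypothetical queue
        "app_version": "abc123",           # only executors running version abc123 may claim it
    }
    handle = client.enqueue(options, 42)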
@@ -189,8 +189,8 @@ def migrate() -> None:
      app_db = None
      sys_db = None
      try:
-         sys_db = SystemDatabase(config)
-         app_db = ApplicationDatabase(config)
+         sys_db = SystemDatabase(config["database"])
+         app_db = ApplicationDatabase(config["database"])
      except Exception as e:
          typer.echo(f"DBOS system schema migration failed: {e}")
      finally:
@@ -314,7 +314,7 @@ def list(
      ] = False,
  ) -> None:
      config = load_config(silent=True)
-     sys_db = SystemDatabase(config)
+     sys_db = SystemDatabase(config["database"])
      workflows = list_workflows(
          sys_db,
          limit=limit,
@@ -338,7 +338,7 @@ def get(
      ] = False,
  ) -> None:
      config = load_config(silent=True)
-     sys_db = SystemDatabase(config)
+     sys_db = SystemDatabase(config["database"])
      print(
          jsonpickle.encode(get_workflow(sys_db, workflow_id, request), unpicklable=False)
      )
@@ -349,7 +349,7 @@ def steps(
      workflow_id: Annotated[str, typer.Argument()],
  ) -> None:
      config = load_config(silent=True)
-     sys_db = SystemDatabase(config)
+     sys_db = SystemDatabase(config["database"])
      print(
          jsonpickle.encode(list_workflow_steps(sys_db, workflow_id), unpicklable=False)
      )
@@ -475,7 +475,7 @@ def list_queue(
      ] = False,
  ) -> None:
      config = load_config(silent=True)
-     sys_db = SystemDatabase(config)
+     sys_db = SystemDatabase(config["database"])
      workflows = list_queued_workflows(
          sys_db=sys_db,
          limit=limit,
@@ -28,7 +28,7 @@ dependencies = [
  ]
  requires-python = ">=3.9"
  readme = "README.md"
- version = "0.25.0a14"
+ version = "0.25.1"
  
  [project.license]
  text = "MIT"
@@ -0,0 +1,38 @@
+ import json
+ from typing import Optional, TypedDict, cast
+
+ from dbos import DBOS, Queue
+
+
+ class Person(TypedDict):
+     first: str
+     last: str
+     age: int
+
+
+ queue = Queue("test_queue")
+
+
+ @DBOS.workflow()
+ def enqueue_test(numVal: int, strVal: str, person: Person) -> str:
+     return f"{numVal}-{strVal}-{json.dumps(person)}"
+
+
+ @DBOS.workflow()
+ def send_test(topic: Optional[str] = None) -> str:
+     return cast(str, DBOS.recv(topic, 60))
+
+
+ @DBOS.workflow()
+ def retrieve_test(value: str) -> str:
+     DBOS.sleep(5)
+     return value
+
+
+ @DBOS.workflow()
+ def event_test(key: str, value: str, update: Optional[int] = None) -> str:
+     DBOS.set_event(key, value)
+     if update is not None:
+         DBOS.sleep(update)
+         DBOS.set_event(key, f"updated-{value}")
+     return f"{key}-{value}"
@@ -0,0 +1,30 @@
+ import os
+ import sys
+
+ from dbos import DBOS, SetWorkflowID
+ from tests import client_collateral as cc
+ from tests.conftest import default_config
+
+ app_vers = os.environ.get("DBOS__APPVERSION")
+ if app_vers is None:
+     DBOS.logger.error("DBOS__APPVERSION not set")
+     os._exit(1)
+ else:
+     DBOS.logger.info(f"DBOS__APPVERSION: {app_vers}")
+
+ if len(sys.argv) < 2:
+     DBOS.logger.error("Usage: client_worker wfid <topic>")
+     os._exit(1)
+
+ wfid = sys.argv[1]
+ topic = sys.argv[2] if len(sys.argv) > 2 else None
+
+ config = default_config()
+ DBOS(config=config)
+ DBOS.launch()
+
+ DBOS.logger.info(f"Starting send_test with WF ID: {wfid}")
+ with SetWorkflowID(wfid):
+     DBOS.start_workflow(cc.send_test, topic)
+
+ os._exit(0)
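
The worker starts send_test under a caller-chosen workflow ID and then blocks in DBOS.recv. A hedged sketch of the client half of that handshake, run from a separate process (the URL, workflow ID, and topic are assumptions matching the worker's arguments):

    from dbos import DBOSClient

    client = DBOSClient("postgresql://postgres:dbos@localhost:5432/dbostestpy")  # assumed URL
    client.send("my-send-wfid", "hello", topic="test_topic")  # wfid/topic given to client_worker
    handle = client.retrieve_workflow("my-send-wfid")
    assert handle.get_result() == "hello"  # send_test returns the received message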