dbos 1.15.0a9__py3-none-any.whl → 2.4.0a5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
dbos/__init__.py CHANGED
@@ -12,6 +12,7 @@ from ._dbos_config import DBOSConfig
 from ._debouncer import Debouncer, DebouncerClient
 from ._kafka_message import KafkaMessage
 from ._queue import Queue
+from ._serialization import Serializer
 from ._sys_db import GetWorkflowsInput, WorkflowStatus, WorkflowStatusString
 
 __all__ = [
@@ -35,4 +36,5 @@ __all__ = [
     "Queue",
     "Debouncer",
     "DebouncerClient",
+    "Serializer",
 ]
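The newly exported Serializer lets applications swap out how workflow inputs, outputs, and errors are encoded. A minimal sketch of a custom JSON-based serializer, assuming Serializer can be subclassed and that its interface is the serialize/deserialize pair used by the database layers later in this diff (the exact base-class signature may differ; the database URL is a placeholder):

import json
from typing import Any

from dbos import DBOSClient, Serializer


class JSONSerializer(Serializer):
    """Hypothetical JSON serializer in place of the default encoding."""

    def serialize(self, data: Any) -> str:
        # Encode workflow inputs/outputs as a JSON string.
        return json.dumps(data)

    def deserialize(self, serialized_data: str) -> Any:
        # Decode the stored string back into Python objects.
        return json.loads(serialized_data)


# Mirrors the new `serializer` parameter on DBOSClient.__init__ below.
client = DBOSClient(
    database_url="postgresql://localhost/dbos",  # placeholder URL
    serializer=JSONSerializer(),
)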
dbos/_admin_server.py CHANGED
@@ -338,6 +338,7 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
                 end_time=filters.get("end_time"),
                 status=filters.get("status"),
                 app_version=filters.get("application_version"),
+                forked_from=filters.get("forked_from"),
                 name=filters.get("workflow_name"),
                 limit=filters.get("limit"),
                 offset=filters.get("offset"),
@@ -364,6 +365,7 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
                 start_time=filters.get("start_time"),
                 end_time=filters.get("end_time"),
                 status=filters.get("status"),
+                forked_from=filters.get("forked_from"),
                 name=filters.get("workflow_name"),
                 limit=filters.get("limit"),
                 offset=filters.get("offset"),
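Both admin-server listing endpoints now accept a forked_from filter alongside the existing ones. A hedged sketch of querying it with only the standard library, assuming the admin server's default port 3001 and a JSON filter body; the endpoint path is illustrative, not confirmed by this diff:

import json
from urllib.request import Request, urlopen

# Field names mirror the `filters.get(...)` keys above.
filters = {
    "status": "SUCCESS",
    "forked_from": "original-workflow-id",  # only workflows forked from this ID
    "limit": 10,
}
req = Request(
    "http://localhost:3001/workflows",  # assumed listing endpoint
    data=json.dumps(filters).encode(),
    headers={"Content-Type": "application/json"},
    method="POST",
)
with urlopen(req) as resp:
    print(json.load(resp))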
dbos/_app_db.py CHANGED
@@ -8,8 +8,8 @@ from sqlalchemy.exc import DBAPIError
 from sqlalchemy.orm import Session, sessionmaker
 
 from dbos._migration import get_sqlite_timestamp_expr
+from dbos._serialization import Serializer
 
-from . import _serialization
 from ._error import DBOSUnexpectedStepError, DBOSWorkflowConflictIDError
 from ._logger import dbos_logger
 from ._schemas.application_database import ApplicationSchema
@@ -34,17 +34,52 @@ class RecordedResult(TypedDict):
 
 class ApplicationDatabase(ABC):
 
+    @staticmethod
+    def create(
+        database_url: str,
+        engine_kwargs: Dict[str, Any],
+        schema: Optional[str],
+        serializer: Serializer,
+        debug_mode: bool = False,
+    ) -> "ApplicationDatabase":
+        """Factory method to create the appropriate ApplicationDatabase implementation based on URL."""
+        if database_url.startswith("sqlite"):
+            return SQLiteApplicationDatabase(
+                database_url=database_url,
+                engine_kwargs=engine_kwargs,
+                schema=schema,
+                serializer=serializer,
+                debug_mode=debug_mode,
+            )
+        else:
+            # Default to PostgreSQL for postgresql://, postgres://, or other URLs
+            return PostgresApplicationDatabase(
+                database_url=database_url,
+                engine_kwargs=engine_kwargs,
+                schema=schema,
+                serializer=serializer,
+                debug_mode=debug_mode,
+            )
+
     def __init__(
         self,
         *,
         database_url: str,
         engine_kwargs: Dict[str, Any],
+        serializer: Serializer,
+        schema: Optional[str],
         debug_mode: bool = False,
     ):
+        if database_url.startswith("sqlite"):
+            self.schema = None
+        else:
+            self.schema = schema if schema else "dbos"
+            ApplicationSchema.transaction_outputs.schema = schema
         self.engine = self._create_engine(database_url, engine_kwargs)
         self._engine_kwargs = engine_kwargs
         self.sessionmaker = sessionmaker(bind=self.engine)
         self.debug_mode = debug_mode
+        self.serializer = serializer
 
     @abstractmethod
     def _create_engine(
@@ -156,14 +191,18 @@ class ApplicationDatabase(ABC):
                 function_id=row[0],
                 function_name=row[1],
                 output=(
-                    _serialization.deserialize(row[2]) if row[2] is not None else row[2]
+                    self.serializer.deserialize(row[2])
+                    if row[2] is not None
+                    else row[2]
                 ),
                 error=(
-                    _serialization.deserialize_exception(row[3])
+                    self.serializer.deserialize(row[3])
                     if row[3] is not None
                     else row[3]
                 ),
                 child_workflow_id=None,
+                started_at_epoch_ms=None,
+                completed_at_epoch_ms=None,
             )
             for row in rows
         ]
@@ -237,52 +276,10 @@ class ApplicationDatabase(ABC):
         """Check if the error is a serialization/concurrency error."""
         pass
 
-    @staticmethod
-    def create(
-        database_url: str,
-        engine_kwargs: Dict[str, Any],
-        schema: Optional[str],
-        debug_mode: bool = False,
-    ) -> "ApplicationDatabase":
-        """Factory method to create the appropriate ApplicationDatabase implementation based on URL."""
-        if database_url.startswith("sqlite"):
-            return SQLiteApplicationDatabase(
-                database_url=database_url,
-                engine_kwargs=engine_kwargs,
-                debug_mode=debug_mode,
-            )
-        else:
-            # Default to PostgreSQL for postgresql://, postgres://, or other URLs
-            return PostgresApplicationDatabase(
-                database_url=database_url,
-                engine_kwargs=engine_kwargs,
-                debug_mode=debug_mode,
-                schema=schema,
-            )
-
 
 class PostgresApplicationDatabase(ApplicationDatabase):
     """PostgreSQL-specific implementation of ApplicationDatabase."""
 
-    def __init__(
-        self,
-        *,
-        database_url: str,
-        engine_kwargs: Dict[str, Any],
-        schema: Optional[str],
-        debug_mode: bool = False,
-    ):
-        super().__init__(
-            database_url=database_url,
-            engine_kwargs=engine_kwargs,
-            debug_mode=debug_mode,
-        )
-        if schema is None:
-            self.schema = "dbos"
-        else:
-            self.schema = schema
-        ApplicationSchema.transaction_outputs.schema = schema
-
     def _create_engine(
        self, database_url: str, engine_kwargs: Dict[str, Any]
     ) -> sa.Engine:
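The factory now threads the serializer through to both backends and hoists the schema handling that previously lived only in the PostgreSQL subclass into the base __init__. A minimal sketch of constructing it, assuming DefaultSerializer from dbos._serialization; the URL and engine arguments are illustrative:

from dbos._app_db import ApplicationDatabase
from dbos._serialization import DefaultSerializer

# The URL scheme picks the backend: "sqlite..." -> SQLiteApplicationDatabase,
# anything else defaults to PostgresApplicationDatabase.
app_db = ApplicationDatabase.create(
    database_url="postgresql://localhost/my_app",  # placeholder URL
    engine_kwargs={"pool_size": 2, "pool_timeout": 30},
    schema=None,  # Postgres falls back to the "dbos" schema; SQLite ignores it
    serializer=DefaultSerializer(),
)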
dbos/_client.py CHANGED
@@ -1,4 +1,5 @@
 import asyncio
+import json
 import time
 import uuid
 from typing import (
@@ -16,7 +17,6 @@ from typing import (
 
 import sqlalchemy as sa
 
-from dbos import _serialization
 from dbos._app_db import ApplicationDatabase
 from dbos._context import MaxPriority, MinPriority
 from dbos._sys_db import SystemDatabase
@@ -27,7 +27,7 @@ if TYPE_CHECKING:
 from dbos._dbos_config import get_system_database_url, is_valid_database_url
 from dbos._error import DBOSException, DBOSNonExistentWorkflowError
 from dbos._registrations import DEFAULT_MAX_RECOVERY_ATTEMPTS
-from dbos._serialization import WorkflowInputs
+from dbos._serialization import DefaultSerializer, Serializer, WorkflowInputs
 from dbos._sys_db import (
     EnqueueOptionsInternal,
     StepInfo,
@@ -63,6 +63,9 @@ class EnqueueOptions(_EnqueueOptionsRequired, total=False):
     deduplication_id: str
     priority: int
     max_recovery_attempts: int
+    queue_partition_key: str
+    authenticated_user: str
+    authenticated_roles: list[str]
 
 
 def validate_enqueue_options(options: EnqueueOptions) -> None:
@@ -127,7 +130,9 @@ class DBOSClient:
         system_database_engine: Optional[sa.Engine] = None,
         application_database_url: Optional[str] = None,
         dbos_system_schema: Optional[str] = "dbos",
+        serializer: Serializer = DefaultSerializer(),
     ):
+        self._serializer = serializer
         application_database_url = (
             database_url if database_url else application_database_url
         )
@@ -144,23 +149,30 @@ class DBOSClient:
         self._sys_db = SystemDatabase.create(
             system_database_url=system_database_url,
             engine_kwargs={
+                "connect_args": {"application_name": "dbos_transact_client"},
                 "pool_timeout": 30,
                 "max_overflow": 0,
                 "pool_size": 2,
+                "pool_pre_ping": True,
             },
             engine=system_database_engine,
             schema=dbos_system_schema,
+            serializer=serializer,
+            executor_id=None,
         )
         self._sys_db.check_connection()
         if application_database_url:
             self._app_db = ApplicationDatabase.create(
                 database_url=application_database_url,
                 engine_kwargs={
+                    "connect_args": {"application_name": "dbos_transact_client"},
                     "pool_timeout": 30,
                     "max_overflow": 0,
                     "pool_size": 2,
+                    "pool_pre_ping": True,
                 },
                 schema=dbos_system_schema,
+                serializer=serializer,
             )
 
     def destroy(self) -> None:
@@ -182,8 +194,16 @@ class DBOSClient:
             "deduplication_id": options.get("deduplication_id"),
             "priority": options.get("priority"),
             "app_version": options.get("app_version"),
+            "queue_partition_key": options.get("queue_partition_key"),
         }
 
+        authenticated_user = options.get("authenticated_user")
+        authenticated_roles = (
+            json.dumps(options.get("authenticated_roles"))
+            if options.get("authenticated_roles")
+            else None
+        )
+
         inputs: WorkflowInputs = {
             "args": args,
             "kwargs": kwargs,
@@ -197,9 +217,9 @@ class DBOSClient:
             "queue_name": queue_name,
             "app_version": enqueue_options_internal["app_version"],
             "config_name": None,
-            "authenticated_user": None,
+            "authenticated_user": authenticated_user,
             "assumed_role": None,
-            "authenticated_roles": None,
+            "authenticated_roles": authenticated_roles,
             "output": None,
             "error": None,
             "created_at": None,
@@ -217,7 +237,9 @@ class DBOSClient:
                 if enqueue_options_internal["priority"] is not None
                 else 0
             ),
-            "inputs": _serialization.serialize_args(inputs),
+            "inputs": self._serializer.serialize(inputs),
+            "queue_partition_key": enqueue_options_internal["queue_partition_key"],
+            "forked_from": None,
         }
 
         self._sys_db.init_workflow(
@@ -282,7 +304,9 @@ class DBOSClient:
             "workflow_deadline_epoch_ms": None,
             "deduplication_id": None,
             "priority": 0,
-            "inputs": _serialization.serialize_args({"args": (), "kwargs": {}}),
+            "inputs": self._serializer.serialize({"args": (), "kwargs": {}}),
+            "queue_partition_key": None,
+            "forked_from": None,
        }
         with self._sys_db.engine.begin() as conn:
             self._sys_db._insert_workflow_status(
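The three new EnqueueOptions fields let a client enqueue a workflow onto a queue partition and attach an authenticated identity without running inside a DBOS app. A hedged usage sketch that mirrors the options handling above; the workflow name, queue name, and database URL are placeholders:

from dbos import DBOSClient
from dbos._client import EnqueueOptions

client = DBOSClient(database_url="postgresql://localhost/dbos")  # placeholder

options: EnqueueOptions = {
    "workflow_name": "process_order",  # placeholder workflow
    "queue_name": "order_queue",       # placeholder queue
    "queue_partition_key": "customer-42",  # routes to one queue partition
    "authenticated_user": "alice",
    "authenticated_roles": ["admin", "support"],  # stored JSON-encoded
}
handle = client.enqueue(options, order_id=1234)
print(handle.get_result())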
dbos/_conductor/conductor.py CHANGED
@@ -223,22 +223,21 @@ class ConductorWebsocket(threading.Thread):
                 body = list_workflows_message.body
                 infos = []
                 try:
-                    load_input = body.get("load_input", False)
-                    load_output = body.get("load_output", False)
                     infos = list_workflows(
                         self.dbos._sys_db,
-                        workflow_ids=body["workflow_uuids"],
-                        user=body["authenticated_user"],
-                        start_time=body["start_time"],
-                        end_time=body["end_time"],
-                        status=body["status"],
-                        app_version=body["application_version"],
-                        name=body["workflow_name"],
-                        limit=body["limit"],
-                        offset=body["offset"],
-                        sort_desc=body["sort_desc"],
-                        load_input=load_input,
-                        load_output=load_output,
+                        workflow_ids=body.get("workflow_uuids", None),
+                        user=body.get("authenticated_user", None),
+                        start_time=body.get("start_time", None),
+                        end_time=body.get("end_time", None),
+                        status=body.get("status", None),
+                        app_version=body.get("application_version", None),
+                        forked_from=body.get("forked_from", None),
+                        name=body.get("workflow_name", None),
+                        limit=body.get("limit", None),
+                        offset=body.get("offset", None),
+                        sort_desc=body.get("sort_desc", False),
+                        load_input=body.get("load_input", False),
+                        load_output=body.get("load_output", False),
                     )
                 except Exception as e:
                     error_message = f"Exception encountered when listing workflows: {traceback.format_exc()}"
@@ -261,18 +260,18 @@ class ConductorWebsocket(threading.Thread):
                 q_body = list_queued_workflows_message.body
                 infos = []
                 try:
-                    q_load_input = q_body.get("load_input", False)
                     infos = list_queued_workflows(
                         self.dbos._sys_db,
-                        start_time=q_body["start_time"],
-                        end_time=q_body["end_time"],
-                        status=q_body["status"],
-                        name=q_body["workflow_name"],
-                        limit=q_body["limit"],
-                        offset=q_body["offset"],
-                        queue_name=q_body["queue_name"],
-                        sort_desc=q_body["sort_desc"],
-                        load_input=q_load_input,
+                        start_time=q_body.get("start_time", None),
+                        end_time=q_body.get("end_time", None),
+                        status=q_body.get("status", None),
+                        forked_from=q_body.get("forked_from", None),
+                        name=q_body.get("workflow_name", None),
+                        limit=q_body.get("limit", None),
+                        offset=q_body.get("offset", None),
+                        queue_name=q_body.get("queue_name", None),
+                        sort_desc=q_body.get("sort_desc", False),
+                        load_input=q_body.get("load_input", False),
                     )
                 except Exception as e:
                     error_message = f"Exception encountered when listing queued workflows: {traceback.format_exc()}"
dbos/_conductor/protocol.py CHANGED
@@ -118,6 +118,7 @@ class ListWorkflowsBody(TypedDict, total=False):
     end_time: Optional[str]
     status: Optional[str]
     application_version: Optional[str]
+    forked_from: Optional[str]
     limit: Optional[int]
     offset: Optional[int]
     sort_desc: bool
@@ -143,6 +144,12 @@ class WorkflowsOutput:
     QueueName: Optional[str]
     ApplicationVersion: Optional[str]
     ExecutorID: Optional[str]
+    WorkflowTimeoutMS: Optional[str]
+    WorkflowDeadlineEpochMS: Optional[str]
+    DeduplicationID: Optional[str]
+    Priority: Optional[str]
+    QueuePartitionKey: Optional[str]
+    ForkedFrom: Optional[str]
 
     @classmethod
     def from_workflow_information(cls, info: WorkflowStatus) -> "WorkflowsOutput":
@@ -152,12 +159,22 @@ class WorkflowsOutput:
         inputs_str = str(info.input) if info.input is not None else None
         outputs_str = str(info.output) if info.output is not None else None
         error_str = str(info.error) if info.error is not None else None
-        request_str = None
         roles_str = (
             str(info.authenticated_roles)
             if info.authenticated_roles is not None
             else None
         )
+        workflow_timeout_ms_str = (
+            str(info.workflow_timeout_ms)
+            if info.workflow_timeout_ms is not None
+            else None
+        )
+        workflow_deadline_epoch_ms_str = (
+            str(info.workflow_deadline_epoch_ms)
+            if info.workflow_deadline_epoch_ms is not None
+            else None
+        )
+        priority_str = str(info.priority) if info.priority is not None else None
 
         return cls(
             WorkflowUUID=info.workflow_id,
@@ -176,6 +193,12 @@ class WorkflowsOutput:
             QueueName=info.queue_name,
             ApplicationVersion=info.app_version,
             ExecutorID=info.executor_id,
+            WorkflowTimeoutMS=workflow_timeout_ms_str,
+            WorkflowDeadlineEpochMS=workflow_deadline_epoch_ms_str,
+            DeduplicationID=info.deduplication_id,
+            Priority=priority_str,
+            QueuePartitionKey=info.queue_partition_key,
+            ForkedFrom=info.forked_from,
         )
 
 
@@ -186,14 +209,28 @@ class WorkflowSteps:
     output: Optional[str]
     error: Optional[str]
     child_workflow_id: Optional[str]
+    started_at_epoch_ms: Optional[str]
+    completed_at_epoch_ms: Optional[str]
 
     @classmethod
     def from_step_info(cls, info: StepInfo) -> "WorkflowSteps":
         output_str = str(info["output"]) if info["output"] is not None else None
         error_str = str(info["error"]) if info["error"] is not None else None
+        started_at_str = (
+            str(info["started_at_epoch_ms"])
+            if info["started_at_epoch_ms"] is not None
+            else None
+        )
+        completed_at_str = (
+            str(info["completed_at_epoch_ms"])
+            if info["completed_at_epoch_ms"] is not None
+            else None
+        )
         return cls(
             function_id=info["function_id"],
             function_name=info["function_name"],
+            started_at_epoch_ms=started_at_str,
+            completed_at_epoch_ms=completed_at_str,
             output=output_str,
             error=error_str,
             child_workflow_id=info["child_workflow_id"],
@@ -216,6 +253,7 @@ class ListQueuedWorkflowsBody(TypedDict, total=False):
     start_time: Optional[str]
     end_time: Optional[str]
     status: Optional[str]
+    forked_from: Optional[str]
     queue_name: Optional[str]
     limit: Optional[int]
     offset: Optional[int]
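The new output fields follow the protocol's existing convention of stringifying every numeric value, and the listing bodies stay TypedDicts with total=False, so senders include only the filters they need. A hedged sketch of a filter body using the new forked_from field, assuming the module path shown in the header above:

from dbos._conductor.protocol import ListWorkflowsBody

# Only the desired filters are present; absent keys read as unset via .get().
body: ListWorkflowsBody = {
    "workflow_name": "process_order",  # placeholder
    "forked_from": "wf-original-123",  # match workflows forked from this ID
    "limit": 20,
    "sort_desc": True,
}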
dbos/_context.py CHANGED
@@ -120,6 +120,8 @@ class DBOSContext:
         self.deduplication_id: Optional[str] = None
         # A user-specified priority for the enqueuing workflow.
         self.priority: Optional[int] = None
+        # If the workflow is enqueued on a partitioned queue, its partition key
+        self.queue_partition_key: Optional[str] = None
 
     def create_child(self) -> DBOSContext:
         rv = DBOSContext()
@@ -479,6 +481,7 @@ class SetEnqueueOptions:
         deduplication_id: Optional[str] = None,
         priority: Optional[int] = None,
         app_version: Optional[str] = None,
+        queue_partition_key: Optional[str] = None,
     ) -> None:
         self.created_ctx = False
         self.deduplication_id: Optional[str] = deduplication_id
@@ -491,6 +494,8 @@ class SetEnqueueOptions:
         self.saved_priority: Optional[int] = None
         self.app_version: Optional[str] = app_version
         self.saved_app_version: Optional[str] = None
+        self.queue_partition_key = queue_partition_key
+        self.saved_queue_partition_key: Optional[str] = None
 
     def __enter__(self) -> SetEnqueueOptions:
         # Code to create a basic context
@@ -505,6 +510,8 @@ class SetEnqueueOptions:
         ctx.priority = self.priority
         self.saved_app_version = ctx.app_version
         ctx.app_version = self.app_version
+        self.saved_queue_partition_key = ctx.queue_partition_key
+        ctx.queue_partition_key = self.queue_partition_key
         return self
 
     def __exit__(
@@ -517,6 +524,7 @@ class SetEnqueueOptions:
         curr_ctx.deduplication_id = self.saved_deduplication_id
         curr_ctx.priority = self.saved_priority
         curr_ctx.app_version = self.saved_app_version
+        curr_ctx.queue_partition_key = self.saved_queue_partition_key
         # Code to clean up the basic context if we created it
         if self.created_ctx:
             _clear_local_dbos_context()
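SetEnqueueOptions saves and restores the partition key alongside the other options, so nested contexts behave predictably. A usage sketch inside a configured DBOS app; the queue and workflow names are placeholders:

from dbos import DBOS, Queue, SetEnqueueOptions

queue = Queue("orders")  # placeholder queue


@DBOS.workflow()
def process_order(order_id: int) -> None:
    ...


# Every enqueue inside the block carries the partition key; on exit the
# previous context value is restored by __exit__ above.
with SetEnqueueOptions(queue_partition_key="customer-42"):
    handle = queue.enqueue(process_order, 1234)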