dbos 2.1.0a2.tar.gz → 2.2.0.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (99)
  1. {dbos-2.1.0a2 → dbos-2.2.0}/PKG-INFO +1 -1
  2. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/__init__.py +2 -0
  3. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_app_db.py +40 -45
  4. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_client.py +11 -4
  5. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_context.py +8 -0
  6. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_core.py +38 -26
  7. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_dbos.py +15 -0
  8. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_dbos_config.py +4 -10
  9. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_migration.py +12 -2
  10. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_queue.py +29 -4
  11. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_scheduler.py +24 -14
  12. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_schemas/system_database.py +1 -0
  13. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_serialization.py +24 -36
  14. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_sys_db.py +124 -60
  15. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/cli/migration.py +3 -0
  16. {dbos-2.1.0a2 → dbos-2.2.0}/pyproject.toml +1 -1
  17. {dbos-2.1.0a2 → dbos-2.2.0}/tests/test_dbos.py +81 -1
  18. {dbos-2.1.0a2 → dbos-2.2.0}/tests/test_failures.py +9 -8
  19. {dbos-2.1.0a2 → dbos-2.2.0}/tests/test_queue.py +77 -0
  20. {dbos-2.1.0a2 → dbos-2.2.0}/tests/test_scheduler.py +7 -7
  21. {dbos-2.1.0a2 → dbos-2.2.0}/tests/test_schema_migration.py +2 -0
  22. {dbos-2.1.0a2 → dbos-2.2.0}/LICENSE +0 -0
  23. {dbos-2.1.0a2 → dbos-2.2.0}/README.md +0 -0
  24. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/__main__.py +0 -0
  25. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_admin_server.py +0 -0
  26. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_classproperty.py +0 -0
  27. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_conductor/conductor.py +0 -0
  28. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_conductor/protocol.py +0 -0
  29. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_croniter.py +0 -0
  30. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_debouncer.py +0 -0
  31. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_debug.py +0 -0
  32. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_docker_pg_helper.py +0 -0
  33. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_error.py +0 -0
  34. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_event_loop.py +0 -0
  35. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_fastapi.py +0 -0
  36. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_flask.py +0 -0
  37. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_kafka.py +0 -0
  38. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_kafka_message.py +0 -0
  39. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_logger.py +0 -0
  40. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_outcome.py +0 -0
  41. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_recovery.py +0 -0
  42. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_registrations.py +0 -0
  43. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_roles.py +0 -0
  44. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_schemas/__init__.py +0 -0
  45. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_schemas/application_database.py +0 -0
  46. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_sys_db_postgres.py +0 -0
  47. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_sys_db_sqlite.py +0 -0
  48. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_templates/dbos-db-starter/README.md +0 -0
  49. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_templates/dbos-db-starter/__package/__init__.py +0 -0
  50. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_templates/dbos-db-starter/__package/main.py.dbos +0 -0
  51. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_templates/dbos-db-starter/__package/schema.py +0 -0
  52. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +0 -0
  53. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_templates/dbos-db-starter/migrations/create_table.py.dbos +0 -0
  54. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -0
  55. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_tracer.py +0 -0
  56. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_utils.py +0 -0
  57. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/_workflow_commands.py +0 -0
  58. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/cli/_github_init.py +0 -0
  59. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/cli/_template_init.py +0 -0
  60. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/cli/cli.py +0 -0
  61. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/dbos-config.schema.json +0 -0
  62. {dbos-2.1.0a2 → dbos-2.2.0}/dbos/py.typed +0 -0
  63. {dbos-2.1.0a2 → dbos-2.2.0}/tests/__init__.py +0 -0
  64. {dbos-2.1.0a2 → dbos-2.2.0}/tests/atexit_no_ctor.py +0 -0
  65. {dbos-2.1.0a2 → dbos-2.2.0}/tests/atexit_no_launch.py +0 -0
  66. {dbos-2.1.0a2 → dbos-2.2.0}/tests/classdefs.py +0 -0
  67. {dbos-2.1.0a2 → dbos-2.2.0}/tests/client_collateral.py +0 -0
  68. {dbos-2.1.0a2 → dbos-2.2.0}/tests/client_worker.py +0 -0
  69. {dbos-2.1.0a2 → dbos-2.2.0}/tests/conftest.py +0 -0
  70. {dbos-2.1.0a2 → dbos-2.2.0}/tests/dupname_classdefs1.py +0 -0
  71. {dbos-2.1.0a2 → dbos-2.2.0}/tests/dupname_classdefsa.py +0 -0
  72. {dbos-2.1.0a2 → dbos-2.2.0}/tests/more_classdefs.py +0 -0
  73. {dbos-2.1.0a2 → dbos-2.2.0}/tests/queuedworkflow.py +0 -0
  74. {dbos-2.1.0a2 → dbos-2.2.0}/tests/script_without_fastapi.py +0 -0
  75. {dbos-2.1.0a2 → dbos-2.2.0}/tests/test_admin_server.py +0 -0
  76. {dbos-2.1.0a2 → dbos-2.2.0}/tests/test_async.py +0 -0
  77. {dbos-2.1.0a2 → dbos-2.2.0}/tests/test_async_workflow_management.py +0 -0
  78. {dbos-2.1.0a2 → dbos-2.2.0}/tests/test_classdecorators.py +0 -0
  79. {dbos-2.1.0a2 → dbos-2.2.0}/tests/test_cli.py +0 -0
  80. {dbos-2.1.0a2 → dbos-2.2.0}/tests/test_client.py +0 -0
  81. {dbos-2.1.0a2 → dbos-2.2.0}/tests/test_concurrency.py +0 -0
  82. {dbos-2.1.0a2 → dbos-2.2.0}/tests/test_config.py +0 -0
  83. {dbos-2.1.0a2 → dbos-2.2.0}/tests/test_croniter.py +0 -0
  84. {dbos-2.1.0a2 → dbos-2.2.0}/tests/test_debouncer.py +0 -0
  85. {dbos-2.1.0a2 → dbos-2.2.0}/tests/test_debug.py +0 -0
  86. {dbos-2.1.0a2 → dbos-2.2.0}/tests/test_docker_secrets.py +0 -0
  87. {dbos-2.1.0a2 → dbos-2.2.0}/tests/test_fastapi.py +0 -0
  88. {dbos-2.1.0a2 → dbos-2.2.0}/tests/test_fastapi_roles.py +0 -0
  89. {dbos-2.1.0a2 → dbos-2.2.0}/tests/test_flask.py +0 -0
  90. {dbos-2.1.0a2 → dbos-2.2.0}/tests/test_kafka.py +0 -0
  91. {dbos-2.1.0a2 → dbos-2.2.0}/tests/test_outcome.py +0 -0
  92. {dbos-2.1.0a2 → dbos-2.2.0}/tests/test_package.py +0 -0
  93. {dbos-2.1.0a2 → dbos-2.2.0}/tests/test_singleton.py +0 -0
  94. {dbos-2.1.0a2 → dbos-2.2.0}/tests/test_spans.py +0 -0
  95. {dbos-2.1.0a2 → dbos-2.2.0}/tests/test_sqlalchemy.py +0 -0
  96. {dbos-2.1.0a2 → dbos-2.2.0}/tests/test_streaming.py +0 -0
  97. {dbos-2.1.0a2 → dbos-2.2.0}/tests/test_workflow_introspection.py +0 -0
  98. {dbos-2.1.0a2 → dbos-2.2.0}/tests/test_workflow_management.py +0 -0
  99. {dbos-2.1.0a2 → dbos-2.2.0}/version/__init__.py +0 -0
{dbos-2.1.0a2 → dbos-2.2.0}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dbos
-Version: 2.1.0a2
+Version: 2.2.0
 Summary: Ultra-lightweight durable execution in Python
 Author-Email: "DBOS, Inc." <contact@dbos.dev>
 License: MIT

{dbos-2.1.0a2 → dbos-2.2.0}/dbos/__init__.py
@@ -12,6 +12,7 @@ from ._dbos_config import DBOSConfig
 from ._debouncer import Debouncer, DebouncerClient
 from ._kafka_message import KafkaMessage
 from ._queue import Queue
+from ._serialization import Serializer
 from ._sys_db import GetWorkflowsInput, WorkflowStatus, WorkflowStatusString
 
 __all__ = [
@@ -35,4 +36,5 @@ __all__ = [
     "Queue",
     "Debouncer",
     "DebouncerClient",
+    "Serializer",
 ]

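Note: `Serializer` is newly exported from the package root. Its definition is not part of this diff, but every call site below uses exactly two methods, `serialize` and `deserialize`, and `DefaultSerializer` is the stock implementation. A minimal sketch of a custom implementation under those assumptions (the pickle-plus-base64 encoding and the parameter names are illustrative, not the package's default):

    import base64
    import pickle
    from typing import Any

    from dbos import Serializer  # newly exported in 2.2.0


    class Base64PickleSerializer(Serializer):
        # Assumed interface: serialize(data) -> str, deserialize(str) -> data.
        # Note serialize() must round-trip workflow inputs, results, AND raised
        # exceptions: the _core.py hunks below route all three through this one
        # interface, replacing the old serialize_args/serialize_exception helpers.

        def serialize(self, data: Any) -> str:
            return base64.b64encode(pickle.dumps(data)).decode("ascii")

        def deserialize(self, serialized: str) -> Any:
            return pickle.loads(base64.b64decode(serialized))
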
{dbos-2.1.0a2 → dbos-2.2.0}/dbos/_app_db.py
@@ -8,8 +8,8 @@ from sqlalchemy.exc import DBAPIError
 from sqlalchemy.orm import Session, sessionmaker
 
 from dbos._migration import get_sqlite_timestamp_expr
+from dbos._serialization import Serializer
 
-from . import _serialization
 from ._error import DBOSUnexpectedStepError, DBOSWorkflowConflictIDError
 from ._logger import dbos_logger
 from ._schemas.application_database import ApplicationSchema
@@ -34,17 +34,52 @@ class RecordedResult(TypedDict):
 
 class ApplicationDatabase(ABC):
 
+    @staticmethod
+    def create(
+        database_url: str,
+        engine_kwargs: Dict[str, Any],
+        schema: Optional[str],
+        serializer: Serializer,
+        debug_mode: bool = False,
+    ) -> "ApplicationDatabase":
+        """Factory method to create the appropriate ApplicationDatabase implementation based on URL."""
+        if database_url.startswith("sqlite"):
+            return SQLiteApplicationDatabase(
+                database_url=database_url,
+                engine_kwargs=engine_kwargs,
+                schema=schema,
+                serializer=serializer,
+                debug_mode=debug_mode,
+            )
+        else:
+            # Default to PostgreSQL for postgresql://, postgres://, or other URLs
+            return PostgresApplicationDatabase(
+                database_url=database_url,
+                engine_kwargs=engine_kwargs,
+                schema=schema,
+                serializer=serializer,
+                debug_mode=debug_mode,
+            )
+
     def __init__(
         self,
         *,
         database_url: str,
         engine_kwargs: Dict[str, Any],
+        serializer: Serializer,
+        schema: Optional[str],
         debug_mode: bool = False,
     ):
+        if database_url.startswith("sqlite"):
+            self.schema = None
+        else:
+            self.schema = schema if schema else "dbos"
+            ApplicationSchema.transaction_outputs.schema = schema
         self.engine = self._create_engine(database_url, engine_kwargs)
         self._engine_kwargs = engine_kwargs
         self.sessionmaker = sessionmaker(bind=self.engine)
         self.debug_mode = debug_mode
+        self.serializer = serializer
 
     @abstractmethod
     def _create_engine(
@@ -156,10 +191,12 @@ class ApplicationDatabase(ABC):
                 function_id=row[0],
                 function_name=row[1],
                 output=(
-                    _serialization.deserialize(row[2]) if row[2] is not None else row[2]
+                    self.serializer.deserialize(row[2])
+                    if row[2] is not None
+                    else row[2]
                 ),
                 error=(
-                    _serialization.deserialize_exception(row[3])
+                    self.serializer.deserialize(row[3])
                     if row[3] is not None
                     else row[3]
                 ),
@@ -237,52 +274,10 @@ class ApplicationDatabase(ABC):
         """Check if the error is a serialization/concurrency error."""
         pass
 
-    @staticmethod
-    def create(
-        database_url: str,
-        engine_kwargs: Dict[str, Any],
-        schema: Optional[str],
-        debug_mode: bool = False,
-    ) -> "ApplicationDatabase":
-        """Factory method to create the appropriate ApplicationDatabase implementation based on URL."""
-        if database_url.startswith("sqlite"):
-            return SQLiteApplicationDatabase(
-                database_url=database_url,
-                engine_kwargs=engine_kwargs,
-                debug_mode=debug_mode,
-            )
-        else:
-            # Default to PostgreSQL for postgresql://, postgres://, or other URLs
-            return PostgresApplicationDatabase(
-                database_url=database_url,
-                engine_kwargs=engine_kwargs,
-                debug_mode=debug_mode,
-                schema=schema,
-            )
-
 
 class PostgresApplicationDatabase(ApplicationDatabase):
     """PostgreSQL-specific implementation of ApplicationDatabase."""
 
-    def __init__(
-        self,
-        *,
-        database_url: str,
-        engine_kwargs: Dict[str, Any],
-        schema: Optional[str],
-        debug_mode: bool = False,
-    ):
-        super().__init__(
-            database_url=database_url,
-            engine_kwargs=engine_kwargs,
-            debug_mode=debug_mode,
-        )
-        if schema is None:
-            self.schema = "dbos"
-        else:
-            self.schema = schema
-        ApplicationSchema.transaction_outputs.schema = schema
-
     def _create_engine(
         self, database_url: str, engine_kwargs: Dict[str, Any]
     ) -> sa.Engine:

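Note: schema handling moved from `PostgresApplicationDatabase.__init__` into the shared base constructor, which now nulls the schema for SQLite and defaults it to "dbos" otherwise. A sketch of how the relocated factory dispatches, using only imports this diff already shows (`ApplicationDatabase` from `dbos._app_db`, `DefaultSerializer` from `dbos._serialization`); the URL is illustrative:

    from dbos._app_db import ApplicationDatabase
    from dbos._serialization import DefaultSerializer

    # A sqlite URL yields SQLiteApplicationDatabase (schema forced to None);
    # any other URL yields PostgresApplicationDatabase (schema defaults to "dbos").
    app_db = ApplicationDatabase.create(
        database_url="sqlite:///app.sqlite",  # illustrative
        engine_kwargs={},
        schema=None,
        serializer=DefaultSerializer(),
    )
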
{dbos-2.1.0a2 → dbos-2.2.0}/dbos/_client.py
@@ -16,7 +16,6 @@ from typing import (
 
 import sqlalchemy as sa
 
-from dbos import _serialization
 from dbos._app_db import ApplicationDatabase
 from dbos._context import MaxPriority, MinPriority
 from dbos._sys_db import SystemDatabase
@@ -27,7 +26,7 @@ if TYPE_CHECKING:
 from dbos._dbos_config import get_system_database_url, is_valid_database_url
 from dbos._error import DBOSException, DBOSNonExistentWorkflowError
 from dbos._registrations import DEFAULT_MAX_RECOVERY_ATTEMPTS
-from dbos._serialization import WorkflowInputs
+from dbos._serialization import DefaultSerializer, Serializer, WorkflowInputs
 from dbos._sys_db import (
     EnqueueOptionsInternal,
     StepInfo,
@@ -63,6 +62,7 @@ class EnqueueOptions(_EnqueueOptionsRequired, total=False):
     deduplication_id: str
     priority: int
     max_recovery_attempts: int
+    queue_partition_key: str
 
 
 def validate_enqueue_options(options: EnqueueOptions) -> None:
@@ -127,7 +127,9 @@ class DBOSClient:
         system_database_engine: Optional[sa.Engine] = None,
         application_database_url: Optional[str] = None,
         dbos_system_schema: Optional[str] = "dbos",
+        serializer: Serializer = DefaultSerializer(),
     ):
+        self._serializer = serializer
         application_database_url = (
             database_url if database_url else application_database_url
         )
@@ -150,6 +152,7 @@ class DBOSClient:
             },
             engine=system_database_engine,
             schema=dbos_system_schema,
+            serializer=serializer,
         )
         self._sys_db.check_connection()
         if application_database_url:
@@ -161,6 +164,7 @@ class DBOSClient:
                     "pool_size": 2,
                 },
                 schema=dbos_system_schema,
+                serializer=serializer,
             )
 
     def destroy(self) -> None:
@@ -182,6 +186,7 @@ class DBOSClient:
             "deduplication_id": options.get("deduplication_id"),
             "priority": options.get("priority"),
             "app_version": options.get("app_version"),
+            "queue_partition_key": options.get("queue_partition_key"),
         }
 
         inputs: WorkflowInputs = {
@@ -217,7 +222,8 @@ class DBOSClient:
                 if enqueue_options_internal["priority"] is not None
                 else 0
             ),
-            "inputs": _serialization.serialize_args(inputs),
+            "inputs": self._serializer.serialize(inputs),
+            "queue_partition_key": enqueue_options_internal["queue_partition_key"],
         }
 
         self._sys_db.init_workflow(
@@ -282,7 +288,8 @@ class DBOSClient:
             "workflow_deadline_epoch_ms": None,
             "deduplication_id": None,
             "priority": 0,
-            "inputs": _serialization.serialize_args({"args": (), "kwargs": {}}),
+            "inputs": self._serializer.serialize({"args": (), "kwargs": {}}),
+            "queue_partition_key": None,
         }
         with self._sys_db.engine.begin() as conn:
            self._sys_db._insert_workflow_status(

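Note: with `queue_partition_key` added to `EnqueueOptions`, an external process can target one partition of a queue. A sketch under the assumption that a separate worker has registered the queue and workflow (the names and connection string are illustrative, not from this diff):

    from dbos import DBOSClient, EnqueueOptions

    client = DBOSClient(database_url="postgresql://localhost/my_app")  # illustrative

    options: EnqueueOptions = {
        "queue_name": "tasks",            # assumed to exist in the worker process
        "workflow_name": "process_task",  # assumed to be a registered workflow
        "queue_partition_key": "customer-42",  # new in 2.2.0
    }
    handle = client.enqueue(options, "some-argument")
    print(handle.get_result())
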
{dbos-2.1.0a2 → dbos-2.2.0}/dbos/_context.py
@@ -120,6 +120,8 @@ class DBOSContext:
         self.deduplication_id: Optional[str] = None
         # A user-specified priority for the enqueuing workflow.
         self.priority: Optional[int] = None
+        # If the workflow is enqueued on a partitioned queue, its partition key
+        self.queue_partition_key: Optional[str] = None
 
     def create_child(self) -> DBOSContext:
         rv = DBOSContext()
@@ -479,6 +481,7 @@ class SetEnqueueOptions:
         deduplication_id: Optional[str] = None,
         priority: Optional[int] = None,
         app_version: Optional[str] = None,
+        queue_partition_key: Optional[str] = None,
     ) -> None:
         self.created_ctx = False
         self.deduplication_id: Optional[str] = deduplication_id
@@ -491,6 +494,8 @@ class SetEnqueueOptions:
         self.saved_priority: Optional[int] = None
         self.app_version: Optional[str] = app_version
         self.saved_app_version: Optional[str] = None
+        self.queue_partition_key = queue_partition_key
+        self.saved_queue_partition_key: Optional[str] = None
 
     def __enter__(self) -> SetEnqueueOptions:
         # Code to create a basic context
@@ -505,6 +510,8 @@ class SetEnqueueOptions:
         ctx.priority = self.priority
         self.saved_app_version = ctx.app_version
         ctx.app_version = self.app_version
+        self.saved_queue_partition_key = ctx.queue_partition_key
+        ctx.queue_partition_key = self.queue_partition_key
         return self
 
     def __exit__(
@@ -517,6 +524,7 @@ class SetEnqueueOptions:
         curr_ctx.deduplication_id = self.saved_deduplication_id
         curr_ctx.priority = self.saved_priority
         curr_ctx.app_version = self.saved_app_version
+        curr_ctx.queue_partition_key = self.saved_queue_partition_key
         # Code to clean up the basic context if we created it
        if self.created_ctx:
             _clear_local_dbos_context()

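Note: inside an application, the same key is set through the `SetEnqueueOptions` context manager; the save/restore pairs above mean the key only applies to enqueues within the `with` block and nesting behaves. A sketch, assuming a configured and launched DBOS app (queue and key names are illustrative):

    from dbos import DBOS, Queue, SetEnqueueOptions

    queue = Queue("tasks")  # illustrative queue name

    @DBOS.workflow()
    def process_task(item: str) -> str:
        return item.upper()

    # The partition key is stashed on the context on __enter__ and restored
    # on __exit__, so it is scoped to this block.
    with SetEnqueueOptions(queue_partition_key="customer-42"):
        handle = queue.enqueue(process_task, "hello")
    print(handle.get_result())
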
{dbos-2.1.0a2 → dbos-2.2.0}/dbos/_core.py
@@ -23,7 +23,6 @@ from typing import (
 from dbos._outcome import Immediate, NoResult, Outcome, Pending
 from dbos._utils import GlobalParams, retriable_postgres_exception
 
-from . import _serialization
 from ._app_db import ApplicationDatabase, TransactionResultInternal
 from ._context import (
     DBOSAssumeRole,
@@ -116,10 +115,10 @@ class WorkflowHandleFuture(Generic[R]):
         try:
             r = self.future.result()
         except Exception as e:
-            serialized_e = _serialization.serialize_exception(e)
+            serialized_e = self.dbos._serializer.serialize(e)
             self.dbos._sys_db.record_get_result(self.workflow_id, None, serialized_e)
             raise
-        serialized_r = _serialization.serialize(r)
+        serialized_r = self.dbos._serializer.serialize(r)
         self.dbos._sys_db.record_get_result(self.workflow_id, serialized_r, None)
         return r
 
@@ -143,10 +142,10 @@ class WorkflowHandlePolling(Generic[R]):
         try:
             r: R = self.dbos._sys_db.await_workflow_result(self.workflow_id)
         except Exception as e:
-            serialized_e = _serialization.serialize_exception(e)
+            serialized_e = self.dbos._serializer.serialize(e)
             self.dbos._sys_db.record_get_result(self.workflow_id, None, serialized_e)
             raise
-        serialized_r = _serialization.serialize(r)
+        serialized_r = self.dbos._serializer.serialize(r)
         self.dbos._sys_db.record_get_result(self.workflow_id, serialized_r, None)
         return r
 
@@ -171,7 +170,7 @@ class WorkflowHandleAsyncTask(Generic[R]):
         try:
             r = await self.task
         except Exception as e:
-            serialized_e = _serialization.serialize_exception(e)
+            serialized_e = self.dbos._serializer.serialize(e)
             await asyncio.to_thread(
                 self.dbos._sys_db.record_get_result,
                 self.workflow_id,
@@ -179,7 +178,7 @@ class WorkflowHandleAsyncTask(Generic[R]):
                 serialized_e,
             )
             raise
-        serialized_r = _serialization.serialize(r)
+        serialized_r = self.dbos._serializer.serialize(r)
         await asyncio.to_thread(
             self.dbos._sys_db.record_get_result, self.workflow_id, serialized_r, None
         )
@@ -207,7 +206,7 @@ class WorkflowHandleAsyncPolling(Generic[R]):
                 self.dbos._sys_db.await_workflow_result, self.workflow_id
             )
         except Exception as e:
-            serialized_e = _serialization.serialize_exception(e)
+            serialized_e = self.dbos._serializer.serialize(e)
             await asyncio.to_thread(
                 self.dbos._sys_db.record_get_result,
                 self.workflow_id,
@@ -215,7 +214,7 @@ class WorkflowHandleAsyncPolling(Generic[R]):
                 serialized_e,
             )
             raise
-        serialized_r = _serialization.serialize(r)
+        serialized_r = self.dbos._serializer.serialize(r)
         await asyncio.to_thread(
             self.dbos._sys_db.record_get_result, self.workflow_id, serialized_r, None
         )
@@ -303,7 +302,12 @@ def _init_workflow(
             if enqueue_options is not None
             else 0
         ),
-        "inputs": _serialization.serialize_args(inputs),
+        "inputs": dbos._serializer.serialize(inputs),
+        "queue_partition_key": (
+            enqueue_options["queue_partition_key"]
+            if enqueue_options is not None
+            else None
+        ),
     }
 
     # Synchronously record the status and inputs for workflows
@@ -319,7 +323,7 @@ def _init_workflow(
                 "function_id": ctx.parent_workflow_fid,
                 "function_name": wf_name,
                 "output": None,
-                "error": _serialization.serialize_exception(e),
+                "error": dbos._serializer.serialize(e),
             }
             dbos._sys_db.record_operation_result(result)
         raise
@@ -378,7 +382,7 @@ def _get_wf_invoke_func(
                 dbos._sys_db.update_workflow_outcome(
                     status["workflow_uuid"],
                     "SUCCESS",
-                    output=_serialization.serialize(output),
+                    output=dbos._serializer.serialize(output),
                 )
             return output
         except DBOSWorkflowConflictIDError:
@@ -392,7 +396,7 @@ def _get_wf_invoke_func(
                 dbos._sys_db.update_workflow_outcome(
                     status["workflow_uuid"],
                     "ERROR",
-                    error=_serialization.serialize_exception(error),
+                    error=dbos._serializer.serialize(error),
                 )
             raise
         finally:
@@ -464,7 +468,7 @@ def execute_workflow_by_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[An
     status = dbos._sys_db.get_workflow_status(workflow_id)
     if not status:
         raise DBOSRecoveryError(workflow_id, "Workflow status not found")
-    inputs = _serialization.deserialize_args(status["inputs"])
+    inputs: WorkflowInputs = dbos._serializer.deserialize(status["inputs"])
     wf_func = dbos._registry.workflow_info_map.get(status["name"], None)
     if not wf_func:
         raise DBOSWorkflowFunctionNotFoundError(
@@ -572,6 +576,9 @@ def start_workflow(
         deduplication_id=local_ctx.deduplication_id if local_ctx is not None else None,
         priority=local_ctx.priority if local_ctx is not None else None,
         app_version=local_ctx.app_version if local_ctx is not None else None,
+        queue_partition_key=(
+            local_ctx.queue_partition_key if local_ctx is not None else None
+        ),
     )
     new_wf_id, new_wf_ctx = _get_new_wf()
 
@@ -665,6 +672,9 @@ async def start_workflow_async(
         deduplication_id=local_ctx.deduplication_id if local_ctx is not None else None,
         priority=local_ctx.priority if local_ctx is not None else None,
         app_version=local_ctx.app_version if local_ctx is not None else None,
+        queue_partition_key=(
+            local_ctx.queue_partition_key if local_ctx is not None else None
+        ),
     )
     new_wf_id, new_wf_ctx = _get_new_wf()
 
@@ -837,11 +847,11 @@ def workflow_wrapper(
             try:
                 r = func()
             except Exception as e:
-                serialized_e = _serialization.serialize_exception(e)
+                serialized_e = dbos._serializer.serialize(e)
                 assert workflow_id is not None
                 dbos._sys_db.record_get_result(workflow_id, None, serialized_e)
                 raise
-            serialized_r = _serialization.serialize(r)
+            serialized_r = dbos._serializer.serialize(r)
             assert workflow_id is not None
             dbos._sys_db.record_get_result(workflow_id, serialized_r, None)
             return r
@@ -948,15 +958,15 @@ def decorate_transaction(
                         f"Replaying transaction, id: {ctx.function_id}, name: {attributes['name']}"
                     )
                     if recorded_output["error"]:
-                        deserialized_error = (
-                            _serialization.deserialize_exception(
+                        deserialized_error: Exception = (
+                            dbos._serializer.deserialize(
                                 recorded_output["error"]
                             )
                         )
                        has_recorded_error = True
                        raise deserialized_error
                     elif recorded_output["output"]:
-                        return _serialization.deserialize(
+                        return dbos._serializer.deserialize(
                             recorded_output["output"]
                         )
                     else:
@@ -969,7 +979,9 @@ def decorate_transaction(
                     )
 
                     output = func(*args, **kwargs)
-                    txn_output["output"] = _serialization.serialize(output)
+                    txn_output["output"] = dbos._serializer.serialize(
+                        output
+                    )
                     assert (
                         ctx.sql_session is not None
                     ), "Cannot find a database connection"
@@ -1010,8 +1022,8 @@ def decorate_transaction(
             finally:
                 # Don't record the error if it was already recorded
                 if txn_error and not has_recorded_error:
-                    txn_output["error"] = (
-                        _serialization.serialize_exception(txn_error)
+                    txn_output["error"] = dbos._serializer.serialize(
+                        txn_error
                     )
                     dbos._app_db.record_transaction_error(txn_output)
             return output
@@ -1128,10 +1140,10 @@ def decorate_step(
                 try:
                     output = func()
                 except Exception as error:
-                    step_output["error"] = _serialization.serialize_exception(error)
+                    step_output["error"] = dbos._serializer.serialize(error)
                     dbos._sys_db.record_operation_result(step_output)
                     raise
-                step_output["output"] = _serialization.serialize(output)
+                step_output["output"] = dbos._serializer.serialize(output)
                 dbos._sys_db.record_operation_result(step_output)
                 return output
 
@@ -1147,13 +1159,13 @@ def decorate_step(
                     f"Replaying step, id: {ctx.function_id}, name: {attributes['name']}"
                 )
                 if recorded_output["error"] is not None:
-                    deserialized_error = _serialization.deserialize_exception(
+                    deserialized_error: Exception = dbos._serializer.deserialize(
                         recorded_output["error"]
                     )
                     raise deserialized_error
                 elif recorded_output["output"] is not None:
                     return cast(
-                        R, _serialization.deserialize(recorded_output["output"])
+                        R, dbos._serializer.deserialize(recorded_output["output"])
                     )
                 else:
                     raise Exception("Output and error are both None")

{dbos-2.1.0a2 → dbos-2.2.0}/dbos/_dbos.py
@@ -31,6 +31,7 @@ from typing import (
 from dbos._conductor.conductor import ConductorWebsocket
 from dbos._debouncer import debouncer_workflow
+from dbos._serialization import DefaultSerializer, Serializer
 from dbos._sys_db import SystemDatabase, WorkflowStatus
 from dbos._utils import INTERNAL_QUEUE_NAME, GlobalParams
 from dbos._workflow_commands import fork_workflow, list_queued_workflows, list_workflows
@@ -341,6 +342,8 @@ class DBOS:
         self.conductor_websocket: Optional[ConductorWebsocket] = None
         self._background_event_loop: BackgroundEventLoop = BackgroundEventLoop()
         self._active_workflows_set: set[str] = set()
+        serializer = config.get("serializer")
+        self._serializer: Serializer = serializer if serializer else DefaultSerializer()
 
         # Globally set the application version and executor ID.
         # In DBOS Cloud, instead use the values supplied through environment variables.
@@ -449,28 +452,34 @@ class DBOS:
         assert self._config["database"]["sys_db_engine_kwargs"] is not None
         # Get the schema configuration, use "dbos" as default
         schema = self._config.get("dbos_system_schema", "dbos")
+        dbos_logger.debug("Creating system database")
         self._sys_db_field = SystemDatabase.create(
             system_database_url=get_system_database_url(self._config),
             engine_kwargs=self._config["database"]["sys_db_engine_kwargs"],
             engine=self._config["system_database_engine"],
             debug_mode=debug_mode,
             schema=schema,
+            serializer=self._serializer,
         )
         assert self._config["database"]["db_engine_kwargs"] is not None
         if self._config["database_url"]:
+            dbos_logger.debug("Creating application database")
             self._app_db_field = ApplicationDatabase.create(
                 database_url=self._config["database_url"],
                 engine_kwargs=self._config["database"]["db_engine_kwargs"],
                 debug_mode=debug_mode,
                 schema=schema,
+                serializer=self._serializer,
             )
 
         if debug_mode:
             return
 
         # Run migrations for the system and application databases
+        dbos_logger.debug("Running system database migrations")
         self._sys_db.run_migrations()
         if self._app_db:
+            dbos_logger.debug("Running application database migrations")
             self._app_db.run_migrations()
 
         admin_port = self._config.get("runtimeConfig", {}).get("admin_port")
@@ -481,10 +490,12 @@ class DBOS:
         )
         if run_admin_server:
             try:
+                dbos_logger.debug("Starting admin server")
                 self._admin_server_field = AdminServer(dbos=self, port=admin_port)
             except Exception as e:
                 dbos_logger.warning(f"Failed to start admin server: {e}")
 
+        dbos_logger.debug("Retrieving local pending workflows for recovery")
         workflow_ids = self._sys_db.get_pending_workflows(
             GlobalParams.executor_id, GlobalParams.app_version
         )
@@ -500,6 +511,7 @@ class DBOS:
             self._executor.submit(startup_recovery_thread, self, workflow_ids)
 
         # Listen to notifications
+        dbos_logger.debug("Starting notifications listener thread")
         notification_listener_thread = threading.Thread(
             target=self._sys_db._notification_listener,
             daemon=True,
@@ -511,6 +523,7 @@ class DBOS:
         self._registry.get_internal_queue()
 
         # Start the queue thread
+        dbos_logger.debug("Starting queue thread")
         evt = threading.Event()
         self.background_thread_stop_events.append(evt)
         bg_queue_thread = threading.Thread(
@@ -526,6 +539,7 @@ class DBOS:
             self.conductor_url = f"wss://{dbos_domain}/conductor/v1alpha1"
             evt = threading.Event()
             self.background_thread_stop_events.append(evt)
+            dbos_logger.debug("Starting Conductor thread")
             self.conductor_websocket = ConductorWebsocket(
                 self,
                 conductor_url=self.conductor_url,
@@ -536,6 +550,7 @@ class DBOS:
             self._background_threads.append(self.conductor_websocket)
 
         # Grab any pollers that were deferred and start them
+        dbos_logger.debug("Starting event receivers")
         for evt, func, args, kwargs in self._registry.pollers:
             self.poller_stop_events.append(evt)
             poller_thread = threading.Thread(

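Note: the new `dbos_logger.debug` breadcrumbs trace each launch stage (database creation, migrations, admin server, recovery, listeners, queue, Conductor, pollers) and only surface when DBOS logging is at DEBUG. A sketch using the existing `log_level` config field; the app name and database URL are illustrative:

    from dbos import DBOS, DBOSConfig

    config: DBOSConfig = {
        "name": "my-app",                                # illustrative
        "system_database_url": "sqlite:///dbos.sqlite",  # illustrative
        "log_level": "DEBUG",  # surfaces the new launch-stage debug messages
    }
    DBOS(config=config)
    DBOS.launch()
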
{dbos-2.1.0a2 → dbos-2.2.0}/dbos/_dbos_config.py
@@ -7,6 +7,8 @@ import sqlalchemy as sa
 import yaml
 from sqlalchemy import make_url
 
+from dbos._serialization import Serializer
+
 from ._error import DBOSInitializationError
 from ._logger import dbos_logger
 from ._schemas.system_database import SystemSchema
@@ -37,6 +39,7 @@ class DBOSConfig(TypedDict, total=False):
         enable_otlp (bool): If True, enable built-in DBOS OTLP tracing and logging.
         system_database_engine (sa.Engine): A custom system database engine. If provided, DBOS will not create an engine but use this instead.
         conductor_key (str): An API key for DBOS Conductor. Pass this in to connect your process to Conductor.
+        serializer (Serializer): A custom serializer and deserializer DBOS uses when storing program data in the system database
     """
 
     name: str
@@ -57,6 +60,7 @@ class DBOSConfig(TypedDict, total=False):
     enable_otlp: Optional[bool]
     system_database_engine: Optional[sa.Engine]
     conductor_key: Optional[str]
+    serializer: Optional[Serializer]
 
 
 class RuntimeConfig(TypedDict, total=False):
@@ -67,16 +71,6 @@ class RuntimeConfig(TypedDict, total=False):
 
 
 class DatabaseConfig(TypedDict, total=False):
-    """
-    Internal data structure containing the DBOS database configuration.
-    Attributes:
-        sys_db_name (str): System database name
-        sys_db_pool_size (int): System database pool size
-        db_engine_kwargs (Dict[str, Any]): SQLAlchemy engine kwargs
-        migrate (List[str]): Migration commands to run on startup
-        dbos_system_schema (str): Schema name for DBOS system tables. Defaults to "dbos".
-    """
-
     sys_db_pool_size: Optional[int]
     db_engine_kwargs: Optional[Dict[str, Any]]
     sys_db_engine_kwargs: Optional[Dict[str, Any]]

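Note: the new `serializer` config field is how applications opt in; it is read once in `DBOS.__init__` (see the `_dbos.py` hunk above) and threaded into both the system and application databases. Continuing the `Base64PickleSerializer` sketch shown after the `__init__.py` diff; name and URL are illustrative:

    from dbos import DBOS, DBOSConfig

    config: DBOSConfig = {
        "name": "my-app",                                            # illustrative
        "system_database_url": "postgresql://localhost/my_app_sys",  # illustrative
        "serializer": Base64PickleSerializer(),  # from the earlier sketch
    }
    DBOS(config=config)
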
{dbos-2.1.0a2 → dbos-2.2.0}/dbos/_migration.py
@@ -203,8 +203,14 @@ CREATE TABLE \"{schema}\".event_dispatch_kv (
 """
 
 
+def get_dbos_migration_two(schema: str) -> str:
+    return f"""
+    ALTER TABLE \"{schema}\".workflow_status ADD COLUMN queue_partition_key TEXT;
+    """
+
+
 def get_dbos_migrations(schema: str) -> list[str]:
-    return [get_dbos_migration_one(schema)]
+    return [get_dbos_migration_one(schema), get_dbos_migration_two(schema)]
 
 
 def get_sqlite_timestamp_expr() -> str:
@@ -293,4 +299,8 @@ CREATE TABLE streams (
 );
 """
 
-sqlite_migrations = [sqlite_migration_one]
+sqlite_migration_two = """
+ALTER TABLE workflow_status ADD COLUMN queue_partition_key TEXT;
+"""
+
+sqlite_migrations = [sqlite_migration_one, sqlite_migrations_two := sqlite_migration_two][0:1] + [sqlite_migration_two]
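Note: both backends gain the same nullable column that backs `queue_partition_key`, and migrations run automatically during launch (per the `_dbos.py` hunk above), so no manual step is needed. For reference, rendering the new Postgres migration for the default schema:

    from dbos._migration import get_dbos_migration_two

    # Prints: ALTER TABLE "dbos".workflow_status ADD COLUMN queue_partition_key TEXT;
    print(get_dbos_migration_two("dbos"))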