dbos 2.1.0a2__tar.gz → 2.2.0a2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {dbos-2.1.0a2 → dbos-2.2.0a2}/PKG-INFO +1 -1
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/__init__.py +2 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_app_db.py +40 -45
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_client.py +7 -4
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_core.py +27 -26
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_dbos.py +15 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_dbos_config.py +4 -10
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_scheduler.py +24 -14
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_serialization.py +24 -36
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_sys_db.py +71 -59
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/cli/migration.py +3 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/pyproject.toml +1 -1
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/test_dbos.py +81 -1
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/test_failures.py +9 -8
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/test_scheduler.py +7 -7
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/test_schema_migration.py +2 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/LICENSE +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/README.md +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/__main__.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_admin_server.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_classproperty.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_conductor/conductor.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_conductor/protocol.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_context.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_croniter.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_debouncer.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_debug.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_docker_pg_helper.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_error.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_event_loop.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_fastapi.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_flask.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_kafka.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_kafka_message.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_logger.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_migration.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_outcome.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_queue.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_recovery.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_registrations.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_roles.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_schemas/__init__.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_schemas/application_database.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_schemas/system_database.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_sys_db_postgres.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_sys_db_sqlite.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_templates/dbos-db-starter/README.md +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_templates/dbos-db-starter/__package/__init__.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_templates/dbos-db-starter/__package/main.py.dbos +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_templates/dbos-db-starter/__package/schema.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_templates/dbos-db-starter/migrations/create_table.py.dbos +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_tracer.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_utils.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/_workflow_commands.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/cli/_github_init.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/cli/_template_init.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/cli/cli.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/dbos-config.schema.json +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/dbos/py.typed +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/__init__.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/atexit_no_ctor.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/atexit_no_launch.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/classdefs.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/client_collateral.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/client_worker.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/conftest.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/dupname_classdefs1.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/dupname_classdefsa.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/more_classdefs.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/queuedworkflow.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/script_without_fastapi.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/test_admin_server.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/test_async.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/test_async_workflow_management.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/test_classdecorators.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/test_cli.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/test_client.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/test_concurrency.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/test_config.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/test_croniter.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/test_debouncer.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/test_debug.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/test_docker_secrets.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/test_fastapi.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/test_fastapi_roles.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/test_flask.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/test_kafka.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/test_outcome.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/test_package.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/test_queue.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/test_singleton.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/test_spans.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/test_sqlalchemy.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/test_streaming.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/test_workflow_introspection.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/tests/test_workflow_management.py +0 -0
- {dbos-2.1.0a2 → dbos-2.2.0a2}/version/__init__.py +0 -0
dbos/__init__.py

@@ -12,6 +12,7 @@ from ._dbos_config import DBOSConfig
 from ._debouncer import Debouncer, DebouncerClient
 from ._kafka_message import KafkaMessage
 from ._queue import Queue
+from ._serialization import Serializer
 from ._sys_db import GetWorkflowsInput, WorkflowStatus, WorkflowStatusString
 
 __all__ = [
@@ -35,4 +36,5 @@ __all__ = [
     "Queue",
     "Debouncer",
     "DebouncerClient",
+    "Serializer",
 ]
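Note: `Serializer` is newly exported from the package root. Below is a minimal sketch of what a custom implementation might look like, inferred from the `serialize`/`deserialize` calls in the hunks that follow; the pickle-plus-base64 encoding and the class name `PickleSerializer` are assumptions for illustration, not code from this release.

import base64
import pickle
from typing import Any

class PickleSerializer:
    # Hypothetical custom Serializer; the real base class in
    # dbos._serialization may define a different interface.
    def serialize(self, data: Any) -> str:
        # Pickle the object, then base64-encode it so it can be stored as text.
        return base64.b64encode(pickle.dumps(data)).decode("utf-8")

    def deserialize(self, serialized: str) -> Any:
        # Reverse the encoding to recover the original object.
        return pickle.loads(base64.b64decode(serialized))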
dbos/_app_db.py

@@ -8,8 +8,8 @@ from sqlalchemy.exc import DBAPIError
 from sqlalchemy.orm import Session, sessionmaker
 
 from dbos._migration import get_sqlite_timestamp_expr
+from dbos._serialization import Serializer
 
-from . import _serialization
 from ._error import DBOSUnexpectedStepError, DBOSWorkflowConflictIDError
 from ._logger import dbos_logger
 from ._schemas.application_database import ApplicationSchema
@@ -34,17 +34,52 @@ class RecordedResult(TypedDict):
 
 class ApplicationDatabase(ABC):
 
+    @staticmethod
+    def create(
+        database_url: str,
+        engine_kwargs: Dict[str, Any],
+        schema: Optional[str],
+        serializer: Serializer,
+        debug_mode: bool = False,
+    ) -> "ApplicationDatabase":
+        """Factory method to create the appropriate ApplicationDatabase implementation based on URL."""
+        if database_url.startswith("sqlite"):
+            return SQLiteApplicationDatabase(
+                database_url=database_url,
+                engine_kwargs=engine_kwargs,
+                schema=schema,
+                serializer=serializer,
+                debug_mode=debug_mode,
+            )
+        else:
+            # Default to PostgreSQL for postgresql://, postgres://, or other URLs
+            return PostgresApplicationDatabase(
+                database_url=database_url,
+                engine_kwargs=engine_kwargs,
+                schema=schema,
+                serializer=serializer,
+                debug_mode=debug_mode,
+            )
+
     def __init__(
         self,
         *,
         database_url: str,
         engine_kwargs: Dict[str, Any],
+        serializer: Serializer,
+        schema: Optional[str],
         debug_mode: bool = False,
     ):
+        if database_url.startswith("sqlite"):
+            self.schema = None
+        else:
+            self.schema = schema if schema else "dbos"
+            ApplicationSchema.transaction_outputs.schema = schema
         self.engine = self._create_engine(database_url, engine_kwargs)
         self._engine_kwargs = engine_kwargs
         self.sessionmaker = sessionmaker(bind=self.engine)
         self.debug_mode = debug_mode
+        self.serializer = serializer
 
     @abstractmethod
     def _create_engine(
@@ -156,10 +191,12 @@ class ApplicationDatabase(ABC):
                     function_id=row[0],
                     function_name=row[1],
                     output=(
-                        _serialization.deserialize(row[2]) if row[2] is not None else row[2]
+                        self.serializer.deserialize(row[2])
+                        if row[2] is not None
+                        else row[2]
                     ),
                     error=(
-                        _serialization.deserialize(row[3])
+                        self.serializer.deserialize(row[3])
                         if row[3] is not None
                         else row[3]
                     ),
@@ -237,52 +274,10 @@ class ApplicationDatabase(ABC):
         """Check if the error is a serialization/concurrency error."""
         pass
 
-    @staticmethod
-    def create(
-        database_url: str,
-        engine_kwargs: Dict[str, Any],
-        schema: Optional[str],
-        debug_mode: bool = False,
-    ) -> "ApplicationDatabase":
-        """Factory method to create the appropriate ApplicationDatabase implementation based on URL."""
-        if database_url.startswith("sqlite"):
-            return SQLiteApplicationDatabase(
-                database_url=database_url,
-                engine_kwargs=engine_kwargs,
-                debug_mode=debug_mode,
-            )
-        else:
-            # Default to PostgreSQL for postgresql://, postgres://, or other URLs
-            return PostgresApplicationDatabase(
-                database_url=database_url,
-                engine_kwargs=engine_kwargs,
-                debug_mode=debug_mode,
-                schema=schema,
-            )
-
 
 class PostgresApplicationDatabase(ApplicationDatabase):
     """PostgreSQL-specific implementation of ApplicationDatabase."""
 
-    def __init__(
-        self,
-        *,
-        database_url: str,
-        engine_kwargs: Dict[str, Any],
-        schema: Optional[str],
-        debug_mode: bool = False,
-    ):
-        super().__init__(
-            database_url=database_url,
-            engine_kwargs=engine_kwargs,
-            debug_mode=debug_mode,
-        )
-        if schema is None:
-            self.schema = "dbos"
-        else:
-            self.schema = schema
-        ApplicationSchema.transaction_outputs.schema = schema
-
     def _create_engine(
         self, database_url: str, engine_kwargs: Dict[str, Any]
     ) -> sa.Engine:
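For orientation, a hedged sketch of calling the relocated factory above. This is internal API; the argument values are placeholders, and whether `DefaultSerializer` constructs with no arguments is an assumption.

from dbos._app_db import ApplicationDatabase
from dbos._serialization import DefaultSerializer

# Backend selection is driven purely by the URL scheme:
# "sqlite..." selects SQLiteApplicationDatabase (schema forced to None),
# anything else selects PostgresApplicationDatabase (schema defaults to "dbos").
app_db = ApplicationDatabase.create(
    database_url="sqlite:///example.sqlite",  # placeholder URL
    engine_kwargs={},
    schema=None,
    serializer=DefaultSerializer(),
)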
dbos/_client.py

@@ -16,7 +16,6 @@ from typing import (
 
 import sqlalchemy as sa
 
-from dbos import _serialization
 from dbos._app_db import ApplicationDatabase
 from dbos._context import MaxPriority, MinPriority
 from dbos._sys_db import SystemDatabase
@@ -27,7 +26,7 @@ if TYPE_CHECKING:
 from dbos._dbos_config import get_system_database_url, is_valid_database_url
 from dbos._error import DBOSException, DBOSNonExistentWorkflowError
 from dbos._registrations import DEFAULT_MAX_RECOVERY_ATTEMPTS
-from dbos._serialization import WorkflowInputs
+from dbos._serialization import DefaultSerializer, Serializer, WorkflowInputs
 from dbos._sys_db import (
     EnqueueOptionsInternal,
     StepInfo,
@@ -127,7 +126,9 @@ class DBOSClient:
         system_database_engine: Optional[sa.Engine] = None,
         application_database_url: Optional[str] = None,
         dbos_system_schema: Optional[str] = "dbos",
+        serializer: Serializer = DefaultSerializer(),
     ):
+        self._serializer = serializer
         application_database_url = (
             database_url if database_url else application_database_url
         )
@@ -150,6 +151,7 @@ class DBOSClient:
             },
             engine=system_database_engine,
             schema=dbos_system_schema,
+            serializer=serializer,
         )
         self._sys_db.check_connection()
         if application_database_url:
@@ -161,6 +163,7 @@
                     "pool_size": 2,
                 },
                 schema=dbos_system_schema,
+                serializer=serializer,
             )
 
     def destroy(self) -> None:
@@ -217,7 +220,7 @@
                 if enqueue_options_internal["priority"] is not None
                 else 0
             ),
-            "inputs": _serialization.serialize_args(inputs),
+            "inputs": self._serializer.serialize(inputs),
         }
 
         self._sys_db.init_workflow(
@@ -282,7 +285,7 @@
             "workflow_deadline_epoch_ms": None,
             "deduplication_id": None,
             "priority": 0,
-            "inputs": _serialization.serialize_args({"args": (), "kwargs": {}}),
+            "inputs": self._serializer.serialize({"args": (), "kwargs": {}}),
         }
         with self._sys_db.engine.begin() as conn:
             self._sys_db._insert_workflow_status(
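A hedged usage sketch of the new `serializer` keyword on `DBOSClient`; the connection string is a placeholder and `PickleSerializer` is the hypothetical class sketched earlier. Since the client writes serialized workflow inputs that the application later deserializes, both sides presumably need compatible serializers.

from dbos import DBOSClient

client = DBOSClient(
    database_url="postgresql://user:pass@localhost:5432/appdb",  # placeholder
    serializer=PickleSerializer(),  # omit to use DefaultSerializer()
)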
dbos/_core.py

@@ -23,7 +23,6 @@ from typing import (
 from dbos._outcome import Immediate, NoResult, Outcome, Pending
 from dbos._utils import GlobalParams, retriable_postgres_exception
 
-from . import _serialization
 from ._app_db import ApplicationDatabase, TransactionResultInternal
 from ._context import (
     DBOSAssumeRole,
@@ -116,10 +115,10 @@ class WorkflowHandleFuture(Generic[R]):
         try:
             r = self.future.result()
         except Exception as e:
-            serialized_e = _serialization.serialize_exception(e)
+            serialized_e = self.dbos._serializer.serialize(e)
             self.dbos._sys_db.record_get_result(self.workflow_id, None, serialized_e)
             raise
-        serialized_r = _serialization.serialize(r)
+        serialized_r = self.dbos._serializer.serialize(r)
         self.dbos._sys_db.record_get_result(self.workflow_id, serialized_r, None)
         return r
 
@@ -143,10 +142,10 @@ class WorkflowHandlePolling(Generic[R]):
         try:
             r: R = self.dbos._sys_db.await_workflow_result(self.workflow_id)
         except Exception as e:
-            serialized_e = _serialization.serialize_exception(e)
+            serialized_e = self.dbos._serializer.serialize(e)
             self.dbos._sys_db.record_get_result(self.workflow_id, None, serialized_e)
             raise
-        serialized_r = _serialization.serialize(r)
+        serialized_r = self.dbos._serializer.serialize(r)
         self.dbos._sys_db.record_get_result(self.workflow_id, serialized_r, None)
         return r
 
@@ -171,7 +170,7 @@ class WorkflowHandleAsyncTask(Generic[R]):
         try:
             r = await self.task
         except Exception as e:
-            serialized_e = _serialization.serialize_exception(e)
+            serialized_e = self.dbos._serializer.serialize(e)
             await asyncio.to_thread(
                 self.dbos._sys_db.record_get_result,
                 self.workflow_id,
@@ -179,7 +178,7 @@ class WorkflowHandleAsyncTask(Generic[R]):
                 serialized_e,
             )
             raise
-        serialized_r = _serialization.serialize(r)
+        serialized_r = self.dbos._serializer.serialize(r)
         await asyncio.to_thread(
             self.dbos._sys_db.record_get_result, self.workflow_id, serialized_r, None
         )
@@ -207,7 +206,7 @@ class WorkflowHandleAsyncPolling(Generic[R]):
                 self.dbos._sys_db.await_workflow_result, self.workflow_id
             )
         except Exception as e:
-            serialized_e = _serialization.serialize_exception(e)
+            serialized_e = self.dbos._serializer.serialize(e)
             await asyncio.to_thread(
                 self.dbos._sys_db.record_get_result,
                 self.workflow_id,
@@ -215,7 +214,7 @@ class WorkflowHandleAsyncPolling(Generic[R]):
                 serialized_e,
             )
             raise
-        serialized_r = _serialization.serialize(r)
+        serialized_r = self.dbos._serializer.serialize(r)
         await asyncio.to_thread(
             self.dbos._sys_db.record_get_result, self.workflow_id, serialized_r, None
         )
@@ -303,7 +302,7 @@ def _init_workflow(
             if enqueue_options is not None
             else 0
         ),
-        "inputs": _serialization.serialize_args(inputs),
+        "inputs": dbos._serializer.serialize(inputs),
    }
 
     # Synchronously record the status and inputs for workflows
@@ -319,7 +318,7 @@ def _init_workflow(
                 "function_id": ctx.parent_workflow_fid,
                 "function_name": wf_name,
                 "output": None,
-                "error": _serialization.serialize_exception(e),
+                "error": dbos._serializer.serialize(e),
             }
             dbos._sys_db.record_operation_result(result)
             raise
@@ -378,7 +377,7 @@ def _get_wf_invoke_func(
             dbos._sys_db.update_workflow_outcome(
                 status["workflow_uuid"],
                 "SUCCESS",
-                output=_serialization.serialize(output),
+                output=dbos._serializer.serialize(output),
             )
             return output
         except DBOSWorkflowConflictIDError:
@@ -392,7 +391,7 @@ def _get_wf_invoke_func(
             dbos._sys_db.update_workflow_outcome(
                 status["workflow_uuid"],
                 "ERROR",
-                error=_serialization.serialize_exception(error),
+                error=dbos._serializer.serialize(error),
             )
             raise
         finally:
@@ -464,7 +463,7 @@ def execute_workflow_by_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[An
     status = dbos._sys_db.get_workflow_status(workflow_id)
     if not status:
         raise DBOSRecoveryError(workflow_id, "Workflow status not found")
-    inputs = _serialization.deserialize_args(status["inputs"])
+    inputs: WorkflowInputs = dbos._serializer.deserialize(status["inputs"])
     wf_func = dbos._registry.workflow_info_map.get(status["name"], None)
     if not wf_func:
         raise DBOSWorkflowFunctionNotFoundError(
@@ -837,11 +836,11 @@ def workflow_wrapper(
             try:
                 r = func()
             except Exception as e:
-                serialized_e = _serialization.serialize_exception(e)
+                serialized_e = dbos._serializer.serialize(e)
                 assert workflow_id is not None
                 dbos._sys_db.record_get_result(workflow_id, None, serialized_e)
                 raise
-            serialized_r = _serialization.serialize(r)
+            serialized_r = dbos._serializer.serialize(r)
             assert workflow_id is not None
             dbos._sys_db.record_get_result(workflow_id, serialized_r, None)
             return r
@@ -948,15 +947,15 @@ def decorate_transaction(
                             f"Replaying transaction, id: {ctx.function_id}, name: {attributes['name']}"
                         )
                         if recorded_output["error"]:
-                            deserialized_error = (
-                                _serialization.deserialize_exception(
+                            deserialized_error: Exception = (
+                                dbos._serializer.deserialize(
                                     recorded_output["error"]
                                 )
                             )
                             has_recorded_error = True
                             raise deserialized_error
                         elif recorded_output["output"]:
-                            return _serialization.deserialize(
+                            return dbos._serializer.deserialize(
                                 recorded_output["output"]
                             )
                         else:
@@ -969,7 +968,9 @@ def decorate_transaction(
                         )
 
                         output = func(*args, **kwargs)
-                        txn_output["output"] = _serialization.serialize(output)
+                        txn_output["output"] = dbos._serializer.serialize(
+                            output
+                        )
                         assert (
                             ctx.sql_session is not None
                         ), "Cannot find a database connection"
@@ -1010,8 +1011,8 @@ def decorate_transaction(
             finally:
                 # Don't record the error if it was already recorded
                 if txn_error and not has_recorded_error:
-                    txn_output["error"] = (
-                        _serialization.serialize_exception(txn_error)
+                    txn_output["error"] = dbos._serializer.serialize(
+                        txn_error
                     )
                     dbos._app_db.record_transaction_error(txn_output)
             return output
@@ -1128,10 +1129,10 @@ def decorate_step(
             try:
                 output = func()
             except Exception as error:
-                step_output["error"] = _serialization.serialize_exception(error)
+                step_output["error"] = dbos._serializer.serialize(error)
                 dbos._sys_db.record_operation_result(step_output)
                 raise
-            step_output["output"] = _serialization.serialize(output)
+            step_output["output"] = dbos._serializer.serialize(output)
             dbos._sys_db.record_operation_result(step_output)
             return output
 
@@ -1147,13 +1148,13 @@ def decorate_step(
                     f"Replaying step, id: {ctx.function_id}, name: {attributes['name']}"
                 )
                 if recorded_output["error"] is not None:
-                    deserialized_error = _serialization.deserialize_exception(
+                    deserialized_error: Exception = dbos._serializer.deserialize(
                         recorded_output["error"]
                     )
                     raise deserialized_error
                 elif recorded_output["output"] is not None:
                     return cast(
-                        R, _serialization.deserialize(recorded_output["output"])
+                        R, dbos._serializer.deserialize(recorded_output["output"])
                    )
                 else:
                     raise Exception("Output and error are both None")
dbos/_dbos.py

@@ -31,6 +31,7 @@ from typing import (
 
 from dbos._conductor.conductor import ConductorWebsocket
 from dbos._debouncer import debouncer_workflow
+from dbos._serialization import DefaultSerializer, Serializer
 from dbos._sys_db import SystemDatabase, WorkflowStatus
 from dbos._utils import INTERNAL_QUEUE_NAME, GlobalParams
 from dbos._workflow_commands import fork_workflow, list_queued_workflows, list_workflows
@@ -341,6 +342,8 @@ class DBOS:
         self.conductor_websocket: Optional[ConductorWebsocket] = None
         self._background_event_loop: BackgroundEventLoop = BackgroundEventLoop()
         self._active_workflows_set: set[str] = set()
+        serializer = config.get("serializer")
+        self._serializer: Serializer = serializer if serializer else DefaultSerializer()
 
         # Globally set the application version and executor ID.
         # In DBOS Cloud, instead use the values supplied through environment variables.
@@ -449,28 +452,34 @@ class DBOS:
         assert self._config["database"]["sys_db_engine_kwargs"] is not None
         # Get the schema configuration, use "dbos" as default
         schema = self._config.get("dbos_system_schema", "dbos")
+        dbos_logger.debug("Creating system database")
         self._sys_db_field = SystemDatabase.create(
             system_database_url=get_system_database_url(self._config),
             engine_kwargs=self._config["database"]["sys_db_engine_kwargs"],
             engine=self._config["system_database_engine"],
             debug_mode=debug_mode,
             schema=schema,
+            serializer=self._serializer,
         )
         assert self._config["database"]["db_engine_kwargs"] is not None
         if self._config["database_url"]:
+            dbos_logger.debug("Creating application database")
             self._app_db_field = ApplicationDatabase.create(
                 database_url=self._config["database_url"],
                 engine_kwargs=self._config["database"]["db_engine_kwargs"],
                 debug_mode=debug_mode,
                 schema=schema,
+                serializer=self._serializer,
             )
 
         if debug_mode:
             return
 
         # Run migrations for the system and application databases
+        dbos_logger.debug("Running system database migrations")
         self._sys_db.run_migrations()
         if self._app_db:
+            dbos_logger.debug("Running application database migrations")
             self._app_db.run_migrations()
 
         admin_port = self._config.get("runtimeConfig", {}).get("admin_port")
@@ -481,10 +490,12 @@ class DBOS:
         )
         if run_admin_server:
             try:
+                dbos_logger.debug("Starting admin server")
                 self._admin_server_field = AdminServer(dbos=self, port=admin_port)
             except Exception as e:
                 dbos_logger.warning(f"Failed to start admin server: {e}")
 
+        dbos_logger.debug("Retrieving local pending workflows for recovery")
         workflow_ids = self._sys_db.get_pending_workflows(
             GlobalParams.executor_id, GlobalParams.app_version
         )
@@ -500,6 +511,7 @@ class DBOS:
             self._executor.submit(startup_recovery_thread, self, workflow_ids)
 
         # Listen to notifications
+        dbos_logger.debug("Starting notifications listener thread")
         notification_listener_thread = threading.Thread(
             target=self._sys_db._notification_listener,
             daemon=True,
@@ -511,6 +523,7 @@ class DBOS:
         self._registry.get_internal_queue()
 
         # Start the queue thread
+        dbos_logger.debug("Starting queue thread")
         evt = threading.Event()
         self.background_thread_stop_events.append(evt)
         bg_queue_thread = threading.Thread(
@@ -526,6 +539,7 @@ class DBOS:
             self.conductor_url = f"wss://{dbos_domain}/conductor/v1alpha1"
             evt = threading.Event()
             self.background_thread_stop_events.append(evt)
+            dbos_logger.debug("Starting Conductor thread")
             self.conductor_websocket = ConductorWebsocket(
                 self,
                 conductor_url=self.conductor_url,
@@ -536,6 +550,7 @@ class DBOS:
             self._background_threads.append(self.conductor_websocket)
 
         # Grab any pollers that were deferred and start them
+        dbos_logger.debug("Starting event receivers")
         for evt, func, args, kwargs in self._registry.pollers:
             self.poller_stop_events.append(evt)
             poller_thread = threading.Thread(
dbos/_dbos_config.py

@@ -7,6 +7,8 @@ import sqlalchemy as sa
 import yaml
 from sqlalchemy import make_url
 
+from dbos._serialization import Serializer
+
 from ._error import DBOSInitializationError
 from ._logger import dbos_logger
 from ._schemas.system_database import SystemSchema
@@ -37,6 +39,7 @@ class DBOSConfig(TypedDict, total=False):
         enable_otlp (bool): If True, enable built-in DBOS OTLP tracing and logging.
         system_database_engine (sa.Engine): A custom system database engine. If provided, DBOS will not create an engine but use this instead.
         conductor_key (str): An API key for DBOS Conductor. Pass this in to connect your process to Conductor.
+        serializer (Serializer): A custom serializer and deserializer DBOS uses when storing program data in the system database
     """
 
     name: str
@@ -57,6 +60,7 @@ class DBOSConfig(TypedDict, total=False):
     enable_otlp: Optional[bool]
     system_database_engine: Optional[sa.Engine]
     conductor_key: Optional[str]
+    serializer: Optional[Serializer]
 
 
 class RuntimeConfig(TypedDict, total=False):
@@ -67,16 +71,6 @@ class RuntimeConfig(TypedDict, total=False):
 
 
 class DatabaseConfig(TypedDict, total=False):
-    """
-    Internal data structure containing the DBOS database configuration.
-    Attributes:
-        sys_db_name (str): System database name
-        sys_db_pool_size (int): System database pool size
-        db_engine_kwargs (Dict[str, Any]): SQLAlchemy engine kwargs
-        migrate (List[str]): Migration commands to run on startup
-        dbos_system_schema (str): Schema name for DBOS system tables. Defaults to "dbos".
-    """
-
     sys_db_pool_size: Optional[int]
     db_engine_kwargs: Optional[Dict[str, Any]]
     sys_db_engine_kwargs: Optional[Dict[str, Any]]
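A hedged sketch of the new `serializer` config field documented above; "my-app" and the connection string are placeholders, and `PickleSerializer` is the hypothetical class sketched earlier.

from dbos import DBOS, DBOSConfig

config: DBOSConfig = {
    "name": "my-app",
    "database_url": "postgresql://user:pass@localhost:5432/appdb",  # placeholder
    "serializer": PickleSerializer(),  # falls back to DefaultSerializer() if unset
}
DBOS(config=config)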
dbos/_scheduler.py

@@ -1,3 +1,4 @@
+import random
 import threading
 import traceback
 from datetime import datetime, timezone
@@ -15,28 +16,40 @@ from ._registrations import get_dbos_func_name
 
 ScheduledWorkflow = Callable[[datetime, datetime], None]
 
-scheduler_queue: Queue
-
 
 def scheduler_loop(
     func: ScheduledWorkflow, cron: str, stop_event: threading.Event
 ) -> None:
+    from dbos._dbos import _get_dbos_instance
+
+    dbos = _get_dbos_instance()
+    scheduler_queue = dbos._registry.get_internal_queue()
     try:
         iter = croniter(cron, datetime.now(timezone.utc), second_at_beginning=True)
-    except Exception as e:
+    except Exception:
         dbos_logger.error(
             f'Cannot run scheduled function {get_dbos_func_name(func)}. Invalid crontab "{cron}"'
         )
+        raise
     while not stop_event.is_set():
-        nextExecTime = iter.get_next(datetime)
-        sleepTime = nextExecTime - datetime.now(timezone.utc)
-        if stop_event.wait(timeout=sleepTime.total_seconds()):
+        next_exec_time = iter.get_next(datetime)
+        sleep_time = (next_exec_time - datetime.now(timezone.utc)).total_seconds()
+        sleep_time = max(0, sleep_time)
+        # To prevent a "thundering herd" problem in a distributed setting,
+        # apply jitter of up to 10% the sleep time, capped at 10 seconds
+        max_jitter = min(sleep_time / 10, 10)
+        jitter = random.uniform(0, max_jitter)
+        if stop_event.wait(timeout=sleep_time + jitter):
            return
         try:
-            with SetWorkflowID(
-                f"sched-{get_dbos_func_name(func)}-{next_exec_time.isoformat()}"
-            ):
-                scheduler_queue.enqueue(func, next_exec_time, datetime.now(timezone.utc))
+            workflowID = (
+                f"sched-{get_dbos_func_name(func)}-{next_exec_time.isoformat()}"
+            )
+            if not dbos._sys_db.get_workflow_status(workflowID):
+                with SetWorkflowID(workflowID):
+                    scheduler_queue.enqueue(
+                        func, next_exec_time, datetime.now(timezone.utc)
+                    )
        except Exception:
             dbos_logger.warning(
                 f"Exception encountered in scheduler thread: {traceback.format_exc()})"
@@ -49,13 +62,10 @@ def scheduled(
     def decorator(func: ScheduledWorkflow) -> ScheduledWorkflow:
         try:
             croniter(cron, datetime.now(timezone.utc), second_at_beginning=True)
-        except Exception as e:
+        except Exception:
             raise ValueError(
                 f'Invalid crontab "{cron}" for scheduled function function {get_dbos_func_name(func)}.'
             )
-
-        global scheduler_queue
-        scheduler_queue = dbosreg.get_internal_queue()
         stop_event = threading.Event()
         dbosreg.register_poller(stop_event, scheduler_loop, func, cron, stop_event)
         return func
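Worked example of the jitter bound added in scheduler_loop above: the jitter is drawn uniformly from [0, min(sleep_time / 10, 10)], i.e. at most 10% of the wait and never more than 10 seconds. A standalone sketch:

import random

def jittered_wait(sleep_time: float) -> float:
    # 10% of the wait, capped at 10 seconds, mirroring the hunk above.
    max_jitter = min(sleep_time / 10, 10)
    return sleep_time + random.uniform(0, max_jitter)

# For a 300-second wait: min(300 / 10, 10) == 10, so the actual wait is a
# uniform draw from [300, 310] seconds.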
|