dbos 2.1.0a2__py3-none-any.whl → 2.4.0a7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of dbos might be problematic.
- dbos/__init__.py +2 -0
- dbos/_admin_server.py +2 -0
- dbos/_app_db.py +42 -45
- dbos/_client.py +33 -9
- dbos/_conductor/conductor.py +25 -27
- dbos/_conductor/protocol.py +39 -1
- dbos/_context.py +10 -3
- dbos/_core.py +41 -47
- dbos/_dbos.py +41 -15
- dbos/_dbos_config.py +6 -14
- dbos/_debouncer.py +4 -5
- dbos/_fastapi.py +2 -3
- dbos/_flask.py +2 -3
- dbos/_kafka.py +6 -4
- dbos/_logger.py +27 -19
- dbos/_migration.py +59 -2
- dbos/_queue.py +29 -4
- dbos/_scheduler.py +29 -16
- dbos/_schemas/system_database.py +4 -0
- dbos/_serialization.py +28 -36
- dbos/_sys_db.py +245 -219
- dbos/_sys_db_postgres.py +1 -1
- dbos/_tracer.py +24 -19
- dbos/_utils.py +10 -0
- dbos/_workflow_commands.py +21 -17
- dbos/cli/cli.py +1 -15
- dbos/cli/migration.py +4 -0
- {dbos-2.1.0a2.dist-info → dbos-2.4.0a7.dist-info}/METADATA +1 -1
- dbos-2.4.0a7.dist-info/RECORD +59 -0
- {dbos-2.1.0a2.dist-info → dbos-2.4.0a7.dist-info}/WHEEL +1 -1
- dbos-2.1.0a2.dist-info/RECORD +0 -59
- {dbos-2.1.0a2.dist-info → dbos-2.4.0a7.dist-info}/entry_points.txt +0 -0
- {dbos-2.1.0a2.dist-info → dbos-2.4.0a7.dist-info}/licenses/LICENSE +0 -0
dbos/__init__.py
CHANGED

@@ -12,6 +12,7 @@ from ._dbos_config import DBOSConfig
 from ._debouncer import Debouncer, DebouncerClient
 from ._kafka_message import KafkaMessage
 from ._queue import Queue
+from ._serialization import Serializer
 from ._sys_db import GetWorkflowsInput, WorkflowStatus, WorkflowStatusString
 
 __all__ = [
@@ -35,4 +36,5 @@ __all__ = [
     "Queue",
     "Debouncer",
     "DebouncerClient",
+    "Serializer",
 ]
dbos/_admin_server.py
CHANGED

@@ -338,6 +338,7 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
             end_time=filters.get("end_time"),
             status=filters.get("status"),
             app_version=filters.get("application_version"),
+            forked_from=filters.get("forked_from"),
             name=filters.get("workflow_name"),
             limit=filters.get("limit"),
             offset=filters.get("offset"),
@@ -364,6 +365,7 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
             start_time=filters.get("start_time"),
             end_time=filters.get("end_time"),
             status=filters.get("status"),
+            forked_from=filters.get("forked_from"),
             name=filters.get("workflow_name"),
             limit=filters.get("limit"),
             offset=filters.get("offset"),
dbos/_app_db.py
CHANGED

@@ -8,8 +8,8 @@ from sqlalchemy.exc import DBAPIError
 from sqlalchemy.orm import Session, sessionmaker
 
 from dbos._migration import get_sqlite_timestamp_expr
+from dbos._serialization import Serializer
 
-from . import _serialization
 from ._error import DBOSUnexpectedStepError, DBOSWorkflowConflictIDError
 from ._logger import dbos_logger
 from ._schemas.application_database import ApplicationSchema
@@ -34,17 +34,52 @@ class RecordedResult(TypedDict):
 
 class ApplicationDatabase(ABC):
 
+    @staticmethod
+    def create(
+        database_url: str,
+        engine_kwargs: Dict[str, Any],
+        schema: Optional[str],
+        serializer: Serializer,
+        debug_mode: bool = False,
+    ) -> "ApplicationDatabase":
+        """Factory method to create the appropriate ApplicationDatabase implementation based on URL."""
+        if database_url.startswith("sqlite"):
+            return SQLiteApplicationDatabase(
+                database_url=database_url,
+                engine_kwargs=engine_kwargs,
+                schema=schema,
+                serializer=serializer,
+                debug_mode=debug_mode,
+            )
+        else:
+            # Default to PostgreSQL for postgresql://, postgres://, or other URLs
+            return PostgresApplicationDatabase(
+                database_url=database_url,
+                engine_kwargs=engine_kwargs,
+                schema=schema,
+                serializer=serializer,
+                debug_mode=debug_mode,
+            )
+
     def __init__(
         self,
         *,
         database_url: str,
         engine_kwargs: Dict[str, Any],
+        serializer: Serializer,
+        schema: Optional[str],
         debug_mode: bool = False,
     ):
+        if database_url.startswith("sqlite"):
+            self.schema = None
+        else:
+            self.schema = schema if schema else "dbos"
+            ApplicationSchema.transaction_outputs.schema = schema
         self.engine = self._create_engine(database_url, engine_kwargs)
         self._engine_kwargs = engine_kwargs
         self.sessionmaker = sessionmaker(bind=self.engine)
         self.debug_mode = debug_mode
+        self.serializer = serializer
 
     @abstractmethod
     def _create_engine(
@@ -156,14 +191,18 @@
                 function_id=row[0],
                 function_name=row[1],
                 output=(
-                    _serialization.deserialize(row[2]) if row[2] is not None else row[2]
+                    self.serializer.deserialize(row[2])
+                    if row[2] is not None
+                    else row[2]
                 ),
                 error=(
-                    _serialization.deserialize(row[3])
+                    self.serializer.deserialize(row[3])
                     if row[3] is not None
                     else row[3]
                 ),
                 child_workflow_id=None,
+                started_at_epoch_ms=None,
+                completed_at_epoch_ms=None,
             )
             for row in rows
         ]
@@ -237,52 +276,10 @@
         """Check if the error is a serialization/concurrency error."""
         pass
 
-    @staticmethod
-    def create(
-        database_url: str,
-        engine_kwargs: Dict[str, Any],
-        schema: Optional[str],
-        debug_mode: bool = False,
-    ) -> "ApplicationDatabase":
-        """Factory method to create the appropriate ApplicationDatabase implementation based on URL."""
-        if database_url.startswith("sqlite"):
-            return SQLiteApplicationDatabase(
-                database_url=database_url,
-                engine_kwargs=engine_kwargs,
-                debug_mode=debug_mode,
-            )
-        else:
-            # Default to PostgreSQL for postgresql://, postgres://, or other URLs
-            return PostgresApplicationDatabase(
-                database_url=database_url,
-                engine_kwargs=engine_kwargs,
-                debug_mode=debug_mode,
-                schema=schema,
-            )
-
 
 class PostgresApplicationDatabase(ApplicationDatabase):
     """PostgreSQL-specific implementation of ApplicationDatabase."""
 
-    def __init__(
-        self,
-        *,
-        database_url: str,
-        engine_kwargs: Dict[str, Any],
-        schema: Optional[str],
-        debug_mode: bool = False,
-    ):
-        super().__init__(
-            database_url=database_url,
-            engine_kwargs=engine_kwargs,
-            debug_mode=debug_mode,
-        )
-        if schema is None:
-            self.schema = "dbos"
-        else:
-            self.schema = schema
-        ApplicationSchema.transaction_outputs.schema = schema
-
     def _create_engine(
        self, database_url: str, engine_kwargs: Dict[str, Any]
     ) -> sa.Engine:
dbos/_client.py
CHANGED

@@ -1,6 +1,6 @@
 import asyncio
+import json
 import time
-import uuid
 from typing import (
     TYPE_CHECKING,
     Any,
@@ -16,10 +16,10 @@ from typing import (
 
 import sqlalchemy as sa
 
-from dbos import _serialization
 from dbos._app_db import ApplicationDatabase
 from dbos._context import MaxPriority, MinPriority
 from dbos._sys_db import SystemDatabase
+from dbos._utils import generate_uuid
 
 if TYPE_CHECKING:
     from dbos._dbos import WorkflowHandle, WorkflowHandleAsync
@@ -27,7 +27,7 @@ if TYPE_CHECKING:
 from dbos._dbos_config import get_system_database_url, is_valid_database_url
 from dbos._error import DBOSException, DBOSNonExistentWorkflowError
 from dbos._registrations import DEFAULT_MAX_RECOVERY_ATTEMPTS
-from dbos._serialization import WorkflowInputs
+from dbos._serialization import DefaultSerializer, Serializer, WorkflowInputs
 from dbos._sys_db import (
     EnqueueOptionsInternal,
     StepInfo,
@@ -63,6 +63,9 @@ class EnqueueOptions(_EnqueueOptionsRequired, total=False):
     deduplication_id: str
     priority: int
     max_recovery_attempts: int
+    queue_partition_key: str
+    authenticated_user: str
+    authenticated_roles: list[str]
 
 
 def validate_enqueue_options(options: EnqueueOptions) -> None:
@@ -127,7 +130,9 @@ class DBOSClient:
         system_database_engine: Optional[sa.Engine] = None,
         application_database_url: Optional[str] = None,
         dbos_system_schema: Optional[str] = "dbos",
+        serializer: Serializer = DefaultSerializer(),
     ):
+        self._serializer = serializer
         application_database_url = (
             database_url if database_url else application_database_url
         )
@@ -144,23 +149,30 @@
         self._sys_db = SystemDatabase.create(
             system_database_url=system_database_url,
             engine_kwargs={
+                "connect_args": {"application_name": "dbos_transact_client"},
                 "pool_timeout": 30,
                 "max_overflow": 0,
                 "pool_size": 2,
+                "pool_pre_ping": True,
             },
             engine=system_database_engine,
             schema=dbos_system_schema,
+            serializer=serializer,
+            executor_id=None,
         )
         self._sys_db.check_connection()
         if application_database_url:
             self._app_db = ApplicationDatabase.create(
                 database_url=application_database_url,
                 engine_kwargs={
+                    "connect_args": {"application_name": "dbos_transact_client"},
                     "pool_timeout": 30,
                     "max_overflow": 0,
                     "pool_size": 2,
+                    "pool_pre_ping": True,
                 },
                 schema=dbos_system_schema,
+                serializer=serializer,
             )
 
     def destroy(self) -> None:
@@ -176,14 +188,22 @@
             max_recovery_attempts = DEFAULT_MAX_RECOVERY_ATTEMPTS
         workflow_id = options.get("workflow_id")
         if workflow_id is None:
-            workflow_id = str(uuid.uuid4())
+            workflow_id = generate_uuid()
         workflow_timeout = options.get("workflow_timeout", None)
         enqueue_options_internal: EnqueueOptionsInternal = {
             "deduplication_id": options.get("deduplication_id"),
             "priority": options.get("priority"),
             "app_version": options.get("app_version"),
+            "queue_partition_key": options.get("queue_partition_key"),
         }
 
+        authenticated_user = options.get("authenticated_user")
+        authenticated_roles = (
+            json.dumps(options.get("authenticated_roles"))
+            if options.get("authenticated_roles")
+            else None
+        )
+
         inputs: WorkflowInputs = {
             "args": args,
             "kwargs": kwargs,
@@ -197,9 +217,9 @@
             "queue_name": queue_name,
             "app_version": enqueue_options_internal["app_version"],
             "config_name": None,
-            "authenticated_user": None,
+            "authenticated_user": authenticated_user,
             "assumed_role": None,
-            "authenticated_roles": None,
+            "authenticated_roles": authenticated_roles,
             "output": None,
             "error": None,
             "created_at": None,
@@ -217,7 +237,9 @@
                 if enqueue_options_internal["priority"] is not None
                 else 0
             ),
-            "inputs": _serialization.serialize_args(inputs),
+            "inputs": self._serializer.serialize(inputs),
+            "queue_partition_key": enqueue_options_internal["queue_partition_key"],
+            "forked_from": None,
         }
 
         self._sys_db.init_workflow(
@@ -259,7 +281,7 @@
         topic: Optional[str] = None,
         idempotency_key: Optional[str] = None,
     ) -> None:
-        idempotency_key = idempotency_key if idempotency_key else str(uuid.uuid4())
+        idempotency_key = idempotency_key if idempotency_key else generate_uuid()
         status: WorkflowStatusInternal = {
             "workflow_uuid": f"{destination_id}-{idempotency_key}",
             "status": WorkflowStatusString.SUCCESS.value,
@@ -282,7 +304,9 @@
             "workflow_deadline_epoch_ms": None,
             "deduplication_id": None,
             "priority": 0,
-            "inputs": _serialization.serialize_args({"args": (), "kwargs": {}}),
+            "inputs": self._serializer.serialize({"args": (), "kwargs": {}}),
+            "queue_partition_key": None,
+            "forked_from": None,
         }
         with self._sys_db.engine.begin() as conn:
             self._sys_db._insert_workflow_status(
dbos/_conductor/conductor.py
CHANGED

@@ -2,7 +2,6 @@ import socket
 import threading
 import time
 import traceback
-import uuid
 from importlib.metadata import version
 from typing import TYPE_CHECKING, Optional
 
@@ -11,7 +10,7 @@ from websockets.sync.client import connect
 from websockets.sync.connection import Connection
 
 from dbos._context import SetWorkflowID
-from dbos._utils import GlobalParams
+from dbos._utils import GlobalParams, generate_uuid
 from dbos._workflow_commands import (
     garbage_collect,
     get_workflow,
@@ -192,7 +191,7 @@
                 fork_message = p.ForkWorkflowRequest.from_json(message)
                 new_workflow_id = fork_message.body["new_workflow_id"]
                 if new_workflow_id is None:
-                    new_workflow_id = str(uuid.uuid4())
+                    new_workflow_id = generate_uuid()
                 workflow_id = fork_message.body["workflow_id"]
                 start_step = fork_message.body["start_step"]
                 app_version = fork_message.body["application_version"]
@@ -223,22 +222,21 @@
                 body = list_workflows_message.body
                 infos = []
                 try:
-                    load_input = body.get("load_input", False)
-                    load_output = body.get("load_output", False)
                     infos = list_workflows(
                         self.dbos._sys_db,
-                        workflow_ids=body["workflow_uuids"],
-                        user=body["authenticated_user"],
-                        start_time=body["start_time"],
-                        end_time=body["end_time"],
-                        status=body["status"],
-                        app_version=body["application_version"],
-                        name=body["workflow_name"],
-                        limit=body["limit"],
-                        offset=body["offset"],
-                        sort_desc=body["sort_desc"],
-                        load_input=load_input,
-                        load_output=load_output,
+                        workflow_ids=body.get("workflow_uuids", None),
+                        user=body.get("authenticated_user", None),
+                        start_time=body.get("start_time", None),
+                        end_time=body.get("end_time", None),
+                        status=body.get("status", None),
+                        app_version=body.get("application_version", None),
+                        forked_from=body.get("forked_from", None),
+                        name=body.get("workflow_name", None),
+                        limit=body.get("limit", None),
+                        offset=body.get("offset", None),
+                        sort_desc=body.get("sort_desc", False),
+                        load_input=body.get("load_input", False),
+                        load_output=body.get("load_output", False),
                     )
                 except Exception as e:
                     error_message = f"Exception encountered when listing workflows: {traceback.format_exc()}"
@@ -261,18 +259,18 @@
                 q_body = list_queued_workflows_message.body
                 infos = []
                 try:
-                    q_load_input = q_body.get("load_input", False)
                     infos = list_queued_workflows(
                         self.dbos._sys_db,
-                        start_time=q_body["start_time"],
-                        end_time=q_body["end_time"],
-                        status=q_body["status"],
-                        name=q_body["workflow_name"],
-                        limit=q_body["limit"],
-                        offset=q_body["offset"],
-                        queue_name=q_body["queue_name"],
-                        sort_desc=q_body["sort_desc"],
-                        load_input=q_load_input,
+                        start_time=q_body.get("start_time", None),
+                        end_time=q_body.get("end_time", None),
+                        status=q_body.get("status", None),
+                        forked_from=q_body.get("forked_from", None),
+                        name=q_body.get("workflow_name", None),
+                        limit=q_body.get("limit", None),
+                        offset=q_body.get("offset", None),
+                        queue_name=q_body.get("queue_name", None),
+                        sort_desc=q_body.get("sort_desc", False),
+                        load_input=q_body.get("load_input", False),
                     )
                 except Exception as e:
                     error_message = f"Exception encountered when listing queued workflows: {traceback.format_exc()}"
dbos/_conductor/protocol.py
CHANGED

@@ -118,6 +118,7 @@ class ListWorkflowsBody(TypedDict, total=False):
     end_time: Optional[str]
     status: Optional[str]
     application_version: Optional[str]
+    forked_from: Optional[str]
     limit: Optional[int]
     offset: Optional[int]
     sort_desc: bool
@@ -143,6 +144,12 @@
     QueueName: Optional[str]
     ApplicationVersion: Optional[str]
     ExecutorID: Optional[str]
+    WorkflowTimeoutMS: Optional[str]
+    WorkflowDeadlineEpochMS: Optional[str]
+    DeduplicationID: Optional[str]
+    Priority: Optional[str]
+    QueuePartitionKey: Optional[str]
+    ForkedFrom: Optional[str]
 
     @classmethod
     def from_workflow_information(cls, info: WorkflowStatus) -> "WorkflowsOutput":
@@ -152,12 +159,22 @@
         inputs_str = str(info.input) if info.input is not None else None
         outputs_str = str(info.output) if info.output is not None else None
         error_str = str(info.error) if info.error is not None else None
-        request_str = None
         roles_str = (
             str(info.authenticated_roles)
             if info.authenticated_roles is not None
             else None
         )
+        workflow_timeout_ms_str = (
+            str(info.workflow_timeout_ms)
+            if info.workflow_timeout_ms is not None
+            else None
+        )
+        workflow_deadline_epoch_ms_str = (
+            str(info.workflow_deadline_epoch_ms)
+            if info.workflow_deadline_epoch_ms is not None
+            else None
+        )
+        priority_str = str(info.priority) if info.priority is not None else None
 
         return cls(
             WorkflowUUID=info.workflow_id,
@@ -176,6 +193,12 @@
             QueueName=info.queue_name,
             ApplicationVersion=info.app_version,
             ExecutorID=info.executor_id,
+            WorkflowTimeoutMS=workflow_timeout_ms_str,
+            WorkflowDeadlineEpochMS=workflow_deadline_epoch_ms_str,
+            DeduplicationID=info.deduplication_id,
+            Priority=priority_str,
+            QueuePartitionKey=info.queue_partition_key,
+            ForkedFrom=info.forked_from,
         )
 
 
@@ -186,14 +209,28 @@ class WorkflowSteps:
     output: Optional[str]
     error: Optional[str]
     child_workflow_id: Optional[str]
+    started_at_epoch_ms: Optional[str]
+    completed_at_epoch_ms: Optional[str]
 
     @classmethod
     def from_step_info(cls, info: StepInfo) -> "WorkflowSteps":
         output_str = str(info["output"]) if info["output"] is not None else None
         error_str = str(info["error"]) if info["error"] is not None else None
+        started_at_str = (
+            str(info["started_at_epoch_ms"])
+            if info["started_at_epoch_ms"] is not None
+            else None
+        )
+        completed_at_str = (
+            str(info["completed_at_epoch_ms"])
+            if info["completed_at_epoch_ms"] is not None
+            else None
+        )
         return cls(
             function_id=info["function_id"],
             function_name=info["function_name"],
+            started_at_epoch_ms=started_at_str,
+            completed_at_epoch_ms=completed_at_str,
             output=output_str,
             error=error_str,
             child_workflow_id=info["child_workflow_id"],
@@ -216,6 +253,7 @@ class ListQueuedWorkflowsBody(TypedDict, total=False):
     start_time: Optional[str]
     end_time: Optional[str]
     status: Optional[str]
+    forked_from: Optional[str]
     queue_name: Optional[str]
     limit: Optional[int]
     offset: Optional[int]
dbos/_context.py
CHANGED

@@ -2,7 +2,6 @@ from __future__ import annotations
 
 import json
 import os
-import uuid
 from contextlib import AbstractContextManager
 from contextvars import ContextVar
 from dataclasses import dataclass
@@ -15,7 +14,7 @@ if TYPE_CHECKING:
 
 from sqlalchemy.orm import Session
 
-from dbos._utils import GlobalParams
+from dbos._utils import GlobalParams, generate_uuid
 
 from ._logger import dbos_logger
 from ._tracer import dbos_tracer
@@ -120,6 +119,8 @@ class DBOSContext:
         self.deduplication_id: Optional[str] = None
         # A user-specified priority for the enqueuing workflow.
         self.priority: Optional[int] = None
+        # If the workflow is enqueued on a partitioned queue, its partition key
+        self.queue_partition_key: Optional[str] = None
 
     def create_child(self) -> DBOSContext:
         rv = DBOSContext()
@@ -149,7 +150,7 @@
             self.logger.warning(
                 f"Multiple workflows started in the same SetWorkflowID block. Only the first workflow is assigned the specified workflow ID; subsequent workflows will use a generated workflow ID."
             )
-            wfid = str(uuid.uuid4())
+            wfid = generate_uuid()
         return wfid
 
     def start_workflow(
@@ -479,6 +480,7 @@ class SetEnqueueOptions:
         deduplication_id: Optional[str] = None,
         priority: Optional[int] = None,
         app_version: Optional[str] = None,
+        queue_partition_key: Optional[str] = None,
     ) -> None:
         self.created_ctx = False
         self.deduplication_id: Optional[str] = deduplication_id
@@ -491,6 +493,8 @@
         self.saved_priority: Optional[int] = None
         self.app_version: Optional[str] = app_version
         self.saved_app_version: Optional[str] = None
+        self.queue_partition_key = queue_partition_key
+        self.saved_queue_partition_key: Optional[str] = None
 
     def __enter__(self) -> SetEnqueueOptions:
         # Code to create a basic context
@@ -505,6 +509,8 @@
         ctx.priority = self.priority
         self.saved_app_version = ctx.app_version
         ctx.app_version = self.app_version
+        self.saved_queue_partition_key = ctx.queue_partition_key
+        ctx.queue_partition_key = self.queue_partition_key
         return self
 
     def __exit__(
@@ -517,6 +523,7 @@
         curr_ctx.deduplication_id = self.saved_deduplication_id
         curr_ctx.priority = self.saved_priority
         curr_ctx.app_version = self.saved_app_version
+        curr_ctx.queue_partition_key = self.saved_queue_partition_key
         # Code to clean up the basic context if we created it
         if self.created_ctx:
             _clear_local_dbos_context()