dbos 2.4.0a7__py3-none-any.whl → 2.6.0a8__py3-none-any.whl
This diff compares the contents of two publicly released versions of this package as they appear in their respective public registries. It is provided for informational purposes only.
Note: this version of dbos has been flagged as potentially problematic.
- dbos/__init__.py +2 -0
- dbos/_app_db.py +29 -87
- dbos/_client.py +12 -8
- dbos/_conductor/conductor.py +40 -5
- dbos/_conductor/protocol.py +23 -0
- dbos/_core.py +98 -30
- dbos/_dbos.py +15 -12
- dbos/_dbos_config.py +2 -19
- dbos/_fastapi.py +2 -1
- dbos/_logger.py +12 -6
- dbos/_migration.py +30 -0
- dbos/_queue.py +94 -37
- dbos/_schemas/system_database.py +20 -0
- dbos/_sys_db.py +302 -92
- dbos/_sys_db_postgres.py +18 -12
- dbos/_tracer.py +9 -2
- dbos/_workflow_commands.py +0 -15
- dbos/cli/cli.py +8 -18
- dbos/cli/migration.py +28 -1
- {dbos-2.4.0a7.dist-info → dbos-2.6.0a8.dist-info}/METADATA +1 -1
- {dbos-2.4.0a7.dist-info → dbos-2.6.0a8.dist-info}/RECORD +24 -24
- {dbos-2.4.0a7.dist-info → dbos-2.6.0a8.dist-info}/WHEEL +0 -0
- {dbos-2.4.0a7.dist-info → dbos-2.6.0a8.dist-info}/entry_points.txt +0 -0
- {dbos-2.4.0a7.dist-info → dbos-2.6.0a8.dist-info}/licenses/LICENSE +0 -0
dbos/__init__.py
CHANGED
@@ -14,6 +14,7 @@ from ._kafka_message import KafkaMessage
 from ._queue import Queue
 from ._serialization import Serializer
 from ._sys_db import GetWorkflowsInput, WorkflowStatus, WorkflowStatusString
+from .cli.migration import run_dbos_database_migrations
 
 __all__ = [
     "DBOSConfig",
@@ -37,4 +38,5 @@ __all__ = [
     "Debouncer",
    "DebouncerClient",
     "Serializer",
+    "run_dbos_database_migrations",
 ]
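The package now re-exports its migration entry point at top level. A minimal usage sketch; the exact signature of run_dbos_database_migrations is not shown in this diff, so the zero-argument call is an assumption:

from dbos import run_dbos_database_migrations

# Hypothetical invocation (assumed signature): run DBOS database migrations
# once at deploy time, before starting any application processes.
run_dbos_database_migrations()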
dbos/_app_db.py
CHANGED
@@ -70,6 +70,17 @@ class ApplicationDatabase(ABC):
         schema: Optional[str],
         debug_mode: bool = False,
     ):
+        # Log application database connection information
+        printable_url = sa.make_url(database_url).render_as_string(hide_password=True)
+        dbos_logger.info(
+            f"Initializing DBOS application database with URL: {printable_url}"
+        )
+        if not database_url.startswith("sqlite"):
+            dbos_logger.info(
+                f"DBOS application database engine parameters: {engine_kwargs}"
+            )
+
+        # Configure and initialize the application database
         if database_url.startswith("sqlite"):
             self.schema = None
         else:
@@ -173,81 +184,6 @@ class ApplicationDatabase(ABC):
         }
         return result
 
-    def get_transactions(self, workflow_uuid: str) -> List[StepInfo]:
-        with self.engine.begin() as conn:
-            rows = conn.execute(
-                sa.select(
-                    ApplicationSchema.transaction_outputs.c.function_id,
-                    ApplicationSchema.transaction_outputs.c.function_name,
-                    ApplicationSchema.transaction_outputs.c.output,
-                    ApplicationSchema.transaction_outputs.c.error,
-                ).where(
-                    ApplicationSchema.transaction_outputs.c.workflow_uuid
-                    == workflow_uuid,
-                )
-            ).all()
-            return [
-                StepInfo(
-                    function_id=row[0],
-                    function_name=row[1],
-                    output=(
-                        self.serializer.deserialize(row[2])
-                        if row[2] is not None
-                        else row[2]
-                    ),
-                    error=(
-                        self.serializer.deserialize(row[3])
-                        if row[3] is not None
-                        else row[3]
-                    ),
-                    child_workflow_id=None,
-                    started_at_epoch_ms=None,
-                    completed_at_epoch_ms=None,
-                )
-                for row in rows
-            ]
-
-    def clone_workflow_transactions(
-        self, src_workflow_id: str, forked_workflow_id: str, start_step: int
-    ) -> None:
-        """
-        Copies all steps from dbos.transaction_outputs where function_id < input function_id
-        into a new workflow_uuid. Returns the new workflow_uuid.
-        """
-
-        with self.engine.begin() as conn:
-
-            insert_stmt = sa.insert(ApplicationSchema.transaction_outputs).from_select(
-                [
-                    "workflow_uuid",
-                    "function_id",
-                    "output",
-                    "error",
-                    "txn_id",
-                    "txn_snapshot",
-                    "executor_id",
-                    "function_name",
-                ],
-                sa.select(
-                    sa.literal(forked_workflow_id).label("workflow_uuid"),
-                    ApplicationSchema.transaction_outputs.c.function_id,
-                    ApplicationSchema.transaction_outputs.c.output,
-                    ApplicationSchema.transaction_outputs.c.error,
-                    ApplicationSchema.transaction_outputs.c.txn_id,
-                    ApplicationSchema.transaction_outputs.c.txn_snapshot,
-                    ApplicationSchema.transaction_outputs.c.executor_id,
-                    ApplicationSchema.transaction_outputs.c.function_name,
-                ).where(
-                    (
-                        ApplicationSchema.transaction_outputs.c.workflow_uuid
-                        == src_workflow_id
-                    )
-                    & (ApplicationSchema.transaction_outputs.c.function_id < start_step)
-                ),
-            )
-
-            conn.execute(insert_stmt)
-
     def garbage_collect(
         self, cutoff_epoch_timestamp_ms: int, pending_workflow_ids: list[str]
     ) -> None:
@@ -302,18 +238,24 @@ class PostgresApplicationDatabase(ApplicationDatabase):
             return
         # Check if the database exists
         app_db_url = self.engine.url
-        …
+        try:
+            postgres_db_engine = sa.create_engine(
+                app_db_url.set(database="postgres"),
+                **self._engine_kwargs,
+            )
+            with postgres_db_engine.connect() as conn:
+                conn.execution_options(isolation_level="AUTOCOMMIT")
+                if not conn.execute(
+                    sa.text("SELECT 1 FROM pg_database WHERE datname=:db_name"),
+                    parameters={"db_name": app_db_url.database},
+                ).scalar():
+                    conn.execute(sa.text(f"CREATE DATABASE {app_db_url.database}"))
+        except Exception:
+            dbos_logger.warning(
+                f"Could not connect to postgres database to verify existence of {app_db_url.database}. Continuing..."
+            )
+        finally:
+            postgres_db_engine.dispose()
 
         # Create the dbos schema and transaction_outputs table in the application database
         with self.engine.begin() as conn:
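For context on the new constructor logging: the masked URL comes from SQLAlchemy's URL rendering, which the diff uses via sa.make_url(...).render_as_string(hide_password=True). A small self-contained sketch with a made-up URL:

import sqlalchemy as sa

url = "postgresql://alice:secret@localhost:5432/appdb"  # hypothetical URL
# hide_password=True replaces the password with "***" in the rendered string.
print(sa.make_url(url).render_as_string(hide_password=True))
# postgresql://alice:***@localhost:5432/appdb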
dbos/_client.py
CHANGED
@@ -18,6 +18,7 @@ import sqlalchemy as sa
 
 from dbos._app_db import ApplicationDatabase
 from dbos._context import MaxPriority, MinPriority
+from dbos._core import DEFAULT_POLLING_INTERVAL
 from dbos._sys_db import SystemDatabase
 from dbos._utils import generate_uuid
 
@@ -42,7 +43,6 @@ from dbos._workflow_commands import (
     fork_workflow,
     get_workflow,
     list_queued_workflows,
-    list_workflow_steps,
     list_workflows,
 )
 
@@ -85,8 +85,12 @@ class WorkflowHandleClientPolling(Generic[R]):
     def get_workflow_id(self) -> str:
         return self.workflow_id
 
-    def get_result(self) -> R:
-        res: R = self._sys_db.await_workflow_result(self.workflow_id)
+    def get_result(
+        self, *, polling_interval_sec: float = DEFAULT_POLLING_INTERVAL
+    ) -> R:
+        res: R = self._sys_db.await_workflow_result(
+            self.workflow_id, polling_interval_sec
+        )
         return res
 
     def get_status(self) -> WorkflowStatus:
@@ -105,9 +109,11 @@ class WorkflowHandleClientAsyncPolling(Generic[R]):
     def get_workflow_id(self) -> str:
         return self.workflow_id
 
-    async def get_result(self) -> R:
+    async def get_result(
+        self, *, polling_interval_sec: float = DEFAULT_POLLING_INTERVAL
+    ) -> R:
         res: R = await asyncio.to_thread(
-            self._sys_db.await_workflow_result, self.workflow_id
+            self._sys_db.await_workflow_result, self.workflow_id, polling_interval_sec
         )
         return res
 
@@ -472,7 +478,7 @@ class DBOSClient:
         )
 
     def list_workflow_steps(self, workflow_id: str) -> List[StepInfo]:
-        return list_workflow_steps(self._sys_db, self._app_db, workflow_id)
+        return self._sys_db.list_workflow_steps(workflow_id)
 
     async def list_workflow_steps_async(self, workflow_id: str) -> List[StepInfo]:
         return await asyncio.to_thread(self.list_workflow_steps, workflow_id)
@@ -486,7 +492,6 @@ class DBOSClient:
     ) -> "WorkflowHandle[Any]":
         forked_workflow_id = fork_workflow(
             self._sys_db,
-            self._app_db,
             workflow_id,
             start_step,
             application_version=application_version,
@@ -503,7 +508,6 @@ class DBOSClient:
         forked_workflow_id = await asyncio.to_thread(
            fork_workflow,
             self._sys_db,
-            self._app_db,
             workflow_id,
             start_step,
             application_version=application_version,
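Client-side handles now take a keyword-only polling interval, defaulting to DEFAULT_POLLING_INTERVAL (1.0 second). A hypothetical usage sketch; the connection string and workflow ID are placeholders, and retrieve_workflow is the existing handle-lookup API:

from dbos import DBOSClient

client = DBOSClient("postgresql://localhost:5432/dbos")  # placeholder URL
handle = client.retrieve_workflow("my-workflow-id")  # placeholder ID
# Poll the system database every 0.25 s instead of the 1.0 s default.
result = handle.get_result(polling_interval_sec=0.25)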
dbos/_conductor/conductor.py
CHANGED
@@ -16,7 +16,6 @@ from dbos._workflow_commands import (
     get_workflow,
     global_timeout,
     list_queued_workflows,
-    list_workflow_steps,
     list_workflows,
 )
 
@@ -117,6 +116,8 @@ class ConductorWebsocket(threading.Thread):
                     executor_id=GlobalParams.executor_id,
                     application_version=GlobalParams.app_version,
                     hostname=socket.gethostname(),
+                    language="python",
+                    dbos_version=GlobalParams.dbos_version,
                 )
                 websocket.send(info_response.to_json())
                 self.dbos.logger.info("Connected to DBOS conductor")
@@ -339,10 +340,8 @@ class ConductorWebsocket(threading.Thread):
                     list_steps_message = p.ListStepsRequest.from_json(message)
                     step_info = None
                     try:
-                        step_info = list_workflow_steps(
-                            self.dbos._sys_db,
-                            self.dbos._app_db,
-                            list_steps_message.workflow_id,
+                        step_info = self.dbos._sys_db.list_workflow_steps(
+                            list_steps_message.workflow_id
                         )
                     except Exception as e:
                         error_message = f"Exception encountered when getting workflow {list_steps_message.workflow_id}: {traceback.format_exc()}"
@@ -397,6 +396,42 @@ class ConductorWebsocket(threading.Thread):
                         error_message=error_message,
                     )
                     websocket.send(retention_response.to_json())
+                elif msg_type == p.MessageType.GET_METRICS:
+                    get_metrics_message = p.GetMetricsRequest.from_json(message)
+                    self.dbos.logger.debug(
+                        f"Received metrics request for time range {get_metrics_message.start_time} to {get_metrics_message.end_time}"
+                    )
+                    metrics_data = []
+                    if (
+                        get_metrics_message.metric_class
+                        == "workflow_step_count"
+                    ):
+                        try:
+                            sys_metrics = self.dbos._sys_db.get_metrics(
+                                get_metrics_message.start_time,
+                                get_metrics_message.end_time,
+                            )
+                            metrics_data = [
+                                p.MetricData(
+                                    metric_type=m["metric_type"],
+                                    metric_name=m["metric_name"],
+                                    value=m["value"],
+                                )
+                                for m in sys_metrics
+                            ]
+                        except Exception as e:
+                            error_message = f"Exception encountered when getting metrics: {traceback.format_exc()}"
+                            self.dbos.logger.error(error_message)
+                    else:
+                        error_message = f"Unexpected metric class: {get_metrics_message.metric_class}"
+                        self.dbos.logger.warning(error_message)
+                    get_metrics_response = p.GetMetricsResponse(
+                        type=p.MessageType.GET_METRICS,
+                        request_id=base_message.request_id,
+                        metrics=metrics_data,
+                        error_message=error_message,
+                    )
+                    websocket.send(get_metrics_response.to_json())
                 else:
                     self.dbos.logger.warning(
                         f"Unexpected message type: {msg_type}"
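The GET_METRICS handler above indexes each row returned by the new SystemDatabase.get_metrics with metric_type, metric_name, and value keys, implying a row shape like the following (values are illustrative):

# Hypothetical row shape consumed by the GET_METRICS handler.
sys_metrics = [
    {"metric_type": "workflow", "metric_name": "workflow_step_count", "value": 42},
]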
dbos/_conductor/protocol.py
CHANGED
@@ -19,6 +19,7 @@ class MessageType(str, Enum):
     LIST_STEPS = "list_steps"
     FORK_WORKFLOW = "fork_workflow"
     RETENTION = "retention"
+    GET_METRICS = "get_metrics"
 
 
 T = TypeVar("T", bound="BaseMessage")
@@ -63,6 +64,8 @@ class ExecutorInfoResponse(BaseMessage):
     executor_id: str
     application_version: str
     hostname: Optional[str]
+    language: Optional[str]
+    dbos_version: Optional[str]
     error_message: Optional[str] = None
 
 
@@ -339,3 +342,23 @@ class RetentionRequest(BaseMessage):
 class RetentionResponse(BaseMessage):
     success: bool
     error_message: Optional[str] = None
+
+
+@dataclass
+class GetMetricsRequest(BaseMessage):
+    start_time: str  # ISO 8601
+    end_time: str  # ISO 8601
+    metric_class: str
+
+
+@dataclass
+class MetricData:
+    metric_type: str
+    metric_name: str
+    value: int
+
+
+@dataclass
+class GetMetricsResponse(BaseMessage):
+    metrics: List[MetricData]
+    error_message: Optional[str] = None
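A sketch of constructing the new request message; the timestamps are illustrative, and BaseMessage is assumed to contribute the type and request_id fields plus the to_json/from_json helpers used elsewhere in this module:

import uuid

from dbos._conductor import protocol as p

req = p.GetMetricsRequest(
    type=p.MessageType.GET_METRICS,
    request_id=str(uuid.uuid4()),
    start_time="2025-01-01T00:00:00Z",  # ISO 8601
    end_time="2025-01-02T00:00:00Z",  # ISO 8601
    metric_class="workflow_step_count",
)
payload = req.to_json()  # sent over the conductor websocket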
dbos/_core.py
CHANGED
@@ -91,6 +91,7 @@ F = TypeVar("F", bound=Callable[..., Any])
 
 TEMP_SEND_WF_NAME = "<temp>.temp_send_workflow"
 DEBOUNCER_WORKFLOW_NAME = "_dbos_debouncer_workflow"
+DEFAULT_POLLING_INTERVAL = 1.0
 
 
 class WorkflowHandleFuture(Generic[R]):
@@ -103,7 +104,9 @@ class WorkflowHandleFuture(Generic[R]):
     def get_workflow_id(self) -> str:
         return self.workflow_id
 
-    def get_result(self) -> R:
+    def get_result(
+        self, *, polling_interval_sec: float = DEFAULT_POLLING_INTERVAL
+    ) -> R:
         try:
             r = self.future.result()
         except Exception as e:
@@ -130,9 +133,13 @@ class WorkflowHandlePolling(Generic[R]):
     def get_workflow_id(self) -> str:
         return self.workflow_id
 
-    def get_result(self) -> R:
+    def get_result(
+        self, *, polling_interval_sec: float = DEFAULT_POLLING_INTERVAL
+    ) -> R:
         try:
-            r: R = self.dbos._sys_db.await_workflow_result(self.workflow_id)
+            r: R = self.dbos._sys_db.await_workflow_result(
+                self.workflow_id, polling_interval_sec
+            )
         except Exception as e:
             serialized_e = self.dbos._serializer.serialize(e)
             self.dbos._sys_db.record_get_result(self.workflow_id, None, serialized_e)
@@ -158,7 +165,9 @@ class WorkflowHandleAsyncTask(Generic[R]):
     def get_workflow_id(self) -> str:
         return self.workflow_id
 
-    async def get_result(self) -> R:
+    async def get_result(
+        self, *, polling_interval_sec: float = DEFAULT_POLLING_INTERVAL
+    ) -> R:
         try:
             r = await self.task
         except Exception as e:
@@ -192,10 +201,14 @@ class WorkflowHandleAsyncPolling(Generic[R]):
     def get_workflow_id(self) -> str:
         return self.workflow_id
 
-    async def get_result(self) -> R:
+    async def get_result(
+        self, *, polling_interval_sec: float = DEFAULT_POLLING_INTERVAL
+    ) -> R:
         try:
             r: R = await asyncio.to_thread(
-                self.dbos._sys_db.await_workflow_result, self.workflow_id
+                self.dbos._sys_db.await_workflow_result,
+                self.workflow_id,
+                polling_interval_sec,
             )
         except Exception as e:
             serialized_e = self.dbos._serializer.serialize(e)
@@ -366,7 +379,7 @@ def _get_wf_invoke_func(
             )
             # Directly return the result if the workflow is already completed
             recorded_result: R = dbos._sys_db.await_workflow_result(
-                status["workflow_uuid"]
+                status["workflow_uuid"], polling_interval=DEFAULT_POLLING_INTERVAL
             )
             return recorded_result
         try:
@@ -381,7 +394,9 @@ def _get_wf_invoke_func(
             return output
         except DBOSWorkflowConflictIDError:
             # Await the workflow result
-            r: R = dbos._sys_db.await_workflow_result(status["workflow_uuid"])
+            r: R = dbos._sys_db.await_workflow_result(
+                status["workflow_uuid"], polling_interval=DEFAULT_POLLING_INTERVAL
+            )
             return r
         except DBOSWorkflowCancelledError as error:
             raise DBOSAwaitedWorkflowCancelledError(status["workflow_uuid"])
@@ -579,11 +594,14 @@ def start_workflow(
         ctx = new_wf_ctx
     new_child_workflow_id = ctx.id_assigned_for_next_workflow
     if ctx.has_parent():
-        …
-            ctx.parent_workflow_id, ctx.parent_workflow_fid
+        recorded_result = dbos._sys_db.check_operation_execution(
+            ctx.parent_workflow_id, ctx.parent_workflow_fid, get_dbos_func_name(func)
         )
-        if …
-        …
+        if recorded_result and recorded_result["error"]:
+            e: Exception = dbos._sys_db.serializer.deserialize(recorded_result["error"])
+            raise e
+        elif recorded_result and recorded_result["child_workflow_id"]:
+            return WorkflowHandlePolling(recorded_result["child_workflow_id"], dbos)
 
     status = _init_workflow(
         dbos,
@@ -675,13 +693,19 @@ async def start_workflow_async(
         ctx = new_wf_ctx
     new_child_workflow_id = ctx.id_assigned_for_next_workflow
     if ctx.has_parent():
-        …
-        dbos._sys_db.…
+        recorded_result = await asyncio.to_thread(
+            dbos._sys_db.check_operation_execution,
             ctx.parent_workflow_id,
             ctx.parent_workflow_fid,
+            get_dbos_func_name(func),
         )
-        if …
-        …
+        if recorded_result and recorded_result["error"]:
+            e: Exception = dbos._sys_db.serializer.deserialize(recorded_result["error"])
+            raise e
+        elif recorded_result and recorded_result["child_workflow_id"]:
+            return WorkflowHandleAsyncPolling(
+                recorded_result["child_workflow_id"], dbos
+            )
 
     status = await asyncio.to_thread(
         _init_workflow,
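Both start paths now consult check_operation_execution before launching a child workflow, so a parent that is re-executed during recovery re-raises a recorded error or reattaches to the already-started child instead of spawning a duplicate. A hypothetical parent/child pair exercising that path:

from dbos import DBOS

@DBOS.workflow()
def child(n: int) -> int:
    return n + 1

@DBOS.workflow()
def parent() -> int:
    # On recovery, this start is replayed from the recorded operation
    # rather than launching a second child workflow.
    handle = DBOS.start_workflow(child, 1)
    return handle.get_result()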
@@ -788,7 +812,9 @@ def workflow_wrapper(
         c_wfid: str, dbos: "DBOS"
     ) -> Callable[[Callable[[], R]], R]:
         def recorded_result_inner(func: Callable[[], R]) -> R:
-            r: R = dbos._sys_db.await_workflow_result(c_wfid)
+            r: R = dbos._sys_db.await_workflow_result(
+                c_wfid, polling_interval=DEFAULT_POLLING_INTERVAL
+            )
             return r
 
         return recorded_result_inner
@@ -798,11 +824,16 @@ def workflow_wrapper(
         workflow_id = ctx.workflow_id
 
         if ctx.has_parent():
-            …
-                ctx.parent_workflow_id,
+            r = dbos._sys_db.check_operation_execution(
+                ctx.parent_workflow_id,
+                ctx.parent_workflow_fid,
+                get_dbos_func_name(func),
             )
-            if …
-            …
+            if r and r["error"]:
+                e: Exception = dbos._sys_db.serializer.deserialize(r["error"])
+                raise e
+            elif r and r["child_workflow_id"]:
+                return recorded_result(r["child_workflow_id"], dbos)
 
         status = _init_workflow(
             dbos,
@@ -889,12 +920,6 @@ def decorate_transaction(
             )
 
             dbos = dbosreg.dbos
-            ctx = assert_current_dbos_context()
-            status = dbos._sys_db.get_workflow_status(ctx.workflow_id)
-            if status and status["status"] == WorkflowStatusString.CANCELLED.value:
-                raise DBOSWorkflowCancelledError(
-                    f"Workflow {ctx.workflow_id} is cancelled. Aborting transaction {transaction_name}."
-                )
             assert (
                 dbos._app_db
             ), "Transactions can only be used if DBOS is configured with an application_database_url"
@@ -905,6 +930,26 @@ def decorate_transaction(
             }
             with EnterDBOSTransaction(session, attributes=attributes):
                 ctx = assert_current_dbos_context()
+                # Check if the step record for this transaction exists
+                recorded_step_output = dbos._sys_db.check_operation_execution(
+                    ctx.workflow_id, ctx.function_id, transaction_name
+                )
+                if recorded_step_output:
+                    dbos.logger.debug(
+                        f"Replaying transaction, id: {ctx.function_id}, name: {attributes['name']}"
+                    )
+                    if recorded_step_output["error"]:
+                        step_error: Exception = dbos._serializer.deserialize(
+                            recorded_step_output["error"]
+                        )
+                        raise step_error
+                    elif recorded_step_output["output"]:
+                        return dbos._serializer.deserialize(
+                            recorded_step_output["output"]
+                        )
+                    else:
+                        raise Exception("Output and error are both None")
+
                 txn_output: TransactionResultInternal = {
                     "workflow_uuid": ctx.workflow_id,
                     "function_id": ctx.function_id,
@@ -915,6 +960,14 @@ def decorate_transaction(
                     "txn_id": None,
                     "function_name": transaction_name,
                 }
+                step_output: OperationResultInternal = {
+                    "workflow_uuid": ctx.workflow_id,
+                    "function_id": ctx.function_id,
+                    "function_name": transaction_name,
+                    "output": None,
+                    "error": None,
+                    "started_at_epoch_ms": int(time.time() * 1000),
+                }
                 retry_wait_seconds = 0.001
                 backoff_factor = 1.5
                 max_retry_wait_seconds = 2.0
@@ -953,8 +1006,18 @@ def decorate_transaction(
                                     )
                                 )
                                 has_recorded_error = True
+                                step_output["error"] = recorded_output["error"]
+                                dbos._sys_db.record_operation_result(
+                                    step_output
+                                )
                                 raise deserialized_error
                             elif recorded_output["output"]:
+                                step_output["output"] = recorded_output[
+                                    "output"
+                                ]
+                                dbos._sys_db.record_operation_result(
+                                    step_output
+                                )
                                 return dbos._serializer.deserialize(
                                     recorded_output["output"]
                                 )
@@ -1011,10 +1074,13 @@ def decorate_transaction(
                     finally:
                         # Don't record the error if it was already recorded
                         if txn_error and not has_recorded_error:
-                            txn_output["error"] = dbos._serializer.serialize(
-                                txn_error
+                            step_output["error"] = txn_output["error"] = (
+                                dbos._serializer.serialize(txn_error)
                             )
                             dbos._app_db.record_transaction_error(txn_output)
+                            dbos._sys_db.record_operation_result(step_output)
+                step_output["output"] = dbos._serializer.serialize(output)
+                dbos._sys_db.record_operation_result(step_output)
                 return output
 
             if inspect.iscoroutinefunction(func):
@@ -1283,7 +1349,9 @@ def set_event(dbos: "DBOS", key: str, value: Any) -> None:
             ctx.workflow_id, ctx.curr_step_function_id, key, value
         )
     elif cur_ctx.is_step():
-        dbos._sys_db.set_event_from_step(…)
+        dbos._sys_db.set_event_from_step(
+            cur_ctx.workflow_id, cur_ctx.curr_step_function_id, key, value
+        )
     else:
         raise DBOSException(
            "set_event() must be called from within a workflow or step"
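Taken together, the decorate_transaction changes checkpoint every transaction twice: its result still goes to transaction_outputs in the application database, and a step record is now also written to the system database, which is what lets list_workflow_steps stop consulting the application database. A hypothetical transaction showing what now gets the dual recording (the greetings table is made up):

import sqlalchemy as sa

from dbos import DBOS

@DBOS.transaction()
def insert_greeting(name: str) -> None:
    # Runs in a SQLAlchemy session; its outcome is recorded both in the
    # application database (transaction_outputs) and, after this release,
    # as a step record in the system database.
    DBOS.sql_session.execute(
        sa.text("INSERT INTO greetings (name) VALUES (:name)"),
        {"name": name},
    )

@DBOS.workflow()
def greeting_workflow(name: str) -> None:
    insert_greeting(name)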