dbos 0.25.0a9__py3-none-any.whl → 0.25.0a13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dbos/_conductor/conductor.py +32 -1
- dbos/_conductor/protocol.py +34 -0
- dbos/_core.py +102 -40
- dbos/_dbos.py +4 -8
- dbos/_sys_db.py +43 -130
- {dbos-0.25.0a9.dist-info → dbos-0.25.0a13.dist-info}/METADATA +1 -1
- {dbos-0.25.0a9.dist-info → dbos-0.25.0a13.dist-info}/RECORD +10 -10
- {dbos-0.25.0a9.dist-info → dbos-0.25.0a13.dist-info}/WHEEL +0 -0
- {dbos-0.25.0a9.dist-info → dbos-0.25.0a13.dist-info}/entry_points.txt +0 -0
- {dbos-0.25.0a9.dist-info → dbos-0.25.0a13.dist-info}/licenses/LICENSE +0 -0
dbos/_conductor/conductor.py
CHANGED
@@ -9,7 +9,12 @@ from websockets.sync.client import connect
 from websockets.sync.connection import Connection
 
 from dbos._utils import GlobalParams
-from dbos._workflow_commands import
+from dbos._workflow_commands import (
+    get_workflow,
+    list_queued_workflows,
+    list_workflow_steps,
+    list_workflows,
+)
 
 from . import protocol as p
 
@@ -243,6 +248,32 @@ class ConductorWebsocket(threading.Thread):
                         )
                     )
                     websocket.send(exist_pending_workflows_response.to_json())
+                elif msg_type == p.MessageType.LIST_STEPS:
+                    list_steps_message = p.ListStepsRequest.from_json(message)
+                    step_info = None
+                    try:
+                        step_info = list_workflow_steps(
+                            self.dbos._sys_db,
+                            list_steps_message.workflow_id,
+                        )
+                    except Exception as e:
+                        error_message = f"Exception encountered when getting workflow {list_steps_message.workflow_id}: {traceback.format_exc()}"
+                        self.dbos.logger.error(error_message)
+
+                    list_steps_response = p.ListStepsResponse(
+                        type=p.MessageType.LIST_STEPS,
+                        request_id=base_message.request_id,
+                        output=(
+                            [
+                                p.WorkflowSteps.from_step_info(i)
+                                for i in step_info
+                            ]
+                            if step_info is not None
+                            else None
+                        ),
+                        error_message=error_message,
+                    )
+                    websocket.send(list_steps_response.to_json())
                 else:
                     self.dbos.logger.warning(
                         f"Unexpected message type: {msg_type}"
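The new LIST_STEPS handler follows the same request/response convention as the other conductor messages: parse the incoming frame with from_json, do the work, and ship a typed response back with to_json. Below is a minimal, self-contained sketch of that round trip; the class and field names mirror the diff, but the JSON helpers are illustrative stand-ins for the real protocol machinery, and the sketch gives error_message a None default so the success path always has a value to serialize.

import json
from dataclasses import asdict, dataclass
from typing import List, Optional


@dataclass
class ListStepsRequest:
    type: str
    request_id: str
    workflow_id: str

    @classmethod
    def from_json(cls, message: str) -> "ListStepsRequest":
        # Parse the websocket text frame into a typed request.
        return cls(**json.loads(message))


@dataclass
class ListStepsResponse:
    type: str
    request_id: str
    output: Optional[List[dict]]
    error_message: Optional[str] = None

    def to_json(self) -> str:
        # Dataclass -> dict -> JSON text frame for websocket.send().
        return json.dumps(asdict(self))


request = ListStepsRequest.from_json(
    '{"type": "list_steps", "request_id": "req-1", "workflow_id": "wf-1"}'
)
response = ListStepsResponse(
    type="list_steps",
    request_id=request.request_id,
    output=[{"function_id": 1, "function_name": "my_step"}],
)
print(response.to_json())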
dbos/_conductor/protocol.py
CHANGED
@@ -3,6 +3,7 @@ from dataclasses import asdict, dataclass
 from enum import Enum
 from typing import List, Optional, Type, TypedDict, TypeVar
 
+from dbos._sys_db import StepInfo
 from dbos._workflow_commands import WorkflowStatus
 
 
@@ -16,6 +17,7 @@ class MessageType(str, Enum):
     RESTART = "restart"
     GET_WORKFLOW = "get_workflow"
     EXIST_PENDING_WORKFLOWS = "exist_pending_workflows"
+    LIST_STEPS = "list_steps"
 
 
 T = TypeVar("T", bound="BaseMessage")
@@ -176,6 +178,27 @@ class WorkflowsOutput:
         )
 
 
+@dataclass
+class WorkflowSteps:
+    function_id: int
+    function_name: str
+    output: Optional[str]
+    error: Optional[str]
+    child_workflow_id: Optional[str]
+
+    @classmethod
+    def from_step_info(cls, info: StepInfo) -> "WorkflowSteps":
+        output_str = str(info["output"]) if info["output"] is not None else None
+        error_str = str(info["error"]) if info["error"] is not None else None
+        return cls(
+            function_id=info["function_id"],
+            function_name=info["function_name"],
+            output=output_str,
+            error=error_str,
+            child_workflow_id=info["child_workflow_id"],
+        )
+
+
 @dataclass
 class ListWorkflowsRequest(BaseMessage):
     body: ListWorkflowsBody
@@ -230,3 +253,14 @@ class ExistPendingWorkflowsRequest(BaseMessage):
 class ExistPendingWorkflowsResponse(BaseMessage):
     exist: bool
     error_message: Optional[str] = None
+
+
+@dataclass
+class ListStepsRequest(BaseMessage):
+    workflow_id: str
+
+
+@dataclass
+class ListStepsResponse(BaseMessage):
+    output: Optional[List[WorkflowSteps]]
+    error_message: Optional[str] = None
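WorkflowSteps exists because StepInfo comes out of the system database carrying an arbitrary Python object in output and a live exception in error, neither of which survives JSON encoding. from_step_info coerces both to strings before the dataclass crosses the websocket. A standalone sketch of that coercion, with the dbos imports replaced by local definitions so it runs on its own:

from dataclasses import dataclass
from typing import Any, Optional, TypedDict


class StepInfo(TypedDict):
    function_id: int
    function_name: str
    output: Optional[Any]
    error: Optional[Exception]
    child_workflow_id: Optional[str]


@dataclass
class WorkflowSteps:
    function_id: int
    function_name: str
    output: Optional[str]
    error: Optional[str]
    child_workflow_id: Optional[str]

    @classmethod
    def from_step_info(cls, info: StepInfo) -> "WorkflowSteps":
        # Coerce non-JSON-serializable values to strings for the wire.
        return cls(
            function_id=info["function_id"],
            function_name=info["function_name"],
            output=str(info["output"]) if info["output"] is not None else None,
            error=str(info["error"]) if info["error"] is not None else None,
            child_workflow_id=info["child_workflow_id"],
        )


step = StepInfo(
    function_id=1,
    function_name="fetch_data",
    output={"rows": 3},
    error=None,
    child_workflow_id=None,
)
print(WorkflowSteps.from_step_info(step))  # output becomes "{'rows': 3}"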
dbos/_core.py
CHANGED
@@ -108,7 +108,15 @@ class WorkflowHandleFuture(Generic[R]):
         return self.workflow_id
 
     def get_result(self) -> R:
-
+        try:
+            r = self.future.result()
+        except Exception as e:
+            serialized_e = _serialization.serialize_exception(e)
+            self.dbos._sys_db.record_get_result(self.workflow_id, None, serialized_e)
+            raise
+        serialized_r = _serialization.serialize(r)
+        self.dbos._sys_db.record_get_result(self.workflow_id, serialized_r, None)
+        return r
 
     def get_status(self) -> "WorkflowStatus":
         stat = self.dbos.get_workflow_status(self.workflow_id)
@@ -127,8 +135,15 @@ class WorkflowHandlePolling(Generic[R]):
         return self.workflow_id
 
     def get_result(self) -> R:
-
-
+        try:
+            r: R = self.dbos._sys_db.await_workflow_result(self.workflow_id)
+        except Exception as e:
+            serialized_e = _serialization.serialize_exception(e)
+            self.dbos._sys_db.record_get_result(self.workflow_id, None, serialized_e)
+            raise
+        serialized_r = _serialization.serialize(r)
+        self.dbos._sys_db.record_get_result(self.workflow_id, serialized_r, None)
+        return r
 
     def get_status(self) -> "WorkflowStatus":
         stat = self.dbos.get_workflow_status(self.workflow_id)
@@ -148,7 +163,22 @@ class WorkflowHandleAsyncTask(Generic[R]):
         return self.workflow_id
 
     async def get_result(self) -> R:
-
+        try:
+            r = await self.task
+        except Exception as e:
+            serialized_e = _serialization.serialize_exception(e)
+            await asyncio.to_thread(
+                self.dbos._sys_db.record_get_result,
+                self.workflow_id,
+                None,
+                serialized_e,
+            )
+            raise
+        serialized_r = _serialization.serialize(r)
+        await asyncio.to_thread(
+            self.dbos._sys_db.record_get_result, self.workflow_id, serialized_r, None
+        )
+        return r
 
     async def get_status(self) -> "WorkflowStatus":
         stat = await asyncio.to_thread(self.dbos.get_workflow_status, self.workflow_id)
@@ -167,10 +197,24 @@ class WorkflowHandleAsyncPolling(Generic[R]):
         return self.workflow_id
 
     async def get_result(self) -> R:
-
-
+        try:
+            r: R = await asyncio.to_thread(
+                self.dbos._sys_db.await_workflow_result, self.workflow_id
+            )
+        except Exception as e:
+            serialized_e = _serialization.serialize_exception(e)
+            await asyncio.to_thread(
+                self.dbos._sys_db.record_get_result,
+                self.workflow_id,
+                None,
+                serialized_e,
+            )
+            raise
+        serialized_r = _serialization.serialize(r)
+        await asyncio.to_thread(
+            self.dbos._sys_db.record_get_result, self.workflow_id, serialized_r, None
         )
-        return
+        return r
 
     async def get_status(self) -> "WorkflowStatus":
         stat = await asyncio.to_thread(self.dbos.get_workflow_status, self.workflow_id)
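All four handle classes now apply the same discipline in get_result: fetch the result, serialize whatever came back (a value or an exception), durably record the retrieval, and only then return or re-raise. A compact sketch of that shared shape, where serialize and record stand in for dbos._serialization and SystemDatabase.record_get_result:

from typing import Callable, Optional, TypeVar

R = TypeVar("R")


def get_result_with_recording(
    workflow_id: str,
    fetch: Callable[[], R],
    serialize: Callable[[object], str],
    record: Callable[[str, Optional[str], Optional[str]], None],
) -> R:
    try:
        r = fetch()
    except Exception as e:
        # Durably record the failed retrieval, then surface the error.
        record(workflow_id, None, serialize(e))
        raise
    # Durably record the successful retrieval before returning.
    record(workflow_id, serialize(r), None)
    return r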
@@ -235,25 +279,14 @@ def _init_workflow(
             raise DBOSNonExistentWorkflowError(wfid)
         wf_status = get_status_result["status"]
     else:
-
-
-
-
-
-                status, max_recovery_attempts=max_recovery_attempts
-            )
-
-            # TODO: Modify the inputs if they were changed by `update_workflow_inputs`
-            dbos._sys_db.update_workflow_inputs(
-                wfid, _serialization.serialize_args(inputs)
-            )
-
-        else:
-            # Buffer the inputs for single-transaction workflows, but don't buffer the status
+        # Synchronously record the status and inputs for workflows
+        # TODO: Make this transactional (and with the queue step below)
+        wf_status = dbos._sys_db.insert_workflow_status(
+            status, max_recovery_attempts=max_recovery_attempts
+        )
 
-
-
-            )
+        # TODO: Modify the inputs if they were changed by `update_workflow_inputs`
+        dbos._sys_db.update_workflow_inputs(wfid, _serialization.serialize_args(inputs))
 
     if queue is not None and wf_status == WorkflowStatusString.ENQUEUED.value:
         dbos._sys_db.enqueue(wfid, queue)
@@ -267,6 +300,18 @@ def _get_wf_invoke_func(
     status: WorkflowStatusInternal,
 ) -> Callable[[Callable[[], R]], R]:
     def persist(func: Callable[[], R]) -> R:
+        if not dbos.debug_mode and (
+            status["status"] == WorkflowStatusString.ERROR.value
+            or status["status"] == WorkflowStatusString.SUCCESS.value
+        ):
+            dbos.logger.debug(
+                f"Workflow {status['workflow_uuid']} is already completed with status {status['status']}"
+            )
+            # Directly return the result if the workflow is already completed
+            recorded_result: R = dbos._sys_db.await_workflow_result(
+                status["workflow_uuid"]
+            )
+            return recorded_result
         try:
             output = func()
             status["status"] = "SUCCESS"
@@ -275,16 +320,12 @@ def _get_wf_invoke_func(
             if status["queue_name"] is not None:
                 queue = dbos._registry.queue_info_map[status["queue_name"]]
                 dbos._sys_db.remove_from_queue(status["workflow_uuid"], queue)
-            dbos._sys_db.
+            dbos._sys_db.update_workflow_status(status)
             return output
         except DBOSWorkflowConflictIDError:
-            #
-
-
-                status["workflow_uuid"], existing_workflow=False
-            )
-            output = wf_handle.get_result()
-            return output
+            # Await the workflow result
+            r: R = dbos._sys_db.await_workflow_result(status["workflow_uuid"])
+            return r
         except DBOSWorkflowCancelledError as error:
             raise
         except Exception as error:
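The guard added at the top of persist is what makes re-invoking an already-finished workflow safe: a terminal status short-circuits to the durably stored result instead of running the function a second time. A sketch of the idea with illustrative names, not the dbos API:

from typing import Callable, TypeVar

R = TypeVar("R")

# Statuses after which a workflow must never run again.
TERMINAL_STATUSES = {"SUCCESS", "ERROR"}


def persist_once(
    status: str,
    workflow_id: str,
    func: Callable[[], R],
    await_workflow_result: Callable[[str], R],
) -> R:
    if status in TERMINAL_STATUSES:
        # Replay the stored outcome rather than executing func() twice.
        return await_workflow_result(workflow_id)
    return func()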
@@ -504,7 +545,7 @@ def start_workflow(
             ctx.parent_workflow_id,
             new_child_workflow_id,
             ctx.parent_workflow_fid,
-            func
+            get_dbos_func_name(func),
         )
 
     if not execute_workflow or (
@@ -589,7 +630,7 @@ async def start_workflow_async(
             ctx.parent_workflow_id,
             new_child_workflow_id,
             ctx.parent_workflow_fid,
-            func
+            get_dbos_func_name(func),
         )
 
     wf_status = status["status"]
@@ -631,8 +672,6 @@ def workflow_wrapper(
 ) -> Callable[P, R]:
     func.__orig_func = func  # type: ignore
 
-    funcName = func.__name__
-
     fi = get_or_create_func_info(func)
     fi.max_recovery_attempts = max_recovery_attempts
 
@@ -662,17 +701,22 @@ def workflow_wrapper(
 
         wfOutcome = Outcome[R].make(functools.partial(func, *args, **kwargs))
 
+        workflow_id = None
+
        def init_wf() -> Callable[[Callable[[], R]], R]:
 
             def recorded_result(
                 c_wfid: str, dbos: "DBOS"
             ) -> Callable[[Callable[[], R]], R]:
                 def recorded_result_inner(func: Callable[[], R]) -> R:
-
+                    r: R = dbos._sys_db.await_workflow_result(c_wfid)
+                    return r
 
                 return recorded_result_inner
 
             ctx = assert_current_dbos_context()  # Now the child ctx
+            nonlocal workflow_id
+            workflow_id = ctx.workflow_id
 
             if ctx.has_parent():
                 child_workflow_id = dbos._sys_db.check_child_workflow(
@@ -702,15 +746,33 @@ def workflow_wrapper(
                     ctx.parent_workflow_id,
                     ctx.workflow_id,
                     ctx.parent_workflow_fid,
-
+                    get_dbos_func_name(func),
                 )
 
             return _get_wf_invoke_func(dbos, status)
 
+        def record_get_result(func: Callable[[], R]) -> R:
+            """
+            If a child workflow is invoked synchronously, this records the implicit "getResult" where the
+            parent retrieves the child's output. It executes in the CALLER'S context, not the workflow's.
+            """
+            try:
+                r = func()
+            except Exception as e:
+                serialized_e = _serialization.serialize_exception(e)
+                assert workflow_id is not None
+                dbos._sys_db.record_get_result(workflow_id, None, serialized_e)
+                raise
+            serialized_r = _serialization.serialize(r)
+            assert workflow_id is not None
+            dbos._sys_db.record_get_result(workflow_id, serialized_r, None)
+            return r
+
         outcome = (
             wfOutcome.wrap(init_wf)
             .also(DBOSAssumeRole(rr))
             .also(enterWorkflowCtxMgr(attributes))
+            .then(record_get_result)
         )
         return outcome()  # type: ignore
 
@@ -913,7 +975,7 @@ def decorate_step(
 ) -> Callable[[Callable[P, R]], Callable[P, R]]:
     def decorator(func: Callable[P, R]) -> Callable[P, R]:
 
-        stepName = func.
+        stepName = func.__qualname__
 
         def invoke_step(*args: Any, **kwargs: Any) -> Any:
             if dbosreg.dbos is None:
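record_get_result is attached with .then(...), so it wraps the whole pending workflow computation and runs in the caller's context after the workflow context managers have exited. The sketch below shows the shape of such a .then combinator over a deferred computation; it mimics how the chaining composes, not dbos's actual Outcome implementation:

from typing import Callable, Generic, TypeVar

R = TypeVar("R")


class Pending(Generic[R]):
    """A deferred computation that combinators can wrap layer by layer."""

    def __init__(self, thunk: Callable[[], R]) -> None:
        self._thunk = thunk

    def then(self, f: Callable[[Callable[[], R]], R]) -> "Pending[R]":
        # f receives the current thunk and decides what to do around it.
        inner = self._thunk
        return Pending(lambda: f(inner))

    def __call__(self) -> R:
        return self._thunk()


def log_around(thunk: Callable[[], int]) -> int:
    print("before")
    result = thunk()
    print("after")
    return result


outcome = Pending(lambda: 42).then(log_around)
print(outcome())  # prints before, after, then 42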
dbos/_dbos.py
CHANGED
@@ -485,14 +485,6 @@ class DBOS:
             notification_listener_thread.start()
             self._background_threads.append(notification_listener_thread)
 
-            # Start flush workflow buffers thread
-            flush_workflow_buffers_thread = threading.Thread(
-                target=self._sys_db.flush_workflow_buffers,
-                daemon=True,
-            )
-            flush_workflow_buffers_thread.start()
-            self._background_threads.append(flush_workflow_buffers_thread)
-
             # Start the queue thread
             evt = threading.Event()
             self.stop_events.append(evt)
@@ -548,6 +540,10 @@ class DBOS:
         """
         if _dbos_global_instance is not None:
             _dbos_global_instance._reset_system_database()
+        else:
+            dbos_logger.warning(
+                "reset_system_database has no effect because global DBOS object does not exist"
+            )
 
     def _reset_system_database(self) -> None:
         assert (
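With the buffer-flush thread gone, the background threads that remain follow the stop-event lifecycle visible in the queue-thread context above: a daemon thread polls until a threading.Event is set, and the thread is tracked so shutdown can join it. A generic sketch of that pattern (names illustrative, not the DBOS internals):

import threading


def poller(stop_event: threading.Event) -> None:
    # wait() doubles as the poll interval and the shutdown check.
    while not stop_event.wait(timeout=1.0):
        pass  # periodic work goes here


evt = threading.Event()
thread = threading.Thread(target=poller, args=(evt,), daemon=True)
thread.start()

# On shutdown: signal the event, then join the thread.
evt.set()
thread.join()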
dbos/_sys_db.py
CHANGED
@@ -33,7 +33,6 @@ from ._dbos_config import ConfigFile
 from ._error import (
     DBOSConflictingWorkflowError,
     DBOSDeadLetterQueueError,
-    DBOSException,
     DBOSNonExistentWorkflowError,
     DBOSWorkflowConflictIDError,
 )
@@ -154,16 +153,19 @@ class GetPendingWorkflowsOutput:
 
 
 class StepInfo(TypedDict):
+    # The unique ID of the step in the workflow
     function_id: int
+    # The (fully qualified) name of the step
     function_name: str
-    output
-
+    # The step's output, if any
+    output: Optional[Any]
+    # The error the step threw, if any
+    error: Optional[Exception]
+    # If the step starts or retrieves the result of a workflow, its ID
     child_workflow_id: Optional[str]
 
 
 _dbos_null_topic = "__null__topic__"
-_buffer_flush_batch_size = 100
-_buffer_flush_interval_secs = 1.0
 
 
 class SystemDatabase:
@@ -260,32 +262,17 @@ class SystemDatabase:
         self.notifications_map: Dict[str, threading.Condition] = {}
         self.workflow_events_map: Dict[str, threading.Condition] = {}
 
-        # Initialize the workflow status and inputs buffers
-        self._workflow_status_buffer: Dict[str, WorkflowStatusInternal] = {}
-        self._workflow_inputs_buffer: Dict[str, str] = {}
-        # Two sets for tracking which single-transaction workflows have been exported to the status table
-        self._exported_temp_txn_wf_status: Set[str] = set()
-        self._temp_txn_wf_ids: Set[str] = set()
-        self._is_flushing_status_buffer = False
-
         # Now we can run background processes
         self._run_background_processes = True
         self._debug_mode = debug_mode
 
     # Destroy the pool when finished
     def destroy(self) -> None:
-        self.wait_for_buffer_flush()
         self._run_background_processes = False
         if self.notification_conn is not None:
             self.notification_conn.close()
         self.engine.dispose()
 
-    def wait_for_buffer_flush(self) -> None:
-        # Wait until the buffers are flushed.
-        while self._is_flushing_status_buffer or not self._is_buffers_empty:
-            dbos_logger.debug("Waiting for system buffers to be exported")
-            time.sleep(1)
-
     def insert_workflow_status(
         self,
         status: WorkflowStatusInternal,
@@ -436,10 +423,6 @@ class SystemDatabase:
         with self.engine.begin() as c:
             c.execute(cmd)
 
-        # If this is a single-transaction workflow, record that its status has been exported
-        if status["workflow_uuid"] in self._temp_txn_wf_ids:
-            self._exported_temp_txn_wf_status.add(status["workflow_uuid"])
-
     def cancel_workflow(
         self,
         workflow_id: str,
@@ -617,10 +600,7 @@ class SystemDatabase:
                     f"Workflow {workflow_uuid} has been called multiple times with different inputs"
                 )
                 # TODO: actually changing the input
-
-            # Clean up the single-transaction tracking sets
-            self._exported_temp_txn_wf_status.discard(workflow_uuid)
-            self._temp_txn_wf_ids.discard(workflow_uuid)
+
         return
 
     def get_workflow_inputs(
@@ -771,8 +751,16 @@ class SystemDatabase:
                 StepInfo(
                     function_id=row[0],
                     function_name=row[1],
-                    output=
-
+                    output=(
+                        _serialization.deserialize(row[2])
+                        if row[2] is not None
+                        else row[2]
+                    ),
+                    error=(
+                        _serialization.deserialize_exception(row[3])
+                        if row[3] is not None
+                        else row[3]
+                    ),
                     child_workflow_id=row[4],
                 )
                 for row in rows
@@ -804,6 +792,31 @@ class SystemDatabase:
                 raise DBOSWorkflowConflictIDError(result["workflow_uuid"])
             raise
 
+    def record_get_result(
+        self, result_workflow_id: str, output: Optional[str], error: Optional[str]
+    ) -> None:
+        ctx = get_local_dbos_context()
+        # Only record get_result called in workflow functions
+        if ctx is None or not ctx.is_workflow():
+            return
+        ctx.function_id += 1  # Record the get_result as a step
+        # Because there's no corresponding check, we do nothing on conflict
+        # and do not raise a DBOSWorkflowConflictIDError
+        sql = (
+            pg.insert(SystemSchema.operation_outputs)
+            .values(
+                workflow_uuid=ctx.workflow_id,
+                function_id=ctx.function_id,
+                function_name="DBOS.getResult",
+                output=output,
+                error=error,
+                child_workflow_id=result_workflow_id,
+            )
+            .on_conflict_do_nothing()
+        )
+        with self.engine.begin() as c:
+            c.execute(sql)
+
     def record_child_workflow(
         self,
         parentUUID: str,
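record_get_result is idempotent by construction: inserting into operation_outputs with ON CONFLICT DO NOTHING means a recording replayed during recovery is silently skipped rather than raising, matching the comment in the diff. A sketch against a toy table; insert(...).on_conflict_do_nothing() is SQLAlchemy's real PostgreSQL API, but the table, columns, and values here are illustrative:

import sqlalchemy as sa
from sqlalchemy.dialects import postgresql as pg

metadata = sa.MetaData()
# Toy stand-in for SystemSchema.operation_outputs.
operation_outputs = sa.Table(
    "operation_outputs",
    metadata,
    sa.Column("workflow_uuid", sa.Text, primary_key=True),
    sa.Column("function_id", sa.Integer, primary_key=True),
    sa.Column("function_name", sa.Text),
    sa.Column("output", sa.Text, nullable=True),
    sa.Column("error", sa.Text, nullable=True),
    sa.Column("child_workflow_id", sa.Text, nullable=True),
)

stmt = (
    pg.insert(operation_outputs)
    .values(
        workflow_uuid="parent-wf",
        function_id=3,
        function_name="DBOS.getResult",
        output=None,
        error=None,
        child_workflow_id="child-wf",
    )
    .on_conflict_do_nothing()  # a replayed recording is silently skipped
)
# with engine.begin() as conn:
#     conn.execute(stmt)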
@@ -1238,106 +1251,6 @@ class SystemDatabase:
         )
         return value
 
-    def _flush_workflow_status_buffer(self) -> None:
-        if self._debug_mode:
-            raise Exception("called _flush_workflow_status_buffer in debug mode")
-
-        """Export the workflow status buffer to the database, up to the batch size."""
-        if len(self._workflow_status_buffer) == 0:
-            return
-
-        # Record the exported status so far, and add them back on errors.
-        exported_status: Dict[str, WorkflowStatusInternal] = {}
-        with self.engine.begin() as c:
-            exported = 0
-            status_iter = iter(list(self._workflow_status_buffer))
-            wf_id: Optional[str] = None
-            while (
-                exported < _buffer_flush_batch_size
-                and (wf_id := next(status_iter, None)) is not None
-            ):
-                # Pop the first key in the buffer (FIFO)
-                status = self._workflow_status_buffer.pop(wf_id, None)
-                if status is None:
-                    continue
-                exported_status[wf_id] = status
-                try:
-                    self.update_workflow_status(status, conn=c)
-                    exported += 1
-                except Exception as e:
-                    dbos_logger.error(f"Error while flushing status buffer: {e}")
-                    c.rollback()
-                    # Add the exported status back to the buffer, so they can be retried next time
-                    self._workflow_status_buffer.update(exported_status)
-                    break
-
-    def _flush_workflow_inputs_buffer(self) -> None:
-        if self._debug_mode:
-            raise Exception("called _flush_workflow_inputs_buffer in debug mode")
-
-        """Export the workflow inputs buffer to the database, up to the batch size."""
-        if len(self._workflow_inputs_buffer) == 0:
-            return
-
-        # Record exported inputs so far, and add them back on errors.
-        exported_inputs: Dict[str, str] = {}
-        with self.engine.begin() as c:
-            exported = 0
-            input_iter = iter(list(self._workflow_inputs_buffer))
-            wf_id: Optional[str] = None
-            while (
-                exported < _buffer_flush_batch_size
-                and (wf_id := next(input_iter, None)) is not None
-            ):
-                if wf_id not in self._exported_temp_txn_wf_status:
-                    # Skip exporting inputs if the status has not been exported yet
-                    continue
-                inputs = self._workflow_inputs_buffer.pop(wf_id, None)
-                if inputs is None:
-                    continue
-                exported_inputs[wf_id] = inputs
-                try:
-                    self.update_workflow_inputs(wf_id, inputs, conn=c)
-                    exported += 1
-                except Exception as e:
-                    dbos_logger.error(f"Error while flushing inputs buffer: {e}")
-                    c.rollback()
-                    # Add the exported inputs back to the buffer, so they can be retried next time
-                    self._workflow_inputs_buffer.update(exported_inputs)
-                    break
-
-    def flush_workflow_buffers(self) -> None:
-        """Flush the workflow status and inputs buffers periodically, via a background thread."""
-        while self._run_background_processes:
-            try:
-                self._is_flushing_status_buffer = True
-                # Must flush the status buffer first, as the inputs table has a foreign key constraint on the status table.
-                self._flush_workflow_status_buffer()
-                self._flush_workflow_inputs_buffer()
-                self._is_flushing_status_buffer = False
-                if self._is_buffers_empty:
-                    # Only sleep if both buffers are empty
-                    time.sleep(_buffer_flush_interval_secs)
-            except Exception as e:
-                dbos_logger.error(f"Error while flushing buffers: {e}")
-                time.sleep(_buffer_flush_interval_secs)
-                # Will retry next time
-
-    def buffer_workflow_status(self, status: WorkflowStatusInternal) -> None:
-        self._workflow_status_buffer[status["workflow_uuid"]] = status
-
-    def buffer_workflow_inputs(self, workflow_id: str, inputs: str) -> None:
-        # inputs is a serialized WorkflowInputs string
-        self._workflow_inputs_buffer[workflow_id] = inputs
-        self._temp_txn_wf_ids.add(workflow_id)
-
-    @property
-    def _is_buffers_empty(self) -> bool:
-        return (
-            len(self._workflow_status_buffer) == 0
-            and len(self._workflow_inputs_buffer) == 0
-        )
-
     def enqueue(self, workflow_id: str, queue_name: str) -> None:
         if self._debug_mode:
             raise Exception("called enqueue in debug mode")
{dbos-0.25.0a9.dist-info → dbos-0.25.0a13.dist-info}/RECORD
CHANGED
@@ -1,7 +1,7 @@
-dbos-0.25.
-dbos-0.25.
-dbos-0.25.
-dbos-0.25.
+dbos-0.25.0a13.dist-info/METADATA,sha256=53Gij6b4Z6I2vTd2kr_izPtVtXH5YTGu-KKbwFW-W3w,5554
+dbos-0.25.0a13.dist-info/WHEEL,sha256=thaaA2w1JzcGC48WYufAs8nrYZjJm8LqNfnXFOFyCC4,90
+dbos-0.25.0a13.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
+dbos-0.25.0a13.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
 dbos/__init__.py,sha256=2Ur8QyNElSVn7CeL9Ovek2Zsye8A_ZCyjb9djF-N4A4,785
 dbos/__main__.py,sha256=G7Exn-MhGrVJVDbgNlpzhfh8WMX_72t3_oJaFT9Lmt8,653
 dbos/_admin_server.py,sha256=7kguOf9jEt4vg9LO-QJdh4jYddp6Uqtrt14gh7mKA2Y,6387
@@ -10,13 +10,13 @@ dbos/_classproperty.py,sha256=f0X-_BySzn3yFDRKB2JpCbLYQ9tLwt1XftfshvY7CBs,626
 dbos/_cloudutils/authentication.py,sha256=V0fCWQN9stCkhbuuxgPTGpvuQcDqfU3KAxPAh01vKW4,5007
 dbos/_cloudutils/cloudutils.py,sha256=YC7jGsIopT0KveLsqbRpQk2KlRBk-nIRC_UCgep4f3o,7797
 dbos/_cloudutils/databases.py,sha256=_shqaqSvhY4n2ScgQ8IP5PDZvzvcx3YBKV8fj-cxhSY,8543
-dbos/_conductor/conductor.py,sha256=
-dbos/_conductor/protocol.py,sha256=
+dbos/_conductor/conductor.py,sha256=7elKINsgl4s1Tg5DwrU-K7xQ5vQvmDAIfAvUgfwpGN0,16784
+dbos/_conductor/protocol.py,sha256=xN7pmooyF1pqbH1b6WhllU5718P7zSb_b0KCwA6bzcs,6716
 dbos/_context.py,sha256=3He4w46OTFbR7h8U1MLcdaU10wNyIPBSRqzLkdggv7U,19368
-dbos/_core.py,sha256=
+dbos/_core.py,sha256=kIj_4wlIff8ptlACJKXAPSNoyJIt2h44swjMKxfwv0k,45789
 dbos/_croniter.py,sha256=XHAyUyibs_59sJQfSNWkP7rqQY6_XrlfuuCxk4jYqek,47559
 dbos/_db_wizard.py,sha256=VnMa6OL87Lc-XPDD1RnXp8NjsJE8YgiQLj3wtWAXp-8,8252
-dbos/_dbos.py,sha256=
+dbos/_dbos.py,sha256=IZ3Qj8UBUqYYJiG6HVz35A0GQnQyI1Po_-E6DDvWlQg,45147
 dbos/_dbos_config.py,sha256=7Qm3FARP3lTKZS0gSxDHLbpaDCT30GzfyERxfCde4bc,21566
 dbos/_debug.py,sha256=mmgvLkqlrljMBBow9wk01PPur9kUf2rI_11dTJXY4gw,1822
 dbos/_error.py,sha256=B6Y9XLS1f6yrawxB2uAEYFMxFwk9BHhdxPNddKco-Fw,5399
@@ -46,7 +46,7 @@ dbos/_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_schemas/application_database.py,sha256=KeyoPrF7hy_ODXV7QNike_VFSD74QBRfQ76D7QyE9HI,966
 dbos/_schemas/system_database.py,sha256=W9eSpL7SZzQkxcEZ4W07BOcwkkDr35b9oCjUOgfHWek,5336
 dbos/_serialization.py,sha256=YCYv0qKAwAZ1djZisBC7khvKqG-5OcIv9t9EC5PFIog,1743
-dbos/_sys_db.py,sha256=
+dbos/_sys_db.py,sha256=33euvXfpbpVaCpR0Sx5eQ4yBt1gRKGdvfGQUugqoJBY,62320
 dbos/_templates/dbos-db-starter/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
 dbos/_templates/dbos-db-starter/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_templates/dbos-db-starter/__package/main.py,sha256=nJMN3ZD2lmwg4Dcgmiwqc-tQGuCJuJal2Xl85iA277U,2453
@@ -66,4 +66,4 @@ dbos/cli/cli.py,sha256=ut47q-R6A423je0zvBTEgwdxENagaKKoyIvyTeACFIU,15977
 dbos/dbos-config.schema.json,sha256=HtF_njVTGHLdzBGZ4OrGQz3qbPPT0Go-iwd1PgFVTNg,5847
 dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
 version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
-dbos-0.25.
+dbos-0.25.0a13.dist-info/RECORD,,
{dbos-0.25.0a9.dist-info → dbos-0.25.0a13.dist-info}/WHEEL
File without changes
{dbos-0.25.0a9.dist-info → dbos-0.25.0a13.dist-info}/entry_points.txt
File without changes
{dbos-0.25.0a9.dist-info → dbos-0.25.0a13.dist-info}/licenses/LICENSE
File without changes