dbos 0.5.0a3__py3-none-any.whl → 0.5.0a5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dbos/application_database.py +9 -4
- dbos/context.py +16 -18
- dbos/core.py +41 -55
- dbos/dbos.py +14 -17
- dbos/error.py +5 -5
- dbos/registrations.py +1 -1
- dbos/system_database.py +50 -22
- {dbos-0.5.0a3.dist-info → dbos-0.5.0a5.dist-info}/METADATA +1 -1
- {dbos-0.5.0a3.dist-info → dbos-0.5.0a5.dist-info}/RECORD +12 -12
- {dbos-0.5.0a3.dist-info → dbos-0.5.0a5.dist-info}/WHEEL +0 -0
- {dbos-0.5.0a3.dist-info → dbos-0.5.0a5.dist-info}/entry_points.txt +0 -0
- {dbos-0.5.0a3.dist-info → dbos-0.5.0a5.dist-info}/licenses/LICENSE +0 -0
dbos/application_database.py
CHANGED
@@ -3,6 +3,7 @@ from typing import Optional, TypedDict, cast
 import sqlalchemy as sa
 import sqlalchemy.dialects.postgresql as pg
 import sqlalchemy.exc as sa_exc
+from sqlalchemy.exc import DBAPIError
 from sqlalchemy.orm import Session, sessionmaker

 from dbos.error import DBOSWorkflowConflictIDError
@@ -95,8 +96,10 @@ class ApplicationDatabase:
                     ),
                 )
             )
-        except
-
+        except DBAPIError as dbapi_error:
+            if dbapi_error.orig.pgcode == "23505":  # type: ignore
+                raise DBOSWorkflowConflictIDError(output["workflow_uuid"])
+            raise dbapi_error
         except Exception as e:
             raise e

@@ -118,8 +121,10 @@ class ApplicationDatabase:
                     ),
                 )
             )
-        except
-
+        except DBAPIError as dbapi_error:
+            if dbapi_error.orig.pgcode == "23505":  # type: ignore
+                raise DBOSWorkflowConflictIDError(output["workflow_uuid"])
+            raise dbapi_error
         except Exception as e:
             raise e
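For readers unfamiliar with the pattern above: both handlers now inspect the DBAPI-level error and translate PostgreSQL's unique-violation SQLSTATE ("23505") into a DBOS conflict error. Below is a minimal, self-contained sketch of that pattern; the table, session, and error names are illustrative, and only DBAPIError, orig.pgcode, and "23505" come from the diff itself.

from sqlalchemy import text
from sqlalchemy.exc import DBAPIError
from sqlalchemy.orm import Session


class WorkflowConflictError(Exception):
    """Illustrative stand-in for DBOSWorkflowConflictIDError."""


def record_output(session: Session, workflow_uuid: str, output: str) -> None:
    try:
        # Hypothetical insert; the real code targets the transaction_outputs table.
        session.execute(
            text(
                "INSERT INTO transaction_outputs (workflow_uuid, output) "
                "VALUES (:uuid, :output)"
            ),
            {"uuid": workflow_uuid, "output": output},
        )
    except DBAPIError as dbapi_error:
        # "23505" is PostgreSQL's unique_violation SQLSTATE: this workflow/function
        # pair already recorded an output, so surface it as a conflict instead.
        if getattr(dbapi_error.orig, "pgcode", None) == "23505":
            raise WorkflowConflictError(workflow_uuid) from dbapi_error
        raise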
dbos/context.py
CHANGED
@@ -18,18 +18,16 @@ from .logger import dbos_logger
 from .tracer import dbos_tracer


-#
+# These are used to tag OTel traces
 class OperationType(Enum):
     HANDLER = "handler"
     WORKFLOW = "workflow"
     TRANSACTION = "transaction"
-
+    STEP = "step"
     PROCEDURE = "procedure"


-OperationTypes = Literal[
-    "handler", "workflow", "transaction", "communicator", "procedure"
-]
+OperationTypes = Literal["handler", "workflow", "transaction", "step", "procedure"]


 # Keys must be the same as in TypeScript Transact
@@ -64,7 +62,7 @@ class DBOSContext:
         self.function_id: int = -1
         self.in_recovery: bool = False

-        self.
+        self.curr_step_function_id: int = -1
         self.curr_tx_function_id: int = -1
         self.sql_session: Optional[Session] = None
         self.spans: list[Span] = []
@@ -117,26 +115,26 @@ class DBOSContext:
     def is_workflow(self) -> bool:
         return (
             len(self.workflow_id) > 0
-            and not self.
+            and not self.is_step()
             and not self.is_transaction()
         )

     def is_transaction(self) -> bool:
         return self.sql_session is not None

-    def
-        return self.
+    def is_step(self) -> bool:
+        return self.curr_step_function_id >= 0

-    def
+    def start_step(
         self,
         fid: int,
         attributes: TracedAttributes,
     ) -> None:
-        self.
+        self.curr_step_function_id = fid
         self._start_span(attributes)

-    def
-        self.
+    def end_step(self, exc_value: Optional[BaseException]) -> None:
+        self.curr_step_function_id = -1
         self._end_span(exc_value)

     def start_transaction(
@@ -375,7 +373,7 @@ class EnterDBOSChildWorkflow:
     def __enter__(self) -> DBOSContext:
         ctx = assert_current_dbos_context()
         self.parent_ctx = ctx
-        assert ctx.is_workflow()  # Is in a workflow and not in
+        assert ctx.is_workflow()  # Is in a workflow and not in a step
         ctx.function_id += 1
         if len(ctx.id_assigned_for_next_workflow) == 0:
             ctx.id_assigned_for_next_workflow = (
@@ -401,7 +399,7 @@ class EnterDBOSChildWorkflow:
         return False  # Did not handle


-class
+class EnterDBOSStep:
     def __init__(
         self,
         attributes: TracedAttributes,
@@ -412,7 +410,7 @@ class EnterDBOSCommunicator:
         ctx = assert_current_dbos_context()
         assert ctx.is_workflow()
         ctx.function_id += 1
-        ctx.
+        ctx.start_step(ctx.function_id, attributes=self.attributes)
         return ctx

     def __exit__(
@@ -422,8 +420,8 @@ class EnterDBOSCommunicator:
         traceback: Optional[TracebackType],
     ) -> Literal[False]:
         ctx = assert_current_dbos_context()
-        assert ctx.
-        ctx.
+        assert ctx.is_step()
+        ctx.end_step(exc_value)
         return False  # Did not handle
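The step tracking introduced above follows a sentinel-plus-context-manager pattern: the context records the current step's function id (or -1 when not in a step), and EnterDBOSStep sets it on entry and clears it on exit. A hedged, simplified sketch with illustrative class names (only the -1 sentinel, is_step/start_step/end_step, and the enter/exit flow come from the diff):

from types import TracebackType
from typing import Literal, Optional


class Ctx:
    """Illustrative stand-in for DBOSContext."""

    def __init__(self) -> None:
        self.function_id: int = 0
        self.curr_step_function_id: int = -1  # -1 means "not currently inside a step"

    def is_step(self) -> bool:
        return self.curr_step_function_id >= 0

    def start_step(self, fid: int) -> None:
        self.curr_step_function_id = fid

    def end_step(self) -> None:
        self.curr_step_function_id = -1


class EnterStep:
    """Illustrative stand-in for EnterDBOSStep: marks the step on enter, clears it on exit."""

    def __init__(self, ctx: Ctx) -> None:
        self.ctx = ctx

    def __enter__(self) -> Ctx:
        self.ctx.function_id += 1  # each operation gets the next function id
        self.ctx.start_step(self.ctx.function_id)
        return self.ctx

    def __exit__(
        self,
        exc_type: Optional[type],
        exc_value: Optional[BaseException],
        tb: Optional[TracebackType],
    ) -> Literal[False]:
        self.ctx.end_step()
        return False  # never swallow exceptions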
dbos/core.py
CHANGED
@@ -19,20 +19,18 @@ from dbos.context import (
     DBOSContextEnsure,
     DBOSContextSwap,
     EnterDBOSChildWorkflow,
-
+    EnterDBOSStep,
     EnterDBOSTransaction,
     EnterDBOSWorkflow,
     OperationType,
     SetWorkflowID,
     TracedAttributes,
     assert_current_dbos_context,
-    clear_local_dbos_context,
     get_local_dbos_context,
-    set_local_dbos_context,
 )
 from dbos.error import (
-    DBOSCommunicatorMaxRetriesExceededError,
     DBOSException,
+    DBOSMaxStepRetriesExceeded,
     DBOSNonExistentWorkflowError,
     DBOSRecoveryError,
     DBOSWorkflowConflictIDError,
@@ -61,7 +59,6 @@ if TYPE_CHECKING:
     from dbos.dbos import IsolationLevel

 from sqlalchemy.exc import DBAPIError
-from sqlalchemy.orm import Session

 P = ParamSpec("P")  # A generic type for workflow parameters
 R = TypeVar("R", covariant=True)  # A generic type for workflow return values
@@ -139,14 +136,18 @@ def _init_workflow(
         "recovery_attempts": None,
     }

+    # If we have a class name, the first arg is the instance and do not serialize
+    if class_name is not None:
+        inputs = {"args": inputs["args"][1:], "kwargs": inputs["kwargs"]}
+
     if temp_wf_type != "transaction":
-        #
+        # Synchronously record the status and inputs for workflows and single-step workflows
+        # We also have to do this for single-step workflows because of the foreign key constraint on the operation outputs table
         dbos.sys_db.update_workflow_status(status, False, ctx.in_recovery)
-
-        # If we have a class name, the first arg is the instance and do not serialize
-        if class_name is not None:
-            inputs = {"args": inputs["args"][1:], "kwargs": inputs["kwargs"]}
         dbos.sys_db.update_workflow_inputs(wfid, utils.serialize(inputs))
+    else:
+        # Buffer the inputs for single-transaction workflows, but don't buffer the status
+        dbos.sys_db.buffer_workflow_inputs(wfid, utils.serialize(inputs))

     return status

@@ -176,20 +177,6 @@ def _execute_workflow(
         status["error"] = utils.serialize(error)
         dbos.sys_db.update_workflow_status(status)
         raise error
-    finally:
-        if get_temp_workflow_type(func) == "transaction":
-            # Buffer the inputs for single transaction workflows
-            inputs: WorkflowInputs = {
-                "args": args,
-                "kwargs": kwargs,
-            }
-            # If we have a class name, the first arg is the instance and do not serialize
-            class_name = get_dbos_class_name(get_func_info(func), func, args)
-            if class_name is not None:
-                inputs = {"args": inputs["args"][1:], "kwargs": inputs["kwargs"]}
-            dbos.sys_db.buffer_workflow_inputs(
-                status["workflow_uuid"], utils.serialize(inputs)
-            )

     return output

@@ -362,7 +349,7 @@ def _start_workflow(

     # Sequence of events for starting a workflow:
     # First - is there a WF already running?
-    # (and not in
+    # (and not in step as that is an error)
     # Assign an ID to the workflow, if it doesn't have an app-assigned one
     # If this is a root workflow, assign a new ID
     # If this is a child workflow, assign parent wf id with call# suffix
@@ -370,7 +357,7 @@ def _start_workflow(
     # Pass the new context to a worker thread that will run the wf function
     cur_ctx = get_local_dbos_context()
     if cur_ctx is not None and cur_ctx.is_within_workflow():
-        assert cur_ctx.is_workflow()  # Not in
+        assert cur_ctx.is_workflow()  # Not in a step
         cur_ctx.function_id += 1
         if len(cur_ctx.id_assigned_for_next_workflow) == 0:
             cur_ctx.id_assigned_for_next_workflow = (
@@ -517,7 +504,7 @@ def _transaction(
         def wrapper(*args: Any, **kwargs: Any) -> Any:
             rr: Optional[str] = check_required_roles(func, fi)
             # Entering transaction is allowed:
-            # In a workflow (that is not in a
+            # In a workflow (that is not in a step already)
             # Not in a workflow (we will start the single op workflow)
             ctx = get_local_dbos_context()
             if ctx and ctx.is_within_workflow():
@@ -544,7 +531,7 @@ def _transaction(
     return decorator


-def
+def _step(
     dbosreg: "_DBOSRegistry",
     *,
     retries_allowed: bool = False,
@@ -554,7 +541,7 @@ def _communicator(
 ) -> Callable[[F], F]:
     def decorator(func: F) -> F:

-        def
+        def invoke_step(*args: Any, **kwargs: Any) -> Any:
            if dbosreg.dbos is None:
                raise DBOSException(
                    f"Function {func.__name__} invoked before DBOS initialized"
@@ -563,10 +550,10 @@ def _communicator(

            attributes: TracedAttributes = {
                "name": func.__name__,
-                "operationType": OperationType.
+                "operationType": OperationType.STEP.value,
            }
-            with
-
+            with EnterDBOSStep(attributes) as ctx:
+                step_output: OperationResultInternal = {
                    "workflow_uuid": ctx.workflow_id,
                    "function_id": ctx.function_id,
                    "output": None,
@@ -591,24 +578,24 @@ def _communicator(
                for attempt in range(1, local_max_attempts + 1):
                    try:
                        output = func(*args, **kwargs)
-
+                        step_output["output"] = utils.serialize(output)
                        error = None
                        break
                    except Exception as err:
                        error = err
                        if retries_allowed:
                            dbos.logger.warning(
-                                f"
+                                f"Step being automatically retried. (attempt {attempt} of {local_max_attempts}). {traceback.format_exc()}"
                            )
                            ctx.get_current_span().add_event(
-                                f"
+                                f"Step attempt {attempt} failed",
                                {
                                    "error": str(error),
                                    "retryIntervalSeconds": local_interval_seconds,
                                },
                            )
                            if attempt == local_max_attempts:
-                                error =
+                                error = DBOSMaxStepRetriesExceeded()
                            else:
                                time.sleep(local_interval_seconds)
                                local_interval_seconds = min(
@@ -616,10 +603,11 @@ def _communicator(
                                    max_retry_interval_seconds,
                                )

-
+                step_output["error"] = (
                    utils.serialize(error) if error is not None else None
                )
-                dbos.sys_db.record_operation_result(
+                dbos.sys_db.record_operation_result(step_output)
+
                if error is not None:
                    raise error
                return output
@@ -629,20 +617,18 @@ def _communicator(
        @wraps(func)
        def wrapper(*args: Any, **kwargs: Any) -> Any:
            rr: Optional[str] = check_required_roles(func, fi)
-            # Entering
-            # In a
-            # In a workflow (that is not in a
+            # Entering step is allowed:
+            # In a step already, just call the original function directly.
+            # In a workflow (that is not in a step already)
            # Not in a workflow (we will start the single op workflow)
            ctx = get_local_dbos_context()
-            if ctx and ctx.
+            if ctx and ctx.is_step():
                # Call the original function directly
                return func(*args, **kwargs)
            if ctx and ctx.is_within_workflow():
-                assert (
-                    ctx.is_workflow()
-                ), "Communicators must be called from within workflows"
+                assert ctx.is_workflow(), "Steps must be called from within workflows"
                with DBOSAssumeRole(rr):
-                    return
+                    return invoke_step(*args, **kwargs)
            else:
                tempwf = dbosreg.workflow_info_map.get("<temp>." + func.__qualname__)
                assert tempwf
@@ -653,7 +639,7 @@ def _communicator(

        wrapped_wf = _workflow_wrapper(dbosreg, temp_wf)
        set_dbos_func_name(temp_wf, "<temp>." + func.__qualname__)
-        set_temp_workflow_type(temp_wf, "
+        set_temp_workflow_type(temp_wf, "step")
        dbosreg.register_wf_function(get_dbos_func_name(temp_wf), wrapped_wf)

        return cast(F, wrapper)
@@ -668,10 +654,10 @@ def _send(
    attributes: TracedAttributes = {
        "name": "send",
    }
-    with
+    with EnterDBOSStep(attributes) as ctx:
        dbos.sys_db.send(
            ctx.workflow_id,
-            ctx.
+            ctx.curr_step_function_id,
            destination_id,
            message,
            topic,
@@ -697,12 +683,12 @@ def _recv(
    attributes: TracedAttributes = {
        "name": "recv",
    }
-    with
+    with EnterDBOSStep(attributes) as ctx:
        ctx.function_id += 1  # Reserve for the sleep
        timeout_function_id = ctx.function_id
        return dbos.sys_db.recv(
            ctx.workflow_id,
-            ctx.
+            ctx.curr_step_function_id,
            timeout_function_id,
            topic,
            timeout_seconds,
@@ -722,9 +708,9 @@ def _set_event(dbos: "DBOS", key: str, value: Any) -> None:
        attributes: TracedAttributes = {
            "name": "set_event",
        }
-        with
+        with EnterDBOSStep(attributes) as ctx:
            dbos.sys_db.set_event(
-                ctx.workflow_id, ctx.
+                ctx.workflow_id, ctx.curr_step_function_id, key, value
            )
    else:
        # Cannot call it from outside of a workflow
@@ -743,12 +729,12 @@ def _get_event(
        attributes: TracedAttributes = {
            "name": "get_event",
        }
-        with
+        with EnterDBOSStep(attributes) as ctx:
            ctx.function_id += 1
            timeout_function_id = ctx.function_id
            caller_ctx: GetEventWorkflowContext = {
                "workflow_uuid": ctx.workflow_id,
-                "function_id": ctx.
+                "function_id": ctx.curr_step_function_id,
                "timeout_function_id": timeout_function_id,
            }
            return dbos.sys_db.get_event(workflow_id, key, timeout_seconds, caller_ctx)
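The new _step wrapper retries a failing step with an exponentially increasing, capped sleep interval before giving up with DBOSMaxStepRetriesExceeded. A standalone sketch of that retry-and-backoff loop; the function and error names here are generic, and only the interval/backoff_rate/max-cap arithmetic mirrors the diff:

import time
from typing import Any, Callable


class MaxRetriesExceeded(Exception):
    """Illustrative stand-in for DBOSMaxStepRetriesExceeded."""


def run_with_retries(
    fn: Callable[[], Any],
    *,
    max_attempts: int = 3,
    interval_seconds: float = 1.0,
    backoff_rate: float = 2.0,
    max_retry_interval_seconds: float = 3600.0,
) -> Any:
    interval = interval_seconds
    for attempt in range(1, max_attempts + 1):
        try:
            return fn()
        except Exception:
            if attempt == max_attempts:
                # Every attempt failed: give up with the dedicated error.
                raise MaxRetriesExceeded() from None
            time.sleep(interval)
            # Exponential backoff, capped at max_retry_interval_seconds.
            interval = min(interval * backoff_rate, max_retry_interval_seconds)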
dbos/dbos.py
CHANGED
@@ -25,13 +25,13 @@ from opentelemetry.trace import Span

 from dbos.core import (
     TEMP_SEND_WF_NAME,
-    _communicator,
     _execute_workflow_id,
     _get_event,
     _recv,
     _send,
     _set_event,
     _start_workflow,
+    _step,
     _transaction,
     _workflow,
     _workflow_wrapper,
@@ -63,7 +63,7 @@ else:

 from dbos.admin_sever import AdminServer
 from dbos.context import (
-
+    EnterDBOSStep,
     TracedAttributes,
     assert_current_dbos_context,
     get_local_dbos_context,
@@ -179,7 +179,7 @@ class DBOS:
     Main access class for DBOS functionality.

     `DBOS` contains functions and properties for:
-    1. Decorating classes, workflows,
+    1. Decorating classes, workflows, and steps
     2. Starting workflow functions
     3. Retrieving workflow status information
     4. Interacting with workflows via events and messages
@@ -388,9 +388,8 @@ class DBOS:
         """
         return _transaction(_get_or_create_dbos_registry(), isolation_level)

-    # Mirror the CommunicatorConfig from TS. However, we disable retries by default.
     @classmethod
-    def
+    def step(
         cls,
         *,
         retries_allowed: bool = False,
@@ -399,17 +398,17 @@ class DBOS:
         backoff_rate: float = 2.0,
     ) -> Callable[[F], F]:
         """
-        Decorate and configure a function for use as a DBOS
+        Decorate and configure a function for use as a DBOS step.

         Args:
             retries_allowed(bool): If true, enable retries on thrown exceptions
             interval_seconds(float): Time between retry attempts
             backoff_rate(float): Multiplier for exponentially increasing `interval_seconds` between retries
-            max_attempts(int): Maximum number of
+            max_attempts(int): Maximum number of retries before raising an exception

         """

-        return
+        return _step(
             _get_or_create_dbos_registry(),
             retries_allowed=retries_allowed,
             interval_seconds=interval_seconds,
@@ -542,9 +541,9 @@ class DBOS:
         }
         if seconds <= 0:
             return
-        with
+        with EnterDBOSStep(attributes) as ctx:
             _get_dbos_instance().sys_db.sleep(
-                ctx.workflow_id, ctx.
+                ctx.workflow_id, ctx.curr_step_function_id, seconds
             )

     @classmethod
@@ -615,9 +614,7 @@ class DBOS:
     def sql_session(cls) -> Session:
         """Return the SQLAlchemy `Session` for the current context, which must be within a transaction function."""
         ctx = assert_current_dbos_context()
-        assert (
-            ctx.is_transaction()
-        ), "sql_session is only available within a transaction."
+        assert ctx.is_transaction(), "db is only available within a transaction."
         rv = ctx.sql_session
         assert rv
         return rv
@@ -628,7 +625,7 @@ class DBOS:
         ctx = assert_current_dbos_context()
         assert (
             ctx.is_within_workflow()
-        ), "workflow_id is only available within a
+        ), "workflow_id is only available within a DBOS operation."
         return ctx.workflow_id

     @classproperty
@@ -722,9 +719,9 @@ class DBOSConfiguredInstance:
     """
     Base class for classes containing DBOS member functions.

-    When a class contains
-
-
+    When a class contains DBOS functions that access instance state, the DBOS workflow
+    executor needs a name for the instance. This name is recorded in the database, and
+    used to refer to the proper instance upon recovery.

     Use `DBOSConfiguredInstance` to specify the instance name and register the instance
     with the DBOS workflow executor.
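Based on the DBOS.step() signature shown above, usage presumably looks like the following sketch. It assumes the package exposes DBOS at its top level (from dbos import DBOS) and a DBOS.workflow() decorator, neither of which is shown in this diff; the workflow body itself is illustrative.

from urllib.request import urlopen

from dbos import DBOS


@DBOS.step(retries_allowed=True, interval_seconds=1.0, max_attempts=3)
def fetch_page(url: str) -> str:
    # External, possibly flaky work belongs in a step so its outcome can be
    # recorded once and the call retried on failure.
    with urlopen(url, timeout=10) as resp:
        return resp.read().decode()


@DBOS.workflow()
def page_length(url: str) -> int:
    return len(fetch_page(url))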
dbos/error.py
CHANGED
@@ -32,7 +32,7 @@ class DBOSErrorCode(Enum):
     WorkflowFunctionNotFound = 4
     NonExistentWorkflowError = 5
     DuplicateWorkflowEventError = 6
-
+    MaxStepRetriesExceeded = 7
     NotAuthorized = 8


@@ -106,11 +106,11 @@ class DBOSNotAuthorizedError(DBOSException):
         )


-class
-    """Exception raised when a
+class DBOSMaxStepRetriesExceeded(DBOSException):
+    """Exception raised when a step was retried the maximimum number of times without success."""

     def __init__(self) -> None:
         super().__init__(
-            "
-            dbos_error_code=DBOSErrorCode.
+            "Step reached maximum retries.",
+            dbos_error_code=DBOSErrorCode.MaxStepRetriesExceeded.value,
         )
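A hedged sketch of how calling code might handle the renamed exception; only the class name and its module (dbos.error) come from the diff, the helper itself is illustrative.

from typing import Any, Callable, Optional

from dbos.error import DBOSMaxStepRetriesExceeded


def call_step_with_fallback(step_fn: Callable[..., Any], *args: Any, **kwargs: Any) -> Optional[Any]:
    """Call a retry-enabled step and fall back to None when every attempt fails."""
    try:
        return step_fn(*args, **kwargs)
    except DBOSMaxStepRetriesExceeded:
        # Raised by the step machinery once max_attempts retries are exhausted.
        return None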
dbos/registrations.py
CHANGED
@@ -16,7 +16,7 @@ def set_dbos_func_name(f: Any, name: str) -> None:
     setattr(f, "dbos_function_name", name)


-TempWorkflowType = Literal["transaction", "
+TempWorkflowType = Literal["transaction", "step", "send", None]


 def get_temp_workflow_type(f: Any) -> TempWorkflowType:
dbos/system_database.py
CHANGED
@@ -12,6 +12,7 @@ from typing import (
     Literal,
     Optional,
     Sequence,
+    Set,
     TypedDict,
     cast,
 )
@@ -21,6 +22,7 @@ import sqlalchemy as sa
 import sqlalchemy.dialects.postgresql as pg
 from alembic import command
 from alembic.config import Config
+from sqlalchemy.exc import DBAPIError

 import dbos.utils as utils
 from dbos.error import (
@@ -33,9 +35,6 @@ from .dbos_config import ConfigFile
 from .logger import dbos_logger
 from .schemas.system_database import SystemSchema

-if TYPE_CHECKING:
-    from dbos.dbos import DBOS
-

 class WorkflowStatusString(Enum):
     """Enumeration of values allowed for `WorkflowSatusInternal.status`."""
@@ -212,6 +211,9 @@ class SystemDatabase:
         # Initialize the workflow status and inputs buffers
         self._workflow_status_buffer: Dict[str, WorkflowStatusInternal] = {}
         self._workflow_inputs_buffer: Dict[str, str] = {}
+        # Two sets for tracking which single-transaction workflows have been exported to the status table
+        self._exported_temp_txn_wf_status: Set[str] = set()
+        self._temp_txn_wf_ids: Set[str] = set()
         self._is_flushing_status_buffer = False

         # Now we can run background processes
@@ -277,6 +279,10 @@ class SystemDatabase:
             with self.engine.begin() as c:
                 c.execute(cmd)

+        # Record we have exported status for this single-transaction workflow
+        if status["workflow_uuid"] in self._temp_txn_wf_ids:
+            self._exported_temp_txn_wf_status.add(status["workflow_uuid"])
+
     def set_workflow_status(
         self,
         workflow_uuid: str,
@@ -467,6 +473,11 @@
         with self.engine.begin() as c:
             c.execute(cmd)

+        if workflow_uuid in self._temp_txn_wf_ids:
+            # Clean up the single-transaction tracking sets
+            self._exported_temp_txn_wf_status.discard(workflow_uuid)
+            self._temp_txn_wf_ids.discard(workflow_uuid)
+
     def get_workflow_inputs(self, workflow_uuid: str) -> Optional[WorkflowInputs]:
         with self.engine.begin() as c:
             row = c.execute(
@@ -546,8 +557,10 @@
             else:
                 with self.engine.begin() as c:
                     c.execute(sql)
-        except
-
+        except DBAPIError as dbapi_error:
+            if dbapi_error.orig.pgcode == "23505":  # type: ignore
+                raise DBOSWorkflowConflictIDError(result["workflow_uuid"])
+            raise dbapi_error
         except Exception as e:
             raise e

@@ -601,8 +614,11 @@
                         message=utils.serialize(message),
                     )
                 )
-            except
-
+            except DBAPIError as dbapi_error:
+                # Foreign key violation
+                if dbapi_error.orig.pgcode == "23503":  # type: ignore
+                    raise DBOSNonExistentWorkflowError(destination_uuid)
+                raise dbapi_error
             except Exception as e:
                 raise e
         output: OperationResultInternal = {
@@ -825,8 +841,10 @@
                         value=utils.serialize(message),
                     )
                 )
-            except
-
+            except DBAPIError as dbapi_error:
+                if dbapi_error.orig.pgcode == "23505":  # type: ignore
+                    raise DBOSDuplicateWorkflowEventError(workflow_uuid, key)
+                raise dbapi_error
             except Exception as e:
                 raise e
         output: OperationResultInternal = {
@@ -910,7 +928,7 @@
         return value

     def _flush_workflow_status_buffer(self) -> None:
-        """Export the workflow status buffer to the database, up to the batch size
+        """Export the workflow status buffer to the database, up to the batch size"""
         if len(self._workflow_status_buffer) == 0:
             return

@@ -918,13 +936,16 @@
         exported_status: Dict[str, WorkflowStatusInternal] = {}
         with self.engine.begin() as c:
             exported = 0
-
-
-
-
+            status_iter = iter(list(self._workflow_status_buffer))
+            wf_id: Optional[str] = None
+            while (
+                exported < buffer_flush_batch_size
+                and (wf_id := next(status_iter, None)) is not None
+            ):
                 # Pop the first key in the buffer (FIFO)
-
-                status
+                status = self._workflow_status_buffer.pop(wf_id, None)
+                if status is None:
+                    continue
                 exported_status[wf_id] = status
                 try:
                     self.update_workflow_status(status, conn=c)
@@ -945,12 +966,18 @@
         exported_inputs: Dict[str, str] = {}
         with self.engine.begin() as c:
             exported = 0
-
-
-
-
-            wf_id
-
+            input_iter = iter(list(self._workflow_inputs_buffer))
+            wf_id: Optional[str] = None
+            while (
+                exported < buffer_flush_batch_size
+                and (wf_id := next(input_iter, None)) is not None
+            ):
+                if wf_id not in self._exported_temp_txn_wf_status:
+                    # Skip exporting inputs if the status has not been exported yet
+                    continue
+                inputs = self._workflow_inputs_buffer.pop(wf_id, None)
+                if inputs is None:
+                    continue
                 exported_inputs[wf_id] = inputs
                 try:
                     self.update_workflow_inputs(wf_id, inputs, conn=c)
@@ -985,6 +1012,7 @@
     def buffer_workflow_inputs(self, workflow_id: str, inputs: str) -> None:
         # inputs is a serialized WorkflowInputs string
         self._workflow_inputs_buffer[workflow_id] = inputs
+        self._temp_txn_wf_ids.add(workflow_id)

     @property
     def _is_buffers_empty(self) -> bool:
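The rewritten flush loops above pop at most buffer_flush_batch_size entries per pass, iterating a snapshot of the buffer's keys with an assignment expression and skipping entries that are not yet exportable. A simplified, self-contained sketch of that loop shape; only the loop structure and the buffer_flush_batch_size name come from the diff, everything else is illustrative:

from typing import Dict, Iterator, Optional, Set

buffer_flush_batch_size = 100  # same constant name as in the module above


def flush_buffer(buffer: Dict[str, str], exportable: Set[str]) -> Dict[str, str]:
    """Pop up to buffer_flush_batch_size ready entries from the buffer (illustrative)."""
    flushed: Dict[str, str] = {}
    exported = 0
    # Iterate over a snapshot of the keys so popping from the live dict is safe.
    key_iter: Iterator[str] = iter(list(buffer))
    wf_id: Optional[str] = None
    while exported < buffer_flush_batch_size and (wf_id := next(key_iter, None)) is not None:
        if wf_id not in exportable:
            # Not ready yet (e.g. its status row has not been exported); leave it buffered.
            continue
        value = buffer.pop(wf_id, None)
        if value is None:
            continue
        flushed[wf_id] = value
        exported += 1
    return flushed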
{dbos-0.5.0a3.dist-info → dbos-0.5.0a5.dist-info}/RECORD
CHANGED
@@ -1,18 +1,18 @@
-dbos-0.5.
-dbos-0.5.
-dbos-0.5.
-dbos-0.5.
+dbos-0.5.0a5.dist-info/METADATA,sha256=WRKFIucNawLOnizlrKNBp1cTcYW2BcuIcaYaQjXq1FU,5420
+dbos-0.5.0a5.dist-info/WHEEL,sha256=rSwsxJWe3vzyR5HCwjWXQruDgschpei4h_giTm0dJVE,90
+dbos-0.5.0a5.dist-info/entry_points.txt,sha256=3PmOPbM4FYxEmggRRdJw0oAsiBzKR8U0yx7bmwUmMOM,39
+dbos-0.5.0a5.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
 dbos/__init__.py,sha256=X1LdP36NomDtvPfFwoMNtgXf81TO05jj7vltsp79UUw,787
 dbos/admin_sever.py,sha256=KtzH6aKyskCm4h3yulpy9jb5PIqRlYI2sjctw5mvaKY,3395
-dbos/application_database.py,sha256=
+dbos/application_database.py,sha256=1K3kE96BgGi_QWOd2heXluyNTwFAwlUVuAR6JKKUqf0,5659
 dbos/cli.py,sha256=QnbGtZ8S963q3iyFvXNBcL4DB35r4SFMarlb5DRqN6M,7915
-dbos/context.py,sha256=
-dbos/core.py,sha256=
+dbos/context.py,sha256=JZMV2RtSpTK7lnyyWxeBmGPwrZSB00XZEP6R6MT9ygQ,15690
+dbos/core.py,sha256=HfKnPpIaQqIBAHzP2hD67aSIchTHp87NgD21CcujKkE,28300
 dbos/dbos-config.schema.json,sha256=azpfmoDZg7WfSy3kvIsk9iEiKB_-VZt03VEOoXJAkqE,5331
-dbos/dbos.py,sha256=
+dbos/dbos.py,sha256=HngS2BUWSbWPmloXGr-KE81BQ8dpZtlvOXM4tx4_Qhg,26246
 dbos/dbos_config.py,sha256=EkO0c0xaIM7_vAAqqnvNNEAKG5fOJbmmalqnZvaKYZA,5312
 dbos/decorators.py,sha256=lbPefsLK6Cya4cb7TrOcLglOpGT3pc6qjZdsQKlfZLg,629
-dbos/error.py,sha256=
+dbos/error.py,sha256=nBdLC4hxGO_K9V26YbDGOo7xi1CKuN4PsE_cBv7K8Cc,3798
 dbos/fastapi.py,sha256=ZFcMizyv3pizo5zf0sSF6U4GoR3rQH8LxGipkQIGHfU,2282
 dbos/logger.py,sha256=cfybbu6F1zsgYLEPW8D8V6h033u-YedLXnGMnQQM6-4,3341
 dbos/migrations/env.py,sha256=38SIGVbmn_VV2x2u1aHLcPOoWgZ84eCymf3g_NljmbU,1626
@@ -21,14 +21,14 @@ dbos/migrations/versions/5c361fc04708_added_system_tables.py,sha256=QMgFMb0aLgC2
 dbos/migrations/versions/a3b18ad34abe_added_triggers.py,sha256=Rv0ZsZYZ_WdgGEULYsPfnp4YzaO5L198gDTgYY39AVA,2022
 dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
 dbos/recovery.py,sha256=xfwQFWNuD8DXg5HD5_-3tG7Neo9j-x1lrqiwtn5FSh8,2015
-dbos/registrations.py,sha256=
+dbos/registrations.py,sha256=gMI-u05tv5bpvyddQGtoUgCsqARx51aOY7p0JXPafQo,6539
 dbos/roles.py,sha256=9u0z4CWmXPeqIKzQWEzaOKIlzOuaagBtMiB-swqjX_U,2291
 dbos/scheduler/croniter.py,sha256=hbhgfsHBqclUS8VeLnJ9PSE9Z54z6mi4nnrr1aUXn0k,47561
 dbos/scheduler/scheduler.py,sha256=uO4_9jmWW2rLv1ODL3lc1cE_37ZaVTgnvmFx_FAlN50,1472
 dbos/schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/schemas/application_database.py,sha256=q_Wr2XbiZNBYFkOtu7uKavo1T_cSOBblxKGHThYGGsY,962
 dbos/schemas/system_database.py,sha256=5V3vqnEzry0Hn7ZbVS9Gs_dJKia8uX8p7mGC82Ru8rk,4303
-dbos/system_database.py,sha256=
+dbos/system_database.py,sha256=SK24Avj10rbbWFilVUexdPX6VvOL8zC-CoWDhNQj6QM,39698
 dbos/templates/hello/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
 dbos/templates/hello/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/templates/hello/__package/main.py,sha256=hJgp3S14cseT7zWIZsPwjqdzwTCw1aLo8kPKsTvYz0Y,2976
@@ -42,4 +42,4 @@ dbos/templates/hello/start_postgres_docker.py,sha256=lQVLlYO5YkhGPEgPqwGc7Y8uDKs
 dbos/tracer.py,sha256=RPW9oxmX9tSc0Yq7O-FAhpQWBg1QT7Ni1Q06uwhtNDk,2237
 dbos/utils.py,sha256=hWj9iWDrby2cVEhb0pG-IdnrxLqP64NhkaWUXiLc8bA,402
 version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
-dbos-0.5.
+dbos-0.5.0a5.dist-info/RECORD,,
{dbos-0.5.0a3.dist-info → dbos-0.5.0a5.dist-info}/WHEEL
File without changes
{dbos-0.5.0a3.dist-info → dbos-0.5.0a5.dist-info}/entry_points.txt
File without changes
{dbos-0.5.0a3.dist-info → dbos-0.5.0a5.dist-info}/licenses/LICENSE
File without changes