dbos 0.26.0a7__py3-none-any.whl → 0.26.0a8__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
dbos/_app_db.py CHANGED
@@ -8,7 +8,7 @@ from sqlalchemy.orm import Session, sessionmaker
 
 from . import _serialization
 from ._dbos_config import ConfigFile, DatabaseConfig
-from ._error import DBOSWorkflowConflictIDError
+from ._error import DBOSUnexpectedStepError, DBOSWorkflowConflictIDError
 from ._schemas.application_database import ApplicationSchema
 from ._sys_db import StepInfo
 
@@ -171,22 +171,31 @@ class ApplicationDatabase:
 
     @staticmethod
     def check_transaction_execution(
-        session: Session, workflow_uuid: str, function_id: int
+        session: Session, workflow_id: str, function_id: int, function_name: str
     ) -> Optional[RecordedResult]:
         rows = session.execute(
             sa.select(
                 ApplicationSchema.transaction_outputs.c.output,
                 ApplicationSchema.transaction_outputs.c.error,
+                ApplicationSchema.transaction_outputs.c.function_name,
             ).where(
-                ApplicationSchema.transaction_outputs.c.workflow_uuid == workflow_uuid,
+                ApplicationSchema.transaction_outputs.c.workflow_uuid == workflow_id,
                 ApplicationSchema.transaction_outputs.c.function_id == function_id,
             )
         ).all()
         if len(rows) == 0:
             return None
+        output, error, recorded_function_name = rows[0][0], rows[0][1], rows[0][2]
+        if function_name != recorded_function_name:
+            raise DBOSUnexpectedStepError(
+                workflow_id=workflow_id,
+                step_id=function_id,
+                expected_name=function_name,
+                recorded_name=recorded_function_name,
+            )
         result: RecordedResult = {
-            "output": rows[0][0],
-            "error": rows[0][1],
+            "output": output,
+            "error": error,
         }
         return result
 
dbos/_conductor/conductor.py CHANGED
@@ -67,7 +67,7 @@ class ConductorWebsocket(threading.Thread):
                         recovery_message = p.RecoveryRequest.from_json(message)
                         success = True
                         try:
-                            self.dbos.recover_pending_workflows(
+                            self.dbos._recover_pending_workflows(
                                 recovery_message.executor_ids
                             )
                         except Exception as e:
dbos/_core.py CHANGED
@@ -52,6 +52,7 @@ from ._error import (
     DBOSMaxStepRetriesExceeded,
     DBOSNonExistentWorkflowError,
     DBOSRecoveryError,
+    DBOSUnexpectedStepError,
     DBOSWorkflowCancelledError,
     DBOSWorkflowConflictIDError,
     DBOSWorkflowFunctionNotFoundError,
@@ -783,7 +784,7 @@ def decorate_transaction(
 ) -> Callable[[F], F]:
     def decorator(func: F) -> F:
 
-        transactionName = func.__qualname__
+        transaction_name = func.__qualname__
 
         def invoke_tx(*args: Any, **kwargs: Any) -> Any:
             if dbosreg.dbos is None:
@@ -791,13 +792,14 @@ def decorate_transaction(
                     f"Function {func.__name__} invoked before DBOS initialized"
                 )
 
+            dbos = dbosreg.dbos
             ctx = assert_current_dbos_context()
-            if dbosreg.is_workflow_cancelled(ctx.workflow_id):
+            status = dbos._sys_db.get_workflow_status(ctx.workflow_id)
+            if status and status["status"] == WorkflowStatusString.CANCELLED.value:
                 raise DBOSWorkflowCancelledError(
                     f"Workflow {ctx.workflow_id} is cancelled. Aborting transaction {func.__name__}."
                 )
 
-            dbos = dbosreg.dbos
             with dbos._app_db.sessionmaker() as session:
                 attributes: TracedAttributes = {
                     "name": func.__name__,
@@ -813,18 +815,12 @@ def decorate_transaction(
                         "txn_snapshot": "",  # TODO: add actual snapshot
                         "executor_id": None,
                         "txn_id": None,
-                        "function_name": transactionName,
+                        "function_name": transaction_name,
                     }
                     retry_wait_seconds = 0.001
                     backoff_factor = 1.5
                     max_retry_wait_seconds = 2.0
                     while True:
-
-                        if dbosreg.is_workflow_cancelled(ctx.workflow_id):
-                            raise DBOSWorkflowCancelledError(
-                                f"Workflow {ctx.workflow_id} is cancelled. Aborting transaction {func.__name__}."
-                            )
-
                         has_recorded_error = False
                         txn_error: Optional[Exception] = None
                         try:
@@ -841,6 +837,7 @@ def decorate_transaction(
                                         session,
                                         ctx.workflow_id,
                                         ctx.function_id,
+                                        transaction_name,
                                     )
                                 )
                                 if dbos.debug_mode and recorded_output is None:
@@ -904,6 +901,8 @@ def decorate_transaction(
                             )
                             txn_error = invalid_request_error
                             raise
+                        except DBOSUnexpectedStepError:
+                            raise
                         except Exception as error:
                             txn_error = error
                             raise
@@ -969,7 +968,7 @@ def decorate_step(
 ) -> Callable[[Callable[P, R]], Callable[P, R]]:
     def decorator(func: Callable[P, R]) -> Callable[P, R]:
 
-        stepName = func.__qualname__
+        step_name = func.__qualname__
 
         def invoke_step(*args: Any, **kwargs: Any) -> Any:
             if dbosreg.dbos is None:
@@ -983,13 +982,6 @@ def decorate_step(
                 "operationType": OperationType.STEP.value,
             }
 
-            # Check if the workflow is cancelled
-            ctx = assert_current_dbos_context()
-            if dbosreg.is_workflow_cancelled(ctx.workflow_id):
-                raise DBOSWorkflowCancelledError(
-                    f"Workflow {ctx.workflow_id} is cancelled. Aborting step {func.__name__}."
-                )
-
             attempts = max_attempts if retries_allowed else 1
             max_retry_interval_seconds: float = 3600  # 1 Hour
 
@@ -1017,7 +1009,7 @@ def decorate_step(
             step_output: OperationResultInternal = {
                 "workflow_uuid": ctx.workflow_id,
                 "function_id": ctx.function_id,
-                "function_name": stepName,
+                "function_name": step_name,
                 "output": None,
                 "error": None,
             }
@@ -1035,7 +1027,7 @@ def decorate_step(
         def check_existing_result() -> Union[NoResult, R]:
             ctx = assert_current_dbos_context()
             recorded_output = dbos._sys_db.check_operation_execution(
-                ctx.workflow_id, ctx.function_id
+                ctx.workflow_id, ctx.function_id, step_name
             )
             if dbos.debug_mode and recorded_output is None:
                 raise DBOSException("Step output not found in debug mode")
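With these changes, both the transaction and step decorators pass the function's qualified name into the recorded-output lookup, so a replay that encounters a different recorded name fails fast instead of returning another function's result. A minimal sketch of the kind of workflow this protects, assuming a configured DBOS application (the function names here are illustrative, not part of this diff):

from dbos import DBOS

@DBOS.step()
def fetch_data() -> str:
    # On first execution the output is recorded under this step's name.
    return "data"

@DBOS.step()
def send_report(data: str) -> None:
    DBOS.logger.info(f"sending {data}")

@DBOS.workflow()
def report_workflow() -> None:
    data = fetch_data()
    send_report(data)
    # If a newer code version reordered these calls, recovery of an in-flight
    # run would find "fetch_data" recorded where "send_report" is expected
    # and raise DBOSUnexpectedStepError instead of replaying the wrong value.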
dbos/_dbos.py CHANGED
@@ -166,7 +166,6 @@ class DBOSRegistry:
         self.pollers: list[RegisteredJob] = []
         self.dbos: Optional[DBOS] = None
         self.config: Optional[ConfigFile] = None
-        self.workflow_cancelled_map: dict[str, bool] = {}
 
     def register_wf_function(self, name: str, wrapped_func: F, functype: str) -> None:
         if name in self.function_type_map:
@@ -215,15 +214,6 @@ class DBOSRegistry:
         else:
             self.instance_info_map[fn] = inst
 
-    def cancel_workflow(self, workflow_id: str) -> None:
-        self.workflow_cancelled_map[workflow_id] = True
-
-    def is_workflow_cancelled(self, workflow_id: str) -> bool:
-        return self.workflow_cancelled_map.get(workflow_id, False)
-
-    def clear_workflow_cancelled(self, workflow_id: str) -> None:
-        self.workflow_cancelled_map.pop(workflow_id, None)
-
     def compute_app_version(self) -> str:
         """
         An application's version is computed from a hash of the source of its workflows.
@@ -740,32 +730,11 @@ class DBOS:
     @classmethod
     def get_workflow_status(cls, workflow_id: str) -> Optional[WorkflowStatus]:
         """Return the status of a workflow execution."""
-        sys_db = _get_dbos_instance()._sys_db
-        ctx = get_local_dbos_context()
-        if ctx and ctx.is_within_workflow():
-            ctx.function_id += 1
-            res = sys_db.check_operation_execution(ctx.workflow_id, ctx.function_id)
-            if res is not None:
-                if res["output"]:
-                    resstat: WorkflowStatus = _serialization.deserialize(res["output"])
-                    return resstat
-                else:
-                    raise DBOSException(
-                        "Workflow status record not found. This should not happen! \033[1m Hint: Check if your workflow is deterministic.\033[0m"
-                    )
-        stat = get_workflow(_get_dbos_instance()._sys_db, workflow_id, True)
-
-        if ctx and ctx.is_within_workflow():
-            sys_db.record_operation_result(
-                {
-                    "workflow_uuid": ctx.workflow_id,
-                    "function_id": ctx.function_id,
-                    "function_name": "DBOS.getStatus",
-                    "output": _serialization.serialize(stat),
-                    "error": None,
-                }
-            )
-        return stat
+
+        def fn() -> Optional[WorkflowStatus]:
+            return get_workflow(_get_dbos_instance()._sys_db, workflow_id, True)
+
+        return _get_dbos_instance()._sys_db.call_function_as_step(fn, "DBOS.getStatus")
 
     @classmethod
     async def get_workflow_status_async(
@@ -941,12 +910,12 @@ class DBOS:
         )
 
     @classmethod
-    def execute_workflow_id(cls, workflow_id: str) -> WorkflowHandle[Any]:
+    def _execute_workflow_id(cls, workflow_id: str) -> WorkflowHandle[Any]:
         """Execute a workflow by ID (for recovery)."""
         return execute_workflow_by_id(_get_dbos_instance(), workflow_id)
 
     @classmethod
-    def recover_pending_workflows(
+    def _recover_pending_workflows(
         cls, executor_ids: List[str] = ["local"]
     ) -> List[WorkflowHandle[Any]]:
         """Find all PENDING workflows and execute them."""
@@ -955,22 +924,37 @@ class DBOS:
     @classmethod
     def cancel_workflow(cls, workflow_id: str) -> None:
         """Cancel a workflow by ID."""
-        dbos_logger.info(f"Cancelling workflow: {workflow_id}")
-        _get_dbos_instance()._sys_db.cancel_workflow(workflow_id)
-        _get_or_create_dbos_registry().cancel_workflow(workflow_id)
+
+        def fn() -> None:
+            dbos_logger.info(f"Cancelling workflow: {workflow_id}")
+            _get_dbos_instance()._sys_db.cancel_workflow(workflow_id)
+
+        return _get_dbos_instance()._sys_db.call_function_as_step(
+            fn, "DBOS.cancelWorkflow"
+        )
 
     @classmethod
     def resume_workflow(cls, workflow_id: str) -> WorkflowHandle[Any]:
         """Resume a workflow by ID."""
-        dbos_logger.info(f"Resuming workflow: {workflow_id}")
-        _get_dbos_instance()._sys_db.resume_workflow(workflow_id)
-        _get_or_create_dbos_registry().clear_workflow_cancelled(workflow_id)
+
+        def fn() -> None:
+            dbos_logger.info(f"Resuming workflow: {workflow_id}")
+            _get_dbos_instance()._sys_db.resume_workflow(workflow_id)
+
+        _get_dbos_instance()._sys_db.call_function_as_step(fn, "DBOS.resumeWorkflow")
         return cls.retrieve_workflow(workflow_id)
 
     @classmethod
     def restart_workflow(cls, workflow_id: str) -> WorkflowHandle[Any]:
         """Restart a workflow with a new workflow ID"""
-        forked_workflow_id = _get_dbos_instance()._sys_db.fork_workflow(workflow_id)
+
+        def fn() -> str:
+            dbos_logger.info(f"Restarting workflow: {workflow_id}")
+            return _get_dbos_instance()._sys_db.fork_workflow(workflow_id)
+
+        forked_workflow_id = _get_dbos_instance()._sys_db.call_function_as_step(
+            fn, "DBOS.restartWorkflow"
+        )
         return cls.retrieve_workflow(forked_workflow_id)
 
     @classmethod
@@ -988,18 +972,23 @@ class DBOS:
         offset: Optional[int] = None,
         sort_desc: bool = False,
     ) -> List[WorkflowStatus]:
-        return list_workflows(
-            _get_dbos_instance()._sys_db,
-            workflow_ids=workflow_ids,
-            status=status,
-            start_time=start_time,
-            end_time=end_time,
-            name=name,
-            app_version=app_version,
-            user=user,
-            limit=limit,
-            offset=offset,
-            sort_desc=sort_desc,
+        def fn() -> List[WorkflowStatus]:
+            return list_workflows(
+                _get_dbos_instance()._sys_db,
+                workflow_ids=workflow_ids,
+                status=status,
+                start_time=start_time,
+                end_time=end_time,
+                name=name,
+                app_version=app_version,
+                user=user,
+                limit=limit,
+                offset=offset,
+                sort_desc=sort_desc,
+            )
+
+        return _get_dbos_instance()._sys_db.call_function_as_step(
+            fn, "DBOS.listWorkflows"
         )
 
     @classmethod
@@ -1015,16 +1004,21 @@ class DBOS:
         offset: Optional[int] = None,
         sort_desc: bool = False,
     ) -> List[WorkflowStatus]:
-        return list_queued_workflows(
-            _get_dbos_instance()._sys_db,
-            queue_name=queue_name,
-            status=status,
-            start_time=start_time,
-            end_time=end_time,
-            name=name,
-            limit=limit,
-            offset=offset,
-            sort_desc=sort_desc,
+        def fn() -> List[WorkflowStatus]:
+            return list_queued_workflows(
+                _get_dbos_instance()._sys_db,
+                queue_name=queue_name,
+                status=status,
+                start_time=start_time,
+                end_time=end_time,
+                name=name,
+                limit=limit,
+                offset=offset,
+                sort_desc=sort_desc,
+            )
+
+        return _get_dbos_instance()._sys_db.call_function_as_step(
+            fn, "DBOS.listQueuedWorkflows"
         )
 
     @classproperty
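The workflow-management classmethods above now funnel their work through SystemDatabase.call_function_as_step, so calling them from inside a workflow checkpoints the result and replays it on recovery. A hedged usage sketch, assuming a running DBOS application; the child workflow ID and the status handling are illustrative:

from dbos import DBOS

@DBOS.workflow()
def supervisor(child_id: str) -> str:
    # Recorded as a "DBOS.getStatus" step, so a recovered execution sees the
    # same status object rather than re-querying at a later point in time.
    status = DBOS.get_workflow_status(child_id)
    if status is not None and status.status == "PENDING":
        # Recorded as "DBOS.cancelWorkflow"; replay will not cancel twice.
        DBOS.cancel_workflow(child_id)
    return "supervised"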
dbos/_debug.py CHANGED
@@ -28,7 +28,7 @@ def debug_workflow(workflow_id: str, entrypoint: Union[str, PythonModule]) -> No
 
     DBOS.logger.info(f"Debugging workflow {workflow_id}...")
     DBOS.launch(debug_mode=True)
-    handle = DBOS.execute_workflow_id(workflow_id)
+    handle = DBOS._execute_workflow_id(workflow_id)
     handle.get_result()
     DBOS.logger.info("Workflow Debugging complete. Exiting process.")
 
dbos/_error.py CHANGED
@@ -37,6 +37,7 @@ class DBOSErrorCode(Enum):
     NotAuthorized = 8
     ConflictingWorkflowError = 9
     WorkflowCancelled = 10
+    UnexpectedStep = 11
     ConflictingRegistrationError = 25
 
 
@@ -155,3 +156,15 @@ class DBOSConflictingRegistrationError(DBOSException):
             f"Operation (Name: {name}) is already registered with a conflicting function type",
             dbos_error_code=DBOSErrorCode.ConflictingRegistrationError.value,
         )
+
+
+class DBOSUnexpectedStepError(DBOSException):
+    """Exception raised when a step has an unexpected recorded name."""
+
+    def __init__(
+        self, workflow_id: str, step_id: int, expected_name: str, recorded_name: str
+    ) -> None:
+        super().__init__(
+            f"During execution of workflow {workflow_id} step {step_id}, function {recorded_name} was recorded when {expected_name} was expected. Check that your workflow is deterministic.",
+            dbos_error_code=DBOSErrorCode.UnexpectedStep.value,
+        )
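The new error carries the workflow ID, step ID, and both function names, and is assigned code 11 in DBOSErrorCode. A small self-contained check of the message format (importing from the private _error module purely for illustration):

from dbos._error import DBOSErrorCode, DBOSUnexpectedStepError

try:
    raise DBOSUnexpectedStepError(
        workflow_id="wf-123",
        step_id=2,
        expected_name="send_report",
        recorded_name="fetch_data",
    )
except DBOSUnexpectedStepError as e:
    # The enum value added above.
    assert DBOSErrorCode.UnexpectedStep.value == 11
    print(e)  # "During execution of workflow wf-123 step 2, function fetch_data ..."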
dbos/_sys_db.py CHANGED
@@ -9,13 +9,14 @@ from enum import Enum
 from typing import (
     TYPE_CHECKING,
     Any,
+    Callable,
     Dict,
     List,
     Literal,
     Optional,
     Sequence,
-    Set,
     TypedDict,
+    TypeVar,
 )
 
 import psycopg
@@ -35,6 +36,8 @@ from ._error import (
     DBOSConflictingWorkflowError,
     DBOSDeadLetterQueueError,
     DBOSNonExistentWorkflowError,
+    DBOSUnexpectedStepError,
+    DBOSWorkflowCancelledError,
     DBOSWorkflowConflictIDError,
 )
 from ._logger import dbos_logger
@@ -577,9 +580,7 @@ class SystemDatabase:
         }
         return status
 
-    def await_workflow_result_internal(self, workflow_uuid: str) -> dict[str, Any]:
-        polling_interval_secs: float = 1.000
-
+    def await_workflow_result(self, workflow_id: str) -> Any:
         while True:
             with self.engine.begin() as c:
                 row = c.execute(
@@ -587,41 +588,23 @@ class SystemDatabase:
                     SystemSchema.workflow_status.c.status,
                     SystemSchema.workflow_status.c.output,
                     SystemSchema.workflow_status.c.error,
-                ).where(
-                    SystemSchema.workflow_status.c.workflow_uuid == workflow_uuid
-                )
+                ).where(SystemSchema.workflow_status.c.workflow_uuid == workflow_id)
             ).fetchone()
             if row is not None:
                 status = row[0]
-                if status == str(WorkflowStatusString.SUCCESS.value):
-                    return {
-                        "status": status,
-                        "output": row[1],
-                        "workflow_uuid": workflow_uuid,
-                    }
-
-                elif status == str(WorkflowStatusString.ERROR.value):
-                    return {
-                        "status": status,
-                        "error": row[2],
-                        "workflow_uuid": workflow_uuid,
-                    }
-
+                if status == WorkflowStatusString.SUCCESS.value:
+                    output = row[1]
+                    return _serialization.deserialize(output)
+                elif status == WorkflowStatusString.ERROR.value:
+                    error = row[2]
+                    raise _serialization.deserialize_exception(error)
+                elif status == WorkflowStatusString.CANCELLED.value:
+                    # Raise a normal exception here, not the cancellation exception
+                    # because the awaiting workflow is not being cancelled.
+                    raise Exception(f"Awaited workflow {workflow_id} was cancelled")
             else:
                 pass  # CB: I guess we're assuming the WF will show up eventually.
-
-            time.sleep(polling_interval_secs)
-
-    def await_workflow_result(self, workflow_uuid: str) -> Any:
-        stat = self.await_workflow_result_internal(workflow_uuid)
-        if not stat:
-            return None
-        status: str = stat["status"]
-        if status == str(WorkflowStatusString.SUCCESS.value):
-            return _serialization.deserialize(stat["output"])
-        elif status == str(WorkflowStatusString.ERROR.value):
-            raise _serialization.deserialize_exception(stat["error"])
-        return None
+            time.sleep(1)
 
     def update_workflow_inputs(
         self, workflow_uuid: str, inputs: str, conn: Optional[sa.Connection] = None
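The consolidated await_workflow_result polls the workflow_status table once a second and either deserializes the output, re-raises the recorded error, or fails if the awaited workflow was cancelled. A dependency-free sketch of that polling shape, with a stubbed status lookup standing in for the SQL query (names here are illustrative):

import time
from typing import Any, Callable, Optional, Tuple

# Stub: returns (status, output, error) or None if the row isn't visible yet.
StatusLookup = Callable[[str], Optional[Tuple[str, Any, Optional[Exception]]]]

def await_result(workflow_id: str, lookup: StatusLookup, poll_secs: float = 1.0) -> Any:
    while True:
        row = lookup(workflow_id)
        if row is not None:
            status, output, error = row
            if status == "SUCCESS":
                return output
            elif status == "ERROR":
                raise error if error else Exception("workflow failed")
            elif status == "CANCELLED":
                # Plain exception: the awaiting caller is not itself cancelled.
                raise Exception(f"Awaited workflow {workflow_id} was cancelled")
        time.sleep(poll_secs)

# Example: a lookup that reports success immediately.
print(await_result("wf-1", lambda wf: ("SUCCESS", 42, None)))  # -> 42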
@@ -895,16 +878,34 @@ class SystemDatabase:
             raise
 
     def check_operation_execution(
-        self, workflow_uuid: str, function_id: int, conn: Optional[sa.Connection] = None
+        self,
+        workflow_id: str,
+        function_id: int,
+        function_name: str,
+        *,
+        conn: Optional[sa.Connection] = None,
     ) -> Optional[RecordedResult]:
-        sql = sa.select(
-            SystemSchema.operation_outputs.c.output,
-            SystemSchema.operation_outputs.c.error,
-        ).where(
-            SystemSchema.operation_outputs.c.workflow_uuid == workflow_uuid,
-            SystemSchema.operation_outputs.c.function_id == function_id,
+        # Retrieve the status of the workflow. Additionally, if this step
+        # has run before, retrieve its name, output, and error.
+        sql = (
+            sa.select(
+                SystemSchema.workflow_status.c.status,
+                SystemSchema.operation_outputs.c.output,
+                SystemSchema.operation_outputs.c.error,
+                SystemSchema.operation_outputs.c.function_name,
+            )
+            .select_from(
+                SystemSchema.workflow_status.outerjoin(
+                    SystemSchema.operation_outputs,
+                    (
+                        SystemSchema.workflow_status.c.workflow_uuid
+                        == SystemSchema.operation_outputs.c.workflow_uuid
+                    )
+                    & (SystemSchema.operation_outputs.c.function_id == function_id),
+                )
+            )
+            .where(SystemSchema.workflow_status.c.workflow_uuid == workflow_id)
         )
-
         # If in a transaction, use the provided connection
         rows: Sequence[Any]
         if conn is not None:
@@ -912,11 +913,32 @@ class SystemDatabase:
         else:
             with self.engine.begin() as c:
                 rows = c.execute(sql).all()
-        if len(rows) == 0:
+        assert len(rows) > 0, f"Error: Workflow {workflow_id} does not exist"
+        workflow_status, output, error, recorded_function_name = (
+            rows[0][0],
+            rows[0][1],
+            rows[0][2],
+            rows[0][3],
+        )
+        # If the workflow is cancelled, raise the exception
+        if workflow_status == WorkflowStatusString.CANCELLED.value:
+            raise DBOSWorkflowCancelledError(
+                f"Workflow {workflow_id} is cancelled. Aborting function."
+            )
+        # If there is no row for the function, return None
+        if recorded_function_name is None:
             return None
+        # If the provided and recorded function name are different, throw an exception.
+        if function_name != recorded_function_name:
+            raise DBOSUnexpectedStepError(
+                workflow_id=workflow_id,
+                step_id=function_id,
+                expected_name=function_name,
+                recorded_name=recorded_function_name,
+            )
         result: RecordedResult = {
-            "output": rows[0][0],
-            "error": rows[0][1],
+            "output": output,
+            "error": error,
         }
         return result
 
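check_operation_execution now answers three questions in one round trip: is the workflow cancelled, has this function_id run before, and does the recorded name match the caller. A simplified, dependency-free mirror of that branching (plain dicts stand in for the joined workflow_status / operation_outputs row; all names here are illustrative):

from typing import Any, Dict, Optional

class WorkflowCancelled(Exception): ...
class UnexpectedStep(Exception): ...

def check_operation(
    workflow_status: str,
    recorded: Optional[Dict[str, Any]],  # {"function_name", "output", "error"} or None
    expected_name: str,
) -> Optional[Dict[str, Any]]:
    if workflow_status == "CANCELLED":
        # The whole workflow is cancelled: abort before running anything.
        raise WorkflowCancelled("workflow is cancelled, aborting function")
    if recorded is None:
        return None  # first execution of this function_id
    if recorded["function_name"] != expected_name:
        # Recorded history disagrees with the code being replayed.
        raise UnexpectedStep(
            f"recorded {recorded['function_name']}, expected {expected_name}"
        )
    return {"output": recorded["output"], "error": recorded["error"]}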
@@ -945,10 +967,11 @@ class SystemDatabase:
         message: Any,
         topic: Optional[str] = None,
     ) -> None:
+        function_name = "DBOS.send"
         topic = topic if topic is not None else _dbos_null_topic
         with self.engine.begin() as c:
             recorded_output = self.check_operation_execution(
-                workflow_uuid, function_id, conn=c
+                workflow_uuid, function_id, function_name, conn=c
             )
             if self._debug_mode and recorded_output is None:
                 raise Exception(
@@ -981,7 +1004,7 @@ class SystemDatabase:
             output: OperationResultInternal = {
                 "workflow_uuid": workflow_uuid,
                 "function_id": function_id,
-                "function_name": "DBOS.send",
+                "function_name": function_name,
                 "output": None,
                 "error": None,
             }
@@ -995,10 +1018,13 @@ class SystemDatabase:
         topic: Optional[str],
         timeout_seconds: float = 60,
     ) -> Any:
+        function_name = "DBOS.recv"
         topic = topic if topic is not None else _dbos_null_topic
 
         # First, check for previous executions.
-        recorded_output = self.check_operation_execution(workflow_uuid, function_id)
+        recorded_output = self.check_operation_execution(
+            workflow_uuid, function_id, function_name
+        )
         if self._debug_mode and recorded_output is None:
             raise Exception("called recv in debug mode without a previous execution")
         if recorded_output is not None:
@@ -1075,7 +1101,7 @@ class SystemDatabase:
                 {
                     "workflow_uuid": workflow_uuid,
                     "function_id": function_id,
-                    "function_name": "DBOS.recv",
+                    "function_name": function_name,
                     "output": _serialization.serialize(
                         message
                     ),  # None will be serialized to 'null'
@@ -1149,7 +1175,10 @@ class SystemDatabase:
         seconds: float,
         skip_sleep: bool = False,
     ) -> float:
-        recorded_output = self.check_operation_execution(workflow_uuid, function_id)
+        function_name = "DBOS.sleep"
+        recorded_output = self.check_operation_execution(
+            workflow_uuid, function_id, function_name
+        )
         end_time: float
         if self._debug_mode and recorded_output is None:
             raise Exception("called sleep in debug mode without a previous execution")
@@ -1166,7 +1195,7 @@ class SystemDatabase:
             {
                 "workflow_uuid": workflow_uuid,
                 "function_id": function_id,
-                "function_name": "DBOS.sleep",
+                "function_name": function_name,
                 "output": _serialization.serialize(end_time),
                 "error": None,
             }
@@ -1185,9 +1214,10 @@ class SystemDatabase:
         key: str,
         message: Any,
     ) -> None:
+        function_name = "DBOS.setEvent"
         with self.engine.begin() as c:
             recorded_output = self.check_operation_execution(
-                workflow_uuid, function_id, conn=c
+                workflow_uuid, function_id, function_name, conn=c
             )
             if self._debug_mode and recorded_output is None:
                 raise Exception(
@@ -1214,7 +1244,7 @@ class SystemDatabase:
             output: OperationResultInternal = {
                 "workflow_uuid": workflow_uuid,
                 "function_id": function_id,
-                "function_name": "DBOS.setEvent",
+                "function_name": function_name,
                 "output": None,
                 "error": None,
             }
@@ -1227,6 +1257,7 @@ class SystemDatabase:
         timeout_seconds: float = 60,
         caller_ctx: Optional[GetEventWorkflowContext] = None,
     ) -> Any:
+        function_name = "DBOS.getEvent"
         get_sql = sa.select(
             SystemSchema.workflow_events.c.value,
         ).where(
@@ -1236,7 +1267,7 @@ class SystemDatabase:
         # Check for previous executions only if it's in a workflow
         if caller_ctx is not None:
             recorded_output = self.check_operation_execution(
-                caller_ctx["workflow_uuid"], caller_ctx["function_id"]
+                caller_ctx["workflow_uuid"], caller_ctx["function_id"], function_name
             )
             if self._debug_mode and recorded_output is None:
                 raise Exception(
@@ -1295,7 +1326,7 @@ class SystemDatabase:
                 {
                     "workflow_uuid": caller_ctx["workflow_uuid"],
                     "function_id": caller_ctx["function_id"],
-                    "function_name": "DBOS.getEvent",
+                    "function_name": function_name,
                     "output": _serialization.serialize(
                         value
                     ),  # None will be serialized to 'null'
@@ -1534,6 +1565,40 @@ class SystemDatabase:
         )
         return True
 
+    T = TypeVar("T")
+
+    def call_function_as_step(self, fn: Callable[[], T], function_name: str) -> T:
+        ctx = get_local_dbos_context()
+        if ctx and ctx.is_within_workflow():
+            ctx.function_id += 1
+            res = self.check_operation_execution(
+                ctx.workflow_id, ctx.function_id, function_name
+            )
+            if res is not None:
+                if res["output"] is not None:
+                    resstat: SystemDatabase.T = _serialization.deserialize(
+                        res["output"]
+                    )
+                    return resstat
+                elif res["error"] is not None:
+                    raise _serialization.deserialize_exception(res["error"])
+                else:
+                    raise Exception(
+                        f"Recorded output and error are both None for {function_name}"
+                    )
+        result = fn()
+        if ctx and ctx.is_within_workflow():
+            self.record_operation_result(
+                {
+                    "workflow_uuid": ctx.workflow_id,
+                    "function_id": ctx.function_id,
+                    "function_name": function_name,
+                    "output": _serialization.serialize(result),
+                    "error": None,
+                }
+            )
+        return result
+
 
 def reset_system_database(config: ConfigFile) -> None:
     sysdb_name = (
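call_function_as_step is the generic checkpoint used by the DBOS classmethods above: inside a workflow it advances the function counter, returns or re-raises any recorded result, and otherwise runs the function and records its serialized output. A self-contained toy version of that record-once/replay pattern, with a dict standing in for operation_outputs:

import pickle
from typing import Any, Callable, Dict, Tuple

_recorded: Dict[Tuple[str, int], bytes] = {}

def call_as_step(workflow_id: str, function_id: int, fn: Callable[[], Any]) -> Any:
    key = (workflow_id, function_id)
    if key in _recorded:
        # Replay: return the checkpointed result without rerunning fn.
        return pickle.loads(_recorded[key])
    result = fn()                          # first execution
    _recorded[key] = pickle.dumps(result)  # checkpoint the serialized result
    return result

# The first call runs the lambda; the second returns the recorded value.
assert call_as_step("wf-1", 1, lambda: 42) == 42
assert call_as_step("wf-1", 1, lambda: 99) == 42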
dbos/_workflow_commands.py CHANGED
@@ -45,6 +45,9 @@ class WorkflowStatus:
     executor_id: Optional[str]
     # The application version on which this workflow was started
     app_version: Optional[str]
+
+    # INTERNAL FIELDS
+
     # The ID of the application executing this workflow
     app_id: Optional[str]
     # The number of times this workflow's execution has been attempted
dbos-0.26.0a7.dist-info/METADATA → dbos-0.26.0a8.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dbos
-Version: 0.26.0a7
+Version: 0.26.0a8
 Summary: Ultra-lightweight durable execution in Python
 Author-Email: "DBOS, Inc." <contact@dbos.dev>
 License: MIT
dbos-0.26.0a7.dist-info/RECORD → dbos-0.26.0a8.dist-info/RECORD CHANGED
@@ -1,26 +1,26 @@
-dbos-0.26.0a7.dist-info/METADATA,sha256=PjY70KmfnbwPahIoEhiSsBEG6VLDOb972f59w0UWS3Q,5553
-dbos-0.26.0a7.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
-dbos-0.26.0a7.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
-dbos-0.26.0a7.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
+dbos-0.26.0a8.dist-info/METADATA,sha256=Ll7QmuHMijtBbtmGEJuVaYbcmNMKKSbZTh1aWcQUU9I,5553
+dbos-0.26.0a8.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
+dbos-0.26.0a8.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
+dbos-0.26.0a8.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
 dbos/__init__.py,sha256=3NQfGlBiiUSM_v88STdVP3rNZvGkUL_9WbSotKb8Voo,873
 dbos/__main__.py,sha256=G7Exn-MhGrVJVDbgNlpzhfh8WMX_72t3_oJaFT9Lmt8,653
 dbos/_admin_server.py,sha256=vxPG_YJ6lYrkfPCSp42FiATVLBOij7Fm52Yngg5Z_tE,7027
-dbos/_app_db.py,sha256=XdjZgKJMezSVZQJkvxBNa9x4asLURl2O-QxdmLai7wA,8491
+dbos/_app_db.py,sha256=IwnNlHEQYp2bl5BM66vVPFa40h8DOtvRgUWTJ1dz20A,8963
 dbos/_classproperty.py,sha256=f0X-_BySzn3yFDRKB2JpCbLYQ9tLwt1XftfshvY7CBs,626
 dbos/_client.py,sha256=fzW_Gagh-oyWyDYtREcQDBesoVl_LsEoMeJAsn5-C5s,7262
 dbos/_cloudutils/authentication.py,sha256=V0fCWQN9stCkhbuuxgPTGpvuQcDqfU3KAxPAh01vKW4,5007
 dbos/_cloudutils/cloudutils.py,sha256=YC7jGsIopT0KveLsqbRpQk2KlRBk-nIRC_UCgep4f3o,7797
 dbos/_cloudutils/databases.py,sha256=_shqaqSvhY4n2ScgQ8IP5PDZvzvcx3YBKV8fj-cxhSY,8543
-dbos/_conductor/conductor.py,sha256=PzUFCX_JXGHClTF-hqTLR0ssO4kXdet4ZwHhJtuevEM,16839
+dbos/_conductor/conductor.py,sha256=HYzVL29IMMrs2Mnms_7cHJynCnmmEN5SDQOMjzn3UoU,16840
 dbos/_conductor/protocol.py,sha256=xN7pmooyF1pqbH1b6WhllU5718P7zSb_b0KCwA6bzcs,6716
 dbos/_context.py,sha256=I8sLkdKTTkZEz7wG-MjynaQB6XEF2bLXuwNksiauP7w,19430
-dbos/_core.py,sha256=EA9X4lTTTlimN8oa_mFICtl6Ke2biCvPdHl6PABjgGI,45749
+dbos/_core.py,sha256=tjBGVbSgOn59lR29gcYi5f6fcKNKQM5EP1QXrQGUkXA,45426
 dbos/_croniter.py,sha256=XHAyUyibs_59sJQfSNWkP7rqQY6_XrlfuuCxk4jYqek,47559
 dbos/_db_wizard.py,sha256=VnMa6OL87Lc-XPDD1RnXp8NjsJE8YgiQLj3wtWAXp-8,8252
-dbos/_dbos.py,sha256=je4LWR78S5uitxNzyf3k_x5KErEmXyu88TjqZnHtQXU,46183
+dbos/_dbos.py,sha256=TOLi95Aca50huyOAWl9H5fii4nMYaGwN-zQ8GlLWdOg,45569
 dbos/_dbos_config.py,sha256=rTn30Hgh-RzTxqHbnYh2pC3Ioo30eJV9K4YxhJd-Gj4,22718
-dbos/_debug.py,sha256=mmgvLkqlrljMBBow9wk01PPur9kUf2rI_11dTJXY4gw,1822
-dbos/_error.py,sha256=B6Y9XLS1f6yrawxB2uAEYFMxFwk9BHhdxPNddKco-Fw,5399
+dbos/_debug.py,sha256=MNlQVZ6TscGCRQeEEL0VE8Uignvr6dPeDDDefS3xgIE,1823
+dbos/_error.py,sha256=HtdV6Qy7qRyGD57wxLwE7YT0WdYtlx5ZLEe_Kv_gC-U,5953
 dbos/_fastapi.py,sha256=PhaKftbApHnjtYEOw0EYna_3K0cmz__J9of7mRJWzu4,3704
 dbos/_flask.py,sha256=DZKUZR5-xOzPI7tYZ53r2PvvHVoAb8SYwLzMVFsVfjI,2608
 dbos/_kafka.py,sha256=pz0xZ9F3X9Ky1k-VSbeF3tfPhP3UPr3lUUhUfE41__U,4198
@@ -47,7 +47,7 @@ dbos/_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_schemas/application_database.py,sha256=SypAS9l9EsaBHFn9FR8jmnqt01M74d9AF1AMa4m2hhI,1040
 dbos/_schemas/system_database.py,sha256=W9eSpL7SZzQkxcEZ4W07BOcwkkDr35b9oCjUOgfHWek,5336
 dbos/_serialization.py,sha256=YCYv0qKAwAZ1djZisBC7khvKqG-5OcIv9t9EC5PFIog,1743
-dbos/_sys_db.py,sha256=VBYVyKqZrwlFbDJ5cFIkeS5WtDOKpkI3lWJbSd5rB2s,65362
+dbos/_sys_db.py,sha256=BqXZ0l4X4Y4cFKDyaa8ZirWCnRlof9A12yp-XflGnb0,68229
 dbos/_templates/dbos-db-starter/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
 dbos/_templates/dbos-db-starter/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_templates/dbos-db-starter/__package/main.py,sha256=nJMN3ZD2lmwg4Dcgmiwqc-tQGuCJuJal2Xl85iA277U,2453
@@ -60,11 +60,11 @@ dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py,sh
 dbos/_templates/dbos-db-starter/start_postgres_docker.py,sha256=lQVLlYO5YkhGPEgPqwGc7Y8uDKse9HsWv5fynJEFJHM,1681
 dbos/_tracer.py,sha256=dFDSFlta-rfA3-ahIRLYwnnoAOmlavdxAGllqwFgnCA,2440
 dbos/_utils.py,sha256=nFRUHzVjXG5AusF85AlYHikj63Tzi-kQm992ihsrAxA,201
-dbos/_workflow_commands.py,sha256=w981c3rVvhbhYd6BBP268C0Q88ClmwBwnachBxfnRmU,6129
+dbos/_workflow_commands.py,sha256=Tf7_hZQoPgP90KHQjMNlBggCNrLLCNRJxHtAJLvarc4,6153
 dbos/cli/_github_init.py,sha256=Y_bDF9gfO2jB1id4FV5h1oIxEJRWyqVjhb7bNEa5nQ0,3224
 dbos/cli/_template_init.py,sha256=-WW3kbq0W_Tq4WbMqb1UGJG3xvJb3woEY5VspG95Srk,2857
 dbos/cli/cli.py,sha256=FnI5ZAo-kAic-ij5wBqNJ2EJiYoBK1Ot-tTMh1WcXEM,16132
 dbos/dbos-config.schema.json,sha256=4z2OXPfp7H0uNT1m5dKxjg31qbAfPyKkFXwHufuUMec,5910
 dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
 version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
-dbos-0.26.0a7.dist-info/RECORD,,
+dbos-0.26.0a8.dist-info/RECORD,,