dbos 0.24.1__py3-none-any.whl → 0.25.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
dbos/_dbos.py CHANGED
@@ -11,7 +11,6 @@ import threading
  import traceback
  import uuid
  from concurrent.futures import ThreadPoolExecutor
- from dataclasses import dataclass
  from logging import Logger
  from typing import (
      TYPE_CHECKING,
@@ -28,17 +27,23 @@ from typing import (
      TypeVar,
      Union,
      cast,
-     overload,
  )

  from opentelemetry.trace import Span

+ from dbos import _serialization
  from dbos._conductor.conductor import ConductorWebsocket
  from dbos._utils import GlobalParams
+ from dbos._workflow_commands import (
+     WorkflowStatus,
+     list_queued_workflows,
+     list_workflows,
+ )

  from ._classproperty import classproperty
  from ._core import (
      TEMP_SEND_WF_NAME,
+     WorkflowHandleAsyncPolling,
      WorkflowHandlePolling,
      decorate_step,
      decorate_transaction,
@@ -85,6 +90,7 @@ from ._admin_server import AdminServer
  from ._app_db import ApplicationDatabase
  from ._context import (
      EnterDBOSStep,
+     StepStatus,
      TracedAttributes,
      assert_current_dbos_context,
      get_local_dbos_context,
@@ -107,6 +113,7 @@ from ._error import (
  )
  from ._logger import add_otlp_to_all_loggers, config_logger, dbos_logger, init_logger
  from ._sys_db import SystemDatabase
+ from ._workflow_commands import WorkflowStatus, get_workflow

  # Most DBOS functions are just any callable F, so decorators / wrappers work on F
  # There are cases where the parameters P and return value R should be separate
@@ -119,17 +126,6 @@ R = TypeVar("R", covariant=True) # A generic type for workflow return values

  T = TypeVar("T")

-
- class DBOSCallProtocol(Protocol[P, R]):
-     __name__: str
-     __qualname__: str
-
-     def __call__(*args: P.args, **kwargs: P.kwargs) -> R: ...
-
-
- Workflow: TypeAlias = DBOSCallProtocol[P, R]
-
-
  IsolationLevel = Literal[
      "SERIALIZABLE",
      "REPEATABLE READ",
@@ -162,7 +158,7 @@ RegisteredJob = Tuple[

  class DBOSRegistry:
      def __init__(self) -> None:
-         self.workflow_info_map: dict[str, Workflow[..., Any]] = {}
+         self.workflow_info_map: dict[str, Callable[..., Any]] = {}
          self.function_type_map: dict[str, str] = {}
          self.class_info_map: dict[str, type] = {}
          self.instance_info_map: dict[str, object] = {}
@@ -449,8 +445,12 @@ class DBOS:
          dbos_logger.info(f"Executor ID: {GlobalParams.executor_id}")
          dbos_logger.info(f"Application version: {GlobalParams.app_version}")
          self._executor_field = ThreadPoolExecutor(max_workers=64)
-         self._sys_db_field = SystemDatabase(self.config, debug_mode=debug_mode)
-         self._app_db_field = ApplicationDatabase(self.config, debug_mode=debug_mode)
+         self._sys_db_field = SystemDatabase(
+             self.config["database"], debug_mode=debug_mode
+         )
+         self._app_db_field = ApplicationDatabase(
+             self.config["database"], debug_mode=debug_mode
+         )

          if debug_mode:
              return
@@ -489,14 +489,6 @@ class DBOS:
          notification_listener_thread.start()
          self._background_threads.append(notification_listener_thread)

-         # Start flush workflow buffers thread
-         flush_workflow_buffers_thread = threading.Thread(
-             target=self._sys_db.flush_workflow_buffers,
-             daemon=True,
-         )
-         flush_workflow_buffers_thread.start()
-         self._background_threads.append(flush_workflow_buffers_thread)
-
          # Start the queue thread
          evt = threading.Event()
          self.stop_events.append(evt)
@@ -552,6 +544,10 @@ class DBOS:
          """
          if _dbos_global_instance is not None:
              _dbos_global_instance._reset_system_database()
+         else:
+             dbos_logger.warning(
+                 "reset_system_database has no effect because global DBOS object does not exist"
+             )

      def _reset_system_database(self) -> None:
          assert (
@@ -706,7 +702,7 @@ class DBOS:
      @classmethod
      def start_workflow(
          cls,
-         func: Workflow[P, R],
+         func: Callable[P, R],
          *args: P.args,
          **kwargs: P.kwargs,
      ) -> WorkflowHandle[R]:
@@ -716,7 +712,7 @@ class DBOS:
      @classmethod
      async def start_workflow_async(
          cls,
-         func: Workflow[P, Coroutine[Any, Any, R]],
+         func: Callable[P, Coroutine[Any, Any, R]],
          *args: P.args,
          **kwargs: P.kwargs,
      ) -> WorkflowHandleAsync[R]:
@@ -728,34 +724,39 @@ class DBOS:
      @classmethod
      def get_workflow_status(cls, workflow_id: str) -> Optional[WorkflowStatus]:
          """Return the status of a workflow execution."""
+         sys_db = _get_dbos_instance()._sys_db
          ctx = get_local_dbos_context()
          if ctx and ctx.is_within_workflow():
              ctx.function_id += 1
-             stat = _get_dbos_instance()._sys_db.get_workflow_status_within_wf(
-                 workflow_id, ctx.workflow_id, ctx.function_id
+             res = sys_db.check_operation_execution(ctx.workflow_id, ctx.function_id)
+             if res is not None:
+                 if res["output"]:
+                     resstat: WorkflowStatus = _serialization.deserialize(res["output"])
+                     return resstat
+                 else:
+                     raise DBOSException(
+                         "Workflow status record not found. This should not happen! \033[1m Hint: Check if your workflow is deterministic.\033[0m"
+                     )
+         stat = get_workflow(_get_dbos_instance()._sys_db, workflow_id, True)
+
+         if ctx and ctx.is_within_workflow():
+             sys_db.record_operation_result(
+                 {
+                     "workflow_uuid": ctx.workflow_id,
+                     "function_id": ctx.function_id,
+                     "function_name": "DBOS.getStatus",
+                     "output": _serialization.serialize(stat),
+                     "error": None,
+                 }
              )
-         else:
-             stat = _get_dbos_instance()._sys_db.get_workflow_status(workflow_id)
-         if stat is None:
-             return None
-
-         return WorkflowStatus(
-             workflow_id=workflow_id,
-             status=stat["status"],
-             name=stat["name"],
-             executor_id=stat["executor_id"],
-             recovery_attempts=stat["recovery_attempts"],
-             class_name=stat["class_name"],
-             config_name=stat["config_name"],
-             queue_name=stat["queue_name"],
-             authenticated_user=stat["authenticated_user"],
-             assumed_role=stat["assumed_role"],
-             authenticated_roles=(
-                 json.loads(stat["authenticated_roles"])
-                 if stat["authenticated_roles"] is not None
-                 else None
-             ),
-         )
+         return stat
+
+     @classmethod
+     async def get_workflow_status_async(
+         cls, workflow_id: str
+     ) -> Optional[WorkflowStatus]:
+         """Return the status of a workflow execution."""
+         return await asyncio.to_thread(cls.get_workflow_status, workflow_id)

      @classmethod
      def retrieve_workflow(
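
Note on the hunk above: inside a workflow, DBOS.get_workflow_status is now checkpointed in operation_outputs (recorded as "DBOS.getStatus") so it replays deterministically on recovery, and the new get_workflow_status_async simply runs the same call in a worker thread. A minimal usage sketch, not part of the diff (the workflow ID is illustrative):

from dbos import DBOS

# Outside a workflow this is a direct read; inside a workflow the result is
# recorded once and replayed on re-execution.
status = DBOS.get_workflow_status("example-workflow-id")
if status is not None:
    print(status.status, status.name)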
@@ -769,6 +770,18 @@ class DBOS:
              raise DBOSNonExistentWorkflowError(workflow_id)
          return WorkflowHandlePolling(workflow_id, dbos)

+     @classmethod
+     async def retrieve_workflow_async(
+         cls, workflow_id: str, existing_workflow: bool = True
+     ) -> WorkflowHandleAsync[R]:
+         """Return a `WorkflowHandle` for a workflow execution."""
+         dbos = _get_dbos_instance()
+         if existing_workflow:
+             stat = await dbos.get_workflow_status_async(workflow_id)
+             if stat is None:
+                 raise DBOSNonExistentWorkflowError(workflow_id)
+         return WorkflowHandleAsyncPolling(workflow_id, dbos)
+
      @classmethod
      def send(
          cls, destination_id: str, message: Any, topic: Optional[str] = None
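
The new retrieve_workflow_async mirrors retrieve_workflow for asyncio callers and returns a WorkflowHandleAsyncPolling. A minimal sketch, assuming the async handle exposes awaitable get_status() and get_result() (not shown in this diff):

from dbos import DBOS

async def wait_for_result(workflow_id: str) -> None:
    # Raises DBOSNonExistentWorkflowError if no such workflow exists.
    handle = await DBOS.retrieve_workflow_async(workflow_id)
    print((await handle.get_status()).status)
    print(await handle.get_result())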
@@ -943,6 +956,60 @@ class DBOS:
          _get_or_create_dbos_registry().clear_workflow_cancelled(workflow_id)
          return execute_workflow_by_id(_get_dbos_instance(), workflow_id, False)

+     @classmethod
+     def list_workflows(
+         cls,
+         *,
+         workflow_ids: Optional[List[str]] = None,
+         status: Optional[str] = None,
+         start_time: Optional[str] = None,
+         end_time: Optional[str] = None,
+         name: Optional[str] = None,
+         app_version: Optional[str] = None,
+         user: Optional[str] = None,
+         limit: Optional[int] = None,
+         offset: Optional[int] = None,
+         sort_desc: bool = False,
+     ) -> List[WorkflowStatus]:
+         return list_workflows(
+             _get_dbos_instance()._sys_db,
+             workflow_ids=workflow_ids,
+             status=status,
+             start_time=start_time,
+             end_time=end_time,
+             name=name,
+             app_version=app_version,
+             user=user,
+             limit=limit,
+             offset=offset,
+             sort_desc=sort_desc,
+         )
+
+     @classmethod
+     def list_queued_workflows(
+         cls,
+         *,
+         queue_name: Optional[str] = None,
+         status: Optional[str] = None,
+         start_time: Optional[str] = None,
+         end_time: Optional[str] = None,
+         name: Optional[str] = None,
+         limit: Optional[int] = None,
+         offset: Optional[int] = None,
+         sort_desc: bool = False,
+     ) -> List[WorkflowStatus]:
+         return list_queued_workflows(
+             _get_dbos_instance()._sys_db,
+             queue_name=queue_name,
+             status=status,
+             start_time=start_time,
+             end_time=end_time,
+             name=name,
+             limit=limit,
+             offset=offset,
+             sort_desc=sort_desc,
+         )
+
      @classproperty
      def logger(cls) -> Logger:
          """Return the DBOS `Logger` for the current context."""
@@ -981,6 +1048,23 @@ class DBOS:
          ), "workflow_id is only available within a DBOS operation."
          return ctx.workflow_id

+     @classproperty
+     def step_id(cls) -> int:
+         """Return the step ID for the current context. This is a unique identifier of the current step within the workflow."""
+         ctx = assert_current_dbos_context()
+         assert (
+             ctx.is_within_workflow()
+         ), "step_id is only available within a DBOS workflow."
+         return ctx.function_id
+
+     @classproperty
+     def step_status(cls) -> StepStatus:
+         """Return the status of the currently executing step."""
+         ctx = assert_current_dbos_context()
+         assert ctx.is_step(), "step_status is only available within a DBOS step."
+         assert ctx.step_status is not None
+         return ctx.step_status
+
      @classproperty
      def parent_workflow_id(cls) -> str:
          """
@@ -1035,41 +1119,6 @@ class DBOS:
          ctx.authenticated_roles = authenticated_roles


- @dataclass
- class WorkflowStatus:
-     """
-     Status of workflow execution.
-
-     This captures the state of a workflow execution at a point in time.
-
-     Attributes:
-         workflow_id(str): The ID of the workflow execution
-         status(str): The status of the execution, from `WorkflowStatusString`
-         name(str): The workflow function name
-         executor_id(str): The ID of the executor running the workflow
-         class_name(str): For member functions, the name of the class containing the workflow function
-         config_name(str): For instance member functions, the name of the class instance for the execution
-         queue_name(str): For workflows that are or were queued, the queue name
-         authenticated_user(str): The user who invoked the workflow
-         assumed_role(str): The access role used by the user to allow access to the workflow function
-         authenticated_roles(List[str]): List of all access roles available to the authenticated user
-         recovery_attempts(int): Number of times the workflow has been restarted (usually by recovery)
-
-     """
-
-     workflow_id: str
-     status: str
-     name: str
-     executor_id: Optional[str]
-     class_name: Optional[str]
-     config_name: Optional[str]
-     queue_name: Optional[str]
-     authenticated_user: Optional[str]
-     assumed_role: Optional[str]
-     authenticated_roles: Optional[List[str]]
-     recovery_attempts: Optional[int]
-
-
  class WorkflowHandle(Generic[R], Protocol):
      """
      Handle to a workflow function.
dbos/_fastapi.py CHANGED
@@ -63,7 +63,10 @@ class LifespanMiddleware:
          if scope["type"] == "lifespan":

              async def wrapped_send(message: MutableMapping[str, Any]) -> None:
-                 if message["type"] == "lifespan.startup.complete":
+                 if (
+                     message["type"] == "lifespan.startup.complete"
+                     and not self.dbos._launched
+                 ):
                      self.dbos._launch()
                  elif message["type"] == "lifespan.shutdown.complete":
                      self.dbos._destroy()
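
The added _launched guard avoids a double launch when DBOS is already started before the ASGI lifespan startup event fires. A minimal sketch of the FastAPI integration this middleware backs (route and names are illustrative):

from fastapi import FastAPI
from dbos import DBOS

app = FastAPI()
DBOS(fastapi=app)  # installs the lifespan middleware; launch happens at startup

@app.get("/health")
def health() -> dict:
    return {"status": "ok"}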
dbos/_logger.py CHANGED
@@ -7,6 +7,7 @@ from opentelemetry.exporter.otlp.proto.http._log_exporter import OTLPLogExporter
  from opentelemetry.sdk._logs import LoggerProvider, LoggingHandler
  from opentelemetry.sdk._logs.export import BatchLogRecordProcessor
  from opentelemetry.sdk.resources import Resource
+ from opentelemetry.trace.span import format_trace_id

  from dbos._utils import GlobalParams

@@ -26,6 +27,19 @@ class DBOSLogTransformer(logging.Filter):
          record.applicationID = self.app_id
          record.applicationVersion = GlobalParams.app_version
          record.executorID = GlobalParams.executor_id
+
+         # If available, decorate the log entry with Workflow ID and Trace ID
+         from dbos._context import get_local_dbos_context
+
+         ctx = get_local_dbos_context()
+         if ctx:
+             if ctx.is_within_workflow():
+                 record.operationUUID = ctx.workflow_id
+             span = ctx.get_current_span()
+             if span:
+                 trace_id = format_trace_id(span.get_span_context().trace_id)
+                 record.traceId = trace_id
+
          return True


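With this change, records logged inside a DBOS context carry operationUUID (the workflow ID) and, when a span is active, traceId. A minimal sketch of surfacing those extra attributes in standard logging, not part of the package (formatter and logger names are illustrative):

import logging

class TraceAwareFormatter(logging.Formatter):
    def format(self, record: logging.LogRecord) -> str:
        # The fields are only present when a DBOS context was active at log time.
        wf_id = getattr(record, "operationUUID", "-")
        trace_id = getattr(record, "traceId", "-")
        return f"[wf={wf_id} trace={trace_id}] {super().format(record)}"

handler = logging.StreamHandler()
handler.setFormatter(TraceAwareFormatter("%(levelname)s %(message)s"))
logging.getLogger("my_app").addHandler(handler)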
@@ -0,0 +1,46 @@
+ """functionname_childid_op_outputs
+
+ Revision ID: f4b9b32ba814
+ Revises: 04ca4f231047
+ Create Date: 2025-03-21 14:32:43.091074
+
+ """
+
+ from typing import Sequence, Union
+
+ import sqlalchemy as sa
+ from alembic import op
+
+ # revision identifiers, used by Alembic.
+ revision: str = "f4b9b32ba814"
+ down_revision: Union[str, None] = "04ca4f231047"
+ branch_labels: Union[str, Sequence[str], None] = None
+ depends_on: Union[str, Sequence[str], None] = None
+
+
+ def upgrade() -> None:
+     op.add_column(
+         "operation_outputs",
+         sa.Column(
+             "function_name",
+             sa.Text(),
+             nullable=False,
+             server_default="",
+         ),
+         schema="dbos",
+     )
+
+     op.add_column(
+         "operation_outputs",
+         sa.Column(
+             "child_workflow_id",
+             sa.Text(),
+             nullable=True,
+         ),
+         schema="dbos",
+     )
+
+
+ def downgrade() -> None:
+     op.drop_column("operation_outputs", "function_name", schema="dbos")
+     op.drop_column("operation_outputs", "child_workflow_id", schema="dbos")
dbos/_outcome.py CHANGED
@@ -4,6 +4,8 @@ import inspect
  import time
  from typing import Any, Callable, Coroutine, Optional, Protocol, TypeVar, Union, cast

+ from dbos._context import EnterDBOSStepRetry
+
  T = TypeVar("T")
  R = TypeVar("R")

@@ -98,7 +100,8 @@ class Immediate(Outcome[T]):
      ) -> T:
          for i in range(attempts):
              try:
-                 return func()
+                 with EnterDBOSStepRetry(i, attempts):
+                     return func()
              except Exception as exp:
                  wait_time = on_exception(i, exp)
                  time.sleep(wait_time)
@@ -184,7 +187,8 @@ class Pending(Outcome[T]):
      ) -> T:
          for i in range(attempts):
              try:
-                 return await func()
+                 with EnterDBOSStepRetry(i, attempts):
+                     return await func()
              except Exception as exp:
                  wait_time = on_exception(i, exp)
                  await asyncio.sleep(wait_time)
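
EnterDBOSStepRetry now wraps every retry attempt in both the sync (Immediate) and async (Pending) paths, which is what feeds the attempt information behind DBOS.step_status. A minimal sketch of a step that exercises this loop; the decorator parameters are the documented retry options but should be treated as assumptions here:

import random
from dbos import DBOS

@DBOS.step(retries_allowed=True, interval_seconds=1.0, max_attempts=5, backoff_rate=2.0)
def call_unreliable_service() -> str:
    # Each attempt of this body runs inside EnterDBOSStepRetry(i, attempts).
    if random.random() < 0.5:
        raise RuntimeError("transient failure")
    return "ok"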
dbos/_queue.py CHANGED
@@ -1,6 +1,6 @@
  import threading
  import traceback
- from typing import TYPE_CHECKING, Any, Coroutine, Optional, TypedDict
+ from typing import TYPE_CHECKING, Any, Callable, Coroutine, Optional, TypedDict

  from psycopg import errors
  from sqlalchemy.exc import OperationalError
@@ -10,7 +10,7 @@ from dbos._utils import GlobalParams
  from ._core import P, R, execute_workflow_by_id, start_workflow, start_workflow_async

  if TYPE_CHECKING:
-     from ._dbos import DBOS, Workflow, WorkflowHandle, WorkflowHandleAsync
+     from ._dbos import DBOS, WorkflowHandle, WorkflowHandleAsync


  class QueueRateLimit(TypedDict):
@@ -59,7 +59,7 @@ class Queue:
          registry.queue_info_map[self.name] = self

      def enqueue(
-         self, func: "Workflow[P, R]", *args: P.args, **kwargs: P.kwargs
+         self, func: "Callable[P, R]", *args: P.args, **kwargs: P.kwargs
      ) -> "WorkflowHandle[R]":
          from ._dbos import _get_dbos_instance

@@ -68,7 +68,7 @@

      async def enqueue_async(
          self,
-         func: "Workflow[P, Coroutine[Any, Any, R]]",
+         func: "Callable[P, Coroutine[Any, Any, R]]",
          *args: P.args,
          **kwargs: P.kwargs,
      ) -> "WorkflowHandleAsync[R]":
@@ -85,7 +85,7 @@ def queue_thread(stop_event: threading.Event, dbos: "DBOS") -> None:
          for _, queue in dbos._registry.queue_info_map.items():
              try:
                  wf_ids = dbos._sys_db.start_queued_workflows(
-                     queue, GlobalParams.executor_id
+                     queue, GlobalParams.executor_id, GlobalParams.app_version
                  )
                  for id in wf_ids:
                      execute_workflow_by_id(dbos, id)
@@ -71,8 +71,10 @@ class SystemSchema:
              nullable=False,
          ),
          Column("function_id", Integer, nullable=False),
+         Column("function_name", Text, nullable=False, default=""),
          Column("output", Text, nullable=True),
          Column("error", Text, nullable=True),
+         Column("child_workflow_id", Text, nullable=True),
          PrimaryKeyConstraint("workflow_uuid", "function_id"),
      )
