dbos 0.25.0a16.tar.gz → 0.26.0.tar.gz

This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only.

This version of dbos has been flagged as potentially problematic.
Files changed (111)
  1. {dbos-0.25.0a16 → dbos-0.26.0}/PKG-INFO +1 -1
  2. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/__init__.py +1 -2
  3. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_admin_server.py +56 -6
  4. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_app_db.py +135 -8
  5. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_client.py +175 -15
  6. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_conductor/conductor.py +2 -1
  7. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_conductor/protocol.py +1 -2
  8. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_context.py +66 -2
  9. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_core.py +130 -76
  10. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_dbos.py +155 -107
  11. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_dbos_config.py +53 -67
  12. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_debug.py +1 -1
  13. dbos-0.26.0/dbos/_docker_pg_helper.py +191 -0
  14. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_error.py +61 -15
  15. dbos-0.26.0/dbos/_event_loop.py +67 -0
  16. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_kafka.py +1 -1
  17. dbos-0.26.0/dbos/_migrations/versions/83f3732ae8e7_workflow_timeout.py +44 -0
  18. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_queue.py +2 -1
  19. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_recovery.py +1 -1
  20. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_registrations.py +20 -5
  21. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_scheduler.py +1 -1
  22. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_schemas/application_database.py +1 -0
  23. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_schemas/system_database.py +3 -1
  24. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_sys_db.py +533 -130
  25. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_utils.py +2 -0
  26. dbos-0.26.0/dbos/_workflow_commands.py +120 -0
  27. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/cli/cli.py +70 -4
  28. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/dbos-config.schema.json +26 -21
  29. {dbos-0.25.0a16 → dbos-0.26.0}/pyproject.toml +2 -1
  30. {dbos-0.25.0a16 → dbos-0.26.0}/tests/client_collateral.py +21 -0
  31. {dbos-0.25.0a16 → dbos-0.26.0}/tests/conftest.py +8 -0
  32. dbos-0.26.0/tests/dupname_classdefs1.py +9 -0
  33. dbos-0.26.0/tests/dupname_classdefsa.py +9 -0
  34. {dbos-0.25.0a16 → dbos-0.26.0}/tests/queuedworkflow.py +1 -1
  35. {dbos-0.25.0a16 → dbos-0.26.0}/tests/test_admin_server.py +25 -54
  36. {dbos-0.25.0a16 → dbos-0.26.0}/tests/test_async.py +128 -5
  37. {dbos-0.25.0a16 → dbos-0.26.0}/tests/test_classdecorators.py +45 -24
  38. {dbos-0.25.0a16 → dbos-0.26.0}/tests/test_client.py +70 -3
  39. {dbos-0.25.0a16 → dbos-0.26.0}/tests/test_config.py +6 -117
  40. {dbos-0.25.0a16 → dbos-0.26.0}/tests/test_dbos.py +204 -23
  41. {dbos-0.25.0a16 → dbos-0.26.0}/tests/test_debug.py +2 -2
  42. dbos-0.26.0/tests/test_docker_secrets.py +521 -0
  43. {dbos-0.25.0a16 → dbos-0.26.0}/tests/test_failures.py +108 -46
  44. {dbos-0.25.0a16 → dbos-0.26.0}/tests/test_fastapi.py +3 -1
  45. {dbos-0.25.0a16 → dbos-0.26.0}/tests/test_fastapi_roles.py +3 -3
  46. {dbos-0.25.0a16 → dbos-0.26.0}/tests/test_flask.py +3 -1
  47. {dbos-0.25.0a16 → dbos-0.26.0}/tests/test_queue.py +87 -6
  48. {dbos-0.25.0a16 → dbos-0.26.0}/tests/test_scheduler.py +7 -6
  49. {dbos-0.25.0a16 → dbos-0.26.0}/tests/test_singleton.py +1 -1
  50. dbos-0.25.0a16/tests/test_workflow_cmds.py → dbos-0.26.0/tests/test_workflow_introspection.py +177 -37
  51. dbos-0.26.0/tests/test_workflow_management.py +591 -0
  52. dbos-0.25.0a16/dbos/_cloudutils/authentication.py +0 -163
  53. dbos-0.25.0a16/dbos/_cloudutils/cloudutils.py +0 -254
  54. dbos-0.25.0a16/dbos/_cloudutils/databases.py +0 -241
  55. dbos-0.25.0a16/dbos/_db_wizard.py +0 -220
  56. dbos-0.25.0a16/dbos/_workflow_commands.py +0 -175
  57. dbos-0.25.0a16/tests/test_dbwizard.py +0 -84
  58. dbos-0.25.0a16/tests/test_workflow_cancel.py +0 -145
  59. {dbos-0.25.0a16 → dbos-0.26.0}/LICENSE +0 -0
  60. {dbos-0.25.0a16 → dbos-0.26.0}/README.md +0 -0
  61. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/__main__.py +0 -0
  62. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_classproperty.py +0 -0
  63. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_croniter.py +0 -0
  64. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_fastapi.py +0 -0
  65. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_flask.py +0 -0
  66. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_kafka_message.py +0 -0
  67. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_logger.py +0 -0
  68. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_migrations/env.py +0 -0
  69. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_migrations/script.py.mako +0 -0
  70. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_migrations/versions/04ca4f231047_workflow_queues_executor_id.py +0 -0
  71. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_migrations/versions/50f3227f0b4b_fix_job_queue.py +0 -0
  72. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_migrations/versions/5c361fc04708_added_system_tables.py +0 -0
  73. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_migrations/versions/a3b18ad34abe_added_triggers.py +0 -0
  74. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_migrations/versions/d76646551a6b_job_queue_limiter.py +0 -0
  75. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_migrations/versions/d76646551a6c_workflow_queue.py +0 -0
  76. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_migrations/versions/eab0cc1d9a14_job_queue.py +0 -0
  77. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py +0 -0
  78. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_outcome.py +0 -0
  79. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_request.py +0 -0
  80. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_roles.py +0 -0
  81. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_schemas/__init__.py +0 -0
  82. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_serialization.py +0 -0
  83. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_templates/dbos-db-starter/README.md +0 -0
  84. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_templates/dbos-db-starter/__package/__init__.py +0 -0
  85. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_templates/dbos-db-starter/__package/main.py +0 -0
  86. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_templates/dbos-db-starter/__package/schema.py +0 -0
  87. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_templates/dbos-db-starter/alembic.ini +0 -0
  88. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +0 -0
  89. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_templates/dbos-db-starter/migrations/env.py.dbos +0 -0
  90. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_templates/dbos-db-starter/migrations/script.py.mako +0 -0
  91. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py +0 -0
  92. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -0
  93. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/_tracer.py +0 -0
  94. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/cli/_github_init.py +0 -0
  95. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/cli/_template_init.py +0 -0
  96. {dbos-0.25.0a16 → dbos-0.26.0}/dbos/py.typed +0 -0
  97. {dbos-0.25.0a16 → dbos-0.26.0}/tests/__init__.py +0 -0
  98. {dbos-0.25.0a16 → dbos-0.26.0}/tests/atexit_no_ctor.py +0 -0
  99. {dbos-0.25.0a16 → dbos-0.26.0}/tests/atexit_no_launch.py +0 -0
  100. {dbos-0.25.0a16 → dbos-0.26.0}/tests/classdefs.py +0 -0
  101. {dbos-0.25.0a16 → dbos-0.26.0}/tests/client_worker.py +0 -0
  102. {dbos-0.25.0a16 → dbos-0.26.0}/tests/more_classdefs.py +0 -0
  103. {dbos-0.25.0a16 → dbos-0.26.0}/tests/test_concurrency.py +0 -0
  104. {dbos-0.25.0a16 → dbos-0.26.0}/tests/test_croniter.py +0 -0
  105. {dbos-0.25.0a16 → dbos-0.26.0}/tests/test_kafka.py +0 -0
  106. {dbos-0.25.0a16 → dbos-0.26.0}/tests/test_outcome.py +0 -0
  107. {dbos-0.25.0a16 → dbos-0.26.0}/tests/test_package.py +0 -0
  108. {dbos-0.25.0a16 → dbos-0.26.0}/tests/test_schema_migration.py +0 -0
  109. {dbos-0.25.0a16 → dbos-0.26.0}/tests/test_spans.py +0 -0
  110. {dbos-0.25.0a16 → dbos-0.26.0}/tests/test_sqlalchemy.py +0 -0
  111. {dbos-0.25.0a16 → dbos-0.26.0}/version/__init__.py +0 -0
--- dbos-0.25.0a16/PKG-INFO
+++ dbos-0.26.0/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dbos
-Version: 0.25.0a16
+Version: 0.26.0
 Summary: Ultra-lightweight durable execution in Python
 Author-Email: "DBOS, Inc." <contact@dbos.dev>
 License: MIT

--- dbos-0.25.0a16/dbos/__init__.py
+++ dbos-0.26.0/dbos/__init__.py
@@ -5,8 +5,7 @@ from ._dbos import DBOS, DBOSConfiguredInstance, WorkflowHandle
 from ._dbos_config import ConfigFile, DBOSConfig, get_dbos_database_url, load_config
 from ._kafka_message import KafkaMessage
 from ._queue import Queue
-from ._sys_db import GetWorkflowsInput, WorkflowStatusString
-from ._workflow_commands import WorkflowStatus
+from ._sys_db import GetWorkflowsInput, WorkflowStatus, WorkflowStatusString

 __all__ = [
     "ConfigFile",
--- dbos-0.25.0a16/dbos/_admin_server.py
+++ dbos-0.26.0/dbos/_admin_server.py
@@ -7,8 +7,10 @@ from functools import partial
 from http.server import BaseHTTPRequestHandler, ThreadingHTTPServer
 from typing import TYPE_CHECKING, Any, List, TypedDict

+from ._error import DBOSException
 from ._logger import dbos_logger
 from ._recovery import recover_pending_workflows
+from ._utils import GlobalParams

 if TYPE_CHECKING:
     from ._dbos import DBOS
@@ -21,6 +23,7 @@ _workflow_queues_metadata_path = "/dbos-workflow-queues-metadata"
 # /workflows/:workflow_id/resume
 # /workflows/:workflow_id/restart
 # /workflows/:workflow_id/steps
+# /workflows/:workflow_id/fork


 class AdminServer:
@@ -44,6 +47,7 @@ class AdminServer:
 class AdminRequestHandler(BaseHTTPRequestHandler):
     def __init__(self, dbos: DBOS, *args: Any, **kwargs: Any) -> None:
         self.dbos = dbos
+        self.is_deactivated = False
         super().__init__(*args, **kwargs)

     def _end_headers(self) -> None:
@@ -59,10 +63,14 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
             self._end_headers()
             self.wfile.write("healthy".encode("utf-8"))
         elif self.path == _deactivate_path:
+            if not self.is_deactivated:
+                dbos_logger.info(
+                    f"Deactivating DBOS executor {GlobalParams.executor_id} with version {GlobalParams.app_version}. This executor will complete existing workflows but will not start new workflows."
+                )
+                self.is_deactivated = True
             # Stop all scheduled workflows, queues, and kafka loops
             for event in self.dbos.stop_events:
                 event.set()
-
             self.send_response(200)
             self._end_headers()
             self.wfile.write("deactivated".encode("utf-8"))
@@ -117,6 +125,9 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
         restart_match = re.match(
             r"^/workflows/(?P<workflow_id>[^/]+)/restart$", self.path
         )
+        fork_match = re.match(
+            r"^/workflows/(?P<workflow_id>[^/]+)/fork$", self.path
+        )
         resume_match = re.match(
             r"^/workflows/(?P<workflow_id>[^/]+)/resume$", self.path
         )
@@ -124,7 +135,23 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
             r"^/workflows/(?P<workflow_id>[^/]+)/cancel$", self.path
         )

-        if restart_match:
+        if fork_match:
+            workflow_id = fork_match.group("workflow_id")
+            try:
+                data = json.loads(post_data.decode("utf-8"))
+                start_step: int = data.get("start_step", 1)
+                self._handle_fork(workflow_id, start_step)
+            except (json.JSONDecodeError, AttributeError) as e:
+                self.send_response(500)
+                self.send_header("Content-Type", "application/json")
+                self.end_headers()
+                self.wfile.write(
+                    json.dumps({"error": f"Invalid JSON input: {str(e)}"}).encode(
+                        "utf-8"
+                    )
+                )
+                return
+        elif restart_match:
             workflow_id = restart_match.group("workflow_id")
             self._handle_restart(workflow_id)
         elif resume_match:
@@ -141,10 +168,33 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
         return  # Disable admin server request logging

     def _handle_restart(self, workflow_id: str) -> None:
-        self.dbos.restart_workflow(workflow_id)
-        print("Restarting workflow", workflow_id)
-        self.send_response(204)
-        self._end_headers()
+        try:
+            print(f"Restarting workflow {workflow_id}")
+            self.dbos.restart_workflow(workflow_id)
+            self.send_response(204)
+            self._end_headers()
+        except DBOSException as e:
+            print(f"Error restarting workflow: {e}")
+            self.send_response(500)
+            response_body = json.dumps({"error": str(e)}).encode("utf-8")
+            self.send_header("Content-Type", "application/json")
+            self.send_header("Content-Length", str(len(response_body)))
+            self.end_headers()
+            self.wfile.write(response_body)
+
+    def _handle_fork(self, workflow_id: str, start_step: int) -> None:
+        try:
+            self.dbos.fork_workflow(workflow_id, start_step)
+            self.send_response(204)
+            self._end_headers()
+        except DBOSException as e:
+            print(f"Error forking workflow: {e}")
+            self.send_response(500)
+            response_body = json.dumps({"error": str(e)}).encode("utf-8")
+            self.send_header("Content-Type", "application/json")
+            self.send_header("Content-Length", str(len(response_body)))
+            self.end_headers()
+            self.wfile.write(response_body)

     def _handle_resume(self, workflow_id: str) -> None:
         print("Resuming workflow", workflow_id)
--- dbos-0.25.0a16/dbos/_app_db.py
+++ dbos-0.26.0/dbos/_app_db.py
@@ -1,13 +1,16 @@
-from typing import Optional, TypedDict
+from typing import List, Optional, TypedDict

 import sqlalchemy as sa
 import sqlalchemy.dialects.postgresql as pg
+from sqlalchemy import inspect, text
 from sqlalchemy.exc import DBAPIError
 from sqlalchemy.orm import Session, sessionmaker

+from . import _serialization
 from ._dbos_config import ConfigFile, DatabaseConfig
-from ._error import DBOSWorkflowConflictIDError
+from ._error import DBOSUnexpectedStepError, DBOSWorkflowConflictIDError
 from ._schemas.application_database import ApplicationSchema
+from ._sys_db import StepInfo


 class TransactionResultInternal(TypedDict):
@@ -18,6 +21,7 @@ class TransactionResultInternal(TypedDict):
     txn_id: Optional[str]
     txn_snapshot: str
     executor_id: Optional[str]
+    function_name: Optional[str]


 class RecordedResult(TypedDict):
@@ -70,9 +74,12 @@ class ApplicationDatabase:
                 database["connectionTimeoutMillis"] / 1000
             )

+        pool_size = database.get("app_db_pool_size")
+        if pool_size is None:
+            pool_size = 20
         self.engine = sa.create_engine(
             app_db_url,
-            pool_size=database["app_db_pool_size"],
+            pool_size=pool_size,
             max_overflow=0,
             pool_timeout=30,
             connect_args=connect_args,
@@ -87,7 +94,30 @@ class ApplicationDatabase:
                 f"CREATE SCHEMA IF NOT EXISTS {ApplicationSchema.schema}"
             )
             conn.execute(schema_creation_query)
-        ApplicationSchema.metadata_obj.create_all(self.engine)
+
+        inspector = inspect(self.engine)
+        if not inspector.has_table(
+            "transaction_outputs", schema=ApplicationSchema.schema
+        ):
+            ApplicationSchema.metadata_obj.create_all(self.engine)
+        else:
+            columns = inspector.get_columns(
+                "transaction_outputs", schema=ApplicationSchema.schema
+            )
+            column_names = [col["name"] for col in columns]
+
+            if "function_name" not in column_names:
+                # Column missing, alter table to add it
+                with self.engine.connect() as conn:
+                    conn.execute(
+                        text(
+                            f"""
+                            ALTER TABLE {ApplicationSchema.schema}.transaction_outputs
+                            ADD COLUMN function_name TEXT NOT NULL DEFAULT '';
+                            """
+                        )
+                    )
+                    conn.commit()

     def destroy(self) -> None:
         self.engine.dispose()
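For illustration (not part of the diff): the inspector-based guard above performs a one-off, in-place schema upgrade. On PostgreSQL 9.6 and later the same effect can be expressed as a single idempotent statement. A sketch, assuming the application schema is named dbos and a hypothetical connection URL:

    import sqlalchemy as sa

    engine = sa.create_engine("postgresql://postgres:dbos@localhost:5432/app_db")
    with engine.begin() as conn:
        # Idempotent equivalent of the has_table/get_columns check plus ALTER TABLE.
        conn.execute(sa.text(
            "ALTER TABLE dbos.transaction_outputs "
            "ADD COLUMN IF NOT EXISTS function_name TEXT NOT NULL DEFAULT ''"
        ))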
@@ -108,6 +138,7 @@ class ApplicationDatabase:
                         executor_id=(
                             output["executor_id"] if output["executor_id"] else None
                         ),
+                        function_name=output["function_name"],
                     )
                 )
         except DBAPIError as dbapi_error:
@@ -133,6 +164,7 @@ class ApplicationDatabase:
                         executor_id=(
                             output["executor_id"] if output["executor_id"] else None
                         ),
+                        function_name=output["function_name"],
                     )
                 )
         except DBAPIError as dbapi_error:
@@ -142,21 +174,116 @@ class ApplicationDatabase:

     @staticmethod
     def check_transaction_execution(
-        session: Session, workflow_uuid: str, function_id: int
+        session: Session, workflow_id: str, function_id: int, function_name: str
     ) -> Optional[RecordedResult]:
         rows = session.execute(
             sa.select(
                 ApplicationSchema.transaction_outputs.c.output,
                 ApplicationSchema.transaction_outputs.c.error,
+                ApplicationSchema.transaction_outputs.c.function_name,
             ).where(
-                ApplicationSchema.transaction_outputs.c.workflow_uuid == workflow_uuid,
+                ApplicationSchema.transaction_outputs.c.workflow_uuid == workflow_id,
                 ApplicationSchema.transaction_outputs.c.function_id == function_id,
             )
         ).all()
         if len(rows) == 0:
             return None
+        output, error, recorded_function_name = rows[0][0], rows[0][1], rows[0][2]
+        if function_name != recorded_function_name:
+            raise DBOSUnexpectedStepError(
+                workflow_id=workflow_id,
+                step_id=function_id,
+                expected_name=function_name,
+                recorded_name=recorded_function_name,
+            )
         result: RecordedResult = {
-            "output": rows[0][0],
-            "error": rows[0][1],
+            "output": output,
+            "error": error,
         }
         return result
+
+    def get_transactions(self, workflow_uuid: str) -> List[StepInfo]:
+        with self.engine.begin() as conn:
+            rows = conn.execute(
+                sa.select(
+                    ApplicationSchema.transaction_outputs.c.function_id,
+                    ApplicationSchema.transaction_outputs.c.function_name,
+                    ApplicationSchema.transaction_outputs.c.output,
+                    ApplicationSchema.transaction_outputs.c.error,
+                ).where(
+                    ApplicationSchema.transaction_outputs.c.workflow_uuid
+                    == workflow_uuid,
+                )
+            ).all()
+        return [
+            StepInfo(
+                function_id=row[0],
+                function_name=row[1],
+                output=(
+                    _serialization.deserialize(row[2]) if row[2] is not None else row[2]
+                ),
+                error=(
+                    _serialization.deserialize_exception(row[3])
+                    if row[3] is not None
+                    else row[3]
+                ),
+                child_workflow_id=None,
+            )
+            for row in rows
+        ]
+
+    def get_max_function_id(self, workflow_uuid: str) -> Optional[int]:
+        with self.engine.begin() as conn:
+            max_function_id_row = conn.execute(
+                sa.select(
+                    sa.func.max(ApplicationSchema.transaction_outputs.c.function_id)
+                ).where(
+                    ApplicationSchema.transaction_outputs.c.workflow_uuid
+                    == workflow_uuid
+                )
+            ).fetchone()
+
+            max_function_id = max_function_id_row[0] if max_function_id_row else None
+
+            return max_function_id
+
+    def clone_workflow_transactions(
+        self, src_workflow_id: str, forked_workflow_id: str, start_step: int
+    ) -> None:
+        """
+        Copies all rows of dbos.transaction_outputs with function_id < start_step
+        from src_workflow_id into forked_workflow_id.
+        """
+
+        with self.engine.begin() as conn:
+
+            insert_stmt = sa.insert(ApplicationSchema.transaction_outputs).from_select(
+                [
+                    "workflow_uuid",
+                    "function_id",
+                    "output",
+                    "error",
+                    "txn_id",
+                    "txn_snapshot",
+                    "executor_id",
+                    "function_name",
+                ],
+                sa.select(
+                    sa.literal(forked_workflow_id).label("workflow_uuid"),
+                    ApplicationSchema.transaction_outputs.c.function_id,
+                    ApplicationSchema.transaction_outputs.c.output,
+                    ApplicationSchema.transaction_outputs.c.error,
+                    ApplicationSchema.transaction_outputs.c.txn_id,
+                    ApplicationSchema.transaction_outputs.c.txn_snapshot,
+                    ApplicationSchema.transaction_outputs.c.executor_id,
+                    ApplicationSchema.transaction_outputs.c.function_name,
+                ).where(
+                    (
+                        ApplicationSchema.transaction_outputs.c.workflow_uuid
+                        == src_workflow_id
+                    )
+                    & (ApplicationSchema.transaction_outputs.c.function_id < start_step)
+                ),
+            )
+
+            conn.execute(insert_stmt)
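For illustration (not part of the diff): from_select compiles to a single INSERT ... SELECT, so the copy happens entirely inside the database without round-tripping rows through Python. A standalone sketch of the same statement shape; the schema name (dbos), column types, and IDs are assumptions:

    import sqlalchemy as sa

    md = sa.MetaData(schema="dbos")
    t = sa.Table(
        "transaction_outputs",
        md,
        sa.Column("workflow_uuid", sa.Text),
        sa.Column("function_id", sa.Integer),
        sa.Column("output", sa.Text),
        sa.Column("error", sa.Text),
        sa.Column("txn_id", sa.Text),
        sa.Column("txn_snapshot", sa.Text),
        sa.Column("executor_id", sa.Text),
        sa.Column("function_name", sa.Text),
    )
    sel = sa.select(
        sa.literal("forked-id").label("workflow_uuid"),  # hypothetical fork ID
        t.c.function_id,
        t.c.output,
        t.c.error,
        t.c.txn_id,
        t.c.txn_snapshot,
        t.c.executor_id,
        t.c.function_name,
    ).where((t.c.workflow_uuid == "src-id") & (t.c.function_id < 5))
    stmt = sa.insert(t).from_select(list(t.columns.keys()), sel)
    # Prints one INSERT INTO dbos.transaction_outputs (...) SELECT ... WHERE ... statement.
    print(stmt.compile(compile_kwargs={"literal_binds": True}))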
--- dbos-0.25.0a16/dbos/_client.py
+++ dbos-0.26.0/dbos/_client.py
@@ -1,7 +1,10 @@
 import asyncio
 import sys
+import time
 import uuid
-from typing import Any, Generic, Optional, TypedDict, TypeVar
+from typing import Any, Generic, List, Optional, TypedDict, TypeVar
+
+from dbos._app_db import ApplicationDatabase

 if sys.version_info < (3, 11):
     from typing_extensions import NotRequired
@@ -14,18 +17,30 @@ from dbos._dbos_config import parse_database_url_to_dbconfig
 from dbos._error import DBOSNonExistentWorkflowError
 from dbos._registrations import DEFAULT_MAX_RECOVERY_ATTEMPTS
 from dbos._serialization import WorkflowInputs
-from dbos._sys_db import SystemDatabase, WorkflowStatusInternal, WorkflowStatusString
-from dbos._workflow_commands import WorkflowStatus, get_workflow
+from dbos._sys_db import (
+    StepInfo,
+    SystemDatabase,
+    WorkflowStatus,
+    WorkflowStatusInternal,
+    WorkflowStatusString,
+)
+from dbos._workflow_commands import (
+    fork_workflow,
+    get_workflow,
+    list_queued_workflows,
+    list_workflow_steps,
+    list_workflows,
+)

 R = TypeVar("R", covariant=True)  # A generic type for workflow return values


 class EnqueueOptions(TypedDict):
     workflow_name: str
-    workflow_class_name: NotRequired[str]
     queue_name: str
-    app_version: NotRequired[str]
     workflow_id: NotRequired[str]
+    app_version: NotRequired[str]
+    workflow_timeout: NotRequired[float]


 class WorkflowHandleClientPolling(Generic[R]):
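For illustration (not part of the diff): workflow_timeout is the new enqueue option in this release; it is given in seconds and stored as workflow_timeout_ms. A sketch of enqueueing with a timeout, assuming DBOSClient is importable from the package root, that enqueue returns a handle as in the DBOS client docs, and that the workflow, queue, and database names are hypothetical:

    from dbos import DBOSClient

    client = DBOSClient("postgresql://postgres:dbos@localhost:5432/app_db")
    options = {
        "workflow_name": "process_order",  # hypothetical workflow function
        "queue_name": "order_queue",       # hypothetical queue
        "workflow_timeout": 60.0,          # seconds; stored as workflow_timeout_ms
    }
    handle = client.enqueue(options, order_id=42)
    print(handle.get_result())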
@@ -41,7 +56,7 @@ class WorkflowHandleClientPolling(Generic[R]):
         res: R = self._sys_db.await_workflow_result(self.workflow_id)
         return res

-    def get_status(self) -> "WorkflowStatus":
+    def get_status(self) -> WorkflowStatus:
         status = get_workflow(self._sys_db, self.workflow_id, True)
         if status is None:
             raise DBOSNonExistentWorkflowError(self.workflow_id)
@@ -63,7 +78,7 @@ class WorkflowHandleClientAsyncPolling(Generic[R]):
         )
         return res

-    async def get_status(self) -> "WorkflowStatus":
+    async def get_status(self) -> WorkflowStatus:
         status = await asyncio.to_thread(
             get_workflow, self._sys_db, self.workflow_id, True
         )
@@ -78,6 +93,7 @@ class DBOSClient:
         if system_database is not None:
             db_config["sys_db_name"] = system_database
         self._sys_db = SystemDatabase(db_config)
+        self._app_db = ApplicationDatabase(db_config)

     def destroy(self) -> None:
         self._sys_db.destroy()
@@ -86,7 +102,6 @@ class DBOSClient:
         workflow_name = options["workflow_name"]
         queue_name = options["queue_name"]

-        workflow_class_name = options.get("workflow_class_name")
         app_version = options.get("app_version")
         max_recovery_attempts = options.get("max_recovery_attempts")
         if max_recovery_attempts is None:
@@ -94,12 +109,13 @@ class DBOSClient:
         workflow_id = options.get("workflow_id")
         if workflow_id is None:
             workflow_id = str(uuid.uuid4())
+        workflow_timeout = options.get("workflow_timeout", None)

         status: WorkflowStatusInternal = {
             "workflow_uuid": workflow_id,
             "status": WorkflowStatusString.ENQUEUED.value,
             "name": workflow_name,
-            "class_name": workflow_class_name,
+            "class_name": None,
             "queue_name": queue_name,
             "app_version": app_version,
             "config_name": None,
@@ -114,6 +130,10 @@ class DBOSClient:
             "executor_id": None,
             "recovery_attempts": None,
             "app_id": None,
+            "workflow_timeout_ms": (
+                int(workflow_timeout * 1000) if workflow_timeout is not None else None
+            ),
+            "workflow_deadline_epoch_ms": None,
         }

         inputs: WorkflowInputs = {
@@ -121,12 +141,9 @@ class DBOSClient:
             "kwargs": kwargs,
         }

-        wf_status = self._sys_db.insert_workflow_status(status)
-        self._sys_db.update_workflow_inputs(
-            workflow_id, _serialization.serialize_args(inputs)
+        self._sys_db.init_workflow(
+            status, _serialization.serialize_args(inputs), max_recovery_attempts=None
         )
-        if wf_status == WorkflowStatusString.ENQUEUED.value:
-            self._sys_db.enqueue(workflow_id, queue_name)
         return workflow_id

     def enqueue(
@@ -180,8 +197,13 @@ class DBOSClient:
             "recovery_attempts": None,
             "app_id": None,
             "app_version": None,
+            "workflow_timeout_ms": None,
+            "workflow_deadline_epoch_ms": None,
         }
-        self._sys_db.insert_workflow_status(status)
+        with self._sys_db.engine.begin() as conn:
+            self._sys_db.insert_workflow_status(
+                status, conn, max_recovery_attempts=None
+            )
         self._sys_db.send(status["workflow_uuid"], 0, destination_id, message, topic)

     async def send_async(
@@ -204,3 +226,141 @@ class DBOSClient:
         return await asyncio.to_thread(
             self.get_event, workflow_id, key, timeout_seconds
         )
+
+    def cancel_workflow(self, workflow_id: str) -> None:
+        self._sys_db.cancel_workflow(workflow_id)
+
+    async def cancel_workflow_async(self, workflow_id: str) -> None:
+        await asyncio.to_thread(self.cancel_workflow, workflow_id)
+
+    def resume_workflow(self, workflow_id: str) -> None:
+        self._sys_db.resume_workflow(workflow_id)
+
+    async def resume_workflow_async(self, workflow_id: str) -> None:
+        await asyncio.to_thread(self.resume_workflow, workflow_id)
+
+    def list_workflows(
+        self,
+        *,
+        workflow_ids: Optional[List[str]] = None,
+        status: Optional[str] = None,
+        start_time: Optional[str] = None,
+        end_time: Optional[str] = None,
+        name: Optional[str] = None,
+        app_version: Optional[str] = None,
+        user: Optional[str] = None,
+        limit: Optional[int] = None,
+        offset: Optional[int] = None,
+        sort_desc: bool = False,
+        workflow_id_prefix: Optional[str] = None,
+    ) -> List[WorkflowStatus]:
+        return list_workflows(
+            self._sys_db,
+            workflow_ids=workflow_ids,
+            status=status,
+            start_time=start_time,
+            end_time=end_time,
+            name=name,
+            app_version=app_version,
+            user=user,
+            limit=limit,
+            offset=offset,
+            sort_desc=sort_desc,
+            workflow_id_prefix=workflow_id_prefix,
+        )
+
+    async def list_workflows_async(
+        self,
+        *,
+        workflow_ids: Optional[List[str]] = None,
+        status: Optional[str] = None,
+        start_time: Optional[str] = None,
+        end_time: Optional[str] = None,
+        name: Optional[str] = None,
+        app_version: Optional[str] = None,
+        user: Optional[str] = None,
+        limit: Optional[int] = None,
+        offset: Optional[int] = None,
+        sort_desc: bool = False,
+    ) -> List[WorkflowStatus]:
+        return await asyncio.to_thread(
+            self.list_workflows,
+            workflow_ids=workflow_ids,
+            status=status,
+            start_time=start_time,
+            end_time=end_time,
+            name=name,
+            app_version=app_version,
+            user=user,
+            limit=limit,
+            offset=offset,
+            sort_desc=sort_desc,
+        )
+
+    def list_queued_workflows(
+        self,
+        *,
+        queue_name: Optional[str] = None,
+        status: Optional[str] = None,
+        start_time: Optional[str] = None,
+        end_time: Optional[str] = None,
+        name: Optional[str] = None,
+        limit: Optional[int] = None,
+        offset: Optional[int] = None,
+        sort_desc: bool = False,
+    ) -> List[WorkflowStatus]:
+        return list_queued_workflows(
+            self._sys_db,
+            queue_name=queue_name,
+            status=status,
+            start_time=start_time,
+            end_time=end_time,
+            name=name,
+            limit=limit,
+            offset=offset,
+            sort_desc=sort_desc,
+        )
+
+    async def list_queued_workflows_async(
+        self,
+        *,
+        queue_name: Optional[str] = None,
+        status: Optional[str] = None,
+        start_time: Optional[str] = None,
+        end_time: Optional[str] = None,
+        name: Optional[str] = None,
+        limit: Optional[int] = None,
+        offset: Optional[int] = None,
+        sort_desc: bool = False,
+    ) -> List[WorkflowStatus]:
+        return await asyncio.to_thread(
+            self.list_queued_workflows,
+            queue_name=queue_name,
+            status=status,
+            start_time=start_time,
+            end_time=end_time,
+            name=name,
+            limit=limit,
+            offset=offset,
+            sort_desc=sort_desc,
+        )
+
+    def list_workflow_steps(self, workflow_id: str) -> List[StepInfo]:
+        return list_workflow_steps(self._sys_db, self._app_db, workflow_id)
+
+    async def list_workflow_steps_async(self, workflow_id: str) -> List[StepInfo]:
+        return await asyncio.to_thread(self.list_workflow_steps, workflow_id)
+
+    def fork_workflow(self, workflow_id: str, start_step: int) -> WorkflowHandle[R]:
+        forked_workflow_id = fork_workflow(
+            self._sys_db, self._app_db, workflow_id, start_step
+        )
+        return WorkflowHandleClientPolling[R](forked_workflow_id, self._sys_db)
+
+    async def fork_workflow_async(
+        self, workflow_id: str, start_step: int
+    ) -> WorkflowHandleAsync[R]:
+        forked_workflow_id = await asyncio.to_thread(
+            fork_workflow, self._sys_db, self._app_db, workflow_id, start_step
+        )
+        return WorkflowHandleClientAsyncPolling[R](forked_workflow_id, self._sys_db)
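For illustration (not part of the diff): together these methods give external processes the same workflow-management surface the DBOS runtime has. A short sketch reusing the client from the previous example; workflow IDs are hypothetical, and the printed WorkflowStatus attributes are assumed from the dbos._sys_db definition:

    # Inspect recent workflows and their recorded steps.
    for wf in client.list_workflows(limit=10, sort_desc=True):
        print(wf.workflow_id, wf.status)
    steps = client.list_workflow_steps("my-workflow-id")

    # Cancel, resume, or fork from a given step.
    client.cancel_workflow("my-workflow-id")
    client.resume_workflow("my-workflow-id")
    forked = client.fork_workflow("my-workflow-id", start_step=2)
    print(forked.get_result())  # polling handle; blocks until the fork completes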
--- dbos-0.25.0a16/dbos/_conductor/conductor.py
+++ dbos-0.26.0/dbos/_conductor/conductor.py
@@ -67,7 +67,7 @@ class ConductorWebsocket(threading.Thread):
                    recovery_message = p.RecoveryRequest.from_json(message)
                    success = True
                    try:
-                        self.dbos.recover_pending_workflows(
+                        self.dbos._recover_pending_workflows(
                            recovery_message.executor_ids
                        )
                    except Exception as e:
@@ -254,6 +254,7 @@ class ConductorWebsocket(threading.Thread):
                try:
                    step_info = list_workflow_steps(
                        self.dbos._sys_db,
+                        self.dbos._app_db,
                        list_steps_message.workflow_id,
                    )
                except Exception as e:

--- dbos-0.25.0a16/dbos/_conductor/protocol.py
+++ dbos-0.26.0/dbos/_conductor/protocol.py
@@ -3,8 +3,7 @@ from dataclasses import asdict, dataclass
 from enum import Enum
 from typing import List, Optional, Type, TypedDict, TypeVar

-from dbos._sys_db import StepInfo
-from dbos._workflow_commands import WorkflowStatus
+from dbos._sys_db import StepInfo, WorkflowStatus


 class MessageType(str, Enum):