dbos 0.26.0a13__tar.gz → 0.26.0a15__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {dbos-0.26.0a13 → dbos-0.26.0a15}/PKG-INFO +1 -1
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_admin_server.py +49 -5
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_app_db.py +56 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_core.py +3 -1
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_dbos.py +40 -7
- dbos-0.26.0a15/dbos/_event_loop.py +67 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_sys_db.py +48 -3
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_workflow_commands.py +1 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/cli/cli.py +49 -2
- {dbos-0.26.0a13 → dbos-0.26.0a15}/pyproject.toml +1 -1
- {dbos-0.26.0a13 → dbos-0.26.0a15}/tests/test_async.py +70 -4
- dbos-0.26.0a15/tests/test_workflow_management.py +591 -0
- dbos-0.26.0a13/tests/test_workflow_management.py +0 -218
- {dbos-0.26.0a13 → dbos-0.26.0a15}/LICENSE +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/README.md +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/__init__.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/__main__.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_classproperty.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_client.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_conductor/conductor.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_conductor/protocol.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_context.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_croniter.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_dbos_config.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_debug.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_docker_pg_helper.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_error.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_fastapi.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_flask.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_kafka.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_kafka_message.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_logger.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_migrations/env.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_migrations/script.py.mako +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_migrations/versions/04ca4f231047_workflow_queues_executor_id.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_migrations/versions/50f3227f0b4b_fix_job_queue.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_migrations/versions/5c361fc04708_added_system_tables.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_migrations/versions/a3b18ad34abe_added_triggers.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_migrations/versions/d76646551a6b_job_queue_limiter.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_migrations/versions/d76646551a6c_workflow_queue.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_migrations/versions/eab0cc1d9a14_job_queue.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_outcome.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_queue.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_recovery.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_registrations.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_request.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_roles.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_scheduler.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_schemas/__init__.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_schemas/application_database.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_schemas/system_database.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_serialization.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_templates/dbos-db-starter/README.md +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_templates/dbos-db-starter/__package/__init__.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_templates/dbos-db-starter/__package/main.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_templates/dbos-db-starter/__package/schema.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_templates/dbos-db-starter/alembic.ini +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_templates/dbos-db-starter/migrations/env.py.dbos +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_templates/dbos-db-starter/migrations/script.py.mako +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_tracer.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/_utils.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/cli/_github_init.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/cli/_template_init.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/dbos-config.schema.json +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/dbos/py.typed +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/tests/__init__.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/tests/atexit_no_ctor.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/tests/atexit_no_launch.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/tests/classdefs.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/tests/client_collateral.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/tests/client_worker.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/tests/conftest.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/tests/dupname_classdefs1.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/tests/dupname_classdefsa.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/tests/more_classdefs.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/tests/queuedworkflow.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/tests/test_admin_server.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/tests/test_classdecorators.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/tests/test_client.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/tests/test_concurrency.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/tests/test_config.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/tests/test_croniter.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/tests/test_dbos.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/tests/test_debug.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/tests/test_docker_secrets.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/tests/test_failures.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/tests/test_fastapi.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/tests/test_fastapi_roles.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/tests/test_flask.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/tests/test_kafka.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/tests/test_outcome.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/tests/test_package.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/tests/test_queue.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/tests/test_scheduler.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/tests/test_schema_migration.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/tests/test_singleton.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/tests/test_spans.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/tests/test_sqlalchemy.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/tests/test_workflow_introspection.py +0 -0
- {dbos-0.26.0a13 → dbos-0.26.0a15}/version/__init__.py +0 -0
dbos/_admin_server.py

@@ -7,6 +7,7 @@ from functools import partial
 from http.server import BaseHTTPRequestHandler, ThreadingHTTPServer
 from typing import TYPE_CHECKING, Any, List, TypedDict
 
+from ._error import DBOSException
 from ._logger import dbos_logger
 from ._recovery import recover_pending_workflows
 from ._utils import GlobalParams
@@ -22,6 +23,7 @@ _workflow_queues_metadata_path = "/dbos-workflow-queues-metadata"
 # /workflows/:workflow_id/resume
 # /workflows/:workflow_id/restart
 # /workflows/:workflow_id/steps
+# /workflows/:workflow_id/fork
 
 
 class AdminServer:
@@ -123,6 +125,9 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
         restart_match = re.match(
             r"^/workflows/(?P<workflow_id>[^/]+)/restart$", self.path
         )
+        fork_match = re.match(
+            r"^/workflows/(?P<workflow_id>[^/]+)/fork$", self.path
+        )
         resume_match = re.match(
             r"^/workflows/(?P<workflow_id>[^/]+)/resume$", self.path
         )
@@ -130,7 +135,23 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
             r"^/workflows/(?P<workflow_id>[^/]+)/cancel$", self.path
         )
 
-        if restart_match:
+        if fork_match:
+            workflow_id = fork_match.group("workflow_id")
+            try:
+                data = json.loads(post_data.decode("utf-8"))
+                start_step: int = data.get("start_step", 1)
+                self._handle_fork(workflow_id, start_step)
+            except (json.JSONDecodeError, AttributeError) as e:
+                self.send_response(500)
+                self.send_header("Content-Type", "application/json")
+                self.end_headers()
+                self.wfile.write(
+                    json.dumps({"error": f"Invalid JSON input: {str(e)}"}).encode(
+                        "utf-8"
+                    )
+                )
+                return
+        elif restart_match:
             workflow_id = restart_match.group("workflow_id")
             self._handle_restart(workflow_id)
         elif resume_match:
@@ -147,10 +168,33 @@
             return  # Disable admin server request logging
 
     def _handle_restart(self, workflow_id: str) -> None:
-        print(f"Restarting workflow {workflow_id}")
-        self.dbos.restart_workflow(workflow_id)
-        self.send_response(204)
-        self._end_headers()
+        try:
+            print(f"Restarting workflow {workflow_id}")
+            self.dbos.restart_workflow(workflow_id)
+            self.send_response(204)
+            self._end_headers()
+        except DBOSException as e:
+            print(f"Error restarting workflow: {e}")
+            self.send_response(500)
+            response_body = json.dumps({"error": str(e)}).encode("utf-8")
+            self.send_header("Content-Type", "application/json")
+            self.send_header("Content-Length", str(len(response_body)))
+            self.end_headers()
+            self.wfile.write(response_body)
+
+    def _handle_fork(self, workflow_id: str, start_step: int) -> None:
+        try:
+            self.dbos.fork_workflow(workflow_id, start_step)
+            self.send_response(204)
+            self._end_headers()
+        except DBOSException as e:
+            print(f"Error forking workflow: {e}")
+            self.send_response(500)
+            response_body = json.dumps({"error": str(e)}).encode("utf-8")
+            self.send_header("Content-Type", "application/json")
+            self.send_header("Content-Length", str(len(response_body)))
+            self.end_headers()
+            self.wfile.write(response_body)
 
     def _handle_resume(self, workflow_id: str) -> None:
         print("Resuming workflow", workflow_id)
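For illustration only: the handler above expects a POST to `/workflows/:workflow_id/fork` with a JSON body carrying `start_step`. A minimal client-side sketch, assuming an app whose admin server listens on the default port 3001; the workflow ID is a placeholder:

```python
import requests

# Placeholder ID; substitute the ID of a previously executed workflow.
workflow_id = "00000000-0000-0000-0000-000000000000"

# POST /workflows/:workflow_id/fork with a JSON body, as the handler expects.
response = requests.post(
    f"http://localhost:3001/workflows/{workflow_id}/fork",
    json={"start_step": 2},
    timeout=5,
)

# The handler replies 204 on success and 500 with a JSON error otherwise.
if response.status_code == 204:
    print(f"Workflow {workflow_id} forked")
else:
    print(response.json().get("error", "Unknown error"))
```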
dbos/_app_db.py

@@ -228,3 +228,59 @@ class ApplicationDatabase:
             )
             for row in rows
         ]
+
+    def get_max_function_id(self, workflow_uuid: str) -> Optional[int]:
+        with self.engine.begin() as conn:
+            max_function_id_row = conn.execute(
+                sa.select(
+                    sa.func.max(ApplicationSchema.transaction_outputs.c.function_id)
+                ).where(
+                    ApplicationSchema.transaction_outputs.c.workflow_uuid
+                    == workflow_uuid
+                )
+            ).fetchone()
+
+            max_function_id = max_function_id_row[0] if max_function_id_row else None
+
+            return max_function_id
+
+    def clone_workflow_transactions(
+        self, src_workflow_id: str, forked_workflow_id: str, start_step: int
+    ) -> None:
+        """
+        Copy all rows of dbos.transaction_outputs with function_id < start_step
+        from src_workflow_id to forked_workflow_id.
+        """
+
+        with self.engine.begin() as conn:
+
+            insert_stmt = sa.insert(ApplicationSchema.transaction_outputs).from_select(
+                [
+                    "workflow_uuid",
+                    "function_id",
+                    "output",
+                    "error",
+                    "txn_id",
+                    "txn_snapshot",
+                    "executor_id",
+                    "function_name",
+                ],
+                sa.select(
+                    sa.literal(forked_workflow_id).label("workflow_uuid"),
+                    ApplicationSchema.transaction_outputs.c.function_id,
+                    ApplicationSchema.transaction_outputs.c.output,
+                    ApplicationSchema.transaction_outputs.c.error,
+                    ApplicationSchema.transaction_outputs.c.txn_id,
+                    ApplicationSchema.transaction_outputs.c.txn_snapshot,
+                    ApplicationSchema.transaction_outputs.c.executor_id,
+                    ApplicationSchema.transaction_outputs.c.function_name,
+                ).where(
+                    (
+                        ApplicationSchema.transaction_outputs.c.workflow_uuid
+                        == src_workflow_id
+                    )
+                    & (ApplicationSchema.transaction_outputs.c.function_id < start_step)
+                ),
+            )
+
+            conn.execute(insert_stmt)
dbos/_core.py

@@ -365,7 +365,9 @@ def _execute_workflow_wthread(
         if isinstance(result, Immediate):
             return cast(Immediate[R], result)()
         else:
-            return asyncio.run(cast(Pending[R], result)())
+            return dbos._background_event_loop.submit_coroutine(
+                cast(Pending[R], result)()
+            )
     except Exception:
         dbos.logger.error(
             f"Exception encountered in asynchronous workflow: {traceback.format_exc()}"
dbos/_dbos.py

@@ -4,7 +4,6 @@ import asyncio
 import atexit
 import hashlib
 import inspect
-import json
 import os
 import sys
 import threading
@@ -31,7 +30,6 @@ from typing import (
 
 from opentelemetry.trace import Span
 
-from dbos import _serialization
 from dbos._conductor.conductor import ConductorWebsocket
 from dbos._utils import INTERNAL_QUEUE_NAME, GlobalParams
 from dbos._workflow_commands import (
@@ -112,6 +110,7 @@ from ._error import (
     DBOSException,
     DBOSNonExistentWorkflowError,
 )
+from ._event_loop import BackgroundEventLoop
 from ._logger import add_otlp_to_all_loggers, config_logger, dbos_logger, init_logger
 from ._sys_db import SystemDatabase
 from ._workflow_commands import WorkflowStatus, get_workflow
@@ -341,6 +340,7 @@ class DBOS:
         self.conductor_url: Optional[str] = conductor_url
         self.conductor_key: Optional[str] = conductor_key
         self.conductor_websocket: Optional[ConductorWebsocket] = None
+        self._background_event_loop: BackgroundEventLoop = BackgroundEventLoop()
 
         init_logger()
 
@@ -451,6 +451,7 @@ class DBOS:
         dbos_logger.info(f"Executor ID: {GlobalParams.executor_id}")
         dbos_logger.info(f"Application version: {GlobalParams.app_version}")
         self._executor_field = ThreadPoolExecutor(max_workers=64)
+        self._background_event_loop.start()
         self._sys_db_field = SystemDatabase(
             self.config["database"], debug_mode=debug_mode
         )
@@ -568,6 +569,7 @@
         self._initialized = False
         for event in self.stop_events:
             event.set()
+        self._background_event_loop.stop()
         if self._sys_db_field is not None:
             self._sys_db_field.destroy()
             self._sys_db_field = None
@@ -958,14 +960,45 @@
     def restart_workflow(cls, workflow_id: str) -> WorkflowHandle[Any]:
         """Restart a workflow with a new workflow ID"""
 
+        return cls.fork_workflow(workflow_id, 1)
+
+    @classmethod
+    def fork_workflow(
+        cls, workflow_id: str, start_step: int = 1
+    ) -> WorkflowHandle[Any]:
+        """Fork a workflow into a new workflow ID, starting from a given step"""
+
+        def get_max_function_id(workflow_uuid: str) -> int:
+            max_transactions = (
+                _get_dbos_instance()._app_db.get_max_function_id(workflow_uuid) or 0
+            )
+            max_operations = (
+                _get_dbos_instance()._sys_db.get_max_function_id(workflow_uuid) or 0
+            )
+            return max(max_transactions, max_operations)
+
+        max_function_id = get_max_function_id(workflow_id)
+        if max_function_id > 0 and start_step > max_function_id:
+            raise DBOSException(
+                f"Cannot fork workflow {workflow_id} at step {start_step}. The workflow has {max_function_id} steps."
+            )
+
         def fn() -> str:
-            dbos_logger.info(f"Restarting workflow: {workflow_id}")
-            return _get_dbos_instance()._sys_db.fork_workflow(workflow_id)
+            forked_workflow_id = str(uuid.uuid4())
+            dbos_logger.info(f"Forking workflow: {workflow_id} from step {start_step}")
+
+            _get_dbos_instance()._app_db.clone_workflow_transactions(
+                workflow_id, forked_workflow_id, start_step
+            )
+
+            return _get_dbos_instance()._sys_db.fork_workflow(
+                workflow_id, forked_workflow_id, start_step
+            )
 
-        forked_workflow_id = _get_dbos_instance()._sys_db.call_function_as_step(
-            fn, "DBOS.restartWorkflow"
+        new_id = _get_dbos_instance()._sys_db.call_function_as_step(
+            fn, "DBOS.forkWorkflow"
         )
-        return cls.retrieve_workflow(forked_workflow_id)
+        return cls.retrieve_workflow(new_id)
 
     @classmethod
     def list_workflows(
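Taken together: `fork_workflow` validates `start_step` against the highest recorded function ID, clones transaction outputs below that step, and hands off to the system database, which enqueues the fork under a fresh UUID. A minimal usage sketch from application code after launch; the workflow ID is a placeholder:

```python
from dbos import DBOS

# Placeholder ID of a previously executed workflow.
workflow_id = "00000000-0000-0000-0000-000000000000"

# Fork from step 3: steps 1 and 2 replay from recorded outputs,
# later steps re-execute on the forked workflow.
handle = DBOS.fork_workflow(workflow_id, start_step=3)
print(handle.get_result())  # Blocks until the forked run finishes
```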
dbos/_event_loop.py (new file)

@@ -0,0 +1,67 @@
+import asyncio
+import threading
+from typing import Any, Coroutine, Optional, TypeVar
+
+
+class BackgroundEventLoop:
+    """
+    This is the event loop to which DBOS submits any coroutines that are not started from within an event loop.
+    In particular, coroutines submitted to queues (such as from scheduled workflows) run on this event loop.
+    """
+
+    def __init__(self) -> None:
+        self._loop: Optional[asyncio.AbstractEventLoop] = None
+        self._thread: Optional[threading.Thread] = None
+        self._running = False
+        self._ready = threading.Event()
+
+    def start(self) -> None:
+        if self._running:
+            return
+
+        self._thread = threading.Thread(target=self._run_event_loop, daemon=True)
+        self._thread.start()
+        self._ready.wait()  # Wait until the loop is running
+
+    def stop(self) -> None:
+        if not self._running or self._loop is None or self._thread is None:
+            return
+
+        asyncio.run_coroutine_threadsafe(self._shutdown(), self._loop)
+        self._thread.join()
+        self._running = False
+
+    def _run_event_loop(self) -> None:
+        self._loop = asyncio.new_event_loop()
+        asyncio.set_event_loop(self._loop)
+
+        self._running = True
+        self._ready.set()  # Signal that the loop is ready
+
+        try:
+            self._loop.run_forever()
+        finally:
+            self._loop.close()
+
+    async def _shutdown(self) -> None:
+        if self._loop is None:
+            raise RuntimeError("Event loop not started")
+        tasks = [
+            task
+            for task in asyncio.all_tasks(self._loop)
+            if task is not asyncio.current_task(self._loop)
+        ]
+
+        for task in tasks:
+            task.cancel()
+
+        await asyncio.gather(*tasks, return_exceptions=True)
+        self._loop.stop()
+
+    T = TypeVar("T")
+
+    def submit_coroutine(self, coro: Coroutine[Any, Any, T]) -> T:
+        """Submit a coroutine to the background event loop"""
+        if self._loop is None:
+            raise RuntimeError("Event loop not started")
+        return asyncio.run_coroutine_threadsafe(coro, self._loop).result()
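The design here is one long-lived event loop on a daemon thread, bridged from synchronous code with `asyncio.run_coroutine_threadsafe`; `submit_coroutine` blocks the calling thread until the coroutine completes. A standalone sketch of the same pattern, using a made-up coroutine for illustration:

```python
import asyncio

# Assumes the BackgroundEventLoop class defined above; `double` is a
# made-up coroutine used only for this example.
async def double(x: int) -> int:
    await asyncio.sleep(0.1)
    return x * 2

loop = BackgroundEventLoop()
loop.start()  # Spawns the daemon thread and waits until the loop is running
try:
    # Runs `double` on the background loop; this thread blocks on the result.
    assert loop.submit_coroutine(double(21)) == 42
finally:
    loop.stop()  # Cancels outstanding tasks and joins the thread
```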
dbos/_sys_db.py

@@ -489,15 +489,29 @@ class SystemDatabase:
             .values(status=WorkflowStatusString.ENQUEUED.value, recovery_attempts=0)
         )
 
-    def fork_workflow(self, original_workflow_id: str) -> str:
+    def get_max_function_id(self, workflow_uuid: str) -> Optional[int]:
+        with self.engine.begin() as conn:
+            max_function_id_row = conn.execute(
+                sa.select(
+                    sa.func.max(SystemSchema.operation_outputs.c.function_id)
+                ).where(SystemSchema.operation_outputs.c.workflow_uuid == workflow_uuid)
+            ).fetchone()
+
+            max_function_id = max_function_id_row[0] if max_function_id_row else None
+
+            return max_function_id
+
+    def fork_workflow(
+        self, original_workflow_id: str, forked_workflow_id: str, start_step: int = 1
+    ) -> str:
+
         status = self.get_workflow_status(original_workflow_id)
         if status is None:
             raise Exception(f"Workflow {original_workflow_id} not found")
         inputs = self.get_workflow_inputs(original_workflow_id)
         if inputs is None:
             raise Exception(f"Workflow {original_workflow_id} not found")
-
-        forked_workflow_id = str(uuid.uuid4())
+
         with self.engine.begin() as c:
             # Create an entry for the forked workflow with the same
             # initial values as the original.
@@ -524,6 +538,37 @@
                     inputs=_serialization.serialize_args(inputs),
                 )
             )
+
+            if start_step > 1:
+
+                # Copy the original workflow's outputs into the forked workflow
+                insert_stmt = sa.insert(SystemSchema.operation_outputs).from_select(
+                    [
+                        "workflow_uuid",
+                        "function_id",
+                        "output",
+                        "error",
+                        "function_name",
+                        "child_workflow_id",
+                    ],
+                    sa.select(
+                        sa.literal(forked_workflow_id).label("workflow_uuid"),
+                        SystemSchema.operation_outputs.c.function_id,
+                        SystemSchema.operation_outputs.c.output,
+                        SystemSchema.operation_outputs.c.error,
+                        SystemSchema.operation_outputs.c.function_name,
+                        SystemSchema.operation_outputs.c.child_workflow_id,
+                    ).where(
+                        (
+                            SystemSchema.operation_outputs.c.workflow_uuid
+                            == original_workflow_id
+                        )
+                        & (SystemSchema.operation_outputs.c.function_id < start_step)
+                    ),
+                )
+
+                c.execute(insert_stmt)
+
             # Enqueue the forked workflow on the internal queue
             c.execute(
                 pg.insert(SystemSchema.workflow_queue).values(
dbos/cli/cli.py

@@ -433,13 +433,60 @@ def restart(
     ] = 3001,
 ) -> None:
     response = requests.post(
-        f"http://{host}:{port}/workflows/{uuid}/restart",
+        f"http://{host}:{port}/workflows/{uuid}/restart",
+        json=[],
+        timeout=5,
     )
 
     if response.status_code == 204:
         print(f"Workflow {uuid} has been restarted")
     else:
-        print(f"Failed to restart workflow {uuid}. Status code: {response.status_code}")
+        error_message = response.json().get("error", "Unknown error")
+        print(
+            f"Failed to restart workflow {uuid}. "
+            f"Status code: {response.status_code}. "
+            f"Error: {error_message}"
+        )
+
+
+@workflow.command(
+    help="Fork a workflow with a new ID, optionally starting from a given step"
+)
+def fork(
+    uuid: Annotated[str, typer.Argument()],
+    host: Annotated[
+        typing.Optional[str],
+        typer.Option("--host", "-H", help="Specify the admin host"),
+    ] = "localhost",
+    port: Annotated[
+        typing.Optional[int],
+        typer.Option("--port", "-p", help="Specify the admin port"),
+    ] = 3001,
+    step: Annotated[
+        typing.Optional[int],
+        typer.Option(
+            "--step",
+            "-s",
+            help="Restart from this step (default: first step)",
+        ),
+    ] = 1,
+) -> None:
+    print(f"Forking workflow {uuid} from step {step}")
+    response = requests.post(
+        f"http://{host}:{port}/workflows/{uuid}/fork",
+        json={"start_step": step},
+        timeout=5,
+    )
+
+    if response.status_code == 204:
+        print(f"Workflow {uuid} has been forked")
+    else:
+        error_message = response.json().get("error", "Unknown error")
+        print(
+            f"Failed to fork workflow {uuid}. "
+            f"Status code: {response.status_code}. "
+            f"Error: {error_message}"
+        )
 
 
 @queue.command(name="list", help="List enqueued functions for your application")
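Assuming the `workflow` Typer group shown here is mounted on the `dbos` CLI as in prior releases, the new subcommand would be invoked as `dbos workflow fork <workflow-id> --step 2`, which issues the same POST to the admin server that the requests example earlier in this diff demonstrates.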
tests/test_async.py

@@ -7,8 +7,8 @@ import pytest
 import sqlalchemy as sa
 
 # Public API
-from dbos import DBOS, SetWorkflowID
-from dbos._dbos import WorkflowHandleAsync
+from dbos import DBOS, Queue, SetWorkflowID
+from dbos._dbos import WorkflowHandle, WorkflowHandleAsync
 from dbos._dbos_config import ConfigFile
 from dbos._error import DBOSException
 
@@ -56,6 +56,15 @@ async def test_async_workflow(dbos: DBOS) -> None:
     assert step_counter == 1
     assert txn_counter == 1
 
+    # Test DBOS.start_workflow_async
+    handle = await DBOS.start_workflow_async(test_workflow, "alice", "bob")
+    assert (await handle.get_result()) == "alicetxn21bobstep2"
+
+    # Test DBOS.start_workflow. Not recommended for async workflows,
+    # but needed for backwards compatibility.
+    sync_handle = DBOS.start_workflow(test_workflow, "alice", "bob")
+    assert sync_handle.get_result() == "alicetxn31bobstep3"  # type: ignore
+
 
 @pytest.mark.asyncio
 async def test_async_step(dbos: DBOS) -> None:
@@ -160,10 +169,11 @@ async def test_send_recv_async(dbos: DBOS) -> None:
     none_uuid = str(uuid.uuid4())
     none_handle = None
     with SetWorkflowID(none_uuid):
-        none_handle = dbos.start_workflow(test_recv_timeout, 10.0)
+        none_handle = await dbos.start_workflow_async(test_recv_timeout, 10.0)
     await test_send_none(none_uuid)
     begin_time = time.time()
-    assert none_handle.get_result() is None  # type: ignore
+    result = await none_handle.get_result()  # type: ignore
+    assert result is None
     duration = time.time() - begin_time
     assert duration < 1.0  # None is from the received message, not from the timeout.
@@ -400,3 +410,59 @@ async def test_retrieve_workflow_async(dbos: DBOS) -> None:
     wfstatus = await handle.get_status()
     assert wfstatus.status == "SUCCESS"
     assert wfstatus.workflow_id == wfuuid
+
+
+def test_unawaited_workflow(dbos: DBOS) -> None:
+    input = 5
+    child_id = str(uuid.uuid4())
+    queue = Queue("test_queue")
+
+    @DBOS.workflow()
+    async def child_workflow(x: int) -> int:
+        await asyncio.sleep(0.1)
+        return x
+
+    @DBOS.workflow()
+    async def parent_workflow(x: int) -> None:
+        with SetWorkflowID(child_id):
+            await DBOS.start_workflow_async(child_workflow, x)
+
+    assert queue.enqueue(parent_workflow, input).get_result() is None
+    handle: WorkflowHandle[int] = DBOS.retrieve_workflow(
+        child_id, existing_workflow=False
+    )
+    assert handle.get_result() == 5
+
+
+def test_unawaited_workflow_exception(dbos: DBOS) -> None:
+    child_id = str(uuid.uuid4())
+    queue = Queue("test_queue")
+
+    @DBOS.workflow()
+    async def child_workflow(s: str) -> int:
+        await asyncio.sleep(0.1)
+        raise Exception(s)
+
+    @DBOS.workflow()
+    async def parent_workflow(s: str) -> None:
+        with SetWorkflowID(child_id):
+            await DBOS.start_workflow_async(child_workflow, s)
+
+    # Verify the unawaited child properly throws an exception
+    input = "alice"
+    assert queue.enqueue(parent_workflow, input).get_result() is None
+    handle: WorkflowHandle[int] = DBOS.retrieve_workflow(
+        child_id, existing_workflow=False
+    )
+    with pytest.raises(Exception) as exc_info:
+        handle.get_result()
+    assert input in str(exc_info.value)
+
+    # Verify it works if run again
+    input = "bob"
+    child_id = str(uuid.uuid4())
+    assert queue.enqueue(parent_workflow, input).get_result() is None
+    handle = DBOS.retrieve_workflow(child_id, existing_workflow=False)
+    with pytest.raises(Exception) as exc_info:
+        handle.get_result()
+    assert input in str(exc_info.value)