dbos 0.26.0a3__tar.gz → 0.26.0a5__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {dbos-0.26.0a3 → dbos-0.26.0a5}/PKG-INFO +1 -1
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_context.py +4 -2
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_core.py +17 -25
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_dbos.py +21 -8
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_kafka.py +1 -1
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_queue.py +2 -1
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_recovery.py +1 -1
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_scheduler.py +1 -1
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_sys_db.py +58 -7
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_utils.py +2 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/pyproject.toml +1 -1
- {dbos-0.26.0a3 → dbos-0.26.0a5}/tests/test_admin_server.py +2 -1
- {dbos-0.26.0a3 → dbos-0.26.0a5}/tests/test_failures.py +1 -8
- {dbos-0.26.0a3 → dbos-0.26.0a5}/tests/test_scheduler.py +2 -3
- dbos-0.26.0a5/tests/test_workflow_management.py +212 -0
- dbos-0.26.0a3/tests/test_workflow_cancel.py +0 -145
- {dbos-0.26.0a3 → dbos-0.26.0a5}/LICENSE +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/README.md +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/__init__.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/__main__.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_admin_server.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_app_db.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_classproperty.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_client.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_cloudutils/authentication.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_cloudutils/cloudutils.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_cloudutils/databases.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_conductor/conductor.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_conductor/protocol.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_croniter.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_db_wizard.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_dbos_config.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_debug.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_error.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_fastapi.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_flask.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_kafka_message.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_logger.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_migrations/env.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_migrations/script.py.mako +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_migrations/versions/04ca4f231047_workflow_queues_executor_id.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_migrations/versions/50f3227f0b4b_fix_job_queue.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_migrations/versions/5c361fc04708_added_system_tables.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_migrations/versions/a3b18ad34abe_added_triggers.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_migrations/versions/d76646551a6b_job_queue_limiter.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_migrations/versions/d76646551a6c_workflow_queue.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_migrations/versions/eab0cc1d9a14_job_queue.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_outcome.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_registrations.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_request.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_roles.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_schemas/__init__.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_schemas/application_database.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_schemas/system_database.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_serialization.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_templates/dbos-db-starter/README.md +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_templates/dbos-db-starter/__package/__init__.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_templates/dbos-db-starter/__package/main.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_templates/dbos-db-starter/__package/schema.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_templates/dbos-db-starter/alembic.ini +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_templates/dbos-db-starter/migrations/env.py.dbos +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_templates/dbos-db-starter/migrations/script.py.mako +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_tracer.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_workflow_commands.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/cli/_github_init.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/cli/_template_init.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/cli/cli.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/dbos-config.schema.json +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/py.typed +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/tests/__init__.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/tests/atexit_no_ctor.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/tests/atexit_no_launch.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/tests/classdefs.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/tests/client_collateral.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/tests/client_worker.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/tests/conftest.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/tests/more_classdefs.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/tests/queuedworkflow.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/tests/test_async.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/tests/test_classdecorators.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/tests/test_client.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/tests/test_concurrency.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/tests/test_config.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/tests/test_croniter.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/tests/test_dbos.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/tests/test_dbwizard.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/tests/test_debug.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/tests/test_docker_secrets.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/tests/test_fastapi.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/tests/test_fastapi_roles.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/tests/test_flask.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/tests/test_kafka.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/tests/test_outcome.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/tests/test_package.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/tests/test_queue.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/tests/test_schema_migration.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/tests/test_singleton.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/tests/test_spans.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/tests/test_sqlalchemy.py +0 -0
- /dbos-0.26.0a3/tests/test_workflow_cmds.py → /dbos-0.26.0a5/tests/test_workflow_introspection.py +0 -0
- {dbos-0.26.0a3 → dbos-0.26.0a5}/version/__init__.py +0 -0
{dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_context.py

@@ -195,8 +195,10 @@ class DBOSContext:
     def end_handler(self, exc_value: Optional[BaseException]) -> None:
         self._end_span(exc_value)

-    def get_current_span(self) -> Span:
-        return self.spans[-1]
+    def get_current_span(self) -> Optional[Span]:
+        if len(self.spans):
+            return self.spans[-1]
+        return None

     def _start_span(self, attributes: TracedAttributes) -> None:
         attributes["operationUUID"] = (
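Since `get_current_span` can now return `None`, call sites must guard before recording span events. A minimal sketch of the new calling convention (the helper function is hypothetical, not part of the package):

    from typing import Optional

    from opentelemetry.trace import Span

    def add_event_if_traced(ctx, name: str) -> None:
        # Hypothetical helper: get_current_span() may return None when no
        # span is active, so guard before emitting events -- the pattern
        # this diff applies in _core.py below.
        span: Optional[Span] = ctx.get_current_span()
        if span is not None:
            span.add_event(name)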
{dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_core.py

@@ -398,9 +398,7 @@ async def _execute_workflow_async(
        raise


-def execute_workflow_by_id(
-    dbos: "DBOS", workflow_id: str, startNew: bool = False
-) -> "WorkflowHandle[Any]":
+def execute_workflow_by_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[Any]":
     status = dbos._sys_db.get_workflow_status(workflow_id)
     if not status:
         raise DBOSRecoveryError(workflow_id, "Workflow status not found")
@@ -441,7 +439,7 @@ def execute_workflow_by_id(
         class_object = dbos._registry.class_info_map[class_name]
         inputs["args"] = (class_object,) + inputs["args"]

-    if not startNew:
+    with SetWorkflowID(workflow_id):
         return start_workflow(
             dbos,
             wf_func,
@@ -450,16 +448,6 @@ def execute_workflow_by_id(
             *inputs["args"],
             **inputs["kwargs"],
         )
-    else:
-        with SetWorkflowID(workflow_id):
-            return start_workflow(
-                dbos,
-                wf_func,
-                status["queue_name"],
-                True,
-                *inputs["args"],
-                **inputs["kwargs"],
-            )


 def _get_new_wf() -> tuple[str, DBOSContext]:
@@ -892,10 +880,12 @@ def decorate_transaction(
                except DBAPIError as dbapi_error:
                    if dbapi_error.orig.sqlstate == "40001":  # type: ignore
                        # Retry on serialization failure
-                        ctx.get_current_span().add_event(
-                            "Transaction Serialization Failure",
-                            {"retry_wait_seconds": retry_wait_seconds},
-                        )
+                        span = ctx.get_current_span()
+                        if span:
+                            span.add_event(
+                                "Transaction Serialization Failure",
+                                {"retry_wait_seconds": retry_wait_seconds},
+                            )
                        time.sleep(retry_wait_seconds)
                        retry_wait_seconds = min(
                            retry_wait_seconds * backoff_factor,
@@ -1004,13 +994,15 @@ def decorate_step(
                f"Step being automatically retried. (attempt {attempt + 1} of {attempts}). {traceback.format_exc()}"
            )
            ctx = assert_current_dbos_context()
-            ctx.get_current_span().add_event(
-                f"Step attempt {attempt} failed",
-                {
-                    "error": str(error),
-                    "retryIntervalSeconds": interval_seconds,
-                },
-            )
+            span = ctx.get_current_span()
+            if span:
+                span.add_event(
+                    f"Step attempt {attempt} failed",
+                    {
+                        "error": str(error),
+                        "retryIntervalSeconds": interval_seconds,
+                    },
+                )
            return min(
                interval_seconds * (backoff_rate**attempt),
                max_retry_interval_seconds,
{dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_dbos.py

@@ -33,7 +33,7 @@ from opentelemetry.trace import Span

 from dbos import _serialization
 from dbos._conductor.conductor import ConductorWebsocket
-from dbos._utils import GlobalParams
+from dbos._utils import INTERNAL_QUEUE_NAME, GlobalParams
 from dbos._workflow_commands import (
     WorkflowStatus,
     list_queued_workflows,
@@ -234,6 +234,13 @@ class DBOSRegistry:
         hasher.update(source.encode("utf-8"))
         return hasher.hexdigest()

+    def get_internal_queue(self) -> Queue:
+        """
+        Get or create the internal queue used for the DBOS scheduler, for Kafka, and for
+        programmatic resuming and restarting of workflows.
+        """
+        return Queue(INTERNAL_QUEUE_NAME)
+

 class DBOS:
     """
@@ -489,6 +496,9 @@ class DBOS:
            notification_listener_thread.start()
            self._background_threads.append(notification_listener_thread)

+            # Create the internal queue if it has not yet been created
+            self._registry.get_internal_queue()
+
            # Start the queue thread
            evt = threading.Event()
            self.stop_events.append(evt)
@@ -929,11 +939,6 @@ class DBOS:
         """Execute a workflow by ID (for recovery)."""
         return execute_workflow_by_id(_get_dbos_instance(), workflow_id)

-    @classmethod
-    def restart_workflow(cls, workflow_id: str) -> None:
-        """Execute a workflow by ID (for recovery)."""
-        execute_workflow_by_id(_get_dbos_instance(), workflow_id, True)
-
     @classmethod
     def recover_pending_workflows(
         cls, executor_ids: List[str] = ["local"]
@@ -954,7 +959,13 @@ class DBOS:
         dbos_logger.info(f"Resuming workflow: {workflow_id}")
         _get_dbos_instance()._sys_db.resume_workflow(workflow_id)
         _get_or_create_dbos_registry().clear_workflow_cancelled(workflow_id)
-        return
+        return cls.retrieve_workflow(workflow_id)
+
+    @classmethod
+    def restart_workflow(cls, workflow_id: str) -> WorkflowHandle[Any]:
+        """Restart a workflow with a new workflow ID"""
+        forked_workflow_id = _get_dbos_instance()._sys_db.fork_workflow(workflow_id)
+        return cls.retrieve_workflow(forked_workflow_id)

     @classmethod
     def list_workflows(
@@ -1083,7 +1094,9 @@ class DBOS:
     def span(cls) -> Span:
         """Return the tracing `Span` associated with the current context."""
         ctx = assert_current_dbos_context()
-        return ctx.get_current_span()
+        span = ctx.get_current_span()
+        assert span
+        return span

     @classproperty
     def request(cls) -> Optional["Request"]:
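Taken together, these changes reshape the workflow-management API: `DBOS.resume_workflow` now returns a workflow handle instead of `None`, and `DBOS.restart_workflow` forks the workflow under a fresh ID instead of re-executing it in place. A hypothetical usage sketch based only on the signatures above:

    from dbos import DBOS

    def resume_then_fork(workflow_id: str) -> None:
        # resume_workflow now returns a WorkflowHandle[Any] for the resumed run
        resumed = DBOS.resume_workflow(workflow_id)
        print(resumed.get_result())
        # restart_workflow forks the workflow under a new, randomly generated ID
        forked = DBOS.restart_workflow(workflow_id)
        assert forked.workflow_id != workflow_id
        print(forked.get_result())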
{dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_kafka.py

@@ -115,7 +115,7 @@ def kafka_consumer(
            _in_order_kafka_queues[topic] = queue
    else:
        global _kafka_queue
-        _kafka_queue =
+        _kafka_queue = dbosreg.get_internal_queue()
    stop_event = threading.Event()
    dbosreg.register_poller(
        stop_event, _kafka_consumer_loop, func, config, topics, stop_event, in_order
{dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_queue.py

@@ -82,7 +82,8 @@ def queue_thread(stop_event: threading.Event, dbos: "DBOS") -> None:
    while not stop_event.is_set():
        if stop_event.wait(timeout=1):
            return
-        for _, queue in dbos._registry.queue_info_map.items():
+        queues = dict(dbos._registry.queue_info_map)
+        for _, queue in queues.items():
            try:
                wf_ids = dbos._sys_db.start_queued_workflows(
                    queue, GlobalParams.executor_id, GlobalParams.app_version
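The added `dict(...)` call snapshots the registry's queue map before iterating it. Without the copy, a `Queue` registered by another thread mid-iteration would raise `RuntimeError: dictionary changed size during iteration` in the polling loop. A standalone illustration of the hazard, with made-up names:

    import threading

    queue_info_map = {"q1": object()}

    def poll_once() -> None:
        queues = dict(queue_info_map)  # snapshot, as in the diff
        for _, queue in queues.items():
            pass  # dispatch queued workflows for each queue here

    # Safe even if another thread registers "q2" during the poll:
    threading.Thread(target=lambda: queue_info_map.setdefault("q2", object())).start()
    poll_once()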
{dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_recovery.py

@@ -17,7 +17,7 @@ if TYPE_CHECKING:
 def _recover_workflow(
     dbos: "DBOS", workflow: GetPendingWorkflowsOutput
 ) -> "WorkflowHandle[Any]":
-    if workflow.queue_name
+    if workflow.queue_name:
         cleared = dbos._sys_db.clear_queue_assignment(workflow.workflow_uuid)
         if cleared:
             return dbos.retrieve_workflow(workflow.workflow_uuid)
{dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_scheduler.py

@@ -52,7 +52,7 @@ def scheduled(
        )

    global scheduler_queue
-    scheduler_queue =
+    scheduler_queue = dbosreg.get_internal_queue()
    stop_event = threading.Event()
    dbosreg.register_poller(stop_event, scheduler_loop, func, cron, stop_event)
    return func
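With this change the scheduler and the Kafka consumer stop constructing queues of their own and share the single internal queue, alongside programmatic resume and restart. Application code is unaffected; for context, a standard scheduled workflow (ordinary DBOS decorator usage, shown only to illustrate what now gets dispatched through the internal queue):

    from datetime import datetime

    from dbos import DBOS

    @DBOS.scheduled("* * * * *")  # cron syntax: run once per minute
    @DBOS.workflow()
    def run_every_minute(scheduled_time: datetime, actual_time: datetime) -> None:
        DBOS.logger.info(f"Scheduled at {scheduled_time}, started at {actual_time}")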
{dbos-0.26.0a3 → dbos-0.26.0a5}/dbos/_sys_db.py

@@ -4,6 +4,7 @@ import os
 import re
 import threading
 import time
+import uuid
 from enum import Enum
 from typing import (
     TYPE_CHECKING,
@@ -25,7 +26,7 @@ from alembic.config import Config
 from sqlalchemy.exc import DBAPIError
 from sqlalchemy.sql import func

-from dbos._utils import GlobalParams
+from dbos._utils import INTERNAL_QUEUE_NAME, GlobalParams

 from . import _serialization
 from ._context import get_local_dbos_context
@@ -447,13 +448,12 @@ class SystemDatabase:
            )
        )

-    def resume_workflow(
-        self,
-        workflow_id: str,
-    ) -> None:
+    def resume_workflow(self, workflow_id: str) -> None:
        if self._debug_mode:
            raise Exception("called resume_workflow in debug mode")
        with self.engine.begin() as c:
+            # Execute with snapshot isolation in case of concurrent calls on the same workflow
+            c.execute(sa.text("SET TRANSACTION ISOLATION LEVEL REPEATABLE READ"))
            # Check the status of the workflow. If it is complete, do nothing.
            row = c.execute(
                sa.select(
@@ -472,12 +472,63 @@ class SystemDatabase:
                    SystemSchema.workflow_queue.c.workflow_uuid == workflow_id
                )
            )
-            #
+            # Enqueue the workflow on the internal queue
+            c.execute(
+                pg.insert(SystemSchema.workflow_queue).values(
+                    workflow_uuid=workflow_id,
+                    queue_name=INTERNAL_QUEUE_NAME,
+                )
+            )
+            # Set the workflow's status to ENQUEUED and clear its recovery attempts.
            c.execute(
                sa.update(SystemSchema.workflow_status)
                .where(SystemSchema.workflow_status.c.workflow_uuid == workflow_id)
-                .values(status=WorkflowStatusString.
+                .values(status=WorkflowStatusString.ENQUEUED.value, recovery_attempts=0)
+            )
+
+    def fork_workflow(self, original_workflow_id: str) -> str:
+        status = self.get_workflow_status(original_workflow_id)
+        if status is None:
+            raise Exception(f"Workflow {original_workflow_id} not found")
+        inputs = self.get_workflow_inputs(original_workflow_id)
+        if inputs is None:
+            raise Exception(f"Workflow {original_workflow_id} not found")
+        # Generate a random ID for the forked workflow
+        forked_workflow_id = str(uuid.uuid4())
+        with self.engine.begin() as c:
+            # Create an entry for the forked workflow with the same
+            # initial values as the original.
+            c.execute(
+                pg.insert(SystemSchema.workflow_status).values(
+                    workflow_uuid=forked_workflow_id,
+                    status=WorkflowStatusString.ENQUEUED.value,
+                    name=status["name"],
+                    class_name=status["class_name"],
+                    config_name=status["config_name"],
+                    application_version=status["app_version"],
+                    application_id=status["app_id"],
+                    request=status["request"],
+                    authenticated_user=status["authenticated_user"],
+                    authenticated_roles=status["authenticated_roles"],
+                    assumed_role=status["assumed_role"],
+                    queue_name=INTERNAL_QUEUE_NAME,
+                )
+            )
+            # Copy the original workflow's inputs into the forked workflow
+            c.execute(
+                pg.insert(SystemSchema.workflow_inputs).values(
+                    workflow_uuid=forked_workflow_id,
+                    inputs=_serialization.serialize_args(inputs),
+                )
+            )
+            # Enqueue the forked workflow on the internal queue
+            c.execute(
+                pg.insert(SystemSchema.workflow_queue).values(
+                    workflow_uuid=forked_workflow_id,
+                    queue_name=INTERNAL_QUEUE_NAME,
+                )
            )
+        return forked_workflow_id

    def get_workflow_status(
        self, workflow_uuid: str
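Two design points stand out here. First, `resume_workflow` now re-enqueues the workflow on the internal queue and marks it `ENQUEUED`, running its check-then-write sequence under `REPEATABLE READ` so that concurrent resumes of the same workflow cannot both enqueue it. Second, `fork_workflow` never modifies the original workflow's rows; it copies status metadata and inputs into a brand-new `ENQUEUED` entry. A minimal SQLAlchemy sketch of the isolation-level pattern (the DSN is a placeholder and the transaction body is elided):

    import sqlalchemy as sa

    engine = sa.create_engine("postgresql://localhost/dbos_systemdb")  # placeholder

    def resume_exactly_once(workflow_id: str) -> None:
        with engine.begin() as c:
            # Snapshot isolation, per the diff's own comment, guards against
            # concurrent resumes: of two transactions touching the same
            # workflow row, one fails on the conflicting write instead of
            # both enqueueing the workflow.
            c.execute(sa.text("SET TRANSACTION ISOLATION LEVEL REPEATABLE READ"))
            # ... check workflow_status, insert into workflow_queue, set ENQUEUED ...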
{dbos-0.26.0a3 → dbos-0.26.0a5}/tests/test_admin_server.py

@@ -13,7 +13,7 @@ from requests.exceptions import ConnectionError
 from dbos import DBOS, ConfigFile, DBOSConfig, Queue, SetWorkflowID, _workflow_commands
 from dbos._schemas.system_database import SystemSchema
 from dbos._sys_db import SystemDatabase, WorkflowStatusString
-from dbos._utils import GlobalParams
+from dbos._utils import INTERNAL_QUEUE_NAME, GlobalParams


 def test_admin_endpoints(dbos: DBOS) -> None:
@@ -41,6 +41,7 @@ def test_admin_endpoints(dbos: DBOS) -> None:
    )
    assert response.status_code == 200
    assert response.json() == [
+        {"name": INTERNAL_QUEUE_NAME},
        {"name": "q1"},
        {"name": "q2", "concurrency": 1},
        {"name": "q3", "concurrency": 1, "workerConcurrency": 1},
{dbos-0.26.0a3 → dbos-0.26.0a5}/tests/test_failures.py

@@ -178,15 +178,8 @@ def test_dead_letter_queue(dbos: DBOS) -> None:
        dead_letter_workflow()
    assert exc_info.errisinstance(DBOSDeadLetterQueueError)

-    # Resume the workflow. Verify it
+    # Resume the workflow. Verify it can recover again without error.
    resumed_handle = dbos.resume_workflow(wfid)
-    assert (
-        handle.get_status().status
-        == resumed_handle.get_status().status
-        == WorkflowStatusString.PENDING.value
-    )
-
-    # Verify the workflow can recover again without error.
    DBOS.recover_pending_workflows()

    # Complete the blocked workflow
{dbos-0.26.0a3 → dbos-0.26.0a5}/tests/test_scheduler.py

@@ -201,9 +201,8 @@ def test_scheduler_oaoo(dbos: DBOS) -> None:
        else:
            time.sleep(1)

-    # Stop
-
-    event.set()
+    # Stop the scheduled workflow
+    dbos.stop_events[0].set()

    dbos._sys_db.update_workflow_status(
        {
dbos-0.26.0a5/tests/test_workflow_management.py (added)

@@ -0,0 +1,212 @@
+import threading
+import uuid
+from typing import Callable
+
+import pytest
+
+# Public API
+from dbos import DBOS, Queue, SetWorkflowID
+from dbos._dbos import DBOSConfiguredInstance
+from dbos._error import DBOSWorkflowCancelledError
+from dbos._utils import INTERNAL_QUEUE_NAME
+from tests.conftest import queue_entries_are_cleaned_up
+
+
+def test_cancel_resume(dbos: DBOS) -> None:
+    steps_completed = 0
+    workflow_event = threading.Event()
+    main_thread_event = threading.Event()
+    input = 5
+
+    @DBOS.step()
+    def step_one() -> None:
+        nonlocal steps_completed
+        steps_completed += 1
+
+    @DBOS.step()
+    def step_two() -> None:
+        nonlocal steps_completed
+        steps_completed += 1
+
+    @DBOS.workflow()
+    def simple_workflow(x: int) -> int:
+        step_one()
+        main_thread_event.set()
+        workflow_event.wait()
+        step_two()
+        return x
+
+    # Start the workflow and cancel it.
+    # Verify it stops after step one but before step two
+    wfid = str(uuid.uuid4())
+    with SetWorkflowID(wfid):
+        handle = DBOS.start_workflow(simple_workflow, input)
+    main_thread_event.wait()
+    DBOS.cancel_workflow(wfid)
+    workflow_event.set()
+    with pytest.raises(DBOSWorkflowCancelledError):
+        handle.get_result()
+    assert steps_completed == 1
+
+    # Resume the workflow. Verify it completes successfully.
+    handle = DBOS.resume_workflow(wfid)
+    assert handle.get_result() == input
+    assert steps_completed == 2
+
+    # Resume the workflow again. Verify it does not run again.
+    handle = DBOS.resume_workflow(wfid)
+    assert handle.get_result() == input
+    assert steps_completed == 2
+
+
+def test_cancel_resume_txn(dbos: DBOS) -> None:
+    txn_completed = 0
+    workflow_event = threading.Event()
+    main_thread_event = threading.Event()
+    input = 5
+
+    @DBOS.transaction()
+    def txn_one() -> None:
+        nonlocal txn_completed
+        txn_completed += 1
+
+    @DBOS.transaction()
+    def txn_two() -> None:
+        nonlocal txn_completed
+        txn_completed += 1
+
+    @DBOS.workflow()
+    def simple_workflow(x: int) -> int:
+        txn_one()
+        main_thread_event.set()
+        workflow_event.wait()
+        txn_two()
+        return x
+
+    # Start the workflow and cancel it.
+    # Verify it stops after step one but before step two
+    wfid = str(uuid.uuid4())
+    with SetWorkflowID(wfid):
+        handle = DBOS.start_workflow(simple_workflow, input)
+    main_thread_event.wait()
+    DBOS.cancel_workflow(wfid)
+    workflow_event.set()
+    with pytest.raises(DBOSWorkflowCancelledError):
+        handle.get_result()
+    assert txn_completed == 1
+
+    # Resume the workflow. Verify it completes successfully.
+    handle = DBOS.resume_workflow(wfid)
+    assert handle.get_result() == input
+    assert txn_completed == 2
+
+    # Resume the workflow again. Verify it does not run again.
+    handle = DBOS.resume_workflow(wfid)
+    assert handle.get_result() == input
+    assert txn_completed == 2
+
+
+def test_cancel_resume_queue(dbos: DBOS) -> None:
+    steps_completed = 0
+    workflow_event = threading.Event()
+    main_thread_event = threading.Event()
+    input = 5
+
+    queue = Queue("test_queue")
+
+    @DBOS.step()
+    def step_one() -> None:
+        nonlocal steps_completed
+        steps_completed += 1
+
+    @DBOS.step()
+    def step_two() -> None:
+        nonlocal steps_completed
+        steps_completed += 1
+
+    @DBOS.workflow()
+    def simple_workflow(x: int) -> int:
+        step_one()
+        main_thread_event.set()
+        workflow_event.wait()
+        step_two()
+        return x
+
+    # Start the workflow and cancel it.
+    # Verify it stops after step one but before step two
+    wfid = str(uuid.uuid4())
+    with SetWorkflowID(wfid):
+        handle = queue.enqueue(simple_workflow, input)
+    main_thread_event.wait()
+    DBOS.cancel_workflow(wfid)
+    workflow_event.set()
+    assert steps_completed == 1
+
+    # Resume the workflow. Verify it completes successfully.
+    handle = DBOS.resume_workflow(wfid)
+    assert handle.get_result() == input
+    assert steps_completed == 2
+
+    # Resume the workflow again. Verify it does not run again.
+    handle = DBOS.resume_workflow(wfid)
+    assert handle.get_result() == input
+    assert steps_completed == 2
+
+    # Verify nothing is left on any queue
+    assert queue_entries_are_cleaned_up(dbos)
+
+
+def test_restart(dbos: DBOS) -> None:
+    input = 2
+    multiplier = 5
+
+    @DBOS.dbos_class()
+    class TestClass(DBOSConfiguredInstance):
+
+        def __init__(self, multiplier: int) -> None:
+            self.multiply: Callable[[int], int] = lambda x: x * multiplier
+            super().__init__("test_class")
+
+        @DBOS.workflow()
+        def workflow(self, x: int) -> int:
+            return self.multiply(x)
+
+        @DBOS.step()
+        def step(self, x: int) -> int:
+            return self.multiply(x)
+
+    inst = TestClass(multiplier)
+
+    # Start the workflow, let it finish, restart it.
+    # Verify it returns the same result with a different workflow ID.
+    handle = DBOS.start_workflow(inst.workflow, input)
+    assert handle.get_result() == input * multiplier
+    forked_handle = DBOS.restart_workflow(handle.workflow_id)
+    assert forked_handle.workflow_id != handle.workflow_id
+    assert forked_handle.get_result() == input * multiplier
+
+    # Enqueue the workflow, let it finish, restart it.
+    # Verify it returns the same result with a different workflow ID and queue.
+    queue = Queue("test_queue")
+    handle = queue.enqueue(inst.workflow, input)
+    assert handle.get_result() == input * multiplier
+    forked_handle = DBOS.restart_workflow(handle.workflow_id)
+    assert forked_handle.workflow_id != handle.workflow_id
+    assert forked_handle.get_status().queue_name == INTERNAL_QUEUE_NAME
+    assert forked_handle.get_result() == input * multiplier
+
+    # Enqueue the step, let it finish, restart it.
+    # Verify it returns the same result with a different workflow ID and queue.
+    handle = queue.enqueue(inst.step, input)
+    assert handle.get_result() == input * multiplier
+    forked_handle = DBOS.restart_workflow(handle.workflow_id)
+    assert forked_handle.workflow_id != handle.workflow_id
+    assert forked_handle.get_status().queue_name != handle.get_status().queue_name
+    assert forked_handle.get_result() == input * multiplier
+
+    # Verify restarting a nonexistent workflow throws an exception
+    with pytest.raises(Exception):
+        DBOS.restart_workflow("fake_id")
+
+    # Verify nothing is left on any queue
+    assert queue_entries_are_cleaned_up(dbos)
dbos-0.26.0a3/tests/test_workflow_cancel.py (removed)

@@ -1,145 +0,0 @@
-import threading
-import time
-import uuid
-from datetime import datetime, timedelta, timezone
-
-# Public API
-from dbos import (
-    DBOS,
-    ConfigFile,
-    Queue,
-    SetWorkflowID,
-    WorkflowStatusString,
-    _workflow_commands,
-)
-
-
-def test_basic(dbos: DBOS, config: ConfigFile) -> None:
-
-    steps_completed = 0
-
-    @DBOS.step()
-    def step_one() -> None:
-        nonlocal steps_completed
-        steps_completed += 1
-        print("Step one completed!")
-
-    @DBOS.step()
-    def step_two() -> None:
-        nonlocal steps_completed
-        steps_completed += 1
-        print("Step two completed!")
-
-    @DBOS.workflow()
-    def simple_workflow() -> None:
-        step_one()
-        dbos.sleep(1)
-        step_two()
-        print("Executed Simple workflow")
-        return
-
-    # run the workflow
-    simple_workflow()
-    time.sleep(1)  # wait for the workflow to complete
-    assert (
-        steps_completed == 2
-    ), f"Expected steps_completed to be 2, but got {steps_completed}"
-
-
-def test_two_steps_cancel(dbos: DBOS, config: ConfigFile) -> None:
-
-    steps_completed = 0
-
-    @DBOS.step()
-    def step_one() -> None:
-        nonlocal steps_completed
-        steps_completed += 1
-        print("Step one completed!")
-
-    @DBOS.step()
-    def step_two() -> None:
-        nonlocal steps_completed
-        steps_completed += 1
-        print("Step two completed!")
-
-    @DBOS.workflow()
-    def simple_workflow() -> None:
-        step_one()
-        dbos.sleep(2)
-        step_two()
-        print("Executed Simple workflow")
-        return
-
-    # run the workflow
-    wfuuid = str(uuid.uuid4())
-    try:
-        with SetWorkflowID(wfuuid):
-            simple_workflow()
-
-        dbos.cancel_workflow(wfuuid)
-    except Exception as e:
-        # time.sleep(1)  # wait for the workflow to complete
-        assert (
-            steps_completed == 1
-        ), f"Expected steps_completed to be 1, but got {steps_completed}"
-
-    dbos.resume_workflow(wfuuid)
-    time.sleep(1)
-
-    assert (
-        steps_completed == 2
-    ), f"Expected steps_completed to be 2, but got {steps_completed}"
-
-
-def test_two_transactions_cancel(dbos: DBOS, config: ConfigFile) -> None:
-
-    tr_completed = 0
-
-    @DBOS.transaction()
-    def transaction_one() -> None:
-        nonlocal tr_completed
-        tr_completed += 1
-        print("Transaction one completed!")
-
-    @DBOS.transaction()
-    def transaction_two() -> None:
-        nonlocal tr_completed
-        tr_completed += 1
-        print("Step two completed!")
-
-    @DBOS.workflow()
-    def simple_workflow() -> None:
-        transaction_one()
-        dbos.sleep(2)
-        transaction_two()
-        print("Executed Simple workflow")
-        return
-
-    # run the workflow
-    wfuuid = str(uuid.uuid4())
-    try:
-        with SetWorkflowID(wfuuid):
-            simple_workflow()
-
-        dbos.cancel_workflow(wfuuid)
-    except Exception as e:
-        # time.sleep(1)  # wait for the workflow to complete
-        assert (
-            tr_completed == 1
-        ), f"Expected tr_completed to be 1, but got {tr_completed}"
-
-    dbos.resume_workflow(wfuuid)
-    time.sleep(1)
-
-    assert (
-        tr_completed == 2
-    ), f"Expected steps_completed to be 2, but got {tr_completed}"
-
-    # resume it a 2nd time
-
-    dbos.resume_workflow(wfuuid)
-    time.sleep(1)
-
-    assert (
-        tr_completed == 2
-    ), f"Expected steps_completed to be 2, but got {tr_completed}"