dbos 0.25.0a7__tar.gz → 0.25.0a9__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of dbos might be problematic.
- {dbos-0.25.0a7 → dbos-0.25.0a9}/PKG-INFO +2 -1
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/__init__.py +2 -1
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_conductor/conductor.py +1 -1
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_conductor/protocol.py +13 -7
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_context.py +45 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_core.py +6 -13
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_dbos.py +96 -107
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_outcome.py +6 -2
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_queue.py +4 -4
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_sys_db.py +1 -27
- dbos-0.25.0a9/dbos/_workflow_commands.py +175 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/pyproject.toml +3 -1
- {dbos-0.25.0a7 → dbos-0.25.0a9}/tests/test_admin_server.py +3 -3
- {dbos-0.25.0a7 → dbos-0.25.0a9}/tests/test_classdecorators.py +1 -1
- {dbos-0.25.0a7 → dbos-0.25.0a9}/tests/test_dbos.py +23 -2
- {dbos-0.25.0a7 → dbos-0.25.0a9}/tests/test_failures.py +24 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/tests/test_fastapi.py +1 -1
- {dbos-0.25.0a7 → dbos-0.25.0a9}/tests/test_fastapi_roles.py +3 -1
- {dbos-0.25.0a7 → dbos-0.25.0a9}/tests/test_workflow_cancel.py +2 -2
- {dbos-0.25.0a7 → dbos-0.25.0a9}/tests/test_workflow_cmds.py +49 -73
- dbos-0.25.0a7/dbos/_workflow_commands.py +0 -154
- {dbos-0.25.0a7 → dbos-0.25.0a9}/LICENSE +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/README.md +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/__main__.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_admin_server.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_app_db.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_classproperty.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_cloudutils/authentication.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_cloudutils/cloudutils.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_cloudutils/databases.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_croniter.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_db_wizard.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_dbos_config.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_debug.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_error.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_fastapi.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_flask.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_kafka.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_kafka_message.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_logger.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_migrations/env.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_migrations/script.py.mako +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_migrations/versions/04ca4f231047_workflow_queues_executor_id.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_migrations/versions/50f3227f0b4b_fix_job_queue.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_migrations/versions/5c361fc04708_added_system_tables.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_migrations/versions/a3b18ad34abe_added_triggers.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_migrations/versions/d76646551a6b_job_queue_limiter.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_migrations/versions/d76646551a6c_workflow_queue.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_migrations/versions/eab0cc1d9a14_job_queue.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_recovery.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_registrations.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_request.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_roles.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_scheduler.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_schemas/__init__.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_schemas/application_database.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_schemas/system_database.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_serialization.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_templates/dbos-db-starter/README.md +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_templates/dbos-db-starter/__package/__init__.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_templates/dbos-db-starter/__package/main.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_templates/dbos-db-starter/__package/schema.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_templates/dbos-db-starter/alembic.ini +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_templates/dbos-db-starter/migrations/env.py.dbos +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_templates/dbos-db-starter/migrations/script.py.mako +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_tracer.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_utils.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/cli/_github_init.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/cli/_template_init.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/cli/cli.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/dbos-config.schema.json +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/py.typed +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/tests/__init__.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/tests/atexit_no_ctor.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/tests/atexit_no_launch.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/tests/classdefs.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/tests/conftest.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/tests/more_classdefs.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/tests/queuedworkflow.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/tests/test_async.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/tests/test_concurrency.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/tests/test_config.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/tests/test_croniter.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/tests/test_dbwizard.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/tests/test_debug.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/tests/test_flask.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/tests/test_kafka.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/tests/test_outcome.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/tests/test_package.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/tests/test_queue.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/tests/test_scheduler.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/tests/test_schema_migration.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/tests/test_singleton.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/tests/test_spans.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/tests/test_sqlalchemy.py +0 -0
- {dbos-0.25.0a7 → dbos-0.25.0a9}/version/__init__.py +0 -0
{dbos-0.25.0a7 → dbos-0.25.0a9}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dbos
-Version: 0.25.0a7
+Version: 0.25.0a9
 Summary: Ultra-lightweight durable execution in Python
 Author-Email: "DBOS, Inc." <contact@dbos.dev>
 License: MIT
@@ -23,6 +23,7 @@ Requires-Dist: cryptography>=43.0.3
 Requires-Dist: rich>=13.9.4
 Requires-Dist: pyjwt>=2.10.1
 Requires-Dist: websockets>=15.0
+Requires-Dist: pyright>=1.1.398
 Description-Content-Type: text/markdown
 
 
{dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/__init__.py

@@ -1,10 +1,11 @@
 from . import _error as error
 from ._context import DBOSContextEnsure, DBOSContextSetAuth, SetWorkflowID
-from ._dbos import DBOS, DBOSConfiguredInstance, WorkflowHandle, WorkflowStatus
+from ._dbos import DBOS, DBOSConfiguredInstance, WorkflowHandle
 from ._dbos_config import ConfigFile, DBOSConfig, get_dbos_database_url, load_config
 from ._kafka_message import KafkaMessage
 from ._queue import Queue
 from ._sys_db import GetWorkflowsInput, WorkflowStatusString
+from ._workflow_commands import WorkflowStatus
 
 __all__ = [
     "ConfigFile",
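Note on the hunk above: `WorkflowStatus` is now defined in `dbos._workflow_commands` and re-exported from the package root, so user code that imports it from `dbos` should be unaffected. A minimal sketch (the workflow ID is a placeholder for illustration, not taken from this diff):

from typing import Optional

from dbos import DBOS, WorkflowStatus

# "example-workflow-id" is a hypothetical ID used only for illustration.
status: Optional[WorkflowStatus] = DBOS.get_workflow_status("example-workflow-id")
if status is not None:
    print(status.name, status.status)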
{dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_conductor/conductor.py

@@ -203,7 +203,7 @@ class ConductorWebsocket(threading.Thread):
                     info = get_workflow(
                         self.dbos._sys_db,
                         get_workflow_message.workflow_id,
-
+                        get_request=False,
                     )
                 except Exception as e:
                     error_message = f"Exception encountered when getting workflow {get_workflow_message.workflow_id}: {traceback.format_exc()}"
{dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_conductor/protocol.py

@@ -3,7 +3,7 @@ from dataclasses import asdict, dataclass
 from enum import Enum
 from typing import List, Optional, Type, TypedDict, TypeVar
 
-from dbos._workflow_commands import
+from dbos._workflow_commands import WorkflowStatus
 
 
 class MessageType(str, Enum):
@@ -141,27 +141,33 @@ class WorkflowsOutput:
     ExecutorID: Optional[str]
 
     @classmethod
-    def from_workflow_information(cls, info:
+    def from_workflow_information(cls, info: WorkflowStatus) -> "WorkflowsOutput":
         # Convert fields to strings as needed
         created_at_str = str(info.created_at) if info.created_at is not None else None
         updated_at_str = str(info.updated_at) if info.updated_at is not None else None
         inputs_str = str(info.input) if info.input is not None else None
         outputs_str = str(info.output) if info.output is not None else None
+        error_str = str(info.error) if info.error is not None else None
         request_str = str(info.request) if info.request is not None else None
+        roles_str = (
+            str(info.authenticated_roles)
+            if info.authenticated_roles is not None
+            else None
+        )
 
         return cls(
             WorkflowUUID=info.workflow_id,
             Status=info.status,
-            WorkflowName=info.
-            WorkflowClassName=info.
-            WorkflowConfigName=info.
+            WorkflowName=info.name,
+            WorkflowClassName=info.class_name,
+            WorkflowConfigName=info.config_name,
             AuthenticatedUser=info.authenticated_user,
             AssumedRole=info.assumed_role,
-            AuthenticatedRoles=
+            AuthenticatedRoles=roles_str,
             Input=inputs_str,
             Output=outputs_str,
             Request=request_str,
-            Error=
+            Error=error_str,
             CreatedAt=created_at_str,
             UpdatedAt=updated_at_str,
             QueueName=info.queue_name,
{dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_context.py

@@ -5,6 +5,7 @@ import os
 import uuid
 from contextlib import AbstractContextManager
 from contextvars import ContextVar
+from dataclasses import dataclass
 from enum import Enum
 from types import TracebackType
 from typing import List, Literal, Optional, Type, TypedDict
@@ -48,6 +49,23 @@ class TracedAttributes(TypedDict, total=False):
     authenticatedUserAssumedRole: Optional[str]
 
 
+@dataclass
+class StepStatus:
+    """
+    Status of a step execution.
+
+    Attributes:
+        step_id: The unique ID of this step in its workflow.
+        current_attempt: For steps with automatic retries, which attempt number (zero-indexed) is currently executing.
+        max_attempts: For steps with automatic retries, the maximum number of attempts that will be made before the step fails.
+
+    """
+
+    step_id: int
+    current_attempt: Optional[int]
+    max_attempts: Optional[int]
+
+
 class DBOSContext:
     def __init__(self) -> None:
         self.executor_id = GlobalParams.executor_id
@@ -73,6 +91,7 @@ class DBOSContext:
         self.authenticated_user: Optional[str] = None
         self.authenticated_roles: Optional[List[str]] = None
         self.assumed_role: Optional[str] = None
+        self.step_status: Optional[StepStatus] = None
 
     def create_child(self) -> DBOSContext:
         rv = DBOSContext()
@@ -150,10 +169,12 @@ class DBOSContext:
         attributes: TracedAttributes,
     ) -> None:
         self.curr_step_function_id = fid
+        self.step_status = StepStatus(fid, None, None)
         self._start_span(attributes)
 
     def end_step(self, exc_value: Optional[BaseException]) -> None:
         self.curr_step_function_id = -1
+        self.step_status = None
         self._end_span(exc_value)
 
     def start_transaction(
@@ -432,6 +453,30 @@ class EnterDBOSStep:
         return False  # Did not handle
 
 
+class EnterDBOSStepRetry:
+    def __init__(self, current_attempt: int, max_attempts: int) -> None:
+        self.current_attempt = current_attempt
+        self.max_attempts = max_attempts
+
+    def __enter__(self) -> None:
+        ctx = get_local_dbos_context()
+        if ctx is not None and ctx.step_status is not None:
+            ctx.step_status.current_attempt = self.current_attempt
+            ctx.step_status.max_attempts = self.max_attempts
+
+    def __exit__(
+        self,
+        exc_type: Optional[Type[BaseException]],
+        exc_value: Optional[BaseException],
+        traceback: Optional[TracebackType],
+    ) -> Literal[False]:
+        ctx = get_local_dbos_context()
+        if ctx is not None and ctx.step_status is not None:
+            ctx.step_status.current_attempt = None
+            ctx.step_status.max_attempts = None
+        return False  # Did not handle
+
+
 class EnterDBOSTransaction:
     def __init__(self, sqls: Session, attributes: TracedAttributes) -> None:
         self.sqls = sqls
{dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_core.py

@@ -81,13 +81,12 @@ from ._sys_db import (
 if TYPE_CHECKING:
     from ._dbos import (
         DBOS,
-        Workflow,
         WorkflowHandle,
         WorkflowHandleAsync,
-        WorkflowStatus,
         DBOSRegistry,
         IsolationLevel,
     )
+    from ._workflow_commands import WorkflowStatus
 
 from sqlalchemy.exc import DBAPIError, InvalidRequestError
 
@@ -304,7 +303,7 @@ def _get_wf_invoke_func(
 def _execute_workflow_wthread(
     dbos: "DBOS",
     status: WorkflowStatusInternal,
-    func: "
+    func: "Callable[P, R]",
     ctx: DBOSContext,
     *args: Any,
     **kwargs: Any,
@@ -335,7 +334,7 @@ def _execute_workflow_wthread(
 async def _execute_workflow_async(
     dbos: "DBOS",
     status: WorkflowStatusInternal,
-    func: "
+    func: "Callable[P, Coroutine[Any, Any, R]]",
     ctx: DBOSContext,
     *args: Any,
     **kwargs: Any,
@@ -449,7 +448,7 @@ def _get_new_wf() -> tuple[str, DBOSContext]:
 
 def start_workflow(
     dbos: "DBOS",
-    func: "
+    func: "Callable[P, Union[R, Coroutine[Any, Any, R]]]",
     queue_name: Optional[str],
     execute_workflow: bool,
     *args: P.args,
@@ -515,9 +514,6 @@ def start_workflow(
            or wf_status == WorkflowStatusString.SUCCESS.value
         )
     ):
-        dbos.logger.debug(
-            f"Workflow {new_wf_id} already completed with status {wf_status}. Directly returning a workflow handle."
-        )
         return WorkflowHandlePolling(new_wf_id, dbos)
 
     future = dbos._executor.submit(
@@ -534,7 +530,7 @@ def start_workflow(
 
 async def start_workflow_async(
     dbos: "DBOS",
-    func: "
+    func: "Callable[P, Coroutine[Any, Any, R]]",
     queue_name: Optional[str],
     execute_workflow: bool,
     *args: P.args,
@@ -605,9 +601,6 @@ async def start_workflow_async(
            or wf_status == WorkflowStatusString.SUCCESS.value
         )
     ):
-        dbos.logger.debug(
-            f"Workflow {new_wf_id} already completed with status {wf_status}. Directly returning a workflow handle."
-        )
         return WorkflowHandleAsyncPolling(new_wf_id, dbos)
 
     coro = _execute_workflow_async(dbos, status, func, new_wf_ctx, *args, **kwargs)
@@ -946,7 +939,7 @@ def decorate_step(
 
         def on_exception(attempt: int, error: BaseException) -> float:
             dbos.logger.warning(
-                f"Step being automatically retried. (attempt {attempt} of {attempts}). {traceback.format_exc()}"
+                f"Step being automatically retried. (attempt {attempt + 1} of {attempts}). {traceback.format_exc()}"
             )
             ctx = assert_current_dbos_context()
             ctx.get_current_span().add_event(
{dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_dbos.py

@@ -11,7 +11,6 @@ import threading
 import traceback
 import uuid
 from concurrent.futures import ThreadPoolExecutor
-from dataclasses import dataclass
 from logging import Logger
 from typing import (
     TYPE_CHECKING,
@@ -28,13 +27,18 @@ from typing import (
     TypeVar,
     Union,
     cast,
-    overload,
 )
 
 from opentelemetry.trace import Span
 
+from dbos import _serialization
 from dbos._conductor.conductor import ConductorWebsocket
 from dbos._utils import GlobalParams
+from dbos._workflow_commands import (
+    WorkflowStatus,
+    list_queued_workflows,
+    list_workflows,
+)
 
 from ._classproperty import classproperty
 from ._core import (
@@ -86,6 +90,7 @@ from ._admin_server import AdminServer
 from ._app_db import ApplicationDatabase
 from ._context import (
     EnterDBOSStep,
+    StepStatus,
     TracedAttributes,
     assert_current_dbos_context,
     get_local_dbos_context,
@@ -108,6 +113,7 @@ from ._error import (
 )
 from ._logger import add_otlp_to_all_loggers, config_logger, dbos_logger, init_logger
 from ._sys_db import SystemDatabase
+from ._workflow_commands import WorkflowStatus, get_workflow
 
 # Most DBOS functions are just any callable F, so decorators / wrappers work on F
 # There are cases where the parameters P and return value R should be separate
@@ -120,17 +126,6 @@ R = TypeVar("R", covariant=True)  # A generic type for workflow return values
 
 T = TypeVar("T")
 
-
-class DBOSCallProtocol(Protocol[P, R]):
-    __name__: str
-    __qualname__: str
-
-    def __call__(*args: P.args, **kwargs: P.kwargs) -> R: ...
-
-
-Workflow: TypeAlias = DBOSCallProtocol[P, R]
-
-
 IsolationLevel = Literal[
     "SERIALIZABLE",
     "REPEATABLE READ",
@@ -163,7 +158,7 @@ RegisteredJob = Tuple[
 
 class DBOSRegistry:
     def __init__(self) -> None:
-        self.workflow_info_map: dict[str,
+        self.workflow_info_map: dict[str, Callable[..., Any]] = {}
         self.function_type_map: dict[str, str] = {}
         self.class_info_map: dict[str, type] = {}
         self.instance_info_map: dict[str, object] = {}
@@ -707,7 +702,7 @@ class DBOS:
     @classmethod
     def start_workflow(
         cls,
-        func:
+        func: Callable[P, R],
         *args: P.args,
         **kwargs: P.kwargs,
     ) -> WorkflowHandle[R]:
@@ -717,7 +712,7 @@ class DBOS:
     @classmethod
     async def start_workflow_async(
         cls,
-        func:
+        func: Callable[P, Coroutine[Any, Any, R]],
         *args: P.args,
         **kwargs: P.kwargs,
     ) -> WorkflowHandleAsync[R]:
@@ -729,72 +724,39 @@ class DBOS:
     @classmethod
     def get_workflow_status(cls, workflow_id: str) -> Optional[WorkflowStatus]:
         """Return the status of a workflow execution."""
+        sys_db = _get_dbos_instance()._sys_db
         ctx = get_local_dbos_context()
         if ctx and ctx.is_within_workflow():
             ctx.function_id += 1
-
-
+            res = sys_db.check_operation_execution(ctx.workflow_id, ctx.function_id)
+            if res is not None:
+                if res["output"]:
+                    resstat: WorkflowStatus = _serialization.deserialize(res["output"])
+                    return resstat
+                else:
+                    raise DBOSException(
+                        "Workflow status record not found. This should not happen! \033[1m Hint: Check if your workflow is deterministic.\033[0m"
+                    )
+        stat = get_workflow(_get_dbos_instance()._sys_db, workflow_id, True)
+
+        if ctx and ctx.is_within_workflow():
+            sys_db.record_operation_result(
+                {
+                    "workflow_uuid": ctx.workflow_id,
+                    "function_id": ctx.function_id,
+                    "function_name": "DBOS.getStatus",
+                    "output": _serialization.serialize(stat),
+                    "error": None,
+                }
            )
-
-        stat = _get_dbos_instance()._sys_db.get_workflow_status(workflow_id)
-        if stat is None:
-            return None
-
-        return WorkflowStatus(
-            workflow_id=workflow_id,
-            status=stat["status"],
-            name=stat["name"],
-            executor_id=stat["executor_id"],
-            recovery_attempts=stat["recovery_attempts"],
-            class_name=stat["class_name"],
-            config_name=stat["config_name"],
-            queue_name=stat["queue_name"],
-            authenticated_user=stat["authenticated_user"],
-            assumed_role=stat["assumed_role"],
-            authenticated_roles=(
-                json.loads(stat["authenticated_roles"])
-                if stat["authenticated_roles"] is not None
-                else None
-            ),
-        )
+        return stat
 
     @classmethod
     async def get_workflow_status_async(
         cls, workflow_id: str
     ) -> Optional[WorkflowStatus]:
         """Return the status of a workflow execution."""
-
-        if ctx and ctx.is_within_workflow():
-            ctx.function_id += 1
-            stat = await asyncio.to_thread(
-                lambda: _get_dbos_instance()._sys_db.get_workflow_status_within_wf(
-                    workflow_id, ctx.workflow_id, ctx.function_id
-                )
-            )
-        else:
-            stat = await asyncio.to_thread(
-                lambda: _get_dbos_instance()._sys_db.get_workflow_status(workflow_id)
-            )
-        if stat is None:
-            return None
-
-        return WorkflowStatus(
-            workflow_id=workflow_id,
-            status=stat["status"],
-            name=stat["name"],
-            executor_id=stat["executor_id"],
-            recovery_attempts=stat["recovery_attempts"],
-            class_name=stat["class_name"],
-            config_name=stat["config_name"],
-            queue_name=stat["queue_name"],
-            authenticated_user=stat["authenticated_user"],
-            assumed_role=stat["assumed_role"],
-            authenticated_roles=(
-                json.loads(stat["authenticated_roles"])
-                if stat["authenticated_roles"] is not None
-                else None
-            ),
-        )
+        return await asyncio.to_thread(cls.get_workflow_status, workflow_id)
 
     @classmethod
     def retrieve_workflow(
@@ -994,6 +956,60 @@ class DBOS:
         _get_or_create_dbos_registry().clear_workflow_cancelled(workflow_id)
         return execute_workflow_by_id(_get_dbos_instance(), workflow_id, False)
 
+    @classmethod
+    def list_workflows(
+        cls,
+        *,
+        workflow_ids: Optional[List[str]] = None,
+        status: Optional[str] = None,
+        start_time: Optional[str] = None,
+        end_time: Optional[str] = None,
+        name: Optional[str] = None,
+        app_version: Optional[str] = None,
+        user: Optional[str] = None,
+        limit: Optional[int] = None,
+        offset: Optional[int] = None,
+        sort_desc: bool = False,
+    ) -> List[WorkflowStatus]:
+        return list_workflows(
+            _get_dbos_instance()._sys_db,
+            workflow_ids=workflow_ids,
+            status=status,
+            start_time=start_time,
+            end_time=end_time,
+            name=name,
+            app_version=app_version,
+            user=user,
+            limit=limit,
+            offset=offset,
+            sort_desc=sort_desc,
+        )
+
+    @classmethod
+    def list_queued_workflows(
+        cls,
+        *,
+        queue_name: Optional[str] = None,
+        status: Optional[str] = None,
+        start_time: Optional[str] = None,
+        end_time: Optional[str] = None,
+        name: Optional[str] = None,
+        limit: Optional[int] = None,
+        offset: Optional[int] = None,
+        sort_desc: bool = False,
+    ) -> List[WorkflowStatus]:
+        return list_queued_workflows(
+            _get_dbos_instance()._sys_db,
+            queue_name=queue_name,
+            status=status,
+            start_time=start_time,
+            end_time=end_time,
+            name=name,
+            limit=limit,
+            offset=offset,
+            sort_desc=sort_desc,
+        )
+
     @classproperty
     def logger(cls) -> Logger:
         """Return the DBOS `Logger` for the current context."""
@@ -1041,6 +1057,14 @@ class DBOS:
         ), "step_id is only available within a DBOS workflow."
         return ctx.function_id
 
+    @classproperty
+    def step_status(cls) -> StepStatus:
+        """Return the status of the currently executing step."""
+        ctx = assert_current_dbos_context()
+        assert ctx.is_step(), "step_status is only available within a DBOS step."
+        assert ctx.step_status is not None
+        return ctx.step_status
+
     @classproperty
     def parent_workflow_id(cls) -> str:
         """
@@ -1095,41 +1119,6 @@ class DBOS:
         ctx.authenticated_roles = authenticated_roles
 
 
-@dataclass
-class WorkflowStatus:
-    """
-    Status of workflow execution.
-
-    This captures the state of a workflow execution at a point in time.
-
-    Attributes:
-        workflow_id(str): The ID of the workflow execution
-        status(str): The status of the execution, from `WorkflowStatusString`
-        name(str): The workflow function name
-        executor_id(str): The ID of the executor running the workflow
-        class_name(str): For member functions, the name of the class containing the workflow function
-        config_name(str): For instance member functions, the name of the class instance for the execution
-        queue_name(str): For workflows that are or were queued, the queue name
-        authenticated_user(str): The user who invoked the workflow
-        assumed_role(str): The access role used by the user to allow access to the workflow function
-        authenticated_roles(List[str]): List of all access roles available to the authenticated user
-        recovery_attempts(int): Number of times the workflow has been restarted (usually by recovery)
-
-    """
-
-    workflow_id: str
-    status: str
-    name: str
-    executor_id: Optional[str]
-    class_name: Optional[str]
-    config_name: Optional[str]
-    queue_name: Optional[str]
-    authenticated_user: Optional[str]
-    assumed_role: Optional[str]
-    authenticated_roles: Optional[List[str]]
-    recovery_attempts: Optional[int]
-
-
 class WorkflowHandle(Generic[R], Protocol):
     """
     Handle to a workflow function.
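The new `DBOS.list_workflows` and `DBOS.list_queued_workflows` classmethods above delegate to `dbos._workflow_commands`. A hedged usage sketch based only on the keyword arguments visible in this hunk; the filter values ("ERROR", the timestamp, "example_queue") are illustrative assumptions, not taken from this diff:

from dbos import DBOS

# List recent failed workflows, newest first (status string and timestamp are assumed values).
failed = DBOS.list_workflows(
    status="ERROR",
    start_time="2025-01-01T00:00:00Z",
    limit=20,
    sort_desc=True,
)
for wf in failed:
    print(wf.workflow_id, wf.name, wf.status)

# Inspect what is currently sitting on a queue ("example_queue" is hypothetical).
queued = DBOS.list_queued_workflows(queue_name="example_queue", limit=10)
print(f"{len(queued)} workflows found on example_queue")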
{dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_outcome.py

@@ -4,6 +4,8 @@ import inspect
 import time
 from typing import Any, Callable, Coroutine, Optional, Protocol, TypeVar, Union, cast
 
+from dbos._context import EnterDBOSStepRetry
+
 T = TypeVar("T")
 R = TypeVar("R")
 
@@ -98,7 +100,8 @@ class Immediate(Outcome[T]):
     ) -> T:
         for i in range(attempts):
             try:
-
+                with EnterDBOSStepRetry(i, attempts):
+                    return func()
             except Exception as exp:
                 wait_time = on_exception(i, exp)
                 time.sleep(wait_time)
@@ -184,7 +187,8 @@ class Pending(Outcome[T]):
     ) -> T:
         for i in range(attempts):
             try:
-
+                with EnterDBOSStepRetry(i, attempts):
+                    return await func()
             except Exception as exp:
                 wait_time = on_exception(i, exp)
                 await asyncio.sleep(wait_time)
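Wrapping each retry attempt in `EnterDBOSStepRetry` (above) is what populates the new `DBOS.step_status` property during automatic step retries. A sketch of how a step might observe it; the decorator arguments `retries_allowed` and `max_attempts` are assumed from DBOS's existing step-retry machinery (referenced in `decorate_step`), not introduced by this diff:

from dbos import DBOS

@DBOS.step(retries_allowed=True, max_attempts=3)  # retry options assumed, see note above
def flaky_step() -> str:
    info = DBOS.step_status  # StepStatus(step_id, current_attempt, max_attempts)
    if info.current_attempt is not None:
        # current_attempt is zero-indexed while retries are in progress.
        DBOS.logger.info(
            f"step {info.step_id}: attempt {info.current_attempt + 1} of {info.max_attempts}"
        )
    return "done"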
{dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_queue.py

@@ -1,6 +1,6 @@
 import threading
 import traceback
-from typing import TYPE_CHECKING, Any, Coroutine, Optional, TypedDict
+from typing import TYPE_CHECKING, Any, Callable, Coroutine, Optional, TypedDict
 
 from psycopg import errors
 from sqlalchemy.exc import OperationalError
@@ -10,7 +10,7 @@ from dbos._utils import GlobalParams
 from ._core import P, R, execute_workflow_by_id, start_workflow, start_workflow_async
 
 if TYPE_CHECKING:
-    from ._dbos import DBOS,
+    from ._dbos import DBOS, WorkflowHandle, WorkflowHandleAsync
 
 
 class QueueRateLimit(TypedDict):
@@ -59,7 +59,7 @@ class Queue:
         registry.queue_info_map[self.name] = self
 
     def enqueue(
-        self, func: "
+        self, func: "Callable[P, R]", *args: P.args, **kwargs: P.kwargs
     ) -> "WorkflowHandle[R]":
         from ._dbos import _get_dbos_instance
 
@@ -68,7 +68,7 @@ class Queue:
 
     async def enqueue_async(
         self,
-        func: "
+        func: "Callable[P, Coroutine[Any, Any, R]]",
         *args: P.args,
         **kwargs: P.kwargs,
     ) -> "WorkflowHandleAsync[R]":
{dbos-0.25.0a7 → dbos-0.25.0a9}/dbos/_sys_db.py

@@ -116,7 +116,7 @@ class GetWorkflowsInput:
         self.authenticated_user: Optional[str] = None  # The user who ran the workflow.
         self.start_time: Optional[str] = None  # Timestamp in ISO 8601 format
         self.end_time: Optional[str] = None  # Timestamp in ISO 8601 format
-        self.status: Optional[
+        self.status: Optional[str] = None
         self.application_version: Optional[str] = (
             None  # The application version that ran this workflow. = None
         )
@@ -541,32 +541,6 @@ class SystemDatabase:
         }
         return status
 
-    def get_workflow_status_within_wf(
-        self, workflow_uuid: str, calling_wf: str, calling_wf_fn: int
-    ) -> Optional[WorkflowStatusInternal]:
-        res = self.check_operation_execution(calling_wf, calling_wf_fn)
-        if res is not None:
-            if res["output"]:
-                resstat: WorkflowStatusInternal = _serialization.deserialize(
-                    res["output"]
-                )
-                return resstat
-            else:
-                raise DBOSException(
-                    "Workflow status record not found. This should not happen! \033[1m Hint: Check if your workflow is deterministic.\033[0m"
-                )
-        stat = self.get_workflow_status(workflow_uuid)
-        self.record_operation_result(
-            {
-                "workflow_uuid": calling_wf,
-                "function_id": calling_wf_fn,
-                "function_name": "DBOS.getStatus",
-                "output": _serialization.serialize(stat),
-                "error": None,
-            }
-        )
-        return stat
-
     def await_workflow_result_internal(self, workflow_uuid: str) -> dict[str, Any]:
         polling_interval_secs: float = 1.000
 