dbos 0.25.0a3__py3-none-any.whl → 0.25.0a8__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry; it is provided for informational purposes only.
Potentially problematic release: this version of dbos has been flagged as possibly problematic.
- dbos/__init__.py +2 -1
- dbos/__main__.py +3 -0
- dbos/_admin_server.py +20 -2
- dbos/_conductor/conductor.py +1 -1
- dbos/_conductor/protocol.py +13 -7
- dbos/_context.py +48 -0
- dbos/_core.py +76 -12
- dbos/_dbos.py +112 -61
- dbos/_fastapi.py +4 -1
- dbos/_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py +46 -0
- dbos/_outcome.py +6 -2
- dbos/_schemas/system_database.py +2 -0
- dbos/_sys_db.py +80 -26
- dbos/_templates/dbos-db-starter/__package/main.py +6 -11
- dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +2 -4
- dbos/_workflow_commands.py +90 -63
- dbos/cli/_template_init.py +8 -3
- dbos/cli/cli.py +17 -1
- {dbos-0.25.0a3.dist-info → dbos-0.25.0a8.dist-info}/METADATA +1 -1
- {dbos-0.25.0a3.dist-info → dbos-0.25.0a8.dist-info}/RECORD +23 -22
- {dbos-0.25.0a3.dist-info → dbos-0.25.0a8.dist-info}/WHEEL +0 -0
- {dbos-0.25.0a3.dist-info → dbos-0.25.0a8.dist-info}/entry_points.txt +0 -0
- {dbos-0.25.0a3.dist-info → dbos-0.25.0a8.dist-info}/licenses/LICENSE +0 -0
dbos/_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py ADDED
@@ -0,0 +1,46 @@
+"""functionname_childid_op_outputs
+
+Revision ID: f4b9b32ba814
+Revises: 04ca4f231047
+Create Date: 2025-03-21 14:32:43.091074
+
+"""
+
+from typing import Sequence, Union
+
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision: str = "f4b9b32ba814"
+down_revision: Union[str, None] = "04ca4f231047"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    op.add_column(
+        "operation_outputs",
+        sa.Column(
+            "function_name",
+            sa.Text(),
+            nullable=False,
+            default="",
+        ),
+        schema="dbos",
+    )
+
+    op.add_column(
+        "operation_outputs",
+        sa.Column(
+            "child_workflow_id",
+            sa.Text(),
+            nullable=True,
+        ),
+        schema="dbos",
+    )
+
+
+def downgrade() -> None:
+    op.drop_column("operation_outputs", "function_name", schema="dbos")
+    op.drop_column("operation_outputs", "child_workflow_id", schema="dbos")
dbos/_outcome.py CHANGED
@@ -4,6 +4,8 @@ import inspect
 import time
 from typing import Any, Callable, Coroutine, Optional, Protocol, TypeVar, Union, cast
 
+from dbos._context import EnterDBOSStepRetry
+
 T = TypeVar("T")
 R = TypeVar("R")
 
@@ -98,7 +100,8 @@ class Immediate(Outcome[T]):
     ) -> T:
         for i in range(attempts):
             try:
-                return func()
+                with EnterDBOSStepRetry(i, attempts):
+                    return func()
             except Exception as exp:
                 wait_time = on_exception(i, exp)
                 time.sleep(wait_time)
@@ -184,7 +187,8 @@ class Pending(Outcome[T]):
     ) -> T:
         for i in range(attempts):
             try:
-                return await func()
+                with EnterDBOSStepRetry(i, attempts):
+                    return await func()
             except Exception as exp:
                 wait_time = on_exception(i, exp)
                 await asyncio.sleep(wait_time)
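The change wraps each retry attempt in EnterDBOSStepRetry, threading the attempt number into the ambient step context. A standalone sketch of the same shape, where step_retry_context is a hypothetical stand-in (the real context manager's behavior is not shown in this diff) and the backoff policy is an assumption:

import time
from contextlib import contextmanager
from typing import Callable, Iterator, TypeVar

T = TypeVar("T")

@contextmanager
def step_retry_context(attempt: int, max_attempts: int) -> Iterator[None]:
    # Hypothetical stand-in: expose the current attempt number to code
    # running inside the step, then clear it on exit.
    print(f"attempt {attempt + 1} of {max_attempts}")
    yield

def run_with_retries(func: Callable[[], T], attempts: int = 3) -> T:
    for i in range(attempts):
        try:
            with step_retry_context(i, attempts):
                return func()
        except Exception:
            time.sleep(2**i)  # simple exponential backoff (an assumption)
    raise RuntimeError("all attempts failed")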
dbos/_schemas/system_database.py CHANGED
@@ -71,8 +71,10 @@ class SystemSchema:
             nullable=False,
         ),
         Column("function_id", Integer, nullable=False),
+        Column("function_name", Text, nullable=False, default=""),
         Column("output", Text, nullable=True),
         Column("error", Text, nullable=True),
+        Column("child_workflow_id", Text, nullable=True),
         PrimaryKeyConstraint("workflow_uuid", "function_id"),
     )
 
dbos/_sys_db.py CHANGED
@@ -28,6 +28,7 @@ from sqlalchemy.sql import func
 from dbos._utils import GlobalParams
 
 from . import _serialization
+from ._context import get_local_dbos_context
 from ._dbos_config import ConfigFile
 from ._error import (
     DBOSConflictingWorkflowError,
@@ -89,6 +90,7 @@ class RecordedResult(TypedDict):
 class OperationResultInternal(TypedDict):
     workflow_uuid: str
     function_id: int
+    function_name: str
     output: Optional[str]  # JSON (jsonpickle)
     error: Optional[str]  # JSON (jsonpickle)
 
@@ -114,7 +116,7 @@ class GetWorkflowsInput:
         self.authenticated_user: Optional[str] = None  # The user who ran the workflow.
         self.start_time: Optional[str] = None  # Timestamp in ISO 8601 format
         self.end_time: Optional[str] = None  # Timestamp in ISO 8601 format
-        self.status: Optional[WorkflowStatuses] = None
+        self.status: Optional[str] = None
        self.application_version: Optional[str] = (
            None  # The application version that ran this workflow.
        )
@@ -151,6 +153,14 @@ class GetPendingWorkflowsOutput:
         self.queue_name: Optional[str] = queue_name
 
 
+class StepInfo(TypedDict):
+    function_id: int
+    function_name: str
+    output: Optional[str]  # JSON (jsonpickle)
+    error: Optional[str]  # JSON (jsonpickle)
+    child_workflow_id: Optional[str]
+
+
 _dbos_null_topic = "__null__topic__"
 _buffer_flush_batch_size = 100
 _buffer_flush_interval_secs = 1.0
@@ -531,31 +541,6 @@ class SystemDatabase:
         }
         return status
 
-    def get_workflow_status_within_wf(
-        self, workflow_uuid: str, calling_wf: str, calling_wf_fn: int
-    ) -> Optional[WorkflowStatusInternal]:
-        res = self.check_operation_execution(calling_wf, calling_wf_fn)
-        if res is not None:
-            if res["output"]:
-                resstat: WorkflowStatusInternal = _serialization.deserialize(
-                    res["output"]
-                )
-                return resstat
-            else:
-                raise DBOSException(
-                    "Workflow status record not found. This should not happen! \033[1m Hint: Check if your workflow is deterministic.\033[0m"
-                )
-        stat = self.get_workflow_status(workflow_uuid)
-        self.record_operation_result(
-            {
-                "workflow_uuid": calling_wf,
-                "function_id": calling_wf_fn,
-                "output": _serialization.serialize(stat),
-                "error": None,
-            }
-        )
-        return stat
-
     def await_workflow_result_internal(self, workflow_uuid: str) -> dict[str, Any]:
         polling_interval_secs: float = 1.000
 
@@ -771,6 +756,28 @@ class SystemDatabase:
             for row in rows
         ]
 
+    def get_workflow_steps(self, workflow_id: str) -> List[StepInfo]:
+        with self.engine.begin() as c:
+            rows = c.execute(
+                sa.select(
+                    SystemSchema.operation_outputs.c.function_id,
+                    SystemSchema.operation_outputs.c.function_name,
+                    SystemSchema.operation_outputs.c.output,
+                    SystemSchema.operation_outputs.c.error,
+                    SystemSchema.operation_outputs.c.child_workflow_id,
+                ).where(SystemSchema.operation_outputs.c.workflow_uuid == workflow_id)
+            ).fetchall()
+            return [
+                StepInfo(
+                    function_id=row[0],
+                    function_name=row[1],
+                    output=row[2],  # Preserve JSON data
+                    error=row[3],
+                    child_workflow_id=row[4],
+                )
+                for row in rows
+            ]
+
     def record_operation_result(
         self, result: OperationResultInternal, conn: Optional[sa.Connection] = None
     ) -> None:
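Since StepInfo keeps output and error as serialized JSON strings, callers that want Python values back must deserialize them. A small sketch, assuming the jsonpickle encoding that the # JSON (jsonpickle) comments indicate:

import jsonpickle  # the serialization format indicated by the comments above

def decode_step(step: dict) -> dict:
    # StepInfo stores output/error as JSON strings; decode them for display.
    return {
        **step,
        "output": jsonpickle.decode(step["output"]) if step["output"] else None,
        "error": jsonpickle.decode(step["error"]) if step["error"] else None,
    }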
@@ -782,6 +789,7 @@ class SystemDatabase:
         sql = pg.insert(SystemSchema.operation_outputs).values(
             workflow_uuid=result["workflow_uuid"],
             function_id=result["function_id"],
+            function_name=result["function_name"],
             output=output,
             error=error,
         )
@@ -796,6 +804,30 @@ class SystemDatabase:
                 raise DBOSWorkflowConflictIDError(result["workflow_uuid"])
             raise
 
+    def record_child_workflow(
+        self,
+        parentUUID: str,
+        childUUID: str,
+        functionID: int,
+        functionName: str,
+    ) -> None:
+        if self._debug_mode:
+            raise Exception("called record_child_workflow in debug mode")
+
+        sql = pg.insert(SystemSchema.operation_outputs).values(
+            workflow_uuid=parentUUID,
+            function_id=functionID,
+            function_name=functionName,
+            child_workflow_id=childUUID,
+        )
+        try:
+            with self.engine.begin() as c:
+                c.execute(sql)
+        except DBAPIError as dbapi_error:
+            if dbapi_error.orig.sqlstate == "23505":  # type: ignore
+                raise DBOSWorkflowConflictIDError(parentUUID)
+            raise
+
     def check_operation_execution(
         self, workflow_uuid: str, function_id: int, conn: Optional[sa.Connection] = None
     ) -> Optional[RecordedResult]:
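record_child_workflow leans on the table's (workflow_uuid, function_id) primary key for idempotence: re-recording the same step trips Postgres's unique-violation SQLSTATE 23505, which is translated into a workflow-conflict error instead of leaking the raw driver exception. The same guard in isolation (a sketch; RuntimeError stands in for DBOSWorkflowConflictIDError):

from sqlalchemy.exc import DBAPIError

def execute_once(engine, insert_stmt, workflow_id: str) -> None:
    # Insert a step record; translate a duplicate-key error into a
    # workflow-conflict error rather than surfacing the raw DBAPIError.
    try:
        with engine.begin() as c:
            c.execute(insert_stmt)
    except DBAPIError as e:
        if getattr(e.orig, "sqlstate", None) == "23505":  # unique_violation
            raise RuntimeError(f"conflicting workflow ID: {workflow_id}") from e
        raise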
@@ -822,6 +854,23 @@ class SystemDatabase:
         }
         return result
 
+    def check_child_workflow(
+        self, workflow_uuid: str, function_id: int
+    ) -> Optional[str]:
+        sql = sa.select(SystemSchema.operation_outputs.c.child_workflow_id).where(
+            SystemSchema.operation_outputs.c.workflow_uuid == workflow_uuid,
+            SystemSchema.operation_outputs.c.function_id == function_id,
+        )
+
+        # If in a transaction, use the provided connection
+        row: Any
+        with self.engine.begin() as c:
+            row = c.execute(sql).fetchone()
+
+        if row is None:
+            return None
+        return str(row[0])
+
     def send(
         self,
         workflow_uuid: str,
@@ -866,6 +915,7 @@ class SystemDatabase:
         output: OperationResultInternal = {
             "workflow_uuid": workflow_uuid,
             "function_id": function_id,
+            "function_name": "DBOS.send",
             "output": None,
             "error": None,
         }
@@ -959,6 +1009,7 @@ class SystemDatabase:
                 {
                     "workflow_uuid": workflow_uuid,
                     "function_id": function_id,
+                    "function_name": "DBOS.recv",
                     "output": _serialization.serialize(
                         message
                     ),  # None will be serialized to 'null'
@@ -1049,6 +1100,7 @@ class SystemDatabase:
             {
                 "workflow_uuid": workflow_uuid,
                 "function_id": function_id,
+                "function_name": "DBOS.sleep",
                 "output": _serialization.serialize(end_time),
                 "error": None,
             }
@@ -1096,6 +1148,7 @@ class SystemDatabase:
         output: OperationResultInternal = {
             "workflow_uuid": workflow_uuid,
             "function_id": function_id,
+            "function_name": "DBOS.setEvent",
             "output": None,
             "error": None,
         }
@@ -1176,6 +1229,7 @@ class SystemDatabase:
             {
                 "workflow_uuid": caller_ctx["workflow_uuid"],
                 "function_id": caller_ctx["function_id"],
+                "function_name": "DBOS.getEvent",
                 "output": _serialization.serialize(
                     value
                 ),  # None will be serialized to 'null'
dbos/_templates/dbos-db-starter/__package/main.py CHANGED
@@ -6,6 +6,7 @@
 
 # First, let's do imports, create a FastAPI app, and initialize DBOS.
 
+import uvicorn
 from fastapi import FastAPI
 from fastapi.responses import HTMLResponse
 
@@ -37,7 +38,7 @@ def example_transaction(name: str) -> str:
     return greeting
 
 
-#
+# Now, let's use FastAPI to serve an HTML + CSS readme
 # from the root path.
 
 
@@ -66,14 +67,8 @@ def readme() -> HTMLResponse:
     return HTMLResponse(readme)
 
 
-#
-# - "npm i -g @dbos-inc/dbos-cloud@latest" to install the Cloud CLI (requires Node)
-# - "dbos-cloud app deploy" to deploy your app
-# - Deploy outputs a URL--visit it to see your app!
+# Finally, we'll launch DBOS then start the FastAPI server.
 
-
-
-
-# - "dbos migrate" to set up your database tables
-# - "dbos start" to start the app
-# - Visit localhost:8000 to see your app!
+if __name__ == "__main__":
+    DBOS.launch()
+    uvicorn.run(app, host="0.0.0.0", port=8000)
dbos/_workflow_commands.py CHANGED
(Removed lines shown as bare "-" had their old-side content lost in extraction.)
@@ -1,59 +1,78 @@
-
+import json
+from typing import Any, List, Optional
 
 from . import _serialization
 from ._sys_db import (
     GetQueuedWorkflowsInput,
     GetWorkflowsInput,
     GetWorkflowsOutput,
+    StepInfo,
     SystemDatabase,
-    WorkflowStatuses,
 )
 
 
-class
+class WorkflowStatus:
+    # The workflow ID
     workflow_id: str
-status
-
-
-
+    # The workflow status. Must be one of ENQUEUED, PENDING, SUCCESS, ERROR, CANCELLED, or RETRIES_EXCEEDED
+    status: str
+    # The name of the workflow function
+    name: str
+    # The name of the workflow's class, if any
+    class_name: Optional[str]
+    # The name with which the workflow's class instance was configured, if any
+    config_name: Optional[str]
+    # The user who ran the workflow, if specified
     authenticated_user: Optional[str]
+    # The role with which the workflow ran, if specified
     assumed_role: Optional[str]
-
-
-
-
-
-
-
+    # All roles which the authenticated user could assume
+    authenticated_roles: Optional[list[str]]
+    # The deserialized workflow input object
+    input: Optional[_serialization.WorkflowInputs]
+    # The workflow's output, if any
+    output: Optional[Any] = None
+    # The error the workflow threw, if any
+    error: Optional[Exception] = None
+    # Workflow start time, as a Unix epoch timestamp in ms
+    created_at: Optional[int]
+    # Last time the workflow status was updated, as a Unix epoch timestamp in ms
+    updated_at: Optional[int]
+    # If this workflow was enqueued, on which queue
     queue_name: Optional[str]
+    # The executor to most recently executed this workflow
     executor_id: Optional[str]
+    # The application version on which this workflow was started
     app_version: Optional[str]
+    # The ID of the application executing this workflow
     app_id: Optional[str]
+    # The number of times this workflow's execution has been attempted
     recovery_attempts: Optional[int]
+    # The HTTP request that triggered the workflow, if known
+    request: Optional[str]
 
 
 def list_workflows(
     sys_db: SystemDatabase,
     *,
     workflow_ids: Optional[List[str]] = None,
-
+    status: Optional[str] = None,
     start_time: Optional[str] = None,
     end_time: Optional[str] = None,
-    status: Optional[str] = None,
-    request: bool = False,
-    app_version: Optional[str] = None,
     name: Optional[str] = None,
+    app_version: Optional[str] = None,
+    user: Optional[str] = None,
     limit: Optional[int] = None,
     offset: Optional[int] = None,
     sort_desc: bool = False,
-
+    request: bool = False,
+) -> List[WorkflowStatus]:
     input = GetWorkflowsInput()
     input.workflow_ids = workflow_ids
     input.authenticated_user = user
     input.start_time = start_time
     input.end_time = end_time
-
-    input.status = cast(WorkflowStatuses, status)
+    input.status = status
     input.application_version = app_version
     input.limit = limit
     input.name = name
@@ -61,7 +80,7 @@ def list_workflows(
     input.sort_desc = sort_desc
 
     output: GetWorkflowsOutput = sys_db.get_workflows(input)
-    infos: List[
+    infos: List[WorkflowStatus] = []
     for workflow_id in output.workflow_uuids:
         info = get_workflow(sys_db, workflow_id, request)  # Call the method for each ID
         if info is not None:
@@ -72,16 +91,16 @@ def list_workflows(
 def list_queued_workflows(
     sys_db: SystemDatabase,
     *,
-    limit: Optional[int] = None,
-    start_time: Optional[str] = None,
-    end_time: Optional[str] = None,
     queue_name: Optional[str] = None,
     status: Optional[str] = None,
+    start_time: Optional[str] = None,
+    end_time: Optional[str] = None,
     name: Optional[str] = None,
-
+    limit: Optional[int] = None,
     offset: Optional[int] = None,
     sort_desc: bool = False,
-
+    request: bool = False,
+) -> List[WorkflowStatus]:
     input: GetQueuedWorkflowsInput = {
         "queue_name": queue_name,
         "start_time": start_time,
@@ -93,7 +112,7 @@ def list_queued_workflows(
         "sort_desc": sort_desc,
     }
     output: GetWorkflowsOutput = sys_db.get_queued_workflows(input)
-    infos: List[
+    infos: List[WorkflowStatus] = []
     for workflow_id in output.workflow_uuids:
         info = get_workflow(sys_db, workflow_id, request)  # Call the method for each ID
         if info is not None:
@@ -102,47 +121,55 @@ def list_queued_workflows(
 
 
 def get_workflow(
-    sys_db: SystemDatabase,
-) -> Optional[
+    sys_db: SystemDatabase, workflow_id: str, get_request: bool
+) -> Optional[WorkflowStatus]:
-
-    if
+    internal_status = sys_db.get_workflow_status(workflow_id)
+    if internal_status is None:
         return None
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    info = WorkflowStatus()
+
+    info.workflow_id = workflow_id
+    info.status = internal_status["status"]
+    info.name = internal_status["name"]
+    info.class_name = internal_status["class_name"]
+    info.config_name = internal_status["config_name"]
+    info.authenticated_user = internal_status["authenticated_user"]
+    info.assumed_role = internal_status["assumed_role"]
+    info.authenticated_roles = (
+        json.loads(internal_status["authenticated_roles"])
+        if internal_status["authenticated_roles"] is not None
+        else None
+    )
+    info.request = internal_status["request"]
+    info.created_at = internal_status["created_at"]
+    info.updated_at = internal_status["updated_at"]
+    info.queue_name = internal_status["queue_name"]
+    info.executor_id = internal_status["executor_id"]
+    info.app_version = internal_status["app_version"]
+    info.app_id = internal_status["app_id"]
+    info.recovery_attempts = internal_status["recovery_attempts"]
+
     input_data = sys_db.get_workflow_inputs(workflow_id)
     if input_data is not None:
-
+        info.input = input_data
 
-    if
-        result = sys_db.await_workflow_result(
-
-    elif
+    if internal_status.get("status") == "SUCCESS":
+        result = sys_db.await_workflow_result(workflow_id)
+        info.output = result
+    elif internal_status.get("status") == "ERROR":
         try:
-            sys_db.await_workflow_result(
+            sys_db.await_workflow_result(workflow_id)
         except Exception as e:
-
+            info.error = e
+
+    if not get_request:
+        info.request = None
+
+    return info
 
-    if not getRequest:
-        winfo.request = None
 
-
+def list_workflow_steps(sys_db: SystemDatabase, workflow_id: str) -> List[StepInfo]:
+    output = sys_db.get_workflow_steps(workflow_id)
+    return output
dbos/cli/_template_init.py CHANGED
@@ -58,15 +58,20 @@ def copy_template(src_dir: str, project_name: str, config_mode: bool) -> None:
     dst_dir = path.abspath(".")
 
     package_name = project_name.replace("-", "_")
+    default_migration_section = """database:
+  migrate:
+    - alembic upgrade head
+"""
     ctx = {
         "project_name": project_name,
         "package_name": package_name,
-        "
+        "start_command": f"python3 -m {package_name}.main",
+        "migration_section": default_migration_section,
     }
 
     if config_mode:
-        ctx["
-        ctx["
+        ctx["start_command"] = "python3 main.py"
+        ctx["migration_section"] = ""
         _copy_dbos_template(
             os.path.join(src_dir, "dbos-config.yaml.dbos"),
             os.path.join(dst_dir, "dbos-config.yaml"),
CHANGED
|
@@ -21,7 +21,12 @@ from .. import load_config
|
|
|
21
21
|
from .._app_db import ApplicationDatabase
|
|
22
22
|
from .._dbos_config import _is_valid_app_name
|
|
23
23
|
from .._sys_db import SystemDatabase, reset_system_database
|
|
24
|
-
from .._workflow_commands import
|
|
24
|
+
from .._workflow_commands import (
|
|
25
|
+
get_workflow,
|
|
26
|
+
list_queued_workflows,
|
|
27
|
+
list_workflow_steps,
|
|
28
|
+
list_workflows,
|
|
29
|
+
)
|
|
25
30
|
from ..cli._github_init import create_template_from_github
|
|
26
31
|
from ._template_init import copy_template, get_project_name, get_templates_directory
|
|
27
32
|
|
|
@@ -339,6 +344,17 @@ def get(
|
|
|
339
344
|
)
|
|
340
345
|
|
|
341
346
|
|
|
347
|
+
@workflow.command(help="List the steps of a workflow")
|
|
348
|
+
def steps(
|
|
349
|
+
workflow_id: Annotated[str, typer.Argument()],
|
|
350
|
+
) -> None:
|
|
351
|
+
config = load_config(silent=True)
|
|
352
|
+
sys_db = SystemDatabase(config)
|
|
353
|
+
print(
|
|
354
|
+
jsonpickle.encode(list_workflow_steps(sys_db, workflow_id), unpicklable=False)
|
|
355
|
+
)
|
|
356
|
+
|
|
357
|
+
|
|
342
358
|
@workflow.command(
|
|
343
359
|
help="Cancel a workflow so it is no longer automatically retried or restarted"
|
|
344
360
|
)
|