dbos 0.21.0a4__py3-none-any.whl → 0.21.0a7__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
Potentially problematic release: this version of dbos might be problematic.
- dbos/_dbos.py +4 -1
- dbos/_dbos_config.py +17 -13
- dbos/_recovery.py +31 -11
- dbos/_sys_db.py +91 -6
- dbos/_workflow_commands.py +43 -13
- dbos/cli/cli.py +84 -16
- {dbos-0.21.0a4.dist-info → dbos-0.21.0a7.dist-info}/METADATA +1 -1
- {dbos-0.21.0a4.dist-info → dbos-0.21.0a7.dist-info}/RECORD +11 -11
- {dbos-0.21.0a4.dist-info → dbos-0.21.0a7.dist-info}/WHEEL +0 -0
- {dbos-0.21.0a4.dist-info → dbos-0.21.0a7.dist-info}/entry_points.txt +0 -0
- {dbos-0.21.0a4.dist-info → dbos-0.21.0a7.dist-info}/licenses/LICENSE +0 -0
dbos/_dbos.py
CHANGED

@@ -56,7 +56,7 @@ from ._registrations import (
 )
 from ._roles import default_required_roles, required_roles
 from ._scheduler import ScheduledWorkflow, scheduled
-from ._sys_db import
+from ._sys_db import reset_system_database
 from ._tracer import dbos_tracer

 if TYPE_CHECKING:
@@ -613,6 +613,7 @@ class DBOS:
             workflow_id=workflow_id,
             status=stat["status"],
             name=stat["name"],
+            executor_id=stat["executor_id"],
             recovery_attempts=stat["recovery_attempts"],
             class_name=stat["class_name"],
             config_name=stat["config_name"],
@@ -909,6 +910,7 @@ class WorkflowStatus:
         workflow_id(str): The ID of the workflow execution
         status(str): The status of the execution, from `WorkflowStatusString`
         name(str): The workflow function name
+        executor_id(str): The ID of the executor running the workflow
         class_name(str): For member functions, the name of the class containing the workflow function
         config_name(str): For instance member functions, the name of the class instance for the execution
         queue_name(str): For workflows that are or were queued, the queue name
@@ -922,6 +924,7 @@ class WorkflowStatus:
     workflow_id: str
     status: str
    name: str
+    executor_id: Optional[str]
    class_name: Optional[str]
    config_name: Optional[str]
    queue_name: Optional[str]

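The net effect in `_dbos.py` is that `WorkflowStatus` now exposes the ID of the executor that ran the workflow. A minimal sketch of reading it, assuming the `DBOS.retrieve_workflow` / `handle.get_status()` API this package provides elsewhere (the workflow ID is a placeholder):

```python
# Sketch: reading the new executor_id field on WorkflowStatus.
# Assumes an initialized DBOS application; only the executor_id attribute
# itself is confirmed by the diff above.
from dbos import DBOS

handle = DBOS.retrieve_workflow("example-workflow-id")  # placeholder ID
status = handle.get_status()
print(status.workflow_id, status.status, status.executor_id)
```
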
dbos/_dbos_config.py
CHANGED

@@ -123,7 +123,10 @@ def get_dbos_database_url(config_file_path: str = DBOS_CONFIG_PATH) -> str:


 def load_config(
-    config_file_path: str = DBOS_CONFIG_PATH,
+    config_file_path: str = DBOS_CONFIG_PATH,
+    *,
+    use_db_wizard: bool = True,
+    silent: bool = False,
 ) -> ConfigFile:
     """
     Load the DBOS `ConfigFile` from the specified path (typically `dbos-config.yaml`).
@@ -188,18 +191,19 @@ def load_config(
     # Load the DB connection file. Use its values for missing fields from dbos-config.yaml. Use defaults otherwise.
     data = cast(ConfigFile, data)
     db_connection = load_db_connection()
-    if
-
-
-
-
-
-
-
-
-
-
+    if not silent:
+        if data["database"].get("hostname"):
+            print(
+                "[bold blue]Loading database connection parameters from dbos-config.yaml[/bold blue]"
+            )
+        elif db_connection.get("hostname"):
+            print(
+                "[bold blue]Loading database connection parameters from .dbos/db_connection[/bold blue]"
+            )
+        else:
+            print(
+                "[bold blue]Using default database connection parameters (localhost)[/bold blue]"
+            )

     data["database"]["hostname"] = (
         data["database"].get("hostname") or db_connection.get("hostname") or "localhost"

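`load_config` now takes two keyword-only flags. The CLI diff below passes `silent=True` to suppress the connection-source messages printed above; `use_db_wizard` is presumably a switch for the interactive connection wizard, though its use is not shown in this diff. A hedged sketch:

```python
# Sketch: the new keyword-only parameters on load_config (0.21.0a7).
# silent=True suppresses the "[bold blue]..." messages shown in the hunk above.
# use_db_wizard=False is an assumption about behavior not visible in this diff.
from dbos import load_config

config = load_config("dbos-config.yaml", silent=True)
config_no_wizard = load_config(use_db_wizard=False)
```
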
dbos/_recovery.py
CHANGED

@@ -6,20 +6,29 @@ from typing import TYPE_CHECKING, Any, List

 from ._core import execute_workflow_by_id
 from ._error import DBOSWorkflowFunctionNotFoundError
+from ._sys_db import GetPendingWorkflowsOutput

 if TYPE_CHECKING:
     from ._dbos import DBOS, WorkflowHandle


-def startup_recovery_thread(
+def startup_recovery_thread(
+    dbos: "DBOS", pending_workflows: List[GetPendingWorkflowsOutput]
+) -> None:
     """Attempt to recover local pending workflows on startup using a background thread."""
     stop_event = threading.Event()
     dbos.stop_events.append(stop_event)
-    while not stop_event.is_set() and len(
+    while not stop_event.is_set() and len(pending_workflows) > 0:
         try:
-            for
-
-
+            for pending_workflow in list(pending_workflows):
+                if (
+                    pending_workflow.queue_name
+                    and pending_workflow.queue_name != "_dbos_internal_queue"
+                ):
+                    dbos._sys_db.clear_queue_assignment(pending_workflow.workflow_uuid)
+                    continue
+                execute_workflow_by_id(dbos, pending_workflow.workflow_uuid)
+                pending_workflows.remove(pending_workflow)
         except DBOSWorkflowFunctionNotFoundError:
             time.sleep(1)
         except Exception as e:
@@ -39,12 +48,23 @@ def recover_pending_workflows(
             f"Skip local recovery because it's running in a VM: {os.environ.get('DBOS__VMID')}"
         )
     dbos.logger.debug(f"Recovering pending workflows for executor: {executor_id}")
-
-
-
-
-
-
+    pending_workflows = dbos._sys_db.get_pending_workflows(executor_id)
+    for pending_workflow in pending_workflows:
+        if (
+            pending_workflow.queue_name
+            and pending_workflow.queue_name != "_dbos_internal_queue"
+        ):
+            try:
+                dbos._sys_db.clear_queue_assignment(pending_workflow.workflow_uuid)
+                workflow_handles.append(
+                    dbos.retrieve_workflow(pending_workflow.workflow_uuid)
+                )
+            except Exception as e:
+                dbos.logger.error(e)
+        else:
+            workflow_handles.append(
+                execute_workflow_by_id(dbos, pending_workflow.workflow_uuid)
+            )

     dbos.logger.info("Recovered pending workflows")
     return workflow_handles

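Recovery now treats queued workflows differently from directly started ones: a pending workflow assigned to a user queue (anything other than `_dbos_internal_queue`) has its queue assignment cleared and is re-enqueued, while everything else is re-executed immediately. The helper below is not part of the package; it is only an illustration of that dispatch rule using the `GetPendingWorkflowsOutput` class added in this release:

```python
# Illustration of the recovery dispatch rule introduced above.
# should_requeue() and the sample data are hypothetical; only
# GetPendingWorkflowsOutput and the queue-name check come from the diff.
from dbos._sys_db import GetPendingWorkflowsOutput


def should_requeue(wf: GetPendingWorkflowsOutput) -> bool:
    """True if recovery should clear the queue assignment rather than re-execute."""
    return bool(wf.queue_name) and wf.queue_name != "_dbos_internal_queue"


pending = [
    GetPendingWorkflowsOutput(workflow_uuid="wf-queued", queue_name="example_queue"),
    GetPendingWorkflowsOutput(workflow_uuid="wf-direct", queue_name=None),
    GetPendingWorkflowsOutput(workflow_uuid="wf-internal", queue_name="_dbos_internal_queue"),
]
for wf in pending:
    print(wf.workflow_uuid, "re-enqueue" if should_requeue(wf) else "re-execute")
```
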
dbos/_sys_db.py
CHANGED

@@ -126,11 +126,26 @@ class GetWorkflowsInput:
     )


+class GetQueuedWorkflowsInput(TypedDict):
+    queue_name: Optional[str]
+    status: Optional[str]
+    start_time: Optional[str]  # Timestamp in ISO 8601 format
+    end_time: Optional[str]  # Timestamp in ISO 8601 format
+    limit: Optional[int]  # Return up to this many workflows IDs.
+    name: Optional[str]  # The name of the workflow function
+
+
 class GetWorkflowsOutput:
     def __init__(self, workflow_uuids: List[str]):
         self.workflow_uuids = workflow_uuids


+class GetPendingWorkflowsOutput:
+    def __init__(self, *, workflow_uuid: str, queue_name: Optional[str] = None):
+        self.workflow_uuid: str = workflow_uuid
+        self.queue_name: Optional[str] = queue_name
+
+
 class WorkflowInformation(TypedDict, total=False):
     workflow_uuid: str
     status: WorkflowStatuses  # The status of the workflow.
@@ -456,6 +471,7 @@ class SystemDatabase:
                     SystemSchema.workflow_status.c.authenticated_roles,
                     SystemSchema.workflow_status.c.assumed_role,
                     SystemSchema.workflow_status.c.queue_name,
+                    SystemSchema.workflow_status.c.executor_id,
                 ).where(SystemSchema.workflow_status.c.workflow_uuid == workflow_uuid)
             ).fetchone()
             if row is None:
@@ -470,7 +486,7 @@ class SystemDatabase:
                 "error": None,
                 "app_id": None,
                 "app_version": None,
-                "executor_id":
+                "executor_id": row[10],
                 "request": row[2],
                 "recovery_attempts": row[3],
                 "authenticated_user": row[6],
@@ -656,9 +672,8 @@ class SystemDatabase:

     def get_workflows(self, input: GetWorkflowsInput) -> GetWorkflowsOutput:
         query = sa.select(SystemSchema.workflow_status.c.workflow_uuid).order_by(
-            SystemSchema.workflow_status.c.created_at.
+            SystemSchema.workflow_status.c.created_at.asc()
         )
-
         if input.name:
             query = query.where(SystemSchema.workflow_status.c.name == input.name)
         if input.authenticated_user:
@@ -692,16 +707,73 @@ class SystemDatabase:

         return GetWorkflowsOutput(workflow_uuids)

-    def
+    def get_queued_workflows(
+        self, input: GetQueuedWorkflowsInput
+    ) -> GetWorkflowsOutput:
+
+        query = (
+            sa.select(SystemSchema.workflow_queue.c.workflow_uuid)
+            .join(
+                SystemSchema.workflow_status,
+                SystemSchema.workflow_queue.c.workflow_uuid
+                == SystemSchema.workflow_status.c.workflow_uuid,
+            )
+            .order_by(SystemSchema.workflow_status.c.created_at.asc())
+        )
+
+        if input.get("name"):
+            query = query.where(SystemSchema.workflow_status.c.name == input["name"])
+
+        if input.get("queue_name"):
+            query = query.where(
+                SystemSchema.workflow_queue.c.queue_name == input["queue_name"]
+            )
+
+        if input.get("status"):
+            query = query.where(
+                SystemSchema.workflow_status.c.status == input["status"]
+            )
+        if "start_time" in input and input["start_time"] is not None:
+            query = query.where(
+                SystemSchema.workflow_status.c.created_at
+                >= datetime.datetime.fromisoformat(input["start_time"]).timestamp()
+                * 1000
+            )
+        if "end_time" in input and input["end_time"] is not None:
+            query = query.where(
+                SystemSchema.workflow_status.c.created_at
+                <= datetime.datetime.fromisoformat(input["end_time"]).timestamp() * 1000
+            )
+        if input.get("limit"):
+            query = query.limit(input["limit"])
+
+        with self.engine.begin() as c:
+            rows = c.execute(query)
+            workflow_uuids = [row[0] for row in rows]
+
+        return GetWorkflowsOutput(workflow_uuids)
+
+    def get_pending_workflows(
+        self, executor_id: str
+    ) -> list[GetPendingWorkflowsOutput]:
         with self.engine.begin() as c:
             rows = c.execute(
-                sa.select(
+                sa.select(
+                    SystemSchema.workflow_status.c.workflow_uuid,
+                    SystemSchema.workflow_status.c.queue_name,
+                ).where(
                     SystemSchema.workflow_status.c.status
                     == WorkflowStatusString.PENDING.value,
                     SystemSchema.workflow_status.c.executor_id == executor_id,
                 )
             ).fetchall()
-            return [
+            return [
+                GetPendingWorkflowsOutput(
+                    workflow_uuid=row.workflow_uuid,
+                    queue_name=row.queue_name,
+                )
+                for row in rows
+            ]

     def record_operation_result(
         self, result: OperationResultInternal, conn: Optional[sa.Connection] = None
@@ -1321,6 +1393,19 @@ class SystemDatabase:
             .values(completed_at_epoch_ms=int(time.time() * 1000))
         )

+    def clear_queue_assignment(self, workflow_id: str) -> None:
+        with self.engine.begin() as c:
+            c.execute(
+                sa.update(SystemSchema.workflow_queue)
+                .where(SystemSchema.workflow_queue.c.workflow_uuid == workflow_id)
+                .values(executor_id=None, started_at_epoch_ms=None)
+            )
+            c.execute(
+                sa.update(SystemSchema.workflow_status)
+                .where(SystemSchema.workflow_status.c.workflow_uuid == workflow_id)
+                .values(executor_id=None, status=WorkflowStatusString.ENQUEUED.value)
+            )
+

 def reset_system_database(config: ConfigFile) -> None:
     sysdb_name = (

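`GetQueuedWorkflowsInput` is a plain `TypedDict` (not `total=False`), so callers supply every key, passing `None` for unused filters, exactly as `list_queued_workflows` does in the next file. A sketch of calling the new query method directly, assuming a loadable `dbos-config.yaml` and a reachable system database; the filter values are illustrative:

```python
# Sketch: querying enqueued workflows via the new SystemDatabase.get_queued_workflows.
# The queue name and limit are illustrative; a reachable system database is assumed.
from dbos import load_config
from dbos._sys_db import GetQueuedWorkflowsInput, SystemDatabase

config = load_config(silent=True)
sys_db = SystemDatabase(config)
try:
    query: GetQueuedWorkflowsInput = {
        "queue_name": "example_queue",
        "status": "ENQUEUED",
        "start_time": None,
        "end_time": None,
        "limit": 10,
        "name": None,
    }
    output = sys_db.get_queued_workflows(query)
    print(output.workflow_uuids)
finally:
    sys_db.destroy()
```
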
dbos/_workflow_commands.py
CHANGED

@@ -5,6 +5,7 @@ import typer
 from . import _serialization
 from ._dbos_config import ConfigFile
 from ._sys_db import (
+    GetQueuedWorkflowsInput,
     GetWorkflowsInput,
     GetWorkflowsOutput,
     SystemDatabase,
@@ -19,8 +20,8 @@ class WorkflowInformation:
     workflowClassName: Optional[str]
     workflowConfigName: Optional[str]
     input: Optional[_serialization.WorkflowInputs]  # JSON (jsonpickle)
-    output: Optional[str]  # JSON (jsonpickle)
-    error: Optional[str]  # JSON (jsonpickle)
+    output: Optional[str] = None  # JSON (jsonpickle)
+    error: Optional[str] = None  # JSON (jsonpickle)
     executor_id: Optional[str]
     app_version: Optional[str]
     app_id: Optional[str]
@@ -34,17 +35,15 @@ class WorkflowInformation:

 def list_workflows(
     config: ConfigFile,
-
+    limit: int,
     user: Optional[str],
     starttime: Optional[str],
     endtime: Optional[str],
     status: Optional[str],
     request: bool,
     appversion: Optional[str],
+    name: Optional[str],
 ) -> List[WorkflowInformation]:
-
-    sys_db = None
-
     try:
         sys_db = SystemDatabase(config)

@@ -55,24 +54,55 @@ def list_workflows(
         if status is not None:
             input.status = cast(WorkflowStatuses, status)
         input.application_version = appversion
-        input.limit =
+        input.limit = limit
+        input.name = name

         output: GetWorkflowsOutput = sys_db.get_workflows(input)
-
         infos: List[WorkflowInformation] = []
+        for workflow_id in output.workflow_uuids:
+            info = _get_workflow_info(
+                sys_db, workflow_id, request
+            )  # Call the method for each ID
+            if info is not None:
+                infos.append(info)
+
+        return infos
+    except Exception as e:
+        typer.echo(f"Error listing workflows: {e}")
+        return []
+    finally:
+        if sys_db:
+            sys_db.destroy()

-        if output.workflow_uuids is None:
-            typer.echo("No workflows found")
-            return {}

+def list_queued_workflows(
+    config: ConfigFile,
+    limit: Optional[int] = None,
+    start_time: Optional[str] = None,
+    end_time: Optional[str] = None,
+    queue_name: Optional[str] = None,
+    status: Optional[str] = None,
+    name: Optional[str] = None,
+    request: bool = False,
+) -> List[WorkflowInformation]:
+    try:
+        sys_db = SystemDatabase(config)
+        input: GetQueuedWorkflowsInput = {
+            "queue_name": queue_name,
+            "start_time": start_time,
+            "end_time": end_time,
+            "status": status,
+            "limit": limit,
+            "name": name,
+        }
+        output: GetWorkflowsOutput = sys_db.get_queued_workflows(input)
+        infos: List[WorkflowInformation] = []
         for workflow_id in output.workflow_uuids:
             info = _get_workflow_info(
                 sys_db, workflow_id, request
             )  # Call the method for each ID
-
             if info is not None:
                 infos.append(info)
-
         return infos
     except Exception as e:
         typer.echo(f"Error listing workflows: {e}")

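The new `list_queued_workflows` helper mirrors `list_workflows` but filters through the queue table. It can be called directly as well as via the CLI; a short sketch, with illustrative filter values and a loadable `dbos-config.yaml` assumed:

```python
# Sketch: calling list_queued_workflows directly (filter values are illustrative).
import jsonpickle

from dbos import load_config
from dbos._workflow_commands import list_queued_workflows

config = load_config(silent=True)
queued = list_queued_workflows(
    config,
    limit=20,
    queue_name="example_queue",
    status="ENQUEUED",
)
print(jsonpickle.encode(queued, unpicklable=False))
```
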
dbos/cli/cli.py
CHANGED

@@ -19,14 +19,21 @@ from .. import load_config
 from .._app_db import ApplicationDatabase
 from .._dbos_config import _is_valid_app_name
 from .._sys_db import SystemDatabase, reset_system_database
-from .._workflow_commands import
+from .._workflow_commands import (
+    cancel_workflow,
+    get_workflow,
+    list_queued_workflows,
+    list_workflows,
+)
 from ..cli._github_init import create_template_from_github
 from ._template_init import copy_template, get_project_name, get_templates_directory

 app = typer.Typer()
 workflow = typer.Typer()
+queue = typer.Typer()

 app.add_typer(workflow, name="workflow", help="Manage DBOS workflows")
+workflow.add_typer(queue, name="queue", help="Manage enqueued workflows")


 def _on_windows() -> bool:
@@ -272,18 +279,22 @@ def list(
             help="Retrieve workflows with this application version",
         ),
     ] = None,
+    name: Annotated[
+        typing.Optional[str],
+        typer.Option(
+            "--name",
+            "-n",
+            help="Retrieve workflows with this name",
+        ),
+    ] = None,
     request: Annotated[
         bool,
         typer.Option("--request", help="Retrieve workflow request information"),
     ] = True,
-    appdir: Annotated[
-        typing.Optional[str],
-        typer.Option("--app-dir", "-d", help="Specify the application root directory"),
-    ] = None,
 ) -> None:
-    config = load_config()
+    config = load_config(silent=True)
     workflows = list_workflows(
-        config, limit, user, starttime, endtime, status, request, appversion
+        config, limit, user, starttime, endtime, status, request, appversion, name
     )
     print(jsonpickle.encode(workflows, unpicklable=False))

@@ -291,16 +302,12 @@ def list(
 @workflow.command(help="Retrieve the status of a workflow")
 def get(
     uuid: Annotated[str, typer.Argument()],
-    appdir: Annotated[
-        typing.Optional[str],
-        typer.Option("--app-dir", "-d", help="Specify the application root directory"),
-    ] = None,
     request: Annotated[
         bool,
         typer.Option("--request", help="Retrieve workflow request information"),
     ] = True,
 ) -> None:
-    config = load_config()
+    config = load_config(silent=True)
     print(jsonpickle.encode(get_workflow(config, uuid, request), unpicklable=False))


@@ -309,10 +316,6 @@ def get(
 )
 def cancel(
     uuid: Annotated[str, typer.Argument()],
-    appdir: Annotated[
-        typing.Optional[str],
-        typer.Option("--app-dir", "-d", help="Specify the application root directory"),
-    ] = None,
 ) -> None:
     config = load_config()
     cancel_workflow(config, uuid)
@@ -363,5 +366,70 @@ def restart(
         print(f"Failed to resume workflow {uuid}. Status code: {response.status_code}")


+@queue.command(name="list", help="List enqueued functions for your application")
+def list_queue(
+    limit: Annotated[
+        typing.Optional[int],
+        typer.Option("--limit", "-l", help="Limit the results returned"),
+    ] = None,
+    start_time: Annotated[
+        typing.Optional[str],
+        typer.Option(
+            "--start-time",
+            "-s",
+            help="Retrieve functions starting after this timestamp (ISO 8601 format)",
+        ),
+    ] = None,
+    end_time: Annotated[
+        typing.Optional[str],
+        typer.Option(
+            "--end-time",
+            "-e",
+            help="Retrieve functions starting before this timestamp (ISO 8601 format)",
+        ),
+    ] = None,
+    status: Annotated[
+        typing.Optional[str],
+        typer.Option(
+            "--status",
+            "-S",
+            help="Retrieve functions with this status (PENDING, SUCCESS, ERROR, RETRIES_EXCEEDED, ENQUEUED, or CANCELLED)",
+        ),
+    ] = None,
+    queue_name: Annotated[
+        typing.Optional[str],
+        typer.Option(
+            "--queue-name",
+            "-q",
+            help="Retrieve functions on this queue",
+        ),
+    ] = None,
+    name: Annotated[
+        typing.Optional[str],
+        typer.Option(
+            "--name",
+            "-n",
+            help="Retrieve functions on this queue",
+        ),
+    ] = None,
+    request: Annotated[
+        bool,
+        typer.Option("--request", help="Retrieve workflow request information"),
+    ] = True,
+) -> None:
+    config = load_config(silent=True)
+    workflows = list_queued_workflows(
+        config=config,
+        limit=limit,
+        start_time=start_time,
+        end_time=end_time,
+        queue_name=queue_name,
+        status=status,
+        request=request,
+        name=name,
+    )
+    print(jsonpickle.encode(workflows, unpicklable=False))
+
+
 if __name__ == "__main__":
     app()

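Taken together, the CLI changes add a `dbos workflow queue list` subcommand wired under the existing `workflow` group. One way to exercise it in-process, using typer's test runner so the example stays in Python (flag values are illustrative and a reachable database is assumed):

```python
# Sketch: invoking the new subcommand in-process; roughly equivalent to
# `dbos workflow queue list -q example_queue -S ENQUEUED -l 5` in a shell.
from typer.testing import CliRunner

from dbos.cli.cli import app

runner = CliRunner()
result = runner.invoke(
    app,
    ["workflow", "queue", "list", "-q", "example_queue", "-S", "ENQUEUED", "-l", "5"],
)
print(result.output)
```
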
{dbos-0.21.0a4.dist-info → dbos-0.21.0a7.dist-info}/RECORD
CHANGED

@@ -1,7 +1,7 @@
-dbos-0.21.
-dbos-0.21.
-dbos-0.21.
-dbos-0.21.
+dbos-0.21.0a7.dist-info/METADATA,sha256=76OQDkyQg_N8Yg1zzmRXJ34Kofg1rX7VgTkhX7mMlnI,5309
+dbos-0.21.0a7.dist-info/WHEEL,sha256=thaaA2w1JzcGC48WYufAs8nrYZjJm8LqNfnXFOFyCC4,90
+dbos-0.21.0a7.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
+dbos-0.21.0a7.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
 dbos/__init__.py,sha256=CxRHBHEthPL4PZoLbZhp3rdm44-KkRTT2-7DkK9d4QQ,724
 dbos/_admin_server.py,sha256=PJgneZG9-64TapZrPeJtt73puAswRImCE5uce2k2PKU,4750
 dbos/_app_db.py,sha256=_tv2vmPjjiaikwgxH3mqxgJ4nUUcG2-0uMXKWCqVu1c,5509
@@ -13,8 +13,8 @@ dbos/_context.py,sha256=FHB_fpE4fQt4fIJvAmMMsbY4xHwH77gsW01cFsRZjsE,17779
 dbos/_core.py,sha256=nGiXyYgV8H5TRRZG0e8HCd5IZimufYQLmKNr7nBbwbo,36564
 dbos/_croniter.py,sha256=hbhgfsHBqclUS8VeLnJ9PSE9Z54z6mi4nnrr1aUXn0k,47561
 dbos/_db_wizard.py,sha256=xgKLna0_6Xi50F3o8msRosXba8NScHlpJR5ICVCkHDQ,7534
-dbos/_dbos.py,sha256=
-dbos/_dbos_config.py,sha256=
+dbos/_dbos.py,sha256=wAjdlUgDSIC_Q8D_GZYDoiKaxjtr6KNHeq6DDuUh9do,36340
+dbos/_dbos_config.py,sha256=DfiqVVxNqnafkocSzLqBp1Ig5vCviDTDK_GO3zTtQqI,8298
 dbos/_error.py,sha256=vtaSsG0QW6cRlwfZ4zzZWy_IHCZlomwSlrDyGWuyn8c,4337
 dbos/_fastapi.py,sha256=ke03vqsSYDnO6XeOtOVFXj0-f-v1MGsOxa9McaROvNc,3616
 dbos/_flask.py,sha256=DZKUZR5-xOzPI7tYZ53r2PvvHVoAb8SYwLzMVFsVfjI,2608
@@ -32,7 +32,7 @@ dbos/_migrations/versions/d76646551a6c_workflow_queue.py,sha256=G942nophZ2uC2vc4
 dbos/_migrations/versions/eab0cc1d9a14_job_queue.py,sha256=uvhFOtqbBreCePhAxZfIT0qCAI7BiZTou9wt6QnbY7c,1412
 dbos/_outcome.py,sha256=FDMgWVjZ06vm9xO-38H17mTqBImUYQxgKs_bDCSIAhE,6648
 dbos/_queue.py,sha256=o_aczwualJTMoXb0XXL-Y5QH77OEukWzuerogbWi2ho,2779
-dbos/_recovery.py,sha256=
+dbos/_recovery.py,sha256=rek9rm2CaENbbl_vu3To-BdXop7tMEyGvtoNiJLVxjQ,2772
 dbos/_registrations.py,sha256=mei6q6_3R5uei8i_Wo_TqGZs85s10shOekDX41sFYD0,6642
 dbos/_request.py,sha256=cX1B3Atlh160phgS35gF1VEEV4pD126c9F3BDgBmxZU,929
 dbos/_roles.py,sha256=iOsgmIAf1XVzxs3gYWdGRe1B880YfOw5fpU7Jwx8_A8,2271
@@ -41,7 +41,7 @@ dbos/_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_schemas/application_database.py,sha256=KeyoPrF7hy_ODXV7QNike_VFSD74QBRfQ76D7QyE9HI,966
 dbos/_schemas/system_database.py,sha256=rwp4EvCSaXcUoMaRczZCvETCxGp72k3-hvLyGUDkih0,5163
 dbos/_serialization.py,sha256=YCYv0qKAwAZ1djZisBC7khvKqG-5OcIv9t9EC5PFIog,1743
-dbos/_sys_db.py,sha256=
+dbos/_sys_db.py,sha256=U5rXoS2gA4vm8YT6Rja_YyP2EXWLlo1HqDka1tnpRjk,59460
 dbos/_templates/dbos-db-starter/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
 dbos/_templates/dbos-db-starter/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_templates/dbos-db-starter/__package/main.py,sha256=eI0SS9Nwj-fldtiuSzIlIG6dC91GXXwdRsoHxv6S_WI,2719
@@ -53,11 +53,11 @@ dbos/_templates/dbos-db-starter/migrations/script.py.mako,sha256=MEqL-2qATlST9TA
 dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py,sha256=U5thFWGqNN4QLrNXT7wUUqftIFDNE5eSdqD8JNW1mec,942
 dbos/_templates/dbos-db-starter/start_postgres_docker.py,sha256=lQVLlYO5YkhGPEgPqwGc7Y8uDKse9HsWv5fynJEFJHM,1681
 dbos/_tracer.py,sha256=rvBY1RQU6DO7rL7EnaJJxGcmd4tP_PpGqUEE6imZnhY,2518
-dbos/_workflow_commands.py,sha256=
+dbos/_workflow_commands.py,sha256=gAynfrq5sAMhdNpMIphiAm_hC2-xk1ZyWEYA-whtfPs,5402
 dbos/cli/_github_init.py,sha256=Y_bDF9gfO2jB1id4FV5h1oIxEJRWyqVjhb7bNEa5nQ0,3224
 dbos/cli/_template_init.py,sha256=AfuMaO8bmr9WsPNHr6j2cp7kjVVZDUpH7KpbTg0hhFs,2722
-dbos/cli/cli.py,sha256=
+dbos/cli/cli.py,sha256=_tXw2IQrWW7fV_h51f_R99vEBSi6aMLz-vCOxKaENiQ,14155
 dbos/dbos-config.schema.json,sha256=X5TpXNcARGceX0zQs0fVgtZW_Xj9uBbY5afPt9Rz9yk,5741
 dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
 version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
-dbos-0.21.
+dbos-0.21.0a7.dist-info/RECORD,,

{dbos-0.21.0a4.dist-info → dbos-0.21.0a7.dist-info}/WHEEL
File without changes

{dbos-0.21.0a4.dist-info → dbos-0.21.0a7.dist-info}/entry_points.txt
File without changes

{dbos-0.21.0a4.dist-info → dbos-0.21.0a7.dist-info}/licenses/LICENSE
File without changes