dbos 0.23.0a3__py3-none-any.whl → 0.23.0a8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of dbos might be problematic.
- dbos/_core.py +2 -0
- dbos/_croniter.py +2 -2
- dbos/_migrations/versions/5c361fc04708_added_system_tables.py +1 -1
- dbos/_sys_db.py +54 -114
- dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py +1 -1
- dbos/_workflow_commands.py +77 -108
- dbos/cli/cli.py +41 -20
- {dbos-0.23.0a3.dist-info → dbos-0.23.0a8.dist-info}/METADATA +6 -3
- {dbos-0.23.0a3.dist-info → dbos-0.23.0a8.dist-info}/RECORD +12 -12
- {dbos-0.23.0a3.dist-info → dbos-0.23.0a8.dist-info}/WHEEL +0 -0
- {dbos-0.23.0a3.dist-info → dbos-0.23.0a8.dist-info}/entry_points.txt +0 -0
- {dbos-0.23.0a3.dist-info → dbos-0.23.0a8.dist-info}/licenses/LICENSE +0 -0
dbos/_core.py
CHANGED
dbos/_croniter.py
CHANGED
@@ -5,14 +5,14 @@ Copyright (C) 2010-2012 Matsumoto Taichi.
 
 Permission is hereby granted, free of charge, to any person obtaining a copy of this
 software and associated documentation files (the "Software"), to deal in the Software
-without restriction, including without limitation the rights to use, copy, modify,
+without restriction, including without limitation the rights to use, copy, modify,
 merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit
 persons to whom the Software is furnished to do so, subject to the following conditions:
 
 The above copyright notice and this permission notice shall be included in all
 copies or substantial portions of the Software.
 
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
 INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
 PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
 FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR

dbos/_sys_db.py
CHANGED
@@ -14,9 +14,7 @@ from typing import (
     Optional,
     Sequence,
     Set,
-    Tuple,
     TypedDict,
-    cast,
 )
 
 import psycopg
@@ -68,17 +66,19 @@ class WorkflowStatusInternal(TypedDict):
     name: str
     class_name: Optional[str]
     config_name: Optional[str]
+    authenticated_user: Optional[str]
+    assumed_role: Optional[str]
+    authenticated_roles: Optional[str]  # JSON list of roles
     output: Optional[str]  # JSON (jsonpickle)
+    request: Optional[str]  # JSON (jsonpickle)
     error: Optional[str]  # JSON (jsonpickle)
+    created_at: Optional[int]  # Unix epoch timestamp in ms
+    updated_at: Optional[int]  # Unix epoch timestamp in ms
+    queue_name: Optional[str]
     executor_id: Optional[str]
     app_version: Optional[str]
     app_id: Optional[str]
-    request: Optional[str]  # JSON (jsonpickle)
     recovery_attempts: Optional[int]
-    authenticated_user: Optional[str]
-    assumed_role: Optional[str]
-    authenticated_roles: Optional[str]  # JSON list of roles.
-    queue_name: Optional[str]
 
 
 class RecordedResult(TypedDict):
@@ -104,19 +104,12 @@ class GetWorkflowsInput:
     Structure for argument to `get_workflows` function.
 
     This specifies the search criteria for workflow retrieval by `get_workflows`.
-
-    Attributes:
-        name(str): The name of the workflow function
-        authenticated_user(str): The name of the user who invoked the function
-        start_time(str): Beginning of search range for time of invocation, in ISO 8601 format
-        end_time(str): End of search range for time of invocation, in ISO 8601 format
-        status(str): Current status of the workflow invocation (see `WorkflowStatusString`)
-        application_version(str): Application version that invoked the workflow
-        limit(int): Limit on number of returned records
-
     """
 
     def __init__(self) -> None:
+        self.workflow_ids: Optional[List[str]] = (
+            None  # Search only in these workflow IDs
+        )
         self.name: Optional[str] = None  # The name of the workflow function
         self.authenticated_user: Optional[str] = None  # The user who ran the workflow.
         self.start_time: Optional[str] = None  # Timestamp in ISO 8601 format
@@ -128,15 +121,21 @@ class GetWorkflowsInput:
         self.limit: Optional[int] = (
             None  # Return up to this many workflows IDs. IDs are ordered by workflow creation time.
         )
+        self.offset: Optional[int] = (
+            None  # Offset into the matching records for pagination
+        )
+        self.sort_desc: bool = False  # If true, sort by created_at in DESC order. Default false (in ASC order).
 
 
 class GetQueuedWorkflowsInput(TypedDict):
-    queue_name: Optional[str]
-    status: Optional[str]
+    queue_name: Optional[str]  # Get workflows belonging to this queue
+    status: Optional[str]  # Get workflows with this status
     start_time: Optional[str]  # Timestamp in ISO 8601 format
     end_time: Optional[str]  # Timestamp in ISO 8601 format
     limit: Optional[int]  # Return up to this many workflows IDs.
+    offset: Optional[int]  # Offset into the matching records for pagination
     name: Optional[str]  # The name of the workflow function
+    sort_desc: Optional[bool]  # Sort by created_at in DESC or ASC order
 
 
 class GetWorkflowsOutput:
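
Illustration (not part of the diff): a minimal sketch of how a caller might use the new `workflow_ids`, `offset`, and `sort_desc` fields added to `GetWorkflowsInput` above. It assumes a `dbos-config.yaml` is present so `load_config()` can construct a `SystemDatabase`; the workflow name is hypothetical.

```python
from dbos import load_config
from dbos._sys_db import GetWorkflowsInput, SystemDatabase

config = load_config(silent=True)
sys_db = SystemDatabase(config)
try:
    query = GetWorkflowsInput()
    query.name = "example_workflow"  # hypothetical workflow function name
    query.sort_desc = True           # newest first (new field in this release)
    query.limit = 20                 # page size
    query.offset = 40                # skip the first two pages (new field)

    output = sys_db.get_workflows(query)
    print(output.workflow_uuids)
finally:
    sys_db.destroy()
```
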
@@ -150,25 +149,6 @@ class GetPendingWorkflowsOutput:
         self.queue_name: Optional[str] = queue_name
 
 
-class WorkflowInformation(TypedDict, total=False):
-    workflow_uuid: str
-    status: WorkflowStatuses  # The status of the workflow.
-    name: str  # The name of the workflow function.
-    workflow_class_name: str  # The class name holding the workflow function.
-    workflow_config_name: (
-        str  # The name of the configuration, if the class needs configuration
-    )
-    authenticated_user: str  # The user who ran the workflow. Empty string if not set.
-    assumed_role: str
-    # The role used to run this workflow. Empty string if authorization is not required.
-    authenticated_roles: List[str]
-    # All roles the authenticated user has, if any.
-    input: Optional[_serialization.WorkflowInputs]
-    output: Optional[str]
-    error: Optional[str]
-    request: Optional[str]
-
-
 _dbos_null_topic = "__null__topic__"
 _buffer_flush_batch_size = 100
 _buffer_flush_interval_secs = 1.0
@@ -489,27 +469,33 @@ class SystemDatabase:
                     SystemSchema.workflow_status.c.assumed_role,
                     SystemSchema.workflow_status.c.queue_name,
                     SystemSchema.workflow_status.c.executor_id,
+                    SystemSchema.workflow_status.c.created_at,
+                    SystemSchema.workflow_status.c.updated_at,
+                    SystemSchema.workflow_status.c.application_version,
+                    SystemSchema.workflow_status.c.application_id,
                 ).where(SystemSchema.workflow_status.c.workflow_uuid == workflow_uuid)
             ).fetchone()
             if row is None:
                 return None
            status: WorkflowStatusInternal = {
                 "workflow_uuid": workflow_uuid,
-                "status": row[0],
-                "name": row[1],
-                "class_name": row[5],
-                "config_name": row[4],
                 "output": None,
                 "error": None,
-                "
-                "
-                "executor_id": row[10],
+                "status": row[0],
+                "name": row[1],
                 "request": row[2],
                 "recovery_attempts": row[3],
+                "config_name": row[4],
+                "class_name": row[5],
                 "authenticated_user": row[6],
                 "authenticated_roles": row[7],
                 "assumed_role": row[8],
                 "queue_name": row[9],
+                "executor_id": row[10],
+                "created_at": row[11],
+                "updated_at": row[12],
+                "app_version": row[13],
+                "app_id": row[14],
             }
             return status
 
@@ -538,47 +524,6 @@ class SystemDatabase:
             )
             return stat
 
-    def get_workflow_status_w_outputs(
-        self, workflow_uuid: str
-    ) -> Optional[WorkflowStatusInternal]:
-        with self.engine.begin() as c:
-            row = c.execute(
-                sa.select(
-                    SystemSchema.workflow_status.c.status,
-                    SystemSchema.workflow_status.c.name,
-                    SystemSchema.workflow_status.c.request,
-                    SystemSchema.workflow_status.c.output,
-                    SystemSchema.workflow_status.c.error,
-                    SystemSchema.workflow_status.c.config_name,
-                    SystemSchema.workflow_status.c.class_name,
-                    SystemSchema.workflow_status.c.authenticated_user,
-                    SystemSchema.workflow_status.c.authenticated_roles,
-                    SystemSchema.workflow_status.c.assumed_role,
-                    SystemSchema.workflow_status.c.queue_name,
-                ).where(SystemSchema.workflow_status.c.workflow_uuid == workflow_uuid)
-            ).fetchone()
-            if row is None:
-                return None
-            status: WorkflowStatusInternal = {
-                "workflow_uuid": workflow_uuid,
-                "status": row[0],
-                "name": row[1],
-                "config_name": row[5],
-                "class_name": row[6],
-                "output": row[3],
-                "error": row[4],
-                "app_id": None,
-                "app_version": None,
-                "executor_id": None,
-                "request": row[2],
-                "recovery_attempts": None,
-                "authenticated_user": row[7],
-                "authenticated_roles": row[8],
-                "assumed_role": row[9],
-                "queue_name": row[10],
-            }
-            return status
-
     def await_workflow_result_internal(self, workflow_uuid: str) -> dict[str, Any]:
         polling_interval_secs: float = 1.000
 
@@ -625,21 +570,6 @@ class SystemDatabase:
             raise _serialization.deserialize_exception(stat["error"])
         return None
 
-    def get_workflow_info(
-        self, workflow_uuid: str, get_request: bool
-    ) -> Optional[WorkflowInformation]:
-        stat = self.get_workflow_status_w_outputs(workflow_uuid)
-        if stat is None:
-            return None
-        info = cast(WorkflowInformation, stat)
-        input = self.get_workflow_inputs(workflow_uuid)
-        if input is not None:
-            info["input"] = input
-        if not get_request:
-            info.pop("request", None)
-
-        return info
-
     def update_workflow_inputs(
         self, workflow_uuid: str, inputs: str, conn: Optional[sa.Connection] = None
     ) -> None:
@@ -688,9 +618,11 @@ class SystemDatabase:
         return inputs
 
     def get_workflows(self, input: GetWorkflowsInput) -> GetWorkflowsOutput:
-        query = sa.select(SystemSchema.workflow_status.c.workflow_uuid)
-
-
+        query = sa.select(SystemSchema.workflow_status.c.workflow_uuid)
+        if input.sort_desc:
+            query = query.order_by(SystemSchema.workflow_status.c.created_at.desc())
+        else:
+            query = query.order_by(SystemSchema.workflow_status.c.created_at.asc())
         if input.name:
             query = query.where(SystemSchema.workflow_status.c.name == input.name)
         if input.authenticated_user:
@@ -715,28 +647,34 @@ class SystemDatabase:
                 SystemSchema.workflow_status.c.application_version
                 == input.application_version
             )
+        if input.workflow_ids:
+            query = query.where(
+                SystemSchema.workflow_status.c.workflow_uuid.in_(input.workflow_ids)
+            )
         if input.limit:
             query = query.limit(input.limit)
+        if input.offset:
+            query = query.offset(input.offset)
 
         with self.engine.begin() as c:
             rows = c.execute(query)
-
+            workflow_ids = [row[0] for row in rows]
 
-        return GetWorkflowsOutput(
+        return GetWorkflowsOutput(workflow_ids)
 
     def get_queued_workflows(
         self, input: GetQueuedWorkflowsInput
     ) -> GetWorkflowsOutput:
 
-        query = (
-
-            .
-
-            SystemSchema.workflow_queue.c.workflow_uuid
-            == SystemSchema.workflow_status.c.workflow_uuid,
-            )
-            .order_by(SystemSchema.workflow_status.c.created_at.asc())
+        query = sa.select(SystemSchema.workflow_queue.c.workflow_uuid).join(
+            SystemSchema.workflow_status,
+            SystemSchema.workflow_queue.c.workflow_uuid
+            == SystemSchema.workflow_status.c.workflow_uuid,
         )
+        if input["sort_desc"]:
+            query = query.order_by(SystemSchema.workflow_status.c.created_at.desc())
+        else:
+            query = query.order_by(SystemSchema.workflow_status.c.created_at.asc())
 
         if input.get("name"):
             query = query.where(SystemSchema.workflow_status.c.name == input["name"])
@@ -763,6 +701,8 @@ class SystemDatabase:
             )
         if input.get("limit"):
             query = query.limit(input["limit"])
+        if input.get("offset"):
+            query = query.offset(input["offset"])
 
         with self.engine.begin() as c:
             rows = c.execute(query)

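Illustration (not part of the diff): a sketch of offset-based paging over a queue using the new `offset` and `sort_desc` keys of `GetQueuedWorkflowsInput`. The queue name is hypothetical; note that `get_queued_workflows` indexes `input["sort_desc"]` directly, so the key must be supplied.

```python
from dbos import load_config
from dbos._sys_db import GetQueuedWorkflowsInput, SystemDatabase

config = load_config(silent=True)
sys_db = SystemDatabase(config)
try:
    page_size = 50
    page = 0
    while True:
        queued_input: GetQueuedWorkflowsInput = {
            "queue_name": "example_queue",  # hypothetical queue name
            "status": "PENDING",
            "start_time": None,
            "end_time": None,
            "limit": page_size,
            "offset": page * page_size,  # new key in this release
            "name": None,
            "sort_desc": False,  # False = oldest first, True = newest first
        }
        output = sys_db.get_queued_workflows(queued_input)
        if not output.workflow_uuids:
            break
        for workflow_id in output.workflow_uuids:
            print(workflow_id)
        page += 1
finally:
    sys_db.destroy()
```
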
dbos/_workflow_commands.py
CHANGED
@@ -4,6 +4,7 @@ import typer
 
 from . import _serialization
 from ._dbos_config import ConfigFile
+from ._logger import dbos_logger
 from ._sys_db import (
     GetQueuedWorkflowsInput,
     GetWorkflowsInput,
@@ -14,69 +15,67 @@ from ._sys_db import (
 
 
 class WorkflowInformation:
-
+    workflow_id: str
     status: WorkflowStatuses
-
-
-
+    workflow_name: str
+    workflow_class_name: Optional[str]
+    workflow_config_name: Optional[str]
+    authenticated_user: Optional[str]
+    assumed_role: Optional[str]
+    authenticated_roles: Optional[str]  # JSON list of roles.
     input: Optional[_serialization.WorkflowInputs]  # JSON (jsonpickle)
     output: Optional[str] = None  # JSON (jsonpickle)
+    request: Optional[str]  # JSON (jsonpickle)
     error: Optional[str] = None  # JSON (jsonpickle)
+    created_at: Optional[int]  # Unix epoch timestamp in ms
+    updated_at: Optional[int]  # Unix epoch timestamp in ms
+    queue_name: Optional[str]
     executor_id: Optional[str]
     app_version: Optional[str]
     app_id: Optional[str]
-    request: Optional[str]  # JSON (jsonpickle)
     recovery_attempts: Optional[int]
-    authenticated_user: Optional[str]
-    assumed_role: Optional[str]
-    authenticated_roles: Optional[str]  # JSON list of roles.
-    queue_name: Optional[str]
 
 
 def list_workflows(
-
-
-
-
-
-
-
-
-
+    sys_db: SystemDatabase,
+    *,
+    workflow_ids: Optional[List[str]] = None,
+    user: Optional[str] = None,
+    start_time: Optional[str] = None,
+    end_time: Optional[str] = None,
+    status: Optional[str] = None,
+    request: bool = False,
+    app_version: Optional[str] = None,
+    name: Optional[str] = None,
+    limit: Optional[int] = None,
+    offset: Optional[int] = None,
+    sort_desc: bool = False,
 ) -> List[WorkflowInformation]:
-
-
-
-
-
-
-        input.
-
-
-
-
-
-
-
-
-
-
-
-        )
-
-            infos.append(info)
-
-        return infos
-    except Exception as e:
-        typer.echo(f"Error listing workflows: {e}")
-        return []
-    finally:
-        if sys_db:
-            sys_db.destroy()
+    input = GetWorkflowsInput()
+    input.workflow_ids = workflow_ids
+    input.authenticated_user = user
+    input.start_time = start_time
+    input.end_time = end_time
+    if status is not None:
+        input.status = cast(WorkflowStatuses, status)
+    input.application_version = app_version
+    input.limit = limit
+    input.name = name
+    input.offset = offset
+    input.sort_desc = sort_desc
+
+    output: GetWorkflowsOutput = sys_db.get_workflows(input)
+    infos: List[WorkflowInformation] = []
+    for workflow_id in output.workflow_uuids:
+        info = get_workflow(sys_db, workflow_id, request)  # Call the method for each ID
+        if info is not None:
+            infos.append(info)
+    return infos
 
 
 def list_queued_workflows(
-
+    sys_db: SystemDatabase,
+    *,
     limit: Optional[int] = None,
     start_time: Optional[str] = None,
     end_time: Optional[str] = None,
@@ -84,62 +83,29 @@ def list_queued_workflows(
     status: Optional[str] = None,
     name: Optional[str] = None,
     request: bool = False,
+    offset: Optional[int] = None,
+    sort_desc: bool = False,
 ) -> List[WorkflowInformation]:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        )
-
-            infos.append(info)
-        return infos
-    except Exception as e:
-        typer.echo(f"Error listing workflows: {e}")
-        return []
-    finally:
-        if sys_db:
-            sys_db.destroy()
+    input: GetQueuedWorkflowsInput = {
+        "queue_name": queue_name,
+        "start_time": start_time,
+        "end_time": end_time,
+        "status": status,
+        "limit": limit,
+        "name": name,
+        "offset": offset,
+        "sort_desc": sort_desc,
+    }
+    output: GetWorkflowsOutput = sys_db.get_queued_workflows(input)
+    infos: List[WorkflowInformation] = []
+    for workflow_id in output.workflow_uuids:
+        info = get_workflow(sys_db, workflow_id, request)  # Call the method for each ID
+        if info is not None:
+            infos.append(info)
+    return infos
 
 
 def get_workflow(
-    config: ConfigFile, uuid: str, request: bool
-) -> Optional[WorkflowInformation]:
-    try:
-        sys_db = SystemDatabase(config)
-        info = _get_workflow_info(sys_db, uuid, request)
-        return info
-    except Exception as e:
-        typer.echo(f"Error getting workflow: {e}")
-        return None
-    finally:
-        if sys_db:
-            sys_db.destroy()
-
-
-def cancel_workflow(config: ConfigFile, uuid: str) -> None:
-    try:
-        sys_db = SystemDatabase(config)
-        sys_db.cancel_workflow(uuid)
-    except Exception as e:
-        typer.echo(f"Failed to connect to DBOS system database: {e}")
-        raise e
-    finally:
-        if sys_db:
-            sys_db.destroy()
-
-
-def _get_workflow_info(
     sys_db: SystemDatabase, workflowUUID: str, getRequest: bool
 ) -> Optional[WorkflowInformation]:
 
@@ -149,19 +115,22 @@ def _get_workflow_info(
 
     winfo = WorkflowInformation()
 
-    winfo.
+    winfo.workflow_id = workflowUUID
     winfo.status = info["status"]
-    winfo.
-    winfo.
-    winfo.
-    winfo.executor_id = info["executor_id"]
-    winfo.app_version = info["app_version"]
-    winfo.app_id = info["app_id"]
-    winfo.recovery_attempts = info["recovery_attempts"]
+    winfo.workflow_name = info["name"]
+    winfo.workflow_class_name = info["class_name"]
+    winfo.workflow_config_name = info["config_name"]
     winfo.authenticated_user = info["authenticated_user"]
     winfo.assumed_role = info["assumed_role"]
     winfo.authenticated_roles = info["authenticated_roles"]
+    winfo.request = info["request"]
+    winfo.created_at = info["created_at"]
+    winfo.updated_at = info["updated_at"]
     winfo.queue_name = info["queue_name"]
+    winfo.executor_id = info["executor_id"]
+    winfo.app_version = info["app_version"]
+    winfo.app_id = info["app_id"]
+    winfo.recovery_attempts = info["recovery_attempts"]
 
     # no input field
     input_data = sys_db.get_workflow_inputs(workflowUUID)

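Illustration (not part of the diff): the reworked `list_workflows` and `get_workflow` helpers now take an already-constructed `SystemDatabase` plus keyword-only filters, mirroring how the CLI calls them below. A minimal sketch, assuming a `dbos-config.yaml` for `load_config()`; the user name and workflow ID are hypothetical.

```python
from dbos import load_config
from dbos._sys_db import SystemDatabase
from dbos._workflow_commands import get_workflow, list_workflows

config = load_config(silent=True)
sys_db = SystemDatabase(config)
try:
    # Ten most recent workflows for one user, newest first (sort_desc is new).
    infos = list_workflows(
        sys_db,
        user="alice",
        limit=10,
        sort_desc=True,
    )
    for info in infos:
        print(info.workflow_id, info.status, info.workflow_name)

    # Look up a single workflow by ID, without its request payload.
    one = get_workflow(sys_db, "00000000-0000-0000-0000-000000000000", False)
    print(one)
finally:
    sys_db.destroy()
```
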
dbos/cli/cli.py
CHANGED
@@ -19,12 +19,7 @@ from .. import load_config
 from .._app_db import ApplicationDatabase
 from .._dbos_config import _is_valid_app_name
 from .._sys_db import SystemDatabase, reset_system_database
-from .._workflow_commands import
-    cancel_workflow,
-    get_workflow,
-    list_queued_workflows,
-    list_workflows,
-)
+from .._workflow_commands import get_workflow, list_queued_workflows, list_workflows
 from ..cli._github_init import create_template_from_github
 from ._template_init import copy_template, get_project_name, get_templates_directory
 
@@ -290,25 +285,37 @@ def list(
     request: Annotated[
         bool,
         typer.Option("--request", help="Retrieve workflow request information"),
-    ] =
+    ] = False,
 ) -> None:
     config = load_config(silent=True)
+    sys_db = SystemDatabase(config)
     workflows = list_workflows(
-
+        sys_db,
+        limit=limit,
+        user=user,
+        start_time=starttime,
+        end_time=endtime,
+        status=status,
+        request=request,
+        app_version=appversion,
+        name=name,
     )
     print(jsonpickle.encode(workflows, unpicklable=False))
 
 
 @workflow.command(help="Retrieve the status of a workflow")
 def get(
-
+    workflow_id: Annotated[str, typer.Argument()],
     request: Annotated[
         bool,
         typer.Option("--request", help="Retrieve workflow request information"),
-    ] =
+    ] = False,
 ) -> None:
     config = load_config(silent=True)
-
+    sys_db = SystemDatabase(config)
+    print(
+        jsonpickle.encode(get_workflow(sys_db, workflow_id, request), unpicklable=False)
+    )
 
 
 @workflow.command(
@@ -316,10 +323,23 @@ def get(
 )
 def cancel(
     uuid: Annotated[str, typer.Argument()],
+    host: Annotated[
+        typing.Optional[str],
+        typer.Option("--host", "-H", help="Specify the admin host"),
+    ] = "localhost",
+    port: Annotated[
+        typing.Optional[int],
+        typer.Option("--port", "-p", help="Specify the admin port"),
+    ] = 3001,
 ) -> None:
-
-
-
+    response = requests.post(
+        f"http://{host}:{port}/workflows/{uuid}/cancel", json=[], timeout=5
+    )
+
+    if response.status_code == 204:
+        print(f"Workflow {uuid} has been cancelled")
+    else:
+        print(f"Failed to cancel workflow {uuid}. Status code: {response.status_code}")
 
 
 @workflow.command(help="Resume a workflow that has been cancelled")
@@ -327,7 +347,7 @@ def resume(
     uuid: Annotated[str, typer.Argument()],
     host: Annotated[
         typing.Optional[str],
-        typer.Option("--host", "-
+        typer.Option("--host", "-H", help="Specify the admin host"),
     ] = "localhost",
     port: Annotated[
         typing.Optional[int],
@@ -338,7 +358,7 @@ def resume(
         f"http://{host}:{port}/workflows/{uuid}/resume", json=[], timeout=5
     )
 
-    if response.status_code ==
+    if response.status_code == 204:
         print(f"Workflow {uuid} has been resumed")
     else:
         print(f"Failed to resume workflow {uuid}. Status code: {response.status_code}")
@@ -349,7 +369,7 @@ def restart(
     uuid: Annotated[str, typer.Argument()],
     host: Annotated[
         typing.Optional[str],
-        typer.Option("--host", "-
+        typer.Option("--host", "-H", help="Specify the admin host"),
     ] = "localhost",
     port: Annotated[
         typing.Optional[int],
@@ -360,7 +380,7 @@ def restart(
         f"http://{host}:{port}/workflows/{uuid}/restart", json=[], timeout=5
     )
 
-    if response.status_code ==
+    if response.status_code == 204:
         print(f"Workflow {uuid} has been restarted")
     else:
         print(f"Failed to resume workflow {uuid}. Status code: {response.status_code}")
@@ -415,11 +435,12 @@ def list_queue(
     request: Annotated[
         bool,
         typer.Option("--request", help="Retrieve workflow request information"),
-    ] =
+    ] = False,
 ) -> None:
     config = load_config(silent=True)
+    sys_db = SystemDatabase(config)
     workflows = list_queued_workflows(
-
+        sys_db=sys_db,
         limit=limit,
         start_time=start_time,
         end_time=end_time,

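Illustration (not part of the diff): the new `cancel` command goes through the DBOS admin server's HTTP API rather than writing to the system database directly, just as `resume` and `restart` already do. A standalone sketch of the same request, assuming an app with the admin server listening on the default `localhost:3001`; the workflow ID is hypothetical.

```python
import requests

host, port = "localhost", 3001
workflow_id = "00000000-0000-0000-0000-000000000000"  # hypothetical workflow ID

# POST to the admin server's cancel endpoint; a 204 means the cancel succeeded.
response = requests.post(
    f"http://{host}:{port}/workflows/{workflow_id}/cancel", json=[], timeout=5
)
if response.status_code == 204:
    print(f"Workflow {workflow_id} has been cancelled")
else:
    print(f"Failed to cancel workflow {workflow_id}. Status code: {response.status_code}")
```
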
{dbos-0.23.0a3.dist-info → dbos-0.23.0a8.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dbos
-Version: 0.23.
+Version: 0.23.0a8
 Summary: Ultra-lightweight durable execution in Python
 Author-Email: "DBOS, Inc." <contact@dbos.dev>
 License: MIT
@@ -78,6 +78,9 @@ You can use DBOS to add reliable background jobs or cron scheduling or queues to
 Install and configure with:
 
 ```shell
+python3 -m venv dbos-example/.venv
+cd dbos-example
+source .venv/bin/activate
 pip install dbos
 dbos init --config
 ```
@@ -103,7 +106,7 @@ def step_two():
 def dbos_workflow():
     step_one()
     for _ in range(5):
-        print("Press Control +
+        print("Press Control + C twice to stop the app...")
         DBOS.sleep(1)
     step_two()
 
@@ -114,7 +117,7 @@ def fastapi_endpoint():
 
 Save the program into `main.py` and start it with `fastapi run`.
 Visit `localhost:8000` in your browser to start the workflow.
-When prompted, press `Control +
+When prompted, press `Control + C` (You may need to press `Control + C` twice quickly, or press `Control + \`, if `Control + C` is not effective in your environment) to force quit your application.
 It should crash midway through the workflow, having completed step one but not step two.
 Then, restart your app with `fastapi run`.
 It should resume the workflow from where it left off, completing step two without re-executing step one.

{dbos-0.23.0a3.dist-info → dbos-0.23.0a8.dist-info}/RECORD
CHANGED
@@ -1,7 +1,7 @@
-dbos-0.23.
-dbos-0.23.
-dbos-0.23.
-dbos-0.23.
+dbos-0.23.0a8.dist-info/METADATA,sha256=BKJpwbpeqY4J_-TQ6XHnlBaUUNPpnk63uHr0R8Cht3M,5523
+dbos-0.23.0a8.dist-info/WHEEL,sha256=thaaA2w1JzcGC48WYufAs8nrYZjJm8LqNfnXFOFyCC4,90
+dbos-0.23.0a8.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
+dbos-0.23.0a8.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
 dbos/__init__.py,sha256=CxRHBHEthPL4PZoLbZhp3rdm44-KkRTT2-7DkK9d4QQ,724
 dbos/_admin_server.py,sha256=YiVn5lywz2Vg8_juyNHOYl0HVEy48--7b4phwK7r92o,5732
 dbos/_app_db.py,sha256=_tv2vmPjjiaikwgxH3mqxgJ4nUUcG2-0uMXKWCqVu1c,5509
@@ -10,8 +10,8 @@ dbos/_cloudutils/authentication.py,sha256=V0fCWQN9stCkhbuuxgPTGpvuQcDqfU3KAxPAh0
 dbos/_cloudutils/cloudutils.py,sha256=YC7jGsIopT0KveLsqbRpQk2KlRBk-nIRC_UCgep4f3o,7797
 dbos/_cloudutils/databases.py,sha256=_shqaqSvhY4n2ScgQ8IP5PDZvzvcx3YBKV8fj-cxhSY,8543
 dbos/_context.py,sha256=Ue5qu3rzLfRmPkz-UUZi9ZS8iXpapRN0NTM4mbA2QmQ,17738
-dbos/_core.py,sha256=
-dbos/_croniter.py,sha256=
+dbos/_core.py,sha256=MWIa8r-KwnadYQtGSod2KdAaeQ4gTJAUMPhMGaM0u2c,36613
+dbos/_croniter.py,sha256=XHAyUyibs_59sJQfSNWkP7rqQY6_XrlfuuCxk4jYqek,47559
 dbos/_db_wizard.py,sha256=6tfJaCRa1NtkUdNW75a2yvi_mEgnPJ9C1HP2zPG1hCU,8067
 dbos/_dbos.py,sha256=JNAFYQ3kVjnZsUl0qJ-JWeaSHKI51VGE3JBXdaPD8Oo,39054
 dbos/_dbos_config.py,sha256=DfiqVVxNqnafkocSzLqBp1Ig5vCviDTDK_GO3zTtQqI,8298
@@ -25,7 +25,7 @@ dbos/_migrations/env.py,sha256=38SIGVbmn_VV2x2u1aHLcPOoWgZ84eCymf3g_NljmbU,1626
 dbos/_migrations/script.py.mako,sha256=MEqL-2qATlST9TAOeYgscMn1uy6HUS9NFvDgl93dMj8,635
 dbos/_migrations/versions/04ca4f231047_workflow_queues_executor_id.py,sha256=ICLPl8CN9tQXMsLDsAj8z1TsL831-Z3F8jSBvrR-wyw,736
 dbos/_migrations/versions/50f3227f0b4b_fix_job_queue.py,sha256=ZBYrtTdxy64HxIAlOes89fVIk2P1gNaJack7wuC_epg,873
-dbos/_migrations/versions/5c361fc04708_added_system_tables.py,sha256=
+dbos/_migrations/versions/5c361fc04708_added_system_tables.py,sha256=Xr9hBDJjkAtymlauOmAy00yUHj0VVUaEz7kNwEM9IwE,6403
 dbos/_migrations/versions/a3b18ad34abe_added_triggers.py,sha256=Rv0ZsZYZ_WdgGEULYsPfnp4YzaO5L198gDTgYY39AVA,2022
 dbos/_migrations/versions/d76646551a6b_job_queue_limiter.py,sha256=8PyFi8rd6CN-mUro43wGhsg5wcQWKZPRHD6jw8R5pVc,986
 dbos/_migrations/versions/d76646551a6c_workflow_queue.py,sha256=G942nophZ2uC2vc4hGBC02Ptng1715roTjY3xiyzZU4,729
@@ -41,7 +41,7 @@ dbos/_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_schemas/application_database.py,sha256=KeyoPrF7hy_ODXV7QNike_VFSD74QBRfQ76D7QyE9HI,966
 dbos/_schemas/system_database.py,sha256=rwp4EvCSaXcUoMaRczZCvETCxGp72k3-hvLyGUDkih0,5163
 dbos/_serialization.py,sha256=YCYv0qKAwAZ1djZisBC7khvKqG-5OcIv9t9EC5PFIog,1743
-dbos/_sys_db.py,sha256=
+dbos/_sys_db.py,sha256=9Knaq-zsnTebFx0vS6vIwxL-tFITpUf7Bh-xUSB7cyE,60637
 dbos/_templates/dbos-db-starter/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
 dbos/_templates/dbos-db-starter/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_templates/dbos-db-starter/__package/main.py,sha256=eI0SS9Nwj-fldtiuSzIlIG6dC91GXXwdRsoHxv6S_WI,2719
@@ -50,15 +50,15 @@ dbos/_templates/dbos-db-starter/alembic.ini,sha256=VKBn4Gy8mMuCdY7Hip1jmo3wEUJ1V
 dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos,sha256=OMlcpdYUJKjyAme7phOz3pbn9upcIRjm42iwEThWUEQ,495
 dbos/_templates/dbos-db-starter/migrations/env.py.dbos,sha256=GUV6sjkDzf9Vl6wkGEd0RSkK-ftRfV6EUwSQdd0qFXg,2392
 dbos/_templates/dbos-db-starter/migrations/script.py.mako,sha256=MEqL-2qATlST9TAOeYgscMn1uy6HUS9NFvDgl93dMj8,635
-dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py,sha256=
+dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py,sha256=MpS7LGaJS0CpvsjhfDkp9EJqvMvVCjRPfUp4c0aE2ys,941
 dbos/_templates/dbos-db-starter/start_postgres_docker.py,sha256=lQVLlYO5YkhGPEgPqwGc7Y8uDKse9HsWv5fynJEFJHM,1681
 dbos/_tracer.py,sha256=_Id9j9kCrptSNpEpLiRk_g5VPp-DrTWP1WNZInd5BA4,2439
 dbos/_utils.py,sha256=wjOJzxN66IzL9p4dwcEmQACRQah_V09G6mJI2exQfOM,155
-dbos/_workflow_commands.py,sha256=
+dbos/_workflow_commands.py,sha256=Z1PwprvR_A8PXV2FNhcMrvV8B4NlDI9dc5naMeeNKGw,4774
 dbos/cli/_github_init.py,sha256=Y_bDF9gfO2jB1id4FV5h1oIxEJRWyqVjhb7bNEa5nQ0,3224
 dbos/cli/_template_init.py,sha256=AfuMaO8bmr9WsPNHr6j2cp7kjVVZDUpH7KpbTg0hhFs,2722
-dbos/cli/cli.py,sha256=
+dbos/cli/cli.py,sha256=BJWFT94I14uKTmMYSI4ITscPMBgidgjV0RBx5_LyNKI,14849
 dbos/dbos-config.schema.json,sha256=X5TpXNcARGceX0zQs0fVgtZW_Xj9uBbY5afPt9Rz9yk,5741
 dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
 version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
-dbos-0.23.
+dbos-0.23.0a8.dist-info/RECORD,,

File without changes
File without changes
File without changes