dbos 0.22.0a11__py3-none-any.whl → 0.23.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dbos/__main__.py +26 -0
- dbos/_app_db.py +29 -24
- dbos/_conductor/conductor.py +213 -0
- dbos/_conductor/protocol.py +197 -0
- dbos/_context.py +3 -1
- dbos/_core.py +73 -26
- dbos/_croniter.py +2 -2
- dbos/_dbos.py +74 -16
- dbos/_dbos_config.py +45 -11
- dbos/_debug.py +45 -0
- dbos/_error.py +11 -0
- dbos/_logger.py +5 -6
- dbos/_migrations/versions/5c361fc04708_added_system_tables.py +1 -1
- dbos/_queue.py +5 -1
- dbos/_recovery.py +23 -24
- dbos/_schemas/system_database.py +1 -1
- dbos/_sys_db.py +212 -187
- dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py +1 -1
- dbos/_tracer.py +4 -4
- dbos/_utils.py +6 -0
- dbos/_workflow_commands.py +76 -111
- dbos/cli/cli.py +63 -21
- {dbos-0.22.0a11.dist-info → dbos-0.23.0.dist-info}/METADATA +7 -3
- {dbos-0.22.0a11.dist-info → dbos-0.23.0.dist-info}/RECORD +27 -22
- {dbos-0.22.0a11.dist-info → dbos-0.23.0.dist-info}/WHEEL +0 -0
- {dbos-0.22.0a11.dist-info → dbos-0.23.0.dist-info}/entry_points.txt +0 -0
- {dbos-0.22.0a11.dist-info → dbos-0.23.0.dist-info}/licenses/LICENSE +0 -0
dbos/__main__.py
ADDED
@@ -0,0 +1,26 @@
+import re
+import sys
+from typing import NoReturn, Optional, Union
+
+from dbos.cli.cli import app
+
+
+def main() -> NoReturn:
+    # Modify sys.argv[0] to remove script or executable extensions
+    sys.argv[0] = re.sub(r"(-script\.pyw|\.exe)?$", "", sys.argv[0])
+
+    retval: Optional[Union[str, int]] = 1
+    try:
+        app()
+        retval = None
+    except SystemExit as e:
+        retval = e.code
+    except Exception as e:
+        print(f"Error: {e}", file=sys.stderr)
+        retval = 1
+    finally:
+        sys.exit(retval)
+
+
+if __name__ == "__main__":
+    main()
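The new __main__ module lets the CLI run as "python -m dbos" in addition to the installed dbos script; main() funnels normal completion, SystemExit, and unexpected errors into a single sys.exit(retval). A minimal sketch of invoking the module entry point programmatically (the "version" subcommand is an assumption for illustration):

    import subprocess
    import sys

    # Run the CLI through the new module entry point; exit codes match the dbos script.
    result = subprocess.run(
        [sys.executable, "-m", "dbos", "version"],  # "version" is an assumed subcommand
        capture_output=True,
        text=True,
    )
    print(result.returncode, result.stdout.strip())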
dbos/_app_db.py
CHANGED
@@ -27,29 +27,30 @@ class RecordedResult(TypedDict):
 
 class ApplicationDatabase:
 
-    def __init__(self, config: ConfigFile):
+    def __init__(self, config: ConfigFile, *, debug_mode: bool = False):
         self.config = config
 
         app_db_name = config["database"]["app_db_name"]
 
         # If the application database does not already exist, create it
-        [17 removed lines (old 36-52); their content is not shown in this diff view]
+        if not debug_mode:
+            postgres_db_url = sa.URL.create(
+                "postgresql+psycopg",
+                username=config["database"]["username"],
+                password=config["database"]["password"],
+                host=config["database"]["hostname"],
+                port=config["database"]["port"],
+                database="postgres",
+            )
+            postgres_db_engine = sa.create_engine(postgres_db_url)
+            with postgres_db_engine.connect() as conn:
+                conn.execution_options(isolation_level="AUTOCOMMIT")
+                if not conn.execute(
+                    sa.text("SELECT 1 FROM pg_database WHERE datname=:db_name"),
+                    parameters={"db_name": app_db_name},
+                ).scalar():
+                    conn.execute(sa.text(f"CREATE DATABASE {app_db_name}"))
+            postgres_db_engine.dispose()
 
         # Create a connection pool for the application database
         app_db_url = sa.URL.create(

@@ -64,14 +65,16 @@ class ApplicationDatabase:
             app_db_url, pool_size=20, max_overflow=5, pool_timeout=30
         )
         self.sessionmaker = sessionmaker(bind=self.engine)
+        self.debug_mode = debug_mode
 
         # Create the dbos schema and transaction_outputs table in the application database
-        [6 removed lines (old 69-74); their content is not shown in this diff view]
+        if not debug_mode:
+            with self.engine.begin() as conn:
+                schema_creation_query = sa.text(
+                    f"CREATE SCHEMA IF NOT EXISTS {ApplicationSchema.schema}"
+                )
+                conn.execute(schema_creation_query)
+            ApplicationSchema.metadata_obj.create_all(self.engine)
 
     def destroy(self) -> None:
         self.engine.dispose()

@@ -100,6 +103,8 @@ class ApplicationDatabase:
             raise
 
     def record_transaction_error(self, output: TransactionResultInternal) -> None:
+        if self.debug_mode:
+            raise Exception("called record_transaction_error in debug mode")
         try:
             with self.engine.begin() as conn:
                 conn.execute(
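ApplicationDatabase now takes a keyword-only debug_mode flag: when set, the constructor skips creating the application database and the dbos schema, and record_transaction_error raises instead of writing. A hedged sketch of constructing the class directly with the new flag; the config dict below is an assumed minimal example (normally DBOS builds it from dbos-config.yaml):

    from dbos._app_db import ApplicationDatabase

    config = {
        "name": "my-app",
        "database": {
            "username": "postgres",       # placeholder credentials
            "password": "dbos",
            "hostname": "localhost",
            "port": 5432,
            "app_db_name": "my_app",
        },
    }

    # In debug mode the constructor only builds the connection pool, so it can attach
    # to an existing application database without creating or modifying anything.
    app_db = ApplicationDatabase(config, debug_mode=True)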
dbos/_conductor/conductor.py
ADDED
@@ -0,0 +1,213 @@
+import threading
+import time
+import traceback
+from typing import TYPE_CHECKING, Optional
+
+from websockets import ConnectionClosed, ConnectionClosedOK
+from websockets.sync.client import connect
+from websockets.sync.connection import Connection
+
+from dbos._utils import GlobalParams
+from dbos._workflow_commands import get_workflow, list_queued_workflows, list_workflows
+
+from . import protocol as p
+
+if TYPE_CHECKING:
+    from dbos import DBOS
+
+
+class ConductorWebsocket(threading.Thread):
+
+    def __init__(
+        self, dbos: "DBOS", conductor_url: str, conductor_key: str, evt: threading.Event
+    ):
+        super().__init__(daemon=True)
+        self.websocket: Optional[Connection] = None
+        self.evt = evt
+        self.dbos = dbos
+        self.app_name = dbos.config["name"]
+        self.url = (
+            conductor_url.rstrip("/") + f"/websocket/{self.app_name}/{conductor_key}"
+        )
+
+    def run(self) -> None:
+        while not self.evt.is_set():
+            try:
+                with connect(self.url) as websocket:
+                    self.websocket = websocket
+                    while not self.evt.is_set():
+                        message = websocket.recv()
+                        if not isinstance(message, str):
+                            self.dbos.logger.warning(
+                                "Receieved unexpected non-str message"
+                            )
+                            continue
+                        base_message = p.BaseMessage.from_json(message)
+                        type = base_message.type
+                        if type == p.MessageType.EXECUTOR_INFO:
+                            info_response = p.ExecutorInfoResponse(
+                                type=p.MessageType.EXECUTOR_INFO,
+                                request_id=base_message.request_id,
+                                executor_id=GlobalParams.executor_id,
+                                application_version=GlobalParams.app_version,
+                            )
+                            websocket.send(info_response.to_json())
+                            self.dbos.logger.info("Connected to DBOS conductor")
+                        elif type == p.MessageType.RECOVERY:
+                            recovery_message = p.RecoveryRequest.from_json(message)
+                            success = True
+                            try:
+                                self.dbos.recover_pending_workflows(
+                                    recovery_message.executor_ids
+                                )
+                            except Exception as e:
+                                self.dbos.logger.error(
+                                    f"Exception encountered when recovering workflows: {traceback.format_exc()}"
+                                )
+                                success = False
+                            recovery_response = p.RecoveryResponse(
+                                type=p.MessageType.RECOVERY,
+                                request_id=base_message.request_id,
+                                success=success,
+                            )
+                            websocket.send(recovery_response.to_json())
+                        elif type == p.MessageType.CANCEL:
+                            cancel_message = p.CancelRequest.from_json(message)
+                            success = True
+                            try:
+                                self.dbos.cancel_workflow(cancel_message.workflow_id)
+                            except Exception as e:
+                                self.dbos.logger.error(
+                                    f"Exception encountered when cancelling workflow {cancel_message.workflow_id}: {traceback.format_exc()}"
+                                )
+                                success = False
+                            cancel_response = p.CancelResponse(
+                                type=p.MessageType.CANCEL,
+                                request_id=base_message.request_id,
+                                success=success,
+                            )
+                            websocket.send(cancel_response.to_json())
+                        elif type == p.MessageType.RESUME:
+                            resume_message = p.ResumeRequest.from_json(message)
+                            success = True
+                            try:
+                                self.dbos.resume_workflow(resume_message.workflow_id)
+                            except Exception as e:
+                                self.dbos.logger.error(
+                                    f"Exception encountered when resuming workflow {resume_message.workflow_id}: {traceback.format_exc()}"
+                                )
+                                success = False
+                            resume_response = p.ResumeResponse(
+                                type=p.MessageType.RESUME,
+                                request_id=base_message.request_id,
+                                success=success,
+                            )
+                            websocket.send(resume_response.to_json())
+                        elif type == p.MessageType.RESTART:
+                            restart_message = p.RestartRequest.from_json(message)
+                            success = True
+                            try:
+                                self.dbos.restart_workflow(restart_message.workflow_id)
+                            except Exception as e:
+                                self.dbos.logger.error(
+                                    f"Exception encountered when restarting workflow {restart_message.workflow_id}: {traceback.format_exc()}"
+                                )
+                                success = False
+                            restart_response = p.RestartResponse(
+                                type=p.MessageType.RESTART,
+                                request_id=base_message.request_id,
+                                success=success,
+                            )
+                            websocket.send(restart_response.to_json())
+                        elif type == p.MessageType.LIST_WORKFLOWS:
+                            list_workflows_message = p.ListWorkflowsRequest.from_json(
+                                message
+                            )
+                            body = list_workflows_message.body
+                            infos = list_workflows(
+                                self.dbos._sys_db,
+                                workflow_ids=body["workflow_uuids"],
+                                user=body["authenticated_user"],
+                                start_time=body["start_time"],
+                                end_time=body["end_time"],
+                                status=body["status"],
+                                request=False,
+                                app_version=body["application_version"],
+                                name=body["workflow_name"],
+                                limit=body["limit"],
+                                offset=body["offset"],
+                                sort_desc=body["sort_desc"],
+                            )
+                            list_workflows_response = p.ListWorkflowsResponse(
+                                type=p.MessageType.LIST_WORKFLOWS,
+                                request_id=base_message.request_id,
+                                output=[
+                                    p.WorkflowsOutput.from_workflow_information(i)
+                                    for i in infos
+                                ],
+                            )
+                            websocket.send(list_workflows_response.to_json())
+                        elif type == p.MessageType.LIST_QUEUED_WORKFLOWS:
+                            list_queued_workflows_message = (
+                                p.ListQueuedWorkflowsRequest.from_json(message)
+                            )
+                            q_body = list_queued_workflows_message.body
+                            infos = list_queued_workflows(
+                                self.dbos._sys_db,
+                                start_time=q_body["start_time"],
+                                end_time=q_body["end_time"],
+                                status=q_body["status"],
+                                request=False,
+                                name=q_body["workflow_name"],
+                                limit=q_body["limit"],
+                                offset=q_body["offset"],
+                                queue_name=q_body["queue_name"],
+                                sort_desc=q_body["sort_desc"],
+                            )
+                            list_queued_workflows_response = (
+                                p.ListQueuedWorkflowsResponse(
+                                    type=p.MessageType.LIST_QUEUED_WORKFLOWS,
+                                    request_id=base_message.request_id,
+                                    output=[
+                                        p.WorkflowsOutput.from_workflow_information(i)
+                                        for i in infos
+                                    ],
+                                )
+                            )
+                            websocket.send(list_queued_workflows_response.to_json())
+                        elif type == p.MessageType.GET_WORKFLOW:
+                            get_workflow_message = p.GetWorkflowRequest.from_json(
+                                message
+                            )
+                            info = get_workflow(
+                                self.dbos._sys_db,
+                                get_workflow_message.workflow_id,
+                                getRequest=False,
+                            )
+                            get_workflow_response = p.GetWorkflowResponse(
+                                type=p.MessageType.GET_WORKFLOW,
+                                request_id=base_message.request_id,
+                                output=(
+                                    p.WorkflowsOutput.from_workflow_information(info)
+                                    if info is not None
+                                    else None
+                                ),
+                            )
+                            websocket.send(get_workflow_response.to_json())
+                        else:
+                            self.dbos.logger.warning(f"Unexpected message type: {type}")
+            except ConnectionClosedOK:
+                self.dbos.logger.info("Conductor connection terminated")
+                break
+            except ConnectionClosed as e:
+                self.dbos.logger.warning(
+                    f"Connection to conductor lost. Reconnecting: {e}"
+                )
+                time.sleep(1)
+                continue
+            except Exception as e:
+                self.dbos.logger.error(
+                    f"Unexpected exception in connection to conductor. Reconnecting: {e}"
+                )
+                time.sleep(1)
+                continue
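ConductorWebsocket is a daemon thread that holds one websocket open to the conductor, dispatches each request type shown above, and reconnects after a one-second pause whenever the connection drops. A hedged sketch of how such a thread could be started and stopped from application code; the URL and key are placeholders, and in practice DBOS wires this up itself from its launch options:

    import threading

    from dbos import DBOS
    from dbos._conductor.conductor import ConductorWebsocket

    dbos = DBOS()  # assumes a dbos-config.yaml describing the app and its database
    DBOS.launch()

    stop_event = threading.Event()
    conductor = ConductorWebsocket(
        dbos,
        conductor_url="wss://conductor.example.invalid",  # placeholder URL
        conductor_key="my-conductor-key",                 # placeholder key
        evt=stop_event,
    )
    conductor.start()  # run() loops until stop_event is set or the conductor closes cleanly

    # ... application runs ...

    stop_event.set()   # the loop checks this flag after each message or reconnect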
dbos/_conductor/protocol.py
ADDED
@@ -0,0 +1,197 @@
+import json
+from dataclasses import asdict, dataclass
+from enum import Enum
+from typing import List, Optional, Type, TypedDict, TypeVar
+
+from dbos._workflow_commands import WorkflowInformation
+
+
+class MessageType(str, Enum):
+    EXECUTOR_INFO = "executor_info"
+    RECOVERY = "recovery"
+    CANCEL = "cancel"
+    LIST_WORKFLOWS = "list_workflows"
+    LIST_QUEUED_WORKFLOWS = "list_queued_workflows"
+    RESUME = "resume"
+    RESTART = "restart"
+    GET_WORKFLOW = "get_workflow"
+
+
+T = TypeVar("T", bound="BaseMessage")
+
+
+@dataclass
+class BaseMessage:
+    type: MessageType
+    request_id: str
+
+    @classmethod
+    def from_json(cls: Type[T], json_str: str) -> T:
+        """
+        Safely load a JSON into a dataclass, loading only the
+        attributes specified in the dataclass.
+        """
+        data = json.loads(json_str)
+        all_annotations = {}
+        for base_cls in cls.__mro__:
+            if hasattr(base_cls, "__annotations__"):
+                all_annotations.update(base_cls.__annotations__)
+        kwargs = {k: v for k, v in data.items() if k in all_annotations}
+        return cls(**kwargs)
+
+    def to_json(self) -> str:
+        dict_data = asdict(self)
+        return json.dumps(dict_data)
+
+
+@dataclass
+class ExecutorInfoRequest(BaseMessage):
+    pass
+
+
+@dataclass
+class ExecutorInfoResponse(BaseMessage):
+    executor_id: str
+    application_version: str
+
+
+@dataclass
+class RecoveryRequest(BaseMessage):
+    executor_ids: List[str]
+
+
+@dataclass
+class RecoveryResponse(BaseMessage):
+    success: bool
+
+
+@dataclass
+class CancelRequest(BaseMessage):
+    workflow_id: str
+
+
+@dataclass
+class CancelResponse(BaseMessage):
+    success: bool
+
+
+@dataclass
+class ResumeRequest(BaseMessage):
+    workflow_id: str
+
+
+@dataclass
+class ResumeResponse(BaseMessage):
+    success: bool
+
+
+@dataclass
+class RestartRequest(BaseMessage):
+    workflow_id: str
+
+
+@dataclass
+class RestartResponse(BaseMessage):
+    success: bool
+
+
+class ListWorkflowsBody(TypedDict):
+    workflow_uuids: List[str]
+    workflow_name: Optional[str]
+    authenticated_user: Optional[str]
+    start_time: Optional[str]
+    end_time: Optional[str]
+    status: Optional[str]
+    application_version: Optional[str]
+    limit: Optional[int]
+    offset: Optional[int]
+    sort_desc: bool
+
+
+@dataclass
+class WorkflowsOutput:
+    WorkflowUUID: str
+    Status: Optional[str]
+    WorkflowName: Optional[str]
+    WorkflowClassName: Optional[str]
+    WorkflowConfigName: Optional[str]
+    AuthenticatedUser: Optional[str]
+    AssumedRole: Optional[str]
+    AuthenticatedRoles: Optional[str]
+    Input: Optional[str]
+    Output: Optional[str]
+    Request: Optional[str]
+    Error: Optional[str]
+    CreatedAt: Optional[str]
+    UpdatedAt: Optional[str]
+    QueueName: Optional[str]
+    ApplicationVersion: Optional[str]
+
+    @classmethod
+    def from_workflow_information(cls, info: WorkflowInformation) -> "WorkflowsOutput":
+        # Convert fields to strings as needed
+        created_at_str = str(info.created_at) if info.created_at is not None else None
+        updated_at_str = str(info.updated_at) if info.updated_at is not None else None
+        inputs_str = str(info.input) if info.input is not None else None
+        outputs_str = str(info.output) if info.output is not None else None
+        request_str = str(info.request) if info.request is not None else None
+
+        return cls(
+            WorkflowUUID=info.workflow_id,
+            Status=info.status,
+            WorkflowName=info.workflow_name,
+            WorkflowClassName=info.workflow_class_name,
+            WorkflowConfigName=info.workflow_config_name,
+            AuthenticatedUser=info.authenticated_user,
+            AssumedRole=info.assumed_role,
+            AuthenticatedRoles=info.authenticated_roles,
+            Input=inputs_str,
+            Output=outputs_str,
+            Request=request_str,
+            Error=info.error,
+            CreatedAt=created_at_str,
+            UpdatedAt=updated_at_str,
+            QueueName=info.queue_name,
+            ApplicationVersion=info.app_version,
+        )
+
+
+@dataclass
+class ListWorkflowsRequest(BaseMessage):
+    body: ListWorkflowsBody
+
+
+@dataclass
+class ListWorkflowsResponse(BaseMessage):
+    output: List[WorkflowsOutput]
+
+
+class ListQueuedWorkflowsBody(TypedDict):
+    workflow_name: Optional[str]
+    start_time: Optional[str]
+    end_time: Optional[str]
+    status: Optional[str]
+    queue_name: Optional[str]
+    limit: Optional[int]
+    offset: Optional[int]
+    sort_desc: bool
+
+
+@dataclass
+class ListQueuedWorkflowsRequest(BaseMessage):
+    body: ListQueuedWorkflowsBody
+
+
+@dataclass
+class ListQueuedWorkflowsResponse(BaseMessage):
+    output: List[WorkflowsOutput]
+
+
+@dataclass
+class GetWorkflowRequest(BaseMessage):
+    workflow_id: str
+
+
+@dataclass
+class GetWorkflowResponse(BaseMessage):
+    output: Optional[WorkflowsOutput]
dbos/_context.py
CHANGED
@@ -12,6 +12,8 @@ from typing import List, Literal, Optional, Type, TypedDict
 from opentelemetry.trace import Span, Status, StatusCode
 from sqlalchemy.orm import Session
 
+from dbos._utils import GlobalParams
+
 from ._logger import dbos_logger
 from ._request import Request
 from ._tracer import dbos_tracer

@@ -48,7 +50,7 @@ class TracedAttributes(TypedDict, total=False):
 
 class DBOSContext:
     def __init__(self) -> None:
-        self.executor_id =
+        self.executor_id = GlobalParams.executor_id
         self.app_id = os.environ.get("DBOS__APPID", "")
 
         self.logger = dbos_logger