dbos 0.26.0a7__py3-none-any.whl → 0.26.0a9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dbos/_app_db.py +14 -5
- dbos/_conductor/conductor.py +1 -1
- dbos/_core.py +12 -20
- dbos/_dbos.py +61 -67
- dbos/_dbos_config.py +4 -54
- dbos/_debug.py +1 -1
- dbos/_docker_pg_helper.py +191 -0
- dbos/_error.py +13 -0
- dbos/_sys_db.py +120 -55
- dbos/_workflow_commands.py +3 -0
- dbos/cli/cli.py +17 -1
- dbos/dbos-config.schema.json +0 -4
- {dbos-0.26.0a7.dist-info → dbos-0.26.0a9.dist-info}/METADATA +1 -1
- {dbos-0.26.0a7.dist-info → dbos-0.26.0a9.dist-info}/RECORD +17 -20
- dbos/_cloudutils/authentication.py +0 -163
- dbos/_cloudutils/cloudutils.py +0 -254
- dbos/_cloudutils/databases.py +0 -241
- dbos/_db_wizard.py +0 -220
- {dbos-0.26.0a7.dist-info → dbos-0.26.0a9.dist-info}/WHEEL +0 -0
- {dbos-0.26.0a7.dist-info → dbos-0.26.0a9.dist-info}/entry_points.txt +0 -0
- {dbos-0.26.0a7.dist-info → dbos-0.26.0a9.dist-info}/licenses/LICENSE +0 -0
dbos/_docker_pg_helper.py
ADDED
@@ -0,0 +1,191 @@
+import logging
+import os
+import subprocess
+import time
+
+import docker
+import psycopg
+from docker.errors import APIError, NotFound
+
+logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s")
+from typing import Any, Dict, Optional, Tuple
+
+
+def start_docker_pg() -> None:
+    """
+    Starts a PostgreSQL database in a Docker container.
+
+    This function checks if Docker is installed, and if so, starts a local PostgreSQL
+    database in a Docker container. It configures the database with default settings
+    and provides connection information upon successful startup.
+
+    The function uses environment variable PGPASSWORD if available, otherwise
+    defaults to 'dbos' as the database password.
+
+    Returns:
+        None
+
+    Raises:
+        Exception: If there is an error starting the Docker container or if the
+        PostgreSQL service does not become available within the timeout period.
+    """
+
+    logging.info("Attempting to create a Docker Postgres container...")
+    has_docker = check_docker_installed()
+
+    pool_config = {
+        "host": "localhost",
+        "port": 5432,
+        "password": os.environ.get("PGPASSWORD", "dbos"),
+        "user": "postgres",
+        "database": "postgres",
+        "connect_timeout": 2,
+    }
+
+    # If Docker is installed, start a local Docker based Postgres
+    if has_docker:
+        start_docker_postgres(pool_config)
+        logging.info(
+            f"Postgres available at postgres://postgres:{pool_config['password']}@{pool_config['host']}:{pool_config['port']}"
+        )
+    else:
+        logging.warning("Docker not detected locally")
+
+
+def check_db_connectivity(config: Dict[str, Any]) -> Optional[Exception]:
+    conn = None
+    try:
+        conn = psycopg.connect(
+            host=config["host"],
+            port=config["port"],
+            user=config["user"],
+            password=config["password"],
+            dbname=config["database"],
+            connect_timeout=config.get("connect_timeout", 30),
+        )
+        cursor = conn.cursor()
+        cursor.execute("SELECT 1;")
+        cursor.close()
+        return None
+    except Exception as error:
+        return error
+    finally:
+        if conn is not None:
+            conn.close()
+
+
+def exec_sync(cmd: str) -> Tuple[str, str]:
+    result = subprocess.run(cmd, shell=True, text=True, capture_output=True, check=True)
+    return result.stdout, result.stderr
+
+
+def start_docker_postgres(pool_config: Dict[str, Any]) -> bool:
+    logging.info("Starting a Postgres Docker container...")
+    container_name = "dbos-db"
+    pg_data = "/var/lib/postgresql/data"
+
+    try:
+        client = docker.from_env()
+
+        # Check if the container already exists
+        try:
+            container = client.containers.get(container_name)
+            if container.status == "running":
+                logging.info(f"Container '{container_name}' is already running.")
+                return True
+            elif container.status == "exited":
+                container.start()
+                logging.info(
+                    f"Container '{container_name}' was stopped and has been restarted."
+                )
+                return True
+        except NotFound:
+            # Container doesn't exist, proceed with creation
+            pass
+
+        # Create and start the container
+        container = client.containers.run(
+            image="pgvector/pgvector:pg16",
+            name=container_name,
+            detach=True,
+            environment={
+                "POSTGRES_PASSWORD": pool_config["password"],
+                "PGDATA": pg_data,
+            },
+            ports={"5432/tcp": pool_config["port"]},
+            volumes={pg_data: {"bind": pg_data, "mode": "rw"}},
+            remove=True,  # Equivalent to --rm
+        )
+
+        logging.info(f"Created container: {container.id}")
+
+    except APIError as e:
+        raise Exception(f"Docker API error: {str(e)}")
+
+    # Wait for PostgreSQL to be ready
+    attempts = 30
+    while attempts > 0:
+        if attempts % 5 == 0:
+            logging.info("Waiting for Postgres Docker container to start...")
+
+        if check_db_connectivity(pool_config) is None:
+            return True
+
+        attempts -= 1
+        time.sleep(1)
+
+    raise Exception(
+        f"Failed to start Docker container: Container {container_name} did not start in time."
+    )
+
+
+def check_docker_installed() -> bool:
+    """
+    Check if Docker is installed and running using the docker library.
+
+    Returns:
+        bool: True if Docker is installed and running, False otherwise.
+    """
+    try:
+        client = docker.from_env()
+        client.ping()  # type: ignore
+        return True
+    except Exception:
+        return False
+
+
+def stop_docker_pg() -> None:
+    """
+    Stops the Docker Postgres container.
+
+    Returns:
+        bool: True if the container was successfully stopped, False if it wasn't running
+
+    Raises:
+        Exception: If there was an error stopping the container
+    """
+    logger = logging.getLogger()
+    container_name = "dbos-db"
+    try:
+        logger.info(f"Stopping Docker Postgres container {container_name}...")
+
+        client = docker.from_env()
+
+        try:
+            container = client.containers.get(container_name)
+
+            if container.status == "running":
+                container.stop()
+                logger.info(
+                    f"Successfully stopped Docker Postgres container {container_name}."
+                )
+            else:
+                logger.info(f"Container {container_name} exists but is not running.")
+
+        except docker.errors.NotFound:
+            logger.info(f"Container {container_name} does not exist.")
+
+    except Exception as error:
+        error_message = str(error)
+        logger.error(f"Failed to stop Docker Postgres container: {error_message}")
+        raise
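For orientation, here is a minimal usage sketch of the new helper. It mirrors what the `dbos postgres start` and `dbos postgres stop` CLI commands added in dbos/cli/cli.py (below) call into; importing the private _docker_pg_helper module directly is shown for illustration only:

    # Illustration only: _docker_pg_helper is a private module; the supported
    # entry points are the "dbos postgres" CLI commands added below.
    import os

    import psycopg

    from dbos._docker_pg_helper import start_docker_pg, stop_docker_pg

    # Start (or restart) the "dbos-db" container and wait for connectivity.
    start_docker_pg()

    # Connect with the same defaults the helper uses (PGPASSWORD, else "dbos").
    with psycopg.connect(
        host="localhost",
        port=5432,
        user="postgres",
        password=os.environ.get("PGPASSWORD", "dbos"),
        dbname="postgres",
    ) as conn:
        print(conn.execute("SELECT version();").fetchone())

    # Stop the container; it was created with remove=True, so it is also removed.
    stop_docker_pg()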
dbos/_error.py
CHANGED
@@ -37,6 +37,7 @@ class DBOSErrorCode(Enum):
     NotAuthorized = 8
     ConflictingWorkflowError = 9
     WorkflowCancelled = 10
+    UnexpectedStep = 11
     ConflictingRegistrationError = 25


@@ -155,3 +156,15 @@ class DBOSConflictingRegistrationError(DBOSException):
             f"Operation (Name: {name}) is already registered with a conflicting function type",
             dbos_error_code=DBOSErrorCode.ConflictingRegistrationError.value,
         )
+
+
+class DBOSUnexpectedStepError(DBOSException):
+    """Exception raised when a step has an unexpected recorded name."""
+
+    def __init__(
+        self, workflow_id: str, step_id: int, expected_name: str, recorded_name: str
+    ) -> None:
+        super().__init__(
+            f"During execution of workflow {workflow_id} step {step_id}, function {recorded_name} was recorded when {expected_name} was expected. Check that your workflow is deterministic.",
+            dbos_error_code=DBOSErrorCode.UnexpectedStep.value,
+        )
dbos/_sys_db.py
CHANGED
@@ -9,13 +9,14 @@ from enum import Enum
 from typing import (
     TYPE_CHECKING,
     Any,
+    Callable,
     Dict,
     List,
     Literal,
     Optional,
     Sequence,
-    Set,
     TypedDict,
+    TypeVar,
 )

 import psycopg
@@ -35,6 +36,8 @@ from ._error import (
     DBOSConflictingWorkflowError,
     DBOSDeadLetterQueueError,
     DBOSNonExistentWorkflowError,
+    DBOSUnexpectedStepError,
+    DBOSWorkflowCancelledError,
     DBOSWorkflowConflictIDError,
 )
 from ._logger import dbos_logger
@@ -577,9 +580,7 @@ class SystemDatabase:
         }
         return status

-    def
-        polling_interval_secs: float = 1.000
-
+    def await_workflow_result(self, workflow_id: str) -> Any:
         while True:
             with self.engine.begin() as c:
                 row = c.execute(
@@ -587,41 +588,23 @@
                     SystemSchema.workflow_status.c.status,
                     SystemSchema.workflow_status.c.output,
                     SystemSchema.workflow_status.c.error,
-                ).where(
-                    SystemSchema.workflow_status.c.workflow_uuid == workflow_uuid
-                )
+                ).where(SystemSchema.workflow_status.c.workflow_uuid == workflow_id)
             ).fetchone()
             if row is not None:
                 status = row[0]
-                if status ==
-
-
-
-
-
-
-
-
-                    "error": row[2],
-                    "workflow_uuid": workflow_uuid,
-                }
-
+                if status == WorkflowStatusString.SUCCESS.value:
+                    output = row[1]
+                    return _serialization.deserialize(output)
+                elif status == WorkflowStatusString.ERROR.value:
+                    error = row[2]
+                    raise _serialization.deserialize_exception(error)
+                elif status == WorkflowStatusString.CANCELLED.value:
+                    # Raise a normal exception here, not the cancellation exception
+                    # because the awaiting workflow is not being cancelled.
+                    raise Exception(f"Awaited workflow {workflow_id} was cancelled")
                 else:
                     pass  # CB: I guess we're assuming the WF will show up eventually.
-
-            time.sleep(polling_interval_secs)
-
-    def await_workflow_result(self, workflow_uuid: str) -> Any:
-        stat = self.await_workflow_result_internal(workflow_uuid)
-        if not stat:
-            return None
-        status: str = stat["status"]
-        if status == str(WorkflowStatusString.SUCCESS.value):
-            return _serialization.deserialize(stat["output"])
-        elif status == str(WorkflowStatusString.ERROR.value):
-            raise _serialization.deserialize_exception(stat["error"])
-        return None
+            time.sleep(1)

     def update_workflow_inputs(
         self, workflow_uuid: str, inputs: str, conn: Optional[sa.Connection] = None
@@ -895,16 +878,34 @@
         raise

     def check_operation_execution(
-        self,
+        self,
+        workflow_id: str,
+        function_id: int,
+        function_name: str,
+        *,
+        conn: Optional[sa.Connection] = None,
     ) -> Optional[RecordedResult]:
-
-
-
-
-
+        # Retrieve the status of the workflow. Additionally, if this step
+        # has run before, retrieve its name, output, and error.
+        sql = (
+            sa.select(
+                SystemSchema.workflow_status.c.status,
+                SystemSchema.operation_outputs.c.output,
+                SystemSchema.operation_outputs.c.error,
+                SystemSchema.operation_outputs.c.function_name,
+            )
+            .select_from(
+                SystemSchema.workflow_status.outerjoin(
+                    SystemSchema.operation_outputs,
+                    (
+                        SystemSchema.workflow_status.c.workflow_uuid
+                        == SystemSchema.operation_outputs.c.workflow_uuid
+                    )
+                    & (SystemSchema.operation_outputs.c.function_id == function_id),
+                )
+            )
+            .where(SystemSchema.workflow_status.c.workflow_uuid == workflow_id)
         )
-
         # If in a transaction, use the provided connection
         rows: Sequence[Any]
         if conn is not None:
@@ -912,11 +913,32 @@
         else:
             with self.engine.begin() as c:
                 rows = c.execute(sql).all()
-
+        assert len(rows) > 0, f"Error: Workflow {workflow_id} does not exist"
+        workflow_status, output, error, recorded_function_name = (
+            rows[0][0],
+            rows[0][1],
+            rows[0][2],
+            rows[0][3],
+        )
+        # If the workflow is cancelled, raise the exception
+        if workflow_status == WorkflowStatusString.CANCELLED.value:
+            raise DBOSWorkflowCancelledError(
+                f"Workflow {workflow_id} is cancelled. Aborting function."
+            )
+        # If there is no row for the function, return None
+        if recorded_function_name is None:
             return None
+        # If the provided and recorded function name are different, throw an exception.
+        if function_name != recorded_function_name:
+            raise DBOSUnexpectedStepError(
+                workflow_id=workflow_id,
+                step_id=function_id,
+                expected_name=function_name,
+                recorded_name=recorded_function_name,
+            )
         result: RecordedResult = {
-            "output":
-            "error":
+            "output": output,
+            "error": error,
         }
         return result

@@ -945,10 +967,11 @@
         message: Any,
         topic: Optional[str] = None,
     ) -> None:
+        function_name = "DBOS.send"
         topic = topic if topic is not None else _dbos_null_topic
         with self.engine.begin() as c:
             recorded_output = self.check_operation_execution(
-                workflow_uuid, function_id, conn=c
+                workflow_uuid, function_id, function_name, conn=c
             )
             if self._debug_mode and recorded_output is None:
                 raise Exception(
@@ -981,7 +1004,7 @@
             output: OperationResultInternal = {
                 "workflow_uuid": workflow_uuid,
                 "function_id": function_id,
-                "function_name":
+                "function_name": function_name,
                 "output": None,
                 "error": None,
             }
@@ -995,10 +1018,13 @@
         topic: Optional[str],
         timeout_seconds: float = 60,
     ) -> Any:
+        function_name = "DBOS.recv"
         topic = topic if topic is not None else _dbos_null_topic

         # First, check for previous executions.
-        recorded_output = self.check_operation_execution(
+        recorded_output = self.check_operation_execution(
+            workflow_uuid, function_id, function_name
+        )
         if self._debug_mode and recorded_output is None:
             raise Exception("called recv in debug mode without a previous execution")
         if recorded_output is not None:
@@ -1075,7 +1101,7 @@
                     {
                         "workflow_uuid": workflow_uuid,
                         "function_id": function_id,
-                        "function_name":
+                        "function_name": function_name,
                         "output": _serialization.serialize(
                             message
                         ),  # None will be serialized to 'null'
@@ -1149,7 +1175,10 @@
         seconds: float,
         skip_sleep: bool = False,
     ) -> float:
-
+        function_name = "DBOS.sleep"
+        recorded_output = self.check_operation_execution(
+            workflow_uuid, function_id, function_name
+        )
         end_time: float
         if self._debug_mode and recorded_output is None:
             raise Exception("called sleep in debug mode without a previous execution")
@@ -1166,7 +1195,7 @@
                 {
                     "workflow_uuid": workflow_uuid,
                     "function_id": function_id,
-                    "function_name":
+                    "function_name": function_name,
                     "output": _serialization.serialize(end_time),
                     "error": None,
                 }
@@ -1185,9 +1214,10 @@
         key: str,
         message: Any,
     ) -> None:
+        function_name = "DBOS.setEvent"
         with self.engine.begin() as c:
             recorded_output = self.check_operation_execution(
-                workflow_uuid, function_id, conn=c
+                workflow_uuid, function_id, function_name, conn=c
             )
             if self._debug_mode and recorded_output is None:
                 raise Exception(
@@ -1214,7 +1244,7 @@
             output: OperationResultInternal = {
                 "workflow_uuid": workflow_uuid,
                 "function_id": function_id,
-                "function_name":
+                "function_name": function_name,
                 "output": None,
                 "error": None,
             }
@@ -1227,6 +1257,7 @@
         timeout_seconds: float = 60,
         caller_ctx: Optional[GetEventWorkflowContext] = None,
     ) -> Any:
+        function_name = "DBOS.getEvent"
         get_sql = sa.select(
             SystemSchema.workflow_events.c.value,
         ).where(
@@ -1236,7 +1267,7 @@
         # Check for previous executions only if it's in a workflow
         if caller_ctx is not None:
             recorded_output = self.check_operation_execution(
-                caller_ctx["workflow_uuid"], caller_ctx["function_id"]
+                caller_ctx["workflow_uuid"], caller_ctx["function_id"], function_name
            )
             if self._debug_mode and recorded_output is None:
                 raise Exception(
@@ -1295,7 +1326,7 @@
                 {
                     "workflow_uuid": caller_ctx["workflow_uuid"],
                     "function_id": caller_ctx["function_id"],
-                    "function_name":
+                    "function_name": function_name,
                     "output": _serialization.serialize(
                         value
                     ),  # None will be serialized to 'null'
@@ -1534,6 +1565,40 @@
         )
         return True

+    T = TypeVar("T")
+
+    def call_function_as_step(self, fn: Callable[[], T], function_name: str) -> T:
+        ctx = get_local_dbos_context()
+        if ctx and ctx.is_within_workflow():
+            ctx.function_id += 1
+            res = self.check_operation_execution(
+                ctx.workflow_id, ctx.function_id, function_name
+            )
+            if res is not None:
+                if res["output"] is not None:
+                    resstat: SystemDatabase.T = _serialization.deserialize(
+                        res["output"]
+                    )
+                    return resstat
+                elif res["error"] is not None:
+                    raise _serialization.deserialize_exception(res["error"])
+                else:
+                    raise Exception(
+                        f"Recorded output and error are both None for {function_name}"
+                    )
+        result = fn()
+        if ctx and ctx.is_within_workflow():
+            self.record_operation_result(
+                {
+                    "workflow_uuid": ctx.workflow_id,
+                    "function_id": ctx.function_id,
+                    "function_name": function_name,
+                    "output": _serialization.serialize(result),
+                    "error": None,
+                }
+            )
+        return result
+

 def reset_system_database(config: ConfigFile) -> None:
     sysdb_name = (
dbos/_workflow_commands.py
CHANGED
@@ -45,6 +45,9 @@ class WorkflowStatus:
     executor_id: Optional[str]
     # The application version on which this workflow was started
     app_version: Optional[str]
+
+    # INTERNAL FIELDS
+
     # The ID of the application executing this workflow
     app_id: Optional[str]
     # The number of times this workflow's execution has been attempted
dbos/cli/cli.py
CHANGED
@@ -20,6 +20,7 @@ from dbos._debug import debug_workflow, parse_start_command
 from .. import load_config
 from .._app_db import ApplicationDatabase
 from .._dbos_config import _is_valid_app_name
+from .._docker_pg_helper import start_docker_pg, stop_docker_pg
 from .._sys_db import SystemDatabase, reset_system_database
 from .._workflow_commands import (
     get_workflow,
@@ -37,6 +38,21 @@ queue = typer.Typer()
 app.add_typer(workflow, name="workflow", help="Manage DBOS workflows")
 workflow.add_typer(queue, name="queue", help="Manage enqueued workflows")

+postgres = typer.Typer()
+app.add_typer(
+    postgres, name="postgres", help="Manage local Postgres database with Docker"
+)
+
+
+@postgres.command(name="start", help="Start a local Postgres database")
+def pg_start() -> None:
+    start_docker_pg()
+
+
+@postgres.command(name="stop", help="Stop the local Postgres database")
+def pg_stop() -> None:
+    stop_docker_pg()
+

 def _on_windows() -> bool:
     return platform.system() == "Windows"
@@ -246,7 +262,7 @@ def reset(
 def debug(
     workflow_id: Annotated[str, typer.Argument(help="Workflow ID to debug")],
 ) -> None:
-    config = load_config(silent=True
+    config = load_config(silent=True)
     start = config["runtimeConfig"]["start"]
     if not start:
         typer.echo("No start commands found in 'dbos-config.yaml'")
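Together with the helper module above, these commands make local development self-contained. Typical usage, with the defaults the helper documents (container dbos-db, password from PGPASSWORD or 'dbos'):

    dbos postgres start   # create or restart the dbos-db container and wait until Postgres accepts connections
    dbos postgres stop    # stop the container; it was started with remove=True, so it is removed as well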
dbos/dbos-config.schema.json
CHANGED
@@ -62,10 +62,6 @@
       "type": "string",
       "description": "If using SSL/TLS to securely connect to a database, path to an SSL root certificate file"
     },
-    "local_suffix": {
-      "type": "boolean",
-      "description": "Whether to suffix app_db_name with '_local'. Set to true when doing local development using a DBOS Cloud database."
-    },
     "app_db_client": {
       "type": "string",
       "description": "Specify the database client to use to connect to the application database",
{dbos-0.26.0a7.dist-info → dbos-0.26.0a9.dist-info}/RECORD
CHANGED
@@ -1,26 +1,23 @@
-dbos-0.26.
-dbos-0.26.
-dbos-0.26.
-dbos-0.26.
+dbos-0.26.0a9.dist-info/METADATA,sha256=Olpmk9xnLEbfqKPtj7_cLfCShqh_RFowiykc6KtBTNQ,5553
+dbos-0.26.0a9.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
+dbos-0.26.0a9.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
+dbos-0.26.0a9.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
 dbos/__init__.py,sha256=3NQfGlBiiUSM_v88STdVP3rNZvGkUL_9WbSotKb8Voo,873
 dbos/__main__.py,sha256=G7Exn-MhGrVJVDbgNlpzhfh8WMX_72t3_oJaFT9Lmt8,653
 dbos/_admin_server.py,sha256=vxPG_YJ6lYrkfPCSp42FiATVLBOij7Fm52Yngg5Z_tE,7027
-dbos/_app_db.py,sha256=
+dbos/_app_db.py,sha256=IwnNlHEQYp2bl5BM66vVPFa40h8DOtvRgUWTJ1dz20A,8963
 dbos/_classproperty.py,sha256=f0X-_BySzn3yFDRKB2JpCbLYQ9tLwt1XftfshvY7CBs,626
 dbos/_client.py,sha256=fzW_Gagh-oyWyDYtREcQDBesoVl_LsEoMeJAsn5-C5s,7262
-dbos/
-dbos/_cloudutils/cloudutils.py,sha256=YC7jGsIopT0KveLsqbRpQk2KlRBk-nIRC_UCgep4f3o,7797
-dbos/_cloudutils/databases.py,sha256=_shqaqSvhY4n2ScgQ8IP5PDZvzvcx3YBKV8fj-cxhSY,8543
-dbos/_conductor/conductor.py,sha256=PzUFCX_JXGHClTF-hqTLR0ssO4kXdet4ZwHhJtuevEM,16839
+dbos/_conductor/conductor.py,sha256=HYzVL29IMMrs2Mnms_7cHJynCnmmEN5SDQOMjzn3UoU,16840
 dbos/_conductor/protocol.py,sha256=xN7pmooyF1pqbH1b6WhllU5718P7zSb_b0KCwA6bzcs,6716
 dbos/_context.py,sha256=I8sLkdKTTkZEz7wG-MjynaQB6XEF2bLXuwNksiauP7w,19430
-dbos/_core.py,sha256=
+dbos/_core.py,sha256=tjBGVbSgOn59lR29gcYi5f6fcKNKQM5EP1QXrQGUkXA,45426
 dbos/_croniter.py,sha256=XHAyUyibs_59sJQfSNWkP7rqQY6_XrlfuuCxk4jYqek,47559
-dbos/
-dbos/
-dbos/
-dbos/
-dbos/_error.py,sha256=
+dbos/_dbos.py,sha256=TOLi95Aca50huyOAWl9H5fii4nMYaGwN-zQ8GlLWdOg,45569
+dbos/_dbos_config.py,sha256=m05IFjM0jSwZBsnFMF_4qP2JkjVFc0gqyM2tnotXq20,20636
+dbos/_debug.py,sha256=MNlQVZ6TscGCRQeEEL0VE8Uignvr6dPeDDDefS3xgIE,1823
+dbos/_docker_pg_helper.py,sha256=9OGbuavRA_cwE-uPiLZJSdpbQu-6PPgl9clQZB2zT_U,5852
+dbos/_error.py,sha256=HtdV6Qy7qRyGD57wxLwE7YT0WdYtlx5ZLEe_Kv_gC-U,5953
 dbos/_fastapi.py,sha256=PhaKftbApHnjtYEOw0EYna_3K0cmz__J9of7mRJWzu4,3704
 dbos/_flask.py,sha256=DZKUZR5-xOzPI7tYZ53r2PvvHVoAb8SYwLzMVFsVfjI,2608
 dbos/_kafka.py,sha256=pz0xZ9F3X9Ky1k-VSbeF3tfPhP3UPr3lUUhUfE41__U,4198
@@ -47,7 +44,7 @@ dbos/_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_schemas/application_database.py,sha256=SypAS9l9EsaBHFn9FR8jmnqt01M74d9AF1AMa4m2hhI,1040
 dbos/_schemas/system_database.py,sha256=W9eSpL7SZzQkxcEZ4W07BOcwkkDr35b9oCjUOgfHWek,5336
 dbos/_serialization.py,sha256=YCYv0qKAwAZ1djZisBC7khvKqG-5OcIv9t9EC5PFIog,1743
-dbos/_sys_db.py,sha256=
+dbos/_sys_db.py,sha256=BqXZ0l4X4Y4cFKDyaa8ZirWCnRlof9A12yp-XflGnb0,68229
 dbos/_templates/dbos-db-starter/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
 dbos/_templates/dbos-db-starter/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_templates/dbos-db-starter/__package/main.py,sha256=nJMN3ZD2lmwg4Dcgmiwqc-tQGuCJuJal2Xl85iA277U,2453
@@ -60,11 +57,11 @@ dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py,sh
 dbos/_templates/dbos-db-starter/start_postgres_docker.py,sha256=lQVLlYO5YkhGPEgPqwGc7Y8uDKse9HsWv5fynJEFJHM,1681
 dbos/_tracer.py,sha256=dFDSFlta-rfA3-ahIRLYwnnoAOmlavdxAGllqwFgnCA,2440
 dbos/_utils.py,sha256=nFRUHzVjXG5AusF85AlYHikj63Tzi-kQm992ihsrAxA,201
-dbos/_workflow_commands.py,sha256=
+dbos/_workflow_commands.py,sha256=Tf7_hZQoPgP90KHQjMNlBggCNrLLCNRJxHtAJLvarc4,6153
 dbos/cli/_github_init.py,sha256=Y_bDF9gfO2jB1id4FV5h1oIxEJRWyqVjhb7bNEa5nQ0,3224
 dbos/cli/_template_init.py,sha256=-WW3kbq0W_Tq4WbMqb1UGJG3xvJb3woEY5VspG95Srk,2857
-dbos/cli/cli.py,sha256=
-dbos/dbos-config.schema.json,sha256=
+dbos/cli/cli.py,sha256=Lb_RYmXoT5KH0xDbwaYpROE4c-svZ0eCq2Kxg7cAxTw,16537
+dbos/dbos-config.schema.json,sha256=i7jcxXqByKq0Jzv3nAUavONtj03vTwj6vWP4ylmBr8o,5694
 dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
 version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
-dbos-0.26.
+dbos-0.26.0a9.dist-info/RECORD,,