dbos 0.23.0a5__tar.gz → 0.23.0a9__tar.gz
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Potentially problematic release: this version of dbos has been flagged as potentially problematic on the registry; see the registry page for details.
- {dbos-0.23.0a5 → dbos-0.23.0a9}/PKG-INFO +6 -3
- {dbos-0.23.0a5 → dbos-0.23.0a9}/README.md +5 -2
- dbos-0.23.0a9/dbos/__main__.py +26 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_app_db.py +29 -24
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_core.py +45 -25
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_croniter.py +2 -2
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_dbos.py +15 -5
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_dbos_config.py +45 -11
- dbos-0.23.0a9/dbos/_debug.py +45 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_migrations/versions/5c361fc04708_added_system_tables.py +1 -1
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_sys_db.py +119 -71
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py +1 -1
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_workflow_commands.py +4 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/cli/cli.py +18 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/pyproject.toml +1 -1
- {dbos-0.23.0a5 → dbos-0.23.0a9}/tests/test_config.py +38 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/tests/test_croniter.py +2 -2
- dbos-0.23.0a9/tests/test_debug.py +147 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/tests/test_workflow_cmds.py +15 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/LICENSE +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/__init__.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_admin_server.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_classproperty.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_cloudutils/authentication.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_cloudutils/cloudutils.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_cloudutils/databases.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_context.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_db_wizard.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_error.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_fastapi.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_flask.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_kafka.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_kafka_message.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_logger.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_migrations/env.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_migrations/script.py.mako +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_migrations/versions/04ca4f231047_workflow_queues_executor_id.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_migrations/versions/50f3227f0b4b_fix_job_queue.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_migrations/versions/a3b18ad34abe_added_triggers.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_migrations/versions/d76646551a6b_job_queue_limiter.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_migrations/versions/d76646551a6c_workflow_queue.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_migrations/versions/eab0cc1d9a14_job_queue.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_outcome.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_queue.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_recovery.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_registrations.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_request.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_roles.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_scheduler.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_schemas/__init__.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_schemas/application_database.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_schemas/system_database.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_serialization.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_templates/dbos-db-starter/README.md +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_templates/dbos-db-starter/__package/__init__.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_templates/dbos-db-starter/__package/main.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_templates/dbos-db-starter/__package/schema.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_templates/dbos-db-starter/alembic.ini +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_templates/dbos-db-starter/migrations/env.py.dbos +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_templates/dbos-db-starter/migrations/script.py.mako +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_tracer.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_utils.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/cli/_github_init.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/cli/_template_init.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/dbos-config.schema.json +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/py.typed +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/tests/__init__.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/tests/atexit_no_ctor.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/tests/atexit_no_launch.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/tests/classdefs.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/tests/conftest.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/tests/more_classdefs.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/tests/queuedworkflow.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/tests/test_admin_server.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/tests/test_async.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/tests/test_classdecorators.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/tests/test_concurrency.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/tests/test_dbos.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/tests/test_failures.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/tests/test_fastapi.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/tests/test_fastapi_roles.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/tests/test_flask.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/tests/test_kafka.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/tests/test_outcome.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/tests/test_package.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/tests/test_queue.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/tests/test_scheduler.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/tests/test_schema_migration.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/tests/test_singleton.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/tests/test_spans.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/tests/test_sqlalchemy.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/tests/test_workflow_cancel.py +0 -0
- {dbos-0.23.0a5 → dbos-0.23.0a9}/version/__init__.py +0 -0
{dbos-0.23.0a5 → dbos-0.23.0a9}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dbos
-Version: 0.23.0a5
+Version: 0.23.0a9
 Summary: Ultra-lightweight durable execution in Python
 Author-Email: "DBOS, Inc." <contact@dbos.dev>
 License: MIT
@@ -78,6 +78,9 @@ You can use DBOS to add reliable background jobs or cron scheduling or queues to
 Install and configure with:
 
 ```shell
+python3 -m venv dbos-example/.venv
+cd dbos-example
+source .venv/bin/activate
 pip install dbos
 dbos init --config
 ```
@@ -103,7 +106,7 @@ def step_two():
 def dbos_workflow():
     step_one()
     for _ in range(5):
-        print("Press Control +
+        print("Press Control + C twice to stop the app...")
         DBOS.sleep(1)
     step_two()
 
@@ -114,7 +117,7 @@ def fastapi_endpoint():
 
 Save the program into `main.py` and start it with `fastapi run`.
 Visit `localhost:8000` in your browser to start the workflow.
-When prompted, press `Control +
+When prompted, press `Control + C` (You may need to press `Control + C` twice quickly, or press `Control + \`, if `Control + C` is not effective in your environment) to force quit your application.
 It should crash midway through the workflow, having completed step one but not step two.
 Then, restart your app with `fastapi run`.
 It should resume the workflow from where it left off, completing step two without re-executing step one.
{dbos-0.23.0a5 → dbos-0.23.0a9}/README.md

@@ -51,6 +51,9 @@ You can use DBOS to add reliable background jobs or cron scheduling or queues to
 Install and configure with:
 
 ```shell
+python3 -m venv dbos-example/.venv
+cd dbos-example
+source .venv/bin/activate
 pip install dbos
 dbos init --config
 ```
@@ -76,7 +79,7 @@ def step_two():
 def dbos_workflow():
     step_one()
     for _ in range(5):
-        print("Press Control +
+        print("Press Control + C twice to stop the app...")
         DBOS.sleep(1)
     step_two()
 
@@ -87,7 +90,7 @@ def fastapi_endpoint():
 
 Save the program into `main.py` and start it with `fastapi run`.
 Visit `localhost:8000` in your browser to start the workflow.
-When prompted, press `Control +
+When prompted, press `Control + C` (You may need to press `Control + C` twice quickly, or press `Control + \`, if `Control + C` is not effective in your environment) to force quit your application.
 It should crash midway through the workflow, having completed step one but not step two.
 Then, restart your app with `fastapi run`.
 It should resume the workflow from where it left off, completing step two without re-executing step one.
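For context, the quickstart program these README and PKG-INFO hunks edit looks roughly like the sketch below. This is a reconstruction for readability, not the exact shipped example: the step bodies and the FastAPI wiring are assumed from the surrounding diff context and the public DBOS Python API (`@DBOS.step()`, `@DBOS.workflow()`, `DBOS.sleep()`).

```python
# Approximate shape of the README quickstart these hunks modify (assumed, not verbatim).
from fastapi import FastAPI
from dbos import DBOS

app = FastAPI()
DBOS(fastapi=app)  # register DBOS with the FastAPI app; exact wiring may differ in the shipped README


@DBOS.step()
def step_one():
    print("Step one completed!")


@DBOS.step()
def step_two():
    print("Step two completed!")


@DBOS.workflow()
def dbos_workflow():
    step_one()
    for _ in range(5):
        print("Press Control + C twice to stop the app...")
        DBOS.sleep(1)
    step_two()


@app.get("/")
def fastapi_endpoint():
    dbos_workflow()
```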
dbos-0.23.0a9/dbos/__main__.py (new file)

@@ -0,0 +1,26 @@
+import re
+import sys
+from typing import NoReturn, Optional, Union
+
+from dbos.cli.cli import app
+
+
+def main() -> NoReturn:
+    # Modify sys.argv[0] to remove script or executable extensions
+    sys.argv[0] = re.sub(r"(-script\.pyw|\.exe)?$", "", sys.argv[0])
+
+    retval: Optional[Union[str, int]] = 1
+    try:
+        app()
+        retval = None
+    except SystemExit as e:
+        retval = e.code
+    except Exception as e:
+        print(f"Error: {e}", file=sys.stderr)
+        retval = 1
+    finally:
+        sys.exit(retval)
+
+
+if __name__ == "__main__":
+    main()
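The new `__main__.py` makes the package runnable as a module: `python -m dbos ...` dispatches to the same Typer `app` as the `dbos` console script, and `main()` turns any `SystemExit` or unexpected exception into a process exit code. A quick way to confirm this, assuming this dbos version is installed in the active environment (only `--help` is assumed here, which Typer provides for any CLI app):

```python
# Hedged check: invoke the package as a module and inspect the exit code.
import subprocess
import sys

result = subprocess.run(
    [sys.executable, "-m", "dbos", "--help"],
    capture_output=True,
    text=True,
)
print(result.returncode)     # 0 on success, per main()'s SystemExit handling
print(result.stdout[:120])   # start of the CLI help text
```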
{dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_app_db.py

@@ -27,29 +27,30 @@ class RecordedResult(TypedDict):
 
 class ApplicationDatabase:
 
-    def __init__(self, config: ConfigFile):
+    def __init__(self, config: ConfigFile, *, debug_mode: bool = False):
         self.config = config
 
         app_db_name = config["database"]["app_db_name"]
 
         # If the application database does not already exist, create it
-  [old lines 36-52 removed; content not captured in this extract]
+        if not debug_mode:
+            postgres_db_url = sa.URL.create(
+                "postgresql+psycopg",
+                username=config["database"]["username"],
+                password=config["database"]["password"],
+                host=config["database"]["hostname"],
+                port=config["database"]["port"],
+                database="postgres",
+            )
+            postgres_db_engine = sa.create_engine(postgres_db_url)
+            with postgres_db_engine.connect() as conn:
+                conn.execution_options(isolation_level="AUTOCOMMIT")
+                if not conn.execute(
+                    sa.text("SELECT 1 FROM pg_database WHERE datname=:db_name"),
+                    parameters={"db_name": app_db_name},
+                ).scalar():
+                    conn.execute(sa.text(f"CREATE DATABASE {app_db_name}"))
+            postgres_db_engine.dispose()
 
         # Create a connection pool for the application database
         app_db_url = sa.URL.create(
@@ -64,14 +65,16 @@ class ApplicationDatabase:
             app_db_url, pool_size=20, max_overflow=5, pool_timeout=30
         )
         self.sessionmaker = sessionmaker(bind=self.engine)
+        self.debug_mode = debug_mode
 
         # Create the dbos schema and transaction_outputs table in the application database
-  [old lines 69-74 removed; content not captured in this extract]
+        if not debug_mode:
+            with self.engine.begin() as conn:
+                schema_creation_query = sa.text(
+                    f"CREATE SCHEMA IF NOT EXISTS {ApplicationSchema.schema}"
+                )
+                conn.execute(schema_creation_query)
+            ApplicationSchema.metadata_obj.create_all(self.engine)
 
     def destroy(self) -> None:
         self.engine.dispose()
@@ -100,6 +103,8 @@ class ApplicationDatabase:
             raise
 
     def record_transaction_error(self, output: TransactionResultInternal) -> None:
+        if self.debug_mode:
+            raise Exception("called record_transaction_error in debug mode")
         try:
            with self.engine.begin() as conn:
                conn.execute(
{dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_core.py

@@ -186,21 +186,31 @@ def _init_workflow(
         inputs = {"args": inputs["args"][1:], "kwargs": inputs["kwargs"]}
 
     wf_status = status["status"]
-    if
-  [old lines 190-192 removed; content not captured in this extract]
-        wf_status =
-            status, max_recovery_attempts=max_recovery_attempts
-        )
-        # TODO: Modify the inputs if they were changed by `update_workflow_inputs`
-        dbos._sys_db.update_workflow_inputs(wfid, _serialization.serialize_args(inputs))
+    if dbos.debug_mode:
+        get_status_result = dbos._sys_db.get_workflow_status(wfid)
+        if get_status_result is None:
+            raise DBOSNonExistentWorkflowError(wfid)
+        wf_status = get_status_result["status"]
     else:
-  [old lines 199-200 removed; content not captured in this extract]
+        if temp_wf_type != "transaction" or queue is not None:
+            # Synchronously record the status and inputs for workflows and single-step workflows
+            # We also have to do this for single-step workflows because of the foreign key constraint on the operation outputs table
+            # TODO: Make this transactional (and with the queue step below)
+            wf_status = dbos._sys_db.insert_workflow_status(
+                status, max_recovery_attempts=max_recovery_attempts
+            )
+            # TODO: Modify the inputs if they were changed by `update_workflow_inputs`
+            dbos._sys_db.update_workflow_inputs(
+                wfid, _serialization.serialize_args(inputs)
+            )
+        else:
+            # Buffer the inputs for single-transaction workflows, but don't buffer the status
+            dbos._sys_db.buffer_workflow_inputs(
+                wfid, _serialization.serialize_args(inputs)
+            )
 
-  [old lines 202-203 removed; content not captured in this extract]
+    if queue is not None and wf_status == WorkflowStatusString.ENQUEUED.value:
+        dbos._sys_db.enqueue(wfid, queue)
 
     status["status"] = wf_status
     return status
@@ -215,10 +225,11 @@ def _get_wf_invoke_func(
             output = func()
             status["status"] = "SUCCESS"
             status["output"] = _serialization.serialize(output)
-            if
-  [old lines 219-221 removed; content not captured in this extract]
+            if not dbos.debug_mode:
+                if status["queue_name"] is not None:
+                    queue = dbos._registry.queue_info_map[status["queue_name"]]
+                    dbos._sys_db.remove_from_queue(status["workflow_uuid"], queue)
+                dbos._sys_db.buffer_workflow_status(status)
             return output
         except DBOSWorkflowConflictIDError:
             # Retrieve the workflow handle and wait for the result.
@@ -233,10 +244,11 @@ def _get_wf_invoke_func(
         except Exception as error:
             status["status"] = "ERROR"
             status["error"] = _serialization.serialize_exception(error)
-            if
-  [old lines 237-239 removed; content not captured in this extract]
+            if not dbos.debug_mode:
+                if status["queue_name"] is not None:
+                    queue = dbos._registry.queue_info_map[status["queue_name"]]
+                    dbos._sys_db.remove_from_queue(status["workflow_uuid"], queue)
+                dbos._sys_db.update_workflow_status(status)
             raise
 
     return persist
@@ -422,10 +434,12 @@ def start_workflow(
 
     wf_status = status["status"]
 
-    if (
-        not
-  [old lines 427-428 removed; content not captured in this extract]
+    if not execute_workflow or (
+        not dbos.debug_mode
+        and (
+            wf_status == WorkflowStatusString.ERROR.value
+            or wf_status == WorkflowStatusString.SUCCESS.value
+        )
     ):
         dbos.logger.debug(
             f"Workflow {new_wf_id} already completed with status {wf_status}. Directly returning a workflow handle."
@@ -597,6 +611,10 @@ def decorate_transaction(
                         ctx.function_id,
                     )
                 )
+                if dbos.debug_mode and recorded_output is None:
+                    raise DBOSException(
+                        "Transaction output not found in debug mode"
+                    )
                 if recorded_output:
                     dbos.logger.debug(
                         f"Replaying transaction, id: {ctx.function_id}, name: {attributes['name']}"
@@ -780,6 +798,8 @@ def decorate_step(
                 recorded_output = dbos._sys_db.check_operation_execution(
                     ctx.workflow_id, ctx.function_id
                 )
+                if dbos.debug_mode and recorded_output is None:
+                    raise DBOSException("Step output not found in debug mode")
                 if recorded_output:
                     dbos.logger.debug(
                         f"Replaying step, id: {ctx.function_id}, name: {attributes['name']}"
{dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_croniter.py

@@ -5,14 +5,14 @@ Copyright (C) 2010-2012 Matsumoto Taichi.
 
 Permission is hereby granted, free of charge, to any person obtaining a copy of this
 software and associated documentation files (the "Software"), to deal in the Software
-without restriction, including without limitation the rights to use, copy, modify,
+without restriction, including without limitation the rights to use, copy, modify,
 merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit
 persons to whom the Software is furnished to do so, subject to the following conditions:
 
 The above copyright notice and this permission notice shall be included in all
 copies or substantial portions of the Software.
 
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
 INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
 PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
 FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR

(The two changed lines in this hunk differ only in trailing whitespace.)
{dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_dbos.py

@@ -313,6 +313,7 @@ class DBOS:
         dbos_logger.info("Initializing DBOS")
         self.config: ConfigFile = config
         self._launched: bool = False
+        self._debug_mode: bool = False
         self._sys_db_field: Optional[SystemDatabase] = None
         self._app_db_field: Optional[ApplicationDatabase] = None
         self._registry: DBOSRegistry = _get_or_create_dbos_registry()
@@ -380,23 +381,32 @@
             rv: AdminServer = self._admin_server_field
             return rv
 
+    @property
+    def debug_mode(self) -> bool:
+        return self._debug_mode
+
     @classmethod
-    def launch(cls) -> None:
+    def launch(cls, *, debug_mode: bool = False) -> None:
         if _dbos_global_instance is not None:
-            _dbos_global_instance._launch()
+            _dbos_global_instance._launch(debug_mode=debug_mode)
 
-    def _launch(self) -> None:
+    def _launch(self, *, debug_mode: bool = False) -> None:
         try:
             if self._launched:
                 dbos_logger.warning(f"DBOS was already launched")
                 return
             self._launched = True
+            self._debug_mode = debug_mode
             if GlobalParams.app_version == "":
                 GlobalParams.app_version = self._registry.compute_app_version()
             dbos_logger.info(f"Application version: {GlobalParams.app_version}")
             self._executor_field = ThreadPoolExecutor(max_workers=64)
-            self._sys_db_field = SystemDatabase(self.config)
-            self._app_db_field = ApplicationDatabase(self.config)
+            self._sys_db_field = SystemDatabase(self.config, debug_mode=debug_mode)
+            self._app_db_field = ApplicationDatabase(self.config, debug_mode=debug_mode)
+
+            if debug_mode:
+                return
+
             admin_port = self.config["runtimeConfig"].get("admin_port")
             if admin_port is None:
                 admin_port = 3001
{dbos-0.23.0a5 → dbos-0.23.0a9}/dbos/_dbos_config.py

@@ -192,7 +192,11 @@ def load_config(
     data = cast(ConfigFile, data)
     db_connection = load_db_connection()
     if not silent:
-        if
+        if os.getenv("DBOS_DBHOST"):
+            print(
+                "[bold blue]Loading database connection parameters from debug environment variables[/bold blue]"
+            )
+        elif data["database"].get("hostname"):
             print(
                 "[bold blue]Loading database connection parameters from dbos-config.yaml[/bold blue]"
             )
@@ -205,32 +209,62 @@ def load_config(
             "[bold blue]Using default database connection parameters (localhost)[/bold blue]"
         )
 
+    dbos_dbport: Optional[int] = None
+    dbport_env = os.getenv("DBOS_DBPORT")
+    if dbport_env:
+        try:
+            dbos_dbport = int(dbport_env)
+        except ValueError:
+            pass
+    dbos_dblocalsuffix: Optional[bool] = None
+    dblocalsuffix_env = os.getenv("DBOS_DBLOCALSUFFIX")
+    if dblocalsuffix_env:
+        try:
+            dbos_dblocalsuffix = dblocalsuffix_env.casefold() == "true".casefold()
+        except ValueError:
+            pass
+
     data["database"]["hostname"] = (
-  [old line 209 removed; content not captured in this extract]
+        os.getenv("DBOS_DBHOST")
+        or data["database"].get("hostname")
+        or db_connection.get("hostname")
+        or "localhost"
     )
+
     data["database"]["port"] = (
-        data["database"].get("port") or db_connection.get("port") or 5432
+        dbos_dbport or data["database"].get("port") or db_connection.get("port") or 5432
     )
     data["database"]["username"] = (
-  [old line 215 removed; content not captured in this extract]
+        os.getenv("DBOS_DBUSER")
+        or data["database"].get("username")
+        or db_connection.get("username")
+        or "postgres"
     )
     data["database"]["password"] = (
-  [old line 218 removed; content not captured in this extract]
+        os.getenv("DBOS_DBPASSWORD")
+        or data["database"].get("password")
         or db_connection.get("password")
         or os.environ.get("PGPASSWORD")
         or "dbos"
     )
-  [old lines 223-227 removed; content not captured in this extract]
+
+    local_suffix = False
+    dbcon_local_suffix = db_connection.get("local_suffix")
+    if dbcon_local_suffix is not None:
+        local_suffix = dbcon_local_suffix
+    if data["database"].get("local_suffix") is not None:
+        local_suffix = data["database"].get("local_suffix")
+    if dbos_dblocalsuffix is not None:
+        local_suffix = dbos_dblocalsuffix
+    data["database"]["local_suffix"] = local_suffix
 
     # Configure the DBOS logger
     config_logger(data)
 
     # Check the connectivity to the database and make sure it's properly configured
-    if
+    # Note, never use db wizard if the DBOS is running in debug mode (i.e. DBOS_DEBUG_WORKFLOW_ID env var is set)
+    debugWorkflowId = os.getenv("DBOS_DEBUG_WORKFLOW_ID")
+    if use_db_wizard and debugWorkflowId is None:
         data = db_wizard(data, config_file_path)
 
     if "local_suffix" in data["database"] and data["database"]["local_suffix"]:
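This second `load_config` hunk introduces `DBOS_DBHOST`, `DBOS_DBPORT`, `DBOS_DBUSER`, `DBOS_DBPASSWORD`, and `DBOS_DBLOCALSUFFIX` environment overrides, with precedence: environment variable, then `dbos-config.yaml`, then the saved database connection settings, then a built-in default. The standalone sketch below mirrors that resolution order for two of the fields; it is illustrative only and not part of the library API.

```python
# Illustrative sketch of the precedence applied in the hunk above
# (env var > dbos-config.yaml > saved db_connection settings > default).
import os
from typing import Any, Dict, Optional


def resolve_hostname(config_db: Dict[str, Any], db_connection: Dict[str, Any]) -> str:
    return (
        os.getenv("DBOS_DBHOST")
        or config_db.get("hostname")
        or db_connection.get("hostname")
        or "localhost"
    )


def resolve_port(config_db: Dict[str, Any], db_connection: Dict[str, Any]) -> int:
    dbos_dbport: Optional[int] = None
    dbport_env = os.getenv("DBOS_DBPORT")
    if dbport_env:
        try:
            dbos_dbport = int(dbport_env)
        except ValueError:
            pass  # a non-numeric DBOS_DBPORT is ignored, matching the hunk
    return dbos_dbport or config_db.get("port") or db_connection.get("port") or 5432


# Hypothetical values: only the hostname override is set, so it wins;
# the port falls back to the value from dbos-config.yaml.
os.environ["DBOS_DBHOST"] = "db.internal.example"
print(resolve_hostname({"hostname": "localhost"}, {}))  # -> db.internal.example
print(resolve_port({"port": 5433}, {}))                 # -> 5433
```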
dbos-0.23.0a9/dbos/_debug.py (new file)

@@ -0,0 +1,45 @@
+import re
+import runpy
+import sys
+from typing import Union
+
+from dbos import DBOS
+
+
+class PythonModule:
+    def __init__(self, module_name: str):
+        self.module_name = module_name
+
+
+def debug_workflow(workflow_id: str, entrypoint: Union[str, PythonModule]) -> None:
+    # include the current directory (represented by empty string) in the search path
+    # if it not already included
+    if "" not in sys.path:
+        sys.path.insert(0, "")
+    if isinstance(entrypoint, str):
+        runpy.run_path(entrypoint)
+    elif isinstance(entrypoint, PythonModule):
+        runpy.run_module(entrypoint.module_name)
+    else:
+        raise ValueError("Invalid entrypoint type. Must be a string or PythonModule.")
+
+    DBOS.logger.info(f"Debugging workflow {workflow_id}...")
+    DBOS.launch(debug_mode=True)
+    handle = DBOS.execute_workflow_id(workflow_id)
+    handle.get_result()
+    DBOS.logger.info("Workflow Debugging complete. Exiting process.")
+
+
+def parse_start_command(command: str) -> Union[str, PythonModule]:
+    match = re.match(r"fastapi\s+run\s+(\.?[\w/]+\.py)", command)
+    if match:
+        return match.group(1)
+    match = re.match(r"python3?\s+(\.?[\w/]+\.py)", command)
+    if match:
+        return match.group(1)
+    match = re.match(r"python3?\s+-m\s+([\w\.]+)", command)
+    if match:
+        return PythonModule(match.group(1))
+    raise ValueError(
+        "Invalid command format. Must be 'fastapi run <script>' or 'python <script>' or 'python -m <module>'"
+    )