dbos 0.27.0a3.tar.gz → 0.27.0a4.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dbos might be problematic.

Files changed (104)
  1. {dbos-0.27.0a3 → dbos-0.27.0a4}/PKG-INFO +1 -1
  2. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_app_db.py +1 -1
  3. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_client.py +7 -5
  4. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_dbos.py +21 -5
  5. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_sys_db.py +2 -14
  6. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/cli/cli.py +161 -112
  7. {dbos-0.27.0a3 → dbos-0.27.0a4}/pyproject.toml +1 -1
  8. {dbos-0.27.0a3 → dbos-0.27.0a4}/tests/conftest.py +1 -1
  9. {dbos-0.27.0a3 → dbos-0.27.0a4}/tests/test_client.py +16 -5
  10. {dbos-0.27.0a3 → dbos-0.27.0a4}/tests/test_package.py +76 -6
  11. {dbos-0.27.0a3 → dbos-0.27.0a4}/LICENSE +0 -0
  12. {dbos-0.27.0a3 → dbos-0.27.0a4}/README.md +0 -0
  13. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/__init__.py +0 -0
  14. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/__main__.py +0 -0
  15. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_admin_server.py +0 -0
  16. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_classproperty.py +0 -0
  17. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_conductor/conductor.py +0 -0
  18. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_conductor/protocol.py +0 -0
  19. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_context.py +0 -0
  20. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_core.py +0 -0
  21. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_croniter.py +0 -0
  22. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_dbos_config.py +0 -0
  23. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_debug.py +0 -0
  24. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_docker_pg_helper.py +0 -0
  25. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_error.py +0 -0
  26. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_event_loop.py +0 -0
  27. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_fastapi.py +0 -0
  28. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_flask.py +0 -0
  29. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_kafka.py +0 -0
  30. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_kafka_message.py +0 -0
  31. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_logger.py +0 -0
  32. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_migrations/env.py +0 -0
  33. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_migrations/script.py.mako +0 -0
  34. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_migrations/versions/04ca4f231047_workflow_queues_executor_id.py +0 -0
  35. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_migrations/versions/50f3227f0b4b_fix_job_queue.py +0 -0
  36. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_migrations/versions/5c361fc04708_added_system_tables.py +0 -0
  37. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_migrations/versions/83f3732ae8e7_workflow_timeout.py +0 -0
  38. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_migrations/versions/a3b18ad34abe_added_triggers.py +0 -0
  39. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_migrations/versions/d76646551a6b_job_queue_limiter.py +0 -0
  40. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_migrations/versions/d76646551a6c_workflow_queue.py +0 -0
  41. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_migrations/versions/eab0cc1d9a14_job_queue.py +0 -0
  42. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py +0 -0
  43. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_outcome.py +0 -0
  44. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_queue.py +0 -0
  45. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_recovery.py +0 -0
  46. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_registrations.py +0 -0
  47. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_request.py +0 -0
  48. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_roles.py +0 -0
  49. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_scheduler.py +0 -0
  50. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_schemas/__init__.py +0 -0
  51. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_schemas/application_database.py +0 -0
  52. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_schemas/system_database.py +0 -0
  53. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_serialization.py +0 -0
  54. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_templates/dbos-db-starter/README.md +0 -0
  55. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_templates/dbos-db-starter/__package/__init__.py +0 -0
  56. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_templates/dbos-db-starter/__package/main.py +0 -0
  57. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_templates/dbos-db-starter/__package/schema.py +0 -0
  58. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_templates/dbos-db-starter/alembic.ini +0 -0
  59. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +0 -0
  60. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_templates/dbos-db-starter/migrations/env.py.dbos +0 -0
  61. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_templates/dbos-db-starter/migrations/script.py.mako +0 -0
  62. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py +0 -0
  63. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -0
  64. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_tracer.py +0 -0
  65. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_utils.py +0 -0
  66. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_workflow_commands.py +0 -0
  67. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/cli/_github_init.py +0 -0
  68. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/cli/_template_init.py +0 -0
  69. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/dbos-config.schema.json +0 -0
  70. {dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/py.typed +0 -0
  71. {dbos-0.27.0a3 → dbos-0.27.0a4}/tests/__init__.py +0 -0
  72. {dbos-0.27.0a3 → dbos-0.27.0a4}/tests/atexit_no_ctor.py +0 -0
  73. {dbos-0.27.0a3 → dbos-0.27.0a4}/tests/atexit_no_launch.py +0 -0
  74. {dbos-0.27.0a3 → dbos-0.27.0a4}/tests/classdefs.py +0 -0
  75. {dbos-0.27.0a3 → dbos-0.27.0a4}/tests/client_collateral.py +0 -0
  76. {dbos-0.27.0a3 → dbos-0.27.0a4}/tests/client_worker.py +0 -0
  77. {dbos-0.27.0a3 → dbos-0.27.0a4}/tests/dupname_classdefs1.py +0 -0
  78. {dbos-0.27.0a3 → dbos-0.27.0a4}/tests/dupname_classdefsa.py +0 -0
  79. {dbos-0.27.0a3 → dbos-0.27.0a4}/tests/more_classdefs.py +0 -0
  80. {dbos-0.27.0a3 → dbos-0.27.0a4}/tests/queuedworkflow.py +0 -0
  81. {dbos-0.27.0a3 → dbos-0.27.0a4}/tests/test_admin_server.py +0 -0
  82. {dbos-0.27.0a3 → dbos-0.27.0a4}/tests/test_async.py +0 -0
  83. {dbos-0.27.0a3 → dbos-0.27.0a4}/tests/test_classdecorators.py +0 -0
  84. {dbos-0.27.0a3 → dbos-0.27.0a4}/tests/test_concurrency.py +0 -0
  85. {dbos-0.27.0a3 → dbos-0.27.0a4}/tests/test_config.py +0 -0
  86. {dbos-0.27.0a3 → dbos-0.27.0a4}/tests/test_croniter.py +0 -0
  87. {dbos-0.27.0a3 → dbos-0.27.0a4}/tests/test_dbos.py +0 -0
  88. {dbos-0.27.0a3 → dbos-0.27.0a4}/tests/test_debug.py +0 -0
  89. {dbos-0.27.0a3 → dbos-0.27.0a4}/tests/test_docker_secrets.py +0 -0
  90. {dbos-0.27.0a3 → dbos-0.27.0a4}/tests/test_failures.py +0 -0
  91. {dbos-0.27.0a3 → dbos-0.27.0a4}/tests/test_fastapi.py +0 -0
  92. {dbos-0.27.0a3 → dbos-0.27.0a4}/tests/test_fastapi_roles.py +0 -0
  93. {dbos-0.27.0a3 → dbos-0.27.0a4}/tests/test_flask.py +0 -0
  94. {dbos-0.27.0a3 → dbos-0.27.0a4}/tests/test_kafka.py +0 -0
  95. {dbos-0.27.0a3 → dbos-0.27.0a4}/tests/test_outcome.py +0 -0
  96. {dbos-0.27.0a3 → dbos-0.27.0a4}/tests/test_queue.py +0 -0
  97. {dbos-0.27.0a3 → dbos-0.27.0a4}/tests/test_scheduler.py +0 -0
  98. {dbos-0.27.0a3 → dbos-0.27.0a4}/tests/test_schema_migration.py +0 -0
  99. {dbos-0.27.0a3 → dbos-0.27.0a4}/tests/test_singleton.py +0 -0
  100. {dbos-0.27.0a3 → dbos-0.27.0a4}/tests/test_spans.py +0 -0
  101. {dbos-0.27.0a3 → dbos-0.27.0a4}/tests/test_sqlalchemy.py +0 -0
  102. {dbos-0.27.0a3 → dbos-0.27.0a4}/tests/test_workflow_introspection.py +0 -0
  103. {dbos-0.27.0a3 → dbos-0.27.0a4}/tests/test_workflow_management.py +0 -0
  104. {dbos-0.27.0a3 → dbos-0.27.0a4}/version/__init__.py +0 -0

{dbos-0.27.0a3 → dbos-0.27.0a4}/PKG-INFO +1 -1
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dbos
-Version: 0.27.0a3
+Version: 0.27.0a4
 Summary: Ultra-lightweight durable execution in Python
 Author-Email: "DBOS, Inc." <contact@dbos.dev>
 License: MIT

{dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_app_db.py +1 -1
@@ -7,7 +7,7 @@ from sqlalchemy.exc import DBAPIError
 from sqlalchemy.orm import Session, sessionmaker
 
 from . import _serialization
-from ._dbos_config import ConfigFile, DatabaseConfig
+from ._dbos_config import DatabaseConfig
 from ._error import DBOSUnexpectedStepError, DBOSWorkflowConflictIDError
 from ._schemas.application_database import ApplicationSchema
 from ._sys_db import StepInfo

{dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_client.py +7 -5
@@ -1,9 +1,10 @@
 import asyncio
 import sys
-import time
 import uuid
 from typing import Any, Generic, List, Optional, TypedDict, TypeVar
 
+from sqlalchemy import URL
+
 from dbos._app_db import ApplicationDatabase
 
 if sys.version_info < (3, 11):
@@ -57,7 +58,7 @@ class WorkflowHandleClientPolling(Generic[R]):
         return res
 
     def get_status(self) -> WorkflowStatus:
-        status = get_workflow(self._sys_db, self.workflow_id, True)
+        status = get_workflow(self._sys_db, self.workflow_id, False)
         if status is None:
             raise DBOSNonExistentWorkflowError(self.workflow_id)
         return status
@@ -80,7 +81,7 @@ class WorkflowHandleClientAsyncPolling(Generic[R]):
 
     async def get_status(self) -> WorkflowStatus:
         status = await asyncio.to_thread(
-            get_workflow, self._sys_db, self.workflow_id, True
+            get_workflow, self._sys_db, self.workflow_id, False
        )
         if status is None:
             raise DBOSNonExistentWorkflowError(self.workflow_id)
@@ -94,6 +95,7 @@ class DBOSClient:
         db_config["sys_db_name"] = system_database
         self._sys_db = SystemDatabase(db_config)
         self._app_db = ApplicationDatabase(db_config)
+        self._db_url = database_url
 
     def destroy(self) -> None:
         self._sys_db.destroy()
@@ -159,13 +161,13 @@
         return WorkflowHandleClientAsyncPolling[R](workflow_id, self._sys_db)
 
     def retrieve_workflow(self, workflow_id: str) -> WorkflowHandle[R]:
-        status = get_workflow(self._sys_db, workflow_id, True)
+        status = get_workflow(self._sys_db, workflow_id, False)
         if status is None:
             raise DBOSNonExistentWorkflowError(workflow_id)
         return WorkflowHandleClientPolling[R](workflow_id, self._sys_db)
 
     async def retrieve_workflow_async(self, workflow_id: str) -> WorkflowHandleAsync[R]:
-        status = asyncio.to_thread(get_workflow, self._sys_db, workflow_id, True)
+        status = asyncio.to_thread(get_workflow, self._sys_db, workflow_id, False)
         if status is None:
             raise DBOSNonExistentWorkflowError(workflow_id)
         return WorkflowHandleClientAsyncPolling[R](workflow_id, self._sys_db)

{dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_dbos.py +21 -5
@@ -64,7 +64,8 @@ from ._registrations import (
 )
 from ._roles import default_required_roles, required_roles
 from ._scheduler import ScheduledWorkflow, scheduled
-from ._sys_db import StepInfo, WorkflowStatus, reset_system_database
+from ._schemas.system_database import SystemSchema
+from ._sys_db import StepInfo, SystemDatabase, WorkflowStatus, reset_system_database
 from ._tracer import DBOSTracer, dbos_tracer
 
 if TYPE_CHECKING:
@@ -73,14 +74,15 @@ if TYPE_CHECKING:
     from ._request import Request
     from flask import Flask
 
+    from sqlalchemy import URL
     from sqlalchemy.orm import Session
 
     from ._request import Request
 
 if sys.version_info < (3, 10):
-    from typing_extensions import ParamSpec, TypeAlias
+    from typing_extensions import ParamSpec
 else:
-    from typing import ParamSpec, TypeAlias
+    from typing import ParamSpec
 
 from ._admin_server import AdminServer
 from ._app_db import ApplicationDatabase
@@ -109,7 +111,6 @@ from ._error import (
 )
 from ._event_loop import BackgroundEventLoop
 from ._logger import add_otlp_to_all_loggers, config_logger, dbos_logger, init_logger
-from ._sys_db import SystemDatabase
 from ._workflow_commands import get_workflow, list_workflow_steps
 
 # Most DBOS functions are just any callable F, so decorators / wrappers work on F
@@ -563,7 +564,22 @@ class DBOS:
         assert (
             not self._launched
         ), "The system database cannot be reset after DBOS is launched. Resetting the system database is a destructive operation that should only be used in a test environment."
-        reset_system_database(self._config)
+
+        sysdb_name = (
+            self._config["database"]["sys_db_name"]
+            if "sys_db_name" in self._config["database"]
+            and self._config["database"]["sys_db_name"]
+            else self._config["database"]["app_db_name"] + SystemSchema.sysdb_suffix
+        )
+        postgres_db_url = URL.create(
+            "postgresql+psycopg",
+            username=self._config["database"]["username"],
+            password=self._config["database"]["password"],
+            host=self._config["database"]["hostname"],
+            port=self._config["database"]["port"],
+            database="postgres",
+        )
+        reset_system_database(postgres_db_url, sysdb_name)
 
     def _destroy(self) -> None:
         self._initialized = False

{dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/_sys_db.py +2 -14
@@ -248,6 +248,7 @@ class SystemDatabase:
             sa.text("SELECT 1 FROM pg_database WHERE datname=:db_name"),
             parameters={"db_name": sysdb_name},
         ).scalar():
+            dbos_logger.info(f"Creating system database {sysdb_name}")
             conn.execute(sa.text(f"CREATE DATABASE {sysdb_name}"))
         engine.dispose()
 
@@ -1897,20 +1898,7 @@
         return wf_status, workflow_deadline_epoch_ms
 
 
-def reset_system_database(config: ConfigFile) -> None:
-    sysdb_name = (
-        config["database"]["sys_db_name"]
-        if "sys_db_name" in config["database"] and config["database"]["sys_db_name"]
-        else config["database"]["app_db_name"] + SystemSchema.sysdb_suffix
-    )
-    postgres_db_url = sa.URL.create(
-        "postgresql+psycopg",
-        username=config["database"]["username"],
-        password=config["database"]["password"],
-        host=config["database"]["hostname"],
-        port=config["database"]["port"],
-        database="postgres",
-    )
+def reset_system_database(postgres_db_url: sa.URL, sysdb_name: str) -> None:
     try:
         # Connect to postgres default database
         engine = sa.create_engine(postgres_db_url)
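
With this refactor, reset_system_database no longer takes a ConfigFile; callers build the Postgres URL and system-database name themselves, as DBOS.reset_system_database now does in the _dbos.py hunk above. A sketch of the new call path, with placeholder credentials and application database name:

# Sketch of the new reset_system_database signature; credentials and names are placeholders.
import sqlalchemy as sa

from dbos._schemas.system_database import SystemSchema
from dbos._sys_db import reset_system_database

postgres_db_url = sa.URL.create(
    "postgresql+psycopg",
    username="postgres",
    password="dbos",
    host="localhost",
    port=5432,
    database="postgres",  # connect to the default database, not the app database
)
sysdb_name = "my_app" + SystemSchema.sysdb_suffix  # app database name plus the standard system-DB suffix
reset_system_database(postgres_db_url, sysdb_name)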

{dbos-0.27.0a3 → dbos-0.27.0a4}/dbos/cli/cli.py +161 -112
@@ -5,10 +5,10 @@ import subprocess
 import time
 import typing
 from os import path
-from typing import Any
+from typing import Any, Optional
+from urllib.parse import quote
 
 import jsonpickle  # type: ignore
-import requests
 import sqlalchemy as sa
 import typer
 from rich import print
@@ -19,18 +19,28 @@ from dbos._debug import debug_workflow, parse_start_command
 
 from .. import load_config
 from .._app_db import ApplicationDatabase
+from .._client import DBOSClient
 from .._dbos_config import _is_valid_app_name
 from .._docker_pg_helper import start_docker_pg, stop_docker_pg
+from .._schemas.system_database import SystemSchema
 from .._sys_db import SystemDatabase, reset_system_database
-from .._workflow_commands import (
-    get_workflow,
-    list_queued_workflows,
-    list_workflow_steps,
-    list_workflows,
-)
 from ..cli._github_init import create_template_from_github
 from ._template_init import copy_template, get_project_name, get_templates_directory
 
+
+def start_client(db_url: Optional[str] = None) -> DBOSClient:
+    database_url = db_url
+    if database_url is None:
+        database_url = os.getenv("DBOS_DATABASE_URL")
+    if database_url is None:
+        config = load_config(silent=True)
+        database = config["database"]
+        username = quote(database["username"])
+        password = quote(database["password"])
+        database_url = f"postgresql://{username}:{password}@{database['hostname']}:{database['port']}/{database['app_db_name']}"
+    return DBOSClient(database_url=database_url)
+
+
 app = typer.Typer()
 workflow = typer.Typer()
 queue = typer.Typer()
@@ -241,7 +251,23 @@ def migrate() -> None:
 
 @app.command(help="Reset the DBOS system database")
 def reset(
-    yes: bool = typer.Option(False, "-y", "--yes", help="Skip confirmation prompt")
+    yes: bool = typer.Option(False, "-y", "--yes", help="Skip confirmation prompt"),
+    sys_db_name: Annotated[
+        typing.Optional[str],
+        typer.Option(
+            "--sys-db-name",
+            "-s",
+            help="Specify the name of the system database to reset",
+        ),
+    ] = None,
+    db_url: Annotated[
+        typing.Optional[str],
+        typer.Option(
+            "--db-url",
+            "-D",
+            help="Your DBOS application database URL",
+        ),
+    ] = None,
 ) -> None:
     if not yes:
         confirm = typer.confirm(
@@ -250,9 +276,18 @@ def reset(
         if not confirm:
             typer.echo("Operation cancelled.")
             raise typer.Exit()
-    config = load_config()
     try:
-        reset_system_database(config)
+        client = start_client(db_url=db_url)
+        pg_db_url = sa.make_url(client._db_url).set(drivername="postgresql+psycopg")
+        assert (
+            pg_db_url.database is not None
+        ), f"Database name is required in URL: {pg_db_url.render_as_string(hide_password=True)}"
+        sysdb_name = (
+            sys_db_name
+            if sys_db_name
+            else (pg_db_url.database + SystemSchema.sysdb_suffix)
+        )
+        reset_system_database(pg_db_url.set(database="postgres"), sysdb_name)
     except sa.exc.SQLAlchemyError as e:
         typer.echo(f"Error resetting system database: {str(e)}")
         return
@@ -276,6 +311,14 @@ def debug(
 
 @workflow.command(help="List workflows for your application")
 def list(
+    db_url: Annotated[
+        typing.Optional[str],
+        typer.Option(
+            "--db-url",
+            "-D",
+            help="Your DBOS application database URL",
+        ),
+    ] = None,
     limit: Annotated[
         int,
         typer.Option("--limit", "-l", help="Limit the results returned"),
@@ -324,21 +367,31 @@
             help="Retrieve workflows with this name",
         ),
     ] = None,
-    request: Annotated[
+    sort_desc: Annotated[
         bool,
-        typer.Option("--request", help="Retrieve workflow request information"),
+        typer.Option(
+            "--sort-desc",
+            "-d",
+            help="Sort the results in descending order",
+        ),
     ] = False,
+    offset: Annotated[
+        typing.Optional[int],
+        typer.Option(
+            "--offset",
+            "-o",
+            help="Offset for pagination",
+        ),
+    ] = None,
 ) -> None:
-    config = load_config(silent=True)
-    sys_db = SystemDatabase(config["database"])
-    workflows = list_workflows(
-        sys_db,
+    workflows = start_client(db_url=db_url).list_workflows(
         limit=limit,
+        offset=offset,
+        sort_desc=sort_desc,
         user=user,
         start_time=starttime,
         end_time=endtime,
         status=status,
-        request=request,
         app_version=appversion,
         name=name,
     )
@@ -348,28 +401,39 @@
 @workflow.command(help="Retrieve the status of a workflow")
 def get(
     workflow_id: Annotated[str, typer.Argument()],
-    request: Annotated[
-        bool,
-        typer.Option("--request", help="Retrieve workflow request information"),
-    ] = False,
+    db_url: Annotated[
+        typing.Optional[str],
+        typer.Option(
+            "--db-url",
+            "-D",
+            help="Your DBOS application database URL",
+        ),
+    ] = None,
 ) -> None:
-    config = load_config(silent=True)
-    sys_db = SystemDatabase(config["database"])
-    print(
-        jsonpickle.encode(get_workflow(sys_db, workflow_id, request), unpicklable=False)
+    status = (
+        start_client(db_url=db_url)
+        .retrieve_workflow(workflow_id=workflow_id)
+        .get_status()
     )
+    print(jsonpickle.encode(status, unpicklable=False))
 
 
 @workflow.command(help="List the steps of a workflow")
 def steps(
     workflow_id: Annotated[str, typer.Argument()],
+    db_url: Annotated[
+        typing.Optional[str],
+        typer.Option(
+            "--db-url",
+            "-D",
+            help="Your DBOS application database URL",
+        ),
+    ] = None,
 ) -> None:
-    config = load_config(silent=True)
-    sys_db = SystemDatabase(config["database"])
-    app_db = ApplicationDatabase(config["database"])
     print(
         jsonpickle.encode(
-            list_workflow_steps(sys_db, app_db, workflow_id), unpicklable=False
+            start_client(db_url=db_url).list_workflow_steps(workflow_id=workflow_id),
+            unpicklable=False,
         )
     )
 
@@ -379,74 +443,51 @@
 )
 def cancel(
     uuid: Annotated[str, typer.Argument()],
-    host: Annotated[
+    db_url: Annotated[
         typing.Optional[str],
-        typer.Option("--host", "-H", help="Specify the admin host"),
-    ] = "localhost",
-    port: Annotated[
-        typing.Optional[int],
-        typer.Option("--port", "-p", help="Specify the admin port"),
-    ] = 3001,
+        typer.Option(
+            "--db-url",
+            "-D",
+            help="Your DBOS application database URL",
+        ),
+    ] = None,
 ) -> None:
-    response = requests.post(
-        f"http://{host}:{port}/workflows/{uuid}/cancel", json=[], timeout=5
-    )
-
-    if response.status_code == 204:
-        print(f"Workflow {uuid} has been cancelled")
-    else:
-        print(f"Failed to cancel workflow {uuid}. Status code: {response.status_code}")
+    start_client(db_url=db_url).cancel_workflow(workflow_id=uuid)
 
 
 @workflow.command(help="Resume a workflow that has been cancelled")
 def resume(
     uuid: Annotated[str, typer.Argument()],
-    host: Annotated[
+    db_url: Annotated[
         typing.Optional[str],
-        typer.Option("--host", "-H", help="Specify the admin host"),
-    ] = "localhost",
-    port: Annotated[
-        typing.Optional[int],
-        typer.Option("--port", "-p", help="Specify the admin port"),
-    ] = 3001,
+        typer.Option(
+            "--db-url",
+            "-D",
+            help="Your DBOS application database URL",
+        ),
+    ] = None,
 ) -> None:
-    response = requests.post(
-        f"http://{host}:{port}/workflows/{uuid}/resume", json=[], timeout=5
-    )
-
-    if response.status_code == 204:
-        print(f"Workflow {uuid} has been resumed")
-    else:
-        print(f"Failed to resume workflow {uuid}. Status code: {response.status_code}")
+    start_client(db_url=db_url).resume_workflow(workflow_id=uuid)
 
 
 @workflow.command(help="Restart a workflow from the beginning with a new id")
 def restart(
     uuid: Annotated[str, typer.Argument()],
-    host: Annotated[
+    db_url: Annotated[
         typing.Optional[str],
-        typer.Option("--host", "-H", help="Specify the admin host"),
-    ] = "localhost",
-    port: Annotated[
-        typing.Optional[int],
-        typer.Option("--port", "-p", help="Specify the admin port"),
-    ] = 3001,
+        typer.Option(
+            "--db-url",
+            "-D",
+            help="Your DBOS application database URL",
+        ),
+    ] = None,
 ) -> None:
-    response = requests.post(
-        f"http://{host}:{port}/workflows/{uuid}/restart",
-        json=[],
-        timeout=5,
+    status = (
+        start_client(db_url=db_url)
+        .fork_workflow(workflow_id=uuid, start_step=1)
+        .get_status()
     )
-
-    if response.status_code == 204:
-        print(f"Workflow {uuid} has been restarted")
-    else:
-        error_message = response.json().get("error", "Unknown error")
-        print(
-            f"Failed to restart workflow {uuid}. "
-            f"Status code: {response.status_code}. "
-            f"Error: {error_message}"
-        )
+    print(jsonpickle.encode(status, unpicklable=False))
 
 
 @workflow.command(
@@ -454,43 +495,41 @@
 )
 def fork(
     uuid: Annotated[str, typer.Argument()],
-    host: Annotated[
-        typing.Optional[str],
-        typer.Option("--host", "-H", help="Specify the admin host"),
-    ] = "localhost",
-    port: Annotated[
-        typing.Optional[int],
-        typer.Option("--port", "-p", help="Specify the admin port"),
-    ] = 3001,
     step: Annotated[
-        typing.Optional[int],
+        int,
         typer.Option(
            "--step",
            "-s",
            help="Restart from this step (default: first step)",
        ),
     ] = 1,
+    db_url: Annotated[
+        typing.Optional[str],
+        typer.Option(
+            "--db-url",
+            "-D",
+            help="Your DBOS application database URL",
+        ),
+    ] = None,
 ) -> None:
-    print(f"Forking workflow {uuid} from step {step}")
-    response = requests.post(
-        f"http://{host}:{port}/workflows/{uuid}/fork",
-        json={"start_step": step},
-        timeout=5,
+    status = (
+        start_client(db_url=db_url)
+        .fork_workflow(workflow_id=uuid, start_step=step)
+        .get_status()
     )
-
-    if response.status_code == 204:
-        print(f"Workflow {uuid} has been forked")
-    else:
-        error_message = response.json().get("error", "Unknown error")
-        print(
-            f"Failed to fork workflow {uuid}. "
-            f"Status code: {response.status_code}. "
-            f"Error: {error_message}"
-        )
+    print(jsonpickle.encode(status, unpicklable=False))
 
 
 @queue.command(name="list", help="List enqueued functions for your application")
 def list_queue(
+    db_url: Annotated[
+        typing.Optional[str],
+        typer.Option(
+            "--db-url",
+            "-D",
+            help="Your DBOS application database URL",
+        ),
+    ] = None,
     limit: Annotated[
         typing.Optional[int],
         typer.Option("--limit", "-l", help="Limit the results returned"),
@@ -535,21 +574,31 @@
             help="Retrieve functions on this queue",
         ),
     ] = None,
-    request: Annotated[
+    sort_desc: Annotated[
        bool,
-        typer.Option("--request", help="Retrieve workflow request information"),
+        typer.Option(
+            "--sort-desc",
+            "-d",
+            help="Sort the results in descending order",
+        ),
     ] = False,
+    offset: Annotated[
+        typing.Optional[int],
+        typer.Option(
+            "--offset",
+            "-o",
+            help="Offset for pagination",
+        ),
+    ] = None,
 ) -> None:
-    config = load_config(silent=True)
-    sys_db = SystemDatabase(config["database"])
-    workflows = list_queued_workflows(
-        sys_db=sys_db,
+    workflows = start_client(db_url=db_url).list_queued_workflows(
         limit=limit,
+        offset=offset,
+        sort_desc=sort_desc,
         start_time=start_time,
         end_time=end_time,
         queue_name=queue_name,
         status=status,
-        request=request,
         name=name,
     )
     print(jsonpickle.encode(workflows, unpicklable=False))
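
The workflow and queue subcommands now talk to the database through start_client instead of the admin server's HTTP endpoints, so each accepts --db-url and otherwise falls back to DBOS_DATABASE_URL or dbos-config.yaml. An illustrative invocation, driven through subprocess the same way the package's own tests do; the URL and workflow ID are placeholders:

# Illustrative only: placeholder URL and workflow ID; mirrors how tests/test_package.py drives the CLI.
import subprocess

db_url = "postgresql://postgres:dbos@localhost:5432/my_app"

# List workflows in descending order, skipping the first ten.
subprocess.check_call(
    ["dbos", "workflow", "list", "--db-url", db_url, "--sort-desc", "--offset", "10"]
)

# Cancel and then resume a workflow directly against the database; no admin server required.
subprocess.check_call(["dbos", "workflow", "cancel", "example-workflow-id", "--db-url", db_url])
subprocess.check_call(["dbos", "workflow", "resume", "example-workflow-id", "--db-url", db_url])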

{dbos-0.27.0a3 → dbos-0.27.0a4}/pyproject.toml +1 -1
@@ -28,7 +28,7 @@ dependencies = [
 ]
 requires-python = ">=3.9"
 readme = "README.md"
-version = "0.27.0a3"
+version = "0.27.0a4"
 
 [project.license]
 text = "MIT"

{dbos-0.27.0a3 → dbos-0.27.0a4}/tests/conftest.py +1 -1
@@ -31,7 +31,7 @@ def default_config() -> ConfigFile:
             "hostname": "localhost",
             "port": 5432,
             "username": "postgres",
-            "password": os.environ["PGPASSWORD"],
+            "password": os.environ.get("PGPASSWORD", "dbos"),
             "app_db_name": "dbostestpy",
         },
     }

{dbos-0.27.0a3 → dbos-0.27.0a4}/tests/test_client.py +16 -5
@@ -1,4 +1,3 @@
-import json
 import math
 import os
 import runpy
@@ -6,14 +5,13 @@ import subprocess
 import sys
 import time
 import uuid
-from typing import Any, Optional, TypedDict, cast
+from typing import Any, Optional, TypedDict
 
 import pytest
 import sqlalchemy as sa
 
-from dbos import DBOS, ConfigFile, DBOSClient, EnqueueOptions, Queue, SetWorkflowID
+from dbos import DBOS, ConfigFile, DBOSClient, EnqueueOptions, SetWorkflowID
 from dbos._dbos import WorkflowHandle, WorkflowHandleAsync
-from dbos._schemas.system_database import SystemSchema
 from dbos._sys_db import SystemDatabase
 from dbos._utils import GlobalParams
 from tests.client_collateral import event_test, retrieve_test, send_test
@@ -47,17 +45,30 @@ def test_client_enqueue_and_get_result(dbos: DBOS, client: DBOSClient) -> None:
     result = handle.get_result()
     assert result == '42-test-{"first": "John", "last": "Doe", "age": 30}'
 
+    list_results = client.list_workflows()
+    assert len(list_results) == 1
+    assert list_results[0].workflow_id == wfid
+    assert list_results[0].status == "SUCCESS"
+
 
 def test_enqueue_with_timeout(dbos: DBOS, client: DBOSClient) -> None:
     run_client_collateral()
 
+    wfid = str(uuid.uuid4())
     options: EnqueueOptions = {
         "queue_name": "test_queue",
         "workflow_name": "blocked_workflow",
-        "workflow_timeout": 0.1,
+        "workflow_timeout": 1,
+        "workflow_id": wfid,
     }
 
     handle: WorkflowHandle[str] = client.enqueue(options)
+
+    list_results = client.list_queued_workflows()
+    assert len(list_results) == 1
+    assert list_results[0].workflow_id == wfid
+    assert list_results[0].status in ["PENDING", "ENQUEUED"]
+
     with pytest.raises(Exception) as exc_info:
         handle.get_result()
     assert "was cancelled" in str(exc_info.value)

{dbos-0.27.0a3 → dbos-0.27.0a4}/tests/test_package.py +76 -6
@@ -139,7 +139,13 @@ def test_reset(postgres_db_engine: sa.Engine) -> None:
         assert result == 1
 
         # Call reset and verify it's destroyed
-        subprocess.check_call(["dbos", "reset", "-y"], cwd=temp_path)
+        db_url = postgres_db_engine.url.set(database="reset_app").render_as_string(
+            hide_password=False
+        )
+        subprocess.check_call(
+            ["dbos", "reset", "-y", "--db-url", db_url, "--sys-db-name", sysdb_name],
+            cwd=temp_path,
+        )
         with postgres_db_engine.connect() as c:
             c.execution_options(isolation_level="AUTOCOMMIT")
             result = c.execute(
@@ -150,8 +156,11 @@ def test_reset(postgres_db_engine: sa.Engine) -> None:
         assert result == 0
 
 
-def test_list_commands() -> None:
+def test_workflow_commands(postgres_db_engine: sa.Engine) -> None:
     app_name = "reset-app"
+    db_url = postgres_db_engine.url.set(database="dbos_toolbox").render_as_string(
+        hide_password=False
+    )
     with tempfile.TemporaryDirectory() as temp_path:
         subprocess.check_call(
             ["dbos", "init", app_name, "--template", "dbos-toolbox"],
@@ -179,17 +188,78 @@ def test_list_commands() -> None:
                 time.sleep(1)
             time.sleep(1)  # So the queued workflows can start
         finally:
+            # Because the toolbox steps sleep for 5 seconds, all the steps should be PENDING
             os.kill(process.pid, signal.SIGINT)
             process.wait()
 
         # Verify the output is valid JSON
-        output = subprocess.check_output(["dbos", "workflow", "list"], cwd=temp_path)
+        output = subprocess.check_output(
+            ["dbos", "workflow", "list", "--db-url", db_url], cwd=temp_path
+        )
         data = json.loads(output)
         assert isinstance(data, list) and len(data) == 10
 
         # Verify the output is valid JSON
         output = subprocess.check_output(
-            ["dbos", "workflow", "queue", "list"], cwd=temp_path
+            ["dbos", "workflow", "queue", "list", "--db-url", db_url], cwd=temp_path
        )
-        data = json.loads(output)
-        assert isinstance(data, list) and len(data) == 10
+        workflows = json.loads(output)
+        assert isinstance(workflows, list) and len(workflows) == 10
+        for wf in workflows:
+            output = subprocess.check_output(
+                ["dbos", "workflow", "get", wf["workflow_id"], "--db-url", db_url],
+                cwd=temp_path,
+            )
+            get_wf_data = json.loads(output)
+            assert isinstance(get_wf_data, dict)
+            assert get_wf_data["workflow_id"] == wf["workflow_id"]
+
+        # workflow ID is a preffix to each step ID
+        wf_id = "-".join(workflows[0]["workflow_id"].split("-")[:-1])
+        get_steps_output = subprocess.check_output(
+            ["dbos", "workflow", "steps", wf_id, "--db-url", db_url], cwd=temp_path
+        )
+        get_steps_data = json.loads(get_steps_output)
+        assert isinstance(get_steps_data, list)
+        assert len(get_steps_data) == 10
+
+        # cancel the workflow and check the status is CANCELED
+        subprocess.check_output(
+            ["dbos", "workflow", "cancel", wf_id, "--db-url", db_url], cwd=temp_path
+        )
+        output = subprocess.check_output(
+            ["dbos", "workflow", "get", wf_id], cwd=temp_path
+        )
+        get_wf_data = json.loads(output)
+        assert isinstance(get_wf_data, dict)
+        assert get_wf_data["status"] == "CANCELLED"
+
+        # resume the workflow and check the status is ENQUEUED
+        subprocess.check_output(
+            ["dbos", "workflow", "resume", wf_id, "--db-url", db_url], cwd=temp_path
+        )
+        output = subprocess.check_output(
+            ["dbos", "workflow", "get", wf_id], cwd=temp_path
+        )
+        get_wf_data = json.loads(output)
+        assert isinstance(get_wf_data, dict)
+        assert get_wf_data["status"] == "ENQUEUED"
+
+        # restart the workflow and check it has a new ID and its status is ENQUEUED
+        output = subprocess.check_output(
+            ["dbos", "workflow", "restart", wf_id, "--db-url", db_url], cwd=temp_path
+        )
+        restart_wf_data = json.loads(output)
+        assert isinstance(restart_wf_data, dict)
+        assert restart_wf_data["workflow_id"] != wf_id
+        assert restart_wf_data["status"] == "ENQUEUED"
+
+        # fork the workflow at step 5 and check it has a new ID and its status is ENQUEUED
+        output = subprocess.check_output(
+            ["dbos", "workflow", "fork", wf_id, "--step", "5", "--db-url", db_url],
+            cwd=temp_path,
+        )
+        fork_wf_data = json.loads(output)
+        assert isinstance(fork_wf_data, dict)
+        assert fork_wf_data["workflow_id"] != wf_id
+        assert fork_wf_data["status"] == "ENQUEUED"