dbos 1.8.0a3__tar.gz → 1.8.0a8__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (109)
  1. {dbos-1.8.0a3 → dbos-1.8.0a8}/PKG-INFO +1 -1
  2. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_client.py +15 -4
  3. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_dbos.py +106 -30
  4. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_dbos_config.py +24 -1
  5. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_error.py +5 -5
  6. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_sys_db.py +15 -17
  7. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/cli/cli.py +4 -4
  8. {dbos-1.8.0a3 → dbos-1.8.0a8}/pyproject.toml +1 -1
  9. {dbos-1.8.0a3 → dbos-1.8.0a8}/tests/conftest.py +2 -1
  10. {dbos-1.8.0a3 → dbos-1.8.0a8}/tests/test_admin_server.py +1 -1
  11. {dbos-1.8.0a3 → dbos-1.8.0a8}/tests/test_async.py +6 -2
  12. dbos-1.8.0a8/tests/test_async_workflow_management.py +264 -0
  13. {dbos-1.8.0a3 → dbos-1.8.0a8}/tests/test_classdecorators.py +27 -0
  14. {dbos-1.8.0a3 → dbos-1.8.0a8}/tests/test_config.py +16 -0
  15. {dbos-1.8.0a3 → dbos-1.8.0a8}/tests/test_failures.py +7 -4
  16. {dbos-1.8.0a3 → dbos-1.8.0a8}/tests/test_queue.py +1 -1
  17. {dbos-1.8.0a3 → dbos-1.8.0a8}/tests/test_scheduler.py +3 -0
  18. {dbos-1.8.0a3 → dbos-1.8.0a8}/tests/test_singleton.py +0 -24
  19. {dbos-1.8.0a3 → dbos-1.8.0a8}/LICENSE +0 -0
  20. {dbos-1.8.0a3 → dbos-1.8.0a8}/README.md +0 -0
  21. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/__init__.py +0 -0
  22. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/__main__.py +0 -0
  23. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_admin_server.py +0 -0
  24. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_app_db.py +0 -0
  25. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_classproperty.py +0 -0
  26. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_conductor/conductor.py +0 -0
  27. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_conductor/protocol.py +0 -0
  28. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_context.py +0 -0
  29. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_core.py +0 -0
  30. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_croniter.py +0 -0
  31. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_debug.py +0 -0
  32. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_docker_pg_helper.py +0 -0
  33. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_event_loop.py +0 -0
  34. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_fastapi.py +0 -0
  35. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_flask.py +0 -0
  36. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_kafka.py +0 -0
  37. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_kafka_message.py +0 -0
  38. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_logger.py +0 -0
  39. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_migrations/env.py +0 -0
  40. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_migrations/script.py.mako +0 -0
  41. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_migrations/versions/04ca4f231047_workflow_queues_executor_id.py +0 -0
  42. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_migrations/versions/27ac6900c6ad_add_queue_dedup.py +0 -0
  43. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_migrations/versions/50f3227f0b4b_fix_job_queue.py +0 -0
  44. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_migrations/versions/5c361fc04708_added_system_tables.py +0 -0
  45. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_migrations/versions/66478e1b95e5_consolidate_queues.py +0 -0
  46. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_migrations/versions/83f3732ae8e7_workflow_timeout.py +0 -0
  47. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_migrations/versions/933e86bdac6a_add_queue_priority.py +0 -0
  48. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_migrations/versions/a3b18ad34abe_added_triggers.py +0 -0
  49. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_migrations/versions/d76646551a6b_job_queue_limiter.py +0 -0
  50. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_migrations/versions/d76646551a6c_workflow_queue.py +0 -0
  51. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_migrations/versions/d994145b47b6_consolidate_inputs.py +0 -0
  52. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_migrations/versions/eab0cc1d9a14_job_queue.py +0 -0
  53. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py +0 -0
  54. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_outcome.py +0 -0
  55. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_queue.py +0 -0
  56. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_recovery.py +0 -0
  57. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_registrations.py +0 -0
  58. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_roles.py +0 -0
  59. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_scheduler.py +0 -0
  60. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_schemas/__init__.py +0 -0
  61. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_schemas/application_database.py +0 -0
  62. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_schemas/system_database.py +0 -0
  63. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_serialization.py +0 -0
  64. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_templates/dbos-db-starter/README.md +0 -0
  65. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_templates/dbos-db-starter/__package/__init__.py +0 -0
  66. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_templates/dbos-db-starter/__package/main.py.dbos +0 -0
  67. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_templates/dbos-db-starter/__package/schema.py +0 -0
  68. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_templates/dbos-db-starter/alembic.ini +0 -0
  69. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +0 -0
  70. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_templates/dbos-db-starter/migrations/env.py.dbos +0 -0
  71. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_templates/dbos-db-starter/migrations/script.py.mako +0 -0
  72. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py +0 -0
  73. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -0
  74. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_tracer.py +0 -0
  75. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_utils.py +0 -0
  76. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/_workflow_commands.py +0 -0
  77. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/cli/_github_init.py +0 -0
  78. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/cli/_template_init.py +0 -0
  79. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/dbos-config.schema.json +0 -0
  80. {dbos-1.8.0a3 → dbos-1.8.0a8}/dbos/py.typed +0 -0
  81. {dbos-1.8.0a3 → dbos-1.8.0a8}/tests/__init__.py +0 -0
  82. {dbos-1.8.0a3 → dbos-1.8.0a8}/tests/atexit_no_ctor.py +0 -0
  83. {dbos-1.8.0a3 → dbos-1.8.0a8}/tests/atexit_no_launch.py +0 -0
  84. {dbos-1.8.0a3 → dbos-1.8.0a8}/tests/classdefs.py +0 -0
  85. {dbos-1.8.0a3 → dbos-1.8.0a8}/tests/client_collateral.py +0 -0
  86. {dbos-1.8.0a3 → dbos-1.8.0a8}/tests/client_worker.py +0 -0
  87. {dbos-1.8.0a3 → dbos-1.8.0a8}/tests/dupname_classdefs1.py +0 -0
  88. {dbos-1.8.0a3 → dbos-1.8.0a8}/tests/dupname_classdefsa.py +0 -0
  89. {dbos-1.8.0a3 → dbos-1.8.0a8}/tests/more_classdefs.py +0 -0
  90. {dbos-1.8.0a3 → dbos-1.8.0a8}/tests/queuedworkflow.py +0 -0
  91. {dbos-1.8.0a3 → dbos-1.8.0a8}/tests/test_cli.py +0 -0
  92. {dbos-1.8.0a3 → dbos-1.8.0a8}/tests/test_client.py +0 -0
  93. {dbos-1.8.0a3 → dbos-1.8.0a8}/tests/test_concurrency.py +0 -0
  94. {dbos-1.8.0a3 → dbos-1.8.0a8}/tests/test_croniter.py +0 -0
  95. {dbos-1.8.0a3 → dbos-1.8.0a8}/tests/test_dbos.py +0 -0
  96. {dbos-1.8.0a3 → dbos-1.8.0a8}/tests/test_debug.py +0 -0
  97. {dbos-1.8.0a3 → dbos-1.8.0a8}/tests/test_docker_secrets.py +0 -0
  98. {dbos-1.8.0a3 → dbos-1.8.0a8}/tests/test_fastapi.py +0 -0
  99. {dbos-1.8.0a3 → dbos-1.8.0a8}/tests/test_fastapi_roles.py +0 -0
  100. {dbos-1.8.0a3 → dbos-1.8.0a8}/tests/test_flask.py +0 -0
  101. {dbos-1.8.0a3 → dbos-1.8.0a8}/tests/test_kafka.py +0 -0
  102. {dbos-1.8.0a3 → dbos-1.8.0a8}/tests/test_outcome.py +0 -0
  103. {dbos-1.8.0a3 → dbos-1.8.0a8}/tests/test_package.py +0 -0
  104. {dbos-1.8.0a3 → dbos-1.8.0a8}/tests/test_schema_migration.py +0 -0
  105. {dbos-1.8.0a3 → dbos-1.8.0a8}/tests/test_spans.py +0 -0
  106. {dbos-1.8.0a3 → dbos-1.8.0a8}/tests/test_sqlalchemy.py +0 -0
  107. {dbos-1.8.0a3 → dbos-1.8.0a8}/tests/test_workflow_introspection.py +0 -0
  108. {dbos-1.8.0a3 → dbos-1.8.0a8}/tests/test_workflow_management.py +0 -0
  109. {dbos-1.8.0a3 → dbos-1.8.0a8}/version/__init__.py +0 -0

PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dbos
-Version: 1.8.0a3
+Version: 1.8.0a8
 Summary: Ultra-lightweight durable execution in Python
 Author-Email: "DBOS, Inc." <contact@dbos.dev>
 License: MIT

dbos/_client.py
@@ -13,7 +13,7 @@ else:

 from dbos import _serialization
 from dbos._dbos import WorkflowHandle, WorkflowHandleAsync
-from dbos._dbos_config import is_valid_database_url
+from dbos._dbos_config import get_system_database_url, is_valid_database_url
 from dbos._error import DBOSException, DBOSNonExistentWorkflowError
 from dbos._registrations import DEFAULT_MAX_RECOVERY_ATTEMPTS
 from dbos._serialization import WorkflowInputs
@@ -97,17 +97,28 @@ class WorkflowHandleClientAsyncPolling(Generic[R]):


 class DBOSClient:
-    def __init__(self, database_url: str, *, system_database: Optional[str] = None):
+    def __init__(
+        self,
+        database_url: str,
+        *,
+        system_database_url: Optional[str] = None,
+        system_database: Optional[str] = None,
+    ):
         assert is_valid_database_url(database_url)
         # We only create database connections but do not run migrations
         self._sys_db = SystemDatabase(
-            database_url=database_url,
+            system_database_url=get_system_database_url(
+                {
+                    "system_database_url": system_database_url,
+                    "database_url": database_url,
+                    "database": {"sys_db_name": system_database},
+                }
+            ),
             engine_kwargs={
                 "pool_timeout": 30,
                 "max_overflow": 0,
                 "pool_size": 2,
             },
-            sys_db_name=system_database,
         )
         self._sys_db.check_connection()
         self._app_db = ApplicationDatabase(
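
The new keyword argument lets an external client point DBOS at a dedicated system database instead of always deriving it from the application database URL. A minimal sketch of the updated constructor call, using made-up connection strings:

from dbos import DBOSClient

# Illustrative URLs only; system_database_url is optional and, when omitted,
# the system database is still derived from database_url as before.
client = DBOSClient(
    "postgresql://user:pass@app-host:5432/appdb",
    system_database_url="postgresql://user:pass@sys-host:5432/appdb_dbos_sys",
)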

dbos/_dbos.py
@@ -91,6 +91,7 @@ from ._context import (
 from ._dbos_config import (
     ConfigFile,
     DBOSConfig,
+    get_system_database_url,
     overwrite_config,
     process_config,
     translate_dbos_config_to_config_file,
@@ -424,9 +425,8 @@ class DBOS:
         assert self._config["database_url"] is not None
         assert self._config["database"]["sys_db_engine_kwargs"] is not None
         self._sys_db_field = SystemDatabase(
-            database_url=self._config["database_url"],
+            system_database_url=get_system_database_url(self._config),
             engine_kwargs=self._config["database"]["sys_db_engine_kwargs"],
-            sys_db_name=self._config["database"]["sys_db_name"],
             debug_mode=debug_mode,
         )
         assert self._config["database"]["db_engine_kwargs"] is not None
@@ -966,6 +966,12 @@ class DBOS:
                 fn, "DBOS.cancelWorkflow"
             )

+    @classmethod
+    async def cancel_workflow_async(cls, workflow_id: str) -> None:
+        """Cancel a workflow by ID."""
+        await cls._configure_asyncio_thread_pool()
+        await asyncio.to_thread(cls.cancel_workflow, workflow_id)
+
     @classmethod
     async def _configure_asyncio_thread_pool(cls) -> None:
         """
@@ -987,11 +993,23 @@ class DBOS:
         _get_dbos_instance()._sys_db.call_function_as_step(fn, "DBOS.resumeWorkflow")
         return cls.retrieve_workflow(workflow_id)

+    @classmethod
+    async def resume_workflow_async(cls, workflow_id: str) -> WorkflowHandleAsync[Any]:
+        """Resume a workflow by ID."""
+        await cls._configure_asyncio_thread_pool()
+        await asyncio.to_thread(cls.resume_workflow, workflow_id)
+        return await cls.retrieve_workflow_async(workflow_id)
+
     @classmethod
     def restart_workflow(cls, workflow_id: str) -> WorkflowHandle[Any]:
         """Restart a workflow with a new workflow ID"""
         return cls.fork_workflow(workflow_id, 1)

+    @classmethod
+    async def restart_workflow_async(cls, workflow_id: str) -> WorkflowHandleAsync[Any]:
+        """Restart a workflow with a new workflow ID"""
+        return await cls.fork_workflow_async(workflow_id, 1)
+
     @classmethod
     def fork_workflow(
         cls,
@@ -1017,6 +1035,23 @@ class DBOS:
         )
         return cls.retrieve_workflow(new_id)

+    @classmethod
+    async def fork_workflow_async(
+        cls,
+        workflow_id: str,
+        start_step: int,
+        *,
+        application_version: Optional[str] = None,
+    ) -> WorkflowHandleAsync[Any]:
+        """Restart a workflow with a new workflow ID from a specific step"""
+        await cls._configure_asyncio_thread_pool()
+        new_id = await asyncio.to_thread(
+            lambda: cls.fork_workflow(
+                workflow_id, start_step, application_version=application_version
+            ).get_workflow_id()
+        )
+        return await cls.retrieve_workflow_async(new_id)
+
     @classmethod
     def list_workflows(
         cls,
@@ -1057,6 +1092,42 @@ class DBOS:
             fn, "DBOS.listWorkflows"
         )

+    @classmethod
+    async def list_workflows_async(
+        cls,
+        *,
+        workflow_ids: Optional[List[str]] = None,
+        status: Optional[Union[str, List[str]]] = None,
+        start_time: Optional[str] = None,
+        end_time: Optional[str] = None,
+        name: Optional[str] = None,
+        app_version: Optional[str] = None,
+        user: Optional[str] = None,
+        limit: Optional[int] = None,
+        offset: Optional[int] = None,
+        sort_desc: bool = False,
+        workflow_id_prefix: Optional[str] = None,
+        load_input: bool = True,
+        load_output: bool = True,
+    ) -> List[WorkflowStatus]:
+        await cls._configure_asyncio_thread_pool()
+        return await asyncio.to_thread(
+            cls.list_workflows,
+            workflow_ids=workflow_ids,
+            status=status,
+            start_time=start_time,
+            end_time=end_time,
+            name=name,
+            app_version=app_version,
+            user=user,
+            limit=limit,
+            offset=offset,
+            sort_desc=sort_desc,
+            workflow_id_prefix=workflow_id_prefix,
+            load_input=load_input,
+            load_output=load_output,
+        )
+
     @classmethod
     def list_queued_workflows(
         cls,
@@ -1089,6 +1160,34 @@ class DBOS:
             fn, "DBOS.listQueuedWorkflows"
         )

+    @classmethod
+    async def list_queued_workflows_async(
+        cls,
+        *,
+        queue_name: Optional[str] = None,
+        status: Optional[Union[str, List[str]]] = None,
+        start_time: Optional[str] = None,
+        end_time: Optional[str] = None,
+        name: Optional[str] = None,
+        limit: Optional[int] = None,
+        offset: Optional[int] = None,
+        sort_desc: bool = False,
+        load_input: bool = True,
+    ) -> List[WorkflowStatus]:
+        await cls._configure_asyncio_thread_pool()
+        return await asyncio.to_thread(
+            cls.list_queued_workflows,
+            queue_name=queue_name,
+            status=status,
+            start_time=start_time,
+            end_time=end_time,
+            name=name,
+            limit=limit,
+            offset=offset,
+            sort_desc=sort_desc,
+            load_input=load_input,
+        )
+
     @classmethod
     def list_workflow_steps(cls, workflow_id: str) -> List[StepInfo]:
         def fn() -> List[StepInfo]:
@@ -1100,6 +1199,11 @@ class DBOS:
             fn, "DBOS.listWorkflowSteps"
         )

+    @classmethod
+    async def list_workflow_steps_async(cls, workflow_id: str) -> List[StepInfo]:
+        await cls._configure_asyncio_thread_pool()
+        return await asyncio.to_thread(cls.list_workflow_steps, workflow_id)
+
     @classproperty
     def logger(cls) -> Logger:
         """Return the DBOS `Logger` for the current context."""
@@ -1272,31 +1376,3 @@ class DBOSConfiguredInstance:
     def __init__(self, config_name: str) -> None:
         self.config_name = config_name
         DBOS.register_instance(self)
-
-
-# Apps that import DBOS probably don't exit. If they do, let's see if
-# it looks like startup was abandoned or a call was forgotten...
-def _dbos_exit_hook() -> None:
-    if _dbos_global_registry is None:
-        # Probably used as or for a support module
-        return
-    if _dbos_global_instance is None:
-        print("DBOS exiting; functions were registered but DBOS() was not called")
-        dbos_logger.warning(
-            "DBOS exiting; functions were registered but DBOS() was not called"
-        )
-        return
-    if not _dbos_global_instance._launched:
-        if _dbos_global_instance.fastapi is not None:
-            # FastAPI lifespan middleware will call launch/destroy, so we can ignore this.
-            # This is likely to happen during fastapi dev runs, where the reloader loads the module multiple times.
-            return
-        print("DBOS exiting; DBOS exists but launch() was not called")
-        dbos_logger.warning("DBOS exiting; DBOS exists but launch() was not called")
-        return
-    # If we get here, we're exiting normally
-    _dbos_global_instance.destroy()
-
-
-# Register the exit hook
-atexit.register(_dbos_exit_hook)

dbos/_dbos_config.py
@@ -23,7 +23,8 @@ class DBOSConfig(TypedDict, total=False):
     Attributes:
         name (str): Application name
        database_url (str): Database connection string
-        sys_db_name (str): System database name
+        system_database_url (str): Connection string for the system database (if different from the application database)
+        sys_db_name (str): System database name (deprecated)
         sys_db_pool_size (int): System database pool size
         db_engine_kwargs (Dict[str, Any]): SQLAlchemy engine kwargs (See https://docs.sqlalchemy.org/en/20/core/engines.html#sqlalchemy.create_engine)
         log_level (str): Log level
@@ -36,6 +37,7 @@ class DBOSConfig(TypedDict, total=False):

     name: str
     database_url: Optional[str]
+    system_database_url: Optional[str]
     sys_db_name: Optional[str]
     sys_db_pool_size: Optional[int]
     db_engine_kwargs: Optional[Dict[str, Any]]
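
For applications configured through DBOSConfig, the new optional field sits alongside database_url, while sys_db_name remains accepted but is now documented as deprecated. A minimal sketch with illustrative connection strings:

from dbos import DBOS, DBOSConfig

config: DBOSConfig = {
    "name": "my-app",
    "database_url": "postgresql://user:pass@app-host:5432/appdb",
    # Optional: host the DBOS system tables on a different Postgres server.
    "system_database_url": "postgresql://user:pass@sys-host:5432/appdb_dbos_sys",
}
DBOS(config=config)
DBOS.launch()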
@@ -111,6 +113,7 @@ class ConfigFile(TypedDict, total=False):
     runtimeConfig: RuntimeConfig
     database: DatabaseConfig
     database_url: Optional[str]
+    system_database_url: Optional[str]
     telemetry: Optional[TelemetryConfig]
     env: Dict[str, str]

@@ -136,6 +139,8 @@ def translate_dbos_config_to_config_file(config: DBOSConfig) -> ConfigFile:

     if "database_url" in config:
         translated_config["database_url"] = config.get("database_url")
+    if "system_database_url" in config:
+        translated_config["system_database_url"] = config.get("system_database_url")

     # Runtime config
     translated_config["runtimeConfig"] = {"run_admin_server": True}
@@ -488,6 +493,8 @@ def overwrite_config(provided_config: ConfigFile) -> ConfigFile:
            "DBOS_DATABASE_URL environment variable is not set. This is required to connect to the database."
        )
    provided_config["database_url"] = db_url
+    if "system_database_url" in provided_config:
+        del provided_config["system_database_url"]

    # Telemetry config
    if "telemetry" not in provided_config or provided_config["telemetry"] is None:
@@ -537,3 +544,19 @@
        del provided_config["env"]

    return provided_config
+
+
+def get_system_database_url(config: ConfigFile) -> str:
+    if "system_database_url" in config and config["system_database_url"] is not None:
+        return config["system_database_url"]
+    else:
+        assert config["database_url"] is not None
+        app_db_url = make_url(config["database_url"])
+        if config["database"].get("sys_db_name") is not None:
+            sys_db_name = config["database"]["sys_db_name"]
+        else:
+            assert app_db_url.database is not None
+            sys_db_name = app_db_url.database + SystemSchema.sysdb_suffix
+        return app_db_url.set(database=sys_db_name).render_as_string(
+            hide_password=False
+        )
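
A short sketch of how this helper resolves the system database URL: an explicit system_database_url wins; otherwise sys_db_name (if set), or the application database name plus the system-database suffix, is substituted into database_url. Values below are illustrative, and the "_dbos_sys" suffix is assumed from the conftest.py change further down:

from dbos._dbos_config import get_system_database_url

# An explicit system_database_url is returned as-is.
print(
    get_system_database_url(
        {
            "database_url": "postgresql://user:pass@localhost/appdb",
            "system_database_url": "postgresql://user:pass@sys-host/appdb_dbos_sys",
            "database": {},
        }
    )
)

# Otherwise the system database name is derived from the application database,
# here presumably "appdb" -> "appdb_dbos_sys".
print(
    get_system_database_url(
        {
            "database_url": "postgresql://user:pass@localhost/appdb",
            "database": {},
        }
    )
)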

dbos/_error.py
@@ -55,7 +55,7 @@ class DBOSErrorCode(Enum):
     InitializationError = 3
     WorkflowFunctionNotFound = 4
     NonExistentWorkflowError = 5
-    DeadLetterQueueError = 6
+    MaxRecoveryAttemptsExceeded = 6
     MaxStepRetriesExceeded = 7
     NotAuthorized = 8
     ConflictingWorkflowError = 9
@@ -121,13 +121,13 @@ class DBOSNonExistentWorkflowError(DBOSException):
         )


-class DBOSDeadLetterQueueError(DBOSException):
-    """Exception raised when a workflow database record does not exist for a given ID."""
+class MaxRecoveryAttemptsExceededError(DBOSException):
+    """Exception raised when a workflow exceeds its max recovery attempts."""

    def __init__(self, wf_id: str, max_retries: int):
        super().__init__(
-            f"Workflow {wf_id} has been moved to the dead-letter queue after exceeding the maximum of {max_retries} retries",
-            dbos_error_code=DBOSErrorCode.DeadLetterQueueError.value,
+            f"Workflow {wf_id} has exceeded its maximum of {max_retries} execution or recovery attempts. Further attempts to execute or recover it will fail. See documentation for details: https://docs.dbos.dev/python/reference/decorators",
+            dbos_error_code=DBOSErrorCode.MaxRecoveryAttemptsExceeded.value,
        )

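Callers that caught the old exception only need the new name; the constructor still takes the workflow ID and the attempt limit. A hedged sketch of an updated handler around a hypothetical registered workflow (exactly where the error surfaces depends on how the workflow is invoked and recovered):

from dbos import DBOS
from dbos._error import MaxRecoveryAttemptsExceededError

@DBOS.workflow()
def flaky_workflow() -> str:
    # Hypothetical workflow used only for illustration.
    return "done"

try:
    handle = DBOS.start_workflow(flaky_workflow)
    print(handle.get_result())
except MaxRecoveryAttemptsExceededError as exc:
    # Formerly DBOSDeadLetterQueueError; raised once a workflow exceeds its
    # maximum execution or recovery attempts.
    DBOS.logger.warning(str(exc))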
 

dbos/_sys_db.py
@@ -37,12 +37,12 @@ from ._context import get_local_dbos_context
 from ._error import (
     DBOSAwaitedWorkflowCancelledError,
     DBOSConflictingWorkflowError,
-    DBOSDeadLetterQueueError,
     DBOSNonExistentWorkflowError,
     DBOSQueueDeduplicatedError,
     DBOSUnexpectedStepError,
     DBOSWorkflowCancelledError,
     DBOSWorkflowConflictIDError,
+    MaxRecoveryAttemptsExceededError,
 )
 from ._logger import dbos_logger
 from ._schemas.system_database import SystemSchema
@@ -57,20 +57,25 @@ class WorkflowStatusString(Enum):
     PENDING = "PENDING"
     SUCCESS = "SUCCESS"
     ERROR = "ERROR"
-    RETRIES_EXCEEDED = "RETRIES_EXCEEDED"
+    MAX_RECOVERY_ATTEMPTS_EXCEEDED = "MAX_RECOVERY_ATTEMPTS_EXCEEDED"
     CANCELLED = "CANCELLED"
     ENQUEUED = "ENQUEUED"


 WorkflowStatuses = Literal[
-    "PENDING", "SUCCESS", "ERROR", "RETRIES_EXCEEDED", "CANCELLED", "ENQUEUED"
+    "PENDING",
+    "SUCCESS",
+    "ERROR",
+    "MAX_RECOVERY_ATTEMPTS_EXCEEDED",
+    "CANCELLED",
+    "ENQUEUED",
 ]


 class WorkflowStatus:
     # The workflow ID
     workflow_id: str
-    # The workflow status. Must be one of ENQUEUED, PENDING, SUCCESS, ERROR, CANCELLED, or RETRIES_EXCEEDED
+    # The workflow status. Must be one of ENQUEUED, PENDING, SUCCESS, ERROR, CANCELLED, or MAX_RECOVERY_ATTEMPTS_EXCEEDED
     status: str
     # The name of the workflow function
     name: str
@@ -331,22 +336,15 @@ class SystemDatabase:
     def __init__(
         self,
         *,
-        database_url: str,
+        system_database_url: str,
         engine_kwargs: Dict[str, Any],
-        sys_db_name: Optional[str] = None,
         debug_mode: bool = False,
     ):
         # Set driver
-        system_db_url = sa.make_url(database_url).set(drivername="postgresql+psycopg")
-        # Resolve system database name
-        sysdb_name = sys_db_name
-        if not sysdb_name:
-            assert system_db_url.database is not None
-            sysdb_name = system_db_url.database + SystemSchema.sysdb_suffix
-        system_db_url = system_db_url.set(database=sysdb_name)
+        url = sa.make_url(system_database_url).set(drivername="postgresql+psycopg")

         self.engine = sa.create_engine(
-            system_db_url,
+            url,
             **engine_kwargs,
         )
         self._engine_kwargs = engine_kwargs
@@ -522,7 +520,7 @@
            raise DBOSConflictingWorkflowError(status["workflow_uuid"], err_msg)

        # Every time we start executing a workflow (and thus attempt to insert its status), we increment `recovery_attempts` by 1.
-        # When this number becomes equal to `maxRetries + 1`, we mark the workflow as `RETRIES_EXCEEDED`.
+        # When this number becomes equal to `maxRetries + 1`, we mark the workflow as `MAX_RECOVERY_ATTEMPTS_EXCEEDED`.
        if (
            (wf_status != "SUCCESS" and wf_status != "ERROR")
            and max_recovery_attempts is not None
@@ -539,7 +537,7 @@
                        == WorkflowStatusString.PENDING.value
                    )
                    .values(
-                        status=WorkflowStatusString.RETRIES_EXCEEDED.value,
+                        status=WorkflowStatusString.MAX_RECOVERY_ATTEMPTS_EXCEEDED.value,
                        deduplication_id=None,
                        started_at_epoch_ms=None,
                        queue_name=None,
@@ -548,7 +546,7 @@
                conn.execute(dlq_cmd)
                # Need to commit here because we're throwing an exception
                conn.commit()
-                raise DBOSDeadLetterQueueError(
+                raise MaxRecoveryAttemptsExceededError(
                    status["workflow_uuid"], max_recovery_attempts
                )


dbos/cli/cli.py
@@ -21,6 +21,7 @@ from .._client import DBOSClient
 from .._dbos_config import (
     _app_name_to_db_name,
     _is_valid_app_name,
+    get_system_database_url,
     is_valid_database_url,
     load_config,
 )
@@ -294,13 +295,12 @@ def migrate(
     sys_db = None
     try:
         sys_db = SystemDatabase(
-            database_url=connection_string,
+            system_database_url=get_system_database_url(config),
             engine_kwargs={
                 "pool_timeout": 30,
                 "max_overflow": 0,
                 "pool_size": 2,
             },
-            sys_db_name=sys_db_name,
         )
         app_db = ApplicationDatabase(
             database_url=connection_string,
@@ -450,7 +450,7 @@ def list(
        typer.Option(
            "--status",
            "-S",
-            help="Retrieve workflows with this status (PENDING, SUCCESS, ERROR, RETRIES_EXCEEDED, ENQUEUED, or CANCELLED)",
+            help="Retrieve workflows with this status (PENDING, SUCCESS, ERROR, ENQUEUED, CANCELLED, or MAX_RECOVERY_ATTEMPTS_EXCEEDED)",
        ),
    ] = None,
    appversion: Annotated[
@@ -657,7 +657,7 @@ def list_queue(
        typer.Option(
            "--status",
            "-S",
-            help="Retrieve functions with this status (PENDING, SUCCESS, ERROR, RETRIES_EXCEEDED, ENQUEUED, or CANCELLED)",
+            help="Retrieve functions with this status (PENDING, SUCCESS, ERROR, ENQUEUED, CANCELLED, or MAX_RECOVERY_ATTEMPTS_EXCEEDED)",
        ),
    ] = None,
    queue_name: Annotated[

pyproject.toml
@@ -27,7 +27,7 @@ dependencies = [
 ]
 requires-python = ">=3.9"
 readme = "README.md"
-version = "1.8.0a3"
+version = "1.8.0a8"

 [project.license]
 text = "MIT"

tests/conftest.py
@@ -12,6 +12,7 @@ from flask import Flask

 from dbos import DBOS, DBOSClient, DBOSConfig
 from dbos._app_db import ApplicationDatabase
+from dbos._dbos_config import get_system_database_url
 from dbos._schemas.system_database import SystemSchema
 from dbos._sys_db import SystemDatabase

@@ -40,7 +41,7 @@ def config() -> DBOSConfig:
 def sys_db(config: DBOSConfig) -> Generator[SystemDatabase, Any, None]:
     assert config["database_url"] is not None
     sys_db = SystemDatabase(
-        database_url=config["database_url"],
+        system_database_url=f"{config['database_url']}_dbos_sys",
         engine_kwargs={
             "pool_timeout": 30,
             "max_overflow": 0,

tests/test_admin_server.py
@@ -104,7 +104,7 @@ def test_deactivate(dbos: DBOS, config: DBOSConfig) -> None:
     assert event.is_set()
     # Verify the scheduled workflow does not run anymore
     time.sleep(5)
-    assert wf_counter <= val + 1
+    assert wf_counter <= val + 2
     # Enqueue a workflow, verify it still runs
     assert queue.enqueue(regular_workflow).get_result() == 5


tests/test_async.py
@@ -32,12 +32,12 @@ async def test_async_workflow(dbos: DBOS) -> None:
         nonlocal wf_counter
         wf_counter += 1
         res1 = test_transaction(var1)
-        res2 = test_step(var2)
+        res2 = await test_step(var2)
         DBOS.logger.info("I'm test_workflow")
         return res1 + res2

     @DBOS.step()
-    def test_step(var: str) -> str:
+    async def test_step(var: str) -> str:
         nonlocal step_counter
         step_counter += 1
         DBOS.logger.info("I'm test_step")
@@ -73,6 +73,10 @@ async def test_async_workflow(dbos: DBOS) -> None:
     sync_handle = DBOS.start_workflow(test_workflow, "alice", "bob")
     assert sync_handle.get_result() == "alicetxn31bobstep3"  # type: ignore

+    # Test DBOS.start_workflow_async on steps
+    handle = await DBOS.start_workflow_async(test_step, "alice")
+    assert (await handle.get_result()) == "alicestep4"
+

 @pytest.mark.asyncio
 async def test_async_step(dbos: DBOS) -> None: