dbos 1.6.0__tar.gz → 1.8.0a1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (108)
  1. {dbos-1.6.0 → dbos-1.8.0a1}/PKG-INFO +1 -1
  2. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_admin_server.py +25 -9
  3. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_client.py +12 -0
  4. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_conductor/conductor.py +6 -0
  5. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_conductor/protocol.py +5 -2
  6. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_core.py +4 -1
  7. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_dbos.py +17 -1
  8. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_queue.py +18 -3
  9. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_sys_db.py +49 -27
  10. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_workflow_commands.py +9 -2
  11. {dbos-1.6.0 → dbos-1.8.0a1}/pyproject.toml +1 -1
  12. {dbos-1.6.0 → dbos-1.8.0a1}/tests/test_admin_server.py +266 -23
  13. {dbos-1.6.0 → dbos-1.8.0a1}/tests/test_client.py +20 -0
  14. {dbos-1.6.0 → dbos-1.8.0a1}/tests/test_dbos.py +26 -0
  15. {dbos-1.6.0 → dbos-1.8.0a1}/tests/test_queue.py +8 -8
  16. {dbos-1.6.0 → dbos-1.8.0a1}/tests/test_scheduler.py +8 -8
  17. {dbos-1.6.0 → dbos-1.8.0a1}/tests/test_workflow_introspection.py +70 -0
  18. {dbos-1.6.0 → dbos-1.8.0a1}/LICENSE +0 -0
  19. {dbos-1.6.0 → dbos-1.8.0a1}/README.md +0 -0
  20. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/__init__.py +0 -0
  21. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/__main__.py +0 -0
  22. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_app_db.py +0 -0
  23. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_classproperty.py +0 -0
  24. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_context.py +0 -0
  25. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_croniter.py +0 -0
  26. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_dbos_config.py +0 -0
  27. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_debug.py +0 -0
  28. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_docker_pg_helper.py +0 -0
  29. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_error.py +0 -0
  30. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_event_loop.py +0 -0
  31. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_fastapi.py +0 -0
  32. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_flask.py +0 -0
  33. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_kafka.py +0 -0
  34. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_kafka_message.py +0 -0
  35. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_logger.py +0 -0
  36. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_migrations/env.py +0 -0
  37. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_migrations/script.py.mako +0 -0
  38. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_migrations/versions/04ca4f231047_workflow_queues_executor_id.py +0 -0
  39. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_migrations/versions/27ac6900c6ad_add_queue_dedup.py +0 -0
  40. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_migrations/versions/50f3227f0b4b_fix_job_queue.py +0 -0
  41. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_migrations/versions/5c361fc04708_added_system_tables.py +0 -0
  42. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_migrations/versions/66478e1b95e5_consolidate_queues.py +0 -0
  43. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_migrations/versions/83f3732ae8e7_workflow_timeout.py +0 -0
  44. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_migrations/versions/933e86bdac6a_add_queue_priority.py +0 -0
  45. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_migrations/versions/a3b18ad34abe_added_triggers.py +0 -0
  46. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_migrations/versions/d76646551a6b_job_queue_limiter.py +0 -0
  47. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_migrations/versions/d76646551a6c_workflow_queue.py +0 -0
  48. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_migrations/versions/d994145b47b6_consolidate_inputs.py +0 -0
  49. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_migrations/versions/eab0cc1d9a14_job_queue.py +0 -0
  50. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py +0 -0
  51. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_outcome.py +0 -0
  52. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_recovery.py +0 -0
  53. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_registrations.py +0 -0
  54. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_roles.py +0 -0
  55. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_scheduler.py +0 -0
  56. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_schemas/__init__.py +0 -0
  57. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_schemas/application_database.py +0 -0
  58. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_schemas/system_database.py +0 -0
  59. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_serialization.py +0 -0
  60. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_templates/dbos-db-starter/README.md +0 -0
  61. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_templates/dbos-db-starter/__package/__init__.py +0 -0
  62. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_templates/dbos-db-starter/__package/main.py.dbos +0 -0
  63. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_templates/dbos-db-starter/__package/schema.py +0 -0
  64. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_templates/dbos-db-starter/alembic.ini +0 -0
  65. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +0 -0
  66. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_templates/dbos-db-starter/migrations/env.py.dbos +0 -0
  67. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_templates/dbos-db-starter/migrations/script.py.mako +0 -0
  68. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py +0 -0
  69. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -0
  70. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_tracer.py +0 -0
  71. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/_utils.py +0 -0
  72. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/cli/_github_init.py +0 -0
  73. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/cli/_template_init.py +0 -0
  74. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/cli/cli.py +0 -0
  75. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/dbos-config.schema.json +0 -0
  76. {dbos-1.6.0 → dbos-1.8.0a1}/dbos/py.typed +0 -0
  77. {dbos-1.6.0 → dbos-1.8.0a1}/tests/__init__.py +0 -0
  78. {dbos-1.6.0 → dbos-1.8.0a1}/tests/atexit_no_ctor.py +0 -0
  79. {dbos-1.6.0 → dbos-1.8.0a1}/tests/atexit_no_launch.py +0 -0
  80. {dbos-1.6.0 → dbos-1.8.0a1}/tests/classdefs.py +0 -0
  81. {dbos-1.6.0 → dbos-1.8.0a1}/tests/client_collateral.py +0 -0
  82. {dbos-1.6.0 → dbos-1.8.0a1}/tests/client_worker.py +0 -0
  83. {dbos-1.6.0 → dbos-1.8.0a1}/tests/conftest.py +0 -0
  84. {dbos-1.6.0 → dbos-1.8.0a1}/tests/dupname_classdefs1.py +0 -0
  85. {dbos-1.6.0 → dbos-1.8.0a1}/tests/dupname_classdefsa.py +0 -0
  86. {dbos-1.6.0 → dbos-1.8.0a1}/tests/more_classdefs.py +0 -0
  87. {dbos-1.6.0 → dbos-1.8.0a1}/tests/queuedworkflow.py +0 -0
  88. {dbos-1.6.0 → dbos-1.8.0a1}/tests/test_async.py +0 -0
  89. {dbos-1.6.0 → dbos-1.8.0a1}/tests/test_classdecorators.py +0 -0
  90. {dbos-1.6.0 → dbos-1.8.0a1}/tests/test_cli.py +0 -0
  91. {dbos-1.6.0 → dbos-1.8.0a1}/tests/test_concurrency.py +0 -0
  92. {dbos-1.6.0 → dbos-1.8.0a1}/tests/test_config.py +0 -0
  93. {dbos-1.6.0 → dbos-1.8.0a1}/tests/test_croniter.py +0 -0
  94. {dbos-1.6.0 → dbos-1.8.0a1}/tests/test_debug.py +0 -0
  95. {dbos-1.6.0 → dbos-1.8.0a1}/tests/test_docker_secrets.py +0 -0
  96. {dbos-1.6.0 → dbos-1.8.0a1}/tests/test_failures.py +0 -0
  97. {dbos-1.6.0 → dbos-1.8.0a1}/tests/test_fastapi.py +0 -0
  98. {dbos-1.6.0 → dbos-1.8.0a1}/tests/test_fastapi_roles.py +0 -0
  99. {dbos-1.6.0 → dbos-1.8.0a1}/tests/test_flask.py +0 -0
  100. {dbos-1.6.0 → dbos-1.8.0a1}/tests/test_kafka.py +0 -0
  101. {dbos-1.6.0 → dbos-1.8.0a1}/tests/test_outcome.py +0 -0
  102. {dbos-1.6.0 → dbos-1.8.0a1}/tests/test_package.py +0 -0
  103. {dbos-1.6.0 → dbos-1.8.0a1}/tests/test_schema_migration.py +0 -0
  104. {dbos-1.6.0 → dbos-1.8.0a1}/tests/test_singleton.py +0 -0
  105. {dbos-1.6.0 → dbos-1.8.0a1}/tests/test_spans.py +0 -0
  106. {dbos-1.6.0 → dbos-1.8.0a1}/tests/test_sqlalchemy.py +0 -0
  107. {dbos-1.6.0 → dbos-1.8.0a1}/tests/test_workflow_management.py +0 -0
  108. {dbos-1.6.0 → dbos-1.8.0a1}/version/__init__.py +0 -0
{dbos-1.6.0 → dbos-1.8.0a1}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dbos
-Version: 1.6.0
+Version: 1.8.0a1
 Summary: Ultra-lightweight durable execution in Python
 Author-Email: "DBOS, Inc." <contact@dbos.dev>
 License: MIT
{dbos-1.6.0 → dbos-1.8.0a1}/dbos/_admin_server.py
@@ -9,6 +9,7 @@ from typing import TYPE_CHECKING, Any, Dict, List, Optional, TypedDict
 
 from dbos._workflow_commands import garbage_collect, global_timeout
 
+from ._conductor import protocol as conductor_protocol
 from ._context import SetWorkflowID
 from ._error import DBOSException
 from ._logger import dbos_logger
@@ -118,7 +119,12 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
                 self.send_response(404)
                 self._end_headers()
                 return
-            response_body = json.dumps(workflows[0].__dict__).encode("utf-8")
+            workflow_output = (
+                conductor_protocol.WorkflowsOutput.from_workflow_information(
+                    workflows[0]
+                )
+            )
+            response_body = json.dumps(workflow_output.__dict__).encode("utf-8")
             self.send_response(200)
             self.send_header("Content-Type", "application/json")
             self.send_header("Content-Length", str(len(response_body)))
@@ -326,20 +332,26 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
 
     def _handle_workflows(self, filters: Dict[str, Any]) -> None:
         workflows = self.dbos.list_workflows(
-            workflow_ids=filters.get("workflow_ids"),
-            name=filters.get("name"),
+            workflow_ids=filters.get("workflow_uuids"),
+            user=filters.get("authenticated_user"),
             start_time=filters.get("start_time"),
             end_time=filters.get("end_time"),
             status=filters.get("status"),
             app_version=filters.get("application_version"),
+            name=filters.get("workflow_name"),
             limit=filters.get("limit"),
             offset=filters.get("offset"),
             sort_desc=filters.get("sort_desc", False),
             workflow_id_prefix=filters.get("workflow_id_prefix"),
+            load_input=filters.get("load_input", False),
+            load_output=filters.get("load_output", False),
         )
-
+        workflows_output = [
+            conductor_protocol.WorkflowsOutput.from_workflow_information(i)
+            for i in workflows
+        ]
         response_body = json.dumps(
-            [workflow.__dict__ for workflow in workflows]
+            [workflow.__dict__ for workflow in workflows_output]
         ).encode("utf-8")
         self.send_response(200)
         self.send_header("Content-Type", "application/json")
@@ -349,18 +361,22 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
 
     def _handle_queued_workflows(self, filters: Dict[str, Any]) -> None:
         workflows = self.dbos.list_queued_workflows(
-            queue_name=filters.get("queue_name"),
-            name=filters.get("name"),
             start_time=filters.get("start_time"),
             end_time=filters.get("end_time"),
             status=filters.get("status"),
+            name=filters.get("workflow_name"),
             limit=filters.get("limit"),
             offset=filters.get("offset"),
+            queue_name=filters.get("queue_name"),
             sort_desc=filters.get("sort_desc", False),
+            load_input=filters.get("load_input", False),
        )
-
+        workflows_output = [
+            conductor_protocol.WorkflowsOutput.from_workflow_information(i)
+            for i in workflows
+        ]
         response_body = json.dumps(
-            [workflow.__dict__ for workflow in workflows]
+            [workflow.__dict__ for workflow in workflows_output]
         ).encode("utf-8")
         self.send_response(200)
         self.send_header("Content-Type", "application/json")
{dbos-1.6.0 → dbos-1.8.0a1}/dbos/_client.py
@@ -294,6 +294,8 @@ class DBOSClient:
         offset: Optional[int] = None,
         sort_desc: bool = False,
         workflow_id_prefix: Optional[str] = None,
+        load_input: bool = True,
+        load_output: bool = True,
     ) -> List[WorkflowStatus]:
         return list_workflows(
             self._sys_db,
@@ -308,6 +310,8 @@ class DBOSClient:
             offset=offset,
             sort_desc=sort_desc,
             workflow_id_prefix=workflow_id_prefix,
+            load_input=load_input,
+            load_output=load_output,
         )
 
     async def list_workflows_async(
@@ -324,6 +328,8 @@ class DBOSClient:
         offset: Optional[int] = None,
         sort_desc: bool = False,
         workflow_id_prefix: Optional[str] = None,
+        load_input: bool = True,
+        load_output: bool = True,
     ) -> List[WorkflowStatus]:
         return await asyncio.to_thread(
             self.list_workflows,
@@ -338,6 +344,8 @@ class DBOSClient:
             offset=offset,
             sort_desc=sort_desc,
             workflow_id_prefix=workflow_id_prefix,
+            load_input=load_input,
+            load_output=load_output,
         )
 
     def list_queued_workflows(
@@ -351,6 +359,7 @@ class DBOSClient:
         limit: Optional[int] = None,
         offset: Optional[int] = None,
         sort_desc: bool = False,
+        load_input: bool = True,
     ) -> List[WorkflowStatus]:
         return list_queued_workflows(
             self._sys_db,
@@ -362,6 +371,7 @@ class DBOSClient:
             limit=limit,
             offset=offset,
             sort_desc=sort_desc,
+            load_input=load_input,
         )
 
     async def list_queued_workflows_async(
@@ -375,6 +385,7 @@ class DBOSClient:
         limit: Optional[int] = None,
         offset: Optional[int] = None,
         sort_desc: bool = False,
+        load_input: bool = True,
     ) -> List[WorkflowStatus]:
         return await asyncio.to_thread(
             self.list_queued_workflows,
@@ -386,6 +397,7 @@ class DBOSClient:
             limit=limit,
             offset=offset,
             sort_desc=sort_desc,
+            load_input=load_input,
         )
 
     def list_workflow_steps(self, workflow_id: str) -> List[StepInfo]:
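Reviewer note: both new flags default to True, so existing callers still get inputs and outputs; passing False skips fetching and deserializing those columns, which is the point when listing many workflows with large payloads. A usage sketch, assuming a DBOSClient constructed from a database URL (the URL here is illustrative):

from dbos import DBOSClient

client = DBOSClient("postgresql://postgres:dbos@localhost:5432/app")  # assumed URL
# Fetch lightweight metadata only: skip the inputs/output columns entirely
statuses = client.list_workflows(limit=100, load_input=False, load_output=False)
for s in statuses:
    print(s.workflow_id, s.status)  # s.input / s.output are not populated here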
{dbos-1.6.0 → dbos-1.8.0a1}/dbos/_conductor/conductor.py
@@ -223,6 +223,8 @@ class ConductorWebsocket(threading.Thread):
                     body = list_workflows_message.body
                     infos = []
                     try:
+                        load_input = body.get("load_input", False)
+                        load_output = body.get("load_output", False)
                         infos = list_workflows(
                             self.dbos._sys_db,
                             workflow_ids=body["workflow_uuids"],
@@ -235,6 +237,8 @@ class ConductorWebsocket(threading.Thread):
                             limit=body["limit"],
                             offset=body["offset"],
                             sort_desc=body["sort_desc"],
+                            load_input=load_input,
+                            load_output=load_output,
                         )
                     except Exception as e:
                         error_message = f"Exception encountered when listing workflows: {traceback.format_exc()}"
@@ -257,6 +261,7 @@ class ConductorWebsocket(threading.Thread):
                     q_body = list_queued_workflows_message.body
                     infos = []
                     try:
+                        q_load_input = q_body.get("load_input", False)
                         infos = list_queued_workflows(
                             self.dbos._sys_db,
                             start_time=q_body["start_time"],
@@ -267,6 +272,7 @@ class ConductorWebsocket(threading.Thread):
                             offset=q_body["offset"],
                             queue_name=q_body["queue_name"],
                             sort_desc=q_body["sort_desc"],
+                            load_input=q_load_input,
                         )
                     except Exception as e:
                         error_message = f"Exception encountered when listing queued workflows: {traceback.format_exc()}"
{dbos-1.6.0 → dbos-1.8.0a1}/dbos/_conductor/protocol.py
@@ -110,7 +110,7 @@ class RestartResponse(BaseMessage):
     error_message: Optional[str] = None
 
 
-class ListWorkflowsBody(TypedDict):
+class ListWorkflowsBody(TypedDict, total=False):
     workflow_uuids: List[str]
     workflow_name: Optional[str]
     authenticated_user: Optional[str]
@@ -121,6 +121,8 @@ class ListWorkflowsBody(TypedDict):
     limit: Optional[int]
     offset: Optional[int]
     sort_desc: bool
+    load_input: bool
+    load_output: bool
 
 
 @dataclass
@@ -209,7 +211,7 @@ class ListWorkflowsResponse(BaseMessage):
     error_message: Optional[str] = None
 
 
-class ListQueuedWorkflowsBody(TypedDict):
+class ListQueuedWorkflowsBody(TypedDict, total=False):
     workflow_name: Optional[str]
     start_time: Optional[str]
     end_time: Optional[str]
@@ -218,6 +220,7 @@ class ListQueuedWorkflowsBody(TypedDict):
     limit: Optional[int]
     offset: Optional[int]
     sort_desc: bool
+    load_input: bool
 
 
 @dataclass
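Reviewer note: switching the message bodies to total=False is the static-typing half of the same compatibility story. Under total=False every declared key becomes optional, so a dict built from an older message still satisfies the type. A trimmed sketch (only two of the real fields shown):

from typing import TypedDict

class ListWorkflowsBody(TypedDict, total=False):
    limit: int
    load_input: bool

body: ListWorkflowsBody = {"limit": 10}  # OK: keys may be omitted under total=False
load_input = body.get("load_input", False)  # runtime default mirrors the optionality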
{dbos-1.6.0 → dbos-1.8.0a1}/dbos/_core.py
@@ -1157,13 +1157,16 @@ def decorate_step(
         def wrapper(*args: Any, **kwargs: Any) -> Any:
             rr: Optional[str] = check_required_roles(func, fi)
             # Entering step is allowed:
+            #  No DBOS, just call the original function directly
             #  In a step already, just call the original function directly.
             #  In a workflow (that is not in a step already)
             #  Not in a workflow (we will start the single op workflow)
+            if not dbosreg.dbos or not dbosreg.dbos._launched:
+                # Call the original function directly
+                return func(*args, **kwargs)
             ctx = get_local_dbos_context()
             if ctx and ctx.is_step():
                 # Call the original function directly
-
                 return func(*args, **kwargs)
             if ctx and ctx.is_within_workflow():
                 assert ctx.is_workflow(), "Steps must be called from within workflows"
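Reviewer note: with this guard, a step-decorated function called before DBOS is constructed and launched now runs as a plain function instead of erroring. A behavioral sketch, assuming the usual decorator usage:

from dbos import DBOS

@DBOS.step()
def add_one(x: int) -> int:
    return x + 1

# Before DBOS() is constructed and launched, the wrapper falls through
# to the raw function: no DBOS context, no durable recording.
print(add_one(1))  # 2

# After DBOS() and DBOS.launch(), the same call made from inside a
# workflow is recorded as a durable step.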
{dbos-1.6.0 → dbos-1.8.0a1}/dbos/_dbos.py
@@ -7,7 +7,6 @@ import inspect
 import os
 import sys
 import threading
-import traceback
 import uuid
 from concurrent.futures import ThreadPoolExecutor
 from logging import Logger
@@ -28,6 +27,7 @@ from typing import (
 )
 
 from opentelemetry.trace import Span
+from rich import print
 
 from dbos._conductor.conductor import ConductorWebsocket
 from dbos._sys_db import WorkflowStatus
@@ -517,6 +517,16 @@ class DBOS:
 
         dbos_logger.info("DBOS launched!")
 
+        if self.conductor_key is None and os.environ.get("DBOS__CLOUD") != "true":
+            # Hint the user to open the URL to register and set up Conductor
+            app_name = self._config["name"]
+            conductor_registration_url = (
+                f"https://console.dbos.dev/self-host?appname={app_name}"
+            )
+            print(
+                f"[bold]To view and manage workflows, connect to DBOS Conductor at:[/bold] [bold blue]{conductor_registration_url}[/bold blue]"
+            )
+
         # Flush handlers and add OTLP to all loggers if enabled
         # to enable their export in DBOS Cloud
         for handler in dbos_logger.handlers:
@@ -1022,6 +1032,8 @@ class DBOS:
         offset: Optional[int] = None,
         sort_desc: bool = False,
         workflow_id_prefix: Optional[str] = None,
+        load_input: bool = True,
+        load_output: bool = True,
     ) -> List[WorkflowStatus]:
         def fn() -> List[WorkflowStatus]:
             return list_workflows(
@@ -1037,6 +1049,8 @@ class DBOS:
                 offset=offset,
                 sort_desc=sort_desc,
                 workflow_id_prefix=workflow_id_prefix,
+                load_input=load_input,
+                load_output=load_output,
             )
 
         return _get_dbos_instance()._sys_db.call_function_as_step(
@@ -1055,6 +1069,7 @@ class DBOS:
         limit: Optional[int] = None,
         offset: Optional[int] = None,
         sort_desc: bool = False,
+        load_input: bool = True,
     ) -> List[WorkflowStatus]:
         def fn() -> List[WorkflowStatus]:
             return list_queued_workflows(
@@ -1067,6 +1082,7 @@ class DBOS:
                 limit=limit,
                 offset=offset,
                 sort_desc=sort_desc,
+                load_input=load_input,
             )
 
         return _get_dbos_instance()._sys_db.call_function_as_step(
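Reviewer note: the Conductor hint added at launch is emitted with rich's drop-in print, which renders console markup tags as terminal styling rather than printing them literally:

from rich import print

# [bold] and [bold blue] are rich console markup, rendered as styling
print("[bold]Connect to DBOS Conductor at:[/bold] [bold blue]https://console.dbos.dev/self-host?appname=myapp[/bold blue]")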
{dbos-1.6.0 → dbos-1.8.0a1}/dbos/_queue.py
@@ -1,3 +1,4 @@
+import random
 import threading
 from typing import TYPE_CHECKING, Any, Callable, Coroutine, Optional, TypedDict
 
@@ -94,8 +95,12 @@ class Queue:
 
 
 def queue_thread(stop_event: threading.Event, dbos: "DBOS") -> None:
+    polling_interval = 1.0
+    min_polling_interval = 1.0
+    max_polling_interval = 120.0
     while not stop_event.is_set():
-        if stop_event.wait(timeout=1):
+        # Wait for the polling interval with jitter
+        if stop_event.wait(timeout=polling_interval * random.uniform(0.95, 1.05)):
             return
         queues = dict(dbos._registry.queue_info_map)
         for _, queue in queues.items():
@@ -106,12 +111,22 @@ def queue_thread(stop_event: threading.Event, dbos: "DBOS") -> None:
                 for id in wf_ids:
                     execute_workflow_by_id(dbos, id)
             except OperationalError as e:
-                # Ignore serialization error
-                if not isinstance(
+                if isinstance(
                     e.orig, (errors.SerializationFailure, errors.LockNotAvailable)
                 ):
+                    # If a serialization error is encountered, increase the polling interval
+                    polling_interval = min(
+                        max_polling_interval,
+                        polling_interval * 2.0,
+                    )
+                    dbos.logger.warning(
+                        f"Contention detected in queue thread for {queue.name}. Increasing polling interval to {polling_interval:.2f}."
+                    )
+                else:
                     dbos.logger.warning(f"Exception encountered in queue thread: {e}")
             except Exception as e:
                 if not stop_event.is_set():
                     # Only print the error if the thread is not stopping
                     dbos.logger.warning(f"Exception encountered in queue thread: {e}")
+        # Attempt to scale back the polling interval on each iteration
+        polling_interval = max(min_polling_interval, polling_interval * 0.9)
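Reviewer note: taken together, the queue thread now implements a jittered multiplicative-increase/decrease backoff: lock contention doubles the polling interval up to 120 s, and every subsequent pass decays it by 10% back toward 1 s. The policy, distilled out of the DBOS plumbing into a standalone sketch:

import random

def next_interval(current: float, contended: bool,
                  lo: float = 1.0, hi: float = 120.0) -> float:
    """Multiplicative increase on contention, gradual decay otherwise."""
    if contended:
        return min(hi, current * 2.0)  # back off fast under lock contention
    return max(lo, current * 0.9)      # recover slowly when the queue is quiet

interval = 1.0
for contended in (True, True, False, False):
    interval = next_interval(interval, contended)
    # Jitter de-synchronizes workers that would otherwise poll in lockstep
    print(f"sleep {interval * random.uniform(0.95, 1.05):.2f}s")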
{dbos-1.6.0 → dbos-1.8.0a1}/dbos/_sys_db.py
@@ -788,11 +788,17 @@ class SystemDatabase:
                 pass  # CB: I guess we're assuming the WF will show up eventually.
             time.sleep(1)
 
-    def get_workflows(self, input: GetWorkflowsInput) -> List[WorkflowStatus]:
+    def get_workflows(
+        self,
+        input: GetWorkflowsInput,
+        *,
+        load_input: bool = True,
+        load_output: bool = True,
+    ) -> List[WorkflowStatus]:
         """
         Retrieve a list of workflows result and inputs based on the input criteria. The result is a list of external-facing workflow status objects.
         """
-        query = sa.select(
+        load_columns = [
             SystemSchema.workflow_status.c.workflow_uuid,
             SystemSchema.workflow_status.c.status,
             SystemSchema.workflow_status.c.name,
@@ -808,12 +814,16 @@ class SystemDatabase:
             SystemSchema.workflow_status.c.updated_at,
             SystemSchema.workflow_status.c.application_version,
             SystemSchema.workflow_status.c.application_id,
-            SystemSchema.workflow_status.c.inputs,
-            SystemSchema.workflow_status.c.output,
-            SystemSchema.workflow_status.c.error,
             SystemSchema.workflow_status.c.workflow_deadline_epoch_ms,
             SystemSchema.workflow_status.c.workflow_timeout_ms,
-        )
+        ]
+        if load_input:
+            load_columns.append(SystemSchema.workflow_status.c.inputs)
+        if load_output:
+            load_columns.append(SystemSchema.workflow_status.c.output)
+            load_columns.append(SystemSchema.workflow_status.c.error)
+
+        query = sa.select(*load_columns)
         if input.sort_desc:
             query = query.order_by(SystemSchema.workflow_status.c.created_at.desc())
         else:
@@ -880,29 +890,35 @@ class SystemDatabase:
             info.updated_at = row[12]
             info.app_version = row[13]
             info.app_id = row[14]
+            info.workflow_deadline_epoch_ms = row[15]
+            info.workflow_timeout_ms = row[16]
 
+            raw_input = row[17] if load_input else None
+            raw_output = row[18] if load_output else None
+            raw_error = row[19] if load_output else None
             inputs, output, exception = _serialization.safe_deserialize(
                 info.workflow_id,
-                serialized_input=row[15],
-                serialized_output=row[16],
-                serialized_exception=row[17],
+                serialized_input=raw_input,
+                serialized_output=raw_output,
+                serialized_exception=raw_error,
             )
             info.input = inputs
             info.output = output
             info.error = exception
-            info.workflow_deadline_epoch_ms = row[18]
-            info.workflow_timeout_ms = row[19]
 
             infos.append(info)
         return infos
 
     def get_queued_workflows(
-        self, input: GetQueuedWorkflowsInput
+        self,
+        input: GetQueuedWorkflowsInput,
+        *,
+        load_input: bool = True,
     ) -> List[WorkflowStatus]:
         """
         Retrieve a list of queued workflows result and inputs based on the input criteria. The result is a list of external-facing workflow status objects.
         """
-        query = sa.select(
+        load_columns = [
             SystemSchema.workflow_status.c.workflow_uuid,
             SystemSchema.workflow_status.c.status,
             SystemSchema.workflow_status.c.name,
@@ -918,12 +934,13 @@ class SystemDatabase:
             SystemSchema.workflow_status.c.updated_at,
             SystemSchema.workflow_status.c.application_version,
             SystemSchema.workflow_status.c.application_id,
-            SystemSchema.workflow_status.c.inputs,
-            SystemSchema.workflow_status.c.output,
-            SystemSchema.workflow_status.c.error,
             SystemSchema.workflow_status.c.workflow_deadline_epoch_ms,
             SystemSchema.workflow_status.c.workflow_timeout_ms,
-        ).where(
+        ]
+        if load_input:
+            load_columns.append(SystemSchema.workflow_status.c.inputs)
+
+        query = sa.select(*load_columns).where(
             sa.and_(
                 SystemSchema.workflow_status.c.queue_name.isnot(None),
                 SystemSchema.workflow_status.c.status.in_(["ENQUEUED", "PENDING"]),
@@ -984,18 +1001,21 @@ class SystemDatabase:
             info.updated_at = row[12]
             info.app_version = row[13]
             info.app_id = row[14]
+            info.workflow_deadline_epoch_ms = row[15]
+            info.workflow_timeout_ms = row[16]
+
+            raw_input = row[17] if load_input else None
 
+            # Error and Output are not loaded because they should always be None for queued workflows.
             inputs, output, exception = _serialization.safe_deserialize(
                 info.workflow_id,
-                serialized_input=row[15],
-                serialized_output=row[16],
-                serialized_exception=row[17],
+                serialized_input=raw_input,
+                serialized_output=None,
+                serialized_exception=None,
             )
             info.input = inputs
             info.output = output
             info.error = exception
-            info.workflow_deadline_epoch_ms = row[18]
-            info.workflow_timeout_ms = row[19]
 
             infos.append(info)
 
@@ -1650,7 +1670,7 @@ class SystemDatabase:
             return []
 
         # Compute max_tasks, the number of workflows that can be dequeued given local and global concurrency limits,
-        max_tasks = float("inf")
+        max_tasks = 100  # To minimize contention with large queues, never dequeue more than 100 tasks
         if queue.worker_concurrency is not None or queue.concurrency is not None:
             # Count how many workflows on this queue are currently PENDING both locally and globally.
             pending_tasks_query = (
@@ -1694,6 +1714,7 @@ class SystemDatabase:
 
         # Retrieve the first max_tasks workflows in the queue.
         # Only retrieve workflows of the local version (or without version set)
+        skip_locks = queue.concurrency is None
         query = (
             sa.select(
                 SystemSchema.workflow_status.c.workflow_uuid,
@@ -1711,7 +1732,10 @@ class SystemDatabase:
                     SystemSchema.workflow_status.c.application_version.is_(None),
                 )
             )
-            .with_for_update(nowait=True)  # Error out early
+            # Unless global concurrency is set, use skip_locked to only select
+            # rows that can be locked. If global concurrency is set, use no_wait
+            # to ensure all processes have a consistent view of the table.
+            .with_for_update(skip_locked=skip_locks, nowait=(not skip_locks))
         )
         if queue.priority_enabled:
             query = query.order_by(
@@ -1720,9 +1744,7 @@ class SystemDatabase:
             )
         else:
             query = query.order_by(SystemSchema.workflow_status.c.created_at.asc())
-        # Apply limit only if max_tasks is finite
-        if max_tasks != float("inf"):
-            query = query.limit(int(max_tasks))
+        query = query.limit(int(max_tasks))
 
         rows = c.execute(query).fetchall()
{dbos-1.6.0 → dbos-1.8.0a1}/dbos/_workflow_commands.py
@@ -33,6 +33,8 @@ def list_workflows(
     offset: Optional[int] = None,
     sort_desc: bool = False,
     workflow_id_prefix: Optional[str] = None,
+    load_input: bool = True,
+    load_output: bool = True,
 ) -> List[WorkflowStatus]:
     input = GetWorkflowsInput()
     input.workflow_ids = workflow_ids
@@ -47,7 +49,9 @@ def list_workflows(
     input.sort_desc = sort_desc
     input.workflow_id_prefix = workflow_id_prefix
 
-    infos: List[WorkflowStatus] = sys_db.get_workflows(input)
+    infos: List[WorkflowStatus] = sys_db.get_workflows(
+        input, load_input=load_input, load_output=load_output
+    )
 
     return infos
 
@@ -63,6 +67,7 @@ def list_queued_workflows(
     limit: Optional[int] = None,
     offset: Optional[int] = None,
     sort_desc: bool = False,
+    load_input: bool = True,
 ) -> List[WorkflowStatus]:
     input: GetQueuedWorkflowsInput = {
         "queue_name": queue_name,
@@ -75,7 +80,9 @@ def list_queued_workflows(
         "sort_desc": sort_desc,
     }
 
-    infos: List[WorkflowStatus] = sys_db.get_queued_workflows(input)
+    infos: List[WorkflowStatus] = sys_db.get_queued_workflows(
+        input, load_input=load_input
+    )
     return infos
{dbos-1.6.0 → dbos-1.8.0a1}/pyproject.toml
@@ -27,7 +27,7 @@ dependencies = [
 ]
 requires-python = ">=3.9"
 readme = "README.md"
-version = "1.6.0"
+version = "1.8.0a1"
 
 [project.license]
 text = "MIT"