dbos 1.5.0a10.tar.gz → 1.8.0a1.tar.gz

This diff compares the contents of two publicly released versions of the dbos package as they appear in their public registry. It is provided for informational purposes only.

This version of dbos has been flagged as potentially problematic.

Files changed (108)
  1. {dbos-1.5.0a10 → dbos-1.8.0a1}/PKG-INFO +1 -1
  2. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_admin_server.py +25 -9
  3. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_client.py +19 -7
  4. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_conductor/conductor.py +6 -0
  5. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_conductor/protocol.py +5 -2
  6. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_context.py +17 -12
  7. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_core.py +55 -35
  8. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_dbos.py +37 -7
  9. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_kafka.py +2 -1
  10. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_queue.py +18 -3
  11. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_registrations.py +5 -3
  12. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_roles.py +3 -2
  13. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_scheduler.py +11 -8
  14. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_sys_db.py +59 -34
  15. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_workflow_commands.py +14 -7
  16. {dbos-1.5.0a10 → dbos-1.8.0a1}/pyproject.toml +1 -1
  17. {dbos-1.5.0a10 → dbos-1.8.0a1}/tests/test_admin_server.py +266 -23
  18. {dbos-1.5.0a10 → dbos-1.8.0a1}/tests/test_classdecorators.py +1 -0
  19. {dbos-1.5.0a10 → dbos-1.8.0a1}/tests/test_client.py +20 -0
  20. {dbos-1.5.0a10 → dbos-1.8.0a1}/tests/test_dbos.py +82 -4
  21. {dbos-1.5.0a10 → dbos-1.8.0a1}/tests/test_failures.py +1 -1
  22. {dbos-1.5.0a10 → dbos-1.8.0a1}/tests/test_fastapi_roles.py +3 -3
  23. {dbos-1.5.0a10 → dbos-1.8.0a1}/tests/test_queue.py +49 -8
  24. {dbos-1.5.0a10 → dbos-1.8.0a1}/tests/test_scheduler.py +21 -7
  25. {dbos-1.5.0a10 → dbos-1.8.0a1}/tests/test_spans.py +21 -15
  26. {dbos-1.5.0a10 → dbos-1.8.0a1}/tests/test_workflow_introspection.py +74 -0
  27. {dbos-1.5.0a10 → dbos-1.8.0a1}/LICENSE +0 -0
  28. {dbos-1.5.0a10 → dbos-1.8.0a1}/README.md +0 -0
  29. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/__init__.py +0 -0
  30. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/__main__.py +0 -0
  31. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_app_db.py +0 -0
  32. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_classproperty.py +0 -0
  33. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_croniter.py +0 -0
  34. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_dbos_config.py +0 -0
  35. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_debug.py +0 -0
  36. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_docker_pg_helper.py +0 -0
  37. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_error.py +0 -0
  38. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_event_loop.py +0 -0
  39. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_fastapi.py +0 -0
  40. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_flask.py +0 -0
  41. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_kafka_message.py +0 -0
  42. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_logger.py +0 -0
  43. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_migrations/env.py +0 -0
  44. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_migrations/script.py.mako +0 -0
  45. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_migrations/versions/04ca4f231047_workflow_queues_executor_id.py +0 -0
  46. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_migrations/versions/27ac6900c6ad_add_queue_dedup.py +0 -0
  47. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_migrations/versions/50f3227f0b4b_fix_job_queue.py +0 -0
  48. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_migrations/versions/5c361fc04708_added_system_tables.py +0 -0
  49. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_migrations/versions/66478e1b95e5_consolidate_queues.py +0 -0
  50. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_migrations/versions/83f3732ae8e7_workflow_timeout.py +0 -0
  51. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_migrations/versions/933e86bdac6a_add_queue_priority.py +0 -0
  52. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_migrations/versions/a3b18ad34abe_added_triggers.py +0 -0
  53. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_migrations/versions/d76646551a6b_job_queue_limiter.py +0 -0
  54. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_migrations/versions/d76646551a6c_workflow_queue.py +0 -0
  55. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_migrations/versions/d994145b47b6_consolidate_inputs.py +0 -0
  56. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_migrations/versions/eab0cc1d9a14_job_queue.py +0 -0
  57. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py +0 -0
  58. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_outcome.py +0 -0
  59. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_recovery.py +0 -0
  60. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_schemas/__init__.py +0 -0
  61. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_schemas/application_database.py +0 -0
  62. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_schemas/system_database.py +0 -0
  63. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_serialization.py +0 -0
  64. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_templates/dbos-db-starter/README.md +0 -0
  65. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_templates/dbos-db-starter/__package/__init__.py +0 -0
  66. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_templates/dbos-db-starter/__package/main.py.dbos +0 -0
  67. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_templates/dbos-db-starter/__package/schema.py +0 -0
  68. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_templates/dbos-db-starter/alembic.ini +0 -0
  69. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +0 -0
  70. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_templates/dbos-db-starter/migrations/env.py.dbos +0 -0
  71. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_templates/dbos-db-starter/migrations/script.py.mako +0 -0
  72. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py +0 -0
  73. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -0
  74. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_tracer.py +0 -0
  75. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/_utils.py +0 -0
  76. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/cli/_github_init.py +0 -0
  77. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/cli/_template_init.py +0 -0
  78. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/cli/cli.py +0 -0
  79. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/dbos-config.schema.json +0 -0
  80. {dbos-1.5.0a10 → dbos-1.8.0a1}/dbos/py.typed +0 -0
  81. {dbos-1.5.0a10 → dbos-1.8.0a1}/tests/__init__.py +0 -0
  82. {dbos-1.5.0a10 → dbos-1.8.0a1}/tests/atexit_no_ctor.py +0 -0
  83. {dbos-1.5.0a10 → dbos-1.8.0a1}/tests/atexit_no_launch.py +0 -0
  84. {dbos-1.5.0a10 → dbos-1.8.0a1}/tests/classdefs.py +0 -0
  85. {dbos-1.5.0a10 → dbos-1.8.0a1}/tests/client_collateral.py +0 -0
  86. {dbos-1.5.0a10 → dbos-1.8.0a1}/tests/client_worker.py +0 -0
  87. {dbos-1.5.0a10 → dbos-1.8.0a1}/tests/conftest.py +0 -0
  88. {dbos-1.5.0a10 → dbos-1.8.0a1}/tests/dupname_classdefs1.py +0 -0
  89. {dbos-1.5.0a10 → dbos-1.8.0a1}/tests/dupname_classdefsa.py +0 -0
  90. {dbos-1.5.0a10 → dbos-1.8.0a1}/tests/more_classdefs.py +0 -0
  91. {dbos-1.5.0a10 → dbos-1.8.0a1}/tests/queuedworkflow.py +0 -0
  92. {dbos-1.5.0a10 → dbos-1.8.0a1}/tests/test_async.py +0 -0
  93. {dbos-1.5.0a10 → dbos-1.8.0a1}/tests/test_cli.py +0 -0
  94. {dbos-1.5.0a10 → dbos-1.8.0a1}/tests/test_concurrency.py +0 -0
  95. {dbos-1.5.0a10 → dbos-1.8.0a1}/tests/test_config.py +0 -0
  96. {dbos-1.5.0a10 → dbos-1.8.0a1}/tests/test_croniter.py +0 -0
  97. {dbos-1.5.0a10 → dbos-1.8.0a1}/tests/test_debug.py +0 -0
  98. {dbos-1.5.0a10 → dbos-1.8.0a1}/tests/test_docker_secrets.py +0 -0
  99. {dbos-1.5.0a10 → dbos-1.8.0a1}/tests/test_fastapi.py +0 -0
  100. {dbos-1.5.0a10 → dbos-1.8.0a1}/tests/test_flask.py +0 -0
  101. {dbos-1.5.0a10 → dbos-1.8.0a1}/tests/test_kafka.py +0 -0
  102. {dbos-1.5.0a10 → dbos-1.8.0a1}/tests/test_outcome.py +0 -0
  103. {dbos-1.5.0a10 → dbos-1.8.0a1}/tests/test_package.py +0 -0
  104. {dbos-1.5.0a10 → dbos-1.8.0a1}/tests/test_schema_migration.py +0 -0
  105. {dbos-1.5.0a10 → dbos-1.8.0a1}/tests/test_singleton.py +0 -0
  106. {dbos-1.5.0a10 → dbos-1.8.0a1}/tests/test_sqlalchemy.py +0 -0
  107. {dbos-1.5.0a10 → dbos-1.8.0a1}/tests/test_workflow_management.py +0 -0
  108. {dbos-1.5.0a10 → dbos-1.8.0a1}/version/__init__.py +0 -0

PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dbos
-Version: 1.5.0a10
+Version: 1.8.0a1
 Summary: Ultra-lightweight durable execution in Python
 Author-Email: "DBOS, Inc." <contact@dbos.dev>
 License: MIT

dbos/_admin_server.py
@@ -9,6 +9,7 @@ from typing import TYPE_CHECKING, Any, Dict, List, Optional, TypedDict

 from dbos._workflow_commands import garbage_collect, global_timeout

+from ._conductor import protocol as conductor_protocol
 from ._context import SetWorkflowID
 from ._error import DBOSException
 from ._logger import dbos_logger
@@ -118,7 +119,12 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
                 self.send_response(404)
                 self._end_headers()
                 return
-            response_body = json.dumps(workflows[0].__dict__).encode("utf-8")
+            workflow_output = (
+                conductor_protocol.WorkflowsOutput.from_workflow_information(
+                    workflows[0]
+                )
+            )
+            response_body = json.dumps(workflow_output.__dict__).encode("utf-8")
             self.send_response(200)
             self.send_header("Content-Type", "application/json")
             self.send_header("Content-Length", str(len(response_body)))
@@ -326,20 +332,26 @@ class AdminRequestHandler(BaseHTTPRequestHandler):

     def _handle_workflows(self, filters: Dict[str, Any]) -> None:
         workflows = self.dbos.list_workflows(
-            workflow_ids=filters.get("workflow_ids"),
-            name=filters.get("name"),
+            workflow_ids=filters.get("workflow_uuids"),
+            user=filters.get("authenticated_user"),
             start_time=filters.get("start_time"),
             end_time=filters.get("end_time"),
             status=filters.get("status"),
             app_version=filters.get("application_version"),
+            name=filters.get("workflow_name"),
             limit=filters.get("limit"),
             offset=filters.get("offset"),
             sort_desc=filters.get("sort_desc", False),
             workflow_id_prefix=filters.get("workflow_id_prefix"),
+            load_input=filters.get("load_input", False),
+            load_output=filters.get("load_output", False),
         )
-
+        workflows_output = [
+            conductor_protocol.WorkflowsOutput.from_workflow_information(i)
+            for i in workflows
+        ]
         response_body = json.dumps(
-            [workflow.__dict__ for workflow in workflows]
+            [workflow.__dict__ for workflow in workflows_output]
         ).encode("utf-8")
         self.send_response(200)
         self.send_header("Content-Type", "application/json")
@@ -349,18 +361,22 @@ class AdminRequestHandler(BaseHTTPRequestHandler):

     def _handle_queued_workflows(self, filters: Dict[str, Any]) -> None:
         workflows = self.dbos.list_queued_workflows(
-            queue_name=filters.get("queue_name"),
-            name=filters.get("name"),
             start_time=filters.get("start_time"),
             end_time=filters.get("end_time"),
             status=filters.get("status"),
+            name=filters.get("workflow_name"),
             limit=filters.get("limit"),
             offset=filters.get("offset"),
+            queue_name=filters.get("queue_name"),
             sort_desc=filters.get("sort_desc", False),
+            load_input=filters.get("load_input", False),
         )
-
+        workflows_output = [
+            conductor_protocol.WorkflowsOutput.from_workflow_information(i)
+            for i in workflows
+        ]
         response_body = json.dumps(
-            [workflow.__dict__ for workflow in workflows]
+            [workflow.__dict__ for workflow in workflows_output]
        ).encode("utf-8")
         self.send_response(200)
         self.send_header("Content-Type", "application/json")
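
The admin server's workflow-listing handlers now read the same filter keys as the Conductor protocol (workflow_uuids, workflow_name, authenticated_user, load_input, load_output) and serialize results through conductor_protocol.WorkflowsOutput instead of dumping WorkflowStatus.__dict__ directly. Below is a minimal sketch of a request a caller might send against these handlers; the port and the exact listing route are assumptions here, not shown in this diff:

import json
from urllib import request

# Assumed endpoint: 3001 is the usual DBOS admin port, and the listing route
# is hypothetical -- adjust both to match your deployment.
ADMIN_WORKFLOWS_URL = "http://localhost:3001/workflows"

# Filter keys mirror what _handle_workflows now reads from the request body.
filters = {
    "workflow_name": "example_workflow",  # hypothetical workflow name
    "status": "SUCCESS",
    "limit": 10,
    "sort_desc": True,
    "load_input": False,   # new in this release: skip loading serialized inputs
    "load_output": False,  # new in this release: skip loading serialized outputs
}

req = request.Request(
    ADMIN_WORKFLOWS_URL,
    data=json.dumps(filters).encode("utf-8"),
    headers={"Content-Type": "application/json"},
    method="POST",
)
with request.urlopen(req) as resp:
    # The handler returns a JSON array of WorkflowsOutput dictionaries.
    for workflow in json.loads(resp.read()):
        print(workflow)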

dbos/_client.py
@@ -1,7 +1,7 @@
 import asyncio
 import sys
 import uuid
-from typing import Any, Generic, List, Optional, TypedDict, TypeVar
+from typing import Any, Generic, List, Optional, TypedDict, TypeVar, Union

 from dbos._app_db import ApplicationDatabase
 from dbos._context import MaxPriority, MinPriority
@@ -128,7 +128,6 @@ class DBOSClient:
         workflow_name = options["workflow_name"]
         queue_name = options["queue_name"]

-        app_version = options.get("app_version")
         max_recovery_attempts = options.get("max_recovery_attempts")
         if max_recovery_attempts is None:
             max_recovery_attempts = DEFAULT_MAX_RECOVERY_ATTEMPTS
@@ -139,6 +138,7 @@ class DBOSClient:
         enqueue_options_internal: EnqueueOptionsInternal = {
             "deduplication_id": options.get("deduplication_id"),
             "priority": options.get("priority"),
+            "app_version": options.get("app_version"),
         }

         inputs: WorkflowInputs = {
@@ -152,7 +152,7 @@ class DBOSClient:
             "name": workflow_name,
             "class_name": None,
             "queue_name": queue_name,
-            "app_version": app_version,
+            "app_version": enqueue_options_internal["app_version"],
             "config_name": None,
             "authenticated_user": None,
             "assumed_role": None,
@@ -284,7 +284,7 @@ class DBOSClient:
         self,
         *,
         workflow_ids: Optional[List[str]] = None,
-        status: Optional[str] = None,
+        status: Optional[Union[str, List[str]]] = None,
         start_time: Optional[str] = None,
         end_time: Optional[str] = None,
         name: Optional[str] = None,
@@ -294,6 +294,8 @@ class DBOSClient:
         offset: Optional[int] = None,
         sort_desc: bool = False,
         workflow_id_prefix: Optional[str] = None,
+        load_input: bool = True,
+        load_output: bool = True,
     ) -> List[WorkflowStatus]:
         return list_workflows(
             self._sys_db,
@@ -308,13 +310,15 @@ class DBOSClient:
             offset=offset,
             sort_desc=sort_desc,
             workflow_id_prefix=workflow_id_prefix,
+            load_input=load_input,
+            load_output=load_output,
         )

     async def list_workflows_async(
         self,
         *,
         workflow_ids: Optional[List[str]] = None,
-        status: Optional[str] = None,
+        status: Optional[Union[str, List[str]]] = None,
         start_time: Optional[str] = None,
         end_time: Optional[str] = None,
         name: Optional[str] = None,
@@ -324,6 +328,8 @@ class DBOSClient:
         offset: Optional[int] = None,
         sort_desc: bool = False,
         workflow_id_prefix: Optional[str] = None,
+        load_input: bool = True,
+        load_output: bool = True,
     ) -> List[WorkflowStatus]:
         return await asyncio.to_thread(
             self.list_workflows,
@@ -338,19 +344,22 @@ class DBOSClient:
             offset=offset,
             sort_desc=sort_desc,
             workflow_id_prefix=workflow_id_prefix,
+            load_input=load_input,
+            load_output=load_output,
         )

     def list_queued_workflows(
         self,
         *,
         queue_name: Optional[str] = None,
-        status: Optional[str] = None,
+        status: Optional[Union[str, List[str]]] = None,
         start_time: Optional[str] = None,
         end_time: Optional[str] = None,
         name: Optional[str] = None,
         limit: Optional[int] = None,
         offset: Optional[int] = None,
         sort_desc: bool = False,
+        load_input: bool = True,
     ) -> List[WorkflowStatus]:
         return list_queued_workflows(
             self._sys_db,
@@ -362,19 +371,21 @@ class DBOSClient:
             limit=limit,
             offset=offset,
             sort_desc=sort_desc,
+            load_input=load_input,
         )

     async def list_queued_workflows_async(
         self,
         *,
         queue_name: Optional[str] = None,
-        status: Optional[str] = None,
+        status: Optional[Union[str, List[str]]] = None,
         start_time: Optional[str] = None,
         end_time: Optional[str] = None,
         name: Optional[str] = None,
         limit: Optional[int] = None,
         offset: Optional[int] = None,
         sort_desc: bool = False,
+        load_input: bool = True,
     ) -> List[WorkflowStatus]:
         return await asyncio.to_thread(
             self.list_queued_workflows,
@@ -386,6 +397,7 @@ class DBOSClient:
             limit=limit,
             offset=offset,
             sort_desc=sort_desc,
+            load_input=load_input,
         )

     def list_workflow_steps(self, workflow_id: str) -> List[StepInfo]:
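
Taken together, the _client.py changes let DBOSClient.list_workflows and list_queued_workflows filter on multiple statuses and skip deserializing workflow inputs and outputs, while enqueue now forwards app_version through EnqueueOptionsInternal. A hedged sketch of a client call using the new keyword arguments, assuming a reachable system database (the connection string is a placeholder):

from dbos import DBOSClient

# Placeholder connection string; point this at your DBOS system database.
client = DBOSClient("postgresql://postgres:dbos@localhost:5432/dbos")

# status accepts either a single string or a list of statuses, and the new
# load_input/load_output flags avoid fetching serialized payloads when only
# metadata is needed.
workflows = client.list_workflows(
    status=["PENDING", "ENQUEUED"],
    limit=50,
    sort_desc=True,
    load_input=False,
    load_output=False,
)
for wf in workflows:
    print(wf.workflow_id, wf.status, wf.name)

Skipping input/output loading keeps listing calls cheap when payloads are large, which is presumably why the same flags are threaded through the admin server and Conductor paths above.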

dbos/_conductor/conductor.py
@@ -223,6 +223,8 @@ class ConductorWebsocket(threading.Thread):
                body = list_workflows_message.body
                infos = []
                try:
+                    load_input = body.get("load_input", False)
+                    load_output = body.get("load_output", False)
                    infos = list_workflows(
                        self.dbos._sys_db,
                        workflow_ids=body["workflow_uuids"],
@@ -235,6 +237,8 @@ class ConductorWebsocket(threading.Thread):
                        limit=body["limit"],
                        offset=body["offset"],
                        sort_desc=body["sort_desc"],
+                        load_input=load_input,
+                        load_output=load_output,
                    )
                except Exception as e:
                    error_message = f"Exception encountered when listing workflows: {traceback.format_exc()}"
@@ -257,6 +261,7 @@ class ConductorWebsocket(threading.Thread):
                q_body = list_queued_workflows_message.body
                infos = []
                try:
+                    q_load_input = q_body.get("load_input", False)
                    infos = list_queued_workflows(
                        self.dbos._sys_db,
                        start_time=q_body["start_time"],
@@ -267,6 +272,7 @@ class ConductorWebsocket(threading.Thread):
                        offset=q_body["offset"],
                        queue_name=q_body["queue_name"],
                        sort_desc=q_body["sort_desc"],
+                        load_input=q_load_input,
                    )
                except Exception as e:
                    error_message = f"Exception encountered when listing queued workflows: {traceback.format_exc()}"

dbos/_conductor/protocol.py
@@ -110,7 +110,7 @@ class RestartResponse(BaseMessage):
     error_message: Optional[str] = None


-class ListWorkflowsBody(TypedDict):
+class ListWorkflowsBody(TypedDict, total=False):
     workflow_uuids: List[str]
     workflow_name: Optional[str]
     authenticated_user: Optional[str]
@@ -121,6 +121,8 @@ class ListWorkflowsBody(TypedDict):
     limit: Optional[int]
     offset: Optional[int]
     sort_desc: bool
+    load_input: bool
+    load_output: bool


 @dataclass
@@ -209,7 +211,7 @@ class ListWorkflowsResponse(BaseMessage):
     error_message: Optional[str] = None


-class ListQueuedWorkflowsBody(TypedDict):
+class ListQueuedWorkflowsBody(TypedDict, total=False):
     workflow_name: Optional[str]
     start_time: Optional[str]
     end_time: Optional[str]
@@ -218,6 +220,7 @@ class ListQueuedWorkflowsBody(TypedDict):
     limit: Optional[int]
     offset: Optional[int]
     sort_desc: bool
+    load_input: bool


 @dataclass
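
Declaring ListWorkflowsBody and ListQueuedWorkflowsBody with total=False makes every key optional, which is why the conductor handler above reads the new load_input/load_output fields with body.get(..., False) rather than indexing: a message from an older Conductor that omits those keys still type-checks and degrades gracefully at runtime. A small standalone illustration of that TypedDict behavior (the class below is a stand-in, not the actual protocol type):

from typing import List, Optional, TypedDict


class ExampleListBody(TypedDict, total=False):
    # With total=False, no key is required, so older senders may omit any of them.
    workflow_uuids: List[str]
    workflow_name: Optional[str]
    sort_desc: bool
    load_input: bool
    load_output: bool


# A body sent by an older client that predates the load_* fields:
old_body: ExampleListBody = {"workflow_uuids": ["wf-1"], "sort_desc": True}

# Indexing old_body["load_input"] would raise KeyError; .get() with a default
# mirrors how the conductor handler falls back to False.
print(old_body.get("load_input", False), old_body.get("load_output", False))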

dbos/_context.py
@@ -93,6 +93,8 @@ class DBOSContext:
         self.assumed_role: Optional[str] = None
         self.step_status: Optional[StepStatus] = None

+        self.app_version: Optional[str] = None
+
         # A user-specified workflow timeout. Takes priority over a propagated deadline.
         self.workflow_timeout_ms: Optional[int] = None
         # A propagated workflow deadline.
@@ -138,23 +140,18 @@ class DBOSContext:
         self,
         wfid: Optional[str],
         attributes: TracedAttributes,
-        is_temp_workflow: bool = False,
     ) -> None:
         if wfid is None or len(wfid) == 0:
             wfid = self.assign_workflow_id()
             self.id_assigned_for_next_workflow = ""
         self.workflow_id = wfid
         self.function_id = 0
-        if not is_temp_workflow:
-            self._start_span(attributes)
+        self._start_span(attributes)

-    def end_workflow(
-        self, exc_value: Optional[BaseException], is_temp_workflow: bool = False
-    ) -> None:
+    def end_workflow(self, exc_value: Optional[BaseException]) -> None:
         self.workflow_id = ""
         self.function_id = -1
-        if not is_temp_workflow:
-            self._end_span(exc_value)
+        self._end_span(exc_value)

     def is_within_workflow(self) -> bool:
         return len(self.workflow_id) > 0
@@ -435,7 +432,11 @@ class SetEnqueueOptions:
     """

     def __init__(
-        self, *, deduplication_id: Optional[str] = None, priority: Optional[int] = None
+        self,
+        *,
+        deduplication_id: Optional[str] = None,
+        priority: Optional[int] = None,
+        app_version: Optional[str] = None,
     ) -> None:
         self.created_ctx = False
         self.deduplication_id: Optional[str] = deduplication_id
@@ -446,6 +447,8 @@ class SetEnqueueOptions:
         )
         self.priority: Optional[int] = priority
         self.saved_priority: Optional[int] = None
+        self.app_version: Optional[str] = app_version
+        self.saved_app_version: Optional[str] = None

     def __enter__(self) -> SetEnqueueOptions:
         # Code to create a basic context
@@ -458,6 +461,8 @@ class SetEnqueueOptions:
         ctx.deduplication_id = self.deduplication_id
         self.saved_priority = ctx.priority
         ctx.priority = self.priority
+        self.saved_app_version = ctx.app_version
+        ctx.app_version = self.app_version
         return self

     def __exit__(
@@ -469,6 +474,7 @@ class SetEnqueueOptions:
         curr_ctx = assert_current_dbos_context()
         curr_ctx.deduplication_id = self.saved_deduplication_id
         curr_ctx.priority = self.saved_priority
+        curr_ctx.app_version = self.saved_app_version
         # Code to clean up the basic context if we created it
         if self.created_ctx:
             _clear_local_dbos_context()
@@ -479,7 +485,6 @@ class EnterDBOSWorkflow(AbstractContextManager[DBOSContext, Literal[False]]):
     def __init__(self, attributes: TracedAttributes) -> None:
         self.created_ctx = False
         self.attributes = attributes
-        self.is_temp_workflow = attributes["name"] == "temp_wf"
         self.saved_workflow_timeout: Optional[int] = None
         self.saved_deduplication_id: Optional[str] = None
         self.saved_priority: Optional[int] = None
@@ -503,7 +508,7 @@ class EnterDBOSWorkflow(AbstractContextManager[DBOSContext, Literal[False]]):
         self.saved_priority = ctx.priority
         ctx.priority = None
         ctx.start_workflow(
-            None, self.attributes, self.is_temp_workflow
+            None, self.attributes
         )  # Will get from the context's next workflow ID
         return ctx

@@ -515,7 +520,7 @@ class EnterDBOSWorkflow(AbstractContextManager[DBOSContext, Literal[False]]):
     ) -> Literal[False]:
         ctx = assert_current_dbos_context()
         assert ctx.is_within_workflow()
-        ctx.end_workflow(exc_value, self.is_temp_workflow)
+        ctx.end_workflow(exc_value)
         # Restore the saved workflow timeout
         ctx.workflow_timeout_ms = self.saved_workflow_timeout
         # Clear any propagating timeout
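
With app_version stored on DBOSContext and plumbed through SetEnqueueOptions, an enqueue can be pinned to a specific application version; the value is saved on __enter__ and restored on __exit__, just like deduplication_id and priority. A hedged usage sketch, assuming the usual DBOS queue API and a hypothetical version string:

from dbos import DBOS, Queue, SetEnqueueOptions

queue = Queue("example_queue")  # hypothetical queue


@DBOS.workflow()
def example_workflow(x: int) -> int:
    return x * 2


def enqueue_pinned(x: int) -> int:
    # Requires DBOS to be configured and launched before this is called.
    # Enqueue under a specific application version so that only executors
    # running that version pick the workflow up; "abc123" is a placeholder.
    with SetEnqueueOptions(app_version="abc123"):
        handle = queue.enqueue(example_workflow, x)
    return handle.get_result()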

dbos/_core.py
@@ -270,7 +270,12 @@ def _init_workflow(
         "output": None,
         "error": None,
         "app_id": ctx.app_id,
-        "app_version": GlobalParams.app_version,
+        "app_version": (
+            enqueue_options["app_version"]
+            if enqueue_options is not None
+            and enqueue_options["app_version"] is not None
+            else GlobalParams.app_version
+        ),
         "executor_id": ctx.executor_id,
         "recovery_attempts": None,
         "authenticated_user": ctx.authenticated_user,
@@ -387,7 +392,7 @@ def _execute_workflow_wthread(
     **kwargs: Any,
 ) -> R:
     attributes: TracedAttributes = {
-        "name": func.__name__,
+        "name": get_dbos_func_name(func),
         "operationType": OperationType.WORKFLOW.value,
     }
     with DBOSContextSwap(ctx):
@@ -420,7 +425,7 @@ async def _execute_workflow_async(
     **kwargs: Any,
 ) -> R:
     attributes: TracedAttributes = {
-        "name": func.__name__,
+        "name": get_dbos_func_name(func),
         "operationType": OperationType.WORKFLOW.value,
     }
     with DBOSContextSwap(ctx):
@@ -445,7 +450,8 @@ def execute_workflow_by_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[An
     wf_func = dbos._registry.workflow_info_map.get(status["name"], None)
     if not wf_func:
         raise DBOSWorkflowFunctionNotFoundError(
-            workflow_id, "Workflow function not found"
+            workflow_id,
+            f"Cannot execute workflow because {status['name']} is not a registered workflow function",
         )
     with DBOSContextEnsure():
         # If this function belongs to a configured class, add that class instance as its first argument
@@ -526,7 +532,8 @@ def start_workflow(
     fi = get_func_info(func)
     if fi is None:
         raise DBOSWorkflowFunctionNotFoundError(
-            "<NONE>", f"start_workflow: function {func.__name__} is not registered"
+            "<NONE>",
+            f"start_workflow: function {func.__name__} is not registered",
         )

     func = cast("Workflow[P, R]", func.__orig_func)  # type: ignore
@@ -546,6 +553,7 @@ def start_workflow(
     enqueue_options = EnqueueOptionsInternal(
         deduplication_id=local_ctx.deduplication_id if local_ctx is not None else None,
         priority=local_ctx.priority if local_ctx is not None else None,
+        app_version=local_ctx.app_version if local_ctx is not None else None,
     )
     new_wf_id, new_wf_ctx = _get_new_wf()

@@ -620,7 +628,8 @@ async def start_workflow_async(
     fi = get_func_info(func)
     if fi is None:
         raise DBOSWorkflowFunctionNotFoundError(
-            "<NONE>", f"start_workflow: function {func.__name__} is not registered"
+            "<NONE>",
+            f"start_workflow: function {func.__name__} is not registered",
         )

     func = cast("Workflow[P, R]", func.__orig_func)  # type: ignore
@@ -637,6 +646,7 @@ async def start_workflow_async(
     enqueue_options = EnqueueOptionsInternal(
         deduplication_id=local_ctx.deduplication_id if local_ctx is not None else None,
         priority=local_ctx.priority if local_ctx is not None else None,
+        app_version=local_ctx.app_version if local_ctx is not None else None,
     )
     new_wf_id, new_wf_ctx = _get_new_wf()

@@ -723,13 +733,13 @@ def workflow_wrapper(
         assert fi is not None
         if dbosreg.dbos is None:
             raise DBOSException(
-                f"Function {func.__name__} invoked before DBOS initialized"
+                f"Function {get_dbos_func_name(func)} invoked before DBOS initialized"
             )
         dbos = dbosreg.dbos

         rr: Optional[str] = check_required_roles(func, fi)
         attributes: TracedAttributes = {
-            "name": func.__name__,
+            "name": get_dbos_func_name(func),
             "operationType": OperationType.WORKFLOW.value,
         }
         inputs: WorkflowInputs = {
@@ -829,27 +839,30 @@ def workflow_wrapper(


 def decorate_workflow(
-    reg: "DBOSRegistry", max_recovery_attempts: Optional[int]
+    reg: "DBOSRegistry", name: Optional[str], max_recovery_attempts: Optional[int]
 ) -> Callable[[Callable[P, R]], Callable[P, R]]:
     def _workflow_decorator(func: Callable[P, R]) -> Callable[P, R]:
         wrapped_func = workflow_wrapper(reg, func, max_recovery_attempts)
-        reg.register_wf_function(func.__qualname__, wrapped_func, "workflow")
+        func_name = name if name is not None else func.__qualname__
+        set_dbos_func_name(func, func_name)
+        set_dbos_func_name(wrapped_func, func_name)
+        reg.register_wf_function(func_name, wrapped_func, "workflow")
         return wrapped_func

     return _workflow_decorator


 def decorate_transaction(
-    dbosreg: "DBOSRegistry", isolation_level: "IsolationLevel" = "SERIALIZABLE"
+    dbosreg: "DBOSRegistry", name: Optional[str], isolation_level: "IsolationLevel"
 ) -> Callable[[F], F]:
     def decorator(func: F) -> F:

-        transaction_name = func.__qualname__
+        transaction_name = name if name is not None else func.__qualname__

         def invoke_tx(*args: Any, **kwargs: Any) -> Any:
             if dbosreg.dbos is None:
                 raise DBOSException(
-                    f"Function {func.__name__} invoked before DBOS initialized"
+                    f"Function {transaction_name} invoked before DBOS initialized"
                 )

             dbos = dbosreg.dbos
@@ -857,12 +870,12 @@ def decorate_transaction(
             status = dbos._sys_db.get_workflow_status(ctx.workflow_id)
             if status and status["status"] == WorkflowStatusString.CANCELLED.value:
                 raise DBOSWorkflowCancelledError(
-                    f"Workflow {ctx.workflow_id} is cancelled. Aborting transaction {func.__name__}."
+                    f"Workflow {ctx.workflow_id} is cancelled. Aborting transaction {transaction_name}."
                 )

             with dbos._app_db.sessionmaker() as session:
                 attributes: TracedAttributes = {
-                    "name": func.__name__,
+                    "name": transaction_name,
                     "operationType": OperationType.TRANSACTION.value,
                 }
                 with EnterDBOSTransaction(session, attributes=attributes):
@@ -963,7 +976,7 @@ def decorate_transaction(
                         raise
                     except InvalidRequestError as invalid_request_error:
                         dbos.logger.error(
-                            f"InvalidRequestError in transaction {func.__qualname__} \033[1m Hint: Do not call commit() or rollback() within a DBOS transaction.\033[0m"
+                            f"InvalidRequestError in transaction {transaction_name} \033[1m Hint: Do not call commit() or rollback() within a DBOS transaction.\033[0m"
                         )
                         txn_error = invalid_request_error
                         raise
@@ -983,7 +996,7 @@ def decorate_transaction(

         if inspect.iscoroutinefunction(func):
             raise DBOSException(
-                f"Function {func.__name__} is a coroutine function, but DBOS.transaction does not support coroutine functions"
+                f"Function {transaction_name} is a coroutine function, but DBOS.transaction does not support coroutine functions"
             )

         fi = get_or_create_func_info(func)
@@ -1002,15 +1015,19 @@ def decorate_transaction(
                 with DBOSAssumeRole(rr):
                     return invoke_tx(*args, **kwargs)
             else:
-                tempwf = dbosreg.workflow_info_map.get("<temp>." + func.__qualname__)
+                tempwf = dbosreg.workflow_info_map.get("<temp>." + transaction_name)
                 assert tempwf
                 return tempwf(*args, **kwargs)

+        set_dbos_func_name(func, transaction_name)
+        set_dbos_func_name(wrapper, transaction_name)
+
         def temp_wf(*args: Any, **kwargs: Any) -> Any:
             return wrapper(*args, **kwargs)

         wrapped_wf = workflow_wrapper(dbosreg, temp_wf)
-        set_dbos_func_name(temp_wf, "<temp>." + func.__qualname__)
+        set_dbos_func_name(temp_wf, "<temp>." + transaction_name)
+        set_dbos_func_name(wrapped_wf, "<temp>." + transaction_name)
         set_temp_workflow_type(temp_wf, "transaction")
         dbosreg.register_wf_function(
             get_dbos_func_name(temp_wf), wrapped_wf, "transaction"
@@ -1027,24 +1044,25 @@ def decorate_transaction(
 def decorate_step(
     dbosreg: "DBOSRegistry",
     *,
-    retries_allowed: bool = False,
-    interval_seconds: float = 1.0,
-    max_attempts: int = 3,
-    backoff_rate: float = 2.0,
+    name: Optional[str],
+    retries_allowed: bool,
+    interval_seconds: float,
+    max_attempts: int,
+    backoff_rate: float,
 ) -> Callable[[Callable[P, R]], Callable[P, R]]:
     def decorator(func: Callable[P, R]) -> Callable[P, R]:

-        step_name = func.__qualname__
+        step_name = name if name is not None else func.__qualname__

         def invoke_step(*args: Any, **kwargs: Any) -> Any:
             if dbosreg.dbos is None:
                 raise DBOSException(
-                    f"Function {func.__name__} invoked before DBOS initialized"
+                    f"Function {step_name} invoked before DBOS initialized"
                 )
             dbos = dbosreg.dbos

             attributes: TracedAttributes = {
-                "name": func.__name__,
+                "name": step_name,
                 "operationType": OperationType.STEP.value,
             }

@@ -1123,7 +1141,7 @@ def decorate_step(
                 stepOutcome = stepOutcome.retry(
                     max_attempts,
                     on_exception,
-                    lambda i, e: DBOSMaxStepRetriesExceeded(func.__name__, i, e),
+                    lambda i, e: DBOSMaxStepRetriesExceeded(step_name, i, e),
                 )

             outcome = (
@@ -1139,20 +1157,23 @@ def decorate_step(
         def wrapper(*args: Any, **kwargs: Any) -> Any:
             rr: Optional[str] = check_required_roles(func, fi)
             # Entering step is allowed:
+            #  No DBOS, just call the original function directly
             #  In a step already, just call the original function directly.
             #  In a workflow (that is not in a step already)
             #  Not in a workflow (we will start the single op workflow)
+            if not dbosreg.dbos or not dbosreg.dbos._launched:
+                # Call the original function directly
+                return func(*args, **kwargs)
             ctx = get_local_dbos_context()
             if ctx and ctx.is_step():
                 # Call the original function directly
-
                 return func(*args, **kwargs)
             if ctx and ctx.is_within_workflow():
                 assert ctx.is_workflow(), "Steps must be called from within workflows"
                 with DBOSAssumeRole(rr):
                     return invoke_step(*args, **kwargs)
             else:
-                tempwf = dbosreg.workflow_info_map.get("<temp>." + func.__qualname__)
+                tempwf = dbosreg.workflow_info_map.get("<temp>." + step_name)
                 assert tempwf
                 return tempwf(*args, **kwargs)

@@ -1160,20 +1181,19 @@ def decorate_step(
             _mark_coroutine(wrapper) if inspect.iscoroutinefunction(func) else wrapper  # type: ignore
         )

+        set_dbos_func_name(func, step_name)
+        set_dbos_func_name(wrapper, step_name)
+
         def temp_wf_sync(*args: Any, **kwargs: Any) -> Any:
             return wrapper(*args, **kwargs)

         async def temp_wf_async(*args: Any, **kwargs: Any) -> Any:
             return await wrapper(*args, **kwargs)

-        # Other code in transact-py depends on the name of temporary workflow functions to be "temp_wf"
-        # so set the name of both sync and async temporary workflow functions explicitly
-        temp_wf_sync.__name__ = "temp_wf"
-        temp_wf_async.__name__ = "temp_wf"
-
         temp_wf = temp_wf_async if inspect.iscoroutinefunction(func) else temp_wf_sync
         wrapped_wf = workflow_wrapper(dbosreg, temp_wf)
-        set_dbos_func_name(temp_wf, "<temp>." + func.__qualname__)
+        set_dbos_func_name(temp_wf, "<temp>." + step_name)
+        set_dbos_func_name(wrapped_wf, "<temp>." + step_name)
         set_temp_workflow_type(temp_wf, "step")
         dbosreg.register_wf_function(get_dbos_func_name(temp_wf), wrapped_wf, "step")
         wrapper.__orig_func = temp_wf  # type: ignore
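
The decorator plumbing above threads an optional name through decorate_workflow, decorate_transaction, and decorate_step, so functions are registered and traced under that name (via set_dbos_func_name/get_dbos_func_name) instead of always using func.__qualname__, and steps now fall back to calling the undecorated function when DBOS has not been launched. A sketch of how custom names might be supplied, assuming the public decorators in _dbos.py (changed in this release but outside this section) expose the new name parameter:

from dbos import DBOS, DBOSConfig

# Placeholder configuration; the database URL is hypothetical.
config: DBOSConfig = {
    "name": "example-app",
    "database_url": "postgresql://postgres:dbos@localhost:5432/example",
}
DBOS(config=config)


# Assumed API: registering the step and workflow under explicit names rather
# than their __qualname__, mirroring the name plumbing added in _core.py.
@DBOS.step(name="fetch_data", retries_allowed=True, max_attempts=3)
def fetch(url: str) -> str:
    return f"payload from {url}"


@DBOS.workflow(name="ingest_workflow")
def ingest(url: str) -> str:
    return fetch(url)


if __name__ == "__main__":
    DBOS.launch()
    print(ingest("https://example.com"))

Because recovery and enqueue-by-name look workflows up by their registered name, an explicit name also keeps identifiers stable if the function is later moved or renamed in source.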