dbos 1.8.0a5__tar.gz → 1.9.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dbos might be problematic.

Files changed (109)
  1. {dbos-1.8.0a5 → dbos-1.9.0}/PKG-INFO +1 -1
  2. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_core.py +2 -1
  3. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_dbos.py +25 -53
  4. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_error.py +5 -5
  5. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_sys_db.py +12 -7
  6. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/cli/cli.py +8 -8
  7. {dbos-1.8.0a5 → dbos-1.9.0}/pyproject.toml +1 -1
  8. {dbos-1.8.0a5 → dbos-1.9.0}/tests/test_admin_server.py +13 -8
  9. {dbos-1.8.0a5 → dbos-1.9.0}/tests/test_async.py +15 -9
  10. {dbos-1.8.0a5 → dbos-1.9.0}/tests/test_async_workflow_management.py +3 -3
  11. {dbos-1.8.0a5 → dbos-1.9.0}/tests/test_classdecorators.py +54 -0
  12. {dbos-1.8.0a5 → dbos-1.9.0}/tests/test_concurrency.py +9 -3
  13. {dbos-1.8.0a5 → dbos-1.9.0}/tests/test_dbos.py +40 -20
  14. {dbos-1.8.0a5 → dbos-1.9.0}/tests/test_failures.py +15 -8
  15. {dbos-1.8.0a5 → dbos-1.9.0}/tests/test_fastapi.py +3 -1
  16. {dbos-1.8.0a5 → dbos-1.9.0}/tests/test_flask.py +3 -1
  17. {dbos-1.8.0a5 → dbos-1.9.0}/tests/test_queue.py +12 -7
  18. {dbos-1.8.0a5 → dbos-1.9.0}/tests/test_scheduler.py +6 -1
  19. {dbos-1.8.0a5 → dbos-1.9.0}/tests/test_singleton.py +0 -24
  20. {dbos-1.8.0a5 → dbos-1.9.0}/tests/test_workflow_introspection.py +27 -9
  21. {dbos-1.8.0a5 → dbos-1.9.0}/tests/test_workflow_management.py +10 -6
  22. {dbos-1.8.0a5 → dbos-1.9.0}/LICENSE +0 -0
  23. {dbos-1.8.0a5 → dbos-1.9.0}/README.md +0 -0
  24. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/__init__.py +0 -0
  25. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/__main__.py +0 -0
  26. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_admin_server.py +0 -0
  27. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_app_db.py +0 -0
  28. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_classproperty.py +0 -0
  29. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_client.py +0 -0
  30. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_conductor/conductor.py +0 -0
  31. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_conductor/protocol.py +0 -0
  32. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_context.py +0 -0
  33. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_croniter.py +0 -0
  34. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_dbos_config.py +0 -0
  35. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_debug.py +0 -0
  36. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_docker_pg_helper.py +0 -0
  37. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_event_loop.py +0 -0
  38. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_fastapi.py +0 -0
  39. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_flask.py +0 -0
  40. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_kafka.py +0 -0
  41. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_kafka_message.py +0 -0
  42. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_logger.py +0 -0
  43. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_migrations/env.py +0 -0
  44. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_migrations/script.py.mako +0 -0
  45. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_migrations/versions/04ca4f231047_workflow_queues_executor_id.py +0 -0
  46. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_migrations/versions/27ac6900c6ad_add_queue_dedup.py +0 -0
  47. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_migrations/versions/50f3227f0b4b_fix_job_queue.py +0 -0
  48. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_migrations/versions/5c361fc04708_added_system_tables.py +0 -0
  49. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_migrations/versions/66478e1b95e5_consolidate_queues.py +0 -0
  50. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_migrations/versions/83f3732ae8e7_workflow_timeout.py +0 -0
  51. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_migrations/versions/933e86bdac6a_add_queue_priority.py +0 -0
  52. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_migrations/versions/a3b18ad34abe_added_triggers.py +0 -0
  53. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_migrations/versions/d76646551a6b_job_queue_limiter.py +0 -0
  54. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_migrations/versions/d76646551a6c_workflow_queue.py +0 -0
  55. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_migrations/versions/d994145b47b6_consolidate_inputs.py +0 -0
  56. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_migrations/versions/eab0cc1d9a14_job_queue.py +0 -0
  57. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py +0 -0
  58. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_outcome.py +0 -0
  59. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_queue.py +0 -0
  60. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_recovery.py +0 -0
  61. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_registrations.py +0 -0
  62. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_roles.py +0 -0
  63. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_scheduler.py +0 -0
  64. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_schemas/__init__.py +0 -0
  65. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_schemas/application_database.py +0 -0
  66. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_schemas/system_database.py +0 -0
  67. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_serialization.py +0 -0
  68. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_templates/dbos-db-starter/README.md +0 -0
  69. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_templates/dbos-db-starter/__package/__init__.py +0 -0
  70. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_templates/dbos-db-starter/__package/main.py.dbos +0 -0
  71. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_templates/dbos-db-starter/__package/schema.py +0 -0
  72. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_templates/dbos-db-starter/alembic.ini +0 -0
  73. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +0 -0
  74. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_templates/dbos-db-starter/migrations/env.py.dbos +0 -0
  75. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_templates/dbos-db-starter/migrations/script.py.mako +0 -0
  76. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py +0 -0
  77. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -0
  78. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_tracer.py +0 -0
  79. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_utils.py +0 -0
  80. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/_workflow_commands.py +0 -0
  81. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/cli/_github_init.py +0 -0
  82. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/cli/_template_init.py +0 -0
  83. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/dbos-config.schema.json +0 -0
  84. {dbos-1.8.0a5 → dbos-1.9.0}/dbos/py.typed +0 -0
  85. {dbos-1.8.0a5 → dbos-1.9.0}/tests/__init__.py +0 -0
  86. {dbos-1.8.0a5 → dbos-1.9.0}/tests/atexit_no_ctor.py +0 -0
  87. {dbos-1.8.0a5 → dbos-1.9.0}/tests/atexit_no_launch.py +0 -0
  88. {dbos-1.8.0a5 → dbos-1.9.0}/tests/classdefs.py +0 -0
  89. {dbos-1.8.0a5 → dbos-1.9.0}/tests/client_collateral.py +0 -0
  90. {dbos-1.8.0a5 → dbos-1.9.0}/tests/client_worker.py +0 -0
  91. {dbos-1.8.0a5 → dbos-1.9.0}/tests/conftest.py +0 -0
  92. {dbos-1.8.0a5 → dbos-1.9.0}/tests/dupname_classdefs1.py +0 -0
  93. {dbos-1.8.0a5 → dbos-1.9.0}/tests/dupname_classdefsa.py +0 -0
  94. {dbos-1.8.0a5 → dbos-1.9.0}/tests/more_classdefs.py +0 -0
  95. {dbos-1.8.0a5 → dbos-1.9.0}/tests/queuedworkflow.py +0 -0
  96. {dbos-1.8.0a5 → dbos-1.9.0}/tests/test_cli.py +0 -0
  97. {dbos-1.8.0a5 → dbos-1.9.0}/tests/test_client.py +0 -0
  98. {dbos-1.8.0a5 → dbos-1.9.0}/tests/test_config.py +0 -0
  99. {dbos-1.8.0a5 → dbos-1.9.0}/tests/test_croniter.py +0 -0
  100. {dbos-1.8.0a5 → dbos-1.9.0}/tests/test_debug.py +0 -0
  101. {dbos-1.8.0a5 → dbos-1.9.0}/tests/test_docker_secrets.py +0 -0
  102. {dbos-1.8.0a5 → dbos-1.9.0}/tests/test_fastapi_roles.py +0 -0
  103. {dbos-1.8.0a5 → dbos-1.9.0}/tests/test_kafka.py +0 -0
  104. {dbos-1.8.0a5 → dbos-1.9.0}/tests/test_outcome.py +0 -0
  105. {dbos-1.8.0a5 → dbos-1.9.0}/tests/test_package.py +0 -0
  106. {dbos-1.8.0a5 → dbos-1.9.0}/tests/test_schema_migration.py +0 -0
  107. {dbos-1.8.0a5 → dbos-1.9.0}/tests/test_spans.py +0 -0
  108. {dbos-1.8.0a5 → dbos-1.9.0}/tests/test_sqlalchemy.py +0 -0
  109. {dbos-1.8.0a5 → dbos-1.9.0}/version/__init__.py +0 -0

PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: dbos
- Version: 1.8.0a5
+ Version: 1.9.0
  Summary: Ultra-lightweight durable execution in Python
  Author-Email: "DBOS, Inc." <contact@dbos.dev>
  License: MIT

dbos/_core.py

@@ -49,6 +49,7 @@ from ._context import (
  get_local_dbos_context,
  )
  from ._error import (
+ DBOSAwaitedWorkflowCancelledError,
  DBOSException,
  DBOSMaxStepRetriesExceeded,
  DBOSNonExistentWorkflowError,
@@ -370,7 +371,7 @@ def _get_wf_invoke_func(
  r: R = dbos._sys_db.await_workflow_result(status["workflow_uuid"])
  return r
  except DBOSWorkflowCancelledError as error:
- raise
+ raise DBOSAwaitedWorkflowCancelledError(status["workflow_uuid"])
  except Exception as error:
  if not dbos.debug_mode:
  dbos._sys_db.update_workflow_outcome(
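
With this change, awaiting the result of a cancelled workflow raises DBOSAwaitedWorkflowCancelledError rather than re-raising DBOSWorkflowCancelledError, which is what the updated tests below assert. A minimal sketch of what callers see, not taken from this diff (blocked_workflow is an illustrative placeholder):

    from dbos import DBOS
    from dbos._error import DBOSAwaitedWorkflowCancelledError

    @DBOS.workflow()
    def blocked_workflow() -> None:
        while True:
            DBOS.sleep(0.1)  # loop until the workflow is cancelled

    handle = DBOS.start_workflow(blocked_workflow)
    DBOS.cancel_workflow(handle.workflow_id)
    try:
        handle.get_result()
    except DBOSAwaitedWorkflowCancelledError:
        pass  # awaiting a cancelled workflow's result now surfaces this error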

dbos/_dbos.py

@@ -1,7 +1,6 @@
  from __future__ import annotations

  import asyncio
- import atexit
  import hashlib
  import inspect
  import os
@@ -1219,39 +1218,40 @@ class DBOS:
  return rv

  @classproperty
- def workflow_id(cls) -> str:
- """Return the workflow ID for the current context, which must be executing a workflow function."""
- ctx = assert_current_dbos_context()
- assert (
- ctx.is_within_workflow()
- ), "workflow_id is only available within a DBOS operation."
- return ctx.workflow_id
+ def workflow_id(cls) -> Optional[str]:
+ """Return the ID of the currently executing workflow. If a workflow is not executing, return None."""
+ ctx = get_local_dbos_context()
+ if ctx and ctx.is_within_workflow():
+ return ctx.workflow_id
+ else:
+ return None

  @classproperty
- def step_id(cls) -> int:
- """Return the step ID for the currently executing step. This is a unique identifier of the current step within the workflow."""
- ctx = assert_current_dbos_context()
- assert (
- ctx.is_step() or ctx.is_transaction()
- ), "step_id is only available within a DBOS step."
- return ctx.function_id
+ def step_id(cls) -> Optional[int]:
+ """Return the step ID for the currently executing step. This is a unique identifier of the current step within the workflow. If a step is not currently executing, return None."""
+ ctx = get_local_dbos_context()
+ if ctx and (ctx.is_step() or ctx.is_transaction()):
+ return ctx.function_id
+ else:
+ return None

  @classproperty
- def step_status(cls) -> StepStatus:
- """Return the status of the currently executing step."""
- ctx = assert_current_dbos_context()
- assert ctx.is_step(), "step_status is only available within a DBOS step."
- assert ctx.step_status is not None
- return ctx.step_status
+ def step_status(cls) -> Optional[StepStatus]:
+ """Return the status of the currently executing step. If a step is not currently executing, return None."""
+ ctx = get_local_dbos_context()
+ if ctx and ctx.is_step():
+ return ctx.step_status
+ else:
+ return None

  @classproperty
  def parent_workflow_id(cls) -> str:
  """
- Return the workflow ID for the parent workflow.
-
- `parent_workflow_id` must be accessed from within a workflow function.
+ This method is deprecated and should not be used.
  """
-
+ dbos_logger.warning(
+ "DBOS.parent_workflow_id is deprecated and should not be used"
+ )
  ctx = assert_current_dbos_context()
  assert (
  ctx.is_within_workflow()
@@ -1376,31 +1376,3 @@ class DBOSConfiguredInstance:
  def __init__(self, config_name: str) -> None:
  self.config_name = config_name
  DBOS.register_instance(self)
-
-
- # Apps that import DBOS probably don't exit. If they do, let's see if
- # it looks like startup was abandoned or a call was forgotten...
- def _dbos_exit_hook() -> None:
- if _dbos_global_registry is None:
- # Probably used as or for a support module
- return
- if _dbos_global_instance is None:
- print("DBOS exiting; functions were registered but DBOS() was not called")
- dbos_logger.warning(
- "DBOS exiting; functions were registered but DBOS() was not called"
- )
- return
- if not _dbos_global_instance._launched:
- if _dbos_global_instance.fastapi is not None:
- # FastAPI lifespan middleware will call launch/destroy, so we can ignore this.
- # This is likely to happen during fastapi dev runs, where the reloader loads the module multiple times.
- return
- print("DBOS exiting; DBOS exists but launch() was not called")
- dbos_logger.warning("DBOS exiting; DBOS exists but launch() was not called")
- return
- # If we get here, we're exiting normally
- _dbos_global_instance.destroy()
-
-
- # Register the exit hook
- atexit.register(_dbos_exit_hook)
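
Taken together, these accessors now degrade gracefully: DBOS.workflow_id, DBOS.step_id, and DBOS.step_status return None when no workflow or step is executing instead of failing an assertion, so callers narrow the Optional themselves, as the updated tests further down do. A minimal sketch, not part of this diff (example_workflow is illustrative):

    from typing import Optional

    from dbos import DBOS

    @DBOS.workflow()
    def example_workflow() -> str:
        workflow_id: Optional[str] = DBOS.workflow_id  # Optional[str] as of 1.9.0
        assert workflow_id is not None  # always set inside a workflow
        return workflow_id

    # Outside any workflow or step, these now return None rather than raising.
    assert DBOS.workflow_id is None
    assert DBOS.step_id is None
    assert DBOS.step_status is None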

dbos/_error.py

@@ -55,7 +55,7 @@ class DBOSErrorCode(Enum):
  InitializationError = 3
  WorkflowFunctionNotFound = 4
  NonExistentWorkflowError = 5
- DeadLetterQueueError = 6
+ MaxRecoveryAttemptsExceeded = 6
  MaxStepRetriesExceeded = 7
  NotAuthorized = 8
  ConflictingWorkflowError = 9
@@ -121,13 +121,13 @@ class DBOSNonExistentWorkflowError(DBOSException):
  )


- class DBOSDeadLetterQueueError(DBOSException):
- """Exception raised when a workflow database record does not exist for a given ID."""
+ class MaxRecoveryAttemptsExceededError(DBOSException):
+ """Exception raised when a workflow exceeds its max recovery attempts."""

  def __init__(self, wf_id: str, max_retries: int):
  super().__init__(
- f"Workflow {wf_id} has been moved to the dead-letter queue after exceeding the maximum of {max_retries} retries",
- dbos_error_code=DBOSErrorCode.DeadLetterQueueError.value,
+ f"Workflow {wf_id} has exceeded its maximum of {max_retries} execution or recovery attempts. Further attempts to execute or recover it will fail. See documentation for details: https://docs.dbos.dev/python/reference/decorators",
+ dbos_error_code=DBOSErrorCode.MaxRecoveryAttemptsExceeded.value,
  )


dbos/_sys_db.py

@@ -37,12 +37,12 @@ from ._context import get_local_dbos_context
  from ._error import (
  DBOSAwaitedWorkflowCancelledError,
  DBOSConflictingWorkflowError,
- DBOSDeadLetterQueueError,
  DBOSNonExistentWorkflowError,
  DBOSQueueDeduplicatedError,
  DBOSUnexpectedStepError,
  DBOSWorkflowCancelledError,
  DBOSWorkflowConflictIDError,
+ MaxRecoveryAttemptsExceededError,
  )
  from ._logger import dbos_logger
  from ._schemas.system_database import SystemSchema
@@ -57,20 +57,25 @@ class WorkflowStatusString(Enum):
  PENDING = "PENDING"
  SUCCESS = "SUCCESS"
  ERROR = "ERROR"
- RETRIES_EXCEEDED = "RETRIES_EXCEEDED"
+ MAX_RECOVERY_ATTEMPTS_EXCEEDED = "MAX_RECOVERY_ATTEMPTS_EXCEEDED"
  CANCELLED = "CANCELLED"
  ENQUEUED = "ENQUEUED"


  WorkflowStatuses = Literal[
- "PENDING", "SUCCESS", "ERROR", "RETRIES_EXCEEDED", "CANCELLED", "ENQUEUED"
+ "PENDING",
+ "SUCCESS",
+ "ERROR",
+ "MAX_RECOVERY_ATTEMPTS_EXCEEDED",
+ "CANCELLED",
+ "ENQUEUED",
  ]


  class WorkflowStatus:
  # The workflow ID
  workflow_id: str
- # The workflow status. Must be one of ENQUEUED, PENDING, SUCCESS, ERROR, CANCELLED, or RETRIES_EXCEEDED
+ # The workflow status. Must be one of ENQUEUED, PENDING, SUCCESS, ERROR, CANCELLED, or MAX_RECOVERY_ATTEMPTS_EXCEEDED
  status: str
  # The name of the workflow function
  name: str
@@ -515,7 +520,7 @@ class SystemDatabase:
  raise DBOSConflictingWorkflowError(status["workflow_uuid"], err_msg)

  # Every time we start executing a workflow (and thus attempt to insert its status), we increment `recovery_attempts` by 1.
- # When this number becomes equal to `maxRetries + 1`, we mark the workflow as `RETRIES_EXCEEDED`.
+ # When this number becomes equal to `maxRetries + 1`, we mark the workflow as `MAX_RECOVERY_ATTEMPTS_EXCEEDED`.
  if (
  (wf_status != "SUCCESS" and wf_status != "ERROR")
  and max_recovery_attempts is not None
@@ -532,7 +537,7 @@ class SystemDatabase:
  == WorkflowStatusString.PENDING.value
  )
  .values(
- status=WorkflowStatusString.RETRIES_EXCEEDED.value,
+ status=WorkflowStatusString.MAX_RECOVERY_ATTEMPTS_EXCEEDED.value,
  deduplication_id=None,
  started_at_epoch_ms=None,
  queue_name=None,
@@ -541,7 +546,7 @@ class SystemDatabase:
  conn.execute(dlq_cmd)
  # Need to commit here because we're throwing an exception
  conn.commit()
- raise DBOSDeadLetterQueueError(
+ raise MaxRecoveryAttemptsExceededError(
  status["workflow_uuid"], max_recovery_attempts
  )
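
The dead-letter-queue terminology is gone: a workflow that exceeds max_recovery_attempts is now marked MAX_RECOVERY_ATTEMPTS_EXCEEDED (formerly RETRIES_EXCEEDED), and the status-insert path raises MaxRecoveryAttemptsExceededError (formerly DBOSDeadLetterQueueError). A minimal sketch of handling the renamed error, not part of this diff, assuming wfid identifies a workflow that has already exhausted its recovery attempts:

    from dbos import DBOS, SetWorkflowID
    from dbos._error import MaxRecoveryAttemptsExceededError

    try:
        with SetWorkflowID(wfid):  # wfid: illustrative, an already-exhausted workflow ID
            my_workflow()  # my_workflow: illustrative placeholder
    except MaxRecoveryAttemptsExceededError as e:
        DBOS.logger.error(str(e))  # further execution or recovery attempts will fail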

dbos/cli/cli.py

@@ -10,7 +10,7 @@ from typing import Any, Optional
  import jsonpickle # type: ignore
  import sqlalchemy as sa
  import typer
- from rich import print
+ from rich import print as richprint
  from rich.prompt import IntPrompt
  from typing_extensions import Annotated, List

@@ -196,7 +196,7 @@ def init(
  path.join(templates_dir, template), project_name, config_mode=config
  )
  except Exception as e:
- print(f"[red]{e}[/red]")
+ richprint(f"[red]{e}[/red]")


  def _resolve_project_name_and_template(
@@ -217,9 +217,9 @@ def _resolve_project_name_and_template(
  if template not in templates:
  raise Exception(f"Template {template} not found in {templates_dir}")
  else:
- print("\n[bold]Available templates:[/bold]")
+ richprint("\n[bold]Available templates:[/bold]")
  for idx, template_name in enumerate(templates, 1):
- print(f" {idx}. {template_name}")
+ richprint(f" {idx}. {template_name}")
  while True:
  try:
  choice = IntPrompt.ask(
@@ -231,13 +231,13 @@ def _resolve_project_name_and_template(
  template = templates[choice - 1]
  break
  else:
- print(
+ richprint(
  "[red]Invalid selection. Please choose a number from the list.[/red]"
  )
  except (KeyboardInterrupt, EOFError):
  raise typer.Abort()
  except ValueError:
- print("[red]Please enter a valid number.[/red]")
+ richprint("[red]Please enter a valid number.[/red]")

  if template in git_templates:
  if project_name is None:
@@ -450,7 +450,7 @@ def list(
  typer.Option(
  "--status",
  "-S",
- help="Retrieve workflows with this status (PENDING, SUCCESS, ERROR, RETRIES_EXCEEDED, ENQUEUED, or CANCELLED)",
+ help="Retrieve workflows with this status (PENDING, SUCCESS, ERROR, ENQUEUED, CANCELLED, or MAX_RECOVERY_ATTEMPTS_EXCEEDED)",
  ),
  ] = None,
  appversion: Annotated[
@@ -657,7 +657,7 @@ def list_queue(
  typer.Option(
  "--status",
  "-S",
- help="Retrieve functions with this status (PENDING, SUCCESS, ERROR, RETRIES_EXCEEDED, ENQUEUED, or CANCELLED)",
+ help="Retrieve functions with this status (PENDING, SUCCESS, ERROR, ENQUEUED, CANCELLED, or MAX_RECOVERY_ATTEMPTS_EXCEEDED)",
  ),
  ] = None,
  queue_name: Annotated[

pyproject.toml

@@ -27,7 +27,7 @@ dependencies = [
  ]
  requires-python = ">=3.9"
  readme = "README.md"
- version = "1.8.0a5"
+ version = "1.9.0"

  [project.license]
  text = "MIT"

tests/test_admin_server.py

@@ -20,7 +20,7 @@ from dbos import (
  WorkflowHandle,
  _workflow_commands,
  )
- from dbos._error import DBOSWorkflowCancelledError
+ from dbos._error import DBOSAwaitedWorkflowCancelledError
  from dbos._schemas.system_database import SystemSchema
  from dbos._sys_db import SystemDatabase, WorkflowStatusString
  from dbos._utils import INTERNAL_QUEUE_NAME, GlobalParams
@@ -104,7 +104,7 @@ def test_deactivate(dbos: DBOS, config: DBOSConfig) -> None:
  assert event.is_set()
  # Verify the scheduled workflow does not run anymore
  time.sleep(5)
- assert wf_counter <= val + 1
+ assert wf_counter <= val + 2
  # Enqueue a workflow, verify it still runs
  assert queue.enqueue(regular_workflow).get_result() == 5

@@ -142,7 +142,6 @@ def test_admin_recovery(config: DBOSConfig) -> None:

  @DBOS.workflow()
  def test_workflow(var: str, var2: str) -> str:
- DBOS.logger.info("WFID: " + DBOS.workflow_id)
  nonlocal wf_counter
  wf_counter += 1
  res = test_step(var2)
@@ -307,7 +306,7 @@ def test_admin_workflow_resume(dbos: DBOS, sys_db: SystemDatabase) -> None:
  )
  assert response.status_code == 204
  event.set()
- with pytest.raises(DBOSWorkflowCancelledError):
+ with pytest.raises(DBOSAwaitedWorkflowCancelledError):
  handle.get_result()
  info = _workflow_commands.get_workflow(sys_db, wfid)
  assert info is not None
@@ -464,7 +463,9 @@ def test_list_workflows(dbos: DBOS) -> None:

  @DBOS.workflow()
  def test_workflow_2(my_time: datetime) -> str:
- return DBOS.workflow_id + " completed at " + my_time.isoformat()
+ workflow_id = DBOS.workflow_id
+ assert workflow_id is not None
+ return workflow_id + " completed at " + my_time.isoformat()

  # Start workflows
  handle_1 = DBOS.start_workflow(test_workflow_1)
@@ -656,7 +657,9 @@ def test_get_workflow_by_id(dbos: DBOS) -> None:

  @DBOS.workflow()
  def test_workflow_2(my_time: datetime) -> str:
- return DBOS.workflow_id + " completed at " + my_time.isoformat()
+ workflow_id = DBOS.workflow_id
+ assert workflow_id is not None
+ return workflow_id + " completed at " + my_time.isoformat()

  # Start workflows
  handle_1 = DBOS.start_workflow(test_workflow_1)
@@ -713,7 +716,9 @@ def test_admin_garbage_collect(dbos: DBOS) -> None:

  @DBOS.workflow()
  def workflow() -> str:
- return DBOS.workflow_id
+ workflow_id = DBOS.workflow_id
+ assert workflow_id is not None
+ return workflow_id

  workflow()

@@ -745,7 +750,7 @@ def test_admin_global_timeout(dbos: DBOS) -> None:
  timeout=5,
  )
  response.raise_for_status()
- with pytest.raises(DBOSWorkflowCancelledError):
+ with pytest.raises(DBOSAwaitedWorkflowCancelledError):
  handle.get_result()


tests/test_async.py

@@ -18,7 +18,7 @@ from dbos import (
  from dbos._context import assert_current_dbos_context
  from dbos._dbos import WorkflowHandle
  from dbos._dbos_config import ConfigFile
- from dbos._error import DBOSException, DBOSWorkflowCancelledError
+ from dbos._error import DBOSAwaitedWorkflowCancelledError, DBOSException


  @pytest.mark.asyncio
@@ -32,12 +32,12 @@ async def test_async_workflow(dbos: DBOS) -> None:
  nonlocal wf_counter
  wf_counter += 1
  res1 = test_transaction(var1)
- res2 = test_step(var2)
+ res2 = await test_step(var2)
  DBOS.logger.info("I'm test_workflow")
  return res1 + res2

  @DBOS.step()
- def test_step(var: str) -> str:
+ async def test_step(var: str) -> str:
  nonlocal step_counter
  step_counter += 1
  DBOS.logger.info("I'm test_step")
@@ -73,6 +73,10 @@ async def test_async_workflow(dbos: DBOS) -> None:
  sync_handle = DBOS.start_workflow(test_workflow, "alice", "bob")
  assert sync_handle.get_result() == "alicetxn31bobstep3" # type: ignore

+ # Test DBOS.start_workflow_async on steps
+ handle = await DBOS.start_workflow_async(test_step, "alice")
+ assert (await handle.get_result()) == "alicestep4"
+

  @pytest.mark.asyncio
  async def test_async_step(dbos: DBOS) -> None:
@@ -298,7 +302,9 @@ async def test_sleep(dbos: DBOS) -> None:
  @DBOS.workflow()
  async def test_sleep_workflow(secs: float) -> str:
  await dbos.sleep_async(secs)
- return DBOS.workflow_id
+ workflow_id = DBOS.workflow_id
+ assert workflow_id is not None
+ return workflow_id

  start_time = time.time()
  sleep_uuid = await test_sleep_workflow(1.5)
@@ -486,20 +492,20 @@ async def test_workflow_timeout_async(dbos: DBOS) -> None:
  DBOS.sleep(0.1)

  with SetWorkflowTimeout(0.1):
- with pytest.raises(DBOSWorkflowCancelledError):
+ with pytest.raises(DBOSAwaitedWorkflowCancelledError):
  await blocked_workflow()
  handle = await DBOS.start_workflow_async(blocked_workflow)
- with pytest.raises(DBOSWorkflowCancelledError):
+ with pytest.raises(DBOSAwaitedWorkflowCancelledError):
  await handle.get_result()

  @DBOS.workflow()
  async def parent_workflow_with_timeout() -> None:
  assert assert_current_dbos_context().workflow_deadline_epoch_ms is None
  with SetWorkflowTimeout(0.1):
- with pytest.raises(DBOSWorkflowCancelledError):
+ with pytest.raises(DBOSAwaitedWorkflowCancelledError):
  await blocked_workflow()
  handle = await DBOS.start_workflow_async(blocked_workflow)
- with pytest.raises(DBOSWorkflowCancelledError):
+ with pytest.raises(DBOSAwaitedWorkflowCancelledError):
  await handle.get_result()
  assert assert_current_dbos_context().workflow_deadline_epoch_ms is None

@@ -520,7 +526,7 @@ async def test_workflow_timeout_async(dbos: DBOS) -> None:
  # Verify if a parent called with a timeout calls a blocked child
  # the deadline propagates and the children are also cancelled.
  with SetWorkflowTimeout(1.0):
- with pytest.raises(DBOSWorkflowCancelledError):
+ with pytest.raises(DBOSAwaitedWorkflowCancelledError):
  await parent_workflow()

  with pytest.raises(Exception) as exc_info:

tests/test_async_workflow_management.py

@@ -7,7 +7,7 @@ from typing import List
  import pytest

  from dbos import DBOS, Queue, SetWorkflowID
- from dbos._error import DBOSWorkflowCancelledError
+ from dbos._error import DBOSAwaitedWorkflowCancelledError
  from dbos._sys_db import StepInfo, WorkflowStatus
  from tests.conftest import queue_entries_are_cleaned_up

@@ -46,7 +46,7 @@ async def test_cancel_workflow_async(dbos: DBOS) -> None:
  await DBOS.cancel_workflow_async(wfid)
  workflow_event.set()

- with pytest.raises(DBOSWorkflowCancelledError):
+ with pytest.raises(DBOSAwaitedWorkflowCancelledError):
  handle.get_result()
  assert steps_completed == 1

@@ -85,7 +85,7 @@ async def test_resume_workflow_async(dbos: DBOS) -> None:
  DBOS.cancel_workflow(wfid)
  workflow_event.set()

- with pytest.raises(DBOSWorkflowCancelledError):
+ with pytest.raises(DBOSAwaitedWorkflowCancelledError):
  handle.get_result()
  assert steps_completed == 1


tests/test_classdecorators.py

@@ -10,6 +10,7 @@ from dbos import DBOS, DBOSConfiguredInstance, Queue, SetWorkflowID

  # Private API used because this is a test
  from dbos._context import DBOSContextEnsure, assert_current_dbos_context
+ from dbos._dbos_config import DBOSConfig
  from tests.conftest import queue_entries_are_cleaned_up


@@ -884,3 +885,56 @@ def test_mixed_methods(dbos: DBOS) -> None:
  status = handle.get_status()
  assert status.class_name == None
  assert status.config_name == None
+
+
+ def test_class_step_without_dbos(dbos: DBOS, config: DBOSConfig) -> None:
+ DBOS.destroy(destroy_registry=True)
+
+ @DBOS.dbos_class()
+ class TestClass(DBOSConfiguredInstance):
+ def __init__(self, x: int) -> None:
+ self.x = x
+ super().__init__("test")
+
+ @DBOS.step()
+ def step(self, x: int) -> int:
+ return self.x + x
+
+ input = 5
+ inst = TestClass(input)
+ assert inst.step(input) == input + input
+
+ DBOS(config=config)
+
+ assert inst.step(input) == input + input
+
+ DBOS.launch()
+
+ assert inst.step(input) == input + input
+
+
+ def test_class_with_only_steps(dbos: DBOS) -> None:
+
+ class StepClass:
+ def __init__(self, x: int) -> None:
+ self.x = x
+
+ @DBOS.step()
+ def step(self, x: int, expr: Callable[[int, int], int]) -> int:
+ return expr(self.x, x)
+
+ input = 5
+ inst = StepClass(5)
+
+ l = lambda x, y: x + y
+
+ @DBOS.workflow()
+ def test_workflow() -> int:
+ return inst.step(input, l) + inst.step(input, l)
+
+ handle = DBOS.start_workflow(test_workflow)
+ assert handle.get_result() == input * 4
+
+ steps = DBOS.list_workflow_steps(handle.workflow_id)
+ assert len(steps) == 2
+ assert steps[1]["output"] == steps[1]["output"] == input * 2

tests/test_concurrency.py

@@ -14,7 +14,9 @@ def test_concurrent_workflows(dbos: DBOS) -> None:
  @DBOS.workflow()
  def test_workflow() -> str:
  time.sleep(1)
- return DBOS.workflow_id
+ workflow_id = DBOS.workflow_id
+ assert workflow_id is not None
+ return workflow_id

  def test_thread(id: str) -> str:
  with SetWorkflowID(id):
@@ -48,7 +50,9 @@ def test_concurrent_conflict_uuid(dbos: DBOS) -> None:
  condition.notify()
  condition.release()

- return DBOS.workflow_id
+ workflow_id = DBOS.workflow_id
+ assert workflow_id is not None
+ return workflow_id

  @DBOS.workflow()
  def test_workflow() -> str:
@@ -74,7 +78,9 @@ def test_concurrent_conflict_uuid(dbos: DBOS) -> None:
  condition.notify()
  condition.release()

- return DBOS.workflow_id
+ workflow_id = DBOS.workflow_id
+ assert workflow_id is not None
+ return workflow_id

  def test_txn_thread(id: str) -> str:
  with SetWorkflowID(id):