dbos 1.8.0a5__tar.gz → 1.9.0a1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dbos might be problematic. See the registry page for more details.

Files changed (109)
  1. {dbos-1.8.0a5 → dbos-1.9.0a1}/PKG-INFO +1 -1
  2. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_dbos.py +25 -53
  3. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_error.py +5 -5
  4. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_sys_db.py +12 -7
  5. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/cli/cli.py +2 -2
  6. {dbos-1.8.0a5 → dbos-1.9.0a1}/pyproject.toml +1 -1
  7. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/test_admin_server.py +10 -5
  8. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/test_async.py +9 -3
  9. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/test_classdecorators.py +27 -0
  10. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/test_concurrency.py +9 -3
  11. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/test_dbos.py +34 -13
  12. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/test_failures.py +15 -8
  13. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/test_fastapi.py +3 -1
  14. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/test_flask.py +3 -1
  15. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/test_queue.py +10 -4
  16. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/test_scheduler.py +6 -1
  17. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/test_singleton.py +0 -24
  18. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/test_workflow_introspection.py +27 -9
  19. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/test_workflow_management.py +6 -2
  20. {dbos-1.8.0a5 → dbos-1.9.0a1}/LICENSE +0 -0
  21. {dbos-1.8.0a5 → dbos-1.9.0a1}/README.md +0 -0
  22. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/__init__.py +0 -0
  23. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/__main__.py +0 -0
  24. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_admin_server.py +0 -0
  25. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_app_db.py +0 -0
  26. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_classproperty.py +0 -0
  27. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_client.py +0 -0
  28. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_conductor/conductor.py +0 -0
  29. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_conductor/protocol.py +0 -0
  30. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_context.py +0 -0
  31. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_core.py +0 -0
  32. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_croniter.py +0 -0
  33. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_dbos_config.py +0 -0
  34. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_debug.py +0 -0
  35. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_docker_pg_helper.py +0 -0
  36. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_event_loop.py +0 -0
  37. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_fastapi.py +0 -0
  38. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_flask.py +0 -0
  39. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_kafka.py +0 -0
  40. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_kafka_message.py +0 -0
  41. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_logger.py +0 -0
  42. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_migrations/env.py +0 -0
  43. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_migrations/script.py.mako +0 -0
  44. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_migrations/versions/04ca4f231047_workflow_queues_executor_id.py +0 -0
  45. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_migrations/versions/27ac6900c6ad_add_queue_dedup.py +0 -0
  46. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_migrations/versions/50f3227f0b4b_fix_job_queue.py +0 -0
  47. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_migrations/versions/5c361fc04708_added_system_tables.py +0 -0
  48. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_migrations/versions/66478e1b95e5_consolidate_queues.py +0 -0
  49. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_migrations/versions/83f3732ae8e7_workflow_timeout.py +0 -0
  50. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_migrations/versions/933e86bdac6a_add_queue_priority.py +0 -0
  51. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_migrations/versions/a3b18ad34abe_added_triggers.py +0 -0
  52. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_migrations/versions/d76646551a6b_job_queue_limiter.py +0 -0
  53. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_migrations/versions/d76646551a6c_workflow_queue.py +0 -0
  54. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_migrations/versions/d994145b47b6_consolidate_inputs.py +0 -0
  55. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_migrations/versions/eab0cc1d9a14_job_queue.py +0 -0
  56. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py +0 -0
  57. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_outcome.py +0 -0
  58. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_queue.py +0 -0
  59. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_recovery.py +0 -0
  60. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_registrations.py +0 -0
  61. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_roles.py +0 -0
  62. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_scheduler.py +0 -0
  63. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_schemas/__init__.py +0 -0
  64. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_schemas/application_database.py +0 -0
  65. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_schemas/system_database.py +0 -0
  66. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_serialization.py +0 -0
  67. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_templates/dbos-db-starter/README.md +0 -0
  68. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_templates/dbos-db-starter/__package/__init__.py +0 -0
  69. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_templates/dbos-db-starter/__package/main.py.dbos +0 -0
  70. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_templates/dbos-db-starter/__package/schema.py +0 -0
  71. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_templates/dbos-db-starter/alembic.ini +0 -0
  72. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +0 -0
  73. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_templates/dbos-db-starter/migrations/env.py.dbos +0 -0
  74. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_templates/dbos-db-starter/migrations/script.py.mako +0 -0
  75. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py +0 -0
  76. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -0
  77. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_tracer.py +0 -0
  78. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_utils.py +0 -0
  79. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/_workflow_commands.py +0 -0
  80. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/cli/_github_init.py +0 -0
  81. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/cli/_template_init.py +0 -0
  82. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/dbos-config.schema.json +0 -0
  83. {dbos-1.8.0a5 → dbos-1.9.0a1}/dbos/py.typed +0 -0
  84. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/__init__.py +0 -0
  85. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/atexit_no_ctor.py +0 -0
  86. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/atexit_no_launch.py +0 -0
  87. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/classdefs.py +0 -0
  88. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/client_collateral.py +0 -0
  89. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/client_worker.py +0 -0
  90. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/conftest.py +0 -0
  91. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/dupname_classdefs1.py +0 -0
  92. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/dupname_classdefsa.py +0 -0
  93. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/more_classdefs.py +0 -0
  94. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/queuedworkflow.py +0 -0
  95. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/test_async_workflow_management.py +0 -0
  96. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/test_cli.py +0 -0
  97. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/test_client.py +0 -0
  98. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/test_config.py +0 -0
  99. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/test_croniter.py +0 -0
  100. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/test_debug.py +0 -0
  101. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/test_docker_secrets.py +0 -0
  102. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/test_fastapi_roles.py +0 -0
  103. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/test_kafka.py +0 -0
  104. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/test_outcome.py +0 -0
  105. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/test_package.py +0 -0
  106. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/test_schema_migration.py +0 -0
  107. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/test_spans.py +0 -0
  108. {dbos-1.8.0a5 → dbos-1.9.0a1}/tests/test_sqlalchemy.py +0 -0
  109. {dbos-1.8.0a5 → dbos-1.9.0a1}/version/__init__.py +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dbos
-Version: 1.8.0a5
+Version: 1.9.0a1
 Summary: Ultra-lightweight durable execution in Python
 Author-Email: "DBOS, Inc." <contact@dbos.dev>
 License: MIT
@@ -1,7 +1,6 @@
 from __future__ import annotations
 
 import asyncio
-import atexit
 import hashlib
 import inspect
 import os
@@ -1219,39 +1218,40 @@ class DBOS:
         return rv
 
     @classproperty
-    def workflow_id(cls) -> str:
-        """Return the workflow ID for the current context, which must be executing a workflow function."""
-        ctx = assert_current_dbos_context()
-        assert (
-            ctx.is_within_workflow()
-        ), "workflow_id is only available within a DBOS operation."
-        return ctx.workflow_id
+    def workflow_id(cls) -> Optional[str]:
+        """Return the ID of the currently executing workflow. If a workflow is not executing, return None."""
+        ctx = get_local_dbos_context()
+        if ctx and ctx.is_within_workflow():
+            return ctx.workflow_id
+        else:
+            return None
 
     @classproperty
-    def step_id(cls) -> int:
-        """Return the step ID for the currently executing step. This is a unique identifier of the current step within the workflow."""
-        ctx = assert_current_dbos_context()
-        assert (
-            ctx.is_step() or ctx.is_transaction()
-        ), "step_id is only available within a DBOS step."
-        return ctx.function_id
+    def step_id(cls) -> Optional[int]:
+        """Return the step ID for the currently executing step. This is a unique identifier of the current step within the workflow. If a step is not currently executing, return None."""
+        ctx = get_local_dbos_context()
+        if ctx and (ctx.is_step() or ctx.is_transaction()):
+            return ctx.function_id
+        else:
+            return None
 
     @classproperty
-    def step_status(cls) -> StepStatus:
-        """Return the status of the currently executing step."""
-        ctx = assert_current_dbos_context()
-        assert ctx.is_step(), "step_status is only available within a DBOS step."
-        assert ctx.step_status is not None
-        return ctx.step_status
+    def step_status(cls) -> Optional[StepStatus]:
+        """Return the status of the currently executing step. If a step is not currently executing, return None."""
+        ctx = get_local_dbos_context()
+        if ctx and ctx.is_step():
+            return ctx.step_status
+        else:
+            return None
 
     @classproperty
     def parent_workflow_id(cls) -> str:
         """
-        Return the workflow ID for the parent workflow.
-
-        `parent_workflow_id` must be accessed from within a workflow function.
+        This method is deprecated and should not be used.
         """
-
+        dbos_logger.warning(
+            "DBOS.parent_workflow_id is deprecated and should not be used"
+        )
         ctx = assert_current_dbos_context()
         assert (
             ctx.is_within_workflow()
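With this change, DBOS.workflow_id, DBOS.step_id, and DBOS.step_status return None outside a workflow or step instead of failing an assertion, so callers now narrow the value before using it. A minimal sketch of that calling pattern, mirroring the updated tests later in this diff (the function bodies are illustrative, not taken from the package):

```python
from typing import Optional

from dbos import DBOS


@DBOS.workflow()
def report_workflow_id() -> str:
    # The classproperty is now Optional[str]; narrow it before use.
    workflow_id: Optional[str] = DBOS.workflow_id
    assert workflow_id is not None  # only None when called outside a workflow
    return workflow_id


@DBOS.step()
def report_step_id() -> int:
    step_status = DBOS.step_status
    if step_status is None:
        # Called outside a DBOS step context, e.g. before DBOS.launch().
        return -1
    return step_status.step_id
```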
@@ -1376,31 +1376,3 @@ class DBOSConfiguredInstance:
     def __init__(self, config_name: str) -> None:
         self.config_name = config_name
         DBOS.register_instance(self)
-
-
-# Apps that import DBOS probably don't exit. If they do, let's see if
-# it looks like startup was abandoned or a call was forgotten...
-def _dbos_exit_hook() -> None:
-    if _dbos_global_registry is None:
-        # Probably used as or for a support module
-        return
-    if _dbos_global_instance is None:
-        print("DBOS exiting; functions were registered but DBOS() was not called")
-        dbos_logger.warning(
-            "DBOS exiting; functions were registered but DBOS() was not called"
-        )
-        return
-    if not _dbos_global_instance._launched:
-        if _dbos_global_instance.fastapi is not None:
-            # FastAPI lifespan middleware will call launch/destroy, so we can ignore this.
-            # This is likely to happen during fastapi dev runs, where the reloader loads the module multiple times.
-            return
-        print("DBOS exiting; DBOS exists but launch() was not called")
-        dbos_logger.warning("DBOS exiting; DBOS exists but launch() was not called")
-        return
-    # If we get here, we're exiting normally
-    _dbos_global_instance.destroy()
-
-
-# Register the exit hook
-atexit.register(_dbos_exit_hook)
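With the atexit hook removed, nothing destroys the global DBOS instance or warns about a forgotten launch() at interpreter exit, so applications own the lifecycle explicitly. A hedged sketch of that explicit lifecycle, using only calls that appear elsewhere in this diff (DBOS(config=...), DBOS.launch(), DBOS.destroy()); the configuration values are placeholders:

```python
from dbos import DBOS

# Placeholder configuration; a real project would load this from dbos-config.yaml
# or its environment rather than hard-coding credentials.
config = {
    "name": "example-app",
    "database_url": "postgresql://postgres:postgres@localhost:5432/example",
}

DBOS(config=config)
DBOS.launch()
try:
    pass  # run the application here
finally:
    # With the exit hook gone, shut down explicitly instead of relying on atexit.
    DBOS.destroy()
```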
@@ -55,7 +55,7 @@ class DBOSErrorCode(Enum):
     InitializationError = 3
     WorkflowFunctionNotFound = 4
     NonExistentWorkflowError = 5
-    DeadLetterQueueError = 6
+    MaxRecoveryAttemptsExceeded = 6
     MaxStepRetriesExceeded = 7
     NotAuthorized = 8
     ConflictingWorkflowError = 9
@@ -121,13 +121,13 @@ class DBOSNonExistentWorkflowError(DBOSException):
         )
 
 
-class DBOSDeadLetterQueueError(DBOSException):
-    """Exception raised when a workflow database record does not exist for a given ID."""
+class MaxRecoveryAttemptsExceededError(DBOSException):
+    """Exception raised when a workflow exceeds its max recovery attempts."""
 
     def __init__(self, wf_id: str, max_retries: int):
         super().__init__(
-            f"Workflow {wf_id} has been moved to the dead-letter queue after exceeding the maximum of {max_retries} retries",
-            dbos_error_code=DBOSErrorCode.DeadLetterQueueError.value,
+            f"Workflow {wf_id} has exceeded its maximum of {max_retries} execution or recovery attempts. Further attempts to execute or recover it will fail. See documentation for details: https://docs.dbos.dev/python/reference/decorators",
+            dbos_error_code=DBOSErrorCode.MaxRecoveryAttemptsExceeded.value,
         )
 
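Code that caught DBOSDeadLetterQueueError needs the new exception name. A small sketch of the updated handling, using the recovery entry point the tests in this diff call (DBOS._recover_pending_workflows is a private API, used here only for illustration):

```python
from dbos import DBOS
from dbos._error import MaxRecoveryAttemptsExceededError  # renamed from DBOSDeadLetterQueueError


def recover_and_report() -> None:
    try:
        # Recovery re-executes PENDING workflows and increments their recovery_attempts.
        DBOS._recover_pending_workflows()
    except MaxRecoveryAttemptsExceededError as e:
        # Raised once a workflow exceeds its maximum execution or recovery attempts.
        print(f"Workflow exceeded its recovery attempts: {e}")
```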
 
@@ -37,12 +37,12 @@ from ._context import get_local_dbos_context
 from ._error import (
     DBOSAwaitedWorkflowCancelledError,
     DBOSConflictingWorkflowError,
-    DBOSDeadLetterQueueError,
     DBOSNonExistentWorkflowError,
     DBOSQueueDeduplicatedError,
     DBOSUnexpectedStepError,
     DBOSWorkflowCancelledError,
     DBOSWorkflowConflictIDError,
+    MaxRecoveryAttemptsExceededError,
 )
 from ._logger import dbos_logger
 from ._schemas.system_database import SystemSchema
@@ -57,20 +57,25 @@ class WorkflowStatusString(Enum):
     PENDING = "PENDING"
     SUCCESS = "SUCCESS"
     ERROR = "ERROR"
-    RETRIES_EXCEEDED = "RETRIES_EXCEEDED"
+    MAX_RECOVERY_ATTEMPTS_EXCEEDED = "MAX_RECOVERY_ATTEMPTS_EXCEEDED"
     CANCELLED = "CANCELLED"
     ENQUEUED = "ENQUEUED"
 
 
 WorkflowStatuses = Literal[
-    "PENDING", "SUCCESS", "ERROR", "RETRIES_EXCEEDED", "CANCELLED", "ENQUEUED"
+    "PENDING",
+    "SUCCESS",
+    "ERROR",
+    "MAX_RECOVERY_ATTEMPTS_EXCEEDED",
+    "CANCELLED",
+    "ENQUEUED",
 ]
 
 
 class WorkflowStatus:
     # The workflow ID
     workflow_id: str
-    # The workflow status. Must be one of ENQUEUED, PENDING, SUCCESS, ERROR, CANCELLED, or RETRIES_EXCEEDED
+    # The workflow status. Must be one of ENQUEUED, PENDING, SUCCESS, ERROR, CANCELLED, or MAX_RECOVERY_ATTEMPTS_EXCEEDED
     status: str
     # The name of the workflow function
     name: str
@@ -515,7 +520,7 @@ class SystemDatabase:
             raise DBOSConflictingWorkflowError(status["workflow_uuid"], err_msg)
 
         # Every time we start executing a workflow (and thus attempt to insert its status), we increment `recovery_attempts` by 1.
-        # When this number becomes equal to `maxRetries + 1`, we mark the workflow as `RETRIES_EXCEEDED`.
+        # When this number becomes equal to `maxRetries + 1`, we mark the workflow as `MAX_RECOVERY_ATTEMPTS_EXCEEDED`.
         if (
             (wf_status != "SUCCESS" and wf_status != "ERROR")
             and max_recovery_attempts is not None
@@ -532,7 +537,7 @@ class SystemDatabase:
                     == WorkflowStatusString.PENDING.value
                 )
                 .values(
-                    status=WorkflowStatusString.RETRIES_EXCEEDED.value,
+                    status=WorkflowStatusString.MAX_RECOVERY_ATTEMPTS_EXCEEDED.value,
                     deduplication_id=None,
                     started_at_epoch_ms=None,
                     queue_name=None,
@@ -541,7 +546,7 @@ class SystemDatabase:
                 conn.execute(dlq_cmd)
                 # Need to commit here because we're throwing an exception
                 conn.commit()
-                raise DBOSDeadLetterQueueError(
+                raise MaxRecoveryAttemptsExceededError(
                     status["workflow_uuid"], max_recovery_attempts
                 )
 
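A handle for such a workflow now reports the renamed status value. A minimal check mirroring the updated tests (the handle argument is assumed to come from DBOS.start_workflow or a queue):

```python
from typing import Any

from dbos._sys_db import WorkflowStatusString  # where the status enum above is defined


def hit_recovery_limit(handle: Any) -> bool:
    # handle is assumed to be a workflow handle, e.g. from DBOS.start_workflow(...).
    status = handle.get_status().status
    # Formerly compared against WorkflowStatusString.RETRIES_EXCEEDED.value.
    return status == WorkflowStatusString.MAX_RECOVERY_ATTEMPTS_EXCEEDED.value
```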
 
@@ -450,7 +450,7 @@ def list(
         typer.Option(
             "--status",
             "-S",
-            help="Retrieve workflows with this status (PENDING, SUCCESS, ERROR, RETRIES_EXCEEDED, ENQUEUED, or CANCELLED)",
+            help="Retrieve workflows with this status (PENDING, SUCCESS, ERROR, ENQUEUED, CANCELLED, or MAX_RECOVERY_ATTEMPTS_EXCEEDED)",
         ),
     ] = None,
     appversion: Annotated[
@@ -657,7 +657,7 @@ def list_queue(
         typer.Option(
             "--status",
             "-S",
-            help="Retrieve functions with this status (PENDING, SUCCESS, ERROR, RETRIES_EXCEEDED, ENQUEUED, or CANCELLED)",
+            help="Retrieve functions with this status (PENDING, SUCCESS, ERROR, ENQUEUED, CANCELLED, or MAX_RECOVERY_ATTEMPTS_EXCEEDED)",
         ),
     ] = None,
     queue_name: Annotated[
@@ -27,7 +27,7 @@ dependencies = [
 ]
 requires-python = ">=3.9"
 readme = "README.md"
-version = "1.8.0a5"
+version = "1.9.0a1"
 
 [project.license]
 text = "MIT"
@@ -104,7 +104,7 @@ def test_deactivate(dbos: DBOS, config: DBOSConfig) -> None:
     assert event.is_set()
     # Verify the scheduled workflow does not run anymore
     time.sleep(5)
-    assert wf_counter <= val + 1
+    assert wf_counter <= val + 2
     # Enqueue a workflow, verify it still runs
     assert queue.enqueue(regular_workflow).get_result() == 5
 
@@ -142,7 +142,6 @@ def test_admin_recovery(config: DBOSConfig) -> None:
 
     @DBOS.workflow()
     def test_workflow(var: str, var2: str) -> str:
-        DBOS.logger.info("WFID: " + DBOS.workflow_id)
         nonlocal wf_counter
         wf_counter += 1
         res = test_step(var2)
@@ -464,7 +463,9 @@ def test_list_workflows(dbos: DBOS) -> None:
 
     @DBOS.workflow()
     def test_workflow_2(my_time: datetime) -> str:
-        return DBOS.workflow_id + " completed at " + my_time.isoformat()
+        workflow_id = DBOS.workflow_id
+        assert workflow_id is not None
+        return workflow_id + " completed at " + my_time.isoformat()
 
     # Start workflows
     handle_1 = DBOS.start_workflow(test_workflow_1)
@@ -656,7 +657,9 @@ def test_get_workflow_by_id(dbos: DBOS) -> None:
 
     @DBOS.workflow()
     def test_workflow_2(my_time: datetime) -> str:
-        return DBOS.workflow_id + " completed at " + my_time.isoformat()
+        workflow_id = DBOS.workflow_id
+        assert workflow_id is not None
+        return workflow_id + " completed at " + my_time.isoformat()
 
     # Start workflows
     handle_1 = DBOS.start_workflow(test_workflow_1)
@@ -713,7 +716,9 @@ def test_admin_garbage_collect(dbos: DBOS) -> None:
 
     @DBOS.workflow()
     def workflow() -> str:
-        return DBOS.workflow_id
+        workflow_id = DBOS.workflow_id
+        assert workflow_id is not None
+        return workflow_id
 
     workflow()
 
@@ -32,12 +32,12 @@ async def test_async_workflow(dbos: DBOS) -> None:
         nonlocal wf_counter
         wf_counter += 1
         res1 = test_transaction(var1)
-        res2 = test_step(var2)
+        res2 = await test_step(var2)
         DBOS.logger.info("I'm test_workflow")
         return res1 + res2
 
     @DBOS.step()
-    def test_step(var: str) -> str:
+    async def test_step(var: str) -> str:
         nonlocal step_counter
         step_counter += 1
         DBOS.logger.info("I'm test_step")
@@ -73,6 +73,10 @@ async def test_async_workflow(dbos: DBOS) -> None:
     sync_handle = DBOS.start_workflow(test_workflow, "alice", "bob")
     assert sync_handle.get_result() == "alicetxn31bobstep3"  # type: ignore
 
+    # Test DBOS.start_workflow_async on steps
+    handle = await DBOS.start_workflow_async(test_step, "alice")
+    assert (await handle.get_result()) == "alicestep4"
+
 
 @pytest.mark.asyncio
 async def test_async_step(dbos: DBOS) -> None:
@@ -298,7 +302,9 @@ async def test_sleep(dbos: DBOS) -> None:
     @DBOS.workflow()
     async def test_sleep_workflow(secs: float) -> str:
         await dbos.sleep_async(secs)
-        return DBOS.workflow_id
+        workflow_id = DBOS.workflow_id
+        assert workflow_id is not None
+        return workflow_id
 
     start_time = time.time()
     sleep_uuid = await test_sleep_workflow(1.5)
@@ -10,6 +10,7 @@ from dbos import DBOS, DBOSConfiguredInstance, Queue, SetWorkflowID
 
 # Private API used because this is a test
 from dbos._context import DBOSContextEnsure, assert_current_dbos_context
+from dbos._dbos_config import DBOSConfig
 from tests.conftest import queue_entries_are_cleaned_up
 
 
@@ -884,3 +885,29 @@ def test_mixed_methods(dbos: DBOS) -> None:
     status = handle.get_status()
     assert status.class_name == None
     assert status.config_name == None
+
+
+def test_class_step_without_dbos(dbos: DBOS, config: DBOSConfig) -> None:
+    DBOS.destroy(destroy_registry=True)
+
+    @DBOS.dbos_class()
+    class TestClass(DBOSConfiguredInstance):
+        def __init__(self, x: int) -> None:
+            self.x = x
+            super().__init__("test")
+
+        @DBOS.step()
+        def step(self, x: int) -> int:
+            return self.x + x
+
+    input = 5
+    inst = TestClass(input)
+    assert inst.step(input) == input + input
+
+    DBOS(config=config)
+
+    assert inst.step(input) == input + input
+
+    DBOS.launch()
+
+    assert inst.step(input) == input + input
@@ -14,7 +14,9 @@ def test_concurrent_workflows(dbos: DBOS) -> None:
     @DBOS.workflow()
     def test_workflow() -> str:
         time.sleep(1)
-        return DBOS.workflow_id
+        workflow_id = DBOS.workflow_id
+        assert workflow_id is not None
+        return workflow_id
 
     def test_thread(id: str) -> str:
         with SetWorkflowID(id):
@@ -48,7 +50,9 @@ def test_concurrent_conflict_uuid(dbos: DBOS) -> None:
         condition.notify()
         condition.release()
 
-        return DBOS.workflow_id
+        workflow_id = DBOS.workflow_id
+        assert workflow_id is not None
+        return workflow_id
 
     @DBOS.workflow()
     def test_workflow() -> str:
@@ -74,7 +78,9 @@ def test_concurrent_conflict_uuid(dbos: DBOS) -> None:
         condition.notify()
         condition.release()
 
-        return DBOS.workflow_id
+        workflow_id = DBOS.workflow_id
+        assert workflow_id is not None
+        return workflow_id
 
     def test_txn_thread(id: str) -> str:
         with SetWorkflowID(id):
@@ -61,9 +61,11 @@ def test_simple_workflow(dbos: DBOS) -> None:
     @DBOS.step()
     def test_step(var: str) -> str:
         assert DBOS.step_id == 2
-        assert DBOS.step_status.step_id == 2
-        assert DBOS.step_status.current_attempt is None
-        assert DBOS.step_status.max_attempts is None
+        step_status = DBOS.step_status
+        assert step_status is not None
+        assert step_status.step_id == 2
+        assert step_status.current_attempt is None
+        assert step_status.max_attempts is None
         nonlocal step_counter
         step_counter += 1
         DBOS.logger.info("I'm test_step " + var)
@@ -133,10 +135,6 @@ def test_child_workflow(dbos: DBOS) -> None:
     @DBOS.workflow()
     def test_workflow(var: str, var2: str) -> str:
         DBOS.logger.info("I'm test_workflow")
-        if len(DBOS.parent_workflow_id):
-            DBOS.logger.info(" This is a child test_workflow")
-            # Note this assertion is only true if child wasn't assigned an ID explicitly
-            assert DBOS.workflow_id.startswith(DBOS.parent_workflow_id)
         nonlocal wf_counter
         wf_counter += 1
         res = test_transaction(var2)
@@ -651,7 +649,9 @@ def test_retrieve_workflow(dbos: DBOS) -> None:
     @DBOS.workflow()
     def test_sleep_workflow(secs: float) -> str:
         dbos.sleep(secs)
-        return DBOS.workflow_id
+        workflow_id = DBOS.workflow_id
+        assert workflow_id is not None
+        return workflow_id
 
     @DBOS.workflow()
     def test_sleep_workthrow(secs: float) -> str:
@@ -732,7 +732,9 @@ def test_retrieve_workflow_in_workflow(dbos: DBOS) -> None:
     @DBOS.workflow()
     def test_sleep_workflow(secs: float) -> str:
         dbos.sleep(secs)
-        return DBOS.workflow_id
+        workflow_id = DBOS.workflow_id
+        assert workflow_id is not None
+        return workflow_id
 
     @DBOS.workflow()
     def test_workflow_status_a() -> str:
@@ -793,7 +795,9 @@ def test_sleep(dbos: DBOS) -> None:
     @DBOS.workflow()
     def test_sleep_workflow(secs: float) -> str:
         dbos.sleep(secs)
-        return DBOS.workflow_id
+        workflow_id = DBOS.workflow_id
+        assert workflow_id is not None
+        return workflow_id
 
     start_time = time.time()
     sleep_uuid = test_sleep_workflow(1.5)
@@ -1573,7 +1577,9 @@ def test_custom_names(dbos: DBOS) -> None:
 
     @DBOS.workflow(name=workflow_name)
     def workflow() -> str:
-        return DBOS.workflow_id
+        workflow_id = DBOS.workflow_id
+        assert workflow_id is not None
+        return workflow_id
 
     handle = queue.enqueue(workflow)
     assert handle.get_status().name == workflow_name
@@ -1581,7 +1587,9 @@ def test_custom_names(dbos: DBOS) -> None:
 
     @DBOS.step(name=step_name)
     def step() -> str:
-        return DBOS.workflow_id
+        workflow_id = DBOS.workflow_id
+        assert workflow_id is not None
+        return workflow_id
 
     handle = queue.enqueue(step)
     assert handle.get_status().name == f"<temp>.{step_name}"
@@ -1589,7 +1597,9 @@ def test_custom_names(dbos: DBOS) -> None:
 
     @DBOS.transaction(name=txn_name)
     def txn() -> str:
-        return DBOS.workflow_id
+        workflow_id = DBOS.workflow_id
+        assert workflow_id is not None
+        return workflow_id
 
     handle = queue.enqueue(txn)
     assert handle.get_status().name == f"<temp>.{txn_name}"
@@ -1614,12 +1624,22 @@ def test_custom_names(dbos: DBOS) -> None:
 async def test_step_without_dbos(dbos: DBOS, config: DBOSConfig) -> None:
     DBOS.destroy(destroy_registry=True)
 
+    is_dbos_active = False
+
     @DBOS.step()
     def step(x: int) -> int:
+        if is_dbos_active:
+            assert DBOS.workflow_id is not None
+        else:
+            assert DBOS.workflow_id is None
         return x
 
     @DBOS.step()
     async def async_step(x: int) -> int:
+        if is_dbos_active:
+            assert DBOS.workflow_id is not None
+        else:
+            assert DBOS.workflow_id is None
         return x
 
     assert step(5) == 5
@@ -1631,6 +1651,7 @@ async def test_step_without_dbos(dbos: DBOS, config: DBOSConfig) -> None:
     assert await async_step(5) == 5
 
     DBOS.launch()
+    is_dbos_active = True
 
     assert step(5) == 5
     assert await async_step(5) == 5
@@ -10,11 +10,11 @@ from sqlalchemy.exc import InvalidRequestError, OperationalError
 from dbos import DBOS, Queue, SetWorkflowID
 from dbos._error import (
     DBOSAwaitedWorkflowCancelledError,
-    DBOSDeadLetterQueueError,
     DBOSMaxStepRetriesExceeded,
     DBOSNotAuthorizedError,
     DBOSQueueDeduplicatedError,
     DBOSUnexpectedStepError,
+    MaxRecoveryAttemptsExceededError,
 )
 from dbos._registrations import DEFAULT_MAX_RECOVERY_ATTEMPTS
 from dbos._serialization import (
@@ -179,12 +179,15 @@ def test_dead_letter_queue(dbos: DBOS) -> None:
     # and puts the workflow in the DLQ status.
     with pytest.raises(Exception) as exc_info:
         DBOS._recover_pending_workflows()
-    assert exc_info.errisinstance(DBOSDeadLetterQueueError)
-    assert handle.get_status().status == WorkflowStatusString.RETRIES_EXCEEDED.value
+    assert exc_info.errisinstance(MaxRecoveryAttemptsExceededError)
+    assert (
+        handle.get_status().status
+        == WorkflowStatusString.MAX_RECOVERY_ATTEMPTS_EXCEEDED.value
+    )
     with pytest.raises(Exception) as exc_info:
         with SetWorkflowID(wfid):
             dead_letter_workflow()
-    assert exc_info.errisinstance(DBOSDeadLetterQueueError)
+    assert exc_info.errisinstance(MaxRecoveryAttemptsExceededError)
 
     # Resume the workflow. Verify it can recover again without error.
     resumed_handle = dbos.resume_workflow(wfid)
@@ -379,9 +382,11 @@ def test_step_status(dbos: DBOS) -> None:
     @DBOS.step(retries_allowed=True, interval_seconds=0, max_attempts=max_attempts)
     def failing_step() -> None:
         nonlocal step_counter
-        assert DBOS.step_status.step_id == 1
-        assert DBOS.step_status.current_attempt == step_counter
-        assert DBOS.step_status.max_attempts == max_attempts
+        step_status = DBOS.step_status
+        assert step_status is not None
+        assert step_status.step_id == 1
+        assert step_status.current_attempt == step_counter
+        assert step_status.max_attempts == max_attempts
         step_counter += 1
         if step_counter < max_attempts:
             raise Exception("fail")
@@ -439,7 +444,9 @@ def test_keyboardinterrupt_during_retries(dbos: DBOS) -> None:
     @DBOS.workflow()
     def failing_workflow() -> str:
         failing_step()
-        return DBOS.workflow_id
+        workflow_id = DBOS.workflow_id
+        assert workflow_id is not None
+        return workflow_id
 
     with pytest.raises(KeyboardInterrupt):
         failing_workflow()
@@ -112,7 +112,9 @@ def test_endpoint_recovery(dbos_fastapi: Tuple[DBOS, FastAPI]) -> None:
 
     @DBOS.workflow()
     def test_workflow(var1: str) -> tuple[str, str]:
-        return var1, DBOS.workflow_id
+        workflow_id = DBOS.workflow_id
+        assert workflow_id is not None
+        return var1, workflow_id
 
     @app.get("/{var1}/{var2}")
     def test_endpoint(var1: str, var2: str) -> dict[str, str]:
@@ -72,7 +72,9 @@ def test_endpoint_recovery(dbos_flask: Tuple[DBOS, Flask]) -> None:
 
     @DBOS.workflow()
     def test_workflow(var1: str) -> tuple[str, str]:
-        return var1, DBOS.workflow_id
+        workflow_id = DBOS.workflow_id
+        assert workflow_id is not None
+        return var1, workflow_id
 
     @app.route("/<var1>/<var2>")
     def test_endpoint(var1: str, var2: str) -> dict[str, str]:
@@ -1041,7 +1041,7 @@ def test_dlq_enqueued_workflows(dbos: DBOS) -> None:
     time.sleep(2)
     assert (
         blocked_handle.get_status().status
-        == WorkflowStatusString.RETRIES_EXCEEDED.value
+        == WorkflowStatusString.MAX_RECOVERY_ATTEMPTS_EXCEEDED.value
     )
     with dbos._sys_db.engine.begin() as c:
         query = sa.select(SystemSchema.workflow_status.c.recovery_attempts).where(
@@ -1343,7 +1343,9 @@ def test_worker_concurrency_across_versions(dbos: DBOS, client: DBOSClient) -> N
 
     @DBOS.workflow()
     def test_workflow() -> str:
-        return DBOS.workflow_id
+        workflow_id = DBOS.workflow_id
+        assert workflow_id is not None
+        return workflow_id
 
     # First enqueue a workflow on the other version, then on the current version
     other_version = "other_version"
@@ -1417,7 +1419,9 @@ def test_unsetting_timeout(dbos: DBOS) -> None:
     def child() -> str:
         for _ in range(5):
             DBOS.sleep(1)
-        return DBOS.workflow_id
+        workflow_id = DBOS.workflow_id
+        assert workflow_id is not None
+        return workflow_id
 
     @DBOS.workflow()
     def parent(child_one: str, child_two: str) -> None:
@@ -1448,7 +1452,9 @@ def test_queue_executor_id(dbos: DBOS) -> None:
 
     @DBOS.workflow()
    def example_workflow() -> str:
-        return DBOS.workflow_id
+        workflow_id = DBOS.workflow_id
+        assert workflow_id is not None
+        return workflow_id
 
     # Set an executor ID
     original_executor_id = str(uuid.uuid4())
@@ -195,7 +195,9 @@ def test_scheduler_oaoo(dbos: DBOS) -> None:
         nonlocal wf_counter
         wf_counter += 1
         nonlocal workflow_id
-        workflow_id = DBOS.workflow_id
+        wf_id = DBOS.workflow_id
+        assert wf_id is not None
+        workflow_id = wf_id
 
     @DBOS.transaction()
     def test_transaction() -> None:
@@ -219,6 +221,9 @@ def test_scheduler_oaoo(dbos: DBOS) -> None:
     for evt in dbos.poller_stop_events:
         evt.set()
 
+    # Wait for workflows to finish
+    time.sleep(2)
+
     dbos._sys_db.update_workflow_outcome(workflow_id, "PENDING")
 
     workflow_handles = DBOS._recover_pending_workflows()
@@ -129,27 +129,3 @@ def test_dbos_singleton_negative(cleanup_test_databases: None) -> None:
     assert "launch" in str(exc_info.value)
 
     DBOS.destroy()
-
-
-def test_dbos_atexit_no_dbos(cleanup_test_databases: None) -> None:
-    # Run the .py as a separate process
-    result = subprocess.run(
-        [sys.executable, path.join("tests", "atexit_no_ctor.py")],
-        capture_output=True,
-        text=True,
-    )
-
-    # Assert that the output contains the warning message
-    assert "DBOS exiting; functions were registered" in result.stdout
-
-
-def test_dbos_atexit_no_launch(cleanup_test_databases: None) -> None:
-    # Run the .py as a separate process
-    result = subprocess.run(
-        [sys.executable, path.join("tests", "atexit_no_launch.py")],
-        capture_output=True,
-        text=True,
-    )
-
-    # Assert that the output contains the warning message
-    assert "DBOS exists but launch() was not called" in result.stdout
@@ -456,7 +456,9 @@ def test_set_get_event(dbos: DBOS) -> None:
         DBOS.set_event("key", value)
         stepOne()
         DBOS.get_event("fake_id", "fake_value", 0)
-        return DBOS.get_event(DBOS.workflow_id, "key", 1)
+        workflow_id = DBOS.workflow_id
+        assert workflow_id is not None
+        return DBOS.get_event(workflow_id, "key", 1)
 
     @DBOS.step()
     def stepOne() -> None:
@@ -500,7 +502,9 @@ def test_callchild_first_sync(dbos: DBOS) -> None:
 
     @DBOS.workflow()
     def child_workflow() -> str:
-        return DBOS.workflow_id
+        workflow_id = DBOS.workflow_id
+        assert workflow_id is not None
+        return workflow_id
 
     wfid = str(uuid.uuid4())
     with SetWorkflowID(wfid):
@@ -540,7 +544,9 @@ async def test_callchild_direct_asyncio(dbos: DBOS) -> None:
 
     @DBOS.workflow()
     async def child_workflow() -> str:
-        return DBOS.workflow_id
+        workflow_id = DBOS.workflow_id
+        assert workflow_id is not None
+        return workflow_id
 
     wfid = str(uuid.uuid4())
     with SetWorkflowID(wfid):
@@ -773,7 +779,9 @@ def test_callchild_middle_async_thread(dbos: DBOS) -> None:
 
     @DBOS.step()
     def stepOne() -> str:
-        return DBOS.workflow_id
+        workflow_id = DBOS.workflow_id
+        assert workflow_id is not None
+        return workflow_id
 
     @DBOS.step()
     def stepTwo() -> None:
@@ -781,7 +789,9 @@ def test_callchild_middle_async_thread(dbos: DBOS) -> None:
 
     @DBOS.workflow()
     def child_workflow() -> str:
-        return DBOS.workflow_id
+        workflow_id = DBOS.workflow_id
+        assert workflow_id is not None
+        return workflow_id
 
     wfid = str(uuid.uuid4())
     with SetWorkflowID(wfid):
@@ -821,7 +831,9 @@ async def test_callchild_first_asyncio(dbos: DBOS) -> None:
 
     @DBOS.step()
     def stepOne() -> str:
-        return DBOS.workflow_id
+        workflow_id = DBOS.workflow_id
+        assert workflow_id is not None
+        return workflow_id
 
     @DBOS.step()
     def stepTwo() -> None:
@@ -829,7 +841,9 @@ async def test_callchild_first_asyncio(dbos: DBOS) -> None:
 
     @DBOS.workflow()
     async def child_workflow() -> str:
-        return DBOS.workflow_id
+        workflow_id = DBOS.workflow_id
+        assert workflow_id is not None
+        return workflow_id
 
     wfid = str(uuid.uuid4())
     with SetWorkflowID(wfid):
@@ -1045,11 +1059,15 @@ def test_call_as_step_within_step(dbos: DBOS) -> None:
 
     @DBOS.workflow()
     def getStatusWorkflow() -> str:
-        return getStatus(DBOS.workflow_id)
+        workflow_id = DBOS.workflow_id
+        assert workflow_id is not None
+        return getStatus(workflow_id)
 
     @DBOS.transaction()
     def transactionStatus() -> None:
-        DBOS.get_workflow_status(DBOS.workflow_id)
+        workflow_id = DBOS.workflow_id
+        assert workflow_id is not None
+        DBOS.get_workflow_status(workflow_id)
 
     wfid = str(uuid.uuid4())
     with SetWorkflowID(wfid):
@@ -652,7 +652,9 @@ def test_garbage_collection(dbos: DBOS) -> None:
     def blocked_workflow() -> str:
         txn(0)
         event.wait()
-        return DBOS.workflow_id
+        workflow_id = DBOS.workflow_id
+        assert workflow_id is not None
+        return workflow_id
 
     num_workflows = 10
 
@@ -729,7 +731,9 @@ def test_global_timeout(dbos: DBOS) -> None:
     def blocked_workflow() -> str:
         while not event.wait(0):
             DBOS.sleep(0.1)
-        return DBOS.workflow_id
+        workflow_id = DBOS.workflow_id
+        assert workflow_id is not None
+        return workflow_id
 
     num_workflows = 10
     handles = [DBOS.start_workflow(blocked_workflow) for _ in range(num_workflows)]