dbos 1.11.0a6.tar.gz → 1.12.0a2.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: the registry has flagged this version of dbos as potentially problematic.

Files changed (113)
  1. {dbos-1.11.0a6 → dbos-1.12.0a2}/PKG-INFO +1 -1
  2. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_context.py +31 -0
  3. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_core.py +10 -24
  4. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_dbos.py +4 -0
  5. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_error.py +1 -1
  6. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_recovery.py +17 -12
  7. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_registrations.py +1 -1
  8. {dbos-1.11.0a6 → dbos-1.12.0a2}/pyproject.toml +1 -1
  9. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/test_async.py +0 -24
  10. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/test_classdecorators.py +1 -1
  11. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/test_concurrency.py +0 -13
  12. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/test_dbos.py +30 -22
  13. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/test_failures.py +29 -9
  14. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/test_queue.py +2 -1
  15. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/test_spans.py +15 -33
  16. {dbos-1.11.0a6 → dbos-1.12.0a2}/LICENSE +0 -0
  17. {dbos-1.11.0a6 → dbos-1.12.0a2}/README.md +0 -0
  18. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/__init__.py +0 -0
  19. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/__main__.py +0 -0
  20. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_admin_server.py +0 -0
  21. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_app_db.py +0 -0
  22. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_classproperty.py +0 -0
  23. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_client.py +0 -0
  24. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_conductor/conductor.py +0 -0
  25. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_conductor/protocol.py +0 -0
  26. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_croniter.py +0 -0
  27. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_dbos_config.py +0 -0
  28. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_debug.py +0 -0
  29. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_docker_pg_helper.py +0 -0
  30. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_event_loop.py +0 -0
  31. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_fastapi.py +0 -0
  32. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_flask.py +0 -0
  33. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_kafka.py +0 -0
  34. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_kafka_message.py +0 -0
  35. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_logger.py +0 -0
  36. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_migrations/env.py +0 -0
  37. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_migrations/script.py.mako +0 -0
  38. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_migrations/versions/01ce9f07bd10_streaming.py +0 -0
  39. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_migrations/versions/04ca4f231047_workflow_queues_executor_id.py +0 -0
  40. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_migrations/versions/27ac6900c6ad_add_queue_dedup.py +0 -0
  41. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_migrations/versions/50f3227f0b4b_fix_job_queue.py +0 -0
  42. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_migrations/versions/5c361fc04708_added_system_tables.py +0 -0
  43. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_migrations/versions/66478e1b95e5_consolidate_queues.py +0 -0
  44. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_migrations/versions/83f3732ae8e7_workflow_timeout.py +0 -0
  45. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_migrations/versions/933e86bdac6a_add_queue_priority.py +0 -0
  46. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_migrations/versions/a3b18ad34abe_added_triggers.py +0 -0
  47. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_migrations/versions/d76646551a6b_job_queue_limiter.py +0 -0
  48. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_migrations/versions/d76646551a6c_workflow_queue.py +0 -0
  49. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_migrations/versions/d994145b47b6_consolidate_inputs.py +0 -0
  50. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_migrations/versions/eab0cc1d9a14_job_queue.py +0 -0
  51. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py +0 -0
  52. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_outcome.py +0 -0
  53. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_queue.py +0 -0
  54. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_roles.py +0 -0
  55. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_scheduler.py +0 -0
  56. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_schemas/__init__.py +0 -0
  57. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_schemas/application_database.py +0 -0
  58. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_schemas/system_database.py +0 -0
  59. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_serialization.py +0 -0
  60. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_sys_db.py +0 -0
  61. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_templates/dbos-db-starter/README.md +0 -0
  62. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_templates/dbos-db-starter/__package/__init__.py +0 -0
  63. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_templates/dbos-db-starter/__package/main.py.dbos +0 -0
  64. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_templates/dbos-db-starter/__package/schema.py +0 -0
  65. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_templates/dbos-db-starter/alembic.ini +0 -0
  66. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +0 -0
  67. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_templates/dbos-db-starter/migrations/env.py.dbos +0 -0
  68. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_templates/dbos-db-starter/migrations/script.py.mako +0 -0
  69. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py +0 -0
  70. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -0
  71. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_tracer.py +0 -0
  72. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_utils.py +0 -0
  73. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_workflow_commands.py +0 -0
  74. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/cli/_github_init.py +0 -0
  75. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/cli/_template_init.py +0 -0
  76. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/cli/cli.py +0 -0
  77. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/cli/migration.py +0 -0
  78. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/dbos-config.schema.json +0 -0
  79. {dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/py.typed +0 -0
  80. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/__init__.py +0 -0
  81. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/atexit_no_ctor.py +0 -0
  82. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/atexit_no_launch.py +0 -0
  83. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/classdefs.py +0 -0
  84. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/client_collateral.py +0 -0
  85. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/client_worker.py +0 -0
  86. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/conftest.py +0 -0
  87. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/dupname_classdefs1.py +0 -0
  88. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/dupname_classdefsa.py +0 -0
  89. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/more_classdefs.py +0 -0
  90. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/queuedworkflow.py +0 -0
  91. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/test_admin_server.py +0 -0
  92. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/test_async_workflow_management.py +0 -0
  93. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/test_cli.py +0 -0
  94. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/test_client.py +0 -0
  95. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/test_config.py +0 -0
  96. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/test_croniter.py +0 -0
  97. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/test_debug.py +0 -0
  98. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/test_docker_secrets.py +0 -0
  99. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/test_fastapi.py +0 -0
  100. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/test_fastapi_roles.py +0 -0
  101. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/test_flask.py +0 -0
  102. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/test_kafka.py +0 -0
  103. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/test_migrate.py +0 -0
  104. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/test_outcome.py +0 -0
  105. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/test_package.py +0 -0
  106. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/test_scheduler.py +0 -0
  107. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/test_schema_migration.py +0 -0
  108. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/test_singleton.py +0 -0
  109. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/test_sqlalchemy.py +0 -0
  110. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/test_streaming.py +0 -0
  111. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/test_workflow_introspection.py +0 -0
  112. {dbos-1.11.0a6 → dbos-1.12.0a2}/tests/test_workflow_management.py +0 -0
  113. {dbos-1.11.0a6 → dbos-1.12.0a2}/version/__init__.py +0 -0

{dbos-1.11.0a6 → dbos-1.12.0a2}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dbos
-Version: 1.11.0a6
+Version: 1.12.0a2
 Summary: Ultra-lightweight durable execution in Python
 Author-Email: "DBOS, Inc." <contact@dbos.dev>
 License: MIT

{dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_context.py
@@ -753,3 +753,34 @@ class DBOSAssumeRole:
         assert ctx.assumed_role == self.assume_role
         ctx.assumed_role = self.prior_role
         return False  # Did not handle
+
+
+class UseLogAttributes:
+    """Temporarily set context attributes for logging"""
+
+    def __init__(self, *, workflow_id: str = "") -> None:
+        self.workflow_id = workflow_id
+        self.created_ctx = False
+
+    def __enter__(self) -> UseLogAttributes:
+        ctx = get_local_dbos_context()
+        if ctx is None:
+            self.created_ctx = True
+            _set_local_dbos_context(DBOSContext())
+        ctx = assert_current_dbos_context()
+        self.saved_workflow_id = ctx.workflow_id
+        ctx.workflow_id = self.workflow_id
+        return self
+
+    def __exit__(
+        self,
+        exc_type: Optional[Type[BaseException]],
+        exc_value: Optional[BaseException],
+        traceback: Optional[TracebackType],
+    ) -> Literal[False]:
+        ctx = assert_current_dbos_context()
+        ctx.workflow_id = self.saved_workflow_id
+        # Clean up the basic context if we created it
+        if self.created_ctx:
+            _clear_local_dbos_context()
+        return False  # Did not handle
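
For orientation, a minimal usage sketch of the new UseLogAttributes context manager, mirroring how the dbos/_recovery.py hunks below use it: it temporarily attaches a workflow ID to the local DBOS context so log records emitted inside the block are tagged with that workflow, then restores the prior context on exit. The helper function and logger parameter here are illustrative, not part of the diff.

# Illustrative helper (not in the package): tag a recovery-failure log with a workflow ID.
from logging import Logger

from dbos._context import UseLogAttributes


def log_recovery_failure(logger: Logger, workflow_id: str, error: Exception) -> None:
    # Inside the block, the DBOS logging context reports `workflow_id`,
    # so the emitted record is attributed to that workflow.
    with UseLogAttributes(workflow_id=workflow_id):
        logger.error(
            f"Exception encountered when recovering workflow {workflow_id}:",
            exc_info=error,
        )
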

{dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_core.py
@@ -5,7 +5,6 @@ import json
 import sys
 import threading
 import time
-import traceback
 from concurrent.futures import Future
 from functools import wraps
 from typing import (
@@ -66,7 +65,6 @@ from ._registrations import (
     get_dbos_func_name,
     get_func_info,
     get_or_create_func_info,
-    get_temp_workflow_type,
     set_dbos_func_name,
     set_func_info,
     set_temp_workflow_type,
@@ -452,7 +450,7 @@ def execute_workflow_by_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[An
     if not wf_func:
         raise DBOSWorkflowFunctionNotFoundError(
             workflow_id,
-            f"Cannot execute workflow because {status['name']} is not a registered workflow function",
+            f"{status['name']} is not a registered workflow function",
         )
     with DBOSContextEnsure():
         # If this function belongs to a configured class, add that class instance as its first argument
@@ -463,7 +461,7 @@ def execute_workflow_by_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[An
             if iname not in dbos._registry.instance_info_map:
                 raise DBOSWorkflowFunctionNotFoundError(
                     workflow_id,
-                    f"Cannot execute workflow because instance '{iname}' is not registered",
+                    f"configured class instance '{iname}' is not registered",
                 )
             class_instance = dbos._registry.instance_info_map[iname]
             inputs["args"] = (class_instance,) + inputs["args"]
@@ -473,7 +471,7 @@ def execute_workflow_by_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[An
             if class_name not in dbos._registry.class_info_map:
                 raise DBOSWorkflowFunctionNotFoundError(
                     workflow_id,
-                    f"Cannot execute workflow because class '{class_name}' is not registered",
+                    f"class '{class_name}' is not registered",
                 )
             class_object = dbos._registry.class_info_map[class_name]
             inputs["args"] = (class_object,) + inputs["args"]
@@ -534,7 +532,7 @@ start_workflow(
     if fi is None:
         raise DBOSWorkflowFunctionNotFoundError(
             "<NONE>",
-            f"start_workflow: function {func.__name__} is not registered",
+            f"{func.__name__} is not a registered workflow function",
         )

     func = cast("Workflow[P, R]", func.__orig_func)  # type: ignore
@@ -630,7 +628,7 @@ async def start_workflow_async(
     if fi is None:
         raise DBOSWorkflowFunctionNotFoundError(
             "<NONE>",
-            f"start_workflow: function {func.__name__} is not registered",
+            f"{func.__name__} is not a registered workflow function",
        )

     func = cast("Workflow[P, R]", func.__orig_func)  # type: ignore
@@ -1158,27 +1156,15 @@ decorate_step(

     @wraps(func)
     def wrapper(*args: Any, **kwargs: Any) -> Any:
-        rr: Optional[str] = check_required_roles(func, fi)
-        # Entering step is allowed:
-        #  No DBOS, just call the original function directly
-        #  In a step already, just call the original function directly.
-        #  In a workflow (that is not in a step already)
-        #  Not in a workflow (we will start the single op workflow)
-        if not dbosreg.dbos or not dbosreg.dbos._launched:
-            # Call the original function directly
-            return func(*args, **kwargs)
+        # If the step is called from a workflow, run it as a step.
+        # Otherwise, run it as a normal function.
         ctx = get_local_dbos_context()
-        if ctx and ctx.is_step():
-            # Call the original function directly
-            return func(*args, **kwargs)
-        if ctx and ctx.is_within_workflow():
-            assert ctx.is_workflow(), "Steps must be called from within workflows"
+        if ctx and ctx.is_workflow():
+            rr: Optional[str] = check_required_roles(func, fi)
             with DBOSAssumeRole(rr):
                 return invoke_step(*args, **kwargs)
         else:
-            tempwf = dbosreg.workflow_info_map.get("<temp>." + step_name)
-            assert tempwf
-            return tempwf(*args, **kwargs)
+            return func(*args, **kwargs)

     wrapper = (
         _mark_coroutine(wrapper) if inspect.iscoroutinefunction(func) else wrapper  # type: ignore
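
To make the decorate_step change above concrete: with this release, a @DBOS.step() function invoked outside any workflow runs as a plain function (no temporary "<temp>" workflow is created and nothing is checkpointed), while calls from inside a workflow are still recorded as steps. A minimal sketch of the observable behavior, assuming a configured and launched DBOS; the function names are illustrative, not from the package.

from dbos import DBOS


@DBOS.step()
def fetch_greeting(name: str) -> str:
    # Durable, checkpointed, and retried only when called from a workflow.
    return f"Hello, {name}!"


@DBOS.workflow()
def greeting_workflow(name: str) -> str:
    # Inside a workflow: recorded as a step in the workflow's history.
    return fetch_greeting(name)


# Outside any workflow: runs as an ordinary function call; per the updated
# tests, DBOS.workflow_id is None here and no workflow row is created.
print(fetch_greeting("alice"))
print(greeting_workflow("alice"))
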

{dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_dbos.py
@@ -211,6 +211,10 @@ class DBOSRegistry:
     def register_instance(self, inst: object) -> None:
         config_name = getattr(inst, "config_name")
         class_name = _class_fqn(inst.__class__)
+        if self.dbos and self.dbos._launched:
+            dbos_logger.warning(
+                f"Configured instance {config_name} of class {class_name} was registered after DBOS was launched. This may cause errors during workflow recovery. All configured instances should be instantiated before DBOS is launched."
+            )
         fn = f"{class_name}/{config_name}"
         if fn in self.instance_info_map:
             if self.instance_info_map[fn] is not inst:
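
The new warning above encodes an ordering requirement: configured class instances should be constructed, and therefore registered, before DBOS.launch() so workflow recovery can find them. A minimal sketch of the intended ordering, assuming the DBOSConfiguredInstance base class and @DBOS.dbos_class() decorator exported by the dbos package; the class, config name, and app config here are illustrative.

from dbos import DBOS, DBOSConfig, DBOSConfiguredInstance


@DBOS.dbos_class()
class Reporter(DBOSConfiguredInstance):
    def __init__(self, config_name: str) -> None:
        super().__init__(config_name)

    @DBOS.workflow()
    def report(self) -> str:
        return self.config_name


config: DBOSConfig = {"name": "example-app"}  # illustrative minimal config
DBOS(config=config)
reporter = Reporter("daily")  # instantiate (and register) the configured instance...
DBOS.launch()                 # ...before launch, so recovery can find it
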

{dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_error.py
@@ -106,7 +106,7 @@ class DBOSWorkflowFunctionNotFoundError(DBOSException):

     def __init__(self, workflow_id: str, message: Optional[str] = None):
         super().__init__(
-            f"Workflow function not found for workflow ID {workflow_id}: {message}",
+            f"Could not execute workflow {workflow_id}: {message}",
             dbos_error_code=DBOSErrorCode.WorkflowFunctionNotFound.value,
         )


{dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_recovery.py
@@ -2,6 +2,7 @@ import threading
 import time
 from typing import TYPE_CHECKING, Any, List

+from dbos._context import UseLogAttributes
 from dbos._utils import GlobalParams

 from ._core import execute_workflow_by_id
@@ -29,17 +30,19 @@ startup_recovery_thread(
     stop_event = threading.Event()
     dbos.background_thread_stop_events.append(stop_event)
     while not stop_event.is_set() and len(pending_workflows) > 0:
-        try:
-            for pending_workflow in list(pending_workflows):
+        for pending_workflow in list(pending_workflows):
+            try:
                 _recover_workflow(dbos, pending_workflow)
                 pending_workflows.remove(pending_workflow)
-        except DBOSWorkflowFunctionNotFoundError:
-            time.sleep(1)
-        except Exception as e:
-            dbos.logger.error(
-                f"Exception encountered when recovering workflows:", exc_info=e
-            )
-            raise
+            except DBOSWorkflowFunctionNotFoundError:
+                time.sleep(1)
+            except Exception as e:
+                with UseLogAttributes(workflow_id=pending_workflow.workflow_uuid):
+                    dbos.logger.error(
+                        f"Exception encountered when recovering workflow {pending_workflow.workflow_uuid}:",
+                        exc_info=e,
+                    )
+                raise


 def recover_pending_workflows(
@@ -56,9 +59,11 @@ recover_pending_workflows(
             handle = _recover_workflow(dbos, pending_workflow)
             workflow_handles.append(handle)
         except Exception as e:
-            dbos.logger.error(
-                f"Exception encountered when recovering workflows:", exc_info=e
-            )
+            with UseLogAttributes(workflow_id=pending_workflow.workflow_uuid):
+                dbos.logger.error(
+                    f"Exception encountered when recovering workflow {pending_workflow.workflow_uuid}:",
+                    exc_info=e,
+                )
             raise
     dbos.logger.info(
         f"Recovering {len(pending_workflows)} workflows for executor {executor_id} from version {GlobalParams.app_version}"

{dbos-1.11.0a6 → dbos-1.12.0a2}/dbos/_registrations.py
@@ -13,7 +13,7 @@ def get_dbos_func_name(f: Any) -> str:
     if hasattr(f, "dbos_function_name"):
         return str(getattr(f, "dbos_function_name"))
     raise DBOSWorkflowFunctionNotFoundError(
-        "<NONE>", f"function {f.__name__} is not registered"
+        "<NONE>", f"{f.__name__} is not a registered workflow function"
     )



{dbos-1.11.0a6 → dbos-1.12.0a2}/pyproject.toml
@@ -27,7 +27,7 @@ dependencies = [
 ]
 requires-python = ">=3.9"
 readme = "README.md"
-version = "1.11.0a6"
+version = "1.12.0a2"

 [project.license]
 text = "MIT"

{dbos-1.11.0a6 → dbos-1.12.0a2}/tests/test_async.py
@@ -329,30 +329,6 @@ def test_async_tx_raises(config: ConfigFile) -> None:
     DBOS.destroy(destroy_registry=True)


-@pytest.mark.asyncio
-async def test_async_step_temp(dbos: DBOS) -> None:
-    step_counter: int = 0
-
-    @DBOS.step()
-    async def test_step(var: str) -> str:
-        await asyncio.sleep(0.1)
-        nonlocal step_counter
-        step_counter += 1
-        DBOS.logger.info("I'm test_step")
-        return var + f"step{step_counter}"
-
-    wfuuid = f"test_async_step_temp-{time.time_ns()}"
-    with SetWorkflowID(wfuuid):
-        result = await test_step("alice")
-        assert result == "alicestep1"
-
-    with SetWorkflowID(wfuuid):
-        result = await test_step("alice")
-        assert result == "alicestep1"
-
-    assert step_counter == 1
-
-
 @pytest.mark.asyncio
 async def test_start_workflow_async(dbos: DBOS) -> None:
     wf_counter: int = 0

{dbos-1.11.0a6 → dbos-1.12.0a2}/tests/test_classdecorators.py
@@ -574,7 +574,7 @@ def test_step_recovery(dbos: DBOS) -> None:
     def call_step() -> None:
         with SetWorkflowID(wfid):
             nonlocal return_value
-            return_value = inst.step(input)
+            return_value = DBOS.start_workflow(inst.step, input).get_result()

     thread = threading.Thread(target=call_step)
     thread.start()

{dbos-1.11.0a6 → dbos-1.12.0a2}/tests/test_concurrency.py
@@ -59,10 +59,6 @@ def test_concurrent_conflict_uuid(dbos: DBOS) -> None:
         res = test_step()
         return res

-    def test_comm_thread(id: str) -> str:
-        with SetWorkflowID(id):
-            return test_step()
-
     # Need to set isolation level to a lower one, otherwise it gets serialization error instead (we already handle it correctly by automatic retries).
     @DBOS.transaction(isolation_level="REPEATABLE READ")
     def test_transaction() -> str:
@@ -97,15 +93,6 @@ def test_concurrent_conflict_uuid(dbos: DBOS) -> None:
     assert wf_handle1.get_result() == wfuuid
     assert wf_handle2.get_result() == wfuuid

-    # Make sure temp workflows can handle conflicts as well.
-    wfuuid = str(uuid.uuid4())
-    with ThreadPoolExecutor(max_workers=2) as executor:
-        future1 = executor.submit(test_comm_thread, wfuuid)
-        future2 = executor.submit(test_comm_thread, wfuuid)
-
-        assert future1.result() == wfuuid
-        assert future2.result() == wfuuid
-
     # Make sure temp transactions can handle conflicts as well.
     wfuuid = str(uuid.uuid4())
     with ThreadPoolExecutor(max_workers=2) as executor:

{dbos-1.11.0a6 → dbos-1.12.0a2}/tests/test_dbos.py
@@ -307,16 +307,12 @@ def test_temp_workflow(dbos: DBOS) -> None:
     assert res == "var"

     wfs = dbos._sys_db.get_workflows(gwi)
-    assert len(wfs) == 2
+    assert len(wfs) == 1

     wfi1 = dbos._sys_db.get_workflow_status(wfs[0].workflow_id)
     assert wfi1
     assert wfi1["name"].startswith("<temp>")

-    wfi2 = dbos._sys_db.get_workflow_status(wfs[1].workflow_id)
-    assert wfi2
-    assert wfi2["name"].startswith("<temp>")
-
     assert txn_counter == 1
     assert step_counter == 1

@@ -350,7 +346,7 @@ def test_temp_workflow_errors(dbos: DBOS) -> None:
     def test_retried_step(var: str) -> str:
         nonlocal retried_step_counter
         retried_step_counter += 1
-        raise Exception(var)
+        raise ValueError(var)

     with pytest.raises(Exception) as exc_info:
         test_transaction("tval")
@@ -360,12 +356,12 @@ def test_temp_workflow_errors(dbos: DBOS) -> None:
         test_step("cval")
     assert "cval" == str(exc_info.value)

-    with pytest.raises(DBOSMaxStepRetriesExceeded) as exc_info:
+    with pytest.raises(ValueError) as exc_info:
         test_retried_step("rval")

     assert txn_counter == 1
     assert step_counter == 1
-    assert retried_step_counter == 3
+    assert retried_step_counter == 1


 def test_recovery_workflow(dbos: DBOS) -> None:
@@ -1102,9 +1098,6 @@ def test_nonserializable_values(dbos: DBOS) -> None:
     with pytest.raises(Exception) as exc_info:
         test_ns_transaction("h")
     assert "data item should not be a function" in str(exc_info.value)
-    with pytest.raises(Exception) as exc_info:
-        test_ns_step("f")
-    assert "data item should not be a function" in str(exc_info.value)
     with pytest.raises(Exception) as exc_info:
         test_ns_wf("g")
     assert "data item should not be a function" in str(exc_info.value)
@@ -1645,22 +1638,14 @@ def test_custom_names(dbos: DBOS) -> None:
 async def test_step_without_dbos(dbos: DBOS, config: DBOSConfig) -> None:
     DBOS.destroy(destroy_registry=True)

-    is_dbos_active = False
-
     @DBOS.step()
     def step(x: int) -> int:
-        if is_dbos_active:
-            assert DBOS.workflow_id is not None
-        else:
-            assert DBOS.workflow_id is None
+        assert DBOS.workflow_id is None
         return x

     @DBOS.step()
     async def async_step(x: int) -> int:
-        if is_dbos_active:
-            assert DBOS.workflow_id is not None
-        else:
-            assert DBOS.workflow_id is None
+        assert DBOS.workflow_id is None
         return x

     assert step(5) == 5
@@ -1672,7 +1657,30 @@ async def test_step_without_dbos(dbos: DBOS, config: DBOSConfig) -> None:
     assert await async_step(5) == 5

     DBOS.launch()
-    is_dbos_active = True

     assert step(5) == 5
     assert await async_step(5) == 5
+
+    assert len(DBOS.list_workflows()) == 0
+
+
+def test_nested_steps(dbos: DBOS) -> None:
+
+    @DBOS.step()
+    def outer_step() -> str:
+        return inner_step()
+
+    @DBOS.step()
+    def inner_step() -> str:
+        id = DBOS.workflow_id
+        assert id is not None
+        return id
+
+    @DBOS.workflow()
+    def workflow() -> str:
+        return outer_step()
+
+    id = workflow()
+    steps = DBOS.list_workflow_steps(id)
+    assert len(steps) == 1
+    assert steps[0]["function_name"] == outer_step.__qualname__

{dbos-1.11.0a6 → dbos-1.12.0a2}/tests/test_failures.py
@@ -8,12 +8,14 @@ from psycopg.errors import SerializationFailure
 from sqlalchemy.exc import InvalidRequestError, OperationalError

 from dbos import DBOS, Queue, SetWorkflowID
+from dbos._dbos_config import DBOSConfig
 from dbos._error import (
     DBOSAwaitedWorkflowCancelledError,
     DBOSMaxStepRetriesExceeded,
     DBOSNotAuthorizedError,
     DBOSQueueDeduplicatedError,
     DBOSUnexpectedStepError,
+    DBOSWorkflowFunctionNotFoundError,
     MaxRecoveryAttemptsExceededError,
 )
 from dbos._registrations import DEFAULT_MAX_RECOVERY_ATTEMPTS
@@ -331,15 +333,8 @@ def test_step_retries(dbos: DBOS) -> None:
     error_message = f"Step {failing_step.__qualname__} has exceeded its maximum of {max_attempts} retries"

     # Test calling the step directly
-    with pytest.raises(DBOSMaxStepRetriesExceeded) as excinfo:
+    with pytest.raises(Exception) as excinfo:
         failing_step()
-    assert error_message in str(excinfo.value)
-    assert step_counter == max_attempts
-    assert len(excinfo.value.errors) == max_attempts
-    for error in excinfo.value.errors:
-        assert isinstance(error, Exception)
-        assert error
-        assert "fail" in str(error)

     # Test calling the workflow
     step_counter = 0
@@ -347,6 +342,11 @@ def test_step_retries(dbos: DBOS) -> None:
         failing_workflow()
     assert error_message in str(excinfo.value)
     assert step_counter == max_attempts
+    assert len(excinfo.value.errors) == max_attempts
+    for error in excinfo.value.errors:
+        assert isinstance(error, Exception)
+        assert error
+        assert "fail" in str(error)

     # Test enqueueing the step
     step_counter = 0
@@ -397,7 +397,6 @@ def test_step_status(dbos: DBOS) -> None:

     assert failing_workflow() == None
     step_counter = 0
-    assert failing_step() == None


 def test_recovery_during_retries(dbos: DBOS) -> None:
@@ -499,3 +498,24 @@ def test_error_serialization() -> None:
     assert output is None
     assert isinstance(exception, str)
     assert "Message: 1, 2" in exception
+
+
+def test_unregistered_workflow(dbos: DBOS, config: DBOSConfig) -> None:
+
+    @DBOS.workflow()
+    def workflow() -> None:
+        return
+
+    wfid = str(uuid.uuid4())
+    with SetWorkflowID(wfid):
+        workflow()
+
+    dbos._sys_db.update_workflow_outcome(wfid, "PENDING")
+
+    DBOS.destroy(destroy_registry=True)
+    config["executor_id"] = str(uuid.uuid4())
+    DBOS(config=config)
+    DBOS.launch()
+
+    with pytest.raises(DBOSWorkflowFunctionNotFoundError):
+        DBOS._recover_pending_workflows()

{dbos-1.11.0a6 → dbos-1.12.0a2}/tests/test_queue.py
@@ -181,7 +181,8 @@ def test_queue_step(dbos: DBOS) -> None:
     handle = queue.enqueue(test_step, "abc")
     assert handle.get_result() == "abc1"
     with SetWorkflowID(wfid):
-        assert test_step("abc") == "abc1"
+        handle = queue.enqueue(test_step, "abc")
+        assert handle.get_result() == "abc1"
     assert step_counter == 1

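Related to the test_queue.py change above: because a step called directly outside a workflow is no longer durable, the test now enqueues the step under a fixed workflow ID to exercise exactly-once behavior. A minimal sketch of that pattern, assuming a configured and launched DBOS; the queue name, step, and workflow ID are illustrative.

from dbos import DBOS, Queue, SetWorkflowID

queue = Queue("example_queue")


@DBOS.step()
def add_one(x: int) -> int:
    return x + 1


# Enqueueing under a fixed workflow ID makes the call durable and idempotent:
# re-enqueueing with the same ID returns the recorded result instead of rerunning.
with SetWorkflowID("example-add-one-wfid"):
    handle = queue.enqueue(add_one, 41)
print(handle.get_result())  # 42
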

{dbos-1.11.0a6 → dbos-1.12.0a2}/tests/test_spans.py
@@ -53,11 +53,10 @@ def test_spans(config: DBOSConfig) -> None:
     dbos_logger.addHandler(LoggingHandler(logger_provider=log_provider))

     test_workflow()
-    test_step()

     log_processor.force_flush(timeout_millis=5000)
     logs = log_exporter.get_finished_logs()
-    assert len(logs) == 3
+    assert len(logs) == 2
     for log in logs:
         assert log.log_record.attributes is not None
         assert (
@@ -78,7 +77,7 @@ def test_spans(config: DBOSConfig) -> None:

     spans = exporter.get_finished_spans()

-    assert len(spans) == 5
+    assert len(spans) == 3

     for span in spans:
         assert span.attributes is not None
@@ -92,26 +91,19 @@ def test_spans(config: DBOSConfig) -> None:
     assert spans[0].name == test_step.__qualname__
     assert spans[1].name == "a new span"
     assert spans[2].name == test_workflow.__qualname__
-    assert spans[3].name == test_step.__qualname__
-    assert spans[4].name == f"<temp>.{test_step.__qualname__}"

     assert spans[0].parent.span_id == spans[2].context.span_id  # type: ignore
     assert spans[1].parent.span_id == spans[2].context.span_id  # type: ignore
     assert spans[2].parent == None
-    assert spans[3].parent.span_id == spans[4].context.span_id  # type: ignore
-    assert spans[4].parent == None

     # Span ID and trace ID should match the log record
     # For pyright
     assert spans[0].context is not None
     assert spans[2].context is not None
-    assert spans[3].context is not None
     assert logs[0].log_record.span_id == spans[0].context.span_id
     assert logs[0].log_record.trace_id == spans[0].context.trace_id
     assert logs[1].log_record.span_id == spans[2].context.span_id
     assert logs[1].log_record.trace_id == spans[2].context.trace_id
-    assert logs[2].log_record.span_id == spans[3].context.span_id
-    assert logs[2].log_record.trace_id == spans[3].context.trace_id


 @pytest.mark.asyncio
@@ -147,11 +139,10 @@ async def test_spans_async(dbos: DBOS) -> None:
     dbos_logger.addHandler(LoggingHandler(logger_provider=log_provider))

     await test_workflow()
-    await test_step()

     log_processor.force_flush(timeout_millis=5000)
     logs = log_exporter.get_finished_logs()
-    assert len(logs) == 3
+    assert len(logs) == 2
     for log in logs:
         assert log.log_record.attributes is not None
         assert (
@@ -171,7 +162,7 @@ async def test_spans_async(dbos: DBOS) -> None:

     spans = exporter.get_finished_spans()

-    assert len(spans) == 5
+    assert len(spans) == 3

     for span in spans:
         assert span.attributes is not None
@@ -184,34 +175,27 @@ async def test_spans_async(dbos: DBOS) -> None:
     assert spans[0].name == test_step.__qualname__
     assert spans[1].name == "a new span"
     assert spans[2].name == test_workflow.__qualname__
-    assert spans[3].name == test_step.__qualname__
-    assert spans[4].name == f"<temp>.{test_step.__qualname__}"

     assert spans[0].parent.span_id == spans[2].context.span_id  # type: ignore
     assert spans[1].parent.span_id == spans[2].context.span_id  # type: ignore
     assert spans[2].parent == None
-    assert spans[3].parent.span_id == spans[4].context.span_id  # type: ignore
-    assert spans[4].parent == None

     # Span ID and trace ID should match the log record
     assert spans[0].context is not None
     assert spans[2].context is not None
-    assert spans[3].context is not None
     assert logs[0].log_record.span_id == spans[0].context.span_id
     assert logs[0].log_record.trace_id == spans[0].context.trace_id
     assert logs[1].log_record.span_id == spans[2].context.span_id
     assert logs[1].log_record.trace_id == spans[2].context.trace_id
-    assert logs[2].log_record.span_id == spans[3].context.span_id
-    assert logs[2].log_record.trace_id == spans[3].context.trace_id


-def test_temp_wf_fastapi(dbos_fastapi: Tuple[DBOS, FastAPI]) -> None:
+def test_wf_fastapi(dbos_fastapi: Tuple[DBOS, FastAPI]) -> None:
     dbos, app = dbos_fastapi

-    @app.get("/step")
-    @DBOS.step()
-    def test_step_endpoint() -> str:
-        dbos.logger.info("This is a test_step_endpoint")
+    @app.get("/wf")
+    @DBOS.workflow()
+    def test_workflow_endpoint() -> str:
+        dbos.logger.info("This is a test_workflow_endpoint")
         return "test"

     exporter = InMemorySpanExporter()
@@ -229,7 +213,7 @@ def test_temp_wf_fastapi(dbos_fastapi: Tuple[DBOS, FastAPI]) -> None:
     dbos_logger.addHandler(LoggingHandler(logger_provider=log_provider))

     client = TestClient(app)
-    response = client.get("/step")
+    response = client.get("/wf")
     assert response.status_code == 200
     assert response.text == '"test"'

@@ -242,14 +226,14 @@ def test_temp_wf_fastapi(dbos_fastapi: Tuple[DBOS, FastAPI]) -> None:
     )
     assert logs[0].log_record.span_id is not None and logs[0].log_record.span_id > 0
     assert logs[0].log_record.trace_id is not None and logs[0].log_record.trace_id > 0
-    assert logs[0].log_record.body == "This is a test_step_endpoint"
+    assert logs[0].log_record.body == "This is a test_workflow_endpoint"
     assert logs[0].log_record.attributes["traceId"] == format_trace_id(
         logs[0].log_record.trace_id
     )

     spans = exporter.get_finished_spans()

-    assert len(spans) == 3
+    assert len(spans) == 2

     for span in spans:
         assert span.attributes is not None
@@ -258,13 +242,11 @@ def test_temp_wf_fastapi(dbos_fastapi: Tuple[DBOS, FastAPI]) -> None:
         assert span.context.span_id > 0
         assert span.context.trace_id > 0

-    assert spans[0].name == test_step_endpoint.__qualname__
-    assert spans[1].name == f"<temp>.{test_step_endpoint.__qualname__}"
-    assert spans[2].name == "/step"
+    assert spans[0].name == test_workflow_endpoint.__qualname__
+    assert spans[1].name == "/wf"

     assert spans[0].parent.span_id == spans[1].context.span_id  # type: ignore
-    assert spans[1].parent.span_id == spans[2].context.span_id  # type: ignore
-    assert spans[2].parent == None
+    assert spans[1].parent == None

     # Span ID and trace ID should match the log record
     assert spans[0].context is not None