dbos-0.25.0a1.tar.gz → dbos-0.25.0a7.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dbos might be problematic.

Files changed (99)
  1. {dbos-0.25.0a1 → dbos-0.25.0a7}/PKG-INFO +1 -1
  2. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/__main__.py +3 -0
  3. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_admin_server.py +20 -2
  4. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_context.py +3 -0
  5. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_core.py +74 -4
  6. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_db_wizard.py +3 -7
  7. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_dbos.py +51 -0
  8. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_dbos_config.py +2 -1
  9. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_fastapi.py +4 -1
  10. dbos-0.25.0a7/dbos/_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py +46 -0
  11. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_schemas/system_database.py +2 -0
  12. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_sys_db.py +80 -0
  13. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_templates/dbos-db-starter/__package/main.py +6 -11
  14. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +2 -4
  15. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_workflow_commands.py +6 -0
  16. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/cli/_template_init.py +8 -3
  17. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/cli/cli.py +17 -1
  18. {dbos-0.25.0a1 → dbos-0.25.0a7}/pyproject.toml +1 -1
  19. {dbos-0.25.0a1 → dbos-0.25.0a7}/tests/test_async.py +27 -0
  20. {dbos-0.25.0a1 → dbos-0.25.0a7}/tests/test_config.py +1 -2
  21. {dbos-0.25.0a1 → dbos-0.25.0a7}/tests/test_dbos.py +6 -2
  22. {dbos-0.25.0a1 → dbos-0.25.0a7}/tests/test_failures.py +54 -0
  23. {dbos-0.25.0a1 → dbos-0.25.0a7}/tests/test_package.py +1 -2
  24. {dbos-0.25.0a1 → dbos-0.25.0a7}/tests/test_workflow_cmds.py +325 -0
  25. {dbos-0.25.0a1 → dbos-0.25.0a7}/LICENSE +0 -0
  26. {dbos-0.25.0a1 → dbos-0.25.0a7}/README.md +0 -0
  27. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/__init__.py +0 -0
  28. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_app_db.py +0 -0
  29. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_classproperty.py +0 -0
  30. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_cloudutils/authentication.py +0 -0
  31. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_cloudutils/cloudutils.py +0 -0
  32. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_cloudutils/databases.py +0 -0
  33. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_conductor/conductor.py +0 -0
  34. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_conductor/protocol.py +0 -0
  35. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_croniter.py +0 -0
  36. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_debug.py +0 -0
  37. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_error.py +0 -0
  38. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_flask.py +0 -0
  39. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_kafka.py +0 -0
  40. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_kafka_message.py +0 -0
  41. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_logger.py +0 -0
  42. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_migrations/env.py +0 -0
  43. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_migrations/script.py.mako +0 -0
  44. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_migrations/versions/04ca4f231047_workflow_queues_executor_id.py +0 -0
  45. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_migrations/versions/50f3227f0b4b_fix_job_queue.py +0 -0
  46. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_migrations/versions/5c361fc04708_added_system_tables.py +0 -0
  47. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_migrations/versions/a3b18ad34abe_added_triggers.py +0 -0
  48. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_migrations/versions/d76646551a6b_job_queue_limiter.py +0 -0
  49. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_migrations/versions/d76646551a6c_workflow_queue.py +0 -0
  50. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_migrations/versions/eab0cc1d9a14_job_queue.py +0 -0
  51. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_outcome.py +0 -0
  52. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_queue.py +0 -0
  53. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_recovery.py +0 -0
  54. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_registrations.py +0 -0
  55. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_request.py +0 -0
  56. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_roles.py +0 -0
  57. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_scheduler.py +0 -0
  58. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_schemas/__init__.py +0 -0
  59. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_schemas/application_database.py +0 -0
  60. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_serialization.py +0 -0
  61. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_templates/dbos-db-starter/README.md +0 -0
  62. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_templates/dbos-db-starter/__package/__init__.py +0 -0
  63. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_templates/dbos-db-starter/__package/schema.py +0 -0
  64. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_templates/dbos-db-starter/alembic.ini +0 -0
  65. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_templates/dbos-db-starter/migrations/env.py.dbos +0 -0
  66. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_templates/dbos-db-starter/migrations/script.py.mako +0 -0
  67. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py +0 -0
  68. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -0
  69. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_tracer.py +0 -0
  70. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/_utils.py +0 -0
  71. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/cli/_github_init.py +0 -0
  72. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/dbos-config.schema.json +0 -0
  73. {dbos-0.25.0a1 → dbos-0.25.0a7}/dbos/py.typed +0 -0
  74. {dbos-0.25.0a1 → dbos-0.25.0a7}/tests/__init__.py +0 -0
  75. {dbos-0.25.0a1 → dbos-0.25.0a7}/tests/atexit_no_ctor.py +0 -0
  76. {dbos-0.25.0a1 → dbos-0.25.0a7}/tests/atexit_no_launch.py +0 -0
  77. {dbos-0.25.0a1 → dbos-0.25.0a7}/tests/classdefs.py +0 -0
  78. {dbos-0.25.0a1 → dbos-0.25.0a7}/tests/conftest.py +0 -0
  79. {dbos-0.25.0a1 → dbos-0.25.0a7}/tests/more_classdefs.py +0 -0
  80. {dbos-0.25.0a1 → dbos-0.25.0a7}/tests/queuedworkflow.py +0 -0
  81. {dbos-0.25.0a1 → dbos-0.25.0a7}/tests/test_admin_server.py +0 -0
  82. {dbos-0.25.0a1 → dbos-0.25.0a7}/tests/test_classdecorators.py +0 -0
  83. {dbos-0.25.0a1 → dbos-0.25.0a7}/tests/test_concurrency.py +0 -0
  84. {dbos-0.25.0a1 → dbos-0.25.0a7}/tests/test_croniter.py +0 -0
  85. {dbos-0.25.0a1 → dbos-0.25.0a7}/tests/test_dbwizard.py +0 -0
  86. {dbos-0.25.0a1 → dbos-0.25.0a7}/tests/test_debug.py +0 -0
  87. {dbos-0.25.0a1 → dbos-0.25.0a7}/tests/test_fastapi.py +0 -0
  88. {dbos-0.25.0a1 → dbos-0.25.0a7}/tests/test_fastapi_roles.py +0 -0
  89. {dbos-0.25.0a1 → dbos-0.25.0a7}/tests/test_flask.py +0 -0
  90. {dbos-0.25.0a1 → dbos-0.25.0a7}/tests/test_kafka.py +0 -0
  91. {dbos-0.25.0a1 → dbos-0.25.0a7}/tests/test_outcome.py +0 -0
  92. {dbos-0.25.0a1 → dbos-0.25.0a7}/tests/test_queue.py +0 -0
  93. {dbos-0.25.0a1 → dbos-0.25.0a7}/tests/test_scheduler.py +0 -0
  94. {dbos-0.25.0a1 → dbos-0.25.0a7}/tests/test_schema_migration.py +0 -0
  95. {dbos-0.25.0a1 → dbos-0.25.0a7}/tests/test_singleton.py +0 -0
  96. {dbos-0.25.0a1 → dbos-0.25.0a7}/tests/test_spans.py +0 -0
  97. {dbos-0.25.0a1 → dbos-0.25.0a7}/tests/test_sqlalchemy.py +0 -0
  98. {dbos-0.25.0a1 → dbos-0.25.0a7}/tests/test_workflow_cancel.py +0 -0
  99. {dbos-0.25.0a1 → dbos-0.25.0a7}/version/__init__.py +0 -0
--- dbos-0.25.0a1/PKG-INFO
+++ dbos-0.25.0a7/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dbos
-Version: 0.25.0a1
+Version: 0.25.0a7
 Summary: Ultra-lightweight durable execution in Python
 Author-Email: "DBOS, Inc." <contact@dbos.dev>
 License: MIT
--- dbos-0.25.0a1/dbos/__main__.py
+++ dbos-0.25.0a7/dbos/__main__.py
@@ -4,6 +4,9 @@ from typing import NoReturn, Optional, Union
 
 from dbos.cli.cli import app
 
+# This is used by the debugger to execute DBOS as a module.
+# Never used otherwise.
+
 
 def main() -> NoReturn:
     # Modify sys.argv[0] to remove script or executable extensions
--- dbos-0.25.0a1/dbos/_admin_server.py
+++ dbos-0.25.0a7/dbos/_admin_server.py
@@ -7,6 +7,8 @@ from functools import partial
 from http.server import BaseHTTPRequestHandler, ThreadingHTTPServer
 from typing import TYPE_CHECKING, Any, List, TypedDict
 
+import jsonpickle  # type: ignore
+
 from ._logger import dbos_logger
 from ._recovery import recover_pending_workflows
 
@@ -20,6 +22,7 @@ _workflow_queues_metadata_path = "/dbos-workflow-queues-metadata"
 # /workflows/:workflow_id/cancel
 # /workflows/:workflow_id/resume
 # /workflows/:workflow_id/restart
+# /workflows/:workflow_id/steps
 
 
 class AdminServer:
@@ -86,8 +89,16 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
             self._end_headers()
             self.wfile.write(json.dumps(queue_metadata_array).encode("utf-8"))
         else:
-            self.send_response(404)
-            self._end_headers()
+            steps_match = re.match(
+                r"^/workflows/(?P<workflow_id>[^/]+)/steps$", self.path
+            )
+
+            if steps_match:
+                workflow_id = steps_match.group("workflow_id")
+                self._handle_steps(workflow_id)
+            else:
+                self.send_response(404)
+                self._end_headers()
 
     def do_POST(self) -> None:
         content_length = int(
@@ -149,6 +160,13 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
         self.send_response(204)
         self._end_headers()
 
+    def _handle_steps(self, workflow_id: str) -> None:
+        steps = self.dbos._sys_db.get_workflow_steps(workflow_id)
+        json_steps = jsonpickle.encode(steps, unpicklable=False).encode("utf-8")
+        self.send_response(200)
+        self._end_headers()
+        self.wfile.write(json_steps)
+
 
 # Be consistent with DBOS-TS response.
 class PerfUtilization(TypedDict):
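
The new steps endpoint can be exercised with a plain HTTP GET once an application is running. A minimal sketch, assuming the admin server is listening on its default port (3001) and that "my-workflow-id" names a previously executed workflow (both values are illustrative, not from the diff):

import json
from urllib.request import urlopen

# Fetch the recorded steps of a workflow from the DBOS admin server.
# The port and workflow ID below are assumptions for illustration.
with urlopen("http://localhost:3001/workflows/my-workflow-id/steps") as resp:
    steps = json.loads(resp.read())

# Each entry mirrors the StepInfo shape introduced in _sys_db.py below.
for step in steps:
    print(step["function_id"], step["function_name"], step["child_workflow_id"])
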
--- dbos-0.25.0a1/dbos/_context.py
+++ dbos-0.25.0a7/dbos/_context.py
@@ -92,6 +92,9 @@ class DBOSContext:
         rv.assumed_role = self.assumed_role
         return rv
 
+    def has_parent(self) -> bool:
+        return len(self.parent_workflow_id) > 0
+
     def assign_workflow_id(self) -> str:
         if len(self.id_assigned_for_next_workflow) > 0:
             wfid = self.id_assigned_for_next_workflow
--- dbos-0.25.0a1/dbos/_core.py
+++ dbos-0.25.0a7/dbos/_core.py
@@ -243,12 +243,15 @@ def _init_workflow(
         wf_status = dbos._sys_db.insert_workflow_status(
            status, max_recovery_attempts=max_recovery_attempts
         )
+
         # TODO: Modify the inputs if they were changed by `update_workflow_inputs`
         dbos._sys_db.update_workflow_inputs(
             wfid, _serialization.serialize_args(inputs)
         )
+
     else:
         # Buffer the inputs for single-transaction workflows, but don't buffer the status
+
         dbos._sys_db.buffer_workflow_inputs(
             wfid, _serialization.serialize_args(inputs)
         )
@@ -475,6 +478,15 @@
 
    new_wf_id, new_wf_ctx = _get_new_wf()
 
+    ctx = new_wf_ctx
+    new_child_workflow_id = ctx.id_assigned_for_next_workflow
+    if ctx.has_parent():
+        child_workflow_id = dbos._sys_db.check_child_workflow(
+            ctx.parent_workflow_id, ctx.parent_workflow_fid
+        )
+        if child_workflow_id is not None:
+            return WorkflowHandlePolling(child_workflow_id, dbos)
+
    status = _init_workflow(
        dbos,
        new_wf_ctx,
@@ -488,6 +500,13 @@
    )
 
    wf_status = status["status"]
+    if ctx.has_parent():
+        dbos._sys_db.record_child_workflow(
+            ctx.parent_workflow_id,
+            new_child_workflow_id,
+            ctx.parent_workflow_fid,
+            func.__name__,
+        )
 
    if not execute_workflow or (
        not dbos.debug_mode
@@ -544,6 +563,17 @@
 
    new_wf_id, new_wf_ctx = _get_new_wf()
 
+    ctx = new_wf_ctx
+    new_child_workflow_id = ctx.id_assigned_for_next_workflow
+    if ctx.has_parent():
+        child_workflow_id = await asyncio.to_thread(
+            dbos._sys_db.check_child_workflow,
+            ctx.parent_workflow_id,
+            ctx.parent_workflow_fid,
+        )
+        if child_workflow_id is not None:
+            return WorkflowHandleAsyncPolling(child_workflow_id, dbos)
+
    status = await asyncio.to_thread(
        _init_workflow,
        dbos,
@@ -557,6 +587,15 @@
        max_recovery_attempts=fi.max_recovery_attempts,
    )
 
+    if ctx.has_parent():
+        await asyncio.to_thread(
+            dbos._sys_db.record_child_workflow,
+            ctx.parent_workflow_id,
+            new_child_workflow_id,
+            ctx.parent_workflow_fid,
+            func.__name__,
+        )
+
    wf_status = status["status"]
 
    if not execute_workflow or (
@@ -599,6 +638,8 @@
 ) -> Callable[P, R]:
     func.__orig_func = func  # type: ignore
 
+    funcName = func.__name__
+
     fi = get_or_create_func_info(func)
     fi.max_recovery_attempts = max_recovery_attempts
 
@@ -629,7 +670,24 @@
         wfOutcome = Outcome[R].make(functools.partial(func, *args, **kwargs))
 
         def init_wf() -> Callable[[Callable[[], R]], R]:
+
+            def recorded_result(
+                c_wfid: str, dbos: "DBOS"
+            ) -> Callable[[Callable[[], R]], R]:
+                def recorded_result_inner(func: Callable[[], R]) -> R:
+                    return WorkflowHandlePolling(c_wfid, dbos).get_result()
+
+                return recorded_result_inner
+
             ctx = assert_current_dbos_context()  # Now the child ctx
+
+            if ctx.has_parent():
+                child_workflow_id = dbos._sys_db.check_child_workflow(
+                    ctx.parent_workflow_id, ctx.parent_workflow_fid
+                )
+                if child_workflow_id is not None:
+                    return recorded_result(child_workflow_id, dbos)
+
             status = _init_workflow(
                 dbos,
                 ctx,
@@ -640,11 +698,20 @@
                 temp_wf_type=get_temp_workflow_type(func),
                 max_recovery_attempts=max_recovery_attempts,
             )
+
             # TODO: maybe modify the parameters if they've been changed by `_init_workflow`
             dbos.logger.debug(
                 f"Running workflow, id: {ctx.workflow_id}, name: {get_dbos_func_name(func)}"
             )
 
+            if ctx.has_parent():
+                dbos._sys_db.record_child_workflow(
+                    ctx.parent_workflow_id,
+                    ctx.workflow_id,
+                    ctx.parent_workflow_fid,
+                    funcName,
+                )
+
             return _get_wf_invoke_func(dbos, status)
 
         outcome = (
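
Taken together, check_child_workflow and record_child_workflow make child-workflow invocations idempotent across recovery: the first execution records the child's ID in operation_outputs against the parent's (workflow_uuid, function_id), and a re-executed parent finds that ID and polls for the recorded result instead of starting a duplicate child. A minimal sketch of the pattern this enables (workflow names and logic are illustrative, not from the diff):

from dbos import DBOS

@DBOS.workflow()
def child_workflow(x: int) -> int:
    return x * 2

@DBOS.workflow()
def parent_workflow() -> int:
    # On first execution, the child's workflow ID is recorded; on recovery,
    # the recorded ID is found and the child's stored result is returned
    # instead of running child_workflow a second time.
    return child_workflow(21)
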
@@ -853,6 +920,8 @@
 ) -> Callable[[Callable[P, R]], Callable[P, R]]:
     def decorator(func: Callable[P, R]) -> Callable[P, R]:
 
+        stepName = func.__name__
+
         def invoke_step(*args: Any, **kwargs: Any) -> Any:
             if dbosreg.dbos is None:
                 raise DBOSException(
@@ -897,19 +966,20 @@
             step_output: OperationResultInternal = {
                 "workflow_uuid": ctx.workflow_id,
                 "function_id": ctx.function_id,
+                "function_name": stepName,
                 "output": None,
                 "error": None,
             }
 
             try:
                 output = func()
-                step_output["output"] = _serialization.serialize(output)
-                return output
             except Exception as error:
                 step_output["error"] = _serialization.serialize_exception(error)
-                raise
-            finally:
                 dbos._sys_db.record_operation_result(step_output)
+                raise
+            step_output["output"] = _serialization.serialize(output)
+            dbos._sys_db.record_operation_result(step_output)
+            return output
 
         def check_existing_result() -> Union[NoResult, R]:
             ctx = assert_current_dbos_context()
--- dbos-0.25.0a1/dbos/_db_wizard.py
+++ dbos-0.25.0a7/dbos/_db_wizard.py
@@ -49,6 +49,7 @@ def db_wizard(config: "ConfigFile") -> "ConfigFile":
 
     # 2. If the error is due to password authentication or the configuration is non-default, surface the error and exit.
     error_str = str(db_connection_error)
+    dbos_logger.debug(f"Error connecting to Postgres: {error_str}")
     if (
         "password authentication failed" in error_str
         or "28P01" in error_str
@@ -182,17 +183,12 @@ def _check_db_connectivity(config: "ConfigFile") -> Optional[Exception]:
         host=config["database"]["hostname"],
         port=config["database"]["port"],
         database="postgres",
-        query={"connect_timeout": "1"},
+        query={"connect_timeout": "2"},
     )
     postgres_db_engine = create_engine(postgres_db_url)
     try:
         with postgres_db_engine.connect() as conn:
-            val = conn.execute(text("SELECT 1")).scalar()
-            if val != 1:
-                dbos_logger.error(
-                    f"Unexpected value returned from database: expected 1, received {val}"
-                )
-                return Exception()
+            conn.execute(text("SELECT 1")).scalar()
     except Exception as e:
         return e
     finally:
--- dbos-0.25.0a1/dbos/_dbos.py
+++ dbos-0.25.0a7/dbos/_dbos.py
@@ -39,6 +39,7 @@ from dbos._utils import GlobalParams
 from ._classproperty import classproperty
 from ._core import (
     TEMP_SEND_WF_NAME,
+    WorkflowHandleAsyncPolling,
     WorkflowHandlePolling,
     decorate_step,
     decorate_transaction,
@@ -757,6 +758,44 @@ class DBOS:
             ),
         )
 
+    @classmethod
+    async def get_workflow_status_async(
+        cls, workflow_id: str
+    ) -> Optional[WorkflowStatus]:
+        """Return the status of a workflow execution."""
+        ctx = get_local_dbos_context()
+        if ctx and ctx.is_within_workflow():
+            ctx.function_id += 1
+            stat = await asyncio.to_thread(
+                lambda: _get_dbos_instance()._sys_db.get_workflow_status_within_wf(
+                    workflow_id, ctx.workflow_id, ctx.function_id
+                )
+            )
+        else:
+            stat = await asyncio.to_thread(
+                lambda: _get_dbos_instance()._sys_db.get_workflow_status(workflow_id)
+            )
+        if stat is None:
+            return None
+
+        return WorkflowStatus(
+            workflow_id=workflow_id,
+            status=stat["status"],
+            name=stat["name"],
+            executor_id=stat["executor_id"],
+            recovery_attempts=stat["recovery_attempts"],
+            class_name=stat["class_name"],
+            config_name=stat["config_name"],
+            queue_name=stat["queue_name"],
+            authenticated_user=stat["authenticated_user"],
+            assumed_role=stat["assumed_role"],
+            authenticated_roles=(
+                json.loads(stat["authenticated_roles"])
+                if stat["authenticated_roles"] is not None
+                else None
+            ),
+        )
+
     @classmethod
     def retrieve_workflow(
         cls, workflow_id: str, existing_workflow: bool = True
@@ -769,6 +808,18 @@ class DBOS:
             raise DBOSNonExistentWorkflowError(workflow_id)
         return WorkflowHandlePolling(workflow_id, dbos)
 
+    @classmethod
+    async def retrieve_workflow_async(
+        cls, workflow_id: str, existing_workflow: bool = True
+    ) -> WorkflowHandleAsync[R]:
+        """Return a `WorkflowHandle` for a workflow execution."""
+        dbos = _get_dbos_instance()
+        if existing_workflow:
+            stat = await dbos.get_workflow_status_async(workflow_id)
+            if stat is None:
+                raise DBOSNonExistentWorkflowError(workflow_id)
+        return WorkflowHandleAsyncPolling(workflow_id, dbos)
+
     @classmethod
     def send(
         cls, destination_id: str, message: Any, topic: Optional[str] = None
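
The new async variants mirror their synchronous counterparts, delegating the blocking database calls to a worker thread via asyncio.to_thread. A hedged usage sketch (the workflow ID is illustrative):

from dbos import DBOS

async def inspect(workflow_id: str) -> None:
    # Query workflow status without blocking the event loop.
    status = await DBOS.get_workflow_status_async(workflow_id)
    if status is not None:
        # Retrieve an async handle and await the workflow's result.
        handle = await DBOS.retrieve_workflow_async(workflow_id)
        result = await handle.get_result()
        print(status.status, result)
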
--- dbos-0.25.0a1/dbos/_dbos_config.py
+++ dbos-0.25.0a7/dbos/_dbos_config.py
@@ -512,7 +512,8 @@ def overwrite_config(provided_config: ConfigFile) -> ConfigFile:
         "sys_db_name"
     ]
     provided_config["database"]["ssl"] = config_from_file["database"]["ssl"]
-    provided_config["database"]["ssl_ca"] = config_from_file["database"]["ssl_ca"]
+    if "ssl_ca" in config_from_file["database"]:
+        provided_config["database"]["ssl_ca"] = config_from_file["database"]["ssl_ca"]
 
     # Telemetry config
     if "telemetry" not in provided_config or provided_config["telemetry"] is None:
--- dbos-0.25.0a1/dbos/_fastapi.py
+++ dbos-0.25.0a7/dbos/_fastapi.py
@@ -63,7 +63,10 @@ class LifespanMiddleware:
         if scope["type"] == "lifespan":
 
             async def wrapped_send(message: MutableMapping[str, Any]) -> None:
-                if message["type"] == "lifespan.startup.complete":
+                if (
+                    message["type"] == "lifespan.startup.complete"
+                    and not self.dbos._launched
+                ):
                     self.dbos._launch()
                 elif message["type"] == "lifespan.shutdown.complete":
                     self.dbos._destroy()
--- /dev/null
+++ dbos-0.25.0a7/dbos/_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py
@@ -0,0 +1,46 @@
+"""functionname_childid_op_outputs
+
+Revision ID: f4b9b32ba814
+Revises: 04ca4f231047
+Create Date: 2025-03-21 14:32:43.091074
+
+"""
+
+from typing import Sequence, Union
+
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision: str = "f4b9b32ba814"
+down_revision: Union[str, None] = "04ca4f231047"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    op.add_column(
+        "operation_outputs",
+        sa.Column(
+            "function_name",
+            sa.Text(),
+            nullable=False,
+            default="",
+        ),
+        schema="dbos",
+    )
+
+    op.add_column(
+        "operation_outputs",
+        sa.Column(
+            "child_workflow_id",
+            sa.Text(),
+            nullable=True,
+        ),
+        schema="dbos",
+    )
+
+
+def downgrade() -> None:
+    op.drop_column("operation_outputs", "function_name", schema="dbos")
+    op.drop_column("operation_outputs", "child_workflow_id", schema="dbos")
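
After this migration runs (DBOS applies system-database migrations when the application launches), the two new columns appear on the dbos.operation_outputs table. A quick check with SQLAlchemy's inspector; the connection URL is an assumption for illustration:

import sqlalchemy as sa

# Inspect the system database; the URL below is illustrative only.
engine = sa.create_engine("postgresql://postgres@localhost:5432/myapp_dbos_sys")
inspector = sa.inspect(engine)
cols = [c["name"] for c in inspector.get_columns("operation_outputs", schema="dbos")]
assert "function_name" in cols and "child_workflow_id" in cols
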
--- dbos-0.25.0a1/dbos/_schemas/system_database.py
+++ dbos-0.25.0a7/dbos/_schemas/system_database.py
@@ -71,8 +71,10 @@ class SystemSchema:
             nullable=False,
         ),
         Column("function_id", Integer, nullable=False),
+        Column("function_name", Text, nullable=False, default=""),
         Column("output", Text, nullable=True),
         Column("error", Text, nullable=True),
+        Column("child_workflow_id", Text, nullable=True),
         PrimaryKeyConstraint("workflow_uuid", "function_id"),
     )
 
--- dbos-0.25.0a1/dbos/_sys_db.py
+++ dbos-0.25.0a7/dbos/_sys_db.py
@@ -28,6 +28,7 @@ from sqlalchemy.sql import func
 from dbos._utils import GlobalParams
 
 from . import _serialization
+from ._context import get_local_dbos_context
 from ._dbos_config import ConfigFile
 from ._error import (
     DBOSConflictingWorkflowError,
@@ -89,6 +90,7 @@ class RecordedResult(TypedDict):
 class OperationResultInternal(TypedDict):
     workflow_uuid: str
     function_id: int
+    function_name: str
     output: Optional[str]  # JSON (jsonpickle)
     error: Optional[str]  # JSON (jsonpickle)
 
@@ -151,6 +153,14 @@ class GetPendingWorkflowsOutput:
         self.queue_name: Optional[str] = queue_name
 
 
+class StepInfo(TypedDict):
+    function_id: int
+    function_name: str
+    output: Optional[str]  # JSON (jsonpickle)
+    error: Optional[str]  # JSON (jsonpickle)
+    child_workflow_id: Optional[str]
+
+
 _dbos_null_topic = "__null__topic__"
 _buffer_flush_batch_size = 100
 _buffer_flush_interval_secs = 1.0
@@ -550,6 +560,7 @@ class SystemDatabase:
             {
                 "workflow_uuid": calling_wf,
                 "function_id": calling_wf_fn,
+                "function_name": "DBOS.getStatus",
                 "output": _serialization.serialize(stat),
                 "error": None,
             }
@@ -771,6 +782,28 @@ class SystemDatabase:
             for row in rows
         ]
 
+    def get_workflow_steps(self, workflow_id: str) -> List[StepInfo]:
+        with self.engine.begin() as c:
+            rows = c.execute(
+                sa.select(
+                    SystemSchema.operation_outputs.c.function_id,
+                    SystemSchema.operation_outputs.c.function_name,
+                    SystemSchema.operation_outputs.c.output,
+                    SystemSchema.operation_outputs.c.error,
+                    SystemSchema.operation_outputs.c.child_workflow_id,
+                ).where(SystemSchema.operation_outputs.c.workflow_uuid == workflow_id)
+            ).fetchall()
+            return [
+                StepInfo(
+                    function_id=row[0],
+                    function_name=row[1],
+                    output=row[2],  # Preserve JSON data
+                    error=row[3],
+                    child_workflow_id=row[4],
+                )
+                for row in rows
+            ]
+
     def record_operation_result(
         self, result: OperationResultInternal, conn: Optional[sa.Connection] = None
     ) -> None:
@@ -782,6 +815,7 @@ class SystemDatabase:
         sql = pg.insert(SystemSchema.operation_outputs).values(
             workflow_uuid=result["workflow_uuid"],
             function_id=result["function_id"],
+            function_name=result["function_name"],
             output=output,
             error=error,
         )
@@ -796,6 +830,30 @@ class SystemDatabase:
                 raise DBOSWorkflowConflictIDError(result["workflow_uuid"])
             raise
 
+    def record_child_workflow(
+        self,
+        parentUUID: str,
+        childUUID: str,
+        functionID: int,
+        functionName: str,
+    ) -> None:
+        if self._debug_mode:
+            raise Exception("called record_child_workflow in debug mode")
+
+        sql = pg.insert(SystemSchema.operation_outputs).values(
+            workflow_uuid=parentUUID,
+            function_id=functionID,
+            function_name=functionName,
+            child_workflow_id=childUUID,
+        )
+        try:
+            with self.engine.begin() as c:
+                c.execute(sql)
+        except DBAPIError as dbapi_error:
+            if dbapi_error.orig.sqlstate == "23505":  # type: ignore
+                raise DBOSWorkflowConflictIDError(parentUUID)
+            raise
+
     def check_operation_execution(
         self, workflow_uuid: str, function_id: int, conn: Optional[sa.Connection] = None
     ) -> Optional[RecordedResult]:
@@ -822,6 +880,23 @@ class SystemDatabase:
         }
         return result
 
+    def check_child_workflow(
+        self, workflow_uuid: str, function_id: int
+    ) -> Optional[str]:
+        sql = sa.select(SystemSchema.operation_outputs.c.child_workflow_id).where(
+            SystemSchema.operation_outputs.c.workflow_uuid == workflow_uuid,
+            SystemSchema.operation_outputs.c.function_id == function_id,
+        )
+
+        # If in a transaction, use the provided connection
+        row: Any
+        with self.engine.begin() as c:
+            row = c.execute(sql).fetchone()
+
+        if row is None:
+            return None
+        return str(row[0])
+
     def send(
         self,
         workflow_uuid: str,
@@ -866,6 +941,7 @@ class SystemDatabase:
         output: OperationResultInternal = {
             "workflow_uuid": workflow_uuid,
             "function_id": function_id,
+            "function_name": "DBOS.send",
             "output": None,
             "error": None,
         }
@@ -959,6 +1035,7 @@ class SystemDatabase:
                 {
                     "workflow_uuid": workflow_uuid,
                     "function_id": function_id,
+                    "function_name": "DBOS.recv",
                     "output": _serialization.serialize(
                         message
                     ),  # None will be serialized to 'null'
@@ -1049,6 +1126,7 @@ class SystemDatabase:
             {
                 "workflow_uuid": workflow_uuid,
                 "function_id": function_id,
+                "function_name": "DBOS.sleep",
                 "output": _serialization.serialize(end_time),
                 "error": None,
             }
@@ -1096,6 +1174,7 @@ class SystemDatabase:
         output: OperationResultInternal = {
             "workflow_uuid": workflow_uuid,
             "function_id": function_id,
+            "function_name": "DBOS.setEvent",
             "output": None,
             "error": None,
         }
@@ -1176,6 +1255,7 @@ class SystemDatabase:
             {
                 "workflow_uuid": caller_ctx["workflow_uuid"],
                 "function_id": caller_ctx["function_id"],
+                "function_name": "DBOS.getEvent",
                 "output": _serialization.serialize(
                     value
                 ),  # None will be serialized to 'null'
--- dbos-0.25.0a1/dbos/_templates/dbos-db-starter/__package/main.py
+++ dbos-0.25.0a7/dbos/_templates/dbos-db-starter/__package/main.py
@@ -6,6 +6,7 @@
 
 # First, let's do imports, create a FastAPI app, and initialize DBOS.
 
+import uvicorn
 from fastapi import FastAPI
 from fastapi.responses import HTMLResponse
 
@@ -37,7 +38,7 @@ def example_transaction(name: str) -> str:
     return greeting
 
 
-# Finally, let's use FastAPI to serve an HTML + CSS readme
+# Now, let's use FastAPI to serve an HTML + CSS readme
 # from the root path.
 
 
@@ -66,14 +67,8 @@ def readme() -> HTMLResponse:
     return HTMLResponse(readme)
 
 
-# To deploy this app to DBOS Cloud:
-# - "npm i -g @dbos-inc/dbos-cloud@latest" to install the Cloud CLI (requires Node)
-# - "dbos-cloud app deploy" to deploy your app
-# - Deploy outputs a URL--visit it to see your app!
+# Finally, we'll launch DBOS then start the FastAPI server.
 
-
-# To run this app locally:
-# - Make sure you have a Postgres database to connect to
-# - "dbos migrate" to set up your database tables
-# - "dbos start" to start the app
-# - Visit localhost:8000 to see your app!
+if __name__ == "__main__":
+    DBOS.launch()
+    uvicorn.run(app, host="0.0.0.0", port=8000)
--- dbos-0.25.0a1/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos
+++ dbos-0.25.0a7/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos
@@ -7,8 +7,6 @@ name: ${project_name}
 language: python
 runtimeConfig:
   start:
-    - "fastapi run ${package_name}/main.py"
+    - "${start_command}"
 database_url: ${DBOS_DATABASE_URL}
-database:
-  migrate:
-    - ${migration_command}
+${migration_section}
--- dbos-0.25.0a1/dbos/_workflow_commands.py
+++ dbos-0.25.0a7/dbos/_workflow_commands.py
@@ -5,6 +5,7 @@ from ._sys_db import (
     GetQueuedWorkflowsInput,
     GetWorkflowsInput,
     GetWorkflowsOutput,
+    StepInfo,
     SystemDatabase,
     WorkflowStatuses,
 )
@@ -146,3 +147,8 @@ def get_workflow(
     winfo.request = None
 
     return winfo
+
+
+def list_workflow_steps(sys_db: SystemDatabase, workflow_uuid: str) -> List[StepInfo]:
+    output = sys_db.get_workflow_steps(workflow_uuid)
+    return output
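
list_workflow_steps is a thin wrapper over SystemDatabase.get_workflow_steps, so the same data is reachable programmatically as well as through the admin endpoint. A sketch, assuming a SystemDatabase constructed from an already-loaded ConfigFile (the config object and workflow ID are illustrative):

from dbos._sys_db import SystemDatabase
from dbos._workflow_commands import list_workflow_steps

sys_db = SystemDatabase(config)  # `config` is an assumed, already-loaded ConfigFile
for step in list_workflow_steps(sys_db, "my-workflow-id"):
    # Outputs and errors are stored as jsonpickle-serialized text.
    print(step["function_id"], step["function_name"], step["child_workflow_id"])
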
--- dbos-0.25.0a1/dbos/cli/_template_init.py
+++ dbos-0.25.0a7/dbos/cli/_template_init.py
@@ -58,15 +58,20 @@ def copy_template(src_dir: str, project_name: str, config_mode: bool) -> None:
     dst_dir = path.abspath(".")
 
     package_name = project_name.replace("-", "_")
+    default_migration_section = """database:
+  migrate:
+    - alembic upgrade head
+"""
     ctx = {
         "project_name": project_name,
         "package_name": package_name,
-        "migration_command": "alembic upgrade head",
+        "start_command": f"python3 -m {package_name}.main",
+        "migration_section": default_migration_section,
     }
 
     if config_mode:
-        ctx["package_name"] = "."
-        ctx["migration_command"] = "echo 'No migrations specified'"
+        ctx["start_command"] = "python3 main.py"
+        ctx["migration_section"] = ""
     _copy_dbos_template(
         os.path.join(src_dir, "dbos-config.yaml.dbos"),
         os.path.join(dst_dir, "dbos-config.yaml"),