dbos 0.6.1__tar.gz → 0.7.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dbos might be problematic; see the registry's advisory for details.

Files changed (72)
  1. {dbos-0.6.1 → dbos-0.7.0}/PKG-INFO +2 -2
  2. {dbos-0.6.1 → dbos-0.7.0}/dbos/__init__.py +2 -0
  3. {dbos-0.6.1 → dbos-0.7.0}/dbos/application_database.py +6 -11
  4. {dbos-0.6.1 → dbos-0.7.0}/dbos/context.py +3 -2
  5. {dbos-0.6.1 → dbos-0.7.0}/dbos/core.py +74 -54
  6. {dbos-0.6.1 → dbos-0.7.0}/dbos/dbos.py +57 -69
  7. {dbos-0.6.1 → dbos-0.7.0}/dbos/dbos_config.py +1 -1
  8. {dbos-0.6.1 → dbos-0.7.0}/dbos/error.py +0 -11
  9. {dbos-0.6.1 → dbos-0.7.0}/dbos/fastapi.py +46 -2
  10. {dbos-0.6.1 → dbos-0.7.0}/dbos/kafka.py +27 -12
  11. dbos-0.7.0/dbos/migrations/versions/eab0cc1d9a14_job_queue.py +55 -0
  12. dbos-0.7.0/dbos/queue.py +36 -0
  13. {dbos-0.6.1 → dbos-0.7.0}/dbos/recovery.py +1 -1
  14. {dbos-0.6.1 → dbos-0.7.0}/dbos/scheduler/scheduler.py +8 -10
  15. {dbos-0.6.1 → dbos-0.7.0}/dbos/schemas/system_database.py +23 -0
  16. {dbos-0.6.1 → dbos-0.7.0}/dbos/system_database.py +116 -83
  17. {dbos-0.6.1 → dbos-0.7.0}/pyproject.toml +2 -3
  18. {dbos-0.6.1 → dbos-0.7.0}/tests/conftest.py +1 -1
  19. {dbos-0.6.1 → dbos-0.7.0}/tests/scheduler/test_scheduler.py +20 -6
  20. {dbos-0.6.1 → dbos-0.7.0}/tests/test_admin_server.py +3 -2
  21. {dbos-0.6.1 → dbos-0.7.0}/tests/test_dbos.py +44 -21
  22. {dbos-0.6.1 → dbos-0.7.0}/tests/test_failures.py +18 -18
  23. {dbos-0.6.1 → dbos-0.7.0}/tests/test_fastapi.py +3 -2
  24. {dbos-0.6.1 → dbos-0.7.0}/tests/test_fastapi_roles.py +5 -5
  25. {dbos-0.6.1 → dbos-0.7.0}/tests/test_flask.py +3 -2
  26. {dbos-0.6.1 → dbos-0.7.0}/tests/test_kafka.py +49 -7
  27. dbos-0.7.0/tests/test_queue.py +110 -0
  28. {dbos-0.6.1 → dbos-0.7.0}/tests/test_schema_migration.py +6 -6
  29. {dbos-0.6.1 → dbos-0.7.0}/LICENSE +0 -0
  30. {dbos-0.6.1 → dbos-0.7.0}/README.md +0 -0
  31. {dbos-0.6.1 → dbos-0.7.0}/dbos/admin_sever.py +0 -0
  32. {dbos-0.6.1 → dbos-0.7.0}/dbos/cli.py +0 -0
  33. {dbos-0.6.1 → dbos-0.7.0}/dbos/dbos-config.schema.json +0 -0
  34. {dbos-0.6.1 → dbos-0.7.0}/dbos/decorators.py +0 -0
  35. {dbos-0.6.1 → dbos-0.7.0}/dbos/flask.py +0 -0
  36. {dbos-0.6.1 → dbos-0.7.0}/dbos/kafka_message.py +0 -0
  37. {dbos-0.6.1 → dbos-0.7.0}/dbos/logger.py +0 -0
  38. {dbos-0.6.1 → dbos-0.7.0}/dbos/migrations/env.py +0 -0
  39. {dbos-0.6.1 → dbos-0.7.0}/dbos/migrations/script.py.mako +0 -0
  40. {dbos-0.6.1 → dbos-0.7.0}/dbos/migrations/versions/5c361fc04708_added_system_tables.py +0 -0
  41. {dbos-0.6.1 → dbos-0.7.0}/dbos/migrations/versions/a3b18ad34abe_added_triggers.py +0 -0
  42. {dbos-0.6.1 → dbos-0.7.0}/dbos/py.typed +0 -0
  43. {dbos-0.6.1 → dbos-0.7.0}/dbos/registrations.py +0 -0
  44. {dbos-0.6.1 → dbos-0.7.0}/dbos/request.py +0 -0
  45. {dbos-0.6.1 → dbos-0.7.0}/dbos/roles.py +0 -0
  46. {dbos-0.6.1 → dbos-0.7.0}/dbos/scheduler/croniter.py +0 -0
  47. {dbos-0.6.1 → dbos-0.7.0}/dbos/schemas/__init__.py +0 -0
  48. {dbos-0.6.1 → dbos-0.7.0}/dbos/schemas/application_database.py +0 -0
  49. {dbos-0.6.1 → dbos-0.7.0}/dbos/templates/hello/README.md +0 -0
  50. {dbos-0.6.1 → dbos-0.7.0}/dbos/templates/hello/__package/__init__.py +0 -0
  51. {dbos-0.6.1 → dbos-0.7.0}/dbos/templates/hello/__package/main.py +0 -0
  52. {dbos-0.6.1 → dbos-0.7.0}/dbos/templates/hello/__package/schema.py +0 -0
  53. {dbos-0.6.1 → dbos-0.7.0}/dbos/templates/hello/alembic.ini +0 -0
  54. {dbos-0.6.1 → dbos-0.7.0}/dbos/templates/hello/dbos-config.yaml.dbos +0 -0
  55. {dbos-0.6.1 → dbos-0.7.0}/dbos/templates/hello/migrations/env.py.dbos +0 -0
  56. {dbos-0.6.1 → dbos-0.7.0}/dbos/templates/hello/migrations/script.py.mako +0 -0
  57. {dbos-0.6.1 → dbos-0.7.0}/dbos/templates/hello/migrations/versions/2024_07_31_180642_init.py +0 -0
  58. {dbos-0.6.1 → dbos-0.7.0}/dbos/templates/hello/start_postgres_docker.py +0 -0
  59. {dbos-0.6.1 → dbos-0.7.0}/dbos/tracer.py +0 -0
  60. {dbos-0.6.1 → dbos-0.7.0}/dbos/utils.py +0 -0
  61. {dbos-0.6.1 → dbos-0.7.0}/tests/__init__.py +0 -0
  62. {dbos-0.6.1 → dbos-0.7.0}/tests/atexit_no_ctor.py +0 -0
  63. {dbos-0.6.1 → dbos-0.7.0}/tests/atexit_no_launch.py +0 -0
  64. {dbos-0.6.1 → dbos-0.7.0}/tests/classdefs.py +0 -0
  65. {dbos-0.6.1 → dbos-0.7.0}/tests/more_classdefs.py +0 -0
  66. {dbos-0.6.1 → dbos-0.7.0}/tests/scheduler/test_croniter.py +0 -0
  67. {dbos-0.6.1 → dbos-0.7.0}/tests/test_classdecorators.py +0 -0
  68. {dbos-0.6.1 → dbos-0.7.0}/tests/test_concurrency.py +0 -0
  69. {dbos-0.6.1 → dbos-0.7.0}/tests/test_config.py +0 -0
  70. {dbos-0.6.1 → dbos-0.7.0}/tests/test_package.py +0 -0
  71. {dbos-0.6.1 → dbos-0.7.0}/tests/test_singleton.py +0 -0
  72. {dbos-0.6.1 → dbos-0.7.0}/version/__init__.py +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dbos
-Version: 0.6.1
+Version: 0.7.0
 Summary: Ultra-lightweight durable execution in Python
 Author-Email: "DBOS, Inc." <contact@dbos.dev>
 License: MIT
@@ -8,7 +8,6 @@ Requires-Python: >=3.9
 Requires-Dist: pyyaml>=6.0.2
 Requires-Dist: jsonschema>=4.23.0
 Requires-Dist: alembic>=1.13.2
-Requires-Dist: psycopg2-binary>=2.9.9
 Requires-Dist: typing-extensions>=4.12.2; python_version < "3.10"
 Requires-Dist: typer>=0.12.3
 Requires-Dist: jsonpickle>=3.2.2
@@ -19,6 +18,7 @@ Requires-Dist: python-dateutil>=2.9.0.post0
 Requires-Dist: fastapi[standard]>=0.112.1
 Requires-Dist: psutil>=6.0.0
 Requires-Dist: tomlkit>=0.13.2
+Requires-Dist: psycopg>=3.2.1
 Description-Content-Type: text/markdown
 
 
@@ -3,6 +3,7 @@ from .context import DBOSContextEnsure, SetWorkflowID
 from .dbos import DBOS, DBOSConfiguredInstance, WorkflowHandle, WorkflowStatus
 from .dbos_config import ConfigFile, get_dbos_database_url, load_config
 from .kafka_message import KafkaMessage
+from .queue import Queue
 from .system_database import GetWorkflowsInput, WorkflowStatusString
 
 __all__ = [
@@ -19,4 +20,5 @@ __all__ = [
     "load_config",
     "get_dbos_database_url",
     "error",
+    "Queue",
 ]
@@ -2,7 +2,6 @@ from typing import Optional, TypedDict, cast
2
2
 
3
3
  import sqlalchemy as sa
4
4
  import sqlalchemy.dialects.postgresql as pg
5
- import sqlalchemy.exc as sa_exc
6
5
  from sqlalchemy.exc import DBAPIError
7
6
  from sqlalchemy.orm import Session, sessionmaker
8
7
 
@@ -36,7 +35,7 @@ class ApplicationDatabase:
36
35
 
37
36
  # If the application database does not already exist, create it
38
37
  postgres_db_url = sa.URL.create(
39
- "postgresql",
38
+ "postgresql+psycopg",
40
39
  username=config["database"]["username"],
41
40
  password=config["database"]["password"],
42
41
  host=config["database"]["hostname"],
@@ -55,7 +54,7 @@ class ApplicationDatabase:
55
54
 
56
55
  # Create a connection pool for the application database
57
56
  app_db_url = sa.URL.create(
58
- "postgresql",
57
+ "postgresql+psycopg",
59
58
  username=config["database"]["username"],
60
59
  password=config["database"]["password"],
61
60
  host=config["database"]["hostname"],
@@ -97,11 +96,9 @@ class ApplicationDatabase:
97
96
  )
98
97
  )
99
98
  except DBAPIError as dbapi_error:
100
- if dbapi_error.orig.pgcode == "23505": # type: ignore
99
+ if dbapi_error.orig.sqlstate == "23505": # type: ignore
101
100
  raise DBOSWorkflowConflictIDError(output["workflow_uuid"])
102
- raise dbapi_error
103
- except Exception as e:
104
- raise e
101
+ raise
105
102
 
106
103
  def record_transaction_error(self, output: TransactionResultInternal) -> None:
107
104
  try:
@@ -122,11 +119,9 @@ class ApplicationDatabase:
122
119
  )
123
120
  )
124
121
  except DBAPIError as dbapi_error:
125
- if dbapi_error.orig.pgcode == "23505": # type: ignore
122
+ if dbapi_error.orig.sqlstate == "23505": # type: ignore
126
123
  raise DBOSWorkflowConflictIDError(output["workflow_uuid"])
127
- raise dbapi_error
128
- except Exception as e:
129
- raise e
124
+ raise
130
125
 
131
126
  @staticmethod
132
127
  def check_transaction_execution(
@@ -3,6 +3,7 @@ from __future__ import annotations
3
3
  import json
4
4
  import os
5
5
  import uuid
6
+ from contextlib import AbstractContextManager
6
7
  from contextvars import ContextVar
7
8
  from enum import Enum
8
9
  from types import TracebackType
@@ -344,7 +345,7 @@ class SetWorkflowRecovery:
344
345
  return False # Did not handle
345
346
 
346
347
 
347
- class EnterDBOSWorkflow:
348
+ class EnterDBOSWorkflow(AbstractContextManager[DBOSContext, Literal[False]]):
348
349
  def __init__(self, attributes: TracedAttributes) -> None:
349
350
  self.created_ctx = False
350
351
  self.attributes = attributes
@@ -377,7 +378,7 @@ class EnterDBOSWorkflow:
377
378
  return False # Did not handle
378
379
 
379
380
 
380
- class EnterDBOSChildWorkflow:
381
+ class EnterDBOSChildWorkflow(AbstractContextManager[DBOSContext, Literal[False]]):
381
382
  def __init__(self, attributes: TracedAttributes) -> None:
382
383
  self.parent_ctx: Optional[DBOSContext] = None
383
384
  self.child_ctx: Optional[DBOSContext] = None
@@ -63,6 +63,7 @@ from dbos.system_database import (
63
63
  OperationResultInternal,
64
64
  WorkflowInputs,
65
65
  WorkflowStatusInternal,
66
+ WorkflowStatusString,
66
67
  )
67
68
 
68
69
  if TYPE_CHECKING:
@@ -108,7 +109,7 @@ class _WorkflowHandlePolling(Generic[R]):
108
109
  return self.workflow_id
109
110
 
110
111
  def get_result(self) -> R:
111
- res: R = self.dbos.sys_db.await_workflow_result(self.workflow_id)
112
+ res: R = self.dbos._sys_db.await_workflow_result(self.workflow_id)
112
113
  return res
113
114
 
114
115
  def get_status(self) -> "WorkflowStatus":
@@ -126,6 +127,7 @@ def _init_workflow(
126
127
  class_name: Optional[str],
127
128
  config_name: Optional[str],
128
129
  temp_wf_type: Optional[str],
130
+ queue: Optional[str] = None,
129
131
  ) -> WorkflowStatusInternal:
130
132
  wfid = (
131
133
  ctx.workflow_id
@@ -134,7 +136,11 @@ def _init_workflow(
134
136
  )
135
137
  status: WorkflowStatusInternal = {
136
138
  "workflow_uuid": wfid,
137
- "status": "PENDING",
139
+ "status": (
140
+ WorkflowStatusString.PENDING.value
141
+ if queue is None
142
+ else WorkflowStatusString.ENQUEUED.value
143
+ ),
138
144
  "name": wf_name,
139
145
  "class_name": class_name,
140
146
  "config_name": config_name,
@@ -150,20 +156,25 @@ def _init_workflow(
150
156
  json.dumps(ctx.authenticated_roles) if ctx.authenticated_roles else None
151
157
  ),
152
158
  "assumed_role": ctx.assumed_role,
159
+ "queue_name": queue,
153
160
  }
154
161
 
155
162
  # If we have a class name, the first arg is the instance and do not serialize
156
163
  if class_name is not None:
157
164
  inputs = {"args": inputs["args"][1:], "kwargs": inputs["kwargs"]}
158
165
 
159
- if temp_wf_type != "transaction":
166
+ if temp_wf_type != "transaction" or queue is not None:
160
167
  # Synchronously record the status and inputs for workflows and single-step workflows
161
168
  # We also have to do this for single-step workflows because of the foreign key constraint on the operation outputs table
162
- dbos.sys_db.update_workflow_status(status, False, ctx.in_recovery)
163
- dbos.sys_db.update_workflow_inputs(wfid, utils.serialize(inputs))
169
+ # TODO: Make this transactional (and with the queue step below)
170
+ dbos._sys_db.update_workflow_status(status, False, ctx.in_recovery)
171
+ dbos._sys_db.update_workflow_inputs(wfid, utils.serialize(inputs))
164
172
  else:
165
173
  # Buffer the inputs for single-transaction workflows, but don't buffer the status
166
- dbos.sys_db.buffer_workflow_inputs(wfid, utils.serialize(inputs))
174
+ dbos._sys_db.buffer_workflow_inputs(wfid, utils.serialize(inputs))
175
+
176
+ if queue is not None:
177
+ dbos._sys_db.enqueue(wfid, queue)
167
178
 
168
179
  return status
169
180
 
@@ -179,7 +190,9 @@ def _execute_workflow(
179
190
  output = func(*args, **kwargs)
180
191
  status["status"] = "SUCCESS"
181
192
  status["output"] = utils.serialize(output)
182
- dbos.sys_db.buffer_workflow_status(status)
193
+ if status["queue_name"] is not None:
194
+ dbos._sys_db.remove_from_queue(status["workflow_uuid"])
195
+ dbos._sys_db.buffer_workflow_status(status)
183
196
  except DBOSWorkflowConflictIDError:
184
197
  # Retrieve the workflow handle and wait for the result.
185
198
  # Must use existing_workflow=False because workflow status might not be set yet for single transaction workflows.
@@ -191,8 +204,10 @@ def _execute_workflow(
191
204
  except Exception as error:
192
205
  status["status"] = "ERROR"
193
206
  status["error"] = utils.serialize(error)
194
- dbos.sys_db.update_workflow_status(status)
195
- raise error
207
+ if status["queue_name"] is not None:
208
+ dbos._sys_db.remove_from_queue(status["workflow_uuid"])
209
+ dbos._sys_db.update_workflow_status(status)
210
+ raise
196
211
 
197
212
  return output
198
213
 
@@ -217,14 +232,14 @@ def _execute_workflow_wthread(
217
232
  dbos.logger.error(
218
233
  f"Exception encountered in asynchronous workflow: {traceback.format_exc()}"
219
234
  )
220
- raise e
235
+ raise
221
236
 
222
237
 
223
238
  def _execute_workflow_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[Any]":
224
- status = dbos.sys_db.get_workflow_status(workflow_id)
239
+ status = dbos._sys_db.get_workflow_status(workflow_id)
225
240
  if not status:
226
241
  raise DBOSRecoveryError(workflow_id, "Workflow status not found")
227
- inputs = dbos.sys_db.get_workflow_inputs(workflow_id)
242
+ inputs = dbos._sys_db.get_workflow_inputs(workflow_id)
228
243
  if not inputs:
229
244
  raise DBOSRecoveryError(workflow_id, "Workflow inputs not found")
230
245
  wf_func = dbos._registry.workflow_info_map.get(status["name"], None)
@@ -249,6 +264,8 @@ def _execute_workflow_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[Any]
249
264
  return _start_workflow(
250
265
  dbos,
251
266
  wf_func,
267
+ status["queue_name"],
268
+ True,
252
269
  dbos._registry.instance_info_map[iname],
253
270
  *inputs["args"],
254
271
  **inputs["kwargs"],
@@ -264,6 +281,8 @@ def _execute_workflow_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[Any]
264
281
  return _start_workflow(
265
282
  dbos,
266
283
  wf_func,
284
+ status["queue_name"],
285
+ True,
267
286
  dbos._registry.class_info_map[class_name],
268
287
  *inputs["args"],
269
288
  **inputs["kwargs"],
@@ -271,7 +290,12 @@ def _execute_workflow_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[Any]
271
290
  else:
272
291
  with SetWorkflowID(workflow_id):
273
292
  return _start_workflow(
274
- dbos, wf_func, *inputs["args"], **inputs["kwargs"]
293
+ dbos,
294
+ wf_func,
295
+ status["queue_name"],
296
+ True,
297
+ *inputs["args"],
298
+ **inputs["kwargs"],
275
299
  )
276
300
 
277
301
 
@@ -298,34 +322,22 @@ def _workflow_wrapper(dbosreg: "_DBOSRegistry", func: F) -> F:
298
322
  "kwargs": kwargs,
299
323
  }
300
324
  ctx = get_local_dbos_context()
301
- if ctx and ctx.is_workflow():
302
- with EnterDBOSChildWorkflow(attributes), DBOSAssumeRole(rr):
303
- ctx = assert_current_dbos_context() # Now the child ctx
304
- status = _init_workflow(
305
- dbos,
306
- ctx,
307
- inputs=inputs,
308
- wf_name=get_dbos_func_name(func),
309
- class_name=get_dbos_class_name(fi, func, args),
310
- config_name=get_config_name(fi, func, args),
311
- temp_wf_type=get_temp_workflow_type(func),
312
- )
313
-
314
- return _execute_workflow(dbos, status, func, *args, **kwargs)
315
- else:
316
- with EnterDBOSWorkflow(attributes), DBOSAssumeRole(rr):
317
- ctx = assert_current_dbos_context()
318
- status = _init_workflow(
319
- dbos,
320
- ctx,
321
- inputs=inputs,
322
- wf_name=get_dbos_func_name(func),
323
- class_name=get_dbos_class_name(fi, func, args),
324
- config_name=get_config_name(fi, func, args),
325
- temp_wf_type=get_temp_workflow_type(func),
326
- )
325
+ enterWorkflowCtxMgr = (
326
+ EnterDBOSChildWorkflow if ctx and ctx.is_workflow() else EnterDBOSWorkflow
327
+ )
328
+ with enterWorkflowCtxMgr(attributes), DBOSAssumeRole(rr):
329
+ ctx = assert_current_dbos_context() # Now the child ctx
330
+ status = _init_workflow(
331
+ dbos,
332
+ ctx,
333
+ inputs=inputs,
334
+ wf_name=get_dbos_func_name(func),
335
+ class_name=get_dbos_class_name(fi, func, args),
336
+ config_name=get_config_name(fi, func, args),
337
+ temp_wf_type=get_temp_workflow_type(func),
338
+ )
327
339
 
328
- return _execute_workflow(dbos, status, func, *args, **kwargs)
340
+ return _execute_workflow(dbos, status, func, *args, **kwargs)
329
341
 
330
342
  wrapped_func = cast(F, wrapper)
331
343
  return wrapped_func
@@ -343,6 +355,8 @@ def _workflow(reg: "_DBOSRegistry") -> Callable[[F], F]:
343
355
  def _start_workflow(
344
356
  dbos: "DBOS",
345
357
  func: "Workflow[P, R]",
358
+ queue_name: Optional[str],
359
+ execute_workflow: bool,
346
360
  *args: P.args,
347
361
  **kwargs: P.kwargs,
348
362
  ) -> "WorkflowHandle[R]":
@@ -396,10 +410,14 @@ def _start_workflow(
396
410
  class_name=get_dbos_class_name(fi, func, gin_args),
397
411
  config_name=get_config_name(fi, func, gin_args),
398
412
  temp_wf_type=get_temp_workflow_type(func),
413
+ queue=queue_name,
399
414
  )
400
415
 
416
+ if not execute_workflow:
417
+ return _WorkflowHandlePolling(new_wf_id, dbos)
418
+
401
419
  if fself is not None:
402
- future = dbos.executor.submit(
420
+ future = dbos._executor.submit(
403
421
  cast(Callable[..., R], _execute_workflow_wthread),
404
422
  dbos,
405
423
  status,
@@ -410,7 +428,7 @@ def _start_workflow(
410
428
  **kwargs,
411
429
  )
412
430
  else:
413
- future = dbos.executor.submit(
431
+ future = dbos._executor.submit(
414
432
  cast(Callable[..., R], _execute_workflow_wthread),
415
433
  dbos,
416
434
  status,
@@ -432,7 +450,7 @@ def _transaction(
432
450
  f"Function {func.__name__} invoked before DBOS initialized"
433
451
  )
434
452
  dbos = dbosreg.dbos
435
- with dbos.app_db.sessionmaker() as session:
453
+ with dbos._app_db.sessionmaker() as session:
436
454
  attributes: TracedAttributes = {
437
455
  "name": func.__name__,
438
456
  "operationType": OperationType.TRANSACTION.value,
@@ -493,7 +511,7 @@ def _transaction(
493
511
  )
494
512
  break
495
513
  except DBAPIError as dbapi_error:
496
- if dbapi_error.orig.pgcode == "40001": # type: ignore
514
+ if dbapi_error.orig.sqlstate == "40001": # type: ignore
497
515
  # Retry on serialization failure
498
516
  ctx.get_current_span().add_event(
499
517
  "Transaction Serialization Failure",
@@ -505,13 +523,13 @@ def _transaction(
505
523
  max_retry_wait_seconds,
506
524
  )
507
525
  continue
508
- raise dbapi_error
526
+ raise
509
527
  except Exception as error:
510
528
  # Don't record the error if it was already recorded
511
529
  if not has_recorded_error:
512
530
  txn_output["error"] = utils.serialize(error)
513
- dbos.app_db.record_transaction_error(txn_output)
514
- raise error
531
+ dbos._app_db.record_transaction_error(txn_output)
532
+ raise
515
533
  return output
516
534
 
517
535
  fi = get_or_create_func_info(func)
@@ -541,6 +559,7 @@ def _transaction(
541
559
  set_dbos_func_name(temp_wf, "<temp>." + func.__qualname__)
542
560
  set_temp_workflow_type(temp_wf, "transaction")
543
561
  dbosreg.register_wf_function(get_dbos_func_name(temp_wf), wrapped_wf)
562
+ wrapper.__orig_func = temp_wf # type: ignore
544
563
 
545
564
  return cast(F, wrapper)
546
565
 
@@ -575,7 +594,7 @@ def _step(
575
594
  "output": None,
576
595
  "error": None,
577
596
  }
578
- recorded_output = dbos.sys_db.check_operation_execution(
597
+ recorded_output = dbos._sys_db.check_operation_execution(
579
598
  ctx.workflow_id, ctx.function_id
580
599
  )
581
600
  if recorded_output:
@@ -622,7 +641,7 @@ def _step(
622
641
  step_output["error"] = (
623
642
  utils.serialize(error) if error is not None else None
624
643
  )
625
- dbos.sys_db.record_operation_result(step_output)
644
+ dbos._sys_db.record_operation_result(step_output)
626
645
 
627
646
  if error is not None:
628
647
  raise error
@@ -657,6 +676,7 @@ def _step(
657
676
  set_dbos_func_name(temp_wf, "<temp>." + func.__qualname__)
658
677
  set_temp_workflow_type(temp_wf, "step")
659
678
  dbosreg.register_wf_function(get_dbos_func_name(temp_wf), wrapped_wf)
679
+ wrapper.__orig_func = temp_wf # type: ignore
660
680
 
661
681
  return cast(F, wrapper)
662
682
 
@@ -671,7 +691,7 @@ def _send(
671
691
  "name": "send",
672
692
  }
673
693
  with EnterDBOSStep(attributes) as ctx:
674
- dbos.sys_db.send(
694
+ dbos._sys_db.send(
675
695
  ctx.workflow_id,
676
696
  ctx.curr_step_function_id,
677
697
  destination_id,
@@ -702,7 +722,7 @@ def _recv(
702
722
  with EnterDBOSStep(attributes) as ctx:
703
723
  ctx.function_id += 1 # Reserve for the sleep
704
724
  timeout_function_id = ctx.function_id
705
- return dbos.sys_db.recv(
725
+ return dbos._sys_db.recv(
706
726
  ctx.workflow_id,
707
727
  ctx.curr_step_function_id,
708
728
  timeout_function_id,
@@ -725,7 +745,7 @@ def _set_event(dbos: "DBOS", key: str, value: Any) -> None:
725
745
  "name": "set_event",
726
746
  }
727
747
  with EnterDBOSStep(attributes) as ctx:
728
- dbos.sys_db.set_event(
748
+ dbos._sys_db.set_event(
729
749
  ctx.workflow_id, ctx.curr_step_function_id, key, value
730
750
  )
731
751
  else:
@@ -753,7 +773,7 @@ def _get_event(
753
773
  "function_id": ctx.curr_step_function_id,
754
774
  "timeout_function_id": timeout_function_id,
755
775
  }
756
- return dbos.sys_db.get_event(workflow_id, key, timeout_seconds, caller_ctx)
776
+ return dbos._sys_db.get_event(workflow_id, key, timeout_seconds, caller_ctx)
757
777
  else:
758
778
  # Directly call it outside of a workflow
759
- return dbos.sys_db.get_event(workflow_id, key, timeout_seconds)
779
+ return dbos._sys_db.get_event(workflow_id, key, timeout_seconds)