dbos 0.11.0a4__py3-none-any.whl → 0.12.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dbos might be problematic.

Files changed (52)
  1. dbos/__init__.py +7 -7
  2. dbos/{admin_sever.py → _admin_sever.py} +20 -11
  3. dbos/{application_database.py → _app_db.py} +4 -5
  4. dbos/{decorators.py → _classproperty.py} +3 -3
  5. dbos/{context.py → _context.py} +26 -26
  6. dbos/{core.py → _core.py} +121 -107
  7. dbos/{dbos.py → _dbos.py} +57 -59
  8. dbos/{dbos_config.py → _dbos_config.py} +9 -9
  9. dbos/{fastapi.py → _fastapi.py} +10 -11
  10. dbos/{flask.py → _flask.py} +6 -7
  11. dbos/{kafka.py → _kafka.py} +18 -18
  12. dbos/{logger.py → _logger.py} +13 -13
  13. dbos/{queue.py → _queue.py} +7 -7
  14. dbos/{recovery.py → _recovery.py} +8 -8
  15. dbos/{roles.py → _roles.py} +5 -5
  16. dbos/{scheduler/scheduler.py → _scheduler.py} +17 -7
  17. dbos/{utils.py → _serialization.py} +4 -4
  18. dbos/{system_database.py → _sys_db.py} +42 -37
  19. dbos/{tracer.py → _tracer.py} +2 -2
  20. dbos/cli.py +21 -21
  21. {dbos-0.11.0a4.dist-info → dbos-0.12.0.dist-info}/METADATA +1 -1
  22. dbos-0.12.0.dist-info/RECORD +54 -0
  23. {dbos-0.11.0a4.dist-info → dbos-0.12.0.dist-info}/WHEEL +1 -1
  24. dbos-0.11.0a4.dist-info/RECORD +0 -54
  25. /dbos/{scheduler/croniter.py → _croniter.py} +0 -0
  26. /dbos/{error.py → _error.py} +0 -0
  27. /dbos/{kafka_message.py → _kafka_message.py} +0 -0
  28. /dbos/{migrations → _migrations}/env.py +0 -0
  29. /dbos/{migrations → _migrations}/script.py.mako +0 -0
  30. /dbos/{migrations → _migrations}/versions/50f3227f0b4b_fix_job_queue.py +0 -0
  31. /dbos/{migrations → _migrations}/versions/5c361fc04708_added_system_tables.py +0 -0
  32. /dbos/{migrations → _migrations}/versions/a3b18ad34abe_added_triggers.py +0 -0
  33. /dbos/{migrations → _migrations}/versions/d76646551a6b_job_queue_limiter.py +0 -0
  34. /dbos/{migrations → _migrations}/versions/d76646551a6c_workflow_queue.py +0 -0
  35. /dbos/{migrations → _migrations}/versions/eab0cc1d9a14_job_queue.py +0 -0
  36. /dbos/{registrations.py → _registrations.py} +0 -0
  37. /dbos/{request.py → _request.py} +0 -0
  38. /dbos/{schemas → _schemas}/__init__.py +0 -0
  39. /dbos/{schemas → _schemas}/application_database.py +0 -0
  40. /dbos/{schemas → _schemas}/system_database.py +0 -0
  41. /dbos/{templates → _templates}/hello/README.md +0 -0
  42. /dbos/{templates → _templates}/hello/__package/__init__.py +0 -0
  43. /dbos/{templates → _templates}/hello/__package/main.py +0 -0
  44. /dbos/{templates → _templates}/hello/__package/schema.py +0 -0
  45. /dbos/{templates → _templates}/hello/alembic.ini +0 -0
  46. /dbos/{templates → _templates}/hello/dbos-config.yaml.dbos +0 -0
  47. /dbos/{templates → _templates}/hello/migrations/env.py.dbos +0 -0
  48. /dbos/{templates → _templates}/hello/migrations/script.py.mako +0 -0
  49. /dbos/{templates → _templates}/hello/migrations/versions/2024_07_31_180642_init.py +0 -0
  50. /dbos/{templates → _templates}/hello/start_postgres_docker.py +0 -0
  51. {dbos-0.11.0a4.dist-info → dbos-0.12.0.dist-info}/entry_points.txt +0 -0
  52. {dbos-0.11.0a4.dist-info → dbos-0.12.0.dist-info}/licenses/LICENSE +0 -0
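
Note: every internal module in this release moves behind an underscore-prefixed name, signaling that only the package root is the public surface. A minimal sketch, assuming the re-exports in dbos/__init__.py are unchanged, of why typical application code is unaffected by the renames:

    # Application code that imports only from the package root is untouched by
    # the core.py -> _core.py, dbos.py -> _dbos.py, etc. renames.
    from dbos import DBOS, Queue, SetWorkflowID

    dbos = DBOS()  # still reads dbos-config.yaml by default

    # Code that reached into internals (e.g. `from dbos.core import ...`) must
    # switch to the public surface or to the new underscore-prefixed modules.
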
dbos/{dbos.py → _dbos.py}
@@ -25,63 +25,61 @@ from typing import (
 
 from opentelemetry.trace import Span
 
-from dbos.core import (
+from ._classproperty import classproperty
+from ._core import (
     TEMP_SEND_WF_NAME,
-    _execute_workflow_id,
-    _get_event,
-    _recv,
-    _send,
-    _set_event,
-    _start_workflow,
-    _step,
-    _transaction,
-    _workflow,
-    _workflow_wrapper,
-    _WorkflowHandlePolling,
+    WorkflowHandlePolling,
+    decorate_step,
+    decorate_transaction,
+    decorate_workflow,
+    execute_workflow_by_id,
+    get_event,
+    recv,
+    send,
+    set_event,
+    start_workflow,
+    workflow_wrapper,
 )
-from dbos.decorators import classproperty
-from dbos.queue import Queue, queue_thread
-from dbos.recovery import _recover_pending_workflows, _startup_recovery_thread
-from dbos.registrations import (
+from ._queue import Queue, _queue_thread
+from ._recovery import recover_pending_workflows, startup_recovery_thread
+from ._registrations import (
     DEFAULT_MAX_RECOVERY_ATTEMPTS,
     DBOSClassInfo,
     get_or_create_class_info,
     set_dbos_func_name,
     set_temp_workflow_type,
 )
-from dbos.roles import default_required_roles, required_roles
-from dbos.scheduler.scheduler import ScheduledWorkflow, scheduled
-
-from .tracer import dbos_tracer
+from ._roles import default_required_roles, required_roles
+from ._scheduler import ScheduledWorkflow, scheduled
+from ._tracer import dbos_tracer
 
 if TYPE_CHECKING:
     from fastapi import FastAPI
-    from dbos.kafka import KafkaConsumerWorkflow
-    from .request import Request
+    from ._kafka import _KafkaConsumerWorkflow
+    from ._request import Request
     from flask import Flask
 
 from sqlalchemy.orm import Session
 
-from dbos.request import Request
+from ._request import Request
 
 if sys.version_info < (3, 10):
     from typing_extensions import ParamSpec, TypeAlias
 else:
     from typing import ParamSpec, TypeAlias
 
-from dbos.admin_sever import AdminServer
-from dbos.context import (
+from ._admin_sever import AdminServer
+from ._app_db import ApplicationDatabase
+from ._context import (
     EnterDBOSStep,
     TracedAttributes,
     assert_current_dbos_context,
     get_local_dbos_context,
 )
-from dbos.error import DBOSException, DBOSNonExistentWorkflowError
-
-from .application_database import ApplicationDatabase
-from .dbos_config import ConfigFile, load_config, set_env_vars
-from .logger import add_otlp_to_all_loggers, config_logger, dbos_logger, init_logger
-from .system_database import SystemDatabase
+from ._dbos_config import ConfigFile, _set_env_vars, load_config
+from ._error import DBOSException, DBOSNonExistentWorkflowError
+from ._logger import add_otlp_to_all_loggers, config_logger, dbos_logger, init_logger
+from ._sys_db import SystemDatabase
 
 # Most DBOS functions are just any callable F, so decorators / wrappers work on F
 # There are cases where the parameters P and return value R should be separate
@@ -112,7 +110,7 @@ IsolationLevel = Literal[
 ]
 
 _dbos_global_instance: Optional[DBOS] = None
-_dbos_global_registry: Optional[_DBOSRegistry] = None
+_dbos_global_registry: Optional[DBOSRegistry] = None
 
 
 def _get_dbos_instance() -> DBOS:
@@ -122,26 +120,26 @@ def _get_dbos_instance() -> DBOS:
     raise DBOSException("No DBOS was created yet")
 
 
-def _get_or_create_dbos_registry() -> _DBOSRegistry:
+def _get_or_create_dbos_registry() -> DBOSRegistry:
     # Currently get / init the global registry
     global _dbos_global_registry
    if _dbos_global_registry is None:
-        _dbos_global_registry = _DBOSRegistry()
+        _dbos_global_registry = DBOSRegistry()
     return _dbos_global_registry
 
 
-_RegisteredJob = Tuple[
+RegisteredJob = Tuple[
     threading.Event, Callable[..., Any], Tuple[Any, ...], dict[str, Any]
 ]
 
 
-class _DBOSRegistry:
+class DBOSRegistry:
     def __init__(self) -> None:
         self.workflow_info_map: dict[str, Workflow[..., Any]] = {}
         self.class_info_map: dict[str, type] = {}
         self.instance_info_map: dict[str, object] = {}
         self.queue_info_map: dict[str, Queue] = {}
-        self.pollers: list[_RegisteredJob] = []
+        self.pollers: list[RegisteredJob] = []
         self.dbos: Optional[DBOS] = None
         self.config: Optional[ConfigFile] = None
@@ -263,14 +261,14 @@ class DBOS:
         if config is None:
             config = load_config()
         config_logger(config)
-        set_env_vars(config)
+        _set_env_vars(config)
         dbos_tracer.config(config)
         dbos_logger.info("Initializing DBOS")
         self.config: ConfigFile = config
         self._launched: bool = False
         self._sys_db_field: Optional[SystemDatabase] = None
         self._app_db_field: Optional[ApplicationDatabase] = None
-        self._registry: _DBOSRegistry = _get_or_create_dbos_registry()
+        self._registry: DBOSRegistry = _get_or_create_dbos_registry()
         self._registry.dbos = self
         self._admin_server_field: Optional[AdminServer] = None
         self.stop_events: List[threading.Event] = []
@@ -281,13 +279,13 @@ class DBOS:
 
         # If using FastAPI, set up middleware and lifecycle events
         if self.fastapi is not None:
-            from dbos.fastapi import setup_fastapi_middleware
+            from ._fastapi import setup_fastapi_middleware
 
             setup_fastapi_middleware(self.fastapi, _get_dbos_instance())
 
         # If using Flask, set up middleware
         if self.flask is not None:
-            from dbos.flask import setup_flask_middleware
+            from ._flask import setup_flask_middleware
 
             setup_flask_middleware(self.flask)
 
@@ -297,7 +295,7 @@ class DBOS:
         ) -> None:
             self.send(destination_id, message, topic)
 
-        temp_send_wf = _workflow_wrapper(self._registry, send_temp_workflow)
+        temp_send_wf = workflow_wrapper(self._registry, send_temp_workflow)
         set_dbos_func_name(send_temp_workflow, TEMP_SEND_WF_NAME)
         set_temp_workflow_type(send_temp_workflow, "send")
         self._registry.register_wf_function(TEMP_SEND_WF_NAME, temp_send_wf)
@@ -356,7 +354,7 @@ class DBOS:
 
         if not os.environ.get("DBOS__VMID"):
             workflow_ids = self._sys_db.get_pending_workflows("local")
-            self._executor.submit(_startup_recovery_thread, self, workflow_ids)
+            self._executor.submit(startup_recovery_thread, self, workflow_ids)
 
         # Listen to notifications
         notification_listener_thread = threading.Thread(
@@ -378,7 +376,7 @@ class DBOS:
         evt = threading.Event()
         self.stop_events.append(evt)
         bg_queue_thread = threading.Thread(
-            target=queue_thread, args=(evt, self), daemon=True
+            target=_queue_thread, args=(evt, self), daemon=True
         )
         bg_queue_thread.start()
         self._background_threads.append(bg_queue_thread)
@@ -435,7 +433,7 @@ class DBOS:
         cls, *, max_recovery_attempts: int = DEFAULT_MAX_RECOVERY_ATTEMPTS
     ) -> Callable[[F], F]:
         """Decorate a function for use as a DBOS workflow."""
-        return _workflow(_get_or_create_dbos_registry(), max_recovery_attempts)
+        return decorate_workflow(_get_or_create_dbos_registry(), max_recovery_attempts)
 
     @classmethod
     def transaction(
@@ -448,7 +446,7 @@ class DBOS:
             isolation_level(IsolationLevel): Transaction isolation level
 
         """
-        return _transaction(_get_or_create_dbos_registry(), isolation_level)
+        return decorate_transaction(_get_or_create_dbos_registry(), isolation_level)
 
     @classmethod
     def step(
@@ -470,7 +468,7 @@ class DBOS:
 
         """
 
-        return _step(
+        return decorate_step(
            _get_or_create_dbos_registry(),
             retries_allowed=retries_allowed,
             interval_seconds=interval_seconds,
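
The three hunks above rename the internal decorator helpers (_workflow → decorate_workflow, _transaction → decorate_transaction, _step → decorate_step); the public DBOS.workflow, DBOS.transaction, and DBOS.step decorators keep their names and parameters. A hedged usage sketch (function names and bodies are illustrative, not from the package; it assumes DBOS.sql_session is, as in current releases, the SQLAlchemy session bound to the transaction):

    from sqlalchemy import text
    from dbos import DBOS

    @DBOS.transaction()
    def record_greeting(name: str) -> None:
        # Runs inside a database transaction on the application database.
        DBOS.sql_session.execute(text("SELECT 1"))

    @DBOS.step(retries_allowed=True, interval_seconds=1.0)
    def compose_greeting(name: str) -> str:
        return f"Hello, {name}!"

    @DBOS.workflow()
    def greet_all(names: list[str]) -> None:
        for name in names:
            record_greeting(name)
            DBOS.logger.info(compose_greeting(name))

Running the workflow still requires a configured and launched DBOS instance; the sketch only shows the decorator surface, which is unchanged by this release.
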
@@ -530,10 +528,10 @@ class DBOS:
         config: dict[str, Any],
         topics: list[str],
         in_order: bool = False,
-    ) -> Callable[[KafkaConsumerWorkflow], KafkaConsumerWorkflow]:
+    ) -> Callable[[_KafkaConsumerWorkflow], _KafkaConsumerWorkflow]:
         """Decorate a function to be used as a Kafka consumer."""
         try:
-            from dbos.kafka import kafka_consumer
+            from ._kafka import kafka_consumer
 
             return kafka_consumer(
                 _get_or_create_dbos_registry(), config, topics, in_order
@@ -551,7 +549,7 @@ class DBOS:
         **kwargs: P.kwargs,
     ) -> WorkflowHandle[R]:
         """Invoke a workflow function in the background, returning a handle to the ongoing execution."""
-        return _start_workflow(_get_dbos_instance(), func, None, True, *args, **kwargs)
+        return start_workflow(_get_dbos_instance(), func, None, True, *args, **kwargs)
 
     @classmethod
     def get_workflow_status(cls, workflow_id: str) -> Optional[WorkflowStatus]:
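
Here _start_workflow becomes start_workflow in _core; the public entry point is still DBOS.start_workflow, which returns a WorkflowHandle. A hedged sketch (the workflow and the idempotency key are illustrative):

    from dbos import DBOS, SetWorkflowID

    with SetWorkflowID("greet-all-2024-08-01"):    # optional idempotency key
        handle = DBOS.start_workflow(greet_all, ["alice", "bob"])
    handle.get_result()                            # block until the workflow finishes
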
@@ -594,14 +592,14 @@ class DBOS:
         stat = dbos.get_workflow_status(workflow_id)
         if stat is None:
             raise DBOSNonExistentWorkflowError(workflow_id)
-        return _WorkflowHandlePolling(workflow_id, dbos)
+        return WorkflowHandlePolling(workflow_id, dbos)
 
     @classmethod
     def send(
         cls, destination_id: str, message: Any, topic: Optional[str] = None
     ) -> None:
         """Send a message to a workflow execution."""
-        return _send(_get_dbos_instance(), destination_id, message, topic)
+        return send(_get_dbos_instance(), destination_id, message, topic)
 
     @classmethod
     def recv(cls, topic: Optional[str] = None, timeout_seconds: float = 60) -> Any:
@@ -611,7 +609,7 @@ class DBOS:
         This function is to be called from within a workflow.
         `recv` will return the message sent on `topic`, waiting if necessary.
         """
-        return _recv(_get_dbos_instance(), topic, timeout_seconds)
+        return recv(_get_dbos_instance(), topic, timeout_seconds)
 
     @classmethod
     def sleep(cls, seconds: float) -> None:
@@ -649,7 +647,7 @@ class DBOS:
             value(Any): A serializable value to associate with the key
 
         """
-        return _set_event(_get_dbos_instance(), key, value)
+        return set_event(_get_dbos_instance(), key, value)
 
     @classmethod
     def get_event(cls, workflow_id: str, key: str, timeout_seconds: float = 60) -> Any:
@@ -664,19 +662,19 @@ class DBOS:
             timeout_seconds(float): The amount of time to wait, in case `set_event` has not yet been called byt the workflow
 
         """
-        return _get_event(_get_dbos_instance(), workflow_id, key, timeout_seconds)
+        return get_event(_get_dbos_instance(), workflow_id, key, timeout_seconds)
 
     @classmethod
     def execute_workflow_id(cls, workflow_id: str) -> WorkflowHandle[Any]:
         """Execute a workflow by ID (for recovery)."""
-        return _execute_workflow_id(_get_dbos_instance(), workflow_id)
+        return execute_workflow_by_id(_get_dbos_instance(), workflow_id)
 
     @classmethod
     def recover_pending_workflows(
         cls, executor_ids: List[str] = ["local"]
     ) -> List[WorkflowHandle[Any]]:
         """Find all PENDING workflows and execute them."""
-        return _recover_pending_workflows(_get_dbos_instance(), executor_ids)
+        return recover_pending_workflows(_get_dbos_instance(), executor_ids)
 
     @classproperty
     def logger(cls) -> Logger:
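
The messaging and event helpers also lose their leading underscores in _core (send, recv, set_event, get_event), while the DBOS.send / DBOS.recv / DBOS.set_event / DBOS.get_event classmethods are unchanged. A hedged sketch of the two sides of that API (names and payloads are illustrative):

    @DBOS.workflow()
    def order_workflow(order_id: str) -> str:
        DBOS.set_event("status", "awaiting_payment")                # publish progress for pollers
        payment = DBOS.recv(topic="payments", timeout_seconds=60)   # wait for a message
        return f"order {order_id} paid: {payment}"

    # From outside the workflow, e.g. a request handler:
    #   DBOS.send(workflow_id, {"amount": 10}, topic="payments")
    #   status = DBOS.get_event(workflow_id, "status", timeout_seconds=10)
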
@@ -853,7 +851,7 @@ class DBOSConfiguredInstance:
 
 # Apps that import DBOS probably don't exit. If they do, let's see if
 # it looks like startup was abandoned or a call was forgotten...
-def dbos_exit_hook() -> None:
+def _dbos_exit_hook() -> None:
     if _dbos_global_registry is None:
         # Probably used as or for a support module
         return
@@ -872,4 +870,4 @@ def dbos_exit_hook() -> None:
 
 
 # Register the exit hook
-atexit.register(dbos_exit_hook)
+atexit.register(_dbos_exit_hook)
dbos/{dbos_config.py → _dbos_config.py}
@@ -8,8 +8,8 @@ import yaml
 from jsonschema import ValidationError, validate
 from sqlalchemy import URL
 
-from dbos.error import DBOSInitializationError
-from dbos.logger import dbos_logger
+from ._error import DBOSInitializationError
+from ._logger import dbos_logger
 
 
 class RuntimeConfig(TypedDict, total=False):
@@ -74,7 +74,7 @@ class ConfigFile(TypedDict, total=False):
     application: Dict[str, Any]
 
 
-def substitute_env_vars(content: str) -> str:
+def _substitute_env_vars(content: str) -> str:
     regex = r"\$\{([^}]+)\}"  # Regex to match ${VAR_NAME} style placeholders
 
     def replace_func(match: re.Match[str]) -> str:
@@ -133,7 +133,7 @@ def load_config(config_file_path: str = "dbos-config.yaml") -> ConfigFile:
 
     with open(config_file_path, "r") as file:
         content = file.read()
-        substituted_content = substitute_env_vars(content)
+        substituted_content = _substitute_env_vars(content)
         data = yaml.safe_load(substituted_content)
 
     # Load the JSON schema relative to the package root
@@ -167,13 +167,13 @@ def load_config(config_file_path: str = "dbos-config.yaml") -> ConfigFile:
 
     data = cast(ConfigFile, data)
 
-    if not is_valid_app_name(data["name"]):
+    if not _is_valid_app_name(data["name"]):
         raise DBOSInitializationError(
             f'Invalid app name {data["name"]}. App names must be between 3 and 30 characters and contain only alphanumeric characters, dashes, and underscores.'
         )
 
     if "app_db_name" not in data["database"]:
-        data["database"]["app_db_name"] = app_name_to_db_name(data["name"])
+        data["database"]["app_db_name"] = _app_name_to_db_name(data["name"])
 
     if "local_suffix" in data["database"] and data["database"]["local_suffix"]:
         data["database"]["app_db_name"] = f"{data['database']['app_db_name']}_local"
@@ -182,7 +182,7 @@ def load_config(config_file_path: str = "dbos-config.yaml") -> ConfigFile:
     return data  # type: ignore
 
 
-def is_valid_app_name(name: str) -> bool:
+def _is_valid_app_name(name: str) -> bool:
     name_len = len(name)
     if name_len < 3 or name_len > 30:
         return False
@@ -190,12 +190,12 @@ def is_valid_app_name(name: str) -> bool:
     return True if match != None else False
 
 
-def app_name_to_db_name(app_name: str) -> str:
+def _app_name_to_db_name(app_name: str) -> str:
     name = app_name.replace("-", "_")
     return name if not name[0].isdigit() else f"_{name}"
 
 
-def set_env_vars(config: ConfigFile) -> None:
+def _set_env_vars(config: ConfigFile) -> None:
     for env, value in config.get("env", {}).items():
         if value is not None:
             os.environ[env] = value
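
The configuration helpers go private, but load_config stays public and its behavior is unchanged: ${VAR} placeholders in dbos-config.yaml are substituted from the environment, and database.app_db_name is derived from the app name when omitted. A hedged sketch, assuming load_config is still re-exported from the package root:

    import os
    from dbos import load_config

    # Assuming dbos-config.yaml contains a line such as:   password: ${PGPASSWORD}
    os.environ["PGPASSWORD"] = "dbos"
    config = load_config("dbos-config.yaml")
    print(config["database"]["app_db_name"])   # e.g. "my_app" when the app name is "my-app"
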
dbos/{fastapi.py → _fastapi.py}
@@ -6,20 +6,19 @@ from fastapi import Request as FastAPIRequest
 from fastapi.responses import JSONResponse
 from starlette.types import ASGIApp, Message, Receive, Scope, Send
 
-from dbos import DBOS
-from dbos.error import DBOSException
-
-from .context import (
+from . import DBOS
+from ._context import (
     EnterDBOSHandler,
     OperationType,
     SetWorkflowID,
     TracedAttributes,
     assert_current_dbos_context,
 )
-from .request import Address, Request, request_id_header
+from ._error import DBOSException
+from ._request import Address, Request, request_id_header
 
 
-def get_or_generate_request_id(request: FastAPIRequest) -> str:
+def _get_or_generate_request_id(request: FastAPIRequest) -> str:
     request_id = request.headers.get(request_id_header, None)
     if request_id is not None:
         return request_id
@@ -27,7 +26,7 @@ def get_or_generate_request_id(request: FastAPIRequest) -> str:
     return str(uuid.uuid4())
 
 
-def make_request(request: FastAPIRequest) -> Request:
+def _make_request(request: FastAPIRequest) -> Request:
     return Request(
         headers=request.headers,
         path_params=request.path_params,
@@ -40,7 +39,7 @@ def make_request(request: FastAPIRequest) -> Request:
     )
 
 
-async def dbos_error_handler(request: FastAPIRequest, gexc: Exception) -> JSONResponse:
+async def _dbos_error_handler(request: FastAPIRequest, gexc: Exception) -> JSONResponse:
     exc: DBOSException = cast(DBOSException, gexc)
     status_code = 500
     if exc.status_code is not None:
@@ -78,7 +77,7 @@ class LifespanMiddleware:
 def setup_fastapi_middleware(app: FastAPI, dbos: DBOS) -> None:
 
     app.add_middleware(LifespanMiddleware, dbos=dbos)
-    app.add_exception_handler(DBOSException, dbos_error_handler)
+    app.add_exception_handler(DBOSException, _dbos_error_handler)
 
     @app.middleware("http")
     async def dbos_fastapi_middleware(
@@ -86,7 +85,7 @@ def setup_fastapi_middleware(app: FastAPI, dbos: DBOS) -> None:
     ) -> Any:
         attributes: TracedAttributes = {
             "name": str(request.url.path),
-            "requestID": get_or_generate_request_id(request),
+            "requestID": _get_or_generate_request_id(request),
             "requestIP": request.client.host if request.client is not None else None,
             "requestURL": str(request.url),
             "requestMethod": request.method,
@@ -94,7 +93,7 @@ def setup_fastapi_middleware(app: FastAPI, dbos: DBOS) -> None:
         }
         with EnterDBOSHandler(attributes):
             ctx = assert_current_dbos_context()
-            ctx.request = make_request(request)
+            ctx.request = _make_request(request)
             workflow_id = request.headers.get("dbos-idempotency-key", "")
             with SetWorkflowID(workflow_id):
                 response = await call_next(request)
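
The FastAPI helpers become private, but the integration point is unchanged: passing fastapi= to the DBOS constructor installs the middleware above, which traces each request, honors the dbos-idempotency-key header via SetWorkflowID, and (through LifespanMiddleware) ties DBOS startup to the ASGI lifespan. A hedged sketch loosely modeled on the bundled hello template (route and handler are illustrative):

    from fastapi import FastAPI
    from dbos import DBOS

    app = FastAPI()
    DBOS(fastapi=app)   # installs the tracing, error-handler, and lifespan middleware

    @app.get("/greeting/{name}")
    @DBOS.workflow()
    def greeting_endpoint(name: str) -> str:
        return f"Greetings, {name}!"
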
dbos/{flask.py → _flask.py}
@@ -5,15 +5,14 @@ from urllib.parse import urlparse
 from flask import Flask, request
 from werkzeug.wrappers import Request as WRequest
 
-from dbos.context import (
+from ._context import (
     EnterDBOSHandler,
     OperationType,
     SetWorkflowID,
     TracedAttributes,
     assert_current_dbos_context,
 )
-
-from .request import Address, Request, request_id_header
+from ._request import Address, Request, request_id_header
 
 
 class FlaskMiddleware:
@@ -24,7 +23,7 @@ class FlaskMiddleware:
         request = WRequest(environ)
         attributes: TracedAttributes = {
             "name": urlparse(request.url).path,
-            "requestID": get_or_generate_request_id(request),
+            "requestID": _get_or_generate_request_id(request),
             "requestIP": (
                 request.remote_addr if request.remote_addr is not None else None
             ),
@@ -34,14 +33,14 @@ class FlaskMiddleware:
         }
         with EnterDBOSHandler(attributes):
             ctx = assert_current_dbos_context()
-            ctx.request = make_request(request)
+            ctx.request = _make_request(request)
             workflow_id = request.headers.get("dbos-idempotency-key", "")
             with SetWorkflowID(workflow_id):
                 response = self.app(environ, start_response)
         return response
 
 
-def get_or_generate_request_id(request: WRequest) -> str:
+def _get_or_generate_request_id(request: WRequest) -> str:
     request_id = request.headers.get(request_id_header, None)
     if request_id is not None:
         return request_id
@@ -49,7 +48,7 @@ def get_or_generate_request_id(request: WRequest) -> str:
     return str(uuid.uuid4())
 
 
-def make_request(request: WRequest) -> Request:
+def _make_request(request: WRequest) -> Request:
     parsed_url = urlparse(request.url)
     base_url = f"{parsed_url.scheme}://{parsed_url.netloc}"
 
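
The same pattern applies to Flask: the request helpers go private, and DBOS(flask=app) wires FlaskMiddleware around the WSGI app. A hedged sketch; the explicit DBOS.launch() call is an assumption (Flask has no lifespan hook comparable to the FastAPI path above, and the exit hook in _dbos.py warns when launch never happens):

    from flask import Flask
    from dbos import DBOS

    app = Flask(__name__)
    DBOS(flask=app)

    @app.route("/greeting/<name>")
    @DBOS.workflow()
    def greeting_endpoint(name: str) -> str:
        return f"Greetings, {name}!"

    DBOS.launch()   # assumption: explicit launch is needed outside the FastAPI lifespan path
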
dbos/{kafka.py → _kafka.py}
@@ -3,24 +3,24 @@ from typing import TYPE_CHECKING, Any, Callable, NoReturn
 
 from confluent_kafka import Consumer, KafkaError, KafkaException
 
-from dbos.queue import Queue
+from ._queue import Queue
 
 if TYPE_CHECKING:
-    from dbos.dbos import _DBOSRegistry
+    from ._dbos import DBOSRegistry
 
-from .context import SetWorkflowID
-from .error import DBOSInitializationError
-from .kafka_message import KafkaMessage
-from .logger import dbos_logger
+from ._context import SetWorkflowID
+from ._error import DBOSInitializationError
+from ._kafka_message import KafkaMessage
+from ._logger import dbos_logger
 
-KafkaConsumerWorkflow = Callable[[KafkaMessage], None]
+_KafkaConsumerWorkflow = Callable[[KafkaMessage], None]
 
-kafka_queue: Queue
-in_order_kafka_queues: dict[str, Queue] = {}
+_kafka_queue: Queue
+_in_order_kafka_queues: dict[str, Queue] = {}
 
 
 def _kafka_consumer_loop(
-    func: KafkaConsumerWorkflow,
+    func: _KafkaConsumerWorkflow,
     config: dict[str, Any],
     topics: list[str],
     stop_event: threading.Event,
@@ -76,19 +76,19 @@ def _kafka_consumer_loop(
             ):
                 if in_order:
                     assert msg.topic is not None
-                    queue = in_order_kafka_queues[msg.topic]
+                    queue = _in_order_kafka_queues[msg.topic]
                     queue.enqueue(func, msg)
                 else:
-                    kafka_queue.enqueue(func, msg)
+                    _kafka_queue.enqueue(func, msg)
 
     finally:
         consumer.close()
 
 
 def kafka_consumer(
-    dbosreg: "_DBOSRegistry", config: dict[str, Any], topics: list[str], in_order: bool
-) -> Callable[[KafkaConsumerWorkflow], KafkaConsumerWorkflow]:
-    def decorator(func: KafkaConsumerWorkflow) -> KafkaConsumerWorkflow:
+    dbosreg: "DBOSRegistry", config: dict[str, Any], topics: list[str], in_order: bool
+) -> Callable[[_KafkaConsumerWorkflow], _KafkaConsumerWorkflow]:
+    def decorator(func: _KafkaConsumerWorkflow) -> _KafkaConsumerWorkflow:
         if in_order:
             for topic in topics:
                 if topic.startswith("^"):
@@ -96,10 +96,10 @@ def kafka_consumer(
                         f"Error: in-order processing is not supported for regular expression topic selectors ({topic})"
                     )
                 queue = Queue(f"_dbos_kafka_queue_topic_{topic}", concurrency=1)
-                in_order_kafka_queues[topic] = queue
+                _in_order_kafka_queues[topic] = queue
         else:
-            global kafka_queue
-            kafka_queue = Queue("_dbos_internal_queue")
+            global _kafka_queue
+            _kafka_queue = Queue("_dbos_internal_queue")
         stop_event = threading.Event()
         dbosreg.register_poller(
             stop_event, _kafka_consumer_loop, func, config, topics, stop_event, in_order
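
The module-level Kafka queues and the consumer type alias become underscore-private; the public decorator remains DBOS.kafka_consumer, which enqueues each received message onto an internal DBOS queue (one concurrency-1 queue per topic when in_order=True, per the hunk above). A hedged usage sketch (broker address, group ID, and topic are illustrative):

    from dbos import DBOS, KafkaMessage

    @DBOS.kafka_consumer(
        config={"bootstrap.servers": "localhost:9092", "group.id": "dbos-app"},
        topics=["orders"],
        in_order=True,
    )
    @DBOS.workflow()
    def process_order(msg: KafkaMessage) -> None:
        DBOS.logger.info(f"{msg.topic}: {msg.value!r}")
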
dbos/{logger.py → _logger.py}
@@ -9,10 +9,10 @@ from opentelemetry.sdk._logs.export import BatchLogRecordProcessor
 from opentelemetry.sdk.resources import Resource
 
 if TYPE_CHECKING:
-    from dbos.dbos_config import ConfigFile
+    from ._dbos_config import ConfigFile
 
 dbos_logger = logging.getLogger("dbos")
-otlp_handler, otlp_transformer = None, None
+_otlp_handler, _otlp_transformer = None, None
 
 
 class DBOSLogTransformer(logging.Filter):
@@ -74,28 +74,28 @@ def config_logger(config: "ConfigFile") -> None:
                 export_timeout_millis=5000,
             )
         )
-        global otlp_handler
-        otlp_handler = LoggingHandler(logger_provider=log_provider)
+        global _otlp_handler
+        _otlp_handler = LoggingHandler(logger_provider=log_provider)
 
         # Attach DBOS-specific attributes to all log entries.
-        global otlp_transformer
-        otlp_transformer = DBOSLogTransformer()
+        global _otlp_transformer
+        _otlp_transformer = DBOSLogTransformer()
 
         # Direct DBOS logs to OTLP
-        dbos_logger.addHandler(otlp_handler)
-        dbos_logger.addFilter(otlp_transformer)
+        dbos_logger.addHandler(_otlp_handler)
+        dbos_logger.addFilter(_otlp_transformer)
 
 
 def add_otlp_to_all_loggers() -> None:
-    if otlp_handler is not None and otlp_transformer is not None:
+    if _otlp_handler is not None and _otlp_transformer is not None:
         root = logging.root
 
-        root.addHandler(otlp_handler)
-        root.addFilter(otlp_transformer)
+        root.addHandler(_otlp_handler)
+        root.addFilter(_otlp_transformer)
 
         for logger_name in root.manager.loggerDict:
             if logger_name != dbos_logger.name:
                 logger = logging.getLogger(logger_name)
                 if not logger.propagate:
-                    logger.addHandler(otlp_handler)
-                    logger.addFilter(otlp_transformer)
+                    logger.addHandler(_otlp_handler)
+                    logger.addFilter(_otlp_transformer)
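
The OTLP handler and transformer become module-private; config_logger, add_otlp_to_all_loggers, and the "dbos" logger keep their names, so application logging through DBOS.logger is unaffected. A hedged sketch (the step and URL are illustrative):

    @DBOS.step()
    def fetch_data(url: str) -> str:
        # DBOS.logger is the "dbos" logger configured above; when an OTLP endpoint
        # is configured, these records are also exported through the handler.
        DBOS.logger.info(f"fetching {url}")
        return "ok"
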
dbos/{queue.py → _queue.py}
@@ -2,10 +2,10 @@ import threading
 import traceback
 from typing import TYPE_CHECKING, Optional, TypedDict
 
-from dbos.core import P, R, _execute_workflow_id, _start_workflow
+from ._core import P, R, execute_workflow_by_id, start_workflow
 
 if TYPE_CHECKING:
-    from dbos.dbos import DBOS, Workflow, WorkflowHandle
+    from ._dbos import DBOS, Workflow, WorkflowHandle
 
 
 class QueueRateLimit(TypedDict):
@@ -37,7 +37,7 @@ class Queue:
         self.name = name
         self.concurrency = concurrency
         self.limiter = limiter
-        from dbos.dbos import _get_or_create_dbos_registry
+        from ._dbos import _get_or_create_dbos_registry
 
         registry = _get_or_create_dbos_registry()
         registry.queue_info_map[self.name] = self
@@ -45,13 +45,13 @@ class Queue:
     def enqueue(
         self, func: "Workflow[P, R]", *args: P.args, **kwargs: P.kwargs
     ) -> "WorkflowHandle[R]":
-        from dbos.dbos import _get_dbos_instance
+        from ._dbos import _get_dbos_instance
 
         dbos = _get_dbos_instance()
-        return _start_workflow(dbos, func, self.name, False, *args, **kwargs)
+        return start_workflow(dbos, func, self.name, False, *args, **kwargs)
 
 
-def queue_thread(stop_event: threading.Event, dbos: "DBOS") -> None:
+def _queue_thread(stop_event: threading.Event, dbos: "DBOS") -> None:
     while not stop_event.is_set():
         if stop_event.wait(timeout=1):
             return
@@ -59,7 +59,7 @@ def queue_thread(stop_event: threading.Event, dbos: "DBOS") -> None:
         try:
             wf_ids = dbos._sys_db.start_queued_workflows(queue)
             for id in wf_ids:
-                _execute_workflow_id(dbos, id)
+                execute_workflow_by_id(dbos, id)
         except Exception:
             dbos.logger.warning(
                 f"Exception encountered in queue thread: {traceback.format_exc()}"