dbos 0.11.0a4__py3-none-any.whl → 0.13.0a0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dbos might be problematic.

Files changed (52)
  1. dbos/__init__.py +7 -7
  2. dbos/{admin_sever.py → _admin_sever.py} +20 -11
  3. dbos/{application_database.py → _app_db.py} +4 -5
  4. dbos/{decorators.py → _classproperty.py} +3 -3
  5. dbos/{context.py → _context.py} +26 -26
  6. dbos/{core.py → _core.py} +121 -107
  7. dbos/{dbos.py → _dbos.py} +57 -59
  8. dbos/{dbos_config.py → _dbos_config.py} +9 -9
  9. dbos/{fastapi.py → _fastapi.py} +10 -11
  10. dbos/{flask.py → _flask.py} +6 -7
  11. dbos/{kafka.py → _kafka.py} +18 -18
  12. dbos/{logger.py → _logger.py} +13 -13
  13. dbos/{queue.py → _queue.py} +7 -7
  14. dbos/{recovery.py → _recovery.py} +8 -8
  15. dbos/{roles.py → _roles.py} +5 -5
  16. dbos/{scheduler/scheduler.py → _scheduler.py} +17 -7
  17. dbos/{utils.py → _serialization.py} +4 -4
  18. dbos/{system_database.py → _sys_db.py} +42 -37
  19. dbos/{tracer.py → _tracer.py} +2 -2
  20. dbos/cli.py +21 -21
  21. {dbos-0.11.0a4.dist-info → dbos-0.13.0a0.dist-info}/METADATA +1 -1
  22. dbos-0.13.0a0.dist-info/RECORD +54 -0
  23. {dbos-0.11.0a4.dist-info → dbos-0.13.0a0.dist-info}/WHEEL +1 -1
  24. dbos-0.11.0a4.dist-info/RECORD +0 -54
  25. /dbos/{scheduler/croniter.py → _croniter.py} +0 -0
  26. /dbos/{error.py → _error.py} +0 -0
  27. /dbos/{kafka_message.py → _kafka_message.py} +0 -0
  28. /dbos/{migrations → _migrations}/env.py +0 -0
  29. /dbos/{migrations → _migrations}/script.py.mako +0 -0
  30. /dbos/{migrations → _migrations}/versions/50f3227f0b4b_fix_job_queue.py +0 -0
  31. /dbos/{migrations → _migrations}/versions/5c361fc04708_added_system_tables.py +0 -0
  32. /dbos/{migrations → _migrations}/versions/a3b18ad34abe_added_triggers.py +0 -0
  33. /dbos/{migrations → _migrations}/versions/d76646551a6b_job_queue_limiter.py +0 -0
  34. /dbos/{migrations → _migrations}/versions/d76646551a6c_workflow_queue.py +0 -0
  35. /dbos/{migrations → _migrations}/versions/eab0cc1d9a14_job_queue.py +0 -0
  36. /dbos/{registrations.py → _registrations.py} +0 -0
  37. /dbos/{request.py → _request.py} +0 -0
  38. /dbos/{schemas → _schemas}/__init__.py +0 -0
  39. /dbos/{schemas → _schemas}/application_database.py +0 -0
  40. /dbos/{schemas → _schemas}/system_database.py +0 -0
  41. /dbos/{templates → _templates}/hello/README.md +0 -0
  42. /dbos/{templates → _templates}/hello/__package/__init__.py +0 -0
  43. /dbos/{templates → _templates}/hello/__package/main.py +0 -0
  44. /dbos/{templates → _templates}/hello/__package/schema.py +0 -0
  45. /dbos/{templates → _templates}/hello/alembic.ini +0 -0
  46. /dbos/{templates → _templates}/hello/dbos-config.yaml.dbos +0 -0
  47. /dbos/{templates → _templates}/hello/migrations/env.py.dbos +0 -0
  48. /dbos/{templates → _templates}/hello/migrations/script.py.mako +0 -0
  49. /dbos/{templates → _templates}/hello/migrations/versions/2024_07_31_180642_init.py +0 -0
  50. /dbos/{templates → _templates}/hello/start_postgres_docker.py +0 -0
  51. {dbos-0.11.0a4.dist-info → dbos-0.13.0a0.dist-info}/entry_points.txt +0 -0
  52. {dbos-0.11.0a4.dist-info → dbos-0.13.0a0.dist-info}/licenses/LICENSE +0 -0
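
The dominant change in this release is a layout reorganization: nearly every module gains a leading underscore (core.py → _core.py, system_database.py → _sys_db.py, and so on), marking everything outside dbos/__init__.py as private. A minimal sketch of the intended usage after the rename, assuming the usual re-exports from the package root (not verified against this wheel):

    # Depend only on names re-exported by the package root:
    from dbos import DBOS, SetWorkflowID, load_config

    DBOS()  # assumes a dbos-config.yaml in the working directory

    @DBOS.workflow()
    def greet(name: str) -> str:
        return f"Hello, {name}!"

    # Reaching into dbos._core or dbos._sys_db now couples you to private
    # modules that can change without notice.
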
dbos/{recovery.py → _recovery.py} RENAMED
@@ -4,15 +4,15 @@ import time
 import traceback
 from typing import TYPE_CHECKING, Any, List

-from dbos.context import SetWorkflowRecovery
-from dbos.core import _execute_workflow_id
-from dbos.error import DBOSWorkflowFunctionNotFoundError
+from ._context import SetWorkflowRecovery
+from ._core import execute_workflow_by_id
+from ._error import DBOSWorkflowFunctionNotFoundError

 if TYPE_CHECKING:
-    from dbos.dbos import DBOS, WorkflowHandle
+    from ._dbos import DBOS, WorkflowHandle


-def _startup_recovery_thread(dbos: "DBOS", workflow_ids: List[str]) -> None:
+def startup_recovery_thread(dbos: "DBOS", workflow_ids: List[str]) -> None:
     """Attempt to recover local pending workflows on startup using a background thread."""
     stop_event = threading.Event()
     dbos.stop_events.append(stop_event)
@@ -20,7 +20,7 @@ def _startup_recovery_thread(dbos: "DBOS", workflow_ids: List[str]) -> None:
     try:
         for workflowID in list(workflow_ids):
             with SetWorkflowRecovery():
-                _execute_workflow_id(dbos, workflowID)
+                execute_workflow_by_id(dbos, workflowID)
             workflow_ids.remove(workflowID)
     except DBOSWorkflowFunctionNotFoundError:
         time.sleep(1)
@@ -31,7 +31,7 @@ def _startup_recovery_thread(dbos: "DBOS", workflow_ids: List[str]) -> None:
         raise e


-def _recover_pending_workflows(
+def recover_pending_workflows(
     dbos: "DBOS", executor_ids: List[str] = ["local"]
 ) -> List["WorkflowHandle[Any]"]:
     workflow_handles: List["WorkflowHandle[Any]"] = []
@@ -46,7 +46,7 @@ def _recover_pending_workflows(

         for workflowID in workflow_ids:
             with SetWorkflowRecovery():
-                handle = _execute_workflow_id(dbos, workflowID)
+                handle = execute_workflow_by_id(dbos, workflowID)
             workflow_handles.append(handle)

     dbos.logger.info("Recovered pending workflows")
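
Beyond the import rewrites, the internal entry point _execute_workflow_id is renamed to execute_workflow_by_id, and the recovery helpers lose their underscores since _recovery.py itself is now private. A sketch of the public wrapper this backs, assuming it is exposed as DBOS.recover_pending_workflows and mirrors the helper above:

    from dbos import DBOS

    handles = DBOS.recover_pending_workflows(["local"])
    for handle in handles:
        print(handle.get_result())  # blocks until each recovered workflow finishes
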
dbos/{roles.py → _roles.py} RENAMED
@@ -1,13 +1,13 @@
 from functools import wraps
 from typing import TYPE_CHECKING, Any, Callable, List, Optional, Type, TypeVar, cast

-from dbos.error import DBOSNotAuthorizedError
+from ._error import DBOSNotAuthorizedError

 if TYPE_CHECKING:
-    from dbos.dbos import _DBOSRegistry
+    from ._dbos import DBOSRegistry

-from dbos.context import DBOSAssumeRole, get_local_dbos_context
-from dbos.registrations import (
+from ._context import DBOSAssumeRole, get_local_dbos_context
+from ._registrations import (
     DBOSFuncInfo,
     get_class_info_for_func,
     get_or_create_class_info,
@@ -65,7 +65,7 @@ def required_roles(roles: List[str]) -> Callable[[F], F]:


 def default_required_roles(
-    dbosreg: "_DBOSRegistry", roles: List[str]
+    dbosreg: "DBOSRegistry", roles: List[str]
 ) -> Callable[[Type[T]], Type[T]]:
     def set_roles(cls: Type[T]) -> Type[T]:
         ci = get_or_create_class_info(cls)
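
These helpers back the role-based authorization decorators; _DBOSRegistry is renamed DBOSRegistry now that _dbos.py carries the underscore instead. A sketch of the surface they support, assuming the public decorator names match the helpers shown here:

    from dbos import DBOS

    @DBOS.required_roles(["admin"])  # checked against the authenticated roles
    @DBOS.workflow()
    def purge_reports() -> None:
        ...  # runs only when the caller holds the "admin" role
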
dbos/{scheduler/scheduler.py → _scheduler.py} RENAMED
@@ -2,14 +2,14 @@ import threading
 from datetime import datetime, timezone
 from typing import TYPE_CHECKING, Callable

-from dbos.logger import dbos_logger
-from dbos.queue import Queue
+from ._logger import dbos_logger
+from ._queue import Queue

 if TYPE_CHECKING:
-    from dbos.dbos import _DBOSRegistry
+    from ._dbos import DBOSRegistry

-from ..context import SetWorkflowID
-from .croniter import croniter  # type: ignore
+from ._context import SetWorkflowID
+from ._croniter import croniter  # type: ignore

 ScheduledWorkflow = Callable[[datetime, datetime], None]

@@ -31,13 +31,23 @@ def scheduler_loop(
         if stop_event.wait(timeout=sleepTime.total_seconds()):
             return
         with SetWorkflowID(f"sched-{func.__qualname__}-{nextExecTime.isoformat()}"):
-            scheduler_queue.enqueue(func, nextExecTime, datetime.now(timezone.utc))
+            try:
+                scheduler_queue.enqueue(func, nextExecTime, datetime.now(timezone.utc))
+            except Exception as e:
+                dbos_logger.warning(f"Error scheduling workflow: ", e)


 def scheduled(
-    dbosreg: "_DBOSRegistry", cron: str
+    dbosreg: "DBOSRegistry", cron: str
 ) -> Callable[[ScheduledWorkflow], ScheduledWorkflow]:
     def decorator(func: ScheduledWorkflow) -> ScheduledWorkflow:
+        try:
+            croniter(cron, datetime.now(timezone.utc), second_at_beginning=True)
+        except Exception as e:
+            raise ValueError(
+                f'Invalid crontab "{cron}" for scheduled function function {func.__name__}.'
+            )
+
         global scheduler_queue
         scheduler_queue = Queue("_dbos_internal_queue")
         stop_event = threading.Event()
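
Two behavioral changes ride along with the rename here: an enqueue failure in the scheduler loop is now caught and logged instead of killing the scheduler thread, and the cron expression is validated eagerly when the decorator is applied, so a bad schedule fails at startup rather than at first trigger. A sketch of the public decorator this backs (name assumed to be DBOS.scheduled):

    from datetime import datetime
    from dbos import DBOS

    @DBOS.scheduled("*/5 * * * *")  # validated by croniter at decoration time
    @DBOS.workflow()
    def tick(scheduled_time: datetime, actual_time: datetime) -> None:
        DBOS.logger.info(f"fired at {actual_time}, scheduled for {scheduled_time}")

    # @DBOS.scheduled("not a cron") would now raise ValueError immediately.
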
dbos/{utils.py → _serialization.py} RENAMED
@@ -9,14 +9,14 @@ class WorkflowInputs(TypedDict):
     kwargs: Dict[str, Any]


-def validate_item(data: Any) -> None:
+def _validate_item(data: Any) -> None:
     if isinstance(data, (types.FunctionType, types.MethodType)):
         raise TypeError("Serialized data item should not be a function")


 def serialize(data: Any) -> str:
     """Serialize an object to a JSON string using jsonpickle."""
-    validate_item(data)
+    _validate_item(data)
     encoded_data: str = jsonpickle.encode(data, unpicklable=True)
     return encoded_data

@@ -25,9 +25,9 @@ def serialize_args(data: WorkflowInputs) -> str:
     """Serialize args to a JSON string using jsonpickle."""
     arg: Any
     for arg in data["args"]:
-        validate_item(arg)
+        _validate_item(arg)
     for arg in data["kwargs"].values():
-        validate_item(arg)
+        _validate_item(arg)
     encoded_data: str = jsonpickle.encode(data, unpicklable=True)
     return encoded_data

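utils.py becomes _serialization.py, and validate_item gains an underscore to match. The module is a thin wrapper over jsonpickle; roughly, the round trip and the function guard behave like this (standalone sketch, not the module's exact code):

    import types

    import jsonpickle

    def _validate_item(data):
        if isinstance(data, (types.FunctionType, types.MethodType)):
            raise TypeError("Serialized data item should not be a function")

    inputs = {"args": [1, "two"], "kwargs": {"x": 3.0}}
    encoded = jsonpickle.encode(inputs, unpicklable=True)  # a JSON string
    assert jsonpickle.decode(encoded) == inputs

    _validate_item(lambda: None)  # raises TypeError: inputs must be data, not code
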
dbos/{system_database.py → _sys_db.py} RENAMED
@@ -25,20 +25,19 @@ from alembic import command
 from alembic.config import Config
 from sqlalchemy.exc import DBAPIError

-import dbos.utils as utils
-from dbos.error import (
+from . import _serialization
+from ._dbos_config import ConfigFile
+from ._error import (
     DBOSDeadLetterQueueError,
     DBOSNonExistentWorkflowError,
     DBOSWorkflowConflictIDError,
 )
-from dbos.registrations import DEFAULT_MAX_RECOVERY_ATTEMPTS
-
-from .dbos_config import ConfigFile
-from .logger import dbos_logger
-from .schemas.system_database import SystemSchema
+from ._logger import dbos_logger
+from ._registrations import DEFAULT_MAX_RECOVERY_ATTEMPTS
+from ._schemas.system_database import SystemSchema

 if TYPE_CHECKING:
-    from .queue import Queue
+    from ._queue import Queue


 class WorkflowStatusString(Enum):
@@ -143,15 +142,15 @@ class WorkflowInformation(TypedDict, total=False):
     # The role used to run this workflow. Empty string if authorization is not required.
     authenticated_roles: List[str]
     # All roles the authenticated user has, if any.
-    input: Optional[utils.WorkflowInputs]
+    input: Optional[_serialization.WorkflowInputs]
     output: Optional[str]
     error: Optional[str]
     request: Optional[str]


-dbos_null_topic = "__null__topic__"
-buffer_flush_batch_size = 100
-buffer_flush_interval_secs = 1.0
+_dbos_null_topic = "__null__topic__"
+_buffer_flush_batch_size = 100
+_buffer_flush_interval_secs = 1.0


 class SystemDatabase:
@@ -200,7 +199,7 @@ class SystemDatabase:

         # Run a schema migration for the system database
         migration_dir = os.path.join(
-            os.path.dirname(os.path.realpath(__file__)), "migrations"
+            os.path.dirname(os.path.realpath(__file__)), "_migrations"
         )
         alembic_cfg = Config()
         alembic_cfg.set_main_option("script_location", migration_dir)
@@ -395,7 +394,9 @@ class SystemDatabase:
             res = self.check_operation_execution(calling_wf, calling_wf_fn)
             if res is not None:
                 if res["output"]:
-                    resstat: WorkflowStatusInternal = utils.deserialize(res["output"])
+                    resstat: WorkflowStatusInternal = _serialization.deserialize(
+                        res["output"]
+                    )
                     return resstat
                 return None
         stat = self.get_workflow_status(workflow_uuid)
@@ -403,7 +404,7 @@ class SystemDatabase:
             {
                 "workflow_uuid": calling_wf,
                 "function_id": calling_wf_fn,
-                "output": utils.serialize(stat),
+                "output": _serialization.serialize(stat),
                 "error": None,
             }
         )
@@ -491,9 +492,9 @@ class SystemDatabase:
             return None
         status: str = stat["status"]
         if status == str(WorkflowStatusString.SUCCESS.value):
-            return utils.deserialize(stat["output"])
+            return _serialization.deserialize(stat["output"])
         elif status == str(WorkflowStatusString.ERROR.value):
-            raise utils.deserialize_exception(stat["error"])
+            raise _serialization.deserialize_exception(stat["error"])
         return None

     def get_workflow_info(
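
The success/error branch above shows the persistence contract: a finished workflow's output is stored as a jsonpickle string and decoded on replay, while a failed workflow's exception is decoded and re-raised. The exception round trip works because jsonpickle can reconstruct exception objects; a sketch of the idea:

    import jsonpickle

    # Persist an exception the way deserialize_exception assumes it was stored.
    try:
        raise ValueError("boom")
    except ValueError as err:
        stored = jsonpickle.encode(err)

    revived = jsonpickle.decode(stored)
    assert isinstance(revived, ValueError)  # safe to re-raise on replay
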
@@ -533,7 +534,9 @@ class SystemDatabase:
         self._exported_temp_txn_wf_status.discard(workflow_uuid)
         self._temp_txn_wf_ids.discard(workflow_uuid)

-    def get_workflow_inputs(self, workflow_uuid: str) -> Optional[utils.WorkflowInputs]:
+    def get_workflow_inputs(
+        self, workflow_uuid: str
+    ) -> Optional[_serialization.WorkflowInputs]:
         with self.engine.begin() as c:
             row = c.execute(
                 sa.select(SystemSchema.workflow_inputs.c.inputs).where(
@@ -542,7 +545,9 @@ class SystemDatabase:
             ).fetchone()
             if row is None:
                 return None
-            inputs: utils.WorkflowInputs = utils.deserialize_args(row[0])
+            inputs: _serialization.WorkflowInputs = _serialization.deserialize_args(
+                row[0]
+            )
             return inputs

     def get_workflows(self, input: GetWorkflowsInput) -> GetWorkflowsOutput:
@@ -651,7 +656,7 @@ class SystemDatabase:
         message: Any,
         topic: Optional[str] = None,
     ) -> None:
-        topic = topic if topic is not None else dbos_null_topic
+        topic = topic if topic is not None else _dbos_null_topic
         with self.engine.begin() as c:
             recorded_output = self.check_operation_execution(
                 workflow_uuid, function_id, conn=c
@@ -671,7 +676,7 @@ class SystemDatabase:
                     pg.insert(SystemSchema.notifications).values(
                         destination_uuid=destination_uuid,
                         topic=topic,
-                        message=utils.serialize(message),
+                        message=_serialization.serialize(message),
                     )
                 )
             except DBAPIError as dbapi_error:
@@ -695,14 +700,14 @@ class SystemDatabase:
         topic: Optional[str],
         timeout_seconds: float = 60,
     ) -> Any:
-        topic = topic if topic is not None else dbos_null_topic
+        topic = topic if topic is not None else _dbos_null_topic

         # First, check for previous executions.
         recorded_output = self.check_operation_execution(workflow_uuid, function_id)
         if recorded_output is not None:
             dbos_logger.debug(f"Replaying recv, id: {function_id}, topic: {topic}")
             if recorded_output["output"] is not None:
-                return utils.deserialize(recorded_output["output"])
+                return _serialization.deserialize(recorded_output["output"])
             else:
                 raise Exception("No output recorded in the last recv")
         else:
@@ -768,12 +773,12 @@ class SystemDatabase:
             rows = c.execute(delete_stmt).fetchall()
             message: Any = None
             if len(rows) > 0:
-                message = utils.deserialize(rows[0][0])
+                message = _serialization.deserialize(rows[0][0])
             self.record_operation_result(
                 {
                     "workflow_uuid": workflow_uuid,
                     "function_id": function_id,
-                    "output": utils.serialize(
+                    "output": _serialization.serialize(
                         message
                     ),  # None will be serialized to 'null'
                     "error": None,
@@ -851,7 +856,7 @@ class SystemDatabase:
         if recorded_output is not None:
             dbos_logger.debug(f"Replaying sleep, id: {function_id}, seconds: {seconds}")
             assert recorded_output["output"] is not None, "no recorded end time"
-            end_time = utils.deserialize(recorded_output["output"])
+            end_time = _serialization.deserialize(recorded_output["output"])
         else:
             dbos_logger.debug(f"Running sleep, id: {function_id}, seconds: {seconds}")
             end_time = time.time() + seconds
@@ -860,7 +865,7 @@ class SystemDatabase:
                 {
                     "workflow_uuid": workflow_uuid,
                     "function_id": function_id,
-                    "output": utils.serialize(end_time),
+                    "output": _serialization.serialize(end_time),
                     "error": None,
                 }
             )
@@ -893,11 +898,11 @@ class SystemDatabase:
                 .values(
                     workflow_uuid=workflow_uuid,
                     key=key,
-                    value=utils.serialize(message),
+                    value=_serialization.serialize(message),
                 )
                 .on_conflict_do_update(
                     index_elements=["workflow_uuid", "key"],
-                    set_={"value": utils.serialize(message)},
+                    set_={"value": _serialization.serialize(message)},
                 )
             )
         output: OperationResultInternal = {
@@ -931,7 +936,7 @@ class SystemDatabase:
                     f"Replaying get_event, id: {caller_ctx['function_id']}, key: {key}"
                 )
                 if recorded_output["output"] is not None:
-                    return utils.deserialize(recorded_output["output"])
+                    return _serialization.deserialize(recorded_output["output"])
                 else:
                     raise Exception("No output recorded in the last get_event")
         else:
@@ -951,7 +956,7 @@ class SystemDatabase:

         value: Any = None
         if len(init_recv) > 0:
-            value = utils.deserialize(init_recv[0][0])
+            value = _serialization.deserialize(init_recv[0][0])
         else:
             # Wait for the notification
             actual_timeout = timeout_seconds
@@ -969,7 +974,7 @@ class SystemDatabase:
             with self.engine.begin() as c:
                 final_recv = c.execute(get_sql).fetchall()
                 if len(final_recv) > 0:
-                    value = utils.deserialize(final_recv[0][0])
+                    value = _serialization.deserialize(final_recv[0][0])
             condition.release()
             self.workflow_events_map.pop(payload)
@@ -979,7 +984,7 @@ class SystemDatabase:
                 {
                     "workflow_uuid": caller_ctx["workflow_uuid"],
                     "function_id": caller_ctx["function_id"],
-                    "output": utils.serialize(
+                    "output": _serialization.serialize(
                         value
                     ),  # None will be serialized to 'null'
                     "error": None,
@@ -999,7 +1004,7 @@ class SystemDatabase:
             status_iter = iter(list(self._workflow_status_buffer))
             wf_id: Optional[str] = None
             while (
-                exported < buffer_flush_batch_size
+                exported < _buffer_flush_batch_size
                 and (wf_id := next(status_iter, None)) is not None
             ):
                 # Pop the first key in the buffer (FIFO)
@@ -1029,7 +1034,7 @@ class SystemDatabase:
             input_iter = iter(list(self._workflow_inputs_buffer))
             wf_id: Optional[str] = None
             while (
-                exported < buffer_flush_batch_size
+                exported < _buffer_flush_batch_size
                 and (wf_id := next(input_iter, None)) is not None
             ):
                 if wf_id not in self._exported_temp_txn_wf_status:
@@ -1060,10 +1065,10 @@ class SystemDatabase:
                 self._is_flushing_status_buffer = False
                 if self._is_buffers_empty:
                     # Only sleep if both buffers are empty
-                    time.sleep(buffer_flush_interval_secs)
+                    time.sleep(_buffer_flush_interval_secs)
             except Exception as e:
                 dbos_logger.error(f"Error while flushing buffers: {e}")
-                time.sleep(buffer_flush_interval_secs)
+                time.sleep(_buffer_flush_interval_secs)
                 # Will retry next time

     def buffer_workflow_status(self, status: WorkflowStatusInternal) -> None:
dbos/{tracer.py → _tracer.py} RENAMED
@@ -7,10 +7,10 @@ from opentelemetry.sdk.trace import TracerProvider
 from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter
 from opentelemetry.trace import Span

-from dbos.dbos_config import ConfigFile
+from ._dbos_config import ConfigFile

 if TYPE_CHECKING:
-    from .context import TracedAttributes
+    from ._context import TracedAttributes


 class DBOSTracer:
dbos/cli.py CHANGED
@@ -15,15 +15,15 @@ from rich import print
 from rich.prompt import Prompt
 from typing_extensions import Annotated

-from dbos import load_config
-from dbos.application_database import ApplicationDatabase
-from dbos.dbos_config import is_valid_app_name
-from dbos.system_database import SystemDatabase
+from . import load_config
+from ._app_db import ApplicationDatabase
+from ._dbos_config import _is_valid_app_name
+from ._sys_db import SystemDatabase

 app = typer.Typer()


-def on_windows() -> bool:
+def _on_windows() -> bool:
     return platform.system() == "Windows"

@@ -41,7 +41,7 @@ def start() -> None:
         command,
         shell=True,
        text=True,
-        preexec_fn=os.setsid if not on_windows() else None,
+        preexec_fn=os.setsid if not _on_windows() else None,
     )

     def signal_handler(signum: int, frame: Any) -> None:
@@ -70,20 +70,20 @@ def start() -> None:

     # Configure the single handler only on Unix-like systems.
     # TODO: Also kill the children on Windows.
-    if not on_windows():
+    if not _on_windows():
         signal.signal(signal.SIGINT, signal_handler)
         signal.signal(signal.SIGTERM, signal_handler)
     process.wait()


-def get_templates_directory() -> str:
+def _get_templates_directory() -> str:
     import dbos

     package_dir = path.abspath(path.dirname(dbos.__file__))
-    return path.join(package_dir, "templates")
+    return path.join(package_dir, "_templates")


-def copy_dbos_template(src: str, dst: str, ctx: dict[str, str]) -> None:
+def _copy_dbos_template(src: str, dst: str, ctx: dict[str, str]) -> None:
     with open(src, "r") as f:
         content = f.read()
@@ -94,7 +94,7 @@ def copy_dbos_template(src: str, dst: str, ctx: dict[str, str]) -> None:
         f.write(content)


-def copy_template_dir(src_dir: str, dst_dir: str, ctx: dict[str, str]) -> None:
+def _copy_template_dir(src_dir: str, dst_dir: str, ctx: dict[str, str]) -> None:

     for root, dirs, files in os.walk(src_dir, topdown=True):
         dirs[:] = [d for d in dirs if d != "__package"]
@@ -116,12 +116,12 @@ def copy_template_dir(src_dir: str, dst_dir: str, ctx: dict[str, str]) -> None:
             continue

         if ext == ".dbos":
-            copy_dbos_template(src, dst, ctx)
+            _copy_dbos_template(src, dst, ctx)
         else:
             shutil.copy(src, dst)


-def copy_template(src_dir: str, project_name: str, config_mode: bool) -> None:
+def _copy_template(src_dir: str, project_name: str, config_mode: bool) -> None:

     dst_dir = path.abspath(".")
@@ -135,19 +135,19 @@ def copy_template(src_dir: str, project_name: str, config_mode: bool) -> None:
     if config_mode:
         ctx["package_name"] = "."
         ctx["migration_command"] = "echo 'No migrations specified'"
-        copy_dbos_template(
+        _copy_dbos_template(
             os.path.join(src_dir, "dbos-config.yaml.dbos"),
             os.path.join(dst_dir, "dbos-config.yaml"),
             ctx,
         )
     else:
-        copy_template_dir(src_dir, dst_dir, ctx)
-        copy_template_dir(
+        _copy_template_dir(src_dir, dst_dir, ctx)
+        _copy_template_dir(
             path.join(src_dir, "__package"), path.join(dst_dir, package_name), ctx
         )


-def get_project_name() -> typing.Union[str, None]:
+def _get_project_name() -> typing.Union[str, None]:
     name = None
     try:
         with open("pyproject.toml", "rb") as file:
@@ -183,13 +183,13 @@ def init(
     try:
         if project_name is None:
             project_name = typing.cast(
-                str, typer.prompt("What is your project's name?", get_project_name())
+                str, typer.prompt("What is your project's name?", _get_project_name())
             )

-        if not is_valid_app_name(project_name):
+        if not _is_valid_app_name(project_name):
             raise Exception(f"{project_name} is an invalid DBOS app name")

-        templates_dir = get_templates_directory()
+        templates_dir = _get_templates_directory()
         templates = [x.name for x in os.scandir(templates_dir) if x.is_dir()]
         if len(templates) == 0:
             raise Exception(f"no DBOS templates found in {templates_dir} ")
@@ -205,7 +205,7 @@ def init(
         if template not in templates:
             raise Exception(f"template {template} not found in {templates_dir}")

-        copy_template(
+        _copy_template(
             path.join(templates_dir, template), project_name, config_mode=config
         )
     except Exception as e:
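
The cli.py changes are the same privatization sweep applied to module-level helpers, plus the bundled templates moving to _templates. The user-facing commands are untouched; typical usage (unchanged, as far as this diff shows):

    $ dbos init my-app --template hello    # scaffold from the bundled template
    $ dbos start                           # run the start command from dbos-config.yaml
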
{dbos-0.11.0a4.dist-info → dbos-0.13.0a0.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dbos
-Version: 0.11.0a4
+Version: 0.13.0a0
 Summary: Ultra-lightweight durable execution in Python
 Author-Email: "DBOS, Inc." <contact@dbos.dev>
 License: MIT
dbos-0.13.0a0.dist-info/RECORD ADDED
@@ -0,0 +1,54 @@
+dbos-0.13.0a0.dist-info/METADATA,sha256=8apeZFcAMPz_gwgZNdZliGTyxwVEejmZziEnfD7I7pM,5017
+dbos-0.13.0a0.dist-info/WHEEL,sha256=thaaA2w1JzcGC48WYufAs8nrYZjJm8LqNfnXFOFyCC4,90
+dbos-0.13.0a0.dist-info/entry_points.txt,sha256=z6GcVANQV7Uw_82H9Ob2axJX6V3imftyZsljdh-M1HU,54
+dbos-0.13.0a0.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
+dbos/__init__.py,sha256=CxRHBHEthPL4PZoLbZhp3rdm44-KkRTT2-7DkK9d4QQ,724
+dbos/_admin_sever.py,sha256=hnWAILD5hqm_CWUJ6q1pR_kVNFbpymR3licYnviPYUQ,3799
+dbos/_app_db.py,sha256=_tv2vmPjjiaikwgxH3mqxgJ4nUUcG2-0uMXKWCqVu1c,5509
+dbos/_classproperty.py,sha256=f0X-_BySzn3yFDRKB2JpCbLYQ9tLwt1XftfshvY7CBs,626
+dbos/_context.py,sha256=SJZTOB-saxCNNGT2LLop9LasqR-X2MQLTay3rjXRUCw,17748
+dbos/_core.py,sha256=HUteo2HP9C4UvB-y41xwUphLZyYRTdb0sW5XqZ6QPAY,31167
+dbos/_croniter.py,sha256=hbhgfsHBqclUS8VeLnJ9PSE9Z54z6mi4nnrr1aUXn0k,47561
+dbos/_dbos.py,sha256=Or5z_PLwuUOz2wcI4PrBwAQYv7EarGpRU44WbvSU9pw,30986
+dbos/_dbos_config.py,sha256=vUm3_1zMcbp-0D14wwJyt37sGiQP6yTIhxaTkRSnvFA,6362
+dbos/_error.py,sha256=UETk8CoZL-TO2Utn1-E7OSWelhShWmKM-fOlODMR9PE,3893
+dbos/_fastapi.py,sha256=iyefCZq-ZDKRUjN_rgYQmFmyvWf4gPrSlC6CLbfq4a8,3419
+dbos/_flask.py,sha256=z1cijbTi5Dpq6kqikPCx1LcR2YHHv2oc41NehOWjw74,2431
+dbos/_kafka.py,sha256=OmOKfO7_3Z2FUFv_sJaIfebd7xnqtuRRndzNTTufgb8,3654
+dbos/_kafka_message.py,sha256=NYvOXNG3Qn7bghn1pv3fg4Pbs86ILZGcK4IB-MLUNu0,409
+dbos/_logger.py,sha256=iYwbA7DLyXalWa2Yu07HO6Xm301nRuenMU64GgwUMkU,3576
+dbos/_migrations/env.py,sha256=38SIGVbmn_VV2x2u1aHLcPOoWgZ84eCymf3g_NljmbU,1626
+dbos/_migrations/script.py.mako,sha256=MEqL-2qATlST9TAOeYgscMn1uy6HUS9NFvDgl93dMj8,635
+dbos/_migrations/versions/50f3227f0b4b_fix_job_queue.py,sha256=ZBYrtTdxy64HxIAlOes89fVIk2P1gNaJack7wuC_epg,873
+dbos/_migrations/versions/5c361fc04708_added_system_tables.py,sha256=QMgFMb0aLgC25YicsvPSr6AHRCA6Zd66hyaRUhwKzrQ,6404
+dbos/_migrations/versions/a3b18ad34abe_added_triggers.py,sha256=Rv0ZsZYZ_WdgGEULYsPfnp4YzaO5L198gDTgYY39AVA,2022
+dbos/_migrations/versions/d76646551a6b_job_queue_limiter.py,sha256=8PyFi8rd6CN-mUro43wGhsg5wcQWKZPRHD6jw8R5pVc,986
+dbos/_migrations/versions/d76646551a6c_workflow_queue.py,sha256=G942nophZ2uC2vc4hGBC02Ptng1715roTjY3xiyzZU4,729
+dbos/_migrations/versions/eab0cc1d9a14_job_queue.py,sha256=uvhFOtqbBreCePhAxZfIT0qCAI7BiZTou9wt6QnbY7c,1412
+dbos/_queue.py,sha256=hAXwrfBmtv6BGrlmFq-Ol6b_ED-HDaYqSSxumMJC6Xo,1938
+dbos/_recovery.py,sha256=jbzGYxICA2drzyzlBSy2UiXhKV_16tBVacKQdTkqf-w,2008
+dbos/_registrations.py,sha256=mei6q6_3R5uei8i_Wo_TqGZs85s10shOekDX41sFYD0,6642
+dbos/_request.py,sha256=cX1B3Atlh160phgS35gF1VEEV4pD126c9F3BDgBmxZU,929
+dbos/_roles.py,sha256=iOsgmIAf1XVzxs3gYWdGRe1B880YfOw5fpU7Jwx8_A8,2271
+dbos/_scheduler.py,sha256=0I3e8Y-OIBG3wiUCIskShd-Sk_eUFCFyRB5u4L7IHXI,1940
+dbos/_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+dbos/_schemas/application_database.py,sha256=KeyoPrF7hy_ODXV7QNike_VFSD74QBRfQ76D7QyE9HI,966
+dbos/_schemas/system_database.py,sha256=7iw7eHJzEvkatHMOaHORoSvtfisF73wW5j8hRt_Ph14,5126
+dbos/_serialization.py,sha256=YCYv0qKAwAZ1djZisBC7khvKqG-5OcIv9t9EC5PFIog,1743
+dbos/_sys_db.py,sha256=8bPWjrKM7MZSuxbGmC1M9yshG2LqymyedOXUtZedYX8,48538
+dbos/_templates/hello/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
+dbos/_templates/hello/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+dbos/_templates/hello/__package/main.py,sha256=eI0SS9Nwj-fldtiuSzIlIG6dC91GXXwdRsoHxv6S_WI,2719
+dbos/_templates/hello/__package/schema.py,sha256=7Z27JGC8yy7Z44cbVXIREYxtUhU4JVkLCp5Q7UahVQ0,260
+dbos/_templates/hello/alembic.ini,sha256=VKBn4Gy8mMuCdY7Hip1jmo3wEUJ1VG1aW7EqY0_n-as,3695
+dbos/_templates/hello/dbos-config.yaml.dbos,sha256=7yu1q8FAgOZnwJtU-e_5qgV-wkHRn6cqo-GEmk9rK8U,577
+dbos/_templates/hello/migrations/env.py.dbos,sha256=GUV6sjkDzf9Vl6wkGEd0RSkK-ftRfV6EUwSQdd0qFXg,2392
+dbos/_templates/hello/migrations/script.py.mako,sha256=MEqL-2qATlST9TAOeYgscMn1uy6HUS9NFvDgl93dMj8,635
+dbos/_templates/hello/migrations/versions/2024_07_31_180642_init.py,sha256=U5thFWGqNN4QLrNXT7wUUqftIFDNE5eSdqD8JNW1mec,942
+dbos/_templates/hello/start_postgres_docker.py,sha256=lQVLlYO5YkhGPEgPqwGc7Y8uDKse9HsWv5fynJEFJHM,1681
+dbos/_tracer.py,sha256=rvBY1RQU6DO7rL7EnaJJxGcmd4tP_PpGqUEE6imZnhY,2518
+dbos/cli.py,sha256=Ks1jzP0tWju5jIwbLfrJpLXmk5gkU3v18FLJiNi5p8Q,8304
+dbos/dbos-config.schema.json,sha256=tgtiirOTEdIRI27eI75UAER9sAV84CDnv5lRPt0qiuQ,5672
+dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
+version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
+dbos-0.13.0a0.dist-info/RECORD,,
{dbos-0.11.0a4.dist-info → dbos-0.13.0a0.dist-info}/WHEEL RENAMED
@@ -1,4 +1,4 @@
 Wheel-Version: 1.0
-Generator: pdm-backend (2.4.2)
+Generator: pdm-backend (2.4.3)
 Root-Is-Purelib: true
 Tag: py3-none-any