dbos 0.26.0a18__py3-none-any.whl → 0.26.0a19__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
dbos/_app_db.py CHANGED
@@ -74,9 +74,12 @@ class ApplicationDatabase:
                 database["connectionTimeoutMillis"] / 1000
             )
 
+        pool_size = database.get("app_db_pool_size")
+        if pool_size is None:
+            pool_size = 20
         self.engine = sa.create_engine(
             app_db_url,
-            pool_size=database["app_db_pool_size"],
+            pool_size=pool_size,
             max_overflow=0,
             pool_timeout=30,
             connect_args=connect_args,
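
With this change, app_db_pool_size becomes optional in the application database configuration: if the key is absent (or set to None), the SQLAlchemy pool falls back to 20 connections instead of raising a KeyError. A minimal standalone sketch of that fallback (the config fragment below is hypothetical, not taken from the package):

    # Hypothetical config fragment that omits "app_db_pool_size".
    database = {"connectionTimeoutMillis": 3000}

    # Same fallback logic as the hunk above: missing key -> pool of 20.
    pool_size = database.get("app_db_pool_size")
    if pool_size is None:
        pool_size = 20

    assert pool_size == 20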
dbos/_client.py CHANGED
@@ -3,6 +3,8 @@ import sys
 import uuid
 from typing import Any, Generic, List, Optional, TypedDict, TypeVar
 
+from dbos._app_db import ApplicationDatabase
+
 if sys.version_info < (3, 11):
     from typing_extensions import NotRequired
 else:
@@ -14,11 +16,18 @@ from dbos._dbos_config import parse_database_url_to_dbconfig
 from dbos._error import DBOSNonExistentWorkflowError
 from dbos._registrations import DEFAULT_MAX_RECOVERY_ATTEMPTS
 from dbos._serialization import WorkflowInputs
-from dbos._sys_db import SystemDatabase, WorkflowStatusInternal, WorkflowStatusString
+from dbos._sys_db import (
+    StepInfo,
+    SystemDatabase,
+    WorkflowStatusInternal,
+    WorkflowStatusString,
+)
 from dbos._workflow_commands import (
     WorkflowStatus,
+    fork_workflow,
     get_workflow,
     list_queued_workflows,
+    list_workflow_steps,
     list_workflows,
 )
 
@@ -82,6 +91,7 @@ class DBOSClient:
         if system_database is not None:
             db_config["sys_db_name"] = system_database
         self._sys_db = SystemDatabase(db_config)
+        self._app_db = ApplicationDatabase(db_config)
 
     def destroy(self) -> None:
         self._sys_db.destroy()
@@ -321,3 +331,23 @@ class DBOSClient:
             offset=offset,
             sort_desc=sort_desc,
         )
+
+    def list_workflow_steps(self, workflow_id: str) -> List[StepInfo]:
+        return list_workflow_steps(self._sys_db, self._app_db, workflow_id)
+
+    async def list_workflow_steps_async(self, workflow_id: str) -> List[StepInfo]:
+        return await asyncio.to_thread(self.list_workflow_steps, workflow_id)
+
+    def fork_workflow(self, workflow_id: str, start_step: int) -> WorkflowHandle[R]:
+        forked_workflow_id = fork_workflow(
+            self._sys_db, self._app_db, workflow_id, start_step
+        )
+        return WorkflowHandleClientPolling[R](forked_workflow_id, self._sys_db)
+
+    async def fork_workflow_async(
+        self, workflow_id: str, start_step: int
+    ) -> WorkflowHandleAsync[R]:
+        forked_workflow_id = await asyncio.to_thread(
+            fork_workflow, self._sys_db, self._app_db, workflow_id, start_step
+        )
+        return WorkflowHandleClientAsyncPolling[R](forked_workflow_id, self._sys_db)
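
Taken together, these hunks give DBOSClient out-of-process access to step listing and forking, backed by the new ApplicationDatabase connection. A hedged usage sketch; the connection URL, workflow ID, and the exact constructor arguments are assumptions for illustration, not taken from this diff:

    from dbos import DBOSClient  # top-level export assumed

    client = DBOSClient("postgresql://postgres:dbos@localhost:5432/app_db")  # placeholder URL

    # Inspect the steps and transactions recorded for an existing workflow,
    # merged and ordered by function_id as in list_workflow_steps.
    for step in client.list_workflow_steps("my-workflow-id"):
        print(step["function_id"])

    # Re-run the workflow from step 3 under a newly generated workflow ID and
    # poll the fork for its result.
    handle = client.fork_workflow("my-workflow-id", 3)
    print(handle.get_result())

    client.destroy()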
dbos/_dbos.py CHANGED
@@ -34,6 +34,7 @@ from dbos._conductor.conductor import ConductorWebsocket
 from dbos._utils import INTERNAL_QUEUE_NAME, GlobalParams
 from dbos._workflow_commands import (
     WorkflowStatus,
+    fork_workflow,
     list_queued_workflows,
     list_workflows,
 )
@@ -67,7 +68,7 @@ from ._registrations import (
 )
 from ._roles import default_required_roles, required_roles
 from ._scheduler import ScheduledWorkflow, scheduled
-from ._sys_db import reset_system_database
+from ._sys_db import StepInfo, reset_system_database
 from ._tracer import dbos_tracer
 
 if TYPE_CHECKING:
@@ -113,7 +114,7 @@ from ._error import (
 from ._event_loop import BackgroundEventLoop
 from ._logger import add_otlp_to_all_loggers, config_logger, dbos_logger, init_logger
 from ._sys_db import SystemDatabase
-from ._workflow_commands import WorkflowStatus, get_workflow
+from ._workflow_commands import WorkflowStatus, get_workflow, list_workflow_steps
 
 # Most DBOS functions are just any callable F, so decorators / wrappers work on F
 # There are cases where the parameters P and return value R should be separate
@@ -959,40 +960,19 @@ class DBOS:
     @classmethod
     def restart_workflow(cls, workflow_id: str) -> WorkflowHandle[Any]:
         """Restart a workflow with a new workflow ID"""
-
         return cls.fork_workflow(workflow_id, 1)
 
     @classmethod
-    def fork_workflow(
-        cls, workflow_id: str, start_step: int = 1
-    ) -> WorkflowHandle[Any]:
-        """Restart a workflow with a new workflow ID"""
-
-        def get_max_function_id(workflow_uuid: str) -> int:
-            max_transactions = (
-                _get_dbos_instance()._app_db.get_max_function_id(workflow_uuid) or 0
-            )
-            max_operations = (
-                _get_dbos_instance()._sys_db.get_max_function_id(workflow_uuid) or 0
-            )
-            return max(max_transactions, max_operations)
-
-        max_function_id = get_max_function_id(workflow_id)
-        if max_function_id > 0 and start_step > max_function_id:
-            raise DBOSException(
-                f"Cannot fork workflow {workflow_id} at step {start_step}. The workflow has {max_function_id} steps."
-            )
+    def fork_workflow(cls, workflow_id: str, start_step: int) -> WorkflowHandle[Any]:
+        """Restart a workflow with a new workflow ID from a specific step"""
 
         def fn() -> str:
-            forked_workflow_id = str(uuid.uuid4())
             dbos_logger.info(f"Forking workflow: {workflow_id} from step {start_step}")
-
-            _get_dbos_instance()._app_db.clone_workflow_transactions(
-                workflow_id, forked_workflow_id, start_step
-            )
-
-            return _get_dbos_instance()._sys_db.fork_workflow(
-                workflow_id, forked_workflow_id, start_step
+            return fork_workflow(
+                _get_dbos_instance()._sys_db,
+                _get_dbos_instance()._app_db,
+                workflow_id,
+                start_step,
             )
 
         new_id = _get_dbos_instance()._sys_db.call_function_as_step(
@@ -1066,6 +1046,17 @@
             fn, "DBOS.listQueuedWorkflows"
         )
 
+    @classmethod
+    def list_workflow_steps(cls, workflow_id: str) -> List[StepInfo]:
+        def fn() -> List[StepInfo]:
+            return list_workflow_steps(
+                _get_dbos_instance()._sys_db, _get_dbos_instance()._app_db, workflow_id
+            )
+
+        return _get_dbos_instance()._sys_db.call_function_as_step(
+            fn, "DBOS.listWorkflowSteps"
+        )
+
     @classproperty
     def logger(cls) -> Logger:
         """Return the DBOS `Logger` for the current context."""
dbos/_workflow_commands.py CHANGED
@@ -2,6 +2,8 @@ import json
 import uuid
 from typing import Any, List, Optional
 
+from dbos._error import DBOSException
+
 from . import _serialization
 from ._app_db import ApplicationDatabase
 from ._sys_db import (
@@ -185,3 +187,25 @@ def list_workflow_steps(
     merged_steps = steps + transactions
     merged_steps.sort(key=lambda step: step["function_id"])
     return merged_steps
+
+
+def fork_workflow(
+    sys_db: SystemDatabase,
+    app_db: ApplicationDatabase,
+    workflow_id: str,
+    start_step: int,
+) -> str:
+    def get_max_function_id(workflow_uuid: str) -> int:
+        max_transactions = app_db.get_max_function_id(workflow_uuid) or 0
+        max_operations = sys_db.get_max_function_id(workflow_uuid) or 0
+        return max(max_transactions, max_operations)
+
+    max_function_id = get_max_function_id(workflow_id)
+    if max_function_id > 0 and start_step > max_function_id:
+        raise DBOSException(
+            f"Cannot fork workflow {workflow_id} from step {start_step}. The workflow has {max_function_id} steps."
+        )
+    forked_workflow_id = str(uuid.uuid4())
+    app_db.clone_workflow_transactions(workflow_id, forked_workflow_id, start_step)
+    sys_db.fork_workflow(workflow_id, forked_workflow_id, start_step)
+    return forked_workflow_id
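
This helper centralizes the validation that previously lived in DBOS.fork_workflow: the requested start_step is checked against the highest function ID recorded in either the application or the system database, and a DBOSException is raised if it points past the end of the workflow. A standalone sketch of that guard with stubbed databases (the stub class, helper name, and workflow ID are illustrative only, not part of the package):

    from dbos._error import DBOSException

    class StubDB:
        # Stand-in exposing only get_max_function_id, as the helper requires.
        def __init__(self, max_id):
            self.max_id = max_id

        def get_max_function_id(self, workflow_uuid):
            return self.max_id

    def check_start_step(sys_db, app_db, workflow_id, start_step):
        # Same bound computation as fork_workflow above.
        max_function_id = max(
            app_db.get_max_function_id(workflow_id) or 0,
            sys_db.get_max_function_id(workflow_id) or 0,
        )
        if max_function_id > 0 and start_step > max_function_id:
            raise DBOSException(
                f"Cannot fork workflow {workflow_id} from step {start_step}. "
                f"The workflow has {max_function_id} steps."
            )

    check_start_step(StubDB(5), StubDB(3), "wf-1", 4)    # ok: 4 <= 5
    # check_start_step(StubDB(5), StubDB(3), "wf-1", 6)  # raises DBOSException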
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dbos
-Version: 0.26.0a18
+Version: 0.26.0a19
 Summary: Ultra-lightweight durable execution in Python
 Author-Email: "DBOS, Inc." <contact@dbos.dev>
 License: MIT
@@ -1,19 +1,19 @@
-dbos-0.26.0a18.dist-info/METADATA,sha256=QG3XkqovR0FvEIL1_sHK6K80-comGqhpyC1LCWUDEzA,5554
-dbos-0.26.0a18.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
-dbos-0.26.0a18.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
-dbos-0.26.0a18.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
+dbos-0.26.0a19.dist-info/METADATA,sha256=YWeav0kTgNnVV03IK_iG7vuKR04nKeOnIM4M1cmCELw,5554
+dbos-0.26.0a19.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
+dbos-0.26.0a19.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
+dbos-0.26.0a19.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
 dbos/__init__.py,sha256=3NQfGlBiiUSM_v88STdVP3rNZvGkUL_9WbSotKb8Voo,873
 dbos/__main__.py,sha256=G7Exn-MhGrVJVDbgNlpzhfh8WMX_72t3_oJaFT9Lmt8,653
 dbos/_admin_server.py,sha256=RrbABfR1D3p9c_QLrCSrgFuYce6FKi0fjMRIYLjO_Y8,9038
-dbos/_app_db.py,sha256=Q9lEyCJFoZMTlnjMO8Pj8bczVmVWyDOP8qPQ6l5PpEU,11241
+dbos/_app_db.py,sha256=obNlgC9IZ20y8tqQeA1q4TjceG3jBFalxz70ieDOWCA,11332
 dbos/_classproperty.py,sha256=f0X-_BySzn3yFDRKB2JpCbLYQ9tLwt1XftfshvY7CBs,626
-dbos/_client.py,sha256=QiIR-mwRYb1ffgwGR96ICQgFORki2QpR5najtVJ2WsA,10906
+dbos/_client.py,sha256=PtOZv_4TCd7I0y9kw_0a93Lf_cUkytdDjCdrrHnyTS4,12020
 dbos/_conductor/conductor.py,sha256=HYzVL29IMMrs2Mnms_7cHJynCnmmEN5SDQOMjzn3UoU,16840
 dbos/_conductor/protocol.py,sha256=xN7pmooyF1pqbH1b6WhllU5718P7zSb_b0KCwA6bzcs,6716
 dbos/_context.py,sha256=I8sLkdKTTkZEz7wG-MjynaQB6XEF2bLXuwNksiauP7w,19430
 dbos/_core.py,sha256=uxDIJui4WS_2V1k2np0Ifue_IRzLTyq-c52bgZSQYn4,45118
 dbos/_croniter.py,sha256=XHAyUyibs_59sJQfSNWkP7rqQY6_XrlfuuCxk4jYqek,47559
-dbos/_dbos.py,sha256=jtvBQOvwdXFfknx9pDHgKC4DuiH58ICAs_0NoJQMI4w,47526
+dbos/_dbos.py,sha256=Waz_5d9PkDjxD9LUe-nDf4gn5ds2kO0ZyJFQd8Tkz9w,47155
 dbos/_dbos_config.py,sha256=m05IFjM0jSwZBsnFMF_4qP2JkjVFc0gqyM2tnotXq20,20636
 dbos/_debug.py,sha256=MNlQVZ6TscGCRQeEEL0VE8Uignvr6dPeDDDefS3xgIE,1823
 dbos/_docker_pg_helper.py,sha256=NmcgqmR5rQA_4igfeqh8ugNT2z3YmoOvuep_MEtxTiY,5854
@@ -58,11 +58,11 @@ dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py,sh
 dbos/_templates/dbos-db-starter/start_postgres_docker.py,sha256=lQVLlYO5YkhGPEgPqwGc7Y8uDKse9HsWv5fynJEFJHM,1681
 dbos/_tracer.py,sha256=dFDSFlta-rfA3-ahIRLYwnnoAOmlavdxAGllqwFgnCA,2440
 dbos/_utils.py,sha256=nFRUHzVjXG5AusF85AlYHikj63Tzi-kQm992ihsrAxA,201
-dbos/_workflow_commands.py,sha256=hHNcW4zopgxVXWfg3flHwqZEFGYpYp8ZAfUXmqiULUk,6261
+dbos/_workflow_commands.py,sha256=BzvWGOQ-4fbHlAoFI5Hdwk1PimUUgBn1kISLnSMt0To,7189
 dbos/cli/_github_init.py,sha256=Y_bDF9gfO2jB1id4FV5h1oIxEJRWyqVjhb7bNEa5nQ0,3224
 dbos/cli/_template_init.py,sha256=-WW3kbq0W_Tq4WbMqb1UGJG3xvJb3woEY5VspG95Srk,2857
 dbos/cli/cli.py,sha256=1qCTs__A9LOEfU44XZ6TufwmRwe68ZEwbWEPli3vnVM,17873
 dbos/dbos-config.schema.json,sha256=i7jcxXqByKq0Jzv3nAUavONtj03vTwj6vWP4ylmBr8o,5694
 dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
 version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
-dbos-0.26.0a18.dist-info/RECORD,,
+dbos-0.26.0a19.dist-info/RECORD,,