dbos-1.1.0a4-py3-none-any.whl → dbos-1.2.0a2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
dbos/_admin_server.py CHANGED
@@ -5,8 +5,9 @@ import re
 import threading
 from functools import partial
 from http.server import BaseHTTPRequestHandler, ThreadingHTTPServer
-from typing import TYPE_CHECKING, Any, List, TypedDict
+from typing import TYPE_CHECKING, Any, List, Optional, TypedDict

+from ._context import SetWorkflowID
 from ._error import DBOSException
 from ._logger import dbos_logger
 from ._recovery import recover_pending_workflows
@@ -141,7 +142,11 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
                 try:
                     data = json.loads(post_data.decode("utf-8"))
                     start_step: int = data.get("start_step", 1)
-                    self._handle_fork(workflow_id, start_step)
+                    new_workflow_id: Optional[str] = data.get("new_workflow_id")
+                    application_version: Optional[str] = data.get("application_version")
+                    self._handle_fork(
+                        workflow_id, start_step, new_workflow_id, application_version
+                    )
                 except (json.JSONDecodeError, AttributeError) as e:
                     self.send_response(500)
                     self.send_header("Content-Type", "application/json")
@@ -191,9 +196,24 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
         self.end_headers()
         self.wfile.write(response_body)

-    def _handle_fork(self, workflow_id: str, start_step: int) -> None:
+    def _handle_fork(
+        self,
+        workflow_id: str,
+        start_step: int,
+        new_workflow_id: Optional[str],
+        application_version: Optional[str],
+    ) -> None:
         try:
-            handle = self.dbos.fork_workflow(workflow_id, start_step)
+            print(f"Forking workflow {workflow_id} from step {start_step}")
+            if new_workflow_id is not None:
+                with SetWorkflowID(new_workflow_id):
+                    handle = self.dbos.fork_workflow(
+                        workflow_id, start_step, application_version=application_version
+                    )
+            else:
+                handle = self.dbos.fork_workflow(
+                    workflow_id, start_step, application_version=application_version
+                )
             response_body = json.dumps(
                 {
                     "workflow_id": handle.workflow_id,
dbos/_app_db.py CHANGED
@@ -216,21 +216,6 @@ class ApplicationDatabase:
             for row in rows
         ]

-    def get_max_function_id(self, workflow_uuid: str) -> Optional[int]:
-        with self.engine.begin() as conn:
-            max_function_id_row = conn.execute(
-                sa.select(
-                    sa.func.max(ApplicationSchema.transaction_outputs.c.function_id)
-                ).where(
-                    ApplicationSchema.transaction_outputs.c.workflow_uuid
-                    == workflow_uuid
-                )
-            ).fetchone()
-
-            max_function_id = max_function_id_row[0] if max_function_id_row else None
-
-            return max_function_id
-
     def clone_workflow_transactions(
         self, src_workflow_id: str, forked_workflow_id: str, start_step: int
     ) -> None:
dbos/_core.py CHANGED
@@ -602,7 +602,6 @@ async def start_workflow_async(
     *args: P.args,
     **kwargs: P.kwargs,
 ) -> "WorkflowHandleAsync[R]":
-
     # If the function has a class, add the class object as its first argument
     fself: Optional[object] = None
     if hasattr(func, "__self__"):
dbos/_dbos.py CHANGED
@@ -299,6 +299,7 @@ class DBOS:

         self._launched: bool = False
         self._debug_mode: bool = False
+        self._configured_threadpool: bool = False
         self._sys_db_field: Optional[SystemDatabase] = None
         self._app_db_field: Optional[ApplicationDatabase] = None
         self._registry: DBOSRegistry = _get_or_create_dbos_registry()
@@ -719,6 +720,7 @@ class DBOS:
         **kwargs: P.kwargs,
     ) -> WorkflowHandleAsync[R]:
         """Invoke a workflow function on the event loop, returning a handle to the ongoing execution."""
+        await cls._configure_asyncio_thread_pool()
         return await start_workflow_async(
             _get_dbos_instance(), func, None, True, *args, **kwargs
         )
@@ -736,6 +738,7 @@ class DBOS:
     async def get_workflow_status_async(
         cls, workflow_id: str
     ) -> Optional[WorkflowStatus]:
+        await cls._configure_asyncio_thread_pool()
         """Return the status of a workflow execution."""
         return await asyncio.to_thread(cls.get_workflow_status, workflow_id)

@@ -757,6 +760,7 @@ class DBOS:
     ) -> WorkflowHandleAsync[R]:
         """Return a `WorkflowHandle` for a workflow execution."""
         dbos = _get_dbos_instance()
+        await cls._configure_asyncio_thread_pool()
         if existing_workflow:
             stat = await dbos.get_workflow_status_async(workflow_id)
             if stat is None:
@@ -775,6 +779,7 @@ class DBOS:
         cls, destination_id: str, message: Any, topic: Optional[str] = None
     ) -> None:
         """Send a message to a workflow execution."""
+        await cls._configure_asyncio_thread_pool()
         await asyncio.to_thread(lambda: DBOS.send(destination_id, message, topic))

     @classmethod
@@ -797,6 +802,7 @@ class DBOS:
         This function is to be called from within a workflow.
         `recv_async` will return the message sent on `topic`, asyncronously waiting if necessary.
         """
+        await cls._configure_asyncio_thread_pool()
         return await asyncio.to_thread(lambda: DBOS.recv(topic, timeout_seconds))

     @classmethod
@@ -835,6 +841,7 @@ class DBOS:
         It is important to use `DBOS.sleep` or `DBOS.sleep_async` (as opposed to any other sleep) within workflows,
         as the DBOS sleep methods are durable and completed sleeps will be skipped during recovery.
         """
+        await cls._configure_asyncio_thread_pool()
         await asyncio.to_thread(lambda: DBOS.sleep(seconds))

     @classmethod
@@ -869,6 +876,7 @@ class DBOS:
             value(Any): A serializable value to associate with the key

         """
+        await cls._configure_asyncio_thread_pool()
         await asyncio.to_thread(lambda: DBOS.set_event(key, value))

     @classmethod
@@ -901,6 +909,7 @@ class DBOS:
             timeout_seconds(float): The amount of time to wait, in case `set_event` has not yet been called byt the workflow

         """
+        await cls._configure_asyncio_thread_pool()
         return await asyncio.to_thread(
             lambda: DBOS.get_event(workflow_id, key, timeout_seconds)
         )
@@ -929,6 +938,19 @@ class DBOS:
             fn, "DBOS.cancelWorkflow"
         )

+    @classmethod
+    async def _configure_asyncio_thread_pool(cls) -> None:
+        """
+        Configure the thread pool for asyncio.to_thread.
+
+        This function is called before the first call to asyncio.to_thread.
+        """
+        if _get_dbos_instance()._configured_threadpool:
+            return
+        loop = asyncio.get_running_loop()
+        loop.set_default_executor(_get_dbos_instance()._executor)
+        _get_dbos_instance()._configured_threadpool = True
+
     @classmethod
     def resume_workflow(cls, workflow_id: str) -> WorkflowHandle[Any]:
         """Resume a workflow by ID."""
dbos/_event_loop.py CHANGED
@@ -1,5 +1,6 @@
 import asyncio
 import threading
+from concurrent.futures import ThreadPoolExecutor
 from typing import Any, Coroutine, Optional, TypeVar


@@ -33,15 +34,17 @@ class BackgroundEventLoop:

     def _run_event_loop(self) -> None:
         self._loop = asyncio.new_event_loop()
-        asyncio.set_event_loop(self._loop)
+        with ThreadPoolExecutor(max_workers=64) as thread_pool:
+            self._loop.set_default_executor(thread_pool)
+            asyncio.set_event_loop(self._loop)

-        self._running = True
-        self._ready.set()  # Signal that the loop is ready
+            self._running = True
+            self._ready.set()  # Signal that the loop is ready

-        try:
-            self._loop.run_forever()
-        finally:
-            self._loop.close()
+            try:
+                self._loop.run_forever()
+            finally:
+                self._loop.close()

     async def _shutdown(self) -> None:
         if self._loop is None:
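Design note on the _event_loop.py change: the stock default executor that asyncio.to_thread falls back to is capped at min(32, os.cpu_count() + 4) threads (CPython 3.8+), so installing a dedicated 64-worker ThreadPoolExecutor on the background loop gives blocking to_thread work more headroom, and the with block ensures the pool is shut down when the loop stops.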
dbos/_sys_db.py CHANGED
@@ -601,18 +601,6 @@ class SystemDatabase:
             )
         )

-    def get_max_function_id(self, workflow_uuid: str) -> Optional[int]:
-        with self.engine.begin() as conn:
-            max_function_id_row = conn.execute(
-                sa.select(
-                    sa.func.max(SystemSchema.operation_outputs.c.function_id)
-                ).where(SystemSchema.operation_outputs.c.workflow_uuid == workflow_uuid)
-            ).fetchone()
-
-            max_function_id = max_function_id_row[0] if max_function_id_row else None
-
-            return max_function_id
-
     def fork_workflow(
         self,
         original_workflow_id: str,
dbos/_workflow_commands.py CHANGED
@@ -103,16 +103,7 @@ def fork_workflow(
     *,
     application_version: Optional[str],
 ) -> str:
-    def get_max_function_id(workflow_uuid: str) -> int:
-        max_transactions = app_db.get_max_function_id(workflow_uuid) or 0
-        max_operations = sys_db.get_max_function_id(workflow_uuid) or 0
-        return max(max_transactions, max_operations)
-
-    max_function_id = get_max_function_id(workflow_id)
-    if max_function_id > 0 and start_step > max_function_id:
-        raise DBOSException(
-            f"Cannot fork workflow {workflow_id} from step {start_step}. The workflow has {max_function_id} steps."
-        )
+
     ctx = get_local_dbos_context()
     if ctx is not None and len(ctx.id_assigned_for_next_workflow) > 0:
         forked_workflow_id = ctx.id_assigned_for_next_workflow
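A sketch of driving the same fork path from application code, mirroring what the admin handler in this release does; the workflow IDs, step number, and version string below are placeholders, and DBOS must already be configured and launched.

# Placeholder IDs and version; mirrors the SetWorkflowID + fork_workflow pattern
# used by the admin handler above.
from dbos import DBOS, SetWorkflowID

def fork_example() -> None:
    with SetWorkflowID("forked-workflow-id"):  # optional: pin the forked run's ID
        handle = DBOS.fork_workflow(
            "original-workflow-id",  # workflow to fork
            2,  # start_step to resume from
            application_version="v2",  # optional: target application version
        )
    print(handle.workflow_id)  # ID of the forked execution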
dbos-1.1.0a4.dist-info/METADATA → dbos-1.2.0a2.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dbos
-Version: 1.1.0a4
+Version: 1.2.0a2
 Summary: Ultra-lightweight durable execution in Python
 Author-Email: "DBOS, Inc." <contact@dbos.dev>
 License: MIT
dbos-1.1.0a4.dist-info/RECORD → dbos-1.2.0a2.dist-info/RECORD RENAMED
@@ -1,24 +1,24 @@
-dbos-1.1.0a4.dist-info/METADATA,sha256=U7kpwMutGlWwRdkgX-Zt_y9g_6hPYwZ5NFyMbTW4S48,13267
-dbos-1.1.0a4.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
-dbos-1.1.0a4.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
-dbos-1.1.0a4.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
+dbos-1.2.0a2.dist-info/METADATA,sha256=at-2zS4N-BoXxmKlOeZt7HRA5shIG9pu3pb98t8VFNs,13267
+dbos-1.2.0a2.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
+dbos-1.2.0a2.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
+dbos-1.2.0a2.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
 dbos/__init__.py,sha256=NssPCubaBxdiKarOWa-wViz1hdJSkmBGcpLX_gQ4NeA,891
 dbos/__main__.py,sha256=G7Exn-MhGrVJVDbgNlpzhfh8WMX_72t3_oJaFT9Lmt8,653
-dbos/_admin_server.py,sha256=A_28_nJ1nBBYDmCxtklJR9O2v14JRMtD1rAo_D4y8Kc,9764
-dbos/_app_db.py,sha256=wxZz3ja9QgVuyp5YLsAqa_MpuyD5tl0C5GHTLl8fwF0,10514
+dbos/_admin_server.py,sha256=TWXi4drrzKFpKkUmEJpJkQBZxAtOalnhtYicEn2nDK0,10618
+dbos/_app_db.py,sha256=0PKqpxJ3EbIaak3Wl0lNl3hXvhBfz4EEHaCw1bUOvIM,9937
 dbos/_classproperty.py,sha256=f0X-_BySzn3yFDRKB2JpCbLYQ9tLwt1XftfshvY7CBs,626
 dbos/_client.py,sha256=-nK2GjS9D0qnD2DkRDs7gKxNECwYlsvW6hFCjADlnv0,14186
 dbos/_conductor/conductor.py,sha256=o0IaZjwnZ2TOyHeP2H4iSX6UnXLXQ4uODvWAKD9hHMs,21703
 dbos/_conductor/protocol.py,sha256=wgOFZxmS81bv0WCB9dAyg0s6QzldpzVKQDoSPeaX0Ws,6967
 dbos/_context.py,sha256=5ajoWAmToAfzzmMLylnJZoL4Ny9rBwZWuG05sXadMIA,24798
-dbos/_core.py,sha256=UDpSgRA9m_YuViNXR9tVgNFLC-zxKZPxjlkj2a-Kj00,48317
+dbos/_core.py,sha256=7ukQH_KClBaMFy0sVTSR5tWylW-RqI9qaReBY-LDKrk,48316
 dbos/_croniter.py,sha256=XHAyUyibs_59sJQfSNWkP7rqQY6_XrlfuuCxk4jYqek,47559
-dbos/_dbos.py,sha256=f5s9cVgsiMkAkpvctLHE6sjVAEuC-eFEpRddYBIKxiA,46430
+dbos/_dbos.py,sha256=MuMYbtqUyk2uihCH8aMVDeHmn_P8X8-udqeNT1RLesY,47365
 dbos/_dbos_config.py,sha256=IufNrIC-M2xSNTXyT_KXlEdfB3j03pPLv_nE0fEq4_U,20955
 dbos/_debug.py,sha256=MNlQVZ6TscGCRQeEEL0VE8Uignvr6dPeDDDefS3xgIE,1823
 dbos/_docker_pg_helper.py,sha256=tLJXWqZ4S-ExcaPnxg_i6cVxL6ZxrYlZjaGsklY-s2I,6115
 dbos/_error.py,sha256=q0OQJZTbR8FFHV9hEpAGpz9oWBT5L509zUhmyff7FJw,8500
-dbos/_event_loop.py,sha256=NmaLbEQFfEK36S_0KhVD39YdYrGce3qSKCTJ-5RqKQ0,2136
+dbos/_event_loop.py,sha256=ts2T1_imfQjdu6hPs7-WZHui4DtmsZ2HUsPgIJ1GXZg,2335
 dbos/_fastapi.py,sha256=m4SL3H9P-NBQ_ZrbFxAWMOqNyIi3HGEn2ODR7xAK038,3118
 dbos/_flask.py,sha256=Npnakt-a3W5OykONFRkDRnumaDhTQmA0NPdUCGRYKXE,1652
 dbos/_kafka.py,sha256=pz0xZ9F3X9Ky1k-VSbeF3tfPhP3UPr3lUUhUfE41__U,4198
@@ -47,7 +47,7 @@ dbos/_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_schemas/application_database.py,sha256=SypAS9l9EsaBHFn9FR8jmnqt01M74d9AF1AMa4m2hhI,1040
 dbos/_schemas/system_database.py,sha256=3Z0L72bOgHnusK1hBaETWU9RfiLBP0QnS-fdu41i0yY,5835
 dbos/_serialization.py,sha256=bWuwhXSQcGmiazvhJHA5gwhrRWxtmFmcCFQSDJnqqkU,3666
-dbos/_sys_db.py,sha256=gVa5arMBT8rKHkycPS8HyRzfvQdQRxYqIclw0Fcp6CM,84240
+dbos/_sys_db.py,sha256=IMmRbeIcrsOFJfVcBhMkDWiA3_SvxeKbOGipFiplHPM,83735
 dbos/_templates/dbos-db-starter/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
 dbos/_templates/dbos-db-starter/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_templates/dbos-db-starter/__package/main.py.dbos,sha256=aQnBPSSQpkB8ERfhf7gB7P9tsU6OPKhZscfeh0yiaD8,2702
@@ -60,11 +60,11 @@ dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py,sh
 dbos/_templates/dbos-db-starter/start_postgres_docker.py,sha256=lQVLlYO5YkhGPEgPqwGc7Y8uDKse9HsWv5fynJEFJHM,1681
 dbos/_tracer.py,sha256=yN6GRDKu_1p-EqtQLNarMocPfga2ZuqpzStzzSPYhzo,2732
 dbos/_utils.py,sha256=UbpMYRBSyvJqdXeWAnfSw8xXM1R1mfnyl1oTunhEjJM,513
-dbos/_workflow_commands.py,sha256=2E8FRUv_nLYkpBTwfhh_ELhySYpMrm8qGB9J44g6DSE,3872
+dbos/_workflow_commands.py,sha256=UCpHWvCEXjVZtf5FNanFvtJpgUJDSI1EFBqQP0x_2A0,3346
 dbos/cli/_github_init.py,sha256=Y_bDF9gfO2jB1id4FV5h1oIxEJRWyqVjhb7bNEa5nQ0,3224
 dbos/cli/_template_init.py,sha256=7JBcpMqP1r2mfCnvWatu33z8ctEGHJarlZYKgB83cXE,2972
 dbos/cli/cli.py,sha256=HinoCGrAUTiSeq7AAoCFfhdiE0uDw7vLMuDMN1_YTLI,20705
 dbos/dbos-config.schema.json,sha256=CjaspeYmOkx6Ip_pcxtmfXJTn_YGdSx_0pcPBF7KZmo,6060
 dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
 version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
-dbos-1.1.0a4.dist-info/RECORD,,
+dbos-1.2.0a2.dist-info/RECORD,,