dbos 1.6.0a5__py3-none-any.whl → 1.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dbos might be problematic.

dbos/_admin_server.py CHANGED

@@ -9,6 +9,7 @@ from typing import TYPE_CHECKING, Any, Dict, List, Optional, TypedDict
 
 from dbos._workflow_commands import garbage_collect, global_timeout
 
+from ._conductor import protocol as conductor_protocol
 from ._context import SetWorkflowID
 from ._error import DBOSException
 from ._logger import dbos_logger
@@ -118,7 +119,12 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
                self.send_response(404)
                self._end_headers()
                return
-            response_body = json.dumps(workflows[0].__dict__).encode("utf-8")
+            workflow_output = (
+                conductor_protocol.WorkflowsOutput.from_workflow_information(
+                    workflows[0]
+                )
+            )
+            response_body = json.dumps(workflow_output.__dict__).encode("utf-8")
            self.send_response(200)
            self.send_header("Content-Type", "application/json")
            self.send_header("Content-Length", str(len(response_body)))
@@ -326,20 +332,24 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
 
    def _handle_workflows(self, filters: Dict[str, Any]) -> None:
        workflows = self.dbos.list_workflows(
-            workflow_ids=filters.get("workflow_ids"),
-            name=filters.get("name"),
+            workflow_ids=filters.get("workflow_uuids"),
+            user=filters.get("authenticated_user"),
            start_time=filters.get("start_time"),
            end_time=filters.get("end_time"),
            status=filters.get("status"),
            app_version=filters.get("application_version"),
+            name=filters.get("workflow_name"),
            limit=filters.get("limit"),
            offset=filters.get("offset"),
            sort_desc=filters.get("sort_desc", False),
            workflow_id_prefix=filters.get("workflow_id_prefix"),
        )
-
+        workflows_output = [
+            conductor_protocol.WorkflowsOutput.from_workflow_information(i)
+            for i in workflows
+        ]
        response_body = json.dumps(
-            [workflow.__dict__ for workflow in workflows]
+            [workflow.__dict__ for workflow in workflows_output]
        ).encode("utf-8")
        self.send_response(200)
        self.send_header("Content-Type", "application/json")
@@ -349,18 +359,21 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
 
    def _handle_queued_workflows(self, filters: Dict[str, Any]) -> None:
        workflows = self.dbos.list_queued_workflows(
-            queue_name=filters.get("queue_name"),
-            name=filters.get("name"),
            start_time=filters.get("start_time"),
            end_time=filters.get("end_time"),
            status=filters.get("status"),
+            name=filters.get("workflow_name"),
            limit=filters.get("limit"),
            offset=filters.get("offset"),
+            queue_name=filters.get("queue_name"),
            sort_desc=filters.get("sort_desc", False),
        )
-
+        workflows_output = [
+            conductor_protocol.WorkflowsOutput.from_workflow_information(i)
+            for i in workflows
+        ]
        response_body = json.dumps(
-            [workflow.__dict__ for workflow in workflows]
+            [workflow.__dict__ for workflow in workflows_output]
        ).encode("utf-8")
        self.send_response(200)
        self.send_header("Content-Type", "application/json")
dbos/_core.py CHANGED

@@ -1157,13 +1157,16 @@ def decorate_step(
        def wrapper(*args: Any, **kwargs: Any) -> Any:
            rr: Optional[str] = check_required_roles(func, fi)
            # Entering step is allowed:
+            # No DBOS, just call the original function directly
            # In a step already, just call the original function directly.
            # In a workflow (that is not in a step already)
            # Not in a workflow (we will start the single op workflow)
+            if not dbosreg.dbos or not dbosreg.dbos._launched:
+                # Call the original function directly
+                return func(*args, **kwargs)
            ctx = get_local_dbos_context()
            if ctx and ctx.is_step():
                # Call the original function directly
-
                return func(*args, **kwargs)
            if ctx and ctx.is_within_workflow():
                assert ctx.is_workflow(), "Steps must be called from within workflows"
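With this new guard, a step-decorated function called before any DBOS instance exists, or before it is launched, now falls through to the plain Python function instead of entering the workflow machinery. A minimal sketch of the observable behavior; the add function is illustrative, not part of the package:

from dbos import DBOS

@DBOS.step()
def add(a: int, b: int) -> int:
    return a + b

# No DBOS() has been constructed or launched yet, so the wrapper's new check
# (no registry instance, or instance not launched) calls add() directly.
print(add(2, 3))  # 5, with no step recording or workflow context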
dbos/_dbos.py CHANGED

@@ -7,7 +7,6 @@ import inspect
 import os
 import sys
 import threading
-import traceback
 import uuid
 from concurrent.futures import ThreadPoolExecutor
 from logging import Logger
@@ -28,6 +27,7 @@ from typing import (
 )
 
 from opentelemetry.trace import Span
+from rich import print
 
 from dbos._conductor.conductor import ConductorWebsocket
 from dbos._sys_db import WorkflowStatus
@@ -517,6 +517,16 @@ class DBOS:
 
        dbos_logger.info("DBOS launched!")
 
+        if self.conductor_key is None and os.environ.get("DBOS__CLOUD") != "true":
+            # Hint the user to open the URL to register and set up Conductor
+            app_name = self._config["name"]
+            conductor_registration_url = (
+                f"https://console.dbos.dev/self-host?appname={app_name}"
+            )
+            print(
+                f"[bold]To view and manage workflows, connect to DBOS Conductor at:[/bold] [bold blue]{conductor_registration_url}[/bold blue]"
+            )
+
        # Flush handlers and add OTLP to all loggers if enabled
        # to enable their export in DBOS Cloud
        for handler in dbos_logger.handlers:
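The launch-time hint above is rendered with rich markup and is suppressed when a Conductor API key is configured or when the process runs in DBOS Cloud (DBOS__CLOUD=true). A standalone sketch of the same logic, assuming the rich package that _dbos.py now imports; the helper name is illustrative:

import os
from typing import Optional

from rich import print  # renders the [bold]/[bold blue] markup below

def maybe_print_conductor_hint(app_name: str, conductor_key: Optional[str]) -> None:
    # Skip the hint if a Conductor key is set or we are running in DBOS Cloud.
    if conductor_key is None and os.environ.get("DBOS__CLOUD") != "true":
        url = f"https://console.dbos.dev/self-host?appname={app_name}"
        print(
            f"[bold]To view and manage workflows, connect to DBOS Conductor at:[/bold] "
            f"[bold blue]{url}[/bold blue]"
        )

maybe_print_conductor_hint("my-app", conductor_key=None)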
dbos/_queue.py CHANGED

@@ -1,3 +1,4 @@
+import random
 import threading
 from typing import TYPE_CHECKING, Any, Callable, Coroutine, Optional, TypedDict
 
@@ -94,8 +95,12 @@
 
 
 def queue_thread(stop_event: threading.Event, dbos: "DBOS") -> None:
+    polling_interval = 1.0
+    min_polling_interval = 1.0
+    max_polling_interval = 120.0
    while not stop_event.is_set():
-        if stop_event.wait(timeout=1):
+        # Wait for the polling interval with jitter
+        if stop_event.wait(timeout=polling_interval * random.uniform(0.95, 1.05)):
            return
        queues = dict(dbos._registry.queue_info_map)
        for _, queue in queues.items():
@@ -106,12 +111,22 @@ def queue_thread(stop_event: threading.Event, dbos: "DBOS") -> None:
                for id in wf_ids:
                    execute_workflow_by_id(dbos, id)
            except OperationalError as e:
-                # Ignore serialization error
-                if not isinstance(
+                if isinstance(
                    e.orig, (errors.SerializationFailure, errors.LockNotAvailable)
                ):
+                    # If a serialization error is encountered, increase the polling interval
+                    polling_interval = min(
+                        max_polling_interval,
+                        polling_interval * 2.0,
+                    )
+                    dbos.logger.warning(
+                        f"Contention detected in queue thread for {queue.name}. Increasing polling interval to {polling_interval:.2f}."
+                    )
+                else:
                    dbos.logger.warning(f"Exception encountered in queue thread: {e}")
            except Exception as e:
                if not stop_event.is_set():
                    # Only print the error if the thread is not stopping
                    dbos.logger.warning(f"Exception encountered in queue thread: {e}")
+        # Attempt to scale back the polling interval on each iteration
+        polling_interval = max(min_polling_interval, polling_interval * 0.9)
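The queue poller now backs off under contention: the interval doubles (capped at 120 s) whenever Postgres reports a serialization failure or an unavailable lock, decays by 10% toward the 1 s floor on every iteration, and each wait is jittered by ±5% so workers do not poll in lockstep. A standalone sketch of that policy, decoupled from DBOS; poll_once is a hypothetical stand-in for the dequeue-and-run work the real thread performs:

import random
import threading
from typing import Callable

def polling_loop(stop_event: threading.Event, poll_once: Callable[[], bool]) -> None:
    interval, floor, ceiling = 1.0, 1.0, 120.0
    while not stop_event.is_set():
        # Jittered wait (+/-5%) so many workers spread out their polls
        if stop_event.wait(timeout=interval * random.uniform(0.95, 1.05)):
            return
        contended = poll_once()  # True when the database reported contention
        if contended:
            interval = min(ceiling, interval * 2.0)  # exponential backoff
        # Decay toward the floor every iteration, contended or not
        interval = max(floor, interval * 0.9)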
dbos/_sys_db.py CHANGED

@@ -1650,7 +1650,7 @@ class SystemDatabase:
            return []
 
        # Compute max_tasks, the number of workflows that can be dequeued given local and global concurrency limits,
-        max_tasks = float("inf")
+        max_tasks = 100  # To minimize contention with large queues, never dequeue more than 100 tasks
        if queue.worker_concurrency is not None or queue.concurrency is not None:
            # Count how many workflows on this queue are currently PENDING both locally and globally.
            pending_tasks_query = (
@@ -1694,6 +1694,7 @@
 
        # Retrieve the first max_tasks workflows in the queue.
        # Only retrieve workflows of the local version (or without version set)
+        skip_locks = queue.concurrency is None
        query = (
            sa.select(
                SystemSchema.workflow_status.c.workflow_uuid,
@@ -1711,7 +1712,10 @@
                    SystemSchema.workflow_status.c.application_version.is_(None),
                )
            )
-            .with_for_update(nowait=True)  # Error out early
+            # Unless global concurrency is set, use skip_locked to only select
+            # rows that can be locked. If global concurrency is set, use no_wait
+            # to ensure all processes have a consistent view of the table.
+            .with_for_update(skip_locked=skip_locks, nowait=(not skip_locks))
        )
        if queue.priority_enabled:
            query = query.order_by(
@@ -1720,9 +1724,7 @@
            )
        else:
            query = query.order_by(SystemSchema.workflow_status.c.created_at.asc())
-        # Apply limit only if max_tasks is finite
-        if max_tasks != float("inf"):
-            query = query.limit(int(max_tasks))
+        query = query.limit(int(max_tasks))
 
        rows = c.execute(query).fetchall()
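The dequeue query now always caps a batch at 100 rows and picks its row-locking mode from the queue configuration: FOR UPDATE SKIP LOCKED by default, so concurrent workers simply pass over rows another worker has locked, and FOR UPDATE NOWAIT when a global concurrency limit is set, so every worker either sees a consistent view of the queue or fails fast. A minimal SQLAlchemy sketch of that choice, using an illustrative table rather than the package's actual schema:

import sqlalchemy as sa

metadata = sa.MetaData()
tasks = sa.Table(
    "tasks",
    metadata,
    sa.Column("id", sa.String, primary_key=True),
    sa.Column("status", sa.String),
    sa.Column("created_at", sa.BigInteger),
)

def build_dequeue_query(global_concurrency_limit_set: bool, max_tasks: int = 100):
    # Without a global limit, SKIP LOCKED lets each worker grab whatever rows
    # are free; with a global limit, NOWAIT errors out early so all workers
    # reason about the same snapshot of the queue.
    skip_locks = not global_concurrency_limit_set
    return (
        sa.select(tasks.c.id)
        .where(tasks.c.status == "ENQUEUED")
        .with_for_update(skip_locked=skip_locks, nowait=not skip_locks)
        .order_by(tasks.c.created_at.asc())
        .limit(max_tasks)
    )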
 
dbos-1.6.0a5.dist-info/METADATA → dbos-1.7.0.dist-info/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dbos
-Version: 1.6.0a5
+Version: 1.7.0
 Summary: Ultra-lightweight durable execution in Python
 Author-Email: "DBOS, Inc." <contact@dbos.dev>
 License: MIT
dbos-1.6.0a5.dist-info/RECORD → dbos-1.7.0.dist-info/RECORD

@@ -1,19 +1,19 @@
-dbos-1.6.0a5.dist-info/METADATA,sha256=IVcP3LMYY56MWqRLSEYfDNyOyZC2Jn2N75esPSn_Ksc,13267
-dbos-1.6.0a5.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
-dbos-1.6.0a5.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
-dbos-1.6.0a5.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
+dbos-1.7.0.dist-info/METADATA,sha256=e9Qrhom0oFhCJ8kNU5nWYj6wYzSWfCcdUJzpfkhVuwE,13265
+dbos-1.7.0.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
+dbos-1.7.0.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
+dbos-1.7.0.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
 dbos/__init__.py,sha256=NssPCubaBxdiKarOWa-wViz1hdJSkmBGcpLX_gQ4NeA,891
 dbos/__main__.py,sha256=G7Exn-MhGrVJVDbgNlpzhfh8WMX_72t3_oJaFT9Lmt8,653
-dbos/_admin_server.py,sha256=l46ZX4NpvBP9W8cl9gE7OqMNwUCevLMt2VztM7crBv0,15465
+dbos/_admin_server.py,sha256=S2hFr3m5R3WkbOp3Yz9lWt5iLBfWPnvhYwYLokVax0A,16094
 dbos/_app_db.py,sha256=htblDPfqrpb_uZoFcvaud7cgQ-PDyn6Bn-cBidxdCTA,10603
 dbos/_classproperty.py,sha256=f0X-_BySzn3yFDRKB2JpCbLYQ9tLwt1XftfshvY7CBs,626
 dbos/_client.py,sha256=DeiJHo5fTedWsipr7qlQQIcDmVAPjzzX94X01121oQM,14780
 dbos/_conductor/conductor.py,sha256=y_T-8kEHwKWt6W8LtcFMctB_6EvYFWsuGLxiFuuKKBU,23702
 dbos/_conductor/protocol.py,sha256=DOTprPSd7oHDcvwWSyZpnlPds_JfILtcKzHZa-qBsF4,7330
 dbos/_context.py,sha256=zhje6jObpBcRALYfHyyIEumHtk_enl_PxLl01j4oDME,24897
-dbos/_core.py,sha256=m3e1WZ_210p2DT8c1sTh4S_CVM748UjkBdiGO846mVg,49269
+dbos/_core.py,sha256=kRY2PXVryfpwjbOCmgzPA_-qNsFmRMLi-CxYCnyp1V8,49495
 dbos/_croniter.py,sha256=XHAyUyibs_59sJQfSNWkP7rqQY6_XrlfuuCxk4jYqek,47559
-dbos/_dbos.py,sha256=qzXD55bGJJW2SxI6HESykDRIpBmODNwIUt_jRkcRBVw,47588
+dbos/_dbos.py,sha256=BprKIGPT-QDeoxtKM6kjRUK9dyF8sPCFfHIyIt0u7CE,48142
 dbos/_dbos_config.py,sha256=JUG4V1rrP0p1AYESgih4ea80qOH_13UsgoIIm8X84pw,20562
 dbos/_debug.py,sha256=99j2SChWmCPAlZoDmjsJGe77tpU2LEa8E2TtLAnnh7o,1831
 dbos/_docker_pg_helper.py,sha256=tLJXWqZ4S-ExcaPnxg_i6cVxL6ZxrYlZjaGsklY-s2I,6115
@@ -40,7 +40,7 @@ dbos/_migrations/versions/d994145b47b6_consolidate_inputs.py,sha256=_J0jP247fuo6
 dbos/_migrations/versions/eab0cc1d9a14_job_queue.py,sha256=uvhFOtqbBreCePhAxZfIT0qCAI7BiZTou9wt6QnbY7c,1412
 dbos/_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py,sha256=m90Lc5YH0ZISSq1MyxND6oq3RZrZKrIqEsZtwJ1jWxA,1049
 dbos/_outcome.py,sha256=Kz3aL7517q9UEFTx3Cq9zzztjWyWVOx_08fZyHo9dvg,7035
-dbos/_queue.py,sha256=Kq7aldTDLRF7cZtkXmsCy6wV2PR24enkhghEG25NtaU,4080
+dbos/_queue.py,sha256=0kJTPwXy3nZ4Epzt-lHky9M9S4L31645drPGFR8fIJY,4854
 dbos/_recovery.py,sha256=TBNjkmSEqBU-g5YXExsLJ9XoCe4iekqtREsskXZECEg,2507
 dbos/_registrations.py,sha256=U-PwDZBuyuJjA2LYtud7D3VxDR440mVpMYE-S11BWDo,7369
 dbos/_roles.py,sha256=kCuhhg8XLtrHCgKgm44I0abIRTGHltf88OwjEKAUggk,2317
@@ -49,7 +49,7 @@ dbos/_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_schemas/application_database.py,sha256=SypAS9l9EsaBHFn9FR8jmnqt01M74d9AF1AMa4m2hhI,1040
 dbos/_schemas/system_database.py,sha256=rbFKggONdvvbb45InvGz0TM6a7c-Ux9dcaL-h_7Z7pU,4438
 dbos/_serialization.py,sha256=bWuwhXSQcGmiazvhJHA5gwhrRWxtmFmcCFQSDJnqqkU,3666
-dbos/_sys_db.py,sha256=yhwhH23QvehbhPW3k6f4TRQ6mDjmvMILqsR8YffFZBg,80368
+dbos/_sys_db.py,sha256=PaWa5Y8ublSMqPQXCHvYqln01cGf2LtPdXaLEHJq500,80653
 dbos/_templates/dbos-db-starter/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
 dbos/_templates/dbos-db-starter/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_templates/dbos-db-starter/__package/main.py.dbos,sha256=aQnBPSSQpkB8ERfhf7gB7P9tsU6OPKhZscfeh0yiaD8,2702
@@ -69,4 +69,4 @@ dbos/cli/cli.py,sha256=IcfaX4rrSrk6f24S2jrlR33snYMyNyEIx_lNQtuVr2E,22081
 dbos/dbos-config.schema.json,sha256=CjaspeYmOkx6Ip_pcxtmfXJTn_YGdSx_0pcPBF7KZmo,6060
 dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
 version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
-dbos-1.6.0a5.dist-info/RECORD,,
+dbos-1.7.0.dist-info/RECORD,,