dbos 0.27.0a6__py3-none-any.whl → 0.27.0a8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dbos might be problematic; consult the registry's advisory page for this release for more details.

dbos/__init__.py CHANGED
@@ -1,6 +1,12 @@
1
1
  from . import _error as error
2
2
  from ._client import DBOSClient, EnqueueOptions
3
- from ._context import DBOSContextEnsure, DBOSContextSetAuth, SetWorkflowID
3
+ from ._context import (
4
+ DBOSContextEnsure,
5
+ DBOSContextSetAuth,
6
+ SetEnqueueOptions,
7
+ SetWorkflowID,
8
+ SetWorkflowTimeout,
9
+ )
4
10
  from ._dbos import DBOS, DBOSConfiguredInstance, WorkflowHandle, WorkflowHandleAsync
5
11
  from ._dbos_config import ConfigFile, DBOSConfig, get_dbos_database_url, load_config
6
12
  from ._kafka_message import KafkaMessage
@@ -19,6 +25,8 @@ __all__ = [
19
25
  "GetWorkflowsInput",
20
26
  "KafkaMessage",
21
27
  "SetWorkflowID",
28
+ "SetWorkflowTimeout",
29
+ "SetEnqueueOptions",
22
30
  "WorkflowHandle",
23
31
  "WorkflowHandleAsync",
24
32
  "WorkflowStatus",
dbos/_client.py CHANGED
@@ -19,6 +19,7 @@ from dbos._error import DBOSNonExistentWorkflowError
19
19
  from dbos._registrations import DEFAULT_MAX_RECOVERY_ATTEMPTS
20
20
  from dbos._serialization import WorkflowInputs
21
21
  from dbos._sys_db import (
22
+ EnqueueOptionsInternal,
22
23
  StepInfo,
23
24
  SystemDatabase,
24
25
  WorkflowStatus,
@@ -42,6 +43,7 @@ class EnqueueOptions(TypedDict):
42
43
  workflow_id: NotRequired[str]
43
44
  app_version: NotRequired[str]
44
45
  workflow_timeout: NotRequired[float]
46
+ deduplication_id: NotRequired[str]
45
47
 
46
48
 
47
49
  class WorkflowHandleClientPolling(Generic[R]):
@@ -112,6 +114,9 @@ class DBOSClient:
112
114
  if workflow_id is None:
113
115
  workflow_id = str(uuid.uuid4())
114
116
  workflow_timeout = options.get("workflow_timeout", None)
117
+ enqueue_options_internal: EnqueueOptionsInternal = {
118
+ "deduplication_id": options.get("deduplication_id"),
119
+ }
115
120
 
116
121
  status: WorkflowStatusInternal = {
117
122
  "workflow_uuid": workflow_id,
@@ -144,7 +149,10 @@ class DBOSClient:
144
149
  }
145
150
 
146
151
  self._sys_db.init_workflow(
147
- status, _serialization.serialize_args(inputs), max_recovery_attempts=None
152
+ status,
153
+ _serialization.serialize_args(inputs),
154
+ max_recovery_attempts=None,
155
+ enqueue_options=enqueue_options_internal,
148
156
  )
149
157
  return workflow_id
150
158
 
dbos/_context.py CHANGED
@@ -98,6 +98,9 @@ class DBOSContext:
98
98
  # A propagated workflow deadline.
99
99
  self.workflow_deadline_epoch_ms: Optional[int] = None
100
100
 
101
+ # A user-specified deduplication ID for the enqueuing workflow.
102
+ self.deduplication_id: Optional[str] = None
103
+
101
104
  def create_child(self) -> DBOSContext:
102
105
  rv = DBOSContext()
103
106
  rv.logger = self.logger
@@ -413,12 +416,53 @@ class SetWorkflowTimeout:
413
416
  return False # Did not handle
414
417
 
415
418
 
419
+ class SetEnqueueOptions:
420
+ """
421
+ Set the workflow enqueue options for the enclosed enqueue operation.
422
+
423
+ Usage:
424
+ ```
425
+ with SetEnqueueOptions(deduplication_id=<deduplication id>):
426
+ queue.enqueue(...)
427
+ ```
428
+ """
429
+
430
+ def __init__(self, *, deduplication_id: Optional[str] = None) -> None:
431
+ self.created_ctx = False
432
+ self.deduplication_id: Optional[str] = deduplication_id
433
+ self.saved_deduplication_id: Optional[str] = None
434
+
435
+ def __enter__(self) -> SetEnqueueOptions:
436
+ # Code to create a basic context
437
+ ctx = get_local_dbos_context()
438
+ if ctx is None:
439
+ self.created_ctx = True
440
+ _set_local_dbos_context(DBOSContext())
441
+ ctx = assert_current_dbos_context()
442
+ self.saved_deduplication_id = ctx.deduplication_id
443
+ ctx.deduplication_id = self.deduplication_id
444
+ return self
445
+
446
+ def __exit__(
447
+ self,
448
+ exc_type: Optional[Type[BaseException]],
449
+ exc_value: Optional[BaseException],
450
+ traceback: Optional[TracebackType],
451
+ ) -> Literal[False]:
452
+ assert_current_dbos_context().deduplication_id = self.saved_deduplication_id
453
+ # Code to clean up the basic context if we created it
454
+ if self.created_ctx:
455
+ _clear_local_dbos_context()
456
+ return False
457
+
458
+
416
459
  class EnterDBOSWorkflow(AbstractContextManager[DBOSContext, Literal[False]]):
417
460
  def __init__(self, attributes: TracedAttributes) -> None:
418
461
  self.created_ctx = False
419
462
  self.attributes = attributes
420
463
  self.is_temp_workflow = attributes["name"] == "temp_wf"
421
464
  self.saved_workflow_timeout: Optional[int] = None
465
+ self.saved_deduplication_id: Optional[str] = None
422
466
 
423
467
  def __enter__(self) -> DBOSContext:
424
468
  # Code to create a basic context
@@ -432,6 +476,10 @@ class EnterDBOSWorkflow(AbstractContextManager[DBOSContext, Literal[False]]):
432
476
  # workflow's children (instead we propagate the deadline)
433
477
  self.saved_workflow_timeout = ctx.workflow_timeout_ms
434
478
  ctx.workflow_timeout_ms = None
479
+ # Unset the deduplication_id context var so it is not applied to this
480
+ # workflow's children
481
+ self.saved_deduplication_id = ctx.deduplication_id
482
+ ctx.deduplication_id = None
435
483
  ctx.start_workflow(
436
484
  None, self.attributes, self.is_temp_workflow
437
485
  ) # Will get from the context's next workflow ID
@@ -450,6 +498,8 @@ class EnterDBOSWorkflow(AbstractContextManager[DBOSContext, Literal[False]]):
450
498
  ctx.workflow_timeout_ms = self.saved_workflow_timeout
451
499
  # Clear any propagating timeout
452
500
  ctx.workflow_deadline_epoch_ms = None
501
+ # Restore the saved deduplication ID
502
+ ctx.deduplication_id = self.saved_deduplication_id
453
503
  # Code to clean up the basic context if we created it
454
504
  if self.created_ctx:
455
505
  _clear_local_dbos_context()
dbos/_core.py CHANGED
@@ -71,6 +71,7 @@ from ._registrations import (
71
71
  from ._roles import check_required_roles
72
72
  from ._serialization import WorkflowInputs
73
73
  from ._sys_db import (
74
+ EnqueueOptionsInternal,
74
75
  GetEventWorkflowContext,
75
76
  OperationResultInternal,
76
77
  WorkflowStatus,
@@ -234,6 +235,7 @@ def _init_workflow(
234
235
  workflow_timeout_ms: Optional[int],
235
236
  workflow_deadline_epoch_ms: Optional[int],
236
237
  max_recovery_attempts: Optional[int],
238
+ enqueue_options: Optional[EnqueueOptionsInternal],
237
239
  ) -> WorkflowStatusInternal:
238
240
  wfid = (
239
241
  ctx.workflow_id
@@ -289,6 +291,7 @@ def _init_workflow(
289
291
  status,
290
292
  _serialization.serialize_args(inputs),
291
293
  max_recovery_attempts=max_recovery_attempts,
294
+ enqueue_options=enqueue_options,
292
295
  )
293
296
 
294
297
  if workflow_deadline_epoch_ms is not None:
@@ -539,6 +542,9 @@ def start_workflow(
539
542
  workflow_timeout_ms = (
540
543
  local_ctx.workflow_timeout_ms if local_ctx is not None else None
541
544
  )
545
+ enqueue_options = EnqueueOptionsInternal(
546
+ deduplication_id=local_ctx.deduplication_id if local_ctx is not None else None,
547
+ )
542
548
  new_wf_id, new_wf_ctx = _get_new_wf()
543
549
 
544
550
  ctx = new_wf_ctx
@@ -561,6 +567,7 @@ def start_workflow(
561
567
  workflow_timeout_ms=workflow_timeout_ms,
562
568
  workflow_deadline_epoch_ms=workflow_deadline_epoch_ms,
563
569
  max_recovery_attempts=fi.max_recovery_attempts,
570
+ enqueue_options=enqueue_options,
564
571
  )
565
572
 
566
573
  wf_status = status["status"]
@@ -626,6 +633,9 @@ async def start_workflow_async(
626
633
  workflow_timeout_ms, workflow_deadline_epoch_ms = _get_timeout_deadline(
627
634
  local_ctx, queue_name
628
635
  )
636
+ enqueue_options = EnqueueOptionsInternal(
637
+ deduplication_id=local_ctx.deduplication_id if local_ctx is not None else None,
638
+ )
629
639
  new_wf_id, new_wf_ctx = _get_new_wf()
630
640
 
631
641
  ctx = new_wf_ctx
@@ -651,6 +661,7 @@ async def start_workflow_async(
651
661
  workflow_timeout_ms=workflow_timeout_ms,
652
662
  workflow_deadline_epoch_ms=workflow_deadline_epoch_ms,
653
663
  max_recovery_attempts=fi.max_recovery_attempts,
664
+ enqueue_options=enqueue_options,
654
665
  )
655
666
 
656
667
  if ctx.has_parent():
@@ -727,6 +738,7 @@ def workflow_wrapper(
727
738
  workflow_timeout_ms, workflow_deadline_epoch_ms = _get_timeout_deadline(
728
739
  ctx, queue=None
729
740
  )
741
+
730
742
  enterWorkflowCtxMgr = (
731
743
  EnterDBOSChildWorkflow if ctx and ctx.is_workflow() else EnterDBOSWorkflow
732
744
  )
@@ -768,6 +780,7 @@ def workflow_wrapper(
768
780
  workflow_timeout_ms=workflow_timeout_ms,
769
781
  workflow_deadline_epoch_ms=workflow_deadline_epoch_ms,
770
782
  max_recovery_attempts=max_recovery_attempts,
783
+ enqueue_options=None,
771
784
  )
772
785
 
773
786
  # TODO: maybe modify the parameters if they've been changed by `_init_workflow`
dbos/_error.py CHANGED
@@ -61,6 +61,7 @@ class DBOSErrorCode(Enum):
61
61
  ConflictingWorkflowError = 9
62
62
  WorkflowCancelled = 10
63
63
  UnexpectedStep = 11
64
+ QueueDeduplicated = 12
64
65
  ConflictingRegistrationError = 25
65
66
 
66
67
 
@@ -178,6 +179,18 @@ class DBOSUnexpectedStepError(DBOSException):
178
179
  )
179
180
 
180
181
 
182
+ class DBOSQueueDeduplicatedError(DBOSException):
183
+ """Exception raised when a workflow is deduplicated in the queue."""
184
+
185
+ def __init__(
186
+ self, workflow_id: str, queue_name: str, deduplication_id: str
187
+ ) -> None:
188
+ super().__init__(
189
+ f"Workflow {workflow_id} was deduplicated due to an existing workflow in queue {queue_name} with deduplication ID {deduplication_id}.",
190
+ dbos_error_code=DBOSErrorCode.QueueDeduplicated.value,
191
+ )
192
+
193
+
181
194
  #######################################
182
195
  ## BaseException
183
196
  #######################################
@@ -0,0 +1,45 @@
1
+ """add queue dedup
2
+
3
+ Revision ID: 27ac6900c6ad
4
+ Revises: 83f3732ae8e7
5
+ Create Date: 2025-04-23 16:18:48.530047
6
+
7
+ """
8
+
9
+ from typing import Sequence, Union
10
+
11
+ import sqlalchemy as sa
12
+ from alembic import op
13
+
14
+ # revision identifiers, used by Alembic.
15
+ revision: str = "27ac6900c6ad"
16
+ down_revision: Union[str, None] = "83f3732ae8e7"
17
+ branch_labels: Union[str, Sequence[str], None] = None
18
+ depends_on: Union[str, Sequence[str], None] = None
19
+
20
+
21
+ def upgrade() -> None:
22
+ op.add_column(
23
+ "workflow_queue",
24
+ sa.Column(
25
+ "deduplication_id",
26
+ sa.Text(),
27
+ nullable=True,
28
+ ),
29
+ schema="dbos",
30
+ )
31
+
32
+ # Unique constraint for queue_name, deduplication_id
33
+ op.create_unique_constraint(
34
+ "uq_workflow_queue_name_dedup_id",
35
+ "workflow_queue",
36
+ ["queue_name", "deduplication_id"],
37
+ schema="dbos",
38
+ )
39
+
40
+
41
+ def downgrade() -> None:
42
+ op.drop_constraint(
43
+ "uq_workflow_queue_name_dedup_id", "workflow_queue", schema="dbos"
44
+ )
45
+ op.drop_column("workflow_queue", "deduplication_id", schema="dbos")
dbos/_queue.py CHANGED
@@ -99,6 +99,8 @@ def queue_thread(stop_event: threading.Event, dbos: "DBOS") -> None:
99
99
  f"Exception encountered in queue thread: {traceback.format_exc()}"
100
100
  )
101
101
  except Exception:
102
- dbos.logger.warning(
103
- f"Exception encountered in queue thread: {traceback.format_exc()}"
104
- )
102
+ if not stop_event.is_set():
103
+ # Only print the error if the thread is not stopping
104
+ dbos.logger.warning(
105
+ f"Exception encountered in queue thread: {traceback.format_exc()}"
106
+ )
@@ -10,6 +10,7 @@ from sqlalchemy import (
10
10
  String,
11
11
  Table,
12
12
  Text,
13
+ UniqueConstraint,
13
14
  text,
14
15
  )
15
16
 
@@ -174,4 +175,12 @@ class SystemSchema:
174
175
  "completed_at_epoch_ms",
175
176
  BigInteger(),
176
177
  ),
178
+ Column(
179
+ "deduplication_id",
180
+ Text,
181
+ nullable=True,
182
+ ),
183
+ UniqueConstraint(
184
+ "queue_name", "deduplication_id", name="uq_workflow_queue_name_dedup_id"
185
+ ),
177
186
  )
dbos/_sys_db.py CHANGED
@@ -37,6 +37,7 @@ from ._error import (
37
37
  DBOSConflictingWorkflowError,
38
38
  DBOSDeadLetterQueueError,
39
39
  DBOSNonExistentWorkflowError,
40
+ DBOSQueueDeduplicatedError,
40
41
  DBOSUnexpectedStepError,
41
42
  DBOSWorkflowCancelledError,
42
43
  DBOSWorkflowConflictIDError,
@@ -135,6 +136,10 @@ class WorkflowStatusInternal(TypedDict):
135
136
  workflow_deadline_epoch_ms: Optional[int]
136
137
 
137
138
 
139
+ class EnqueueOptionsInternal(TypedDict):
140
+ deduplication_id: Optional[str] # Unique ID for deduplication on a queue
141
+
142
+
138
143
  class RecordedResult(TypedDict):
139
144
  output: Optional[str] # JSON (jsonpickle)
140
145
  error: Optional[str] # JSON (jsonpickle)
@@ -1597,17 +1602,43 @@ class SystemDatabase:
1597
1602
  )
1598
1603
  return value
1599
1604
 
1600
- def enqueue(self, workflow_id: str, queue_name: str, conn: sa.Connection) -> None:
1605
+ def enqueue(
1606
+ self,
1607
+ workflow_id: str,
1608
+ queue_name: str,
1609
+ conn: sa.Connection,
1610
+ *,
1611
+ enqueue_options: Optional[EnqueueOptionsInternal],
1612
+ ) -> None:
1601
1613
  if self._debug_mode:
1602
1614
  raise Exception("called enqueue in debug mode")
1603
- conn.execute(
1604
- pg.insert(SystemSchema.workflow_queue)
1605
- .values(
1606
- workflow_uuid=workflow_id,
1607
- queue_name=queue_name,
1615
+ try:
1616
+ deduplication_id = (
1617
+ enqueue_options["deduplication_id"]
1618
+ if enqueue_options is not None
1619
+ else None
1608
1620
  )
1609
- .on_conflict_do_nothing()
1610
- )
1621
+ query = (
1622
+ pg.insert(SystemSchema.workflow_queue)
1623
+ .values(
1624
+ workflow_uuid=workflow_id,
1625
+ queue_name=queue_name,
1626
+ deduplication_id=deduplication_id,
1627
+ )
1628
+ .on_conflict_do_nothing(
1629
+ index_elements=SystemSchema.workflow_queue.primary_key.columns
1630
+ )
1631
+ ) # Ignore primary key constraint violation
1632
+ conn.execute(query)
1633
+ except DBAPIError as dbapi_error:
1634
+ # Unique constraint violation for the deduplication ID
1635
+ if dbapi_error.orig.sqlstate == "23505": # type: ignore
1636
+ assert (
1637
+ deduplication_id is not None
1638
+ ), f"deduplication_id should not be None. Workflow ID: {workflow_id}, Queue name: {queue_name}."
1639
+ raise DBOSQueueDeduplicatedError(
1640
+ workflow_id, queue_name, deduplication_id
1641
+ )
1611
1642
 
1612
1643
  def start_queued_workflows(
1613
1644
  self, queue: "Queue", executor_id: str, app_version: str
@@ -1879,6 +1910,7 @@ class SystemDatabase:
1879
1910
  inputs: str,
1880
1911
  *,
1881
1912
  max_recovery_attempts: Optional[int],
1913
+ enqueue_options: Optional[EnqueueOptionsInternal],
1882
1914
  ) -> tuple[WorkflowStatuses, Optional[int]]:
1883
1915
  """
1884
1916
  Synchronously record the status and inputs for workflows in a single transaction
@@ -1894,7 +1926,12 @@ class SystemDatabase:
1894
1926
  status["queue_name"] is not None
1895
1927
  and wf_status == WorkflowStatusString.ENQUEUED.value
1896
1928
  ):
1897
- self.enqueue(status["workflow_uuid"], status["queue_name"], conn)
1929
+ self.enqueue(
1930
+ status["workflow_uuid"],
1931
+ status["queue_name"],
1932
+ conn,
1933
+ enqueue_options=enqueue_options,
1934
+ )
1898
1935
  return wf_status, workflow_deadline_epoch_ms
1899
1936
 
1900
1937
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: dbos
3
- Version: 0.27.0a6
3
+ Version: 0.27.0a8
4
4
  Summary: Ultra-lightweight durable execution in Python
5
5
  Author-Email: "DBOS, Inc." <contact@dbos.dev>
6
6
  License: MIT
@@ -1,23 +1,23 @@
1
- dbos-0.27.0a6.dist-info/METADATA,sha256=zkUFvsOjiEivL10Uz8W8LqkCI_c98CmbwvcAk5r98rY,5553
2
- dbos-0.27.0a6.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
3
- dbos-0.27.0a6.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
4
- dbos-0.27.0a6.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
5
- dbos/__init__.py,sha256=HgYmqo90vIabiROcK5LaKXXT7KfqDARiI9dUUK9sww8,890
1
+ dbos-0.27.0a8.dist-info/METADATA,sha256=PYejwuANw5E3b9HqIz8vY9yAf1izQ5GyKhAHnUGvkd4,5553
2
+ dbos-0.27.0a8.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
3
+ dbos-0.27.0a8.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
4
+ dbos-0.27.0a8.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
5
+ dbos/__init__.py,sha256=-FdBlOlr-f2tY__C23J4v22MoCAXqcDN_-zXsJXdoZ0,1005
6
6
  dbos/__main__.py,sha256=G7Exn-MhGrVJVDbgNlpzhfh8WMX_72t3_oJaFT9Lmt8,653
7
7
  dbos/_admin_server.py,sha256=bR7hO8WS5hUzxjbDS3X0hXWuW8k3AQQSAvaynnthhtc,9031
8
8
  dbos/_app_db.py,sha256=3j8_5-MlSDY0otLRszFE-GfenU6JC20fcfSL-drSNYk,11800
9
9
  dbos/_classproperty.py,sha256=f0X-_BySzn3yFDRKB2JpCbLYQ9tLwt1XftfshvY7CBs,626
10
- dbos/_client.py,sha256=jMY73ymYKGr_nnjXfOgCB6adZhd4cQw7UWZmxt-iq6c,12574
10
+ dbos/_client.py,sha256=gxN7iiFT3wA4dbElB8ASdlX27F5CH5NQzHh-J1d6YyI,12856
11
11
  dbos/_conductor/conductor.py,sha256=HYzVL29IMMrs2Mnms_7cHJynCnmmEN5SDQOMjzn3UoU,16840
12
12
  dbos/_conductor/protocol.py,sha256=zEKIuOQdIaSduNqfZKpo8PSD9_1oNpKIPnBNCu3RUyE,6681
13
- dbos/_context.py,sha256=aHzJxO7LLAz9w3G2dkZnOcFW_GG-Yaxd02AaoLu4Et8,21861
14
- dbos/_core.py,sha256=ylTVSv02h2M5SmDgYEJAZmNiKX35zPq0z-9WA-f4byY,47900
13
+ dbos/_context.py,sha256=5aJHOjh6-2Zc7Fwzw924Vg0utLEkaR-oBMRdz3cE95k,23680
14
+ dbos/_core.py,sha256=7zhdO-VfZe84wgOzBVsliqO-BI20OzcLTFqvrGyxttw,48425
15
15
  dbos/_croniter.py,sha256=XHAyUyibs_59sJQfSNWkP7rqQY6_XrlfuuCxk4jYqek,47559
16
16
  dbos/_dbos.py,sha256=t76_SVyPpyScCfotGthae27h6XxznpphVm5zIRxfnpY,48164
17
17
  dbos/_dbos_config.py,sha256=L0Z0OOB5FoPM9g-joZqXGeJnlxWQsEUtgPtgtg9Uf48,21732
18
18
  dbos/_debug.py,sha256=MNlQVZ6TscGCRQeEEL0VE8Uignvr6dPeDDDefS3xgIE,1823
19
19
  dbos/_docker_pg_helper.py,sha256=NmcgqmR5rQA_4igfeqh8ugNT2z3YmoOvuep_MEtxTiY,5854
20
- dbos/_error.py,sha256=9ITvFsN_Udpx0xXtYQHXXXb6PjPr3TmMondGmprV-L0,7003
20
+ dbos/_error.py,sha256=FOvv40rCWezx9J-0z45ScPYHO8WpmI2IHErZ8Wl1NU4,7510
21
21
  dbos/_event_loop.py,sha256=NmaLbEQFfEK36S_0KhVD39YdYrGce3qSKCTJ-5RqKQ0,2136
22
22
  dbos/_fastapi.py,sha256=PhaKftbApHnjtYEOw0EYna_3K0cmz__J9of7mRJWzu4,3704
23
23
  dbos/_flask.py,sha256=DZKUZR5-xOzPI7tYZ53r2PvvHVoAb8SYwLzMVFsVfjI,2608
@@ -27,6 +27,7 @@ dbos/_logger.py,sha256=qv2srteCF2rSRjCK1VGOck3ieIkwUe9Lvbv60mJc16E,4069
27
27
  dbos/_migrations/env.py,sha256=38SIGVbmn_VV2x2u1aHLcPOoWgZ84eCymf3g_NljmbU,1626
28
28
  dbos/_migrations/script.py.mako,sha256=MEqL-2qATlST9TAOeYgscMn1uy6HUS9NFvDgl93dMj8,635
29
29
  dbos/_migrations/versions/04ca4f231047_workflow_queues_executor_id.py,sha256=ICLPl8CN9tQXMsLDsAj8z1TsL831-Z3F8jSBvrR-wyw,736
30
+ dbos/_migrations/versions/27ac6900c6ad_add_queue_dedup.py,sha256=56w1v6TdofW3V18iwm0MP0SAeSaAUPSS40HIcn6qYIE,1072
30
31
  dbos/_migrations/versions/50f3227f0b4b_fix_job_queue.py,sha256=ZBYrtTdxy64HxIAlOes89fVIk2P1gNaJack7wuC_epg,873
31
32
  dbos/_migrations/versions/5c361fc04708_added_system_tables.py,sha256=Xr9hBDJjkAtymlauOmAy00yUHj0VVUaEz7kNwEM9IwE,6403
32
33
  dbos/_migrations/versions/83f3732ae8e7_workflow_timeout.py,sha256=Q_R35pb8AfVI3sg5mzKwyoPfYB88Ychcc8gwxpM9R7A,1035
@@ -36,7 +37,7 @@ dbos/_migrations/versions/d76646551a6c_workflow_queue.py,sha256=G942nophZ2uC2vc4
36
37
  dbos/_migrations/versions/eab0cc1d9a14_job_queue.py,sha256=uvhFOtqbBreCePhAxZfIT0qCAI7BiZTou9wt6QnbY7c,1412
37
38
  dbos/_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py,sha256=m90Lc5YH0ZISSq1MyxND6oq3RZrZKrIqEsZtwJ1jWxA,1049
38
39
  dbos/_outcome.py,sha256=EXxBg4jXCVJsByDQ1VOCIedmbeq_03S6d-p1vqQrLFU,6810
39
- dbos/_queue.py,sha256=l0g_CXJbxEmftCA9yhy-cyaR_sddfQSCfm-5XgIWzqU,3397
40
+ dbos/_queue.py,sha256=aKCGahWBGJOLOv5PCOOId96Va3YQ4ICuHWXy-eQXohE,3526
40
41
  dbos/_recovery.py,sha256=98Py7icfytyIELJ54gIsdvmURBvTb0HmWaxEAuYL0dc,2546
41
42
  dbos/_registrations.py,sha256=EZzG3ZfYmWA2bHX2hpnSIQ3PTi3-cXsvbcmXjyOusMk,7302
42
43
  dbos/_request.py,sha256=cX1B3Atlh160phgS35gF1VEEV4pD126c9F3BDgBmxZU,929
@@ -44,9 +45,9 @@ dbos/_roles.py,sha256=iOsgmIAf1XVzxs3gYWdGRe1B880YfOw5fpU7Jwx8_A8,2271
44
45
  dbos/_scheduler.py,sha256=SR1oRZRcVzYsj-JauV2LA8JtwTkt8mru7qf6H1AzQ1U,2027
45
46
  dbos/_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
46
47
  dbos/_schemas/application_database.py,sha256=SypAS9l9EsaBHFn9FR8jmnqt01M74d9AF1AMa4m2hhI,1040
47
- dbos/_schemas/system_database.py,sha256=aChSK7uLECD-v-7BZeOfuZFbtWayllaS3PaowaKDHwY,5490
48
+ dbos/_schemas/system_database.py,sha256=wLqrhApNqrwZC1SdUxi_ca0y_66WzKaaBOxvND4_bdg,5738
48
49
  dbos/_serialization.py,sha256=YCYv0qKAwAZ1djZisBC7khvKqG-5OcIv9t9EC5PFIog,1743
49
- dbos/_sys_db.py,sha256=HBXmOL6AvVC8WjIDxCuOoodw2xtni8SgaU3mMft84Ts,80697
50
+ dbos/_sys_db.py,sha256=6rx-eygVfRlA8BPI-nBgG3BZ496Ag6XMdOYiOT5GhAI,82088
50
51
  dbos/_templates/dbos-db-starter/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
51
52
  dbos/_templates/dbos-db-starter/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
52
53
  dbos/_templates/dbos-db-starter/__package/main.py,sha256=nJMN3ZD2lmwg4Dcgmiwqc-tQGuCJuJal2Xl85iA277U,2453
@@ -66,4 +67,4 @@ dbos/cli/cli.py,sha256=a3rUrHog5-e22KjjUPOuTjH20PmUgSP0amRpMd6LVJE,18882
66
67
  dbos/dbos-config.schema.json,sha256=8KcwJb_sQc4-6tQG2TLmjE_nratfrQa0qVLl9XPsvWE,6367
67
68
  dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
68
69
  version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
69
- dbos-0.27.0a6.dist-info/RECORD,,
70
+ dbos-0.27.0a8.dist-info/RECORD,,