dbos-1.14.0a5-py3-none-any.whl → dbos-1.14.0a6-py3-none-any.whl

This diff compares two publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between these versions as they appear in their public registries.
dbos/__init__.py CHANGED
@@ -9,6 +9,7 @@ from ._context import (
9
9
  )
10
10
  from ._dbos import DBOS, DBOSConfiguredInstance, WorkflowHandle, WorkflowHandleAsync
11
11
  from ._dbos_config import DBOSConfig
12
+ from ._debouncer import Debouncer, DebouncerClient
12
13
  from ._kafka_message import KafkaMessage
13
14
  from ._queue import Queue
14
15
  from ._sys_db import GetWorkflowsInput, WorkflowStatus, WorkflowStatusString
@@ -32,4 +33,6 @@ __all__ = [
32
33
  "WorkflowStatusString",
33
34
  "error",
34
35
  "Queue",
36
+ "Debouncer",
37
+ "DebouncerClient",
35
38
  ]
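
Note on the export change above: Debouncer and DebouncerClient are added to __all__, so they become part of the package's public API and can be imported from the package root rather than from the private _debouncer module. A minimal import sketch (assuming nothing beyond an installed dbos package at this version):

    from dbos import DBOS, Debouncer, DebouncerClient, Queue
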
dbos/_client.py CHANGED
@@ -3,6 +3,7 @@ import sys
3
3
  import time
4
4
  import uuid
5
5
  from typing import (
6
+ TYPE_CHECKING,
6
7
  Any,
7
8
  AsyncGenerator,
8
9
  Generator,
@@ -24,7 +25,10 @@ else:
24
25
  from typing import NotRequired
25
26
 
26
27
  from dbos import _serialization
27
- from dbos._dbos import WorkflowHandle, WorkflowHandleAsync
28
+
29
+ if TYPE_CHECKING:
30
+ from dbos._dbos import WorkflowHandle, WorkflowHandleAsync
31
+
28
32
  from dbos._dbos_config import (
29
33
  get_application_database_url,
30
34
  get_system_database_url,
@@ -224,23 +228,25 @@ class DBOSClient:
224
228
 
225
229
  def enqueue(
226
230
  self, options: EnqueueOptions, *args: Any, **kwargs: Any
227
- ) -> WorkflowHandle[R]:
231
+ ) -> "WorkflowHandle[R]":
228
232
  workflow_id = self._enqueue(options, *args, **kwargs)
229
233
  return WorkflowHandleClientPolling[R](workflow_id, self._sys_db)
230
234
 
231
235
  async def enqueue_async(
232
236
  self, options: EnqueueOptions, *args: Any, **kwargs: Any
233
- ) -> WorkflowHandleAsync[R]:
237
+ ) -> "WorkflowHandleAsync[R]":
234
238
  workflow_id = await asyncio.to_thread(self._enqueue, options, *args, **kwargs)
235
239
  return WorkflowHandleClientAsyncPolling[R](workflow_id, self._sys_db)
236
240
 
237
- def retrieve_workflow(self, workflow_id: str) -> WorkflowHandle[R]:
241
+ def retrieve_workflow(self, workflow_id: str) -> "WorkflowHandle[R]":
238
242
  status = get_workflow(self._sys_db, workflow_id)
239
243
  if status is None:
240
244
  raise DBOSNonExistentWorkflowError(workflow_id)
241
245
  return WorkflowHandleClientPolling[R](workflow_id, self._sys_db)
242
246
 
243
- async def retrieve_workflow_async(self, workflow_id: str) -> WorkflowHandleAsync[R]:
247
+ async def retrieve_workflow_async(
248
+ self, workflow_id: str
249
+ ) -> "WorkflowHandleAsync[R]":
244
250
  status = await asyncio.to_thread(get_workflow, self._sys_db, workflow_id)
245
251
  if status is None:
246
252
  raise DBOSNonExistentWorkflowError(workflow_id)
@@ -311,11 +317,13 @@ class DBOSClient:
311
317
  async def cancel_workflow_async(self, workflow_id: str) -> None:
312
318
  await asyncio.to_thread(self.cancel_workflow, workflow_id)
313
319
 
314
- def resume_workflow(self, workflow_id: str) -> WorkflowHandle[Any]:
320
+ def resume_workflow(self, workflow_id: str) -> "WorkflowHandle[Any]":
315
321
  self._sys_db.resume_workflow(workflow_id)
316
322
  return WorkflowHandleClientPolling[Any](workflow_id, self._sys_db)
317
323
 
318
- async def resume_workflow_async(self, workflow_id: str) -> WorkflowHandleAsync[Any]:
324
+ async def resume_workflow_async(
325
+ self, workflow_id: str
326
+ ) -> "WorkflowHandleAsync[Any]":
319
327
  await asyncio.to_thread(self.resume_workflow, workflow_id)
320
328
  return WorkflowHandleClientAsyncPolling[Any](workflow_id, self._sys_db)
321
329
 
@@ -451,7 +459,7 @@ class DBOSClient:
451
459
  start_step: int,
452
460
  *,
453
461
  application_version: Optional[str] = None,
454
- ) -> WorkflowHandle[Any]:
462
+ ) -> "WorkflowHandle[Any]":
455
463
  forked_workflow_id = fork_workflow(
456
464
  self._sys_db,
457
465
  self._app_db,
@@ -467,7 +475,7 @@ class DBOSClient:
467
475
  start_step: int,
468
476
  *,
469
477
  application_version: Optional[str] = None,
470
- ) -> WorkflowHandleAsync[Any]:
478
+ ) -> "WorkflowHandleAsync[Any]":
471
479
  forked_workflow_id = await asyncio.to_thread(
472
480
  fork_workflow,
473
481
  self._sys_db,
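
The _client.py changes above apply the standard typing.TYPE_CHECKING pattern: WorkflowHandle and WorkflowHandleAsync are imported only for static type checking and referenced through quoted annotations, so dbos._client no longer needs to import dbos._dbos at runtime (a common way to avoid circular imports). A generic sketch of the pattern, with hypothetical module and class names:

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Evaluated only by the type checker, never at runtime,
        # so it cannot introduce an import cycle.
        from mypackage.handles import WorkflowHandle

    def retrieve(workflow_id: str) -> "WorkflowHandle":
        # The quoted annotation is resolved lazily by type checkers.
        ...
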
dbos/_context.py CHANGED
@@ -524,6 +524,7 @@ class EnterDBOSWorkflow(AbstractContextManager[DBOSContext, Literal[False]]):
524
524
  self.saved_workflow_timeout: Optional[int] = None
525
525
  self.saved_deduplication_id: Optional[str] = None
526
526
  self.saved_priority: Optional[int] = None
527
+ self.saved_is_within_set_workflow_id_block: bool = False
527
528
 
528
529
  def __enter__(self) -> DBOSContext:
529
530
  # Code to create a basic context
@@ -533,6 +534,9 @@ class EnterDBOSWorkflow(AbstractContextManager[DBOSContext, Literal[False]]):
533
534
  ctx = DBOSContext()
534
535
  _set_local_dbos_context(ctx)
535
536
  assert not ctx.is_within_workflow()
537
+ # Unset is_within_set_workflow_id_block as the workflow is not within a block
538
+ self.saved_is_within_set_workflow_id_block = ctx.is_within_set_workflow_id_block
539
+ ctx.is_within_set_workflow_id_block = False
536
540
  # Unset the workflow_timeout_ms context var so it is not applied to this
537
541
  # workflow's children (instead we propagate the deadline)
538
542
  self.saved_workflow_timeout = ctx.workflow_timeout_ms
@@ -557,6 +561,8 @@ class EnterDBOSWorkflow(AbstractContextManager[DBOSContext, Literal[False]]):
557
561
  ctx = assert_current_dbos_context()
558
562
  assert ctx.is_within_workflow()
559
563
  ctx.end_workflow(exc_value)
564
+ # Restore is_within_set_workflow_id_block
565
+ ctx.is_within_set_workflow_id_block = self.saved_is_within_set_workflow_id_block
560
566
  # Restore the saved workflow timeout
561
567
  ctx.workflow_timeout_ms = self.saved_workflow_timeout
562
568
  # Clear any propagating timeout
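
The _context.py change above follows the same save/clear/restore discipline already used for workflow_timeout_ms and the other saved fields: the flag set by a SetWorkflowID block is stashed on workflow entry, cleared so it does not apply inside the workflow body, and restored on exit. A stripped-down sketch of that pattern, using a hypothetical stand-in for the context object:

    class Ctx:
        # Hypothetical stand-in for DBOSContext, with only the flag we care about.
        def __init__(self) -> None:
            self.is_within_set_workflow_id_block = True

    class EnterScope:
        # Illustrates the save/clear/restore pattern EnterDBOSWorkflow uses above.
        def __init__(self, ctx: Ctx) -> None:
            self.ctx = ctx
            self.saved_flag = False

        def __enter__(self) -> Ctx:
            # Save the caller's flag, then clear it so it does not leak into the scope.
            self.saved_flag = self.ctx.is_within_set_workflow_id_block
            self.ctx.is_within_set_workflow_id_block = False
            return self.ctx

        def __exit__(self, exc_type, exc, tb) -> bool:
            # Restore the caller's flag on the way out, even if the body raised.
            self.ctx.is_within_set_workflow_id_block = self.saved_flag
            return False

    ctx = Ctx()
    with EnterScope(ctx):
        assert ctx.is_within_set_workflow_id_block is False  # cleared inside the scope
    assert ctx.is_within_set_workflow_id_block is True       # restored afterwards
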
dbos/_core.py CHANGED
@@ -50,6 +50,7 @@ from ._error import (
50
50
  DBOSException,
51
51
  DBOSMaxStepRetriesExceeded,
52
52
  DBOSNonExistentWorkflowError,
53
+ DBOSQueueDeduplicatedError,
53
54
  DBOSRecoveryError,
54
55
  DBOSUnexpectedStepError,
55
56
  DBOSWorkflowCancelledError,
@@ -95,6 +96,7 @@ R = TypeVar("R", covariant=True) # A generic type for workflow return values
95
96
  F = TypeVar("F", bound=Callable[..., Any])
96
97
 
97
98
  TEMP_SEND_WF_NAME = "<temp>.temp_send_workflow"
99
+ DEBOUNCER_WORKFLOW_NAME = "_dbos_debouncer_workflow"
98
100
 
99
101
 
100
102
  def check_is_in_coroutine() -> bool:
@@ -310,10 +312,22 @@ def _init_workflow(
310
312
  }
311
313
 
312
314
  # Synchronously record the status and inputs for workflows
313
- wf_status, workflow_deadline_epoch_ms = dbos._sys_db.init_workflow(
314
- status,
315
- max_recovery_attempts=max_recovery_attempts,
316
- )
315
+ try:
316
+ wf_status, workflow_deadline_epoch_ms = dbos._sys_db.init_workflow(
317
+ status,
318
+ max_recovery_attempts=max_recovery_attempts,
319
+ )
320
+ except DBOSQueueDeduplicatedError as e:
321
+ if ctx.has_parent():
322
+ result: OperationResultInternal = {
323
+ "workflow_uuid": ctx.parent_workflow_id,
324
+ "function_id": ctx.parent_workflow_fid,
325
+ "function_name": wf_name,
326
+ "output": None,
327
+ "error": _serialization.serialize_exception(e),
328
+ }
329
+ dbos._sys_db.record_operation_result(result)
330
+ raise
317
331
 
318
332
  if workflow_deadline_epoch_ms is not None:
319
333
  evt = threading.Event()
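
For context on the _core.py change above: enqueueing with a deduplication ID raises DBOSQueueDeduplicatedError when another workflow with the same ID is already pending on the queue; the new code additionally records that error as the parent workflow's operation result before re-raising, so the parent observes the same outcome on replay. A hedged usage sketch (the queue and workflow names are hypothetical; the imports mirror those used in this diff):

    from dbos import DBOS, Queue
    from dbos._context import SetEnqueueOptions
    from dbos._error import DBOSQueueDeduplicatedError

    queue = Queue("reports")  # hypothetical queue

    @DBOS.workflow()
    def generate_report(user_id: str) -> str:
        ...

    def enqueue_once(user_id: str) -> None:
        try:
            # At most one pending workflow per deduplication ID on this queue.
            with SetEnqueueOptions(deduplication_id=f"report-{user_id}"):
                queue.enqueue(generate_report, user_id)
        except DBOSQueueDeduplicatedError:
            # A workflow with this deduplication ID is already enqueued; nothing to do.
            pass
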
dbos/_dbos.py CHANGED
@@ -32,12 +32,14 @@ from opentelemetry.trace import Span
32
32
  from rich import print
33
33
 
34
34
  from dbos._conductor.conductor import ConductorWebsocket
35
+ from dbos._debouncer import debouncer_workflow
35
36
  from dbos._sys_db import SystemDatabase, WorkflowStatus
36
37
  from dbos._utils import INTERNAL_QUEUE_NAME, GlobalParams
37
38
  from dbos._workflow_commands import fork_workflow, list_queued_workflows, list_workflows
38
39
 
39
40
  from ._classproperty import classproperty
40
41
  from ._core import (
42
+ DEBOUNCER_WORKFLOW_NAME,
41
43
  TEMP_SEND_WF_NAME,
42
44
  WorkflowHandleAsyncPolling,
43
45
  WorkflowHandlePolling,
@@ -390,11 +392,12 @@ class DBOS:
390
392
  ) -> None:
391
393
  self.send(destination_id, message, topic)
392
394
 
393
- temp_send_wf = workflow_wrapper(self._registry, send_temp_workflow)
394
- set_dbos_func_name(send_temp_workflow, TEMP_SEND_WF_NAME)
395
- set_dbos_func_name(temp_send_wf, TEMP_SEND_WF_NAME)
396
- set_temp_workflow_type(send_temp_workflow, "send")
397
- self._registry.register_wf_function(TEMP_SEND_WF_NAME, temp_send_wf, "send")
395
+ decorate_workflow(self._registry, TEMP_SEND_WF_NAME, None)(send_temp_workflow)
396
+
397
+ # Register the debouncer workflow
398
+ decorate_workflow(self._registry, DEBOUNCER_WORKFLOW_NAME, None)(
399
+ debouncer_workflow
400
+ )
398
401
 
399
402
  for handler in dbos_logger.handlers:
400
403
  handler.flush()
dbos/_debouncer.py ADDED
@@ -0,0 +1,394 @@
1
+ import asyncio
2
+ import math
3
+ import sys
4
+ import time
5
+ import types
6
+ import uuid
7
+ from typing import (
8
+ TYPE_CHECKING,
9
+ Any,
10
+ Callable,
11
+ Coroutine,
12
+ Dict,
13
+ Generic,
14
+ Optional,
15
+ Tuple,
16
+ TypedDict,
17
+ TypeVar,
18
+ Union,
19
+ )
20
+
21
+ if sys.version_info < (3, 10):
22
+ from typing_extensions import ParamSpec
23
+ else:
24
+ from typing import ParamSpec
25
+
26
+ from dbos._client import (
27
+ DBOSClient,
28
+ EnqueueOptions,
29
+ WorkflowHandleClientAsyncPolling,
30
+ WorkflowHandleClientPolling,
31
+ )
32
+ from dbos._context import (
33
+ DBOSContextEnsure,
34
+ SetEnqueueOptions,
35
+ SetWorkflowID,
36
+ SetWorkflowTimeout,
37
+ assert_current_dbos_context,
38
+ )
39
+ from dbos._core import (
40
+ DEBOUNCER_WORKFLOW_NAME,
41
+ WorkflowHandleAsyncPolling,
42
+ WorkflowHandlePolling,
43
+ )
44
+ from dbos._error import DBOSQueueDeduplicatedError
45
+ from dbos._queue import Queue
46
+ from dbos._registrations import get_dbos_func_name
47
+ from dbos._serialization import WorkflowInputs
48
+ from dbos._utils import INTERNAL_QUEUE_NAME
49
+
50
+ if TYPE_CHECKING:
51
+ from dbos._dbos import WorkflowHandle, WorkflowHandleAsync
52
+
53
+ P = ParamSpec("P") # A generic type for workflow parameters
54
+ R = TypeVar("R", covariant=True) # A generic type for workflow return values
55
+
56
+
57
+ _DEBOUNCER_TOPIC = "DEBOUNCER_TOPIC"
58
+
59
+
60
+ # Options saved from the local context to pass through to the debounced function
61
+ class ContextOptions(TypedDict):
62
+ workflow_id: str
63
+ deduplication_id: Optional[str]
64
+ priority: Optional[int]
65
+ app_version: Optional[str]
66
+ workflow_timeout_sec: Optional[float]
67
+
68
+
69
+ # Parameters for the debouncer workflow
70
+ class DebouncerOptions(TypedDict):
71
+ workflow_name: str
72
+ debounce_timeout_sec: Optional[float]
73
+ queue_name: Optional[str]
74
+
75
+
76
+ # The message sent from a debounce to the debouncer workflow
77
+ class DebouncerMessage(TypedDict):
78
+ inputs: WorkflowInputs
79
+ message_id: str
80
+ debounce_period_sec: float
81
+
82
+
83
+ def debouncer_workflow(
84
+ initial_debounce_period_sec: float,
85
+ ctx: ContextOptions,
86
+ options: DebouncerOptions,
87
+ *args: Tuple[Any, ...],
88
+ **kwargs: Dict[str, Any],
89
+ ) -> None:
90
+ from dbos._dbos import DBOS, _get_dbos_instance
91
+
92
+ dbos = _get_dbos_instance()
93
+
94
+ workflow_inputs: WorkflowInputs = {"args": args, "kwargs": kwargs}
95
+ # Every time the debounced workflow is called, a message is sent to this workflow.
96
+ # It waits until debounce_period_sec have passed since the last message or until
97
+ # debounce_timeout_sec has elapsed.
98
+ debounce_deadline_epoch_sec = (
99
+ time.time() + options["debounce_timeout_sec"]
100
+ if options["debounce_timeout_sec"]
101
+ else math.inf
102
+ )
103
+ debounce_period_sec = initial_debounce_period_sec
104
+ while time.time() < debounce_deadline_epoch_sec:
105
+ time_until_deadline = max(debounce_deadline_epoch_sec - time.time(), 0)
106
+ timeout = min(debounce_period_sec, time_until_deadline)
107
+ message: DebouncerMessage = DBOS.recv(_DEBOUNCER_TOPIC, timeout_seconds=timeout)
108
+ if message is None:
109
+ break
110
+ else:
111
+ workflow_inputs = message["inputs"]
112
+ debounce_period_sec = message["debounce_period_sec"]
113
+ # Acknowledge receipt of the message
114
+ DBOS.set_event(message["message_id"], message["message_id"])
115
+ # After the timeout or period has elapsed, start the user workflow with the requested context parameters,
116
+ # either directly or on a queue.
117
+ with SetWorkflowID(ctx["workflow_id"]):
118
+ with SetWorkflowTimeout(ctx["workflow_timeout_sec"]):
119
+ func = dbos._registry.workflow_info_map.get(options["workflow_name"], None)
120
+ if not func:
121
+ raise Exception(
122
+ f"Invalid workflow name provided to debouncer: {options['workflow_name']}"
123
+ )
124
+ if options["queue_name"]:
125
+ queue = dbos._registry.queue_info_map.get(options["queue_name"], None)
126
+ if not queue:
127
+ raise Exception(
128
+ f"Invalid queue name provided to debouncer: {options['queue_name']}"
129
+ )
130
+ with SetEnqueueOptions(
131
+ deduplication_id=ctx["deduplication_id"],
132
+ priority=ctx["priority"],
133
+ app_version=ctx["app_version"],
134
+ ):
135
+ queue.enqueue(
136
+ func, *workflow_inputs["args"], **workflow_inputs["kwargs"]
137
+ )
138
+ else:
139
+ DBOS.start_workflow(
140
+ func, *workflow_inputs["args"], **workflow_inputs["kwargs"]
141
+ )
142
+
143
+
144
+ class Debouncer(Generic[P, R]):
145
+
146
+ def __init__(
147
+ self,
148
+ workflow_name: str,
149
+ *,
150
+ debounce_key: str,
151
+ debounce_timeout_sec: Optional[float] = None,
152
+ queue: Optional[Queue] = None,
153
+ ):
154
+ self.func_name = workflow_name
155
+ self.options: DebouncerOptions = {
156
+ "debounce_timeout_sec": debounce_timeout_sec,
157
+ "queue_name": queue.name if queue else None,
158
+ "workflow_name": workflow_name,
159
+ }
160
+ self.debounce_key = debounce_key
161
+
162
+ @staticmethod
163
+ def create(
164
+ workflow: Callable[P, R],
165
+ *,
166
+ debounce_key: str,
167
+ debounce_timeout_sec: Optional[float] = None,
168
+ queue: Optional[Queue] = None,
169
+ ) -> "Debouncer[P, R]":
170
+
171
+ if isinstance(workflow, (types.MethodType)):
172
+ raise TypeError("Only workflow functions may be debounced, not methods")
173
+ return Debouncer[P, R](
174
+ get_dbos_func_name(workflow),
175
+ debounce_key=debounce_key,
176
+ debounce_timeout_sec=debounce_timeout_sec,
177
+ queue=queue,
178
+ )
179
+
180
+ @staticmethod
181
+ def create_async(
182
+ workflow: Callable[P, Coroutine[Any, Any, R]],
183
+ *,
184
+ debounce_key: str,
185
+ debounce_timeout_sec: Optional[float] = None,
186
+ queue: Optional[Queue] = None,
187
+ ) -> "Debouncer[P, R]":
188
+
189
+ if isinstance(workflow, (types.MethodType)):
190
+ raise TypeError("Only workflow functions may be debounced, not methods")
191
+ return Debouncer[P, R](
192
+ get_dbos_func_name(workflow),
193
+ debounce_key=debounce_key,
194
+ debounce_timeout_sec=debounce_timeout_sec,
195
+ queue=queue,
196
+ )
197
+
198
+ def debounce(
199
+ self, debounce_period_sec: float, *args: P.args, **kwargs: P.kwargs
200
+ ) -> "WorkflowHandle[R]":
201
+ from dbos._dbos import DBOS, _get_dbos_instance
202
+
203
+ dbos = _get_dbos_instance()
204
+ internal_queue = dbos._registry.get_internal_queue()
205
+
206
+ # Read all workflow settings from context, pass them through ContextOptions
207
+ # into the debouncer to apply to the user workflow, then reset the context
208
+ # so workflow settings aren't applied to the debouncer.
209
+ with DBOSContextEnsure():
210
+ ctx = assert_current_dbos_context()
211
+
212
+ # Deterministically generate the user workflow ID and message ID
213
+ def assign_debounce_ids() -> tuple[str, str]:
214
+ return str(uuid.uuid4()), ctx.assign_workflow_id()
215
+
216
+ message_id, user_workflow_id = dbos._sys_db.call_function_as_step(
217
+ assign_debounce_ids, "DBOS.assign_debounce_ids"
218
+ )
219
+ ctx.id_assigned_for_next_workflow = ""
220
+ ctx.is_within_set_workflow_id_block = False
221
+ ctxOptions: ContextOptions = {
222
+ "workflow_id": user_workflow_id,
223
+ "app_version": ctx.app_version,
224
+ "deduplication_id": ctx.deduplication_id,
225
+ "priority": ctx.priority,
226
+ "workflow_timeout_sec": (
227
+ ctx.workflow_timeout_ms / 1000.0
228
+ if ctx.workflow_timeout_ms
229
+ else None
230
+ ),
231
+ }
232
+ while True:
233
+ try:
234
+ # Attempt to enqueue a debouncer for this workflow.
235
+ with SetEnqueueOptions(deduplication_id=self.debounce_key):
236
+ with SetWorkflowTimeout(None):
237
+ internal_queue.enqueue(
238
+ debouncer_workflow,
239
+ debounce_period_sec,
240
+ ctxOptions,
241
+ self.options,
242
+ *args,
243
+ **kwargs,
244
+ )
245
+ return WorkflowHandlePolling(user_workflow_id, dbos)
246
+ except DBOSQueueDeduplicatedError:
247
+ # If there is already a debouncer, send a message to it.
248
+ # Deterministically retrieve the ID of the debouncer
249
+ def get_deduplicated_workflow() -> Optional[str]:
250
+ return dbos._sys_db.get_deduplicated_workflow(
251
+ queue_name=internal_queue.name,
252
+ deduplication_id=self.debounce_key,
253
+ )
254
+
255
+ dedup_wfid = dbos._sys_db.call_function_as_step(
256
+ get_deduplicated_workflow, "DBOS.get_deduplicated_workflow"
257
+ )
258
+ if dedup_wfid is None:
259
+ continue
260
+ else:
261
+ workflow_inputs: WorkflowInputs = {"args": args, "kwargs": kwargs}
262
+ message: DebouncerMessage = {
263
+ "message_id": message_id,
264
+ "inputs": workflow_inputs,
265
+ "debounce_period_sec": debounce_period_sec,
266
+ }
267
+ DBOS.send(dedup_wfid, message, _DEBOUNCER_TOPIC)
268
+ # Wait for the debouncer to acknowledge receipt of the message.
269
+ # If the message is not acknowledged, this likely means the debouncer started its workflow
270
+ # and exited without processing this message, so try again.
271
+ if not DBOS.get_event(dedup_wfid, message_id, timeout_seconds=1):
272
+ continue
273
+ # Retrieve the user workflow ID from the input to the debouncer
274
+ # and return a handle to it
275
+ dedup_workflow_input = (
276
+ DBOS.retrieve_workflow(dedup_wfid).get_status().input
277
+ )
278
+ assert dedup_workflow_input is not None
279
+ user_workflow_id = dedup_workflow_input["args"][1]["workflow_id"]
280
+ return WorkflowHandlePolling(user_workflow_id, dbos)
281
+
282
+ async def debounce_async(
283
+ self,
284
+ debounce_period_sec: float,
285
+ *args: P.args,
286
+ **kwargs: P.kwargs,
287
+ ) -> "WorkflowHandleAsync[R]":
288
+ from dbos._dbos import _get_dbos_instance
289
+
290
+ dbos = _get_dbos_instance()
291
+ handle = await asyncio.to_thread(
292
+ self.debounce, debounce_period_sec, *args, **kwargs
293
+ )
294
+ return WorkflowHandleAsyncPolling(handle.workflow_id, dbos)
295
+
296
+
297
+ class DebouncerClient:
298
+
299
+ def __init__(
300
+ self,
301
+ client: DBOSClient,
302
+ workflow_options: EnqueueOptions,
303
+ *,
304
+ debounce_key: str,
305
+ debounce_timeout_sec: Optional[float] = None,
306
+ queue: Optional[Queue] = None,
307
+ ):
308
+ self.workflow_options = workflow_options
309
+ self.debouncer_options: DebouncerOptions = {
310
+ "debounce_timeout_sec": debounce_timeout_sec,
311
+ "queue_name": queue.name if queue else None,
312
+ "workflow_name": workflow_options["workflow_name"],
313
+ }
314
+ self.debounce_key = debounce_key
315
+ self.client = client
316
+
317
+ def debounce(
318
+ self, debounce_period_sec: float, *args: Any, **kwargs: Any
319
+ ) -> "WorkflowHandle[R]":
320
+
321
+ ctxOptions: ContextOptions = {
322
+ "workflow_id": (
323
+ self.workflow_options["workflow_id"]
324
+ if self.workflow_options.get("workflow_id")
325
+ else str(uuid.uuid4())
326
+ ),
327
+ "app_version": self.workflow_options.get("app_version"),
328
+ "deduplication_id": self.workflow_options.get("deduplication_id"),
329
+ "priority": self.workflow_options.get("priority"),
330
+ "workflow_timeout_sec": self.workflow_options.get("workflow_timeout"),
331
+ }
332
+ message_id = str(uuid.uuid4())
333
+ while True:
334
+ try:
335
+ # Attempt to enqueue a debouncer for this workflow.
336
+ debouncer_options: EnqueueOptions = {
337
+ "workflow_name": DEBOUNCER_WORKFLOW_NAME,
338
+ "queue_name": INTERNAL_QUEUE_NAME,
339
+ "deduplication_id": self.debounce_key,
340
+ }
341
+ self.client.enqueue(
342
+ debouncer_options,
343
+ debounce_period_sec,
344
+ ctxOptions,
345
+ self.debouncer_options,
346
+ *args,
347
+ **kwargs,
348
+ )
349
+ return WorkflowHandleClientPolling[R](
350
+ ctxOptions["workflow_id"], self.client._sys_db
351
+ )
352
+ except DBOSQueueDeduplicatedError:
353
+ # If there is already a debouncer, send a message to it.
354
+ dedup_wfid = self.client._sys_db.get_deduplicated_workflow(
355
+ queue_name=INTERNAL_QUEUE_NAME,
356
+ deduplication_id=self.debounce_key,
357
+ )
358
+ if dedup_wfid is None:
359
+ continue
360
+ else:
361
+ workflow_inputs: WorkflowInputs = {"args": args, "kwargs": kwargs}
362
+ message: DebouncerMessage = {
363
+ "message_id": message_id,
364
+ "inputs": workflow_inputs,
365
+ "debounce_period_sec": debounce_period_sec,
366
+ }
367
+ self.client.send(dedup_wfid, message, _DEBOUNCER_TOPIC)
368
+ # Wait for the debouncer to acknowledge receipt of the message.
369
+ # If the message is not acknowledged, this likely means the debouncer started its workflow
370
+ # and exited without processing this message, so try again.
371
+ if not self.client.get_event(
372
+ dedup_wfid, message_id, timeout_seconds=1
373
+ ):
374
+ continue
375
+ # Retrieve the user workflow ID from the input to the debouncer
376
+ # and return a handle to it
377
+ dedup_workflow_input = (
378
+ self.client.retrieve_workflow(dedup_wfid).get_status().input
379
+ )
380
+ assert dedup_workflow_input is not None
381
+ user_workflow_id = dedup_workflow_input["args"][1]["workflow_id"]
382
+ return WorkflowHandleClientPolling[R](
383
+ user_workflow_id, self.client._sys_db
384
+ )
385
+
386
+ async def debounce_async(
387
+ self, debounce_period_sec: float, *args: Any, **kwargs: Any
388
+ ) -> "WorkflowHandleAsync[R]":
389
+ handle: "WorkflowHandle[R]" = await asyncio.to_thread(
390
+ self.debounce, debounce_period_sec, *args, **kwargs
391
+ )
392
+ return WorkflowHandleClientAsyncPolling[R](
393
+ handle.workflow_id, self.client._sys_db
394
+ )
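
Taken together, _debouncer.py builds debouncing on top of queue deduplication and workflow messaging: each debounce call either enqueues a debouncer workflow (deduplicated on debounce_key) or sends the existing one a message, and the user workflow starts only once debounce_period_sec passes without a new call or debounce_timeout_sec expires, using the arguments from the most recent call. A hedged usage sketch based on the signatures above (the workflow and key names are hypothetical; get_result() on the returned handle is assumed to behave as for any other workflow handle):

    from dbos import DBOS, Debouncer

    @DBOS.workflow()
    def index_document(doc_id: str) -> None:
        ...

    # One debouncer per logical key; calls sharing a key are coalesced.
    debouncer = Debouncer.create(
        index_document,
        debounce_key="index-doc-42",
        debounce_timeout_sec=60.0,  # start no later than 60s after the first call
    )

    # Each call resets the 5-second debounce period; the workflow eventually runs once,
    # with the arguments from the last call made before the period elapsed.
    handle = debouncer.debounce(5.0, "doc-42")
    handle = debouncer.debounce(5.0, "doc-42")
    handle.get_result()

DebouncerClient mirrors the same debounce/debounce_async interface for code that only holds a DBOSClient connection, taking the target workflow name through EnqueueOptions instead of a function reference.
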
dbos/_serialization.py CHANGED
@@ -12,8 +12,13 @@ class WorkflowInputs(TypedDict):
12
12
 
13
13
 
14
14
  def _validate_item(data: Any) -> None:
15
- if isinstance(data, (types.FunctionType, types.MethodType)):
16
- raise TypeError("Serialized data item should not be a function")
15
+ if isinstance(data, (types.MethodType)):
16
+ raise TypeError("Serialized data item should not be a class method")
17
+ if isinstance(data, (types.FunctionType)):
18
+ if jsonpickle.decode(jsonpickle.encode(data, unpicklable=True)) is None:
19
+ raise TypeError(
20
+ "Serialized function should be defined at the top level of a module"
21
+ )
17
22
 
18
23
 
19
24
  def serialize(data: Any) -> str:
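
The relaxed check above distinguishes functions jsonpickle can serialize by reference (module-level functions, recorded by qualified name and re-imported on decode) from those it cannot (bound methods, lambdas, nested functions). As exercised by the new check, decoding a non-importable function yields None. A small illustrative sketch, assuming the defining module is importable under its own name:

    import jsonpickle

    def top_level(x: int) -> int:
        # Recorded by jsonpickle as a reference such as "mymodule.top_level".
        return x + 1

    nested = lambda x: x + 1  # has no importable qualified name

    # A module-level function survives an encode/decode round trip...
    assert jsonpickle.decode(jsonpickle.encode(top_level, unpicklable=True)) is not None
    # ...while a lambda or nested function decodes to None, which _validate_item now rejects.
    assert jsonpickle.decode(jsonpickle.encode(nested, unpicklable=True)) is None
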
dbos/_sys_db.py CHANGED
@@ -740,6 +740,33 @@ class SystemDatabase(ABC):
740
740
  }
741
741
  return status
742
742
 
743
+ @db_retry()
744
+ def get_deduplicated_workflow(
745
+ self, queue_name: str, deduplication_id: str
746
+ ) -> Optional[str]:
747
+ """
748
+ Get the workflow ID associated with a given queue name and deduplication ID.
749
+
750
+ Args:
751
+ queue_name: The name of the queue
752
+ deduplication_id: The deduplication ID
753
+
754
+ Returns:
755
+ The workflow UUID if found, None otherwise
756
+ """
757
+ with self.engine.begin() as c:
758
+ row = c.execute(
759
+ sa.select(SystemSchema.workflow_status.c.workflow_uuid).where(
760
+ SystemSchema.workflow_status.c.queue_name == queue_name,
761
+ SystemSchema.workflow_status.c.deduplication_id == deduplication_id,
762
+ )
763
+ ).fetchone()
764
+
765
+ if row is None:
766
+ return None
767
+ workflow_id: str = row[0]
768
+ return workflow_id
769
+
743
770
  @db_retry()
744
771
  def await_workflow_result(self, workflow_id: str) -> Any:
745
772
  while True:
@@ -1221,7 +1248,10 @@ class SystemDatabase(ABC):
1221
1248
  def check_child_workflow(
1222
1249
  self, workflow_uuid: str, function_id: int
1223
1250
  ) -> Optional[str]:
1224
- sql = sa.select(SystemSchema.operation_outputs.c.child_workflow_id).where(
1251
+ sql = sa.select(
1252
+ SystemSchema.operation_outputs.c.child_workflow_id,
1253
+ SystemSchema.operation_outputs.c.error,
1254
+ ).where(
1225
1255
  SystemSchema.operation_outputs.c.workflow_uuid == workflow_uuid,
1226
1256
  SystemSchema.operation_outputs.c.function_id == function_id,
1227
1257
  )
@@ -1233,7 +1263,10 @@ class SystemDatabase(ABC):
1233
1263
 
1234
1264
  if row is None:
1235
1265
  return None
1236
- return str(row[0])
1266
+ elif row[1]:
1267
+ raise _serialization.deserialize_exception(row[1])
1268
+ else:
1269
+ return str(row[0])
1237
1270
 
1238
1271
  @db_retry()
1239
1272
  def send(
dbos-1.14.0a5.dist-info/METADATA → dbos-1.14.0a6.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: dbos
3
- Version: 1.14.0a5
3
+ Version: 1.14.0a6
4
4
  Summary: Ultra-lightweight durable execution in Python
5
5
  Author-Email: "DBOS, Inc." <contact@dbos.dev>
6
6
  License: MIT
dbos-1.14.0a5.dist-info/RECORD → dbos-1.14.0a6.dist-info/RECORD CHANGED
@@ -1,8 +1,8 @@
1
- dbos-1.14.0a5.dist-info/METADATA,sha256=TDRJ02P3spTH9e3UKquTU4V0fhr2Hoq7i_yWosvisGM,13268
2
- dbos-1.14.0a5.dist-info/WHEEL,sha256=9P2ygRxDrTJz3gsagc0Z96ukrxjr-LFBGOgv3AuKlCA,90
3
- dbos-1.14.0a5.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
4
- dbos-1.14.0a5.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
5
- dbos/__init__.py,sha256=NssPCubaBxdiKarOWa-wViz1hdJSkmBGcpLX_gQ4NeA,891
1
+ dbos-1.14.0a6.dist-info/METADATA,sha256=SkLS0HlCdwrO5LyiXP_ls-yAZNRlnUJkFWSUbS4qiqM,13268
2
+ dbos-1.14.0a6.dist-info/WHEEL,sha256=9P2ygRxDrTJz3gsagc0Z96ukrxjr-LFBGOgv3AuKlCA,90
3
+ dbos-1.14.0a6.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
4
+ dbos-1.14.0a6.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
5
+ dbos/__init__.py,sha256=pT4BuNLDCrIQX27vQG8NlfxX6PZRU7r9miq4thJTszU,982
6
6
  dbos/__main__.py,sha256=G7Exn-MhGrVJVDbgNlpzhfh8WMX_72t3_oJaFT9Lmt8,653
7
7
  dbos/_admin_server.py,sha256=e8ELhcDWqR3_PNobnNgUvLGh5lzZq0yFSF6dvtzoQRI,16267
8
8
  dbos/_alembic_migrations/env.py,sha256=38SIGVbmn_VV2x2u1aHLcPOoWgZ84eCymf3g_NljmbU,1626
@@ -24,14 +24,15 @@ dbos/_alembic_migrations/versions/eab0cc1d9a14_job_queue.py,sha256=uvhFOtqbBreCe
24
24
  dbos/_alembic_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py,sha256=m90Lc5YH0ZISSq1MyxND6oq3RZrZKrIqEsZtwJ1jWxA,1049
25
25
  dbos/_app_db.py,sha256=GsV-uYU0QsChWwQDxnrh8_iiZ_zMQB-bsP2jPGIe2aM,16094
26
26
  dbos/_classproperty.py,sha256=f0X-_BySzn3yFDRKB2JpCbLYQ9tLwt1XftfshvY7CBs,626
27
- dbos/_client.py,sha256=_3Wc2QQc5VDcBuJ3cNb-lWg439OuITo2ex4Y7qb9l44,18800
27
+ dbos/_client.py,sha256=NgLpGQAPN1ehn6vIto2ToIvFUtprTWdEAbixu9wFZMU,18887
28
28
  dbos/_conductor/conductor.py,sha256=3E_hL3c9g9yWqKZkvI6KA0-ZzPMPRo06TOzT1esMiek,24114
29
29
  dbos/_conductor/protocol.py,sha256=q3rgLxINFtWFigdOONc-4gX4vn66UmMlJQD6Kj8LnL4,7420
30
- dbos/_context.py,sha256=fzozSsccFVwjtrJVXOfYBA0_x1S21RvvCTyN-VM644g,27111
31
- dbos/_core.py,sha256=0F1rNYTYLh3kjZB_APh38fblSaKev-2tG8G-9D0rxms,49876
30
+ dbos/_context.py,sha256=DC0yC8feklTckClG0Nc-uSDeRuTu7emLP5NmcuJiogk,27542
31
+ dbos/_core.py,sha256=plF80l5Rh_bBpy5PFZy3p3ux6agmYkUgZq8e36i68F4,50443
32
32
  dbos/_croniter.py,sha256=XHAyUyibs_59sJQfSNWkP7rqQY6_XrlfuuCxk4jYqek,47559
33
- dbos/_dbos.py,sha256=ftbVR5wAHSnc_PXpecVC1ZylY6c8UyiZRpcT2kSO8NQ,58226
33
+ dbos/_dbos.py,sha256=AgkcE9YSC9KWsDUNfEhdbkfR9NjT0seZDAOunb3n61w,58201
34
34
  dbos/_dbos_config.py,sha256=_26ktif8qAZW4Ujg6dZfLkYO7dE4CI8b3IQbw_5YkpA,25710
35
+ dbos/_debouncer.py,sha256=zsfGoFCNs2DnCTSkv6pDgNspzOnqsoquSlHgmi2nbXo,15124
35
36
  dbos/_debug.py,sha256=99j2SChWmCPAlZoDmjsJGe77tpU2LEa8E2TtLAnnh7o,1831
36
37
  dbos/_docker_pg_helper.py,sha256=tLJXWqZ4S-ExcaPnxg_i6cVxL6ZxrYlZjaGsklY-s2I,6115
37
38
  dbos/_error.py,sha256=GwO0Ng4d4iB52brY09-Ss6Cz_V28Xc0D0cRCzZ6XmNM,8688
@@ -51,8 +52,8 @@ dbos/_scheduler.py,sha256=CWeGVfl9h51VXfxt80y5Da_5pE8SPty_AYkfpJkkMxQ,2117
51
52
  dbos/_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
52
53
  dbos/_schemas/application_database.py,sha256=SypAS9l9EsaBHFn9FR8jmnqt01M74d9AF1AMa4m2hhI,1040
53
54
  dbos/_schemas/system_database.py,sha256=-dAKk-_Y3vzbpLT4ei-sIrBQgFyQiwPj1enZb1TYc8I,4943
54
- dbos/_serialization.py,sha256=bWuwhXSQcGmiazvhJHA5gwhrRWxtmFmcCFQSDJnqqkU,3666
55
- dbos/_sys_db.py,sha256=DhpthOe2hbG7Cp8CxJd5tRUyqgKJ7HWbgEySpt6GTwY,81756
55
+ dbos/_serialization.py,sha256=VOMpwuJ6IskOUEegFDPqjIoV5PoBWfH9BgtnuI1jLok,3906
56
+ dbos/_sys_db.py,sha256=fvrO3F-tOh8zgfxTNbnwksWLVdXYZVQJceIp6ZJOOlw,82838
56
57
  dbos/_sys_db_postgres.py,sha256=WcG-f1CUzUNBGEOjqKEp6DDraN63jTnJ6CAfieCcxOs,7555
57
58
  dbos/_sys_db_sqlite.py,sha256=xT9l-czMhLmfuu5UcnBzAyUxSFgzt3XtEWx9t_D8mZs,7361
58
59
  dbos/_templates/dbos-db-starter/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
@@ -75,4 +76,4 @@ dbos/cli/migration.py,sha256=5GiyagLZkyVvDz3StYxtFdkFoKFCmh6eSXjzsIGhZ_A,3330
75
76
  dbos/dbos-config.schema.json,sha256=LyUT1DOTaAwOP6suxQGS5KemVIqXGPyu_q7Hbo0neA8,6192
76
77
  dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
77
78
  version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
78
- dbos-1.14.0a5.dist-info/RECORD,,
79
+ dbos-1.14.0a6.dist-info/RECORD,,