dbos 0.24.0a11__py3-none-any.whl → 0.24.0a12__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registries. It is provided for informational purposes only.
- dbos/_core.py +156 -38
- dbos/_dbos.py +39 -18
- dbos/_queue.py +14 -3
- {dbos-0.24.0a11.dist-info → dbos-0.24.0a12.dist-info}/METADATA +1 -1
- {dbos-0.24.0a11.dist-info → dbos-0.24.0a12.dist-info}/RECORD +8 -8
- {dbos-0.24.0a11.dist-info → dbos-0.24.0a12.dist-info}/WHEEL +0 -0
- {dbos-0.24.0a11.dist-info → dbos-0.24.0a12.dist-info}/entry_points.txt +0 -0
- {dbos-0.24.0a11.dist-info → dbos-0.24.0a12.dist-info}/licenses/LICENSE +0 -0
dbos/_core.py
CHANGED
```diff
@@ -58,6 +58,7 @@ from ._error import (
 )
 from ._registrations import (
     DEFAULT_MAX_RECOVERY_ATTEMPTS,
+    DBOSFuncInfo,
     get_config_name,
     get_dbos_class_name,
     get_dbos_func_name,
```
```diff
@@ -82,6 +83,7 @@ if TYPE_CHECKING:
     DBOS,
     Workflow,
     WorkflowHandle,
+    WorkflowHandleAsync,
     WorkflowStatus,
     DBOSRegistry,
     IsolationLevel,
```
```diff
@@ -136,6 +138,48 @@ class WorkflowHandlePolling(Generic[R]):
         return stat
 
 
+class WorkflowHandleAsyncTask(Generic[R]):
+
+    def __init__(self, workflow_id: str, task: asyncio.Task[R], dbos: "DBOS"):
+        self.workflow_id = workflow_id
+        self.task = task
+        self.dbos = dbos
+
+    def get_workflow_id(self) -> str:
+        return self.workflow_id
+
+    async def get_result(self) -> R:
+        return await self.task
+
+    async def get_status(self) -> "WorkflowStatus":
+        stat = await asyncio.to_thread(self.dbos.get_workflow_status, self.workflow_id)
+        if stat is None:
+            raise DBOSNonExistentWorkflowError(self.workflow_id)
+        return stat
+
+
+class WorkflowHandleAsyncPolling(Generic[R]):
+
+    def __init__(self, workflow_id: str, dbos: "DBOS"):
+        self.workflow_id = workflow_id
+        self.dbos = dbos
+
+    def get_workflow_id(self) -> str:
+        return self.workflow_id
+
+    async def get_result(self) -> R:
+        res: R = await asyncio.to_thread(
+            self.dbos._sys_db.await_workflow_result, self.workflow_id
+        )
+        return res
+
+    async def get_status(self) -> "WorkflowStatus":
+        stat = await asyncio.to_thread(self.dbos.get_workflow_status, self.workflow_id)
+        if stat is None:
+            raise DBOSNonExistentWorkflowError(self.workflow_id)
+        return stat
+
+
 def _init_workflow(
     dbos: "DBOS",
     ctx: DBOSContext,
```
```diff
@@ -285,6 +329,32 @@ def _execute_workflow_wthread(
         raise
 
 
+async def _execute_workflow_async(
+    dbos: "DBOS",
+    status: WorkflowStatusInternal,
+    func: "Workflow[P, Coroutine[Any, Any, R]]",
+    ctx: DBOSContext,
+    *args: Any,
+    **kwargs: Any,
+) -> R:
+    attributes: TracedAttributes = {
+        "name": func.__name__,
+        "operationType": OperationType.WORKFLOW.value,
+    }
+    with DBOSContextSwap(ctx):
+        with EnterDBOSWorkflow(attributes):
+            try:
+                result = Pending[R](functools.partial(func, *args, **kwargs)).then(
+                    _get_wf_invoke_func(dbos, status)
+                )
+                return await result()
+            except Exception:
+                dbos.logger.error(
+                    f"Exception encountered in asynchronous workflow: {traceback.format_exc()}"
+                )
+                raise
+
+
 def execute_workflow_by_id(
     dbos: "DBOS", workflow_id: str, startNew: bool = False
 ) -> "WorkflowHandle[Any]":
```
```diff
@@ -349,26 +419,29 @@ def execute_workflow_by_id
     )
 
 
-
-
-
-
-
-
-
-
-
+def _get_new_wf() -> tuple[str, DBOSContext]:
+    # Sequence of events for starting a workflow:
+    # First - is there a WF already running?
+    # (and not in step as that is an error)
+    # Assign an ID to the workflow, if it doesn't have an app-assigned one
+    # If this is a root workflow, assign a new ID
+    # If this is a child workflow, assign parent wf id with call# suffix
+    # Make a (system) DB record for the workflow
+    # Pass the new context to a worker thread that will run the wf function
+    cur_ctx = get_local_dbos_context()
+    if cur_ctx is not None and cur_ctx.is_within_workflow():
+        assert cur_ctx.is_workflow()  # Not in a step
+        cur_ctx.function_id += 1
+        if len(cur_ctx.id_assigned_for_next_workflow) == 0:
+            cur_ctx.id_assigned_for_next_workflow = (
+                cur_ctx.workflow_id + "-" + str(cur_ctx.function_id)
+            )
 
+    new_wf_ctx = DBOSContext() if cur_ctx is None else cur_ctx.create_child()
+    new_wf_ctx.id_assigned_for_next_workflow = new_wf_ctx.assign_workflow_id()
+    new_wf_id = new_wf_ctx.id_assigned_for_next_workflow
 
-
-def start_workflow(
-    dbos: "DBOS",
-    func: "Workflow[P, R]",
-    queue_name: Optional[str],
-    execute_workflow: bool,
-    *args: P.args,
-    **kwargs: P.kwargs,
-) -> "WorkflowHandle[R]": ...
+    return (new_wf_id, new_wf_ctx)
 
 
 def start_workflow(
```
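
The new `_get_new_wf()` helper centralizes the ID-assignment logic that `start_workflow` previously inlined, so the `start_workflow_async` path added later in this diff can reuse it. The child-ID rule it implements: a child workflow started from inside a parent reuses the parent's workflow ID with the current function call number appended, which keeps child IDs deterministic across re-executions. A tiny illustrative sketch (the values below are made up):

```python
# Sketch of the child-workflow ID scheme in _get_new_wf(): a child workflow
# started from within a parent gets the parent's ID plus a "-<call number>"
# suffix, so replaying the parent deterministically yields the same child ID.
parent_workflow_id = "order-workflow-1234"   # hypothetical parent workflow ID
function_id = 3                              # incremented per call inside the parent

child_workflow_id = parent_workflow_id + "-" + str(function_id)
assert child_workflow_id == "order-workflow-1234-3"
```
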
```diff
@@ -379,6 +452,7 @@ def start_workflow(
     *args: P.args,
     **kwargs: P.kwargs,
 ) -> "WorkflowHandle[R]":
+
     # If the function has a class, add the class object as its first argument
     fself: Optional[object] = None
     if hasattr(func, "__self__"):
```
```diff
@@ -399,26 +473,7 @@ def start_workflow(
         "kwargs": kwargs,
     }
 
-
-    # First - is there a WF already running?
-    # (and not in step as that is an error)
-    # Assign an ID to the workflow, if it doesn't have an app-assigned one
-    # If this is a root workflow, assign a new ID
-    # If this is a child workflow, assign parent wf id with call# suffix
-    # Make a (system) DB record for the workflow
-    # Pass the new context to a worker thread that will run the wf function
-    cur_ctx = get_local_dbos_context()
-    if cur_ctx is not None and cur_ctx.is_within_workflow():
-        assert cur_ctx.is_workflow()  # Not in a step
-        cur_ctx.function_id += 1
-        if len(cur_ctx.id_assigned_for_next_workflow) == 0:
-            cur_ctx.id_assigned_for_next_workflow = (
-                cur_ctx.workflow_id + "-" + str(cur_ctx.function_id)
-            )
-
-    new_wf_ctx = DBOSContext() if cur_ctx is None else cur_ctx.create_child()
-    new_wf_ctx.id_assigned_for_next_workflow = new_wf_ctx.assign_workflow_id()
-    new_wf_id = new_wf_ctx.id_assigned_for_next_workflow
+    new_wf_id, new_wf_ctx = _get_new_wf()
 
     status = _init_workflow(
         dbos,
```
```diff
@@ -458,6 +513,69 @@ def start_workflow(
     return WorkflowHandleFuture(new_wf_id, future, dbos)
 
 
+async def start_workflow_async(
+    dbos: "DBOS",
+    func: "Workflow[P, Coroutine[Any, Any, R]]",
+    queue_name: Optional[str],
+    execute_workflow: bool,
+    *args: P.args,
+    **kwargs: P.kwargs,
+) -> "WorkflowHandleAsync[R]":
+
+    # If the function has a class, add the class object as its first argument
+    fself: Optional[object] = None
+    if hasattr(func, "__self__"):
+        fself = func.__self__
+    if fself is not None:
+        args = (fself,) + args  # type: ignore
+
+    fi = get_func_info(func)
+    if fi is None:
+        raise DBOSWorkflowFunctionNotFoundError(
+            "<NONE>", f"start_workflow: function {func.__name__} is not registered"
+        )
+
+    func = cast("Workflow[P, R]", func.__orig_func)  # type: ignore
+
+    inputs: WorkflowInputs = {
+        "args": args,
+        "kwargs": kwargs,
+    }
+
+    new_wf_id, new_wf_ctx = _get_new_wf()
+
+    status = await asyncio.to_thread(
+        _init_workflow,
+        dbos,
+        new_wf_ctx,
+        inputs=inputs,
+        wf_name=get_dbos_func_name(func),
+        class_name=get_dbos_class_name(fi, func, args),
+        config_name=get_config_name(fi, func, args),
+        temp_wf_type=get_temp_workflow_type(func),
+        queue=queue_name,
+        max_recovery_attempts=fi.max_recovery_attempts,
+    )
+
+    wf_status = status["status"]
+
+    if not execute_workflow or (
+        not dbos.debug_mode
+        and (
+            wf_status == WorkflowStatusString.ERROR.value
+            or wf_status == WorkflowStatusString.SUCCESS.value
+        )
+    ):
+        dbos.logger.debug(
+            f"Workflow {new_wf_id} already completed with status {wf_status}. Directly returning a workflow handle."
+        )
+        return WorkflowHandleAsyncPolling(new_wf_id, dbos)
+
+    coro = _execute_workflow_async(dbos, status, func, new_wf_ctx, *args, **kwargs)
+    task = asyncio.create_task(coro)
+    return WorkflowHandleAsyncTask(new_wf_id, task, dbos)
+
+
 if sys.version_info < (3, 12):
 
     def _mark_coroutine(func: Callable[P, R]) -> Callable[P, R]:
```
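
Both new handle classes in `_core.py` follow the same asyncio pattern: `WorkflowHandleAsyncTask` simply awaits the `asyncio.Task` created by `start_workflow_async`, while `WorkflowHandleAsyncPolling` pushes the blocking system-database wait onto a worker thread with `asyncio.to_thread` so the event loop stays responsive. A minimal standalone sketch of the polling variant, using a hypothetical `blocking_wait_for_result` stand-in rather than DBOS internals:

```python
import asyncio
import time


def blocking_wait_for_result(workflow_id: str) -> str:
    """Hypothetical stand-in for a blocking system-database call."""
    time.sleep(0.5)  # simulate waiting on the database
    return f"result-of-{workflow_id}"


class PollingHandleSketch:
    """Minimal sketch of the WorkflowHandleAsyncPolling idea: async methods
    that offload blocking waits to a worker thread via asyncio.to_thread."""

    def __init__(self, workflow_id: str) -> None:
        self.workflow_id = workflow_id

    def get_workflow_id(self) -> str:
        return self.workflow_id

    async def get_result(self) -> str:
        # The event loop remains free while the blocking wait runs off-loop.
        return await asyncio.to_thread(blocking_wait_for_result, self.workflow_id)


async def main() -> None:
    handle = PollingHandleSketch("wf-123")
    print(handle.get_workflow_id(), await handle.get_result())


if __name__ == "__main__":
    asyncio.run(main())
```
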
dbos/_dbos.py
CHANGED
```diff
@@ -49,6 +49,7 @@ from ._core import (
     send,
     set_event,
     start_workflow,
+    start_workflow_async,
     workflow_wrapper,
 )
 from ._queue import Queue, queue_thread
```
```diff
@@ -702,35 +703,26 @@ class DBOS:
             f"{e.name} dependency not found. Please install {e.name} via your package manager."
         ) from e
 
-    @overload
-    @classmethod
-    def start_workflow(
-        cls,
-        func: Workflow[P, Coroutine[Any, Any, R]],
-        *args: P.args,
-        **kwargs: P.kwargs,
-    ) -> WorkflowHandle[R]: ...
-
-    @overload
     @classmethod
     def start_workflow(
         cls,
         func: Workflow[P, R],
         *args: P.args,
         **kwargs: P.kwargs,
-    ) -> WorkflowHandle[R]:
+    ) -> WorkflowHandle[R]:
+        """Invoke a workflow function in the background, returning a handle to the ongoing execution."""
+        return start_workflow(_get_dbos_instance(), func, None, True, *args, **kwargs)
 
     @classmethod
-    def
+    async def start_workflow_async(
         cls,
-        func: Workflow[P,
+        func: Workflow[P, Coroutine[Any, Any, R]],
         *args: P.args,
         **kwargs: P.kwargs,
-    ) ->
-        """Invoke a workflow function
-        return
-
-            start_workflow(_get_dbos_instance(), func, None, True, *args, **kwargs),
+    ) -> WorkflowHandleAsync[R]:
+        """Invoke a workflow function on the event loop, returning a handle to the ongoing execution."""
+        return await start_workflow_async(
+            _get_dbos_instance(), func, None, True, *args, **kwargs
         )
 
     @classmethod
```
```diff
@@ -1107,6 +1099,35 @@ class WorkflowHandle(Generic[R], Protocol):
         ...
 
 
+class WorkflowHandleAsync(Generic[R], Protocol):
+    """
+    Handle to a workflow function.
+
+    `WorkflowHandleAsync` represents a current or previous workflow function invocation,
+    allowing its status and result to be accessed.
+
+    Attributes:
+        workflow_id(str): Workflow ID of the function invocation
+
+    """
+
+    def __init__(self, workflow_id: str) -> None: ...
+
+    workflow_id: str
+
+    def get_workflow_id(self) -> str:
+        """Return the applicable workflow ID."""
+        ...
+
+    async def get_result(self) -> R:
+        """Return the result of the workflow function invocation, waiting if necessary."""
+        ...
+
+    async def get_status(self) -> WorkflowStatus:
+        """Return the current workflow function invocation status as `WorkflowStatus`."""
+        ...
+
+
 class DBOSConfiguredInstance:
     """
     Base class for classes containing DBOS member functions.
```
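
Taken together, the `_dbos.py` changes replace the overload-based `start_workflow` signature with a plain synchronous version and add an `async` counterpart plus the `WorkflowHandleAsync` protocol. A hedged usage sketch of the new classmethod, assuming a DBOS application that is already configured and launched and a coroutine workflow registered with the usual `@DBOS.workflow()` decorator (setup and configuration are omitted here):

```python
import asyncio

from dbos import DBOS


@DBOS.workflow()
async def greet_workflow(name: str) -> str:
    # A coroutine workflow; running it requires a configured, launched DBOS app.
    return f"Hello, {name}!"


async def main() -> None:
    # New in 0.24.0a12: start a coroutine workflow without blocking the event
    # loop and receive a WorkflowHandleAsync back.
    handle = await DBOS.start_workflow_async(greet_workflow, "DBOS")
    print("workflow id:", handle.get_workflow_id())
    print("status:", await handle.get_status())
    print("result:", await handle.get_result())


if __name__ == "__main__":
    asyncio.run(main())
```
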
dbos/_queue.py
CHANGED
```diff
@@ -1,16 +1,16 @@
 import threading
 import traceback
-from typing import TYPE_CHECKING, Optional, TypedDict
+from typing import TYPE_CHECKING, Any, Coroutine, Optional, TypedDict
 
 from psycopg import errors
 from sqlalchemy.exc import OperationalError
 
 from dbos._utils import GlobalParams
 
-from ._core import P, R, execute_workflow_by_id, start_workflow
+from ._core import P, R, execute_workflow_by_id, start_workflow, start_workflow_async
 
 if TYPE_CHECKING:
-    from ._dbos import DBOS, Workflow, WorkflowHandle
+    from ._dbos import DBOS, Workflow, WorkflowHandle, WorkflowHandleAsync
 
 
 class QueueRateLimit(TypedDict):
```
```diff
@@ -66,6 +66,17 @@ class Queue:
         dbos = _get_dbos_instance()
         return start_workflow(dbos, func, self.name, False, *args, **kwargs)
 
+    async def enqueue_async(
+        self,
+        func: "Workflow[P, Coroutine[Any, Any, R]]",
+        *args: P.args,
+        **kwargs: P.kwargs,
+    ) -> "WorkflowHandleAsync[R]":
+        from ._dbos import _get_dbos_instance
+
+        dbos = _get_dbos_instance()
+        return await start_workflow_async(dbos, func, self.name, False, *args, **kwargs)
+
 
 def queue_thread(stop_event: threading.Event, dbos: "DBOS") -> None:
     while not stop_event.is_set():
```
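
`enqueue_async` mirrors the existing `Queue.enqueue`, but awaits `start_workflow_async` and returns a `WorkflowHandleAsync`. A hedged sketch of how it might be used, again assuming an already configured and launched DBOS application (the queue name and workflow below are illustrative):

```python
import asyncio

from dbos import DBOS, Queue

queue = Queue("example_queue")  # illustrative queue name


@DBOS.workflow()
async def process_item(item: str) -> str:
    return item.upper()


async def main() -> None:
    # Enqueue a coroutine workflow from async code; the returned
    # WorkflowHandleAsync lets the caller await the result later.
    handle = await queue.enqueue_async(process_item, "hello")
    print(await handle.get_result())


if __name__ == "__main__":
    asyncio.run(main())
```
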
{dbos-0.24.0a11.dist-info → dbos-0.24.0a12.dist-info}/RECORD
CHANGED

```diff
@@ -1,7 +1,7 @@
-dbos-0.24.
-dbos-0.24.
-dbos-0.24.
-dbos-0.24.
+dbos-0.24.0a12.dist-info/METADATA,sha256=bbX82S62nJ9ZMnyppiRzrgMB_NuRFhigHkLeTK7aa_I,5522
+dbos-0.24.0a12.dist-info/WHEEL,sha256=thaaA2w1JzcGC48WYufAs8nrYZjJm8LqNfnXFOFyCC4,90
+dbos-0.24.0a12.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
+dbos-0.24.0a12.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
 dbos/__init__.py,sha256=uq9LP5uY96kIS9N0yKqlvDwADmtg_Hl30uSUhyuUr-4,754
 dbos/__main__.py,sha256=P7jAr-7L9XE5mrsQ7i4b-bLr2ap1tCQfhMByLCRWDj0,568
 dbos/_admin_server.py,sha256=YiVn5lywz2Vg8_juyNHOYl0HVEy48--7b4phwK7r92o,5732
@@ -13,10 +13,10 @@ dbos/_cloudutils/databases.py,sha256=_shqaqSvhY4n2ScgQ8IP5PDZvzvcx3YBKV8fj-cxhSY
 dbos/_conductor/conductor.py,sha256=5mFrE6yEAHWcFjkYr5p2AoPkV6iwYHxsB3K06yaOmqM,15112
 dbos/_conductor/protocol.py,sha256=W-BnX-bD5folo96NGrUytuCHW72H-OQwPrkJ9q5Au5c,5534
 dbos/_context.py,sha256=Ue5qu3rzLfRmPkz-UUZi9ZS8iXpapRN0NTM4mbA2QmQ,17738
-dbos/_core.py,sha256=
+dbos/_core.py,sha256=_a_rSkAWNLoHqzQbkqez0mpctkjDs301123ti3wmKHk,41340
 dbos/_croniter.py,sha256=XHAyUyibs_59sJQfSNWkP7rqQY6_XrlfuuCxk4jYqek,47559
 dbos/_db_wizard.py,sha256=YEW2qoy6hfHQv2fZ_4nHiPUeHMFofPpNTolJ1Kvw7AQ,8394
-dbos/_dbos.py,sha256=
+dbos/_dbos.py,sha256=ymQnOZ8RQehcPVAjjJipoW8StxM7bktTyT_4a_Zlse8,43599
 dbos/_dbos_config.py,sha256=-DnOQ5gk3I8RPxNkL3GsAkaYvL8jv5umS8Xv5V7UQkw,20217
 dbos/_debug.py,sha256=mmgvLkqlrljMBBow9wk01PPur9kUf2rI_11dTJXY4gw,1822
 dbos/_error.py,sha256=B6Y9XLS1f6yrawxB2uAEYFMxFwk9BHhdxPNddKco-Fw,5399
@@ -35,7 +35,7 @@ dbos/_migrations/versions/d76646551a6b_job_queue_limiter.py,sha256=8PyFi8rd6CN-m
 dbos/_migrations/versions/d76646551a6c_workflow_queue.py,sha256=G942nophZ2uC2vc4hGBC02Ptng1715roTjY3xiyzZU4,729
 dbos/_migrations/versions/eab0cc1d9a14_job_queue.py,sha256=uvhFOtqbBreCePhAxZfIT0qCAI7BiZTou9wt6QnbY7c,1412
 dbos/_outcome.py,sha256=FDMgWVjZ06vm9xO-38H17mTqBImUYQxgKs_bDCSIAhE,6648
-dbos/_queue.py,sha256=
+dbos/_queue.py,sha256=OWUtbBAqdkDAArFWkwlF8STxykV4iQmrZxrF-_lavh4,3341
 dbos/_recovery.py,sha256=4KyZb0XJEUGH7ekYT1kpx38i6y5vygPeH75Ta7RZjYo,2596
 dbos/_registrations.py,sha256=_zy6k944Ll8QwqU12Kr3OP23ukVtm8axPNN1TS_kJRc,6717
 dbos/_request.py,sha256=cX1B3Atlh160phgS35gF1VEEV4pD126c9F3BDgBmxZU,929
@@ -65,4 +65,4 @@ dbos/cli/cli.py,sha256=pet2vf4GLlSDfxfQbsplM9uewD6pJK2ZpLgZlwgBU5w,15627
 dbos/dbos-config.schema.json,sha256=HtF_njVTGHLdzBGZ4OrGQz3qbPPT0Go-iwd1PgFVTNg,5847
 dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
 version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
-dbos-0.24.
+dbos-0.24.0a12.dist-info/RECORD,,
```
{dbos-0.24.0a11.dist-info → dbos-0.24.0a12.dist-info}/WHEEL
File without changes

{dbos-0.24.0a11.dist-info → dbos-0.24.0a12.dist-info}/entry_points.txt
File without changes

{dbos-0.24.0a11.dist-info → dbos-0.24.0a12.dist-info}/licenses/LICENSE
File without changes