dbos 0.26.0a3__py3-none-any.whl → 0.26.0a5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dbos/_context.py +4 -2
- dbos/_core.py +17 -25
- dbos/_dbos.py +21 -8
- dbos/_kafka.py +1 -1
- dbos/_queue.py +2 -1
- dbos/_recovery.py +1 -1
- dbos/_scheduler.py +1 -1
- dbos/_sys_db.py +58 -7
- dbos/_utils.py +2 -0
- {dbos-0.26.0a3.dist-info → dbos-0.26.0a5.dist-info}/METADATA +1 -1
- {dbos-0.26.0a3.dist-info → dbos-0.26.0a5.dist-info}/RECORD +14 -14
- {dbos-0.26.0a3.dist-info → dbos-0.26.0a5.dist-info}/WHEEL +0 -0
- {dbos-0.26.0a3.dist-info → dbos-0.26.0a5.dist-info}/entry_points.txt +0 -0
- {dbos-0.26.0a3.dist-info → dbos-0.26.0a5.dist-info}/licenses/LICENSE +0 -0
dbos/_context.py
CHANGED
@@ -195,8 +195,10 @@ class DBOSContext:
     def end_handler(self, exc_value: Optional[BaseException]) -> None:
         self._end_span(exc_value)

-    def get_current_span(self) -> Span:
-        return self.spans[-1]
+    def get_current_span(self) -> Optional[Span]:
+        if len(self.spans):
+            return self.spans[-1]
+        return None

     def _start_span(self, attributes: TracedAttributes) -> None:
         attributes["operationUUID"] = (
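The return type of `get_current_span` is now `Optional[Span]`, so callers can no longer chain directly off it. A minimal sketch of the guarded pattern this implies (the `record_event` helper is illustrative, not part of dbos):

```python
from typing import Optional

from opentelemetry.trace import Span

from dbos._context import DBOSContext  # module shown in this diff


def record_event(ctx: DBOSContext, name: str) -> None:
    # get_current_span() may now return None when no span is active
    # (e.g. outside any traced operation), so guard before use.
    span: Optional[Span] = ctx.get_current_span()
    if span is not None:
        span.add_event(name)
```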
dbos/_core.py
CHANGED
@@ -398,9 +398,7 @@ async def _execute_workflow_async(
         raise


-def execute_workflow_by_id(
-    dbos: "DBOS", workflow_id: str, startNew: bool = False
-) -> "WorkflowHandle[Any]":
+def execute_workflow_by_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[Any]":
     status = dbos._sys_db.get_workflow_status(workflow_id)
     if not status:
         raise DBOSRecoveryError(workflow_id, "Workflow status not found")
@@ -441,7 +439,7 @@ def execute_workflow_by_id(
     class_object = dbos._registry.class_info_map[class_name]
     inputs["args"] = (class_object,) + inputs["args"]

-    if startNew:
+    with SetWorkflowID(workflow_id):
         return start_workflow(
             dbos,
             wf_func,
@@ -450,16 +448,6 @@ def execute_workflow_by_id(
             *inputs["args"],
             **inputs["kwargs"],
         )
-    else:
-        with SetWorkflowID(workflow_id):
-            return start_workflow(
-                dbos,
-                wf_func,
-                status["queue_name"],
-                True,
-                *inputs["args"],
-                **inputs["kwargs"],
-            )


 def _get_new_wf() -> tuple[str, DBOSContext]:
@@ -892,10 +880,12 @@ def decorate_transaction(
                 except DBAPIError as dbapi_error:
                     if dbapi_error.orig.sqlstate == "40001":  # type: ignore
                         # Retry on serialization failure
-                        ctx.get_current_span().add_event(
-                            "Transaction Serialization Failure",
-                            {"retry_wait_seconds": retry_wait_seconds},
-                        )
+                        span = ctx.get_current_span()
+                        if span:
+                            span.add_event(
+                                "Transaction Serialization Failure",
+                                {"retry_wait_seconds": retry_wait_seconds},
+                            )
                         time.sleep(retry_wait_seconds)
                         retry_wait_seconds = min(
                             retry_wait_seconds * backoff_factor,
@@ -1004,13 +994,15 @@ def decorate_step(
                     f"Step being automatically retried. (attempt {attempt + 1} of {attempts}). {traceback.format_exc()}"
                 )
                 ctx = assert_current_dbos_context()
-                ctx.get_current_span().add_event(
-                    f"Step attempt {attempt} failed",
-                    {
-                        "error": str(error),
-                        "retryIntervalSeconds": interval_seconds,
-                    },
-                )
+                span = ctx.get_current_span()
+                if span:
+                    span.add_event(
+                        f"Step attempt {attempt} failed",
+                        {
+                            "error": str(error),
+                            "retryIntervalSeconds": interval_seconds,
+                        },
+                    )
                 return min(
                     interval_seconds * (backoff_rate**attempt),
                     max_retry_interval_seconds,
dbos/_dbos.py
CHANGED
@@ -33,7 +33,7 @@ from opentelemetry.trace import Span

 from dbos import _serialization
 from dbos._conductor.conductor import ConductorWebsocket
-from dbos._utils import GlobalParams
+from dbos._utils import INTERNAL_QUEUE_NAME, GlobalParams
 from dbos._workflow_commands import (
     WorkflowStatus,
     list_queued_workflows,
@@ -234,6 +234,13 @@ class DBOSRegistry:
         hasher.update(source.encode("utf-8"))
         return hasher.hexdigest()

+    def get_internal_queue(self) -> Queue:
+        """
+        Get or create the internal queue used for the DBOS scheduler, for Kafka, and for
+        programmatic resuming and restarting of workflows.
+        """
+        return Queue(INTERNAL_QUEUE_NAME)
+

 class DBOS:
     """
@@ -489,6 +496,9 @@ class DBOS:
         notification_listener_thread.start()
         self._background_threads.append(notification_listener_thread)

+        # Create the internal queue if it has not yet been created
+        self._registry.get_internal_queue()
+
         # Start the queue thread
         evt = threading.Event()
         self.stop_events.append(evt)
@@ -929,11 +939,6 @@ class DBOS:
         """Execute a workflow by ID (for recovery)."""
         return execute_workflow_by_id(_get_dbos_instance(), workflow_id)

-    @classmethod
-    def restart_workflow(cls, workflow_id: str) -> None:
-        """Execute a workflow by ID (for recovery)."""
-        execute_workflow_by_id(_get_dbos_instance(), workflow_id, True)
-
     @classmethod
     def recover_pending_workflows(
         cls, executor_ids: List[str] = ["local"]
@@ -954,7 +959,13 @@ class DBOS:
         dbos_logger.info(f"Resuming workflow: {workflow_id}")
         _get_dbos_instance()._sys_db.resume_workflow(workflow_id)
         _get_or_create_dbos_registry().clear_workflow_cancelled(workflow_id)
-        return
+        return cls.retrieve_workflow(workflow_id)
+
+    @classmethod
+    def restart_workflow(cls, workflow_id: str) -> WorkflowHandle[Any]:
+        """Restart a workflow with a new workflow ID"""
+        forked_workflow_id = _get_dbos_instance()._sys_db.fork_workflow(workflow_id)
+        return cls.retrieve_workflow(forked_workflow_id)

     @classmethod
     def list_workflows(
@@ -1083,7 +1094,9 @@ class DBOS:
     def span(cls) -> Span:
         """Return the tracing `Span` associated with the current context."""
         ctx = assert_current_dbos_context()
-        return ctx.get_current_span()
+        span = ctx.get_current_span()
+        assert span
+        return span

     @classproperty
     def request(cls) -> Optional["Request"]:
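`resume_workflow` and the rewritten `restart_workflow` now both return a `WorkflowHandle`, so callers can block on the resumed or forked execution. A sketch of the intended call pattern, assuming a previously started workflow with the placeholder ID below:

```python
from dbos import DBOS

workflow_id = "example-workflow-id"  # placeholder for a real workflow ID

# resume_workflow now returns a handle rather than None, so the caller
# can wait on the resumed execution directly.
handle = DBOS.resume_workflow(workflow_id)
result = handle.get_result()

# restart_workflow forks the workflow under a fresh random ID; the handle
# tracks the forked execution, whose ID differs from the original.
forked_handle = DBOS.restart_workflow(workflow_id)
forked_result = forked_handle.get_result()
```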
dbos/_kafka.py
CHANGED
@@ -115,7 +115,7 @@ def kafka_consumer(
         _in_order_kafka_queues[topic] = queue
     else:
         global _kafka_queue
-        _kafka_queue =
+        _kafka_queue = dbosreg.get_internal_queue()
     stop_event = threading.Event()
     dbosreg.register_poller(
         stop_event, _kafka_consumer_loop, func, config, topics, stop_event, in_order
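For context, a sketch of a consumer registration that reaches this code path; the broker address, group ID, and topic are placeholders, and the decorator usage follows the documented dbos pattern as best understood. With `in_order=False`, messages are dispatched through the shared internal queue obtained above:

```python
from dbos import DBOS, KafkaMessage


@DBOS.kafka_consumer(
    config={
        "bootstrap.servers": "localhost:9092",  # placeholder broker
        "group.id": "dbos-example",             # placeholder group
    },
    topics=["example-topic"],                   # placeholder topic
)
@DBOS.workflow()
def consume(msg: KafkaMessage) -> None:
    # Each message starts one workflow execution on the internal queue.
    DBOS.logger.info(f"Received message: {msg.value!r}")
```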
dbos/_queue.py
CHANGED
@@ -82,7 +82,8 @@ def queue_thread(stop_event: threading.Event, dbos: "DBOS") -> None:
     while not stop_event.is_set():
         if stop_event.wait(timeout=1):
             return
-        for _, queue in dbos._registry.queue_info_map.items():
+        queues = dict(dbos._registry.queue_info_map)
+        for _, queue in queues.items():
             try:
                 wf_ids = dbos._sys_db.start_queued_workflows(
                     queue, GlobalParams.executor_id, GlobalParams.app_version
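The new `dict(...)` copy matters because queues can be registered concurrently (the internal queue above is one such registration), and Python raises if a dict changes size while being iterated. A standalone illustration of the hazard and the snapshot fix:

```python
queue_info_map = {"queue-a": object(), "queue-b": object()}

# Iterating the live dict while a key is added mid-iteration raises
# "RuntimeError: dictionary changed size during iteration":
#
#   for name in queue_info_map:
#       queue_info_map["queue-c"] = object()  # would raise
#
# Copying first, as queue_thread now does, iterates a stable snapshot:
for name, queue in dict(queue_info_map).items():
    queue_info_map.setdefault("queue-c", object())  # safe: snapshot is separate
```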
dbos/_recovery.py
CHANGED
@@ -17,7 +17,7 @@ if TYPE_CHECKING:
 def _recover_workflow(
     dbos: "DBOS", workflow: GetPendingWorkflowsOutput
 ) -> "WorkflowHandle[Any]":
-    if workflow.queue_name
+    if workflow.queue_name:
         cleared = dbos._sys_db.clear_queue_assignment(workflow.workflow_uuid)
         if cleared:
             return dbos.retrieve_workflow(workflow.workflow_uuid)
dbos/_scheduler.py
CHANGED
@@ -52,7 +52,7 @@ def scheduled(
         )

     global scheduler_queue
-    scheduler_queue =
+    scheduler_queue = dbosreg.get_internal_queue()
     stop_event = threading.Event()
     dbosreg.register_poller(stop_event, scheduler_loop, func, cron, stop_event)
     return func
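A sketch of a scheduled workflow that would now be dispatched on the shared internal queue; the cron expression and body are placeholders:

```python
from datetime import datetime

from dbos import DBOS


@DBOS.scheduled("* * * * *")  # placeholder: run every minute
@DBOS.workflow()
def run_every_minute(scheduled_time: datetime, actual_time: datetime) -> None:
    DBOS.logger.info(f"Scheduled at {scheduled_time}, started at {actual_time}")
```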
dbos/_sys_db.py
CHANGED
@@ -4,6 +4,7 @@ import os
 import re
 import threading
 import time
+import uuid
 from enum import Enum
 from typing import (
     TYPE_CHECKING,
@@ -25,7 +26,7 @@ from alembic.config import Config
 from sqlalchemy.exc import DBAPIError
 from sqlalchemy.sql import func

-from dbos._utils import GlobalParams
+from dbos._utils import INTERNAL_QUEUE_NAME, GlobalParams

 from . import _serialization
 from ._context import get_local_dbos_context
@@ -447,13 +448,12 @@ class SystemDatabase:
             )
         )

-    def resume_workflow(
-        self,
-        workflow_id: str,
-    ) -> None:
+    def resume_workflow(self, workflow_id: str) -> None:
         if self._debug_mode:
             raise Exception("called resume_workflow in debug mode")
         with self.engine.begin() as c:
+            # Execute with snapshot isolation in case of concurrent calls on the same workflow
+            c.execute(sa.text("SET TRANSACTION ISOLATION LEVEL REPEATABLE READ"))
             # Check the status of the workflow. If it is complete, do nothing.
             row = c.execute(
                 sa.select(
@@ -472,12 +472,63 @@
                     SystemSchema.workflow_queue.c.workflow_uuid == workflow_id
                 )
             )
-            #
+            # Enqueue the workflow on the internal queue
+            c.execute(
+                pg.insert(SystemSchema.workflow_queue).values(
+                    workflow_uuid=workflow_id,
+                    queue_name=INTERNAL_QUEUE_NAME,
+                )
+            )
+            # Set the workflow's status to ENQUEUED and clear its recovery attempts.
             c.execute(
                 sa.update(SystemSchema.workflow_status)
                 .where(SystemSchema.workflow_status.c.workflow_uuid == workflow_id)
-                .values(status=WorkflowStatusString.
+                .values(status=WorkflowStatusString.ENQUEUED.value, recovery_attempts=0)
+            )
+
+    def fork_workflow(self, original_workflow_id: str) -> str:
+        status = self.get_workflow_status(original_workflow_id)
+        if status is None:
+            raise Exception(f"Workflow {original_workflow_id} not found")
+        inputs = self.get_workflow_inputs(original_workflow_id)
+        if inputs is None:
+            raise Exception(f"Workflow {original_workflow_id} not found")
+        # Generate a random ID for the forked workflow
+        forked_workflow_id = str(uuid.uuid4())
+        with self.engine.begin() as c:
+            # Create an entry for the forked workflow with the same
+            # initial values as the original.
+            c.execute(
+                pg.insert(SystemSchema.workflow_status).values(
+                    workflow_uuid=forked_workflow_id,
+                    status=WorkflowStatusString.ENQUEUED.value,
+                    name=status["name"],
+                    class_name=status["class_name"],
+                    config_name=status["config_name"],
+                    application_version=status["app_version"],
+                    application_id=status["app_id"],
+                    request=status["request"],
+                    authenticated_user=status["authenticated_user"],
+                    authenticated_roles=status["authenticated_roles"],
+                    assumed_role=status["assumed_role"],
+                    queue_name=INTERNAL_QUEUE_NAME,
+                )
+            )
+            # Copy the original workflow's inputs into the forked workflow
+            c.execute(
+                pg.insert(SystemSchema.workflow_inputs).values(
+                    workflow_uuid=forked_workflow_id,
+                    inputs=_serialization.serialize_args(inputs),
+                )
+            )
+            # Enqueue the forked workflow on the internal queue
+            c.execute(
+                pg.insert(SystemSchema.workflow_queue).values(
+                    workflow_uuid=forked_workflow_id,
+                    queue_name=INTERNAL_QUEUE_NAME,
+                )
             )
+        return forked_workflow_id

     def get_workflow_status(
         self, workflow_uuid: str
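The isolation-level statement in `resume_workflow` must be the first statement of the transaction, which is why it is issued immediately inside `engine.begin()`. A standalone SQLAlchemy sketch of the same pattern; the connection URL, table, and workflow ID are placeholders:

```python
import sqlalchemy as sa

engine = sa.create_engine("postgresql://user:pass@localhost/example_db")  # placeholder URL

with engine.begin() as conn:
    # Upgrade this transaction to snapshot isolation before any reads, so
    # two concurrent resumes of the same workflow cannot both observe the
    # pre-resume status and enqueue the workflow twice.
    conn.execute(sa.text("SET TRANSACTION ISOLATION LEVEL REPEATABLE READ"))
    row = conn.execute(
        sa.text("SELECT status FROM workflow_status WHERE workflow_uuid = :id"),
        {"id": "example-workflow-id"},  # placeholder ID
    ).fetchone()
```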
dbos/_utils.py
CHANGED
{dbos-0.26.0a3.dist-info → dbos-0.26.0a5.dist-info}/RECORD
CHANGED
@@ -1,7 +1,7 @@
-dbos-0.26.
-dbos-0.26.
-dbos-0.26.
-dbos-0.26.
+dbos-0.26.0a5.dist-info/METADATA,sha256=2Nl_CWZemlvRczy9jLMeUe2qFV0GhvI-gOcs_Ydgydk,5553
+dbos-0.26.0a5.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
+dbos-0.26.0a5.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
+dbos-0.26.0a5.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
 dbos/__init__.py,sha256=3NQfGlBiiUSM_v88STdVP3rNZvGkUL_9WbSotKb8Voo,873
 dbos/__main__.py,sha256=G7Exn-MhGrVJVDbgNlpzhfh8WMX_72t3_oJaFT9Lmt8,653
 dbos/_admin_server.py,sha256=vxPG_YJ6lYrkfPCSp42FiATVLBOij7Fm52Yngg5Z_tE,7027
@@ -13,17 +13,17 @@ dbos/_cloudutils/cloudutils.py,sha256=YC7jGsIopT0KveLsqbRpQk2KlRBk-nIRC_UCgep4f3
 dbos/_cloudutils/databases.py,sha256=_shqaqSvhY4n2ScgQ8IP5PDZvzvcx3YBKV8fj-cxhSY,8543
 dbos/_conductor/conductor.py,sha256=7elKINsgl4s1Tg5DwrU-K7xQ5vQvmDAIfAvUgfwpGN0,16784
 dbos/_conductor/protocol.py,sha256=xN7pmooyF1pqbH1b6WhllU5718P7zSb_b0KCwA6bzcs,6716
-dbos/_context.py,sha256=
-dbos/_core.py,sha256=
+dbos/_context.py,sha256=I8sLkdKTTkZEz7wG-MjynaQB6XEF2bLXuwNksiauP7w,19430
+dbos/_core.py,sha256=9PM2LZcaMhqF628wMGEOLmZadqwo_ViuM6-1j36Qd6k,45645
 dbos/_croniter.py,sha256=XHAyUyibs_59sJQfSNWkP7rqQY6_XrlfuuCxk4jYqek,47559
 dbos/_db_wizard.py,sha256=VnMa6OL87Lc-XPDD1RnXp8NjsJE8YgiQLj3wtWAXp-8,8252
-dbos/_dbos.py,sha256=
+dbos/_dbos.py,sha256=9oZFA9ZbpMO76sPAa59xhTPik6ZaAEumhKZ7t9s44w0,45726
 dbos/_dbos_config.py,sha256=rTn30Hgh-RzTxqHbnYh2pC3Ioo30eJV9K4YxhJd-Gj4,22718
 dbos/_debug.py,sha256=mmgvLkqlrljMBBow9wk01PPur9kUf2rI_11dTJXY4gw,1822
 dbos/_error.py,sha256=B6Y9XLS1f6yrawxB2uAEYFMxFwk9BHhdxPNddKco-Fw,5399
 dbos/_fastapi.py,sha256=PhaKftbApHnjtYEOw0EYna_3K0cmz__J9of7mRJWzu4,3704
 dbos/_flask.py,sha256=DZKUZR5-xOzPI7tYZ53r2PvvHVoAb8SYwLzMVFsVfjI,2608
-dbos/_kafka.py,sha256=
+dbos/_kafka.py,sha256=pz0xZ9F3X9Ky1k-VSbeF3tfPhP3UPr3lUUhUfE41__U,4198
 dbos/_kafka_message.py,sha256=NYvOXNG3Qn7bghn1pv3fg4Pbs86ILZGcK4IB-MLUNu0,409
 dbos/_logger.py,sha256=qv2srteCF2rSRjCK1VGOck3ieIkwUe9Lvbv60mJc16E,4069
 dbos/_migrations/env.py,sha256=38SIGVbmn_VV2x2u1aHLcPOoWgZ84eCymf3g_NljmbU,1626
@@ -37,17 +37,17 @@ dbos/_migrations/versions/d76646551a6c_workflow_queue.py,sha256=G942nophZ2uC2vc4
 dbos/_migrations/versions/eab0cc1d9a14_job_queue.py,sha256=uvhFOtqbBreCePhAxZfIT0qCAI7BiZTou9wt6QnbY7c,1412
 dbos/_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py,sha256=m90Lc5YH0ZISSq1MyxND6oq3RZrZKrIqEsZtwJ1jWxA,1049
 dbos/_outcome.py,sha256=EXxBg4jXCVJsByDQ1VOCIedmbeq_03S6d-p1vqQrLFU,6810
-dbos/_queue.py,sha256=
-dbos/_recovery.py,sha256=
+dbos/_queue.py,sha256=l0g_CXJbxEmftCA9yhy-cyaR_sddfQSCfm-5XgIWzqU,3397
+dbos/_recovery.py,sha256=98Py7icfytyIELJ54gIsdvmURBvTb0HmWaxEAuYL0dc,2546
 dbos/_registrations.py,sha256=_zy6k944Ll8QwqU12Kr3OP23ukVtm8axPNN1TS_kJRc,6717
 dbos/_request.py,sha256=cX1B3Atlh160phgS35gF1VEEV4pD126c9F3BDgBmxZU,929
 dbos/_roles.py,sha256=iOsgmIAf1XVzxs3gYWdGRe1B880YfOw5fpU7Jwx8_A8,2271
-dbos/_scheduler.py,sha256=
+dbos/_scheduler.py,sha256=SR1oRZRcVzYsj-JauV2LA8JtwTkt8mru7qf6H1AzQ1U,2027
 dbos/_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_schemas/application_database.py,sha256=KeyoPrF7hy_ODXV7QNike_VFSD74QBRfQ76D7QyE9HI,966
 dbos/_schemas/system_database.py,sha256=W9eSpL7SZzQkxcEZ4W07BOcwkkDr35b9oCjUOgfHWek,5336
 dbos/_serialization.py,sha256=YCYv0qKAwAZ1djZisBC7khvKqG-5OcIv9t9EC5PFIog,1743
-dbos/_sys_db.py,sha256=
+dbos/_sys_db.py,sha256=VBYVyKqZrwlFbDJ5cFIkeS5WtDOKpkI3lWJbSd5rB2s,65362
 dbos/_templates/dbos-db-starter/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
 dbos/_templates/dbos-db-starter/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_templates/dbos-db-starter/__package/main.py,sha256=nJMN3ZD2lmwg4Dcgmiwqc-tQGuCJuJal2Xl85iA277U,2453
@@ -59,7 +59,7 @@ dbos/_templates/dbos-db-starter/migrations/script.py.mako,sha256=MEqL-2qATlST9TA
 dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py,sha256=MpS7LGaJS0CpvsjhfDkp9EJqvMvVCjRPfUp4c0aE2ys,941
 dbos/_templates/dbos-db-starter/start_postgres_docker.py,sha256=lQVLlYO5YkhGPEgPqwGc7Y8uDKse9HsWv5fynJEFJHM,1681
 dbos/_tracer.py,sha256=dFDSFlta-rfA3-ahIRLYwnnoAOmlavdxAGllqwFgnCA,2440
-dbos/_utils.py,sha256=
+dbos/_utils.py,sha256=nFRUHzVjXG5AusF85AlYHikj63Tzi-kQm992ihsrAxA,201
 dbos/_workflow_commands.py,sha256=SYp2khc9RSf6tjllG9CqT1zjBQnFTFq33ePXpvmRwME,5892
 dbos/cli/_github_init.py,sha256=Y_bDF9gfO2jB1id4FV5h1oIxEJRWyqVjhb7bNEa5nQ0,3224
 dbos/cli/_template_init.py,sha256=-WW3kbq0W_Tq4WbMqb1UGJG3xvJb3woEY5VspG95Srk,2857
@@ -67,4 +67,4 @@ dbos/cli/cli.py,sha256=G55sZJxfmvUGvWr0hoIWwVZBy-fJdpCsTsZmuHT1CjA,16049
 dbos/dbos-config.schema.json,sha256=4z2OXPfp7H0uNT1m5dKxjg31qbAfPyKkFXwHufuUMec,5910
 dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
 version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
-dbos-0.26.
+dbos-0.26.0a5.dist-info/RECORD,,
{dbos-0.26.0a3.dist-info → dbos-0.26.0a5.dist-info}/WHEEL
File without changes
{dbos-0.26.0a3.dist-info → dbos-0.26.0a5.dist-info}/entry_points.txt
File without changes
{dbos-0.26.0a3.dist-info → dbos-0.26.0a5.dist-info}/licenses/LICENSE
File without changes