dbos 0.26.0a3__py3-none-any.whl → 0.26.0a6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dbos/_app_db.py +61 -2
- dbos/_conductor/conductor.py +1 -0
- dbos/_context.py +4 -2
- dbos/_core.py +21 -25
- dbos/_dbos.py +21 -8
- dbos/_kafka.py +1 -1
- dbos/_queue.py +2 -1
- dbos/_recovery.py +1 -1
- dbos/_scheduler.py +1 -1
- dbos/_schemas/application_database.py +1 -0
- dbos/_sys_db.py +58 -7
- dbos/_utils.py +2 -0
- dbos/_workflow_commands.py +9 -3
- dbos/cli/cli.py +4 -1
- {dbos-0.26.0a3.dist-info → dbos-0.26.0a6.dist-info}/METADATA +1 -1
- {dbos-0.26.0a3.dist-info → dbos-0.26.0a6.dist-info}/RECORD +19 -19
- {dbos-0.26.0a3.dist-info → dbos-0.26.0a6.dist-info}/WHEEL +0 -0
- {dbos-0.26.0a3.dist-info → dbos-0.26.0a6.dist-info}/entry_points.txt +0 -0
- {dbos-0.26.0a3.dist-info → dbos-0.26.0a6.dist-info}/licenses/LICENSE +0 -0
dbos/_app_db.py
CHANGED
@@ -1,13 +1,16 @@
-from typing import Optional, TypedDict
+from typing import List, Optional, TypedDict
 
 import sqlalchemy as sa
 import sqlalchemy.dialects.postgresql as pg
+from sqlalchemy import inspect, text
 from sqlalchemy.exc import DBAPIError
 from sqlalchemy.orm import Session, sessionmaker
 
+from . import _serialization
 from ._dbos_config import ConfigFile, DatabaseConfig
 from ._error import DBOSWorkflowConflictIDError
 from ._schemas.application_database import ApplicationSchema
+from ._sys_db import StepInfo
 
 
 class TransactionResultInternal(TypedDict):
@@ -18,6 +21,7 @@ class TransactionResultInternal(TypedDict):
     txn_id: Optional[str]
     txn_snapshot: str
     executor_id: Optional[str]
+    function_name: Optional[str]
 
 
 class RecordedResult(TypedDict):
@@ -87,7 +91,30 @@ class ApplicationDatabase:
                 f"CREATE SCHEMA IF NOT EXISTS {ApplicationSchema.schema}"
             )
            conn.execute(schema_creation_query)
-
+
+        inspector = inspect(self.engine)
+        if not inspector.has_table(
+            "transaction_outputs", schema=ApplicationSchema.schema
+        ):
+            ApplicationSchema.metadata_obj.create_all(self.engine)
+        else:
+            columns = inspector.get_columns(
+                "transaction_outputs", schema=ApplicationSchema.schema
+            )
+            column_names = [col["name"] for col in columns]
+
+            if "function_name" not in column_names:
+                # Column missing, alter table to add it
+                with self.engine.connect() as conn:
+                    conn.execute(
+                        text(
+                            f"""
+                            ALTER TABLE {ApplicationSchema.schema}.transaction_outputs
+                            ADD COLUMN function_name TEXT NOT NULL DEFAULT '';
+                            """
+                        )
+                    )
+                    conn.commit()
 
     def destroy(self) -> None:
         self.engine.dispose()
@@ -108,6 +135,7 @@ class ApplicationDatabase:
                         executor_id=(
                             output["executor_id"] if output["executor_id"] else None
                         ),
+                        function_name=output["function_name"],
                     )
                 )
         except DBAPIError as dbapi_error:
@@ -133,6 +161,7 @@ class ApplicationDatabase:
                        executor_id=(
                            output["executor_id"] if output["executor_id"] else None
                        ),
+                        function_name=output["function_name"],
                    )
                )
         except DBAPIError as dbapi_error:
@@ -160,3 +189,33 @@ class ApplicationDatabase:
             "error": rows[0][1],
         }
         return result
+
+    def get_transactions(self, workflow_uuid: str) -> List[StepInfo]:
+        with self.engine.begin() as conn:
+            rows = conn.execute(
+                sa.select(
+                    ApplicationSchema.transaction_outputs.c.function_id,
+                    ApplicationSchema.transaction_outputs.c.function_name,
+                    ApplicationSchema.transaction_outputs.c.output,
+                    ApplicationSchema.transaction_outputs.c.error,
+                ).where(
+                    ApplicationSchema.transaction_outputs.c.workflow_uuid
+                    == workflow_uuid,
+                )
+            ).all()
+            return [
+                StepInfo(
+                    function_id=row[0],
+                    function_name=row[1],
+                    output=(
+                        _serialization.deserialize(row[2]) if row[2] is not None else row[2]
+                    ),
+                    error=(
+                        _serialization.deserialize_exception(row[3])
+                        if row[3] is not None
+                        else row[3]
+                    ),
+                    child_workflow_id=None,
+                )
+                for row in rows
+            ]
dbos/_conductor/conductor.py
CHANGED
dbos/_context.py
CHANGED
@@ -195,8 +195,10 @@ class DBOSContext:
     def end_handler(self, exc_value: Optional[BaseException]) -> None:
         self._end_span(exc_value)
 
-    def get_current_span(self) -> Span:
-
+    def get_current_span(self) -> Optional[Span]:
+        if len(self.spans):
+            return self.spans[-1]
+        return None
 
     def _start_span(self, attributes: TracedAttributes) -> None:
         attributes["operationUUID"] = (
dbos/_core.py
CHANGED
@@ -398,9 +398,7 @@ async def _execute_workflow_async(
        raise
 
 
-def execute_workflow_by_id(
-    dbos: "DBOS", workflow_id: str, startNew: bool = False
-) -> "WorkflowHandle[Any]":
+def execute_workflow_by_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[Any]":
     status = dbos._sys_db.get_workflow_status(workflow_id)
     if not status:
         raise DBOSRecoveryError(workflow_id, "Workflow status not found")
@@ -441,7 +439,7 @@ def execute_workflow_by_id(
         class_object = dbos._registry.class_info_map[class_name]
         inputs["args"] = (class_object,) + inputs["args"]
 
-
+    with SetWorkflowID(workflow_id):
         return start_workflow(
             dbos,
             wf_func,
@@ -450,16 +448,6 @@ def execute_workflow_by_id(
             *inputs["args"],
             **inputs["kwargs"],
         )
-    else:
-        with SetWorkflowID(workflow_id):
-            return start_workflow(
-                dbos,
-                wf_func,
-                status["queue_name"],
-                True,
-                *inputs["args"],
-                **inputs["kwargs"],
-            )
 
 
 def _get_new_wf() -> tuple[str, DBOSContext]:
@@ -794,6 +782,9 @@ def decorate_transaction(
     dbosreg: "DBOSRegistry", isolation_level: "IsolationLevel" = "SERIALIZABLE"
 ) -> Callable[[F], F]:
     def decorator(func: F) -> F:
+
+        transactionName = func.__qualname__
+
         def invoke_tx(*args: Any, **kwargs: Any) -> Any:
             if dbosreg.dbos is None:
                 raise DBOSException(
@@ -822,6 +813,7 @@ def decorate_transaction(
                 "txn_snapshot": "",  # TODO: add actual snapshot
                 "executor_id": None,
                 "txn_id": None,
+                "function_name": transactionName,
             }
             retry_wait_seconds = 0.001
             backoff_factor = 1.5
@@ -892,10 +884,12 @@ def decorate_transaction(
                     except DBAPIError as dbapi_error:
                         if dbapi_error.orig.sqlstate == "40001":  # type: ignore
                             # Retry on serialization failure
-                            ctx.get_current_span()
-
-
-
+                            span = ctx.get_current_span()
+                            if span:
+                                span.add_event(
+                                    "Transaction Serialization Failure",
+                                    {"retry_wait_seconds": retry_wait_seconds},
+                                )
                             time.sleep(retry_wait_seconds)
                             retry_wait_seconds = min(
                                 retry_wait_seconds * backoff_factor,
@@ -1004,13 +998,15 @@ def decorate_step(
                     f"Step being automatically retried. (attempt {attempt + 1} of {attempts}). {traceback.format_exc()}"
                 )
                 ctx = assert_current_dbos_context()
-                ctx.get_current_span()
-
-
-                "
-
-
-
+                span = ctx.get_current_span()
+                if span:
+                    span.add_event(
+                        f"Step attempt {attempt} failed",
+                        {
+                            "error": str(error),
+                            "retryIntervalSeconds": interval_seconds,
+                        },
+                    )
                 return min(
                     interval_seconds * (backoff_rate**attempt),
                     max_retry_interval_seconds,
dbos/_dbos.py
CHANGED
@@ -33,7 +33,7 @@ from opentelemetry.trace import Span
 
 from dbos import _serialization
 from dbos._conductor.conductor import ConductorWebsocket
-from dbos._utils import GlobalParams
+from dbos._utils import INTERNAL_QUEUE_NAME, GlobalParams
 from dbos._workflow_commands import (
     WorkflowStatus,
     list_queued_workflows,
@@ -234,6 +234,13 @@ class DBOSRegistry:
         hasher.update(source.encode("utf-8"))
         return hasher.hexdigest()
 
+    def get_internal_queue(self) -> Queue:
+        """
+        Get or create the internal queue used for the DBOS scheduler, for Kafka, and for
+        programmatic resuming and restarting of workflows.
+        """
+        return Queue(INTERNAL_QUEUE_NAME)
+
 
 class DBOS:
     """
@@ -489,6 +496,9 @@ class DBOS:
         notification_listener_thread.start()
         self._background_threads.append(notification_listener_thread)
 
+        # Create the internal queue if it has not yet been created
+        self._registry.get_internal_queue()
+
         # Start the queue thread
         evt = threading.Event()
         self.stop_events.append(evt)
@@ -929,11 +939,6 @@ class DBOS:
         """Execute a workflow by ID (for recovery)."""
         return execute_workflow_by_id(_get_dbos_instance(), workflow_id)
 
-    @classmethod
-    def restart_workflow(cls, workflow_id: str) -> None:
-        """Execute a workflow by ID (for recovery)."""
-        execute_workflow_by_id(_get_dbos_instance(), workflow_id, True)
-
     @classmethod
     def recover_pending_workflows(
         cls, executor_ids: List[str] = ["local"]
@@ -954,7 +959,13 @@ class DBOS:
         dbos_logger.info(f"Resuming workflow: {workflow_id}")
         _get_dbos_instance()._sys_db.resume_workflow(workflow_id)
         _get_or_create_dbos_registry().clear_workflow_cancelled(workflow_id)
-        return
+        return cls.retrieve_workflow(workflow_id)
+
+    @classmethod
+    def restart_workflow(cls, workflow_id: str) -> WorkflowHandle[Any]:
+        """Restart a workflow with a new workflow ID"""
+        forked_workflow_id = _get_dbos_instance()._sys_db.fork_workflow(workflow_id)
+        return cls.retrieve_workflow(forked_workflow_id)
 
     @classmethod
     def list_workflows(
@@ -1083,7 +1094,9 @@ class DBOS:
     def span(cls) -> Span:
         """Return the tracing `Span` associated with the current context."""
         ctx = assert_current_dbos_context()
-
+        span = ctx.get_current_span()
+        assert span
+        return span
 
     @classproperty
     def request(cls) -> Optional["Request"]:
dbos/_kafka.py
CHANGED
@@ -115,7 +115,7 @@ def kafka_consumer(
         _in_order_kafka_queues[topic] = queue
     else:
         global _kafka_queue
-        _kafka_queue =
+        _kafka_queue = dbosreg.get_internal_queue()
     stop_event = threading.Event()
     dbosreg.register_poller(
         stop_event, _kafka_consumer_loop, func, config, topics, stop_event, in_order
dbos/_queue.py
CHANGED
@@ -82,7 +82,8 @@ def queue_thread(stop_event: threading.Event, dbos: "DBOS") -> None:
     while not stop_event.is_set():
         if stop_event.wait(timeout=1):
             return
-
+        queues = dict(dbos._registry.queue_info_map)
+        for _, queue in queues.items():
             try:
                 wf_ids = dbos._sys_db.start_queued_workflows(
                     queue, GlobalParams.executor_id, GlobalParams.app_version
dbos/_recovery.py
CHANGED
@@ -17,7 +17,7 @@ if TYPE_CHECKING:
 def _recover_workflow(
     dbos: "DBOS", workflow: GetPendingWorkflowsOutput
 ) -> "WorkflowHandle[Any]":
-    if workflow.queue_name
+    if workflow.queue_name:
         cleared = dbos._sys_db.clear_queue_assignment(workflow.workflow_uuid)
         if cleared:
             return dbos.retrieve_workflow(workflow.workflow_uuid)
dbos/_scheduler.py
CHANGED
@@ -52,7 +52,7 @@ def scheduled(
     )
 
     global scheduler_queue
-    scheduler_queue =
+    scheduler_queue = dbosreg.get_internal_queue()
     stop_event = threading.Event()
     dbosreg.register_poller(stop_event, scheduler_loop, func, cron, stop_event)
     return func
dbos/_sys_db.py
CHANGED
@@ -4,6 +4,7 @@ import os
 import re
 import threading
 import time
+import uuid
 from enum import Enum
 from typing import (
     TYPE_CHECKING,
@@ -25,7 +26,7 @@ from alembic.config import Config
 from sqlalchemy.exc import DBAPIError
 from sqlalchemy.sql import func
 
-from dbos._utils import GlobalParams
+from dbos._utils import INTERNAL_QUEUE_NAME, GlobalParams
 
 from . import _serialization
 from ._context import get_local_dbos_context
@@ -447,13 +448,12 @@ class SystemDatabase:
             )
         )
 
-    def resume_workflow(
-        self,
-        workflow_id: str,
-    ) -> None:
+    def resume_workflow(self, workflow_id: str) -> None:
         if self._debug_mode:
             raise Exception("called resume_workflow in debug mode")
         with self.engine.begin() as c:
+            # Execute with snapshot isolation in case of concurrent calls on the same workflow
+            c.execute(sa.text("SET TRANSACTION ISOLATION LEVEL REPEATABLE READ"))
             # Check the status of the workflow. If it is complete, do nothing.
             row = c.execute(
                 sa.select(
@@ -472,12 +472,63 @@ class SystemDatabase:
                     SystemSchema.workflow_queue.c.workflow_uuid == workflow_id
                 )
             )
-            #
+            # Enqueue the workflow on the internal queue
+            c.execute(
+                pg.insert(SystemSchema.workflow_queue).values(
+                    workflow_uuid=workflow_id,
+                    queue_name=INTERNAL_QUEUE_NAME,
+                )
+            )
+            # Set the workflow's status to ENQUEUED and clear its recovery attempts.
             c.execute(
                 sa.update(SystemSchema.workflow_status)
                 .where(SystemSchema.workflow_status.c.workflow_uuid == workflow_id)
-                .values(status=WorkflowStatusString.
+                .values(status=WorkflowStatusString.ENQUEUED.value, recovery_attempts=0)
+            )
+
+    def fork_workflow(self, original_workflow_id: str) -> str:
+        status = self.get_workflow_status(original_workflow_id)
+        if status is None:
+            raise Exception(f"Workflow {original_workflow_id} not found")
+        inputs = self.get_workflow_inputs(original_workflow_id)
+        if inputs is None:
+            raise Exception(f"Workflow {original_workflow_id} not found")
+        # Generate a random ID for the forked workflow
+        forked_workflow_id = str(uuid.uuid4())
+        with self.engine.begin() as c:
+            # Create an entry for the forked workflow with the same
+            # initial values as the original.
+            c.execute(
+                pg.insert(SystemSchema.workflow_status).values(
+                    workflow_uuid=forked_workflow_id,
+                    status=WorkflowStatusString.ENQUEUED.value,
+                    name=status["name"],
+                    class_name=status["class_name"],
+                    config_name=status["config_name"],
+                    application_version=status["app_version"],
+                    application_id=status["app_id"],
+                    request=status["request"],
+                    authenticated_user=status["authenticated_user"],
+                    authenticated_roles=status["authenticated_roles"],
+                    assumed_role=status["assumed_role"],
+                    queue_name=INTERNAL_QUEUE_NAME,
+                )
+            )
+            # Copy the original workflow's inputs into the forked workflow
+            c.execute(
+                pg.insert(SystemSchema.workflow_inputs).values(
+                    workflow_uuid=forked_workflow_id,
+                    inputs=_serialization.serialize_args(inputs),
+                )
+            )
+            # Enqueue the forked workflow on the internal queue
+            c.execute(
+                pg.insert(SystemSchema.workflow_queue).values(
+                    workflow_uuid=forked_workflow_id,
+                    queue_name=INTERNAL_QUEUE_NAME,
+                )
             )
+        return forked_workflow_id
 
     def get_workflow_status(
         self, workflow_uuid: str
dbos/_utils.py
CHANGED
dbos/_workflow_commands.py
CHANGED
@@ -2,6 +2,7 @@ import json
 from typing import Any, List, Optional
 
 from . import _serialization
+from ._app_db import ApplicationDatabase
 from ._sys_db import (
     GetQueuedWorkflowsInput,
     GetWorkflowsInput,
@@ -170,6 +171,11 @@ def get_workflow(
     return info
 
 
-def list_workflow_steps(
-
-
+def list_workflow_steps(
+    sys_db: SystemDatabase, app_db: ApplicationDatabase, workflow_id: str
+) -> List[StepInfo]:
+    steps = sys_db.get_workflow_steps(workflow_id)
+    transactions = app_db.get_transactions(workflow_id)
+    merged_steps = steps + transactions
+    merged_steps.sort(key=lambda step: step["function_id"])
+    return merged_steps
dbos/cli/cli.py
CHANGED
@@ -350,8 +350,11 @@ def steps(
 ) -> None:
     config = load_config(silent=True)
     sys_db = SystemDatabase(config["database"])
+    app_db = ApplicationDatabase(config["database"])
     print(
-        jsonpickle.encode(
+        jsonpickle.encode(
+            list_workflow_steps(sys_db, app_db, workflow_id), unpicklable=False
+        )
     )
 
 
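The `steps` command now needs both databases. A sketch of the equivalent standalone call, assuming a loadable DBOS config; the workflow ID is a placeholder and the `load_config` import path is an assumption (cli.py imports it directly):

    import jsonpickle
    from dbos._app_db import ApplicationDatabase
    from dbos._dbos_config import load_config  # assumed import path
    from dbos._sys_db import SystemDatabase
    from dbos._workflow_commands import list_workflow_steps

    config = load_config(silent=True)
    sys_db = SystemDatabase(config["database"])
    app_db = ApplicationDatabase(config["database"])
    steps = list_workflow_steps(sys_db, app_db, "my-workflow-id")
    print(jsonpickle.encode(steps, unpicklable=False))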
{dbos-0.26.0a3.dist-info → dbos-0.26.0a6.dist-info}/RECORD
CHANGED
@@ -1,29 +1,29 @@
-dbos-0.26.
-dbos-0.26.
-dbos-0.26.
-dbos-0.26.
+dbos-0.26.0a6.dist-info/METADATA,sha256=euVavzI6bef2LFE9IutUA2sCPrcp0aNIvYMYcttGSQk,5553
+dbos-0.26.0a6.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
+dbos-0.26.0a6.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
+dbos-0.26.0a6.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
 dbos/__init__.py,sha256=3NQfGlBiiUSM_v88STdVP3rNZvGkUL_9WbSotKb8Voo,873
 dbos/__main__.py,sha256=G7Exn-MhGrVJVDbgNlpzhfh8WMX_72t3_oJaFT9Lmt8,653
 dbos/_admin_server.py,sha256=vxPG_YJ6lYrkfPCSp42FiATVLBOij7Fm52Yngg5Z_tE,7027
-dbos/_app_db.py,sha256=
+dbos/_app_db.py,sha256=XdjZgKJMezSVZQJkvxBNa9x4asLURl2O-QxdmLai7wA,8491
 dbos/_classproperty.py,sha256=f0X-_BySzn3yFDRKB2JpCbLYQ9tLwt1XftfshvY7CBs,626
 dbos/_client.py,sha256=fzW_Gagh-oyWyDYtREcQDBesoVl_LsEoMeJAsn5-C5s,7262
 dbos/_cloudutils/authentication.py,sha256=V0fCWQN9stCkhbuuxgPTGpvuQcDqfU3KAxPAh01vKW4,5007
 dbos/_cloudutils/cloudutils.py,sha256=YC7jGsIopT0KveLsqbRpQk2KlRBk-nIRC_UCgep4f3o,7797
 dbos/_cloudutils/databases.py,sha256=_shqaqSvhY4n2ScgQ8IP5PDZvzvcx3YBKV8fj-cxhSY,8543
-dbos/_conductor/conductor.py,sha256=
+dbos/_conductor/conductor.py,sha256=PzUFCX_JXGHClTF-hqTLR0ssO4kXdet4ZwHhJtuevEM,16839
 dbos/_conductor/protocol.py,sha256=xN7pmooyF1pqbH1b6WhllU5718P7zSb_b0KCwA6bzcs,6716
-dbos/_context.py,sha256=
-dbos/_core.py,sha256=
+dbos/_context.py,sha256=I8sLkdKTTkZEz7wG-MjynaQB6XEF2bLXuwNksiauP7w,19430
+dbos/_core.py,sha256=EA9X4lTTTlimN8oa_mFICtl6Ke2biCvPdHl6PABjgGI,45749
 dbos/_croniter.py,sha256=XHAyUyibs_59sJQfSNWkP7rqQY6_XrlfuuCxk4jYqek,47559
 dbos/_db_wizard.py,sha256=VnMa6OL87Lc-XPDD1RnXp8NjsJE8YgiQLj3wtWAXp-8,8252
-dbos/_dbos.py,sha256=
+dbos/_dbos.py,sha256=9oZFA9ZbpMO76sPAa59xhTPik6ZaAEumhKZ7t9s44w0,45726
 dbos/_dbos_config.py,sha256=rTn30Hgh-RzTxqHbnYh2pC3Ioo30eJV9K4YxhJd-Gj4,22718
 dbos/_debug.py,sha256=mmgvLkqlrljMBBow9wk01PPur9kUf2rI_11dTJXY4gw,1822
 dbos/_error.py,sha256=B6Y9XLS1f6yrawxB2uAEYFMxFwk9BHhdxPNddKco-Fw,5399
 dbos/_fastapi.py,sha256=PhaKftbApHnjtYEOw0EYna_3K0cmz__J9of7mRJWzu4,3704
 dbos/_flask.py,sha256=DZKUZR5-xOzPI7tYZ53r2PvvHVoAb8SYwLzMVFsVfjI,2608
-dbos/_kafka.py,sha256=
+dbos/_kafka.py,sha256=pz0xZ9F3X9Ky1k-VSbeF3tfPhP3UPr3lUUhUfE41__U,4198
 dbos/_kafka_message.py,sha256=NYvOXNG3Qn7bghn1pv3fg4Pbs86ILZGcK4IB-MLUNu0,409
 dbos/_logger.py,sha256=qv2srteCF2rSRjCK1VGOck3ieIkwUe9Lvbv60mJc16E,4069
 dbos/_migrations/env.py,sha256=38SIGVbmn_VV2x2u1aHLcPOoWgZ84eCymf3g_NljmbU,1626
@@ -37,17 +37,17 @@ dbos/_migrations/versions/d76646551a6c_workflow_queue.py,sha256=G942nophZ2uC2vc4
 dbos/_migrations/versions/eab0cc1d9a14_job_queue.py,sha256=uvhFOtqbBreCePhAxZfIT0qCAI7BiZTou9wt6QnbY7c,1412
 dbos/_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py,sha256=m90Lc5YH0ZISSq1MyxND6oq3RZrZKrIqEsZtwJ1jWxA,1049
 dbos/_outcome.py,sha256=EXxBg4jXCVJsByDQ1VOCIedmbeq_03S6d-p1vqQrLFU,6810
-dbos/_queue.py,sha256=
-dbos/_recovery.py,sha256=
+dbos/_queue.py,sha256=l0g_CXJbxEmftCA9yhy-cyaR_sddfQSCfm-5XgIWzqU,3397
+dbos/_recovery.py,sha256=98Py7icfytyIELJ54gIsdvmURBvTb0HmWaxEAuYL0dc,2546
 dbos/_registrations.py,sha256=_zy6k944Ll8QwqU12Kr3OP23ukVtm8axPNN1TS_kJRc,6717
 dbos/_request.py,sha256=cX1B3Atlh160phgS35gF1VEEV4pD126c9F3BDgBmxZU,929
 dbos/_roles.py,sha256=iOsgmIAf1XVzxs3gYWdGRe1B880YfOw5fpU7Jwx8_A8,2271
-dbos/_scheduler.py,sha256=
+dbos/_scheduler.py,sha256=SR1oRZRcVzYsj-JauV2LA8JtwTkt8mru7qf6H1AzQ1U,2027
 dbos/_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-dbos/_schemas/application_database.py,sha256=
+dbos/_schemas/application_database.py,sha256=SypAS9l9EsaBHFn9FR8jmnqt01M74d9AF1AMa4m2hhI,1040
 dbos/_schemas/system_database.py,sha256=W9eSpL7SZzQkxcEZ4W07BOcwkkDr35b9oCjUOgfHWek,5336
 dbos/_serialization.py,sha256=YCYv0qKAwAZ1djZisBC7khvKqG-5OcIv9t9EC5PFIog,1743
-dbos/_sys_db.py,sha256=
+dbos/_sys_db.py,sha256=VBYVyKqZrwlFbDJ5cFIkeS5WtDOKpkI3lWJbSd5rB2s,65362
 dbos/_templates/dbos-db-starter/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
 dbos/_templates/dbos-db-starter/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_templates/dbos-db-starter/__package/main.py,sha256=nJMN3ZD2lmwg4Dcgmiwqc-tQGuCJuJal2Xl85iA277U,2453
@@ -59,12 +59,12 @@ dbos/_templates/dbos-db-starter/migrations/script.py.mako,sha256=MEqL-2qATlST9TA
 dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py,sha256=MpS7LGaJS0CpvsjhfDkp9EJqvMvVCjRPfUp4c0aE2ys,941
 dbos/_templates/dbos-db-starter/start_postgres_docker.py,sha256=lQVLlYO5YkhGPEgPqwGc7Y8uDKse9HsWv5fynJEFJHM,1681
 dbos/_tracer.py,sha256=dFDSFlta-rfA3-ahIRLYwnnoAOmlavdxAGllqwFgnCA,2440
-dbos/_utils.py,sha256=
-dbos/_workflow_commands.py,sha256=
+dbos/_utils.py,sha256=nFRUHzVjXG5AusF85AlYHikj63Tzi-kQm992ihsrAxA,201
+dbos/_workflow_commands.py,sha256=w981c3rVvhbhYd6BBP268C0Q88ClmwBwnachBxfnRmU,6129
 dbos/cli/_github_init.py,sha256=Y_bDF9gfO2jB1id4FV5h1oIxEJRWyqVjhb7bNEa5nQ0,3224
 dbos/cli/_template_init.py,sha256=-WW3kbq0W_Tq4WbMqb1UGJG3xvJb3woEY5VspG95Srk,2857
-dbos/cli/cli.py,sha256=
+dbos/cli/cli.py,sha256=FnI5ZAo-kAic-ij5wBqNJ2EJiYoBK1Ot-tTMh1WcXEM,16132
 dbos/dbos-config.schema.json,sha256=4z2OXPfp7H0uNT1m5dKxjg31qbAfPyKkFXwHufuUMec,5910
 dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
 version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
-dbos-0.26.
+dbos-0.26.0a6.dist-info/RECORD,,
{dbos-0.26.0a3.dist-info → dbos-0.26.0a6.dist-info}/WHEEL
File without changes
{dbos-0.26.0a3.dist-info → dbos-0.26.0a6.dist-info}/entry_points.txt
File without changes
{dbos-0.26.0a3.dist-info → dbos-0.26.0a6.dist-info}/licenses/LICENSE
File without changes