dbos 1.10.0a2__py3-none-any.whl → 1.11.0a2__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
- dbos/_app_db.py +13 -4
- dbos/_client.py +82 -1
- dbos/_dbos.py +176 -1
- dbos/_migrations/versions/01ce9f07bd10_streaming.py +42 -0
- dbos/_schemas/system_database.py +17 -0
- dbos/_sys_db.py +129 -0
- dbos/cli/cli.py +19 -29
- dbos/cli/migration.py +91 -0
- {dbos-1.10.0a2.dist-info → dbos-1.11.0a2.dist-info}/METADATA +1 -1
- {dbos-1.10.0a2.dist-info → dbos-1.11.0a2.dist-info}/RECORD +13 -11
- {dbos-1.10.0a2.dist-info → dbos-1.11.0a2.dist-info}/WHEEL +0 -0
- {dbos-1.10.0a2.dist-info → dbos-1.11.0a2.dist-info}/entry_points.txt +0 -0
- {dbos-1.10.0a2.dist-info → dbos-1.11.0a2.dist-info}/licenses/LICENSE +0 -0
dbos/_app_db.py
CHANGED
@@ -74,10 +74,19 @@ class ApplicationDatabase:
 
         # Create the dbos schema and transaction_outputs table in the application database
         with self.engine.begin() as conn:
-
-
-
-
+            # Check if schema exists first
+            schema_exists = conn.execute(
+                sa.text(
+                    "SELECT 1 FROM information_schema.schemata WHERE schema_name = :schema_name"
+                ),
+                parameters={"schema_name": ApplicationSchema.schema},
+            ).scalar()
+
+            if not schema_exists:
+                schema_creation_query = sa.text(
+                    f"CREATE SCHEMA {ApplicationSchema.schema}"
+                )
+                conn.execute(schema_creation_query)
 
         inspector = inspect(self.engine)
         if not inspector.has_table(
dbos/_client.py
CHANGED
@@ -1,7 +1,18 @@
 import asyncio
 import sys
+import time
 import uuid
-from typing import
+from typing import (
+    Any,
+    AsyncGenerator,
+    Generator,
+    Generic,
+    List,
+    Optional,
+    TypedDict,
+    TypeVar,
+    Union,
+)
 
 from dbos._app_db import ApplicationDatabase
 from dbos._context import MaxPriority, MinPriority
@@ -24,6 +35,8 @@ from dbos._sys_db import (
     WorkflowStatus,
     WorkflowStatusInternal,
     WorkflowStatusString,
+    _dbos_stream_closed_sentinel,
+    workflow_is_active,
 )
 from dbos._workflow_commands import (
     fork_workflow,
@@ -449,3 +462,71 @@ class DBOSClient:
             application_version=application_version,
         )
         return WorkflowHandleClientAsyncPolling[Any](forked_workflow_id, self._sys_db)
+
+    def read_stream(self, workflow_id: str, key: str) -> Generator[Any, Any, None]:
+        """
+        Read values from a stream as a generator.
+        This function reads values from a stream identified by the workflow_id and key,
+        yielding each value in order until the stream is closed or the workflow terminates.
+
+        Args:
+            workflow_id: The ID of the workflow that wrote to the stream
+            key: The stream key to read from
+
+        Yields:
+            The values written to the stream in order
+        """
+        offset = 0
+        while True:
+            try:
+                value = self._sys_db.read_stream(workflow_id, key, offset)
+                if value == _dbos_stream_closed_sentinel:
+                    break
+                yield value
+                offset += 1
+            except ValueError:
+                # Poll the offset until a value arrives or the workflow terminates
+                status = get_workflow(self._sys_db, workflow_id)
+                if status is None:
+                    break
+                if not workflow_is_active(status.status):
+                    break
+                time.sleep(1.0)
+                continue
+
+    async def read_stream_async(
+        self, workflow_id: str, key: str
+    ) -> AsyncGenerator[Any, None]:
+        """
+        Read values from a stream as an async generator.
+        This function reads values from a stream identified by the workflow_id and key,
+        yielding each value in order until the stream is closed or the workflow terminates.
+
+        Args:
+            workflow_id: The ID of the workflow that wrote to the stream
+            key: The stream key to read from
+
+        Yields:
+            The values written to the stream in order
+        """
+        offset = 0
+        while True:
+            try:
+                value = await asyncio.to_thread(
+                    self._sys_db.read_stream, workflow_id, key, offset
+                )
+                if value == _dbos_stream_closed_sentinel:
+                    break
+                yield value
+                offset += 1
+            except ValueError:
+                # Poll the offset until a value arrives or the workflow terminates
+                status = await asyncio.to_thread(
+                    get_workflow, self._sys_db, workflow_id
+                )
+                if status is None:
+                    break
+                if not workflow_is_active(status.status):
+                    break
+                await asyncio.sleep(1.0)
+                continue
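These methods let any process, not just the one running the workflow, consume a workflow's stream through a DBOSClient. A hypothetical consumer, where the connection URL, workflow ID, and stream key are all placeholders:

```python
from dbos import DBOSClient

client = DBOSClient("postgresql://user:pass@localhost:5432/my_app_db")  # placeholder URL

# Yields each value in order, polling while the workflow is still active,
# and returns once the stream is closed or the workflow terminates.
for update in client.read_stream("my-workflow-id", "progress"):
    print(update)
```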
dbos/_dbos.py
CHANGED
@@ -6,14 +6,17 @@ import inspect
 import os
 import sys
 import threading
+import time
 import uuid
 from concurrent.futures import ThreadPoolExecutor
 from logging import Logger
 from typing import (
     TYPE_CHECKING,
     Any,
+    AsyncGenerator,
     Callable,
     Coroutine,
+    Generator,
     Generic,
     List,
     Literal,
@@ -62,7 +65,14 @@ from ._registrations import (
 )
 from ._roles import default_required_roles, required_roles
 from ._scheduler import ScheduledWorkflow, scheduled
-from ._sys_db import
+from ._sys_db import (
+    StepInfo,
+    SystemDatabase,
+    WorkflowStatus,
+    _dbos_stream_closed_sentinel,
+    reset_system_database,
+    workflow_is_active,
+)
 from ._tracer import DBOSTracer, dbos_tracer
 
 if TYPE_CHECKING:
@@ -1304,6 +1314,171 @@ class DBOS:
         ctx.authenticated_user = authenticated_user
         ctx.authenticated_roles = authenticated_roles
 
+    @classmethod
+    def write_stream(cls, key: str, value: Any) -> None:
+        """
+        Write a value to a stream.
+
+        Args:
+            key(str): The stream key / name within the workflow
+            value(Any): A serializable value to write to the stream
+
+        """
+        ctx = get_local_dbos_context()
+        if ctx is not None:
+            # Must call it within a workflow
+            if ctx.is_workflow():
+                attributes: TracedAttributes = {
+                    "name": "write_stream",
+                }
+                with EnterDBOSStep(attributes):
+                    ctx = assert_current_dbos_context()
+                    _get_dbos_instance()._sys_db.write_stream_from_workflow(
+                        ctx.workflow_id, ctx.function_id, key, value
+                    )
+            elif ctx.is_step():
+                _get_dbos_instance()._sys_db.write_stream_from_step(
+                    ctx.workflow_id, key, value
+                )
+            else:
+                raise DBOSException(
+                    "write_stream() must be called from within a workflow or step"
+                )
+        else:
+            # Cannot call it from outside of a workflow
+            raise DBOSException(
+                "write_stream() must be called from within a workflow or step"
+            )
+
+    @classmethod
+    def close_stream(cls, key: str) -> None:
+        """
+        Close a stream.
+
+        Args:
+            key(str): The stream key / name within the workflow
+
+        """
+        ctx = get_local_dbos_context()
+        if ctx is not None:
+            # Must call it within a workflow
+            if ctx.is_workflow():
+                attributes: TracedAttributes = {
+                    "name": "close_stream",
+                }
+                with EnterDBOSStep(attributes):
+                    ctx = assert_current_dbos_context()
+                    _get_dbos_instance()._sys_db.close_stream(
+                        ctx.workflow_id, ctx.function_id, key
+                    )
+            else:
+                raise DBOSException(
+                    "close_stream() must be called from within a workflow"
+                )
+        else:
+            # Cannot call it from outside of a workflow
+            raise DBOSException("close_stream() must be called from within a workflow")
+
+    @classmethod
+    def read_stream(cls, workflow_id: str, key: str) -> Generator[Any, Any, None]:
+        """
+        Read values from a stream as a generator.
+
+        This function reads values from a stream identified by the workflow_id and key,
+        yielding each value in order until the stream is closed or the workflow terminates.
+
+        Args:
+            workflow_id(str): The workflow instance ID that owns the stream
+            key(str): The stream key / name within the workflow
+
+        Yields:
+            Any: Each value in the stream until the stream is closed
+
+        """
+        offset = 0
+        sys_db = _get_dbos_instance()._sys_db
+
+        while True:
+            try:
+                value = sys_db.read_stream(workflow_id, key, offset)
+                if value == _dbos_stream_closed_sentinel:
+                    break
+                yield value
+                offset += 1
+            except ValueError:
+                # Poll the offset until a value arrives or the workflow terminates
+                status = cls.retrieve_workflow(workflow_id).get_status().status
+                if not workflow_is_active(status):
+                    break
+                time.sleep(1.0)
+                continue
+
+    @classmethod
+    async def write_stream_async(cls, key: str, value: Any) -> None:
+        """
+        Write a value to a stream asynchronously.
+
+        Args:
+            key(str): The stream key / name within the workflow
+            value(Any): A serializable value to write to the stream
+
+        """
+        await cls._configure_asyncio_thread_pool()
+        await asyncio.to_thread(lambda: DBOS.write_stream(key, value))
+
+    @classmethod
+    async def close_stream_async(cls, key: str) -> None:
+        """
+        Close a stream asynchronously.
+
+        Args:
+            key(str): The stream key / name within the workflow
+
+        """
+        await cls._configure_asyncio_thread_pool()
+        await asyncio.to_thread(lambda: DBOS.close_stream(key))
+
+    @classmethod
+    async def read_stream_async(
+        cls, workflow_id: str, key: str
+    ) -> AsyncGenerator[Any, None]:
+        """
+        Read values from a stream as an async generator.
+
+        This function reads values from a stream identified by the workflow_id and key,
+        yielding each value in order until the stream is closed or the workflow terminates.
+
+        Args:
+            workflow_id(str): The workflow instance ID that owns the stream
+            key(str): The stream key / name within the workflow
+
+        Yields:
+            Any: Each value in the stream until the stream is closed
+
+        """
+        await cls._configure_asyncio_thread_pool()
+        offset = 0
+        sys_db = _get_dbos_instance()._sys_db
+
+        while True:
+            try:
+                value = await asyncio.to_thread(
+                    sys_db.read_stream, workflow_id, key, offset
+                )
+                if value == _dbos_stream_closed_sentinel:
+                    break
+                yield value
+                offset += 1
+            except ValueError:
+                # Poll the offset until a value arrives or the workflow terminates
+                status = (
+                    await (await cls.retrieve_workflow_async(workflow_id)).get_status()
+                ).status
+                if not workflow_is_active(status):
+                    break
+                await asyncio.sleep(1.0)
+                continue
+
     @classproperty
     def tracer(self) -> DBOSTracer:
         """Return the DBOS OpenTelemetry tracer."""
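Together these classmethods form the streaming API inside a DBOS application: a workflow or step appends values with write_stream, the workflow marks the end with close_stream, and any code can replay the stream by workflow ID. A sketch using hypothetical names (reporter, progress):

```python
from dbos import DBOS

@DBOS.workflow()
def reporter() -> None:
    # Each write is appended at the next offset of the "progress" stream.
    for pct in (25, 50, 75, 100):
        DBOS.write_stream("progress", {"percent": pct})
    # Writes the close sentinel so readers know no more values are coming.
    DBOS.close_stream("progress")

def watch(workflow_id: str) -> None:
    # Polls about once a second between values; stops at the sentinel or
    # once the workflow is no longer PENDING or ENQUEUED.
    for update in DBOS.read_stream(workflow_id, "progress"):
        print(update["percent"])
```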
dbos/_migrations/versions/01ce9f07bd10_streaming.py
ADDED
@@ -0,0 +1,42 @@
+"""streaming
+
+Revision ID: 01ce9f07bd10
+Revises: d994145b47b6
+Create Date: 2025-08-05 10:20:46.424975
+
+"""
+
+from typing import Sequence, Union
+
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision: str = "01ce9f07bd10"
+down_revision: Union[str, None] = "d994145b47b6"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    # Create streams table
+    op.create_table(
+        "streams",
+        sa.Column("workflow_uuid", sa.Text(), nullable=False),
+        sa.Column("key", sa.Text(), nullable=False),
+        sa.Column("value", sa.Text(), nullable=False),
+        sa.Column("offset", sa.Integer(), nullable=False),
+        sa.ForeignKeyConstraint(
+            ["workflow_uuid"],
+            ["dbos.workflow_status.workflow_uuid"],
+            onupdate="CASCADE",
+            ondelete="CASCADE",
+        ),
+        sa.PrimaryKeyConstraint("workflow_uuid", "key", "offset"),
+        schema="dbos",
+    )
+
+
+def downgrade() -> None:
+    # Drop streams table
+    op.drop_table("streams", schema="dbos")
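The composite primary key (workflow_uuid, key, offset) makes each stream an append-only log with dense offsets, because writers always insert at max(offset) + 1. A sketch of inspecting the raw table, assuming an engine pointed at a migrated system database (URL and IDs are placeholders):

```python
import sqlalchemy as sa

engine = sa.create_engine("postgresql://localhost/my_app_dbos_sys")  # placeholder

with engine.begin() as conn:
    rows = conn.execute(
        sa.text(
            'SELECT "offset", value FROM dbos.streams '
            'WHERE workflow_uuid = :wf AND key = :key ORDER BY "offset"'
        ),
        {"wf": "my-workflow-id", "key": "progress"},
    ).fetchall()
    # Offsets come back as 0, 1, 2, ... without gaps; for a closed stream
    # the final row holds the serialized close sentinel.
```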
dbos/_schemas/system_database.py
CHANGED
@@ -132,3 +132,20 @@ class SystemSchema:
         Column("value", Text, nullable=False),
         PrimaryKeyConstraint("workflow_uuid", "key"),
     )
+
+    streams = Table(
+        "streams",
+        metadata_obj,
+        Column(
+            "workflow_uuid",
+            Text,
+            ForeignKey(
+                "workflow_status.workflow_uuid", onupdate="CASCADE", ondelete="CASCADE"
+            ),
+            nullable=False,
+        ),
+        Column("key", Text, nullable=False),
+        Column("value", Text, nullable=False),
+        Column("offset", Integer, nullable=False),
+        PrimaryKeyConstraint("workflow_uuid", "key", "offset"),
+    )
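Note that offset is a reserved word in PostgreSQL, so hand-written SQL against this table has to quote it; SQLAlchemy Core quotes it automatically when compiling statements built from this Table object. A small sketch:

```python
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

metadata = sa.MetaData()
streams = sa.Table(
    "streams", metadata, sa.Column("offset", sa.Integer), schema="dbos"
)

# The reserved column name is emitted quoted, roughly:
#   SELECT dbos.streams."offset" FROM dbos.streams
print(sa.select(streams.c.offset).compile(dialect=postgresql.dialect()))
```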
dbos/_sys_db.py
CHANGED
@@ -62,6 +62,13 @@ class WorkflowStatusString(Enum):
     ENQUEUED = "ENQUEUED"
 
 
+def workflow_is_active(status: str) -> bool:
+    return (
+        status == WorkflowStatusString.ENQUEUED.value
+        or status == WorkflowStatusString.PENDING.value
+    )
+
+
 WorkflowStatuses = Literal[
     "PENDING",
     "SUCCESS",
@@ -243,6 +250,7 @@ class StepInfo(TypedDict):
 
 
 _dbos_null_topic = "__null__topic__"
+_dbos_stream_closed_sentinel = "__DBOS_STREAM_CLOSED__"
 
 
 class ConditionCount(TypedDict):
@@ -1882,6 +1890,127 @@ class SystemDatabase:
             dbos_logger.error(f"Error connecting to the DBOS system database: {e}")
             raise
 
+    def write_stream_from_step(self, workflow_uuid: str, key: str, value: Any) -> None:
+        """
+        Write a key-value pair to the stream at the first unused offset.
+        """
+        if self._debug_mode:
+            raise Exception("called write_stream in debug mode")
+
+        with self.engine.begin() as c:
+            # Find the maximum offset for this workflow_uuid and key combination
+            max_offset_result = c.execute(
+                sa.select(sa.func.max(SystemSchema.streams.c.offset)).where(
+                    SystemSchema.streams.c.workflow_uuid == workflow_uuid,
+                    SystemSchema.streams.c.key == key,
+                )
+            ).fetchone()
+
+            # Next offset is max + 1, or 0 if no records exist
+            next_offset = (
+                (max_offset_result[0] + 1)
+                if max_offset_result is not None and max_offset_result[0] is not None
+                else 0
+            )
+
+            # Serialize the value before storing
+            serialized_value = _serialization.serialize(value)
+
+            # Insert the new stream entry
+            c.execute(
+                sa.insert(SystemSchema.streams).values(
+                    workflow_uuid=workflow_uuid,
+                    key=key,
+                    value=serialized_value,
+                    offset=next_offset,
+                )
+            )
+
+    @db_retry()
+    def write_stream_from_workflow(
+        self, workflow_uuid: str, function_id: int, key: str, value: Any
+    ) -> None:
+        """
+        Write a key-value pair to the stream at the first unused offset.
+        """
+        function_name = (
+            "DBOS.closeStream"
+            if value == _dbos_stream_closed_sentinel
+            else "DBOS.writeStream"
+        )
+
+        with self.engine.begin() as c:
+
+            recorded_output = self._check_operation_execution_txn(
+                workflow_uuid, function_id, function_name, conn=c
+            )
+            if self._debug_mode and recorded_output is None:
+                raise Exception(
+                    "called set_event in debug mode without a previous execution"
+                )
+            # Find the maximum offset for this workflow_uuid and key combination
+            max_offset_result = c.execute(
+                sa.select(sa.func.max(SystemSchema.streams.c.offset)).where(
+                    SystemSchema.streams.c.workflow_uuid == workflow_uuid,
+                    SystemSchema.streams.c.key == key,
+                )
+            ).fetchone()
+
+            # Next offset is max + 1, or 0 if no records exist
+            next_offset = (
+                (max_offset_result[0] + 1)
+                if max_offset_result is not None and max_offset_result[0] is not None
+                else 0
+            )
+
+            # Serialize the value before storing
+            serialized_value = _serialization.serialize(value)
+
+            # Insert the new stream entry
+            c.execute(
+                sa.insert(SystemSchema.streams).values(
+                    workflow_uuid=workflow_uuid,
+                    key=key,
+                    value=serialized_value,
+                    offset=next_offset,
+                )
+            )
+            output: OperationResultInternal = {
+                "workflow_uuid": workflow_uuid,
+                "function_id": function_id,
+                "function_name": function_name,
+                "output": None,
+                "error": None,
+            }
+            self._record_operation_result_txn(output, conn=c)
+
+    def close_stream(self, workflow_uuid: str, function_id: int, key: str) -> None:
+        """Write a sentinel value to the stream at the first unused offset to mark it as closed."""
+        self.write_stream_from_workflow(
+            workflow_uuid, function_id, key, _dbos_stream_closed_sentinel
+        )
+
+    @db_retry()
+    def read_stream(self, workflow_uuid: str, key: str, offset: int) -> Any:
+        """Read the value at the specified offset for the given workflow_uuid and key."""
+
+        with self.engine.begin() as c:
+            result = c.execute(
+                sa.select(SystemSchema.streams.c.value).where(
+                    SystemSchema.streams.c.workflow_uuid == workflow_uuid,
+                    SystemSchema.streams.c.key == key,
+                    SystemSchema.streams.c.offset == offset,
+                )
+            ).fetchone()
+
+            if result is None:
+                raise ValueError(
+                    f"No value found for workflow_uuid={workflow_uuid}, key={key}, offset={offset}"
+                )
+
+            # Deserialize the value before returning
+            return _serialization.deserialize(result[0])
+
     def garbage_collect(
         self, cutoff_epoch_timestamp_ms: Optional[int], rows_threshold: Optional[int]
     ) -> Optional[tuple[int, list[str]]]:
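The read contract is worth spelling out: read_stream raises ValueError when no row exists yet at the requested offset, and the higher-level readers in _dbos.py and _client.py treat that as "no value yet" and fall back to polling. Note also that write_stream_from_workflow checkpoints itself with _check_operation_execution_txn and _record_operation_result_txn in the same transaction as the insert, so a recovered workflow does not append duplicates. A minimal sketch of the read contract, assuming an already-constructed SystemDatabase instance sys_db:

```python
from typing import Any, Optional

def try_read(sys_db: Any, workflow_id: str, key: str, offset: int) -> Optional[Any]:
    """Return the value at this offset, or None if nothing is written there yet."""
    try:
        return sys_db.read_stream(workflow_id, key, offset)
    except ValueError:
        return None  # no row at this offset yet; the caller should poll again
```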
dbos/cli/cli.py
CHANGED
@@ -15,6 +15,7 @@ from rich.prompt import IntPrompt
 from typing_extensions import Annotated, List
 
 from dbos._debug import debug_workflow, parse_start_command
+from dbos.cli.migration import grant_dbos_schema_permissions, migrate_dbos_databases
 
 from .._app_db import ApplicationDatabase
 from .._client import DBOSClient
@@ -278,6 +279,14 @@ def migrate(
             help="Your DBOS system database URL",
         ),
     ] = None,
+    application_role: Annotated[
+        typing.Optional[str],
+        typer.Option(
+            "--app-role",
+            "-r",
+            help="The role with which you will run your DBOS application",
+        ),
+    ] = None,
 ) -> None:
     app_database_url = _get_db_url(app_database_url)
     system_database_url = get_system_database_url(
@@ -293,37 +302,18 @@ def migrate(
     typer.echo(f"System database: {sa.make_url(system_database_url)}")
 
     # First, run DBOS migrations on the system database and the application database
-
-
-
-
-
-
-
-
-                "pool_size": 2,
-            },
+    migrate_dbos_databases(
+        app_database_url=app_database_url, system_database_url=system_database_url
+    )
+
+    # Next, assign permissions on the DBOS schema to the application role, if any
+    if application_role:
+        grant_dbos_schema_permissions(
+            database_url=app_database_url, role_name=application_role
         )
-
-            database_url=
-            engine_kwargs={
-                "pool_timeout": 30,
-                "max_overflow": 0,
-                "pool_size": 2,
-            },
+        grant_dbos_schema_permissions(
+            database_url=system_database_url, role_name=application_role
         )
-        sys_db.run_migrations()
-        app_db.run_migrations()
-    except Exception as e:
-        typer.echo(f"DBOS migrations failed: {e}")
-        raise typer.Exit(code=1)
-    finally:
-        if sys_db:
-            sys_db.destroy()
-        if app_db:
-            app_db.destroy()
-
-    typer.echo(f"DBOS migrations successful")
 
     # Next, run any custom migration commands specified in the configuration
     if os.path.exists("dbos-config.yaml"):
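With this option, one command both migrates and prepares a restricted runtime role: for example, `dbos migrate --app-role my_app_role` (role name illustrative) runs the DBOS migrations and then grants the dbos-schema permissions to my_app_role in both the application and system databases, using the helpers in the new dbos/cli/migration.py below.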
dbos/cli/migration.py
ADDED
@@ -0,0 +1,91 @@
+import sqlalchemy as sa
+import typer
+
+from dbos._app_db import ApplicationDatabase
+from dbos._sys_db import SystemDatabase
+
+
+def migrate_dbos_databases(app_database_url: str, system_database_url: str) -> None:
+    app_db = None
+    sys_db = None
+    try:
+        sys_db = SystemDatabase(
+            system_database_url=system_database_url,
+            engine_kwargs={
+                "pool_timeout": 30,
+                "max_overflow": 0,
+                "pool_size": 2,
+            },
+        )
+        app_db = ApplicationDatabase(
+            database_url=app_database_url,
+            engine_kwargs={
+                "pool_timeout": 30,
+                "max_overflow": 0,
+                "pool_size": 2,
+            },
+        )
+        sys_db.run_migrations()
+        app_db.run_migrations()
+    except Exception as e:
+        typer.echo(f"DBOS migrations failed: {e}")
+        raise typer.Exit(code=1)
+    finally:
+        if sys_db:
+            sys_db.destroy()
+        if app_db:
+            app_db.destroy()
+
+
+def grant_dbos_schema_permissions(database_url: str, role_name: str) -> None:
+    """
+    Grant all permissions on all entities in the dbos schema to the specified role.
+    """
+    typer.echo(
+        f"Granting permissions for DBOS schema to {role_name} in database {sa.make_url(database_url)}"
+    )
+    engine = None
+    try:
+        engine = sa.create_engine(database_url)
+        with engine.connect() as connection:
+            connection.execution_options(isolation_level="AUTOCOMMIT")
+
+            # Grant usage on the dbos schema
+            sql = f"GRANT USAGE ON SCHEMA dbos TO {role_name}"
+            typer.echo(sql)
+            connection.execute(sa.text(sql))
+
+            # Grant all privileges on all existing tables in dbos schema (includes views)
+            sql = f"GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA dbos TO {role_name}"
+            typer.echo(sql)
+            connection.execute(sa.text(sql))
+
+            # Grant all privileges on all sequences in dbos schema
+            sql = f"GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA dbos TO {role_name}"
+            typer.echo(sql)
+            connection.execute(sa.text(sql))
+
+            # Grant execute on all functions and procedures in dbos schema
+            sql = f"GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA dbos TO {role_name}"
+            typer.echo(sql)
+            connection.execute(sa.text(sql))
+
+            # Grant default privileges for future objects in dbos schema
+            sql = f"ALTER DEFAULT PRIVILEGES IN SCHEMA dbos GRANT ALL ON TABLES TO {role_name}"
+            typer.echo(sql)
+            connection.execute(sa.text(sql))
+
+            sql = f"ALTER DEFAULT PRIVILEGES IN SCHEMA dbos GRANT ALL ON SEQUENCES TO {role_name}"
+            typer.echo(sql)
+            connection.execute(sa.text(sql))
+
+            sql = f"ALTER DEFAULT PRIVILEGES IN SCHEMA dbos GRANT EXECUTE ON FUNCTIONS TO {role_name}"
+            typer.echo(sql)
+            connection.execute(sa.text(sql))
+
+    except Exception as e:
+        typer.echo(f"Failed to grant permissions to role {role_name}: {e}")
+        raise typer.Exit(code=1)
+    finally:
+        if engine:
+            engine.dispose()
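Both helpers can also be imported directly, giving a hypothetical programmatic equivalent of `dbos migrate --app-role` (URLs and role name are placeholders; on failure these echo an error and raise typer.Exit):

```python
from dbos.cli.migration import grant_dbos_schema_permissions, migrate_dbos_databases

app_url = "postgresql://admin@localhost/my_app_db"  # placeholder
sys_url = "postgresql://admin@localhost/my_app_db_dbos_sys"  # placeholder

# Run the DBOS schema migrations on both databases, ...
migrate_dbos_databases(app_database_url=app_url, system_database_url=sys_url)

# ... then let the runtime role use the dbos schema in each database.
grant_dbos_schema_permissions(database_url=app_url, role_name="my_app_role")
grant_dbos_schema_permissions(database_url=sys_url, role_name="my_app_role")
```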
{dbos-1.10.0a2.dist-info → dbos-1.11.0a2.dist-info}/RECORD
CHANGED
@@ -1,19 +1,19 @@
-dbos-1.
-dbos-1.
-dbos-1.
-dbos-1.
+dbos-1.11.0a2.dist-info/METADATA,sha256=jTYKGbbzmSgXEsQVMTTRai4_6dj_Sf-eMxSNXQScVa4,13268
+dbos-1.11.0a2.dist-info/WHEEL,sha256=9P2ygRxDrTJz3gsagc0Z96ukrxjr-LFBGOgv3AuKlCA,90
+dbos-1.11.0a2.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
+dbos-1.11.0a2.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
 dbos/__init__.py,sha256=NssPCubaBxdiKarOWa-wViz1hdJSkmBGcpLX_gQ4NeA,891
 dbos/__main__.py,sha256=G7Exn-MhGrVJVDbgNlpzhfh8WMX_72t3_oJaFT9Lmt8,653
 dbos/_admin_server.py,sha256=e8ELhcDWqR3_PNobnNgUvLGh5lzZq0yFSF6dvtzoQRI,16267
-dbos/_app_db.py,sha256=
+dbos/_app_db.py,sha256=bUXQqzc0C9PHh4Zl2tHfBrQWNBURdI7F7XXjCpYirmw,10959
 dbos/_classproperty.py,sha256=f0X-_BySzn3yFDRKB2JpCbLYQ9tLwt1XftfshvY7CBs,626
-dbos/_client.py,sha256=
+dbos/_client.py,sha256=_wMe4qnRSwiRZo74xdqTBetbHlIVy3vQifdSd7os1ZY,18213
 dbos/_conductor/conductor.py,sha256=3E_hL3c9g9yWqKZkvI6KA0-ZzPMPRo06TOzT1esMiek,24114
 dbos/_conductor/protocol.py,sha256=q3rgLxINFtWFigdOONc-4gX4vn66UmMlJQD6Kj8LnL4,7420
 dbos/_context.py,sha256=0vFtLAk3WF5BQYIYNFImDRBppKO2CTKOSy51zQC-Cu8,25723
 dbos/_core.py,sha256=TA-UOSO_BhvM6L6j4__dwesK7x5Y93dk6mV1xx0WZBY,49593
 dbos/_croniter.py,sha256=XHAyUyibs_59sJQfSNWkP7rqQY6_XrlfuuCxk4jYqek,47559
-dbos/_dbos.py,sha256=
+dbos/_dbos.py,sha256=h0ZtJNElMB4R2T1320jYD3PXKenn-xCLxnSkIiqpFVg,57386
 dbos/_dbos_config.py,sha256=er8oF3e9zGlEG9KntX7uBSXrDuVvROtkzVidzXjOwUU,21746
 dbos/_debug.py,sha256=99j2SChWmCPAlZoDmjsJGe77tpU2LEa8E2TtLAnnh7o,1831
 dbos/_docker_pg_helper.py,sha256=tLJXWqZ4S-ExcaPnxg_i6cVxL6ZxrYlZjaGsklY-s2I,6115
@@ -26,6 +26,7 @@ dbos/_kafka_message.py,sha256=NYvOXNG3Qn7bghn1pv3fg4Pbs86ILZGcK4IB-MLUNu0,409
 dbos/_logger.py,sha256=Dp6bHZKUtcm5gWwYHj_HA5Wj5OMuJGUrpl2g2i4xDZg,4620
 dbos/_migrations/env.py,sha256=38SIGVbmn_VV2x2u1aHLcPOoWgZ84eCymf3g_NljmbU,1626
 dbos/_migrations/script.py.mako,sha256=MEqL-2qATlST9TAOeYgscMn1uy6HUS9NFvDgl93dMj8,635
+dbos/_migrations/versions/01ce9f07bd10_streaming.py,sha256=5F2tCCXbjP3ZrRFVBwJdaf4FHLlWuhQkMQiYmypfSNM,1123
 dbos/_migrations/versions/04ca4f231047_workflow_queues_executor_id.py,sha256=ICLPl8CN9tQXMsLDsAj8z1TsL831-Z3F8jSBvrR-wyw,736
 dbos/_migrations/versions/27ac6900c6ad_add_queue_dedup.py,sha256=56w1v6TdofW3V18iwm0MP0SAeSaAUPSS40HIcn6qYIE,1072
 dbos/_migrations/versions/50f3227f0b4b_fix_job_queue.py,sha256=ZBYrtTdxy64HxIAlOes89fVIk2P1gNaJack7wuC_epg,873
@@ -47,9 +48,9 @@ dbos/_roles.py,sha256=kCuhhg8XLtrHCgKgm44I0abIRTGHltf88OwjEKAUggk,2317
 dbos/_scheduler.py,sha256=CWeGVfl9h51VXfxt80y5Da_5pE8SPty_AYkfpJkkMxQ,2117
 dbos/_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_schemas/application_database.py,sha256=SypAS9l9EsaBHFn9FR8jmnqt01M74d9AF1AMa4m2hhI,1040
-dbos/_schemas/system_database.py,sha256
+dbos/_schemas/system_database.py,sha256=-dAKk-_Y3vzbpLT4ei-sIrBQgFyQiwPj1enZb1TYc8I,4943
 dbos/_serialization.py,sha256=bWuwhXSQcGmiazvhJHA5gwhrRWxtmFmcCFQSDJnqqkU,3666
-dbos/_sys_db.py,sha256=
+dbos/_sys_db.py,sha256=MWSrGeCUMgctOMxJ3SViswVzC9URUMy7COdI7vRSLv8,86256
 dbos/_templates/dbos-db-starter/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
 dbos/_templates/dbos-db-starter/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_templates/dbos-db-starter/__package/main.py.dbos,sha256=aQnBPSSQpkB8ERfhf7gB7P9tsU6OPKhZscfeh0yiaD8,2702
@@ -65,8 +66,9 @@ dbos/_utils.py,sha256=uywq1QrjMwy17btjxW4bES49povlQwYwYbvKwMT6C2U,1575
 dbos/_workflow_commands.py,sha256=EmmAaQfRWeOZm_WPTznuU-O3he3jiSzzT9VpYrhxugE,4835
 dbos/cli/_github_init.py,sha256=Y_bDF9gfO2jB1id4FV5h1oIxEJRWyqVjhb7bNEa5nQ0,3224
 dbos/cli/_template_init.py,sha256=7JBcpMqP1r2mfCnvWatu33z8ctEGHJarlZYKgB83cXE,2972
-dbos/cli/cli.py,sha256=
+dbos/cli/cli.py,sha256=ey7E-lNFgvUWhsd-mkFwZvTdYorv6hU2zsMOS23n1yQ,22214
+dbos/cli/migration.py,sha256=M65qa5ao1rmZnGtnRpg8wlbZH_aGiWBhAvwgG97jde4,3198
 dbos/dbos-config.schema.json,sha256=CjaspeYmOkx6Ip_pcxtmfXJTn_YGdSx_0pcPBF7KZmo,6060
 dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
 version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
-dbos-1.
+dbos-1.11.0a2.dist-info/RECORD,,
{dbos-1.10.0a2.dist-info → dbos-1.11.0a2.dist-info}/WHEEL
File without changes
{dbos-1.10.0a2.dist-info → dbos-1.11.0a2.dist-info}/entry_points.txt
File without changes
{dbos-1.10.0a2.dist-info → dbos-1.11.0a2.dist-info}/licenses/LICENSE
File without changes