dbos 1.10.0a2__tar.gz → 1.11.0a1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (111)
  1. {dbos-1.10.0a2 → dbos-1.11.0a1}/PKG-INFO +1 -1
  2. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_client.py +82 -1
  3. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_dbos.py +176 -1
  4. dbos-1.11.0a1/dbos/_migrations/versions/01ce9f07bd10_streaming.py +42 -0
  5. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_schemas/system_database.py +17 -0
  6. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_sys_db.py +129 -0
  7. {dbos-1.10.0a2 → dbos-1.11.0a1}/pyproject.toml +1 -1
  8. dbos-1.11.0a1/tests/test_streaming.py +642 -0
  9. {dbos-1.10.0a2 → dbos-1.11.0a1}/LICENSE +0 -0
  10. {dbos-1.10.0a2 → dbos-1.11.0a1}/README.md +0 -0
  11. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/__init__.py +0 -0
  12. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/__main__.py +0 -0
  13. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_admin_server.py +0 -0
  14. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_app_db.py +0 -0
  15. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_classproperty.py +0 -0
  16. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_conductor/conductor.py +0 -0
  17. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_conductor/protocol.py +0 -0
  18. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_context.py +0 -0
  19. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_core.py +0 -0
  20. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_croniter.py +0 -0
  21. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_dbos_config.py +0 -0
  22. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_debug.py +0 -0
  23. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_docker_pg_helper.py +0 -0
  24. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_error.py +0 -0
  25. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_event_loop.py +0 -0
  26. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_fastapi.py +0 -0
  27. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_flask.py +0 -0
  28. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_kafka.py +0 -0
  29. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_kafka_message.py +0 -0
  30. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_logger.py +0 -0
  31. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_migrations/env.py +0 -0
  32. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_migrations/script.py.mako +0 -0
  33. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_migrations/versions/04ca4f231047_workflow_queues_executor_id.py +0 -0
  34. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_migrations/versions/27ac6900c6ad_add_queue_dedup.py +0 -0
  35. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_migrations/versions/50f3227f0b4b_fix_job_queue.py +0 -0
  36. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_migrations/versions/5c361fc04708_added_system_tables.py +0 -0
  37. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_migrations/versions/66478e1b95e5_consolidate_queues.py +0 -0
  38. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_migrations/versions/83f3732ae8e7_workflow_timeout.py +0 -0
  39. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_migrations/versions/933e86bdac6a_add_queue_priority.py +0 -0
  40. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_migrations/versions/a3b18ad34abe_added_triggers.py +0 -0
  41. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_migrations/versions/d76646551a6b_job_queue_limiter.py +0 -0
  42. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_migrations/versions/d76646551a6c_workflow_queue.py +0 -0
  43. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_migrations/versions/d994145b47b6_consolidate_inputs.py +0 -0
  44. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_migrations/versions/eab0cc1d9a14_job_queue.py +0 -0
  45. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py +0 -0
  46. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_outcome.py +0 -0
  47. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_queue.py +0 -0
  48. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_recovery.py +0 -0
  49. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_registrations.py +0 -0
  50. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_roles.py +0 -0
  51. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_scheduler.py +0 -0
  52. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_schemas/__init__.py +0 -0
  53. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_schemas/application_database.py +0 -0
  54. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_serialization.py +0 -0
  55. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_templates/dbos-db-starter/README.md +0 -0
  56. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_templates/dbos-db-starter/__package/__init__.py +0 -0
  57. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_templates/dbos-db-starter/__package/main.py.dbos +0 -0
  58. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_templates/dbos-db-starter/__package/schema.py +0 -0
  59. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_templates/dbos-db-starter/alembic.ini +0 -0
  60. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +0 -0
  61. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_templates/dbos-db-starter/migrations/env.py.dbos +0 -0
  62. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_templates/dbos-db-starter/migrations/script.py.mako +0 -0
  63. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py +0 -0
  64. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -0
  65. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_tracer.py +0 -0
  66. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_utils.py +0 -0
  67. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/_workflow_commands.py +0 -0
  68. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/cli/_github_init.py +0 -0
  69. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/cli/_template_init.py +0 -0
  70. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/cli/cli.py +0 -0
  71. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/dbos-config.schema.json +0 -0
  72. {dbos-1.10.0a2 → dbos-1.11.0a1}/dbos/py.typed +0 -0
  73. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/__init__.py +0 -0
  74. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/atexit_no_ctor.py +0 -0
  75. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/atexit_no_launch.py +0 -0
  76. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/classdefs.py +0 -0
  77. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/client_collateral.py +0 -0
  78. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/client_worker.py +0 -0
  79. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/conftest.py +0 -0
  80. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/dupname_classdefs1.py +0 -0
  81. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/dupname_classdefsa.py +0 -0
  82. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/more_classdefs.py +0 -0
  83. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/queuedworkflow.py +0 -0
  84. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/test_admin_server.py +0 -0
  85. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/test_async.py +0 -0
  86. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/test_async_workflow_management.py +0 -0
  87. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/test_classdecorators.py +0 -0
  88. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/test_cli.py +0 -0
  89. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/test_client.py +0 -0
  90. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/test_concurrency.py +0 -0
  91. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/test_config.py +0 -0
  92. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/test_croniter.py +0 -0
  93. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/test_dbos.py +0 -0
  94. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/test_debug.py +0 -0
  95. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/test_docker_secrets.py +0 -0
  96. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/test_failures.py +0 -0
  97. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/test_fastapi.py +0 -0
  98. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/test_fastapi_roles.py +0 -0
  99. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/test_flask.py +0 -0
  100. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/test_kafka.py +0 -0
  101. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/test_outcome.py +0 -0
  102. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/test_package.py +0 -0
  103. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/test_queue.py +0 -0
  104. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/test_scheduler.py +0 -0
  105. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/test_schema_migration.py +0 -0
  106. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/test_singleton.py +0 -0
  107. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/test_spans.py +0 -0
  108. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/test_sqlalchemy.py +0 -0
  109. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/test_workflow_introspection.py +0 -0
  110. {dbos-1.10.0a2 → dbos-1.11.0a1}/tests/test_workflow_management.py +0 -0
  111. {dbos-1.10.0a2 → dbos-1.11.0a1}/version/__init__.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: dbos
- Version: 1.10.0a2
+ Version: 1.11.0a1
  Summary: Ultra-lightweight durable execution in Python
  Author-Email: "DBOS, Inc." <contact@dbos.dev>
  License: MIT
@@ -1,7 +1,18 @@
  import asyncio
  import sys
+ import time
  import uuid
- from typing import Any, Generic, List, Optional, TypedDict, TypeVar, Union
+ from typing import (
+     Any,
+     AsyncGenerator,
+     Generator,
+     Generic,
+     List,
+     Optional,
+     TypedDict,
+     TypeVar,
+     Union,
+ )

  from dbos._app_db import ApplicationDatabase
  from dbos._context import MaxPriority, MinPriority
@@ -24,6 +35,8 @@ from dbos._sys_db import (
      WorkflowStatus,
      WorkflowStatusInternal,
      WorkflowStatusString,
+     _dbos_stream_closed_sentinel,
+     workflow_is_active,
  )
  from dbos._workflow_commands import (
      fork_workflow,
@@ -449,3 +462,71 @@ class DBOSClient:
              application_version=application_version,
          )
          return WorkflowHandleClientAsyncPolling[Any](forked_workflow_id, self._sys_db)
+
+     def read_stream(self, workflow_id: str, key: str) -> Generator[Any, Any, None]:
+         """
+         Read values from a stream as a generator.
+         This function reads values from a stream identified by the workflow_id and key,
+         yielding each value in order until the stream is closed or the workflow terminates.
+
+         Args:
+             workflow_id: The ID of the workflow that wrote to the stream
+             key: The stream key to read from
+
+         Yields:
+             The values written to the stream in order
+         """
+         offset = 0
+         while True:
+             try:
+                 value = self._sys_db.read_stream(workflow_id, key, offset)
+                 if value == _dbos_stream_closed_sentinel:
+                     break
+                 yield value
+                 offset += 1
+             except ValueError:
+                 # Poll the offset until a value arrives or the workflow terminates
+                 status = get_workflow(self._sys_db, workflow_id)
+                 if status is None:
+                     break
+                 if not workflow_is_active(status.status):
+                     break
+                 time.sleep(1.0)
+                 continue
+
+     async def read_stream_async(
+         self, workflow_id: str, key: str
+     ) -> AsyncGenerator[Any, None]:
+         """
+         Read values from a stream as an async generator.
+         This function reads values from a stream identified by the workflow_id and key,
+         yielding each value in order until the stream is closed or the workflow terminates.
+
+         Args:
+             workflow_id: The ID of the workflow that wrote to the stream
+             key: The stream key to read from
+
+         Yields:
+             The values written to the stream in order
+         """
+         offset = 0
+         while True:
+             try:
+                 value = await asyncio.to_thread(
+                     self._sys_db.read_stream, workflow_id, key, offset
+                 )
+                 if value == _dbos_stream_closed_sentinel:
+                     break
+                 yield value
+                 offset += 1
+             except ValueError:
+                 # Poll the offset until a value arrives or the workflow terminates
+                 status = await asyncio.to_thread(
+                     get_workflow, self._sys_db, workflow_id
+                 )
+                 if status is None:
+                     break
+                 if not workflow_is_active(status.status):
+                     break
+                 await asyncio.sleep(1.0)
+                 continue
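
For context, here is a minimal consumption sketch (not part of the release): it assumes a DBOSClient constructed with the application's system database URL, and the URL, workflow ID, and key names below are illustrative placeholders.

    # Sketch: consume a workflow's stream from a separate process.
    # The URL, "wf-123", and "progress" are placeholders, not release values.
    from dbos import DBOSClient

    client = DBOSClient("postgresql://localhost:5432/dbos_system")
    for item in client.read_stream("wf-123", "progress"):
        print(item)  # values arrive in offset order; the loop ends once the
                     # stream is closed or the workflow terminates

The async variant reads the same way with "async for" over client.read_stream_async("wf-123", "progress").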
@@ -6,14 +6,17 @@ import inspect
  import os
  import sys
  import threading
+ import time
  import uuid
  from concurrent.futures import ThreadPoolExecutor
  from logging import Logger
  from typing import (
      TYPE_CHECKING,
      Any,
+     AsyncGenerator,
      Callable,
      Coroutine,
+     Generator,
      Generic,
      List,
      Literal,
@@ -62,7 +65,14 @@ from ._registrations import (
  )
  from ._roles import default_required_roles, required_roles
  from ._scheduler import ScheduledWorkflow, scheduled
- from ._sys_db import StepInfo, SystemDatabase, WorkflowStatus, reset_system_database
+ from ._sys_db import (
+     StepInfo,
+     SystemDatabase,
+     WorkflowStatus,
+     _dbos_stream_closed_sentinel,
+     reset_system_database,
+     workflow_is_active,
+ )
  from ._tracer import DBOSTracer, dbos_tracer

  if TYPE_CHECKING:
@@ -1304,6 +1314,171 @@ class DBOS:
          ctx.authenticated_user = authenticated_user
          ctx.authenticated_roles = authenticated_roles

+     @classmethod
+     def write_stream(cls, key: str, value: Any) -> None:
+         """
+         Write a value to a stream.
+
+         Args:
+             key(str): The stream key / name within the workflow
+             value(Any): A serializable value to write to the stream
+
+         """
+         ctx = get_local_dbos_context()
+         if ctx is not None:
+             # Must call it within a workflow
+             if ctx.is_workflow():
+                 attributes: TracedAttributes = {
+                     "name": "write_stream",
+                 }
+                 with EnterDBOSStep(attributes):
+                     ctx = assert_current_dbos_context()
+                     _get_dbos_instance()._sys_db.write_stream_from_workflow(
+                         ctx.workflow_id, ctx.function_id, key, value
+                     )
+             elif ctx.is_step():
+                 _get_dbos_instance()._sys_db.write_stream_from_step(
+                     ctx.workflow_id, key, value
+                 )
+             else:
+                 raise DBOSException(
+                     "write_stream() must be called from within a workflow or step"
+                 )
+         else:
+             # Cannot call it from outside of a workflow
+             raise DBOSException(
+                 "write_stream() must be called from within a workflow or step"
+             )
+
+     @classmethod
+     def close_stream(cls, key: str) -> None:
+         """
+         Close a stream.
+
+         Args:
+             key(str): The stream key / name within the workflow
+
+         """
+         ctx = get_local_dbos_context()
+         if ctx is not None:
+             # Must call it within a workflow
+             if ctx.is_workflow():
+                 attributes: TracedAttributes = {
+                     "name": "close_stream",
+                 }
+                 with EnterDBOSStep(attributes):
+                     ctx = assert_current_dbos_context()
+                     _get_dbos_instance()._sys_db.close_stream(
+                         ctx.workflow_id, ctx.function_id, key
+                     )
+             else:
+                 raise DBOSException(
+                     "close_stream() must be called from within a workflow"
+                 )
+         else:
+             # Cannot call it from outside of a workflow
+             raise DBOSException("close_stream() must be called from within a workflow")
+
+     @classmethod
+     def read_stream(cls, workflow_id: str, key: str) -> Generator[Any, Any, None]:
+         """
+         Read values from a stream as a generator.
+
+         This function reads values from a stream identified by the workflow_id and key,
+         yielding each value in order until the stream is closed or the workflow terminates.
+
+         Args:
+             workflow_id(str): The workflow instance ID that owns the stream
+             key(str): The stream key / name within the workflow
+
+         Yields:
+             Any: Each value in the stream until the stream is closed
+
+         """
+         offset = 0
+         sys_db = _get_dbos_instance()._sys_db
+
+         while True:
+             try:
+                 value = sys_db.read_stream(workflow_id, key, offset)
+                 if value == _dbos_stream_closed_sentinel:
+                     break
+                 yield value
+                 offset += 1
+             except ValueError:
+                 # Poll the offset until a value arrives or the workflow terminates
+                 status = cls.retrieve_workflow(workflow_id).get_status().status
+                 if not workflow_is_active(status):
+                     break
+                 time.sleep(1.0)
+                 continue
+
+     @classmethod
+     async def write_stream_async(cls, key: str, value: Any) -> None:
+         """
+         Write a value to a stream asynchronously.
+
+         Args:
+             key(str): The stream key / name within the workflow
+             value(Any): A serializable value to write to the stream
+
+         """
+         await cls._configure_asyncio_thread_pool()
+         await asyncio.to_thread(lambda: DBOS.write_stream(key, value))
+
+     @classmethod
+     async def close_stream_async(cls, key: str) -> None:
+         """
+         Close a stream asynchronously.
+
+         Args:
+             key(str): The stream key / name within the workflow
+
+         """
+         await cls._configure_asyncio_thread_pool()
+         await asyncio.to_thread(lambda: DBOS.close_stream(key))
+
+     @classmethod
+     async def read_stream_async(
+         cls, workflow_id: str, key: str
+     ) -> AsyncGenerator[Any, None]:
+         """
+         Read values from a stream as an async generator.
+
+         This function reads values from a stream identified by the workflow_id and key,
+         yielding each value in order until the stream is closed or the workflow terminates.
+
+         Args:
+             workflow_id(str): The workflow instance ID that owns the stream
+             key(str): The stream key / name within the workflow
+
+         Yields:
+             Any: Each value in the stream until the stream is closed
+
+         """
+         await cls._configure_asyncio_thread_pool()
+         offset = 0
+         sys_db = _get_dbos_instance()._sys_db
+
+         while True:
+             try:
+                 value = await asyncio.to_thread(
+                     sys_db.read_stream, workflow_id, key, offset
+                 )
+                 if value == _dbos_stream_closed_sentinel:
+                     break
+                 yield value
+                 offset += 1
+             except ValueError:
+                 # Poll the offset until a value arrives or the workflow terminates
+                 status = (
+                     await (await cls.retrieve_workflow_async(workflow_id)).get_status()
+                 ).status
+                 if not workflow_is_active(status):
+                     break
+                 await asyncio.sleep(1.0)
+                 continue
+
      @classproperty
      def tracer(self) -> DBOSTracer:
          """Return the DBOS OpenTelemetry tracer."""
@@ -0,0 +1,42 @@
+ """streaming
+
+ Revision ID: 01ce9f07bd10
+ Revises: d994145b47b6
+ Create Date: 2025-08-05 10:20:46.424975
+
+ """
+
+ from typing import Sequence, Union
+
+ import sqlalchemy as sa
+ from alembic import op
+
+ # revision identifiers, used by Alembic.
+ revision: str = "01ce9f07bd10"
+ down_revision: Union[str, None] = "d994145b47b6"
+ branch_labels: Union[str, Sequence[str], None] = None
+ depends_on: Union[str, Sequence[str], None] = None
+
+
+ def upgrade() -> None:
+     # Create streams table
+     op.create_table(
+         "streams",
+         sa.Column("workflow_uuid", sa.Text(), nullable=False),
+         sa.Column("key", sa.Text(), nullable=False),
+         sa.Column("value", sa.Text(), nullable=False),
+         sa.Column("offset", sa.Integer(), nullable=False),
+         sa.ForeignKeyConstraint(
+             ["workflow_uuid"],
+             ["dbos.workflow_status.workflow_uuid"],
+             onupdate="CASCADE",
+             ondelete="CASCADE",
+         ),
+         sa.PrimaryKeyConstraint("workflow_uuid", "key", "offset"),
+         schema="dbos",
+     )
+
+
+ def downgrade() -> None:
+     # Drop streams table
+     op.drop_table("streams", schema="dbos")
@@ -132,3 +132,20 @@ class SystemSchema:
          Column("value", Text, nullable=False),
          PrimaryKeyConstraint("workflow_uuid", "key"),
      )
+
+     streams = Table(
+         "streams",
+         metadata_obj,
+         Column(
+             "workflow_uuid",
+             Text,
+             ForeignKey(
+                 "workflow_status.workflow_uuid", onupdate="CASCADE", ondelete="CASCADE"
+             ),
+             nullable=False,
+         ),
+         Column("key", Text, nullable=False),
+         Column("value", Text, nullable=False),
+         Column("offset", Integer, nullable=False),
+         PrimaryKeyConstraint("workflow_uuid", "key", "offset"),
+     )
@@ -62,6 +62,13 @@ class WorkflowStatusString(Enum):
      ENQUEUED = "ENQUEUED"


+ def workflow_is_active(status: str) -> bool:
+     return (
+         status == WorkflowStatusString.ENQUEUED.value
+         or status == WorkflowStatusString.PENDING.value
+     )
+
+
  WorkflowStatuses = Literal[
      "PENDING",
      "SUCCESS",
@@ -243,6 +250,7 @@ class StepInfo(TypedDict):


  _dbos_null_topic = "__null__topic__"
+ _dbos_stream_closed_sentinel = "__DBOS_STREAM_CLOSED__"


  class ConditionCount(TypedDict):
@@ -1882,6 +1890,127 @@ class SystemDatabase:
              dbos_logger.error(f"Error connecting to the DBOS system database: {e}")
              raise

+     def write_stream_from_step(self, workflow_uuid: str, key: str, value: Any) -> None:
+         """
+         Write a key-value pair to the stream at the first unused offset.
+         """
+         if self._debug_mode:
+             raise Exception("called write_stream in debug mode")
+
+         with self.engine.begin() as c:
+             # Find the maximum offset for this workflow_uuid and key combination
+             max_offset_result = c.execute(
+                 sa.select(sa.func.max(SystemSchema.streams.c.offset)).where(
+                     SystemSchema.streams.c.workflow_uuid == workflow_uuid,
+                     SystemSchema.streams.c.key == key,
+                 )
+             ).fetchone()
+
+             # Next offset is max + 1, or 0 if no records exist
+             next_offset = (
+                 (max_offset_result[0] + 1)
+                 if max_offset_result is not None and max_offset_result[0] is not None
+                 else 0
+             )
+
+             # Serialize the value before storing
+             serialized_value = _serialization.serialize(value)
+
+             # Insert the new stream entry
+             c.execute(
+                 sa.insert(SystemSchema.streams).values(
+                     workflow_uuid=workflow_uuid,
+                     key=key,
+                     value=serialized_value,
+                     offset=next_offset,
+                 )
+             )
+
+     @db_retry()
+     def write_stream_from_workflow(
+         self, workflow_uuid: str, function_id: int, key: str, value: Any
+     ) -> None:
+         """
+         Write a key-value pair to the stream at the first unused offset.
+         """
+         function_name = (
+             "DBOS.closeStream"
+             if value == _dbos_stream_closed_sentinel
+             else "DBOS.writeStream"
+         )
+
+         with self.engine.begin() as c:
+
+             recorded_output = self._check_operation_execution_txn(
+                 workflow_uuid, function_id, function_name, conn=c
+             )
+             if self._debug_mode and recorded_output is None:
+                 raise Exception(
+                     "called set_event in debug mode without a previous execution"
+                 )
+             # Find the maximum offset for this workflow_uuid and key combination
+             max_offset_result = c.execute(
+                 sa.select(sa.func.max(SystemSchema.streams.c.offset)).where(
+                     SystemSchema.streams.c.workflow_uuid == workflow_uuid,
+                     SystemSchema.streams.c.key == key,
+                 )
+             ).fetchone()
+
+             # Next offset is max + 1, or 0 if no records exist
+             next_offset = (
+                 (max_offset_result[0] + 1)
+                 if max_offset_result is not None and max_offset_result[0] is not None
+                 else 0
+             )
+
+             # Serialize the value before storing
+             serialized_value = _serialization.serialize(value)
+
+             # Insert the new stream entry
+             c.execute(
+                 sa.insert(SystemSchema.streams).values(
+                     workflow_uuid=workflow_uuid,
+                     key=key,
+                     value=serialized_value,
+                     offset=next_offset,
+                 )
+             )
+             output: OperationResultInternal = {
+                 "workflow_uuid": workflow_uuid,
+                 "function_id": function_id,
+                 "function_name": function_name,
+                 "output": None,
+                 "error": None,
+             }
+             self._record_operation_result_txn(output, conn=c)
+
+     def close_stream(self, workflow_uuid: str, function_id: int, key: str) -> None:
+         """Write a sentinel value to the stream at the first unused offset to mark it as closed."""
+         self.write_stream_from_workflow(
+             workflow_uuid, function_id, key, _dbos_stream_closed_sentinel
+         )
+
+     @db_retry()
+     def read_stream(self, workflow_uuid: str, key: str, offset: int) -> Any:
+         """Read the value at the specified offset for the given workflow_uuid and key."""
+
+         with self.engine.begin() as c:
+             result = c.execute(
+                 sa.select(SystemSchema.streams.c.value).where(
+                     SystemSchema.streams.c.workflow_uuid == workflow_uuid,
+                     SystemSchema.streams.c.key == key,
+                     SystemSchema.streams.c.offset == offset,
+                 )
+             ).fetchone()
+
+             if result is None:
+                 raise ValueError(
+                     f"No value found for workflow_uuid={workflow_uuid}, key={key}, offset={offset}"
+                 )
+
+             # Deserialize the value before returning
+             return _serialization.deserialize(result[0])
+
      def garbage_collect(
          self, cutoff_epoch_timestamp_ms: Optional[int], rows_threshold: Optional[int]
      ) -> Optional[tuple[int, list[str]]]:
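
The storage contract in the hunk above: read_stream returns the deserialized value at an offset, raises ValueError while no row exists there yet, and returns the sentinel once close_stream has written it. A hedged, non-blocking helper built on just that contract (direct access to a SystemDatabase instance is assumed for illustration; in practice DBOS manages this object itself):

    # Sketch only: exercises the SystemDatabase streaming primitives directly.
    from typing import Any, List

    from dbos._sys_db import SystemDatabase, _dbos_stream_closed_sentinel

    def drain_stream(sys_db: SystemDatabase, workflow_id: str, key: str) -> List[Any]:
        """Collect every value currently buffered in a stream, without waiting."""
        values: List[Any] = []
        offset = 0
        while True:
            try:
                value = sys_db.read_stream(workflow_id, key, offset)
            except ValueError:
                break  # nothing at this offset yet: the stream is still open
            if value == _dbos_stream_closed_sentinel:
                break  # the writer closed the stream
            values.append(value)
            offset += 1
        return values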
@@ -27,7 +27,7 @@ dependencies = [
  ]
  requires-python = ">=3.9"
  readme = "README.md"
- version = "1.10.0a2"
+ version = "1.11.0a1"

  [project.license]
  text = "MIT"