dbos 1.13.1.tar.gz → 1.14.0.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {dbos-1.13.1 → dbos-1.14.0}/PKG-INFO +1 -1
- {dbos-1.13.1 → dbos-1.14.0}/dbos/__init__.py +3 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_client.py +17 -9
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_context.py +15 -2
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_core.py +45 -11
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_dbos.py +9 -6
- dbos-1.14.0/dbos/_debouncer.py +395 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_logger.py +1 -1
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_outcome.py +67 -13
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_serialization.py +7 -2
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_sys_db.py +41 -3
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_tracer.py +7 -0
- {dbos-1.13.1 → dbos-1.14.0}/pyproject.toml +2 -1
- {dbos-1.13.1 → dbos-1.14.0}/tests/test_async.py +90 -4
- {dbos-1.13.1 → dbos-1.14.0}/tests/test_async_workflow_management.py +1 -1
- {dbos-1.13.1 → dbos-1.14.0}/tests/test_dbos.py +59 -11
- dbos-1.14.0/tests/test_debouncer.py +318 -0
- {dbos-1.13.1 → dbos-1.14.0}/tests/test_queue.py +38 -1
- {dbos-1.13.1 → dbos-1.14.0}/tests/test_spans.py +128 -34
- {dbos-1.13.1 → dbos-1.14.0}/tests/test_streaming.py +15 -7
- {dbos-1.13.1 → dbos-1.14.0}/tests/test_workflow_introspection.py +4 -4
- {dbos-1.13.1 → dbos-1.14.0}/LICENSE +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/README.md +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/__main__.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_admin_server.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_alembic_migrations/env.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_alembic_migrations/script.py.mako +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_alembic_migrations/versions/01ce9f07bd10_streaming.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_alembic_migrations/versions/04ca4f231047_workflow_queues_executor_id.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_alembic_migrations/versions/27ac6900c6ad_add_queue_dedup.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_alembic_migrations/versions/471b60d64126_dbos_migrations.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_alembic_migrations/versions/50f3227f0b4b_fix_job_queue.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_alembic_migrations/versions/5c361fc04708_added_system_tables.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_alembic_migrations/versions/66478e1b95e5_consolidate_queues.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_alembic_migrations/versions/83f3732ae8e7_workflow_timeout.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_alembic_migrations/versions/933e86bdac6a_add_queue_priority.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_alembic_migrations/versions/a3b18ad34abe_added_triggers.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_alembic_migrations/versions/d76646551a6b_job_queue_limiter.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_alembic_migrations/versions/d76646551a6c_workflow_queue.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_alembic_migrations/versions/d994145b47b6_consolidate_inputs.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_alembic_migrations/versions/eab0cc1d9a14_job_queue.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_alembic_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_app_db.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_classproperty.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_conductor/conductor.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_conductor/protocol.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_croniter.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_dbos_config.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_debug.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_docker_pg_helper.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_error.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_event_loop.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_fastapi.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_flask.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_kafka.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_kafka_message.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_migration.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_queue.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_recovery.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_registrations.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_roles.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_scheduler.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_schemas/__init__.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_schemas/application_database.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_schemas/system_database.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_sys_db_postgres.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_sys_db_sqlite.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_templates/dbos-db-starter/README.md +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_templates/dbos-db-starter/__package/__init__.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_templates/dbos-db-starter/__package/main.py.dbos +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_templates/dbos-db-starter/__package/schema.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_templates/dbos-db-starter/alembic.ini +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_templates/dbos-db-starter/migrations/env.py.dbos +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_templates/dbos-db-starter/migrations/script.py.mako +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_utils.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/_workflow_commands.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/cli/_github_init.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/cli/_template_init.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/cli/cli.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/cli/migration.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/dbos-config.schema.json +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/dbos/py.typed +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/tests/__init__.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/tests/atexit_no_ctor.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/tests/atexit_no_launch.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/tests/classdefs.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/tests/client_collateral.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/tests/client_worker.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/tests/conftest.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/tests/dupname_classdefs1.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/tests/dupname_classdefsa.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/tests/more_classdefs.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/tests/queuedworkflow.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/tests/test_admin_server.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/tests/test_classdecorators.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/tests/test_cli.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/tests/test_client.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/tests/test_concurrency.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/tests/test_config.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/tests/test_croniter.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/tests/test_debug.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/tests/test_docker_secrets.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/tests/test_failures.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/tests/test_fastapi.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/tests/test_fastapi_roles.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/tests/test_flask.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/tests/test_kafka.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/tests/test_outcome.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/tests/test_package.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/tests/test_scheduler.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/tests/test_schema_migration.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/tests/test_singleton.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/tests/test_sqlalchemy.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/tests/test_workflow_management.py +0 -0
- {dbos-1.13.1 → dbos-1.14.0}/version/__init__.py +0 -0
{dbos-1.13.1 → dbos-1.14.0}/dbos/__init__.py

@@ -9,6 +9,7 @@ from ._context import (
 )
 from ._dbos import DBOS, DBOSConfiguredInstance, WorkflowHandle, WorkflowHandleAsync
 from ._dbos_config import DBOSConfig
+from ._debouncer import Debouncer, DebouncerClient
 from ._kafka_message import KafkaMessage
 from ._queue import Queue
 from ._sys_db import GetWorkflowsInput, WorkflowStatus, WorkflowStatusString
@@ -32,4 +33,6 @@ __all__ = [
     "WorkflowStatusString",
     "error",
     "Queue",
+    "Debouncer",
+    "DebouncerClient",
 ]
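Note: the new dbos/_debouncer.py module (+395 lines) is not rendered in this diff, only its exports above. As a rough, generic illustration of what debouncing means, here is a minimal sketch in plain Python; it is not the dbos Debouncer/DebouncerClient API, and SimpleDebouncer, fn, and wait_seconds are illustrative names only.

    import threading
    from typing import Any, Callable, Optional

    class SimpleDebouncer:
        """Delay invoking fn until wait_seconds pass without another call."""

        def __init__(self, fn: Callable[..., Any], wait_seconds: float) -> None:
            self.fn = fn
            self.wait_seconds = wait_seconds
            self._timer: Optional[threading.Timer] = None
            self._lock = threading.Lock()

        def call(self, *args: Any, **kwargs: Any) -> None:
            with self._lock:
                if self._timer is not None:
                    self._timer.cancel()  # drop the previously scheduled run
                self._timer = threading.Timer(self.wait_seconds, self.fn, args, kwargs)
                self._timer.start()

    # Usage: rapid successive calls collapse into a single delayed invocation.
    debounced = SimpleDebouncer(print, wait_seconds=0.5)
    for i in range(3):
        debounced.call("only the last call runs:", i)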
{dbos-1.13.1 → dbos-1.14.0}/dbos/_client.py

@@ -3,6 +3,7 @@ import sys
 import time
 import uuid
 from typing import (
+    TYPE_CHECKING,
     Any,
     AsyncGenerator,
     Generator,
@@ -24,7 +25,10 @@ else:
     from typing import NotRequired

 from dbos import _serialization
-
+
+if TYPE_CHECKING:
+    from dbos._dbos import WorkflowHandle, WorkflowHandleAsync
+
 from dbos._dbos_config import (
     get_application_database_url,
     get_system_database_url,
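The change above defers the WorkflowHandle/WorkflowHandleAsync import to type-checking time and quotes the annotations that use them, so dbos/_client.py no longer needs dbos._dbos at import time. A minimal, self-contained sketch of the same idiom, using decimal.Decimal as a stand-in for the deferred type (not dbos code):

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Evaluated only by static type checkers (mypy, pyright), never at
        # runtime, which is how this pattern breaks potential import cycles.
        from decimal import Decimal

    def as_money(value: str) -> "Decimal":
        # The quoted annotation is resolved lazily, so a runtime import at
        # call time (or none at all, if only annotated) is sufficient.
        from decimal import Decimal
        return Decimal(value)

    print(as_money("19.99"))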
@@ -224,23 +228,25 @@ class DBOSClient:

     def enqueue(
         self, options: EnqueueOptions, *args: Any, **kwargs: Any
-    ) -> WorkflowHandle[R]:
+    ) -> "WorkflowHandle[R]":
         workflow_id = self._enqueue(options, *args, **kwargs)
         return WorkflowHandleClientPolling[R](workflow_id, self._sys_db)

     async def enqueue_async(
         self, options: EnqueueOptions, *args: Any, **kwargs: Any
-    ) -> WorkflowHandleAsync[R]:
+    ) -> "WorkflowHandleAsync[R]":
         workflow_id = await asyncio.to_thread(self._enqueue, options, *args, **kwargs)
         return WorkflowHandleClientAsyncPolling[R](workflow_id, self._sys_db)

-    def retrieve_workflow(self, workflow_id: str) -> WorkflowHandle[R]:
+    def retrieve_workflow(self, workflow_id: str) -> "WorkflowHandle[R]":
         status = get_workflow(self._sys_db, workflow_id)
         if status is None:
             raise DBOSNonExistentWorkflowError(workflow_id)
         return WorkflowHandleClientPolling[R](workflow_id, self._sys_db)

-    async def retrieve_workflow_async(
+    async def retrieve_workflow_async(
+        self, workflow_id: str
+    ) -> "WorkflowHandleAsync[R]":
         status = await asyncio.to_thread(get_workflow, self._sys_db, workflow_id)
         if status is None:
             raise DBOSNonExistentWorkflowError(workflow_id)
@@ -311,11 +317,13 @@ class DBOSClient:
     async def cancel_workflow_async(self, workflow_id: str) -> None:
         await asyncio.to_thread(self.cancel_workflow, workflow_id)

-    def resume_workflow(self, workflow_id: str) -> WorkflowHandle[Any]:
+    def resume_workflow(self, workflow_id: str) -> "WorkflowHandle[Any]":
         self._sys_db.resume_workflow(workflow_id)
         return WorkflowHandleClientPolling[Any](workflow_id, self._sys_db)

-    async def resume_workflow_async(
+    async def resume_workflow_async(
+        self, workflow_id: str
+    ) -> "WorkflowHandleAsync[Any]":
         await asyncio.to_thread(self.resume_workflow, workflow_id)
         return WorkflowHandleClientAsyncPolling[Any](workflow_id, self._sys_db)

@@ -451,7 +459,7 @@
         start_step: int,
         *,
         application_version: Optional[str] = None,
-    ) -> WorkflowHandle[Any]:
+    ) -> "WorkflowHandle[Any]":
         forked_workflow_id = fork_workflow(
             self._sys_db,
             self._app_db,
@@ -467,7 +475,7 @@
         start_step: int,
         *,
         application_version: Optional[str] = None,
-    ) -> WorkflowHandleAsync[Any]:
+    ) -> "WorkflowHandleAsync[Any]":
         forked_workflow_id = await asyncio.to_thread(
             fork_workflow,
             self._sys_db,
{dbos-1.13.1 → dbos-1.14.0}/dbos/_context.py

@@ -215,11 +215,18 @@ class DBOSContext:
     def end_handler(self, exc_value: Optional[BaseException]) -> None:
         self._end_span(exc_value)

-
+    """ Return the current DBOS span if any. It must be a span created by DBOS."""
+
+    def get_current_dbos_span(self) -> Optional[Span]:
         if len(self.context_spans) > 0:
             return self.context_spans[-1].span
         return None

+    """ Return the current active span if any. It might not be a DBOS span."""
+
+    def get_current_active_span(self) -> Optional[Span]:
+        return dbos_tracer.get_current_span()
+
     def _start_span(self, attributes: TracedAttributes) -> None:
         if dbos_tracer.disable_otlp:
             return
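For reference, the new get_current_active_span delegates to the tracer's view of whichever span is currently active, DBOS-created or not. That maps onto OpenTelemetry's standard context API; a standalone sketch using only opentelemetry-api (the dbos_tracer wiring in dbos/_tracer.py is not shown in this diff):

    from opentelemetry import trace

    def current_active_span() -> trace.Span:
        # Returns the span active in the current context; if none has been
        # started, OpenTelemetry hands back a non-recording INVALID_SPAN.
        return trace.get_current_span()

    span = current_active_span()
    print(span.is_recording())  # False here: no span has been started yet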
@@ -235,7 +242,7 @@ class DBOSContext:
         attributes["authenticatedUserAssumedRole"] = self.assumed_role
         span = dbos_tracer.start_span(
             attributes,
-            parent=
+            parent=None,  # It'll use the current active span as the parent
         )
         # Activate the current span
         cm = use_span(
@@ -517,6 +524,7 @@ class EnterDBOSWorkflow(AbstractContextManager[DBOSContext, Literal[False]]):
         self.saved_workflow_timeout: Optional[int] = None
         self.saved_deduplication_id: Optional[str] = None
         self.saved_priority: Optional[int] = None
+        self.saved_is_within_set_workflow_id_block: bool = False

     def __enter__(self) -> DBOSContext:
         # Code to create a basic context
@@ -526,6 +534,9 @@ class EnterDBOSWorkflow(AbstractContextManager[DBOSContext, Literal[False]]):
             ctx = DBOSContext()
             _set_local_dbos_context(ctx)
         assert not ctx.is_within_workflow()
+        # Unset is_within_set_workflow_id_block as the workflow is not within a block
+        self.saved_is_within_set_workflow_id_block = ctx.is_within_set_workflow_id_block
+        ctx.is_within_set_workflow_id_block = False
         # Unset the workflow_timeout_ms context var so it is not applied to this
         # workflow's children (instead we propagate the deadline)
         self.saved_workflow_timeout = ctx.workflow_timeout_ms
@@ -550,6 +561,8 @@ class EnterDBOSWorkflow(AbstractContextManager[DBOSContext, Literal[False]]):
         ctx = assert_current_dbos_context()
         assert ctx.is_within_workflow()
         ctx.end_workflow(exc_value)
+        # Restore is_within_set_workflow_id_block
+        ctx.is_within_set_workflow_id_block = self.saved_is_within_set_workflow_id_block
         # Restore the saved workflow timeout
         ctx.workflow_timeout_ms = self.saved_workflow_timeout
         # Clear any propagating timeout
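The saved_is_within_set_workflow_id_block additions above follow the same save-on-enter, restore-on-exit discipline the context manager already applies to workflow timeouts. A generic, self-contained sketch of that discipline (Ctx and is_within_block are illustrative names, not dbos internals):

    from contextlib import contextmanager
    from typing import Iterator

    class Ctx:
        # Stand-in for a context object carrying a flag that must not leak
        # into the entered scope and must be restored afterwards.
        is_within_block: bool = False

    @contextmanager
    def enter_scope(ctx: Ctx) -> Iterator[Ctx]:
        saved = ctx.is_within_block      # save on enter
        ctx.is_within_block = False      # clear for the duration of the scope
        try:
            yield ctx
        finally:
            ctx.is_within_block = saved  # restore on exit, even after errors

    ctx = Ctx()
    ctx.is_within_block = True
    with enter_scope(ctx):
        assert ctx.is_within_block is False
    assert ctx.is_within_block is True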
{dbos-1.13.1 → dbos-1.14.0}/dbos/_core.py

@@ -19,8 +19,6 @@ from typing import (
     cast,
 )

-import psycopg
-
 from dbos._outcome import Immediate, NoResult, Outcome, Pending
 from dbos._utils import GlobalParams, retriable_postgres_exception

@@ -52,12 +50,14 @@ from ._error import (
     DBOSException,
     DBOSMaxStepRetriesExceeded,
     DBOSNonExistentWorkflowError,
+    DBOSQueueDeduplicatedError,
     DBOSRecoveryError,
     DBOSUnexpectedStepError,
     DBOSWorkflowCancelledError,
     DBOSWorkflowConflictIDError,
     DBOSWorkflowFunctionNotFoundError,
 )
+from ._logger import dbos_logger
 from ._registrations import (
     DEFAULT_MAX_RECOVERY_ATTEMPTS,
     get_config_name,
@@ -96,6 +96,15 @@ R = TypeVar("R", covariant=True) # A generic type for workflow return values
 F = TypeVar("F", bound=Callable[..., Any])

 TEMP_SEND_WF_NAME = "<temp>.temp_send_workflow"
+DEBOUNCER_WORKFLOW_NAME = "_dbos_debouncer_workflow"
+
+
+def check_is_in_coroutine() -> bool:
+    try:
+        asyncio.get_running_loop()
+        return True
+    except RuntimeError:
+        return False


 class WorkflowHandleFuture(Generic[R]):
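check_is_in_coroutine leans on asyncio.get_running_loop(), which raises RuntimeError unless an event loop is running in the current thread. A quick standalone demonstration of that behavior (plain asyncio, not dbos code):

    import asyncio

    def in_coroutine_context() -> bool:
        # get_running_loop() succeeds only while an event loop is running in
        # this thread, i.e. when called (directly or indirectly) from async code.
        try:
            asyncio.get_running_loop()
            return True
        except RuntimeError:
            return False

    print(in_coroutine_context())      # False: plain synchronous call

    async def main() -> None:
        print(in_coroutine_context())  # True: running inside the event loop

    asyncio.run(main())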
@@ -303,10 +312,22 @@ def _init_workflow(
     }

     # Synchronously record the status and inputs for workflows
-    wf_status, workflow_deadline_epoch_ms = dbos._sys_db.init_workflow(
-        status,
-        max_recovery_attempts=max_recovery_attempts,
-    )
+    try:
+        wf_status, workflow_deadline_epoch_ms = dbos._sys_db.init_workflow(
+            status,
+            max_recovery_attempts=max_recovery_attempts,
+        )
+    except DBOSQueueDeduplicatedError as e:
+        if ctx.has_parent():
+            result: OperationResultInternal = {
+                "workflow_uuid": ctx.parent_workflow_id,
+                "function_id": ctx.parent_workflow_fid,
+                "function_name": wf_name,
+                "output": None,
+                "error": _serialization.serialize_exception(e),
+            }
+            dbos._sys_db.record_operation_result(result)
+        raise

     if workflow_deadline_epoch_ms is not None:
         evt = threading.Event()
@@ -830,11 +851,16 @@ def workflow_wrapper(
             dbos._sys_db.record_get_result(workflow_id, serialized_r, None)
             return r

+        if check_is_in_coroutine() and not inspect.iscoroutinefunction(func):
+            dbos_logger.warning(
+                f"Sync workflow ({get_dbos_func_name(func)}) shouldn't be invoked from within another async function. Define it as async or use asyncio.to_thread instead."
+            )
+
         outcome = (
-            wfOutcome.wrap(init_wf)
+            wfOutcome.wrap(init_wf, dbos=dbos)
             .also(DBOSAssumeRole(rr))
             .also(enterWorkflowCtxMgr(attributes))
-            .then(record_get_result)
+            .then(record_get_result, dbos=dbos)
         )
         return outcome()  # type: ignore
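The new warning steers callers toward asyncio.to_thread when a synchronous workflow has to be invoked from async code, so the event loop is not blocked. A generic illustration of that advice (plain asyncio with a stand-in blocking function, not the DBOS decorators):

    import asyncio
    import time

    def blocking_task(n: int) -> int:
        # Stand-in for a sync workflow or step: it blocks the calling thread.
        time.sleep(0.1)
        return n * 2

    async def main() -> None:
        # Calling blocking_task(21) directly here would stall the event loop
        # (and now also triggers the warning added above); to_thread offloads
        # the call to a worker thread and awaits its result.
        result = await asyncio.to_thread(blocking_task, 21)
        print(result)  # 42

    asyncio.run(main())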
@@ -959,7 +985,7 @@ def decorate_transaction(
                         dbapi_error
                     ) or dbos._app_db._is_serialization_error(dbapi_error):
                         # Retry on serialization failure
-                        span = ctx.
+                        span = ctx.get_current_dbos_span()
                         if span:
                             span.add_event(
                                 "Transaction Failure",
@@ -1011,6 +1037,10 @@
             assert (
                 ctx.is_workflow()
             ), "Transactions must be called from within workflows"
+            if check_is_in_coroutine():
+                dbos_logger.warning(
+                    f"Transaction function ({get_dbos_func_name(func)}) shouldn't be invoked from within another async function. Use asyncio.to_thread instead."
+                )
             with DBOSAssumeRole(rr):
                 return invoke_tx(*args, **kwargs)
         else:
@@ -1074,7 +1104,7 @@
                 exc_info=error,
             )
             ctx = assert_current_dbos_context()
-            span = ctx.
+            span = ctx.get_current_dbos_span()
             if span:
                 span.add_event(
                     f"Step attempt {attempt} failed",
@@ -1146,7 +1176,7 @@

         outcome = (
             stepOutcome.then(record_step_result)
-            .intercept(check_existing_result)
+            .intercept(check_existing_result, dbos=dbos)
             .also(EnterDBOSStep(attributes))
         )
         return outcome()
@@ -1155,6 +1185,10 @@

     @wraps(func)
     def wrapper(*args: Any, **kwargs: Any) -> Any:
+        if check_is_in_coroutine() and not inspect.iscoroutinefunction(func):
+            dbos_logger.warning(
+                f"Sync step ({get_dbos_func_name(func)}) shouldn't be invoked from within another async function. Define it as async or use asyncio.to_thread instead."
+            )
         # If the step is called from a workflow, run it as a step.
         # Otherwise, run it as a normal function.
         ctx = get_local_dbos_context()
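Both retry paths above attach their failures as events on the current DBOS span via get_current_dbos_span(). The underlying mechanism is ordinary OpenTelemetry span events; a standalone sketch (the event name and attributes here are illustrative, not the exact ones dbos emits):

    from opentelemetry import trace

    tracer = trace.get_tracer("example")

    with tracer.start_as_current_span("step") as span:
        try:
            raise TimeoutError("transient database hiccup")
        except TimeoutError as exc:
            # Record a structured event on the active span, the same mechanism
            # used above for "Transaction Failure" and "Step attempt N failed".
            span.add_event("step attempt failed", {"error": str(exc), "attempt": 1})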
{dbos-1.13.1 → dbos-1.14.0}/dbos/_dbos.py

@@ -32,12 +32,14 @@ from opentelemetry.trace import Span
 from rich import print

 from dbos._conductor.conductor import ConductorWebsocket
+from dbos._debouncer import debouncer_workflow
 from dbos._sys_db import SystemDatabase, WorkflowStatus
 from dbos._utils import INTERNAL_QUEUE_NAME, GlobalParams
 from dbos._workflow_commands import fork_workflow, list_queued_workflows, list_workflows

 from ._classproperty import classproperty
 from ._core import (
+    DEBOUNCER_WORKFLOW_NAME,
     TEMP_SEND_WF_NAME,
     WorkflowHandleAsyncPolling,
     WorkflowHandlePolling,
@@ -390,11 +392,12 @@
         ) -> None:
             self.send(destination_id, message, topic)

-
-
-
-
-
+        decorate_workflow(self._registry, TEMP_SEND_WF_NAME, None)(send_temp_workflow)
+
+        # Register the debouncer workflow
+        decorate_workflow(self._registry, DEBOUNCER_WORKFLOW_NAME, None)(
+            debouncer_workflow
+        )

         for handler in dbos_logger.handlers:
             handler.flush()
@@ -1297,7 +1300,7 @@
     def span(cls) -> Span:
         """Return the tracing `Span` associated with the current context."""
         ctx = assert_current_dbos_context()
-        span = ctx.
+        span = ctx.get_current_active_span()
         assert span
         return span