dbos 1.14.0a3__tar.gz → 1.14.0a5__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {dbos-1.14.0a3 → dbos-1.14.0a5}/PKG-INFO +1 -1
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_context.py +9 -2
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_core.py +24 -2
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_dbos.py +1 -1
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_logger.py +1 -1
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_tracer.py +7 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/pyproject.toml +2 -1
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/test_async.py +90 -4
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/test_async_workflow_management.py +1 -1
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/test_dbos.py +4 -3
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/test_spans.py +128 -34
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/test_workflow_introspection.py +4 -4
- {dbos-1.14.0a3 → dbos-1.14.0a5}/LICENSE +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/README.md +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/__init__.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/__main__.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_admin_server.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_alembic_migrations/env.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_alembic_migrations/script.py.mako +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_alembic_migrations/versions/01ce9f07bd10_streaming.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_alembic_migrations/versions/04ca4f231047_workflow_queues_executor_id.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_alembic_migrations/versions/27ac6900c6ad_add_queue_dedup.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_alembic_migrations/versions/471b60d64126_dbos_migrations.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_alembic_migrations/versions/50f3227f0b4b_fix_job_queue.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_alembic_migrations/versions/5c361fc04708_added_system_tables.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_alembic_migrations/versions/66478e1b95e5_consolidate_queues.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_alembic_migrations/versions/83f3732ae8e7_workflow_timeout.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_alembic_migrations/versions/933e86bdac6a_add_queue_priority.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_alembic_migrations/versions/a3b18ad34abe_added_triggers.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_alembic_migrations/versions/d76646551a6b_job_queue_limiter.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_alembic_migrations/versions/d76646551a6c_workflow_queue.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_alembic_migrations/versions/d994145b47b6_consolidate_inputs.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_alembic_migrations/versions/eab0cc1d9a14_job_queue.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_alembic_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_app_db.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_classproperty.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_client.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_conductor/conductor.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_conductor/protocol.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_croniter.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_dbos_config.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_debug.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_docker_pg_helper.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_error.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_event_loop.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_fastapi.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_flask.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_kafka.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_kafka_message.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_migration.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_outcome.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_queue.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_recovery.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_registrations.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_roles.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_scheduler.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_schemas/__init__.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_schemas/application_database.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_schemas/system_database.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_serialization.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_sys_db.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_sys_db_postgres.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_sys_db_sqlite.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_templates/dbos-db-starter/README.md +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_templates/dbos-db-starter/__package/__init__.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_templates/dbos-db-starter/__package/main.py.dbos +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_templates/dbos-db-starter/__package/schema.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_templates/dbos-db-starter/alembic.ini +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_templates/dbos-db-starter/migrations/env.py.dbos +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_templates/dbos-db-starter/migrations/script.py.mako +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_utils.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_workflow_commands.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/cli/_github_init.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/cli/_template_init.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/cli/cli.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/cli/migration.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/dbos-config.schema.json +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/py.typed +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/__init__.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/atexit_no_ctor.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/atexit_no_launch.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/classdefs.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/client_collateral.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/client_worker.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/conftest.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/dupname_classdefs1.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/dupname_classdefsa.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/more_classdefs.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/queuedworkflow.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/test_admin_server.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/test_classdecorators.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/test_cli.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/test_client.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/test_concurrency.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/test_config.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/test_croniter.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/test_debug.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/test_docker_secrets.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/test_failures.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/test_fastapi.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/test_fastapi_roles.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/test_flask.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/test_kafka.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/test_outcome.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/test_package.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/test_queue.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/test_scheduler.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/test_schema_migration.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/test_singleton.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/test_sqlalchemy.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/test_streaming.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/tests/test_workflow_management.py +0 -0
- {dbos-1.14.0a3 → dbos-1.14.0a5}/version/__init__.py +0 -0
{dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_context.py

@@ -215,11 +215,18 @@ class DBOSContext:
     def end_handler(self, exc_value: Optional[BaseException]) -> None:
         self._end_span(exc_value)

-    def get_current_span(self) -> Optional[Span]:
+    """ Return the current DBOS span if any. It must be a span created by DBOS."""
+
+    def get_current_dbos_span(self) -> Optional[Span]:
         if len(self.context_spans) > 0:
             return self.context_spans[-1].span
         return None

+    """ Return the current active span if any. It might not be a DBOS span."""
+
+    def get_current_active_span(self) -> Optional[Span]:
+        return dbos_tracer.get_current_span()
+
     def _start_span(self, attributes: TracedAttributes) -> None:
         if dbos_tracer.disable_otlp:
             return

@@ -235,7 +242,7 @@ class DBOSContext:
             attributes["authenticatedUserAssumedRole"] = self.assumed_role
         span = dbos_tracer.start_span(
             attributes,
-            parent=
+            parent=None,  # It'll use the current active span as the parent
         )
         # Activate the current span
         cm = use_span(
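The `_start_span` change above stops passing an explicit parent and relies on OpenTelemetry context propagation instead: a span started without a parent is parented to whatever span is currently active. A minimal sketch of that SDK behavior, independent of DBOS (the names "outer" and "inner" are illustrative):

from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import SimpleSpanProcessor
from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter

exporter = InMemorySpanExporter()
provider = TracerProvider()
provider.add_span_processor(SimpleSpanProcessor(exporter))
tracer = provider.get_tracer("example")

with tracer.start_as_current_span("outer"):
    # No parent or context argument: the SDK parents this span to the active "outer" span.
    inner = tracer.start_span("inner")
    inner.end()

finished = exporter.get_finished_spans()
outer_span = next(s for s in finished if s.name == "outer")
inner_span = next(s for s in finished if s.name == "inner")
assert inner_span.parent is not None
assert inner_span.parent.span_id == outer_span.context.span_id

This is also why the new `get_current_active_span()` may return a span that was not created by DBOS, as its comment notes.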
{dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_core.py

@@ -56,6 +56,7 @@ from ._error import (
     DBOSWorkflowConflictIDError,
     DBOSWorkflowFunctionNotFoundError,
 )
+from ._logger import dbos_logger
 from ._registrations import (
     DEFAULT_MAX_RECOVERY_ATTEMPTS,
     get_config_name,

@@ -96,6 +97,14 @@ F = TypeVar("F", bound=Callable[..., Any])
 TEMP_SEND_WF_NAME = "<temp>.temp_send_workflow"


+def check_is_in_coroutine() -> bool:
+    try:
+        asyncio.get_running_loop()
+        return True
+    except RuntimeError:
+        return False
+
+
 class WorkflowHandleFuture(Generic[R]):

     def __init__(self, workflow_id: str, future: Future[R], dbos: "DBOS"):
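The new `check_is_in_coroutine()` helper relies on `asyncio.get_running_loop()` raising `RuntimeError` when no event loop is running in the current thread. A standalone sketch of the same check (the name `in_event_loop` is illustrative, not part of DBOS):

import asyncio


def in_event_loop() -> bool:  # mirrors check_is_in_coroutine() above
    try:
        asyncio.get_running_loop()
        return True
    except RuntimeError:
        return False


async def main() -> None:
    assert in_event_loop() is True  # inside asyncio.run(): a loop is running here
    # Work handed to a worker thread runs outside the loop's thread, so the check
    # is False there -- which is why asyncio.to_thread silences the new warnings.
    assert await asyncio.to_thread(in_event_loop) is False


assert in_event_loop() is False  # plain sync code: no running loop
asyncio.run(main())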
{dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_core.py (continued)

@@ -828,6 +837,11 @@ def workflow_wrapper(
             dbos._sys_db.record_get_result(workflow_id, serialized_r, None)
             return r

+        if check_is_in_coroutine() and not inspect.iscoroutinefunction(func):
+            dbos_logger.warning(
+                f"Sync workflow ({get_dbos_func_name(func)}) shouldn't be invoked from within another async function. Define it as async or use asyncio.to_thread instead."
+            )
+
         outcome = (
             wfOutcome.wrap(init_wf, dbos=dbos)
             .also(DBOSAssumeRole(rr))
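The guard combines the running-loop check with `inspect.iscoroutinefunction()`, so only plain `def` workflows invoked from async code produce the warning. A quick illustration of what that predicate reports (the two functions are illustrative, not DBOS code):

import inspect


def sync_fn() -> None: ...


async def async_fn() -> None: ...


assert inspect.iscoroutinefunction(sync_fn) is False
assert inspect.iscoroutinefunction(async_fn) is True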
{dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_core.py (continued)

@@ -957,7 +971,7 @@ def decorate_transaction(
                             dbapi_error
                         ) or dbos._app_db._is_serialization_error(dbapi_error):
                             # Retry on serialization failure
-                            span = ctx.get_current_span()
+                            span = ctx.get_current_dbos_span()
                             if span:
                                 span.add_event(
                                     "Transaction Failure",

@@ -1009,6 +1023,10 @@ def decorate_transaction(
                 assert (
                     ctx.is_workflow()
                 ), "Transactions must be called from within workflows"
+                if check_is_in_coroutine():
+                    dbos_logger.warning(
+                        f"Transaction function ({get_dbos_func_name(func)}) shouldn't be invoked from within another async function. Use asyncio.to_thread instead."
+                    )
                 with DBOSAssumeRole(rr):
                     return invoke_tx(*args, **kwargs)
             else:

@@ -1072,7 +1090,7 @@ def decorate_step(
                         exc_info=error,
                     )
                     ctx = assert_current_dbos_context()
-                    span = ctx.get_current_span()
+                    span = ctx.get_current_dbos_span()
                     if span:
                         span.add_event(
                             f"Step attempt {attempt} failed",

@@ -1153,6 +1171,10 @@ def decorate_step(

         @wraps(func)
         def wrapper(*args: Any, **kwargs: Any) -> Any:
+            if check_is_in_coroutine() and not inspect.iscoroutinefunction(func):
+                dbos_logger.warning(
+                    f"Sync step ({get_dbos_func_name(func)}) shouldn't be invoked from within another async function. Define it as async or use asyncio.to_thread instead."
+                )
             # If the step is called from a workflow, run it as a step.
             # Otherwise, run it as a normal function.
             ctx = get_local_dbos_context()
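All three warnings point to the same remedy: move the blocking call onto a worker thread with `asyncio.to_thread` instead of calling it directly from a coroutine. A minimal sketch with a plain blocking function (`slow_io` and `main` are illustrative names, not DBOS code):

import asyncio
import time


def slow_io() -> str:
    time.sleep(0.1)  # stands in for a blocking step or transaction body
    return "done"


async def main() -> None:
    # Calling slow_io() directly here would block the event loop;
    # asyncio.to_thread awaits it on a worker thread instead.
    result = await asyncio.to_thread(slow_io)
    assert result == "done"


asyncio.run(main())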
{dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_dbos.py

@@ -1297,7 +1297,7 @@ class DBOS:
     def span(cls) -> Span:
         """Return the tracing `Span` associated with the current context."""
         ctx = assert_current_dbos_context()
-        span = ctx.get_current_span()
+        span = ctx.get_current_active_span()
         assert span
         return span

{dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_logger.py

@@ -39,7 +39,7 @@ class DBOSLogTransformer(logging.Filter):
         if ctx:
             if ctx.is_within_workflow():
                 record.operationUUID = ctx.workflow_id
-                span = ctx.get_current_span()
+                span = ctx.get_current_active_span()
                 if span:
                     trace_id = format_trace_id(span.get_span_context().trace_id)
                     record.traceId = trace_id
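With this change `DBOSLogTransformer` reads the active span, so log records pick up the trace and span IDs of whatever span is current, not only DBOS-created ones. A generic sketch of the same pattern with a plain `logging.Filter` (the `TraceContextFilter` class is illustrative, not the DBOS implementation):

import logging

from opentelemetry import trace
from opentelemetry.trace import format_span_id, format_trace_id


class TraceContextFilter(logging.Filter):  # hypothetical helper for illustration
    def filter(self, record: logging.LogRecord) -> bool:
        span = trace.get_current_span()
        ctx = span.get_span_context()
        if ctx.is_valid:
            # Stamp the record with the active trace/span IDs as hex strings.
            record.traceId = format_trace_id(ctx.trace_id)
            record.spanId = format_span_id(ctx.span_id)
        return True


logger = logging.getLogger("example")
logger.addFilter(TraceContextFilter())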
{dbos-1.14.0a3 → dbos-1.14.0a5}/dbos/_tracer.py

@@ -77,5 +77,12 @@ class DBOSTracer:
     def end_span(self, span: Span) -> None:
         span.end()

+    def get_current_span(self) -> Optional[Span]:
+        # Return the current active span if any. It might not be a DBOS span.
+        span = trace.get_current_span()
+        if span.get_span_context().is_valid:
+            return span
+        return None
+

 dbos_tracer = DBOSTracer()
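`DBOSTracer.get_current_span()` filters out OpenTelemetry's placeholder span: when nothing is active, `trace.get_current_span()` returns a non-recording span whose context fails the `is_valid` check. A short sketch of that behavior (illustrative, not DBOS code):

from opentelemetry import trace
from opentelemetry.sdk.trace import TracerProvider

# No active span: the placeholder span's context is not valid.
assert trace.get_current_span().get_span_context().is_valid is False

tracer = TracerProvider().get_tracer("example")
with tracer.start_as_current_span("active"):
    # Inside an active span, the current span has a valid context.
    assert trace.get_current_span().get_span_context().is_valid is True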
{dbos-1.14.0a3 → dbos-1.14.0a5}/pyproject.toml

@@ -27,7 +27,7 @@ dependencies = [
 ]
 requires-python = ">=3.9"
 readme = "README.md"
-version = "1.14.0a3"
+version = "1.14.0a5"

 [project.license]
 text = "MIT"

@@ -90,4 +90,5 @@ dev = [
     "pyright>=1.1.398",
     "types-docker>=7.1.0.20241229",
     "pytest-timeout>=2.3.1",
+    "inline-snapshot>=0.28.0",
 ]
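The new `inline-snapshot` dev dependency backs the snapshot assertions added to tests/test_spans.py. A minimal sketch of how it is typically used (values here are illustrative): an empty `snapshot()` gets filled in by running pytest with `--inline-snapshot=create`, after which it behaves like a plain equality check.

from inline_snapshot import snapshot


def test_sum() -> None:
    # Once recorded, the snapshot value is compared like a literal.
    assert 2 + 2 == snapshot(4)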
{dbos-1.14.0a3 → dbos-1.14.0a5}/tests/test_async.py

@@ -5,6 +5,9 @@ from typing import List, Optional

 import pytest
 import sqlalchemy as sa
+from opentelemetry._logs import set_logger_provider
+from opentelemetry.sdk._logs import LoggerProvider, LoggingHandler
+from opentelemetry.sdk._logs.export import BatchLogRecordProcessor, InMemoryLogExporter

 # Public API
 from dbos import (

@@ -19,6 +22,8 @@ from dbos._context import assert_current_dbos_context
 from dbos._dbos import WorkflowHandle
 from dbos._dbos_config import ConfigFile
 from dbos._error import DBOSAwaitedWorkflowCancelledError, DBOSException
+from dbos._logger import dbos_logger
+from dbos._registrations import get_dbos_func_name


 @pytest.mark.asyncio

@@ -31,7 +36,7 @@ async def test_async_workflow(dbos: DBOS) -> None:
     async def test_workflow(var1: str, var2: str) -> str:
         nonlocal wf_counter
         wf_counter += 1
-        res1 = test_transaction(var1)
+        res1 = await asyncio.to_thread(test_transaction, var1)
         res2 = await test_step(var2)
         DBOS.logger.info("I'm test_workflow")
         return res1 + res2

@@ -88,7 +93,7 @@ async def test_async_step(dbos: DBOS) -> None:
     async def test_workflow(var1: str, var2: str) -> str:
         nonlocal wf_counter
         wf_counter += 1
-        res1 = test_transaction(var1)
+        res1 = await asyncio.to_thread(test_transaction, var1)
         res2 = await test_step(var2)
         DBOS.logger.info("I'm test_workflow")
         return res1 + res2

@@ -325,6 +330,7 @@ def test_async_tx_raises(config: ConfigFile) -> None:
         async def test_async_tx() -> None:
             pass

+    assert "is a coroutine function" in str(exc_info.value)
     # destroy call needed to avoid "functions were registered but DBOS() was not called" warning
     DBOS.destroy(destroy_registry=True)


@@ -343,12 +349,12 @@ async def test_start_workflow_async(dbos: DBOS) -> None:
         wf_el_id = id(asyncio.get_running_loop())
         nonlocal wf_counter
         wf_counter += 1
-        res2 = test_step(var2)
+        res2 = await test_step(var2)
         DBOS.logger.info("I'm test_workflow")
         return var1 + res2

     @DBOS.step()
-    def test_step(var: str) -> str:
+    async def test_step(var: str) -> str:
         nonlocal step_el_id
         step_el_id = id(asyncio.get_running_loop())
         nonlocal step_counter

@@ -605,3 +611,83 @@ async def test_workflow_with_task_cancellation(dbos: DBOS) -> None:
     # Verify the workflow completes despite the task cancellation
     handle: WorkflowHandleAsync[str] = await DBOS.retrieve_workflow_async(wfid)
     assert await handle.get_result() == "completed"
+
+
+@pytest.mark.asyncio
+async def test_check_async_violation(dbos: DBOS) -> None:
+    # Set up in-memory log exporter
+    log_exporter = InMemoryLogExporter()  # type: ignore
+    log_processor = BatchLogRecordProcessor(log_exporter)
+    log_provider = LoggerProvider()
+    log_provider.add_log_record_processor(log_processor)
+    set_logger_provider(log_provider)
+    dbos_logger.addHandler(LoggingHandler(logger_provider=log_provider))
+
+    @DBOS.workflow()
+    def sync_workflow() -> str:
+        return "sync"
+
+    @DBOS.step()
+    def sync_step() -> str:
+        return "step"
+
+    @DBOS.workflow()
+    async def async_workflow_sync_step() -> str:
+        return sync_step()
+
+    @DBOS.transaction()
+    def sync_transaction() -> str:
+        return "txn"
+
+    @DBOS.workflow()
+    async def async_workflow_sync_txn() -> str:
+        return sync_transaction()
+
+    # Call a sync workflow should log a warning
+    sync_workflow()
+
+    log_processor.force_flush(timeout_millis=5000)
+    logs = log_exporter.get_finished_logs()
+    assert len(logs) == 1
+    assert (
+        logs[0].log_record.body is not None
+        and f"Sync workflow ({get_dbos_func_name(sync_workflow)}) shouldn't be invoked from within another async function."
+        in logs[0].log_record.body
+    )
+    log_exporter.clear()
+
+    # Call a sync step from within an async workflow should log a warning
+    await async_workflow_sync_step()
+    log_processor.force_flush(timeout_millis=5000)
+    logs = log_exporter.get_finished_logs()
+    assert len(logs) == 1
+    assert (
+        logs[0].log_record.body is not None
+        and f"Sync step ({get_dbos_func_name(sync_step)}) shouldn't be invoked from within another async function."
+        in logs[0].log_record.body
+    )
+    log_exporter.clear()
+
+    # Directly call a sync step should log a warning
+    sync_step()
+    log_processor.force_flush(timeout_millis=5000)
+    logs = log_exporter.get_finished_logs()
+    assert len(logs) == 1
+    assert (
+        logs[0].log_record.body is not None
+        and f"Sync step ({get_dbos_func_name(sync_step)}) shouldn't be invoked from within another async function."
+        in logs[0].log_record.body
+    )
+    log_exporter.clear()
+
+    # Call a sync transaction from within an async workflow should log a warning
+    await async_workflow_sync_txn()
+    log_processor.force_flush(timeout_millis=5000)
+    logs = log_exporter.get_finished_logs()
+    assert len(logs) == 1
+    assert (
+        logs[0].log_record.body is not None
+        and f"Transaction function ({get_dbos_func_name(sync_transaction)}) shouldn't be invoked from within another async function."
+        in logs[0].log_record.body
+    )
+    log_exporter.clear()
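test_check_async_violation captures the new warnings by routing `dbos_logger` through an in-memory OpenTelemetry log exporter. The same pattern works for any Python logger; a self-contained sketch (logger name and message are illustrative, not part of DBOS):

import logging

from opentelemetry.sdk._logs import LoggerProvider, LoggingHandler
from opentelemetry.sdk._logs.export import BatchLogRecordProcessor, InMemoryLogExporter

exporter = InMemoryLogExporter()
processor = BatchLogRecordProcessor(exporter)
provider = LoggerProvider()
provider.add_log_record_processor(processor)

logger = logging.getLogger("example")
logger.setLevel(logging.INFO)
logger.addHandler(LoggingHandler(logger_provider=provider))

logger.warning("something went wrong")
processor.force_flush(timeout_millis=5000)

# Each finished entry wraps a LogRecord whose body is the emitted message.
records = exporter.get_finished_logs()
assert any(
    record.log_record.body and "something went wrong" in record.log_record.body
    for record in records
)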
{dbos-1.14.0a3 → dbos-1.14.0a5}/tests/test_async_workflow_management.py

@@ -148,7 +148,7 @@ async def test_fork_workflow_async(dbos: DBOS) -> None:

     wfid = str(uuid.uuid4())
     with SetWorkflowID(wfid):
-        assert simple_workflow(input_val) == output
+        assert await asyncio.to_thread(simple_workflow, input_val) == output

     assert step_one_count == 1
     assert step_two_count == 1
{dbos-1.14.0a3 → dbos-1.14.0a5}/tests/test_dbos.py

@@ -1,5 +1,6 @@
 # mypy: disable-error-code="no-redef"

+import asyncio
 import datetime
 import logging
 import os

@@ -1668,17 +1669,17 @@ async def test_step_without_dbos(dbos: DBOS, config: DBOSConfig) -> None:
         assert DBOS.workflow_id is None
         return x

-    assert step(5) == 5
+    assert await asyncio.to_thread(step, 5) == 5
     assert await async_step(5) == 5

     DBOS(config=config)

-    assert step(5) == 5
+    assert await asyncio.to_thread(step, 5) == 5
     assert await async_step(5) == 5

     DBOS.launch()

-    assert step(5) == 5
+    assert await asyncio.to_thread(step, 5) == 5
     assert await async_step(5) == 5

     assert len(DBOS.list_workflows()) == 0
{dbos-1.14.0a3 → dbos-1.14.0a5}/tests/test_spans.py

@@ -1,8 +1,10 @@
-from
+from dataclasses import dataclass, field
+from typing import Optional, Tuple

 import pytest
 from fastapi import FastAPI
 from fastapi.testclient import TestClient
+from inline_snapshot import snapshot
 from opentelemetry._logs import set_logger_provider
 from opentelemetry.sdk import trace as tracesdk
 from opentelemetry.sdk._logs import LoggerProvider, LoggingHandler

@@ -17,33 +19,47 @@ from dbos._tracer import dbos_tracer
 from dbos._utils import GlobalParams


+@dataclass
+class BasicSpan:
+    content: str
+    children: list["BasicSpan"] = field(default_factory=list)
+    parent_id: Optional[int] = field(repr=False, compare=False, default=None)
+
+
 def test_spans(config: DBOSConfig) -> None:
+    exporter = InMemorySpanExporter()
+    span_processor = SimpleSpanProcessor(exporter)
+    provider = tracesdk.TracerProvider()
+    provider.add_span_processor(span_processor)
+    dbos_tracer.set_provider(provider)
+
     DBOS.destroy(destroy_registry=True)
     config["otlp_attributes"] = {"foo": "bar"}
     DBOS(config=config)
     DBOS.launch()

+    my_tracer = provider.get_tracer("dbos")
+
     @DBOS.workflow()
     def test_workflow() -> None:
-
-
-
-
-
-
-
+        with my_tracer.start_as_current_span(  # pyright: ignore[reportAttributeAccessIssue]
+            "manual_span"
+        ):
+            test_step()
+            current_span = DBOS.span
+            subspan = DBOS.tracer.start_span(
+                {"name": "a new span"}, parent=current_span
+            )
+            # Note: DBOS.tracer.start_span() does not set the new span as the current span. So this log is still attached to the workflow span.
+            DBOS.logger.info("This is a test_workflow")
+            subspan.add_event("greeting_event", {"name": "a new event"})
+            DBOS.tracer.end_span(subspan)

     @DBOS.step()
     def test_step() -> None:
         DBOS.logger.info("This is a test_step")
         return

-    exporter = InMemorySpanExporter()
-    span_processor = SimpleSpanProcessor(exporter)
-    provider = tracesdk.TracerProvider()
-    provider.add_span_processor(span_processor)
-    dbos_tracer.set_provider(provider)
-
     # Set up in-memory log exporter
     log_exporter = InMemoryLogExporter()  # type: ignore
     log_processor = BatchLogRecordProcessor(log_exporter)
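`BasicSpan` declares `parent_id` with `compare=False`, so dataclass equality ignores it and the inline snapshot only has to match span names and tree shape, never the run-specific span IDs. A tiny sketch of that effect (the `Node` class is illustrative, not part of the test suite):

from dataclasses import dataclass, field
from typing import Optional


@dataclass
class Node:
    content: str
    children: list["Node"] = field(default_factory=list)
    parent_id: Optional[int] = field(repr=False, compare=False, default=None)


# Different parent_id values, same content and children: the instances still compare equal.
assert Node("workflow", parent_id=111) == Node("workflow", parent_id=222)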
{dbos-1.14.0a3 → dbos-1.14.0a5}/tests/test_spans.py (continued)

@@ -77,9 +93,10 @@ def test_spans(config: DBOSConfig) -> None:

     spans = exporter.get_finished_spans()

-    assert len(spans) == 3
-
     for span in spans:
+        if span.name == "manual_span":
+            # Skip the manual span because it was not created by DBOS.tracer
+            continue
         assert span.attributes is not None
         assert span.attributes["applicationVersion"] == GlobalParams.app_version
         assert span.attributes["executorID"] == GlobalParams.executor_id

@@ -90,11 +107,12 @@ def test_spans(config: DBOSConfig) -> None:

     assert spans[0].name == test_step.__qualname__
     assert spans[1].name == "a new span"
-    assert spans[2].name == test_workflow.__qualname__
+    assert spans[3].name == test_workflow.__qualname__

     assert spans[0].parent.span_id == spans[2].context.span_id  # type: ignore
     assert spans[1].parent.span_id == spans[2].context.span_id  # type: ignore
-    assert spans[2].parent == None
+    assert spans[2].parent.span_id == spans[3].context.span_id  # type: ignore
+    assert spans[3].parent == None

     # Span ID and trace ID should match the log record
     # For pyright

@@ -105,31 +123,70 @@ def test_spans(config: DBOSConfig) -> None:
     assert logs[1].log_record.span_id == spans[2].context.span_id
     assert logs[1].log_record.trace_id == spans[2].context.trace_id

+    # Test the span tree structure
+    basic_spans = {
+        span.context.span_id: BasicSpan(  # pyright: ignore[reportOptionalMemberAccess]
+            content=span.name, parent_id=span.parent.span_id if span.parent else None
+        )
+        for span in spans
+    }
+    root_span = None
+    for basic_span in basic_spans.values():
+        if basic_span.parent_id is None:
+            root_span = basic_span
+        else:
+            parent_id = basic_span.parent_id
+            parent_span = basic_spans[parent_id]
+            parent_span.children.append(basic_span)
+
+    assert len(spans) == 4
+    # Make sure the span tree structure is correct
+    assert root_span == snapshot(
+        BasicSpan(
+            content="test_spans.<locals>.test_workflow",
+            children=[
+                BasicSpan(
+                    content="manual_span",
+                    children=[
+                        BasicSpan(content="test_spans.<locals>.test_step"),
+                        BasicSpan(content="a new span"),
+                    ],
+                )
+            ],
+        )
+    )
+

 @pytest.mark.asyncio
 async def test_spans_async(dbos: DBOS) -> None:
+    exporter = InMemorySpanExporter()
+    span_processor = SimpleSpanProcessor(exporter)
+    provider = tracesdk.TracerProvider()
+    provider.add_span_processor(span_processor)
+    dbos_tracer.set_provider(provider)
+
+    my_tracer = provider.get_tracer("dbos")

     @DBOS.workflow()
     async def test_workflow() -> None:
-
-
-
-
-
-
-
+        with my_tracer.start_as_current_span(  # pyright: ignore[reportAttributeAccessIssue]
+            "manual_span"
+        ):
+            await test_step()
+            current_span = DBOS.span
+            subspan = DBOS.tracer.start_span(
+                {"name": "a new span"}, parent=current_span
+            )
+            # Note: DBOS.tracer.start_span() does not set the new span as the current span. So this log is still attached to the workflow span.
+            DBOS.logger.info("This is a test_workflow")
+            subspan.add_event("greeting_event", {"name": "a new event"})
+            DBOS.tracer.end_span(subspan)

     @DBOS.step()
     async def test_step() -> None:
         DBOS.logger.info("This is a test_step")
         return

-    exporter = InMemorySpanExporter()
-    span_processor = SimpleSpanProcessor(exporter)
-    provider = tracesdk.TracerProvider()
-    provider.add_span_processor(span_processor)
-    dbos_tracer.set_provider(provider)
-
     # Set up in-memory log exporter
     log_exporter = InMemoryLogExporter()  # type: ignore
     log_processor = BatchLogRecordProcessor(log_exporter)

@@ -162,9 +219,12 @@ async def test_spans_async(dbos: DBOS) -> None:

     spans = exporter.get_finished_spans()

-    assert len(spans) == 3
+    assert len(spans) == 4

     for span in spans:
+        if span.name == "manual_span":
+            # Skip the manual span because it was not created by DBOS.tracer
+            continue
         assert span.attributes is not None
         assert span.attributes["applicationVersion"] == GlobalParams.app_version
         assert span.attributes["executorID"] == GlobalParams.executor_id

@@ -174,11 +234,12 @@ async def test_spans_async(dbos: DBOS) -> None:

     assert spans[0].name == test_step.__qualname__
     assert spans[1].name == "a new span"
-    assert spans[2].name == test_workflow.__qualname__
+    assert spans[3].name == test_workflow.__qualname__

     assert spans[0].parent.span_id == spans[2].context.span_id  # type: ignore
     assert spans[1].parent.span_id == spans[2].context.span_id  # type: ignore
-    assert spans[2].parent == None
+    assert spans[2].parent.span_id == spans[3].context.span_id  # type: ignore
+    assert spans[3].parent == None

     # Span ID and trace ID should match the log record
     assert spans[0].context is not None

@@ -188,6 +249,39 @@ async def test_spans_async(dbos: DBOS) -> None:
     assert logs[1].log_record.span_id == spans[2].context.span_id
     assert logs[1].log_record.trace_id == spans[2].context.trace_id

+    # Test the span tree structure
+    basic_spans = {
+        span.context.span_id: BasicSpan(  # pyright: ignore[reportOptionalMemberAccess]
+            content=span.name, parent_id=span.parent.span_id if span.parent else None
+        )
+        for span in spans
+    }
+    root_span = None
+    for basic_span in basic_spans.values():
+        if basic_span.parent_id is None:
+            root_span = basic_span
+        else:
+            parent_id = basic_span.parent_id
+            parent_span = basic_spans[parent_id]
+            parent_span.children.append(basic_span)
+
+    assert len(spans) == 4
+    # Make sure the span tree structure is correct
+    assert root_span == snapshot(
+        BasicSpan(
+            content="test_spans_async.<locals>.test_workflow",
+            children=[
+                BasicSpan(
+                    content="manual_span",
+                    children=[
+                        BasicSpan(content="test_spans_async.<locals>.test_step"),
+                        BasicSpan(content="a new span"),
+                    ],
+                )
+            ],
+        )
+    )
+

 def test_wf_fastapi(dbos_fastapi: Tuple[DBOS, FastAPI]) -> None:
     dbos, app = dbos_fastapi
{dbos-1.14.0a3 → dbos-1.14.0a5}/tests/test_workflow_introspection.py

@@ -828,18 +828,18 @@ async def test_callchild_first_asyncio(dbos: DBOS) -> None:
     async def parentWorkflow() -> str:
         handle = await dbos.start_workflow_async(child_workflow)
         child_id = await handle.get_result()
-        stepOne()
-        stepTwo()
+        await stepOne()
+        await stepTwo()
         return child_id

     @DBOS.step()
-    def stepOne() -> str:
+    async def stepOne() -> str:
         workflow_id = DBOS.workflow_id
         assert workflow_id is not None
         return workflow_id

     @DBOS.step()
-    def stepTwo() -> None:
+    async def stepTwo() -> None:
         return

     @DBOS.workflow()