dbos 1.7.0a5.tar.gz → 1.8.0a3.tar.gz
This diff compares the contents of two package versions publicly released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registry.
- {dbos-1.7.0a5 → dbos-1.8.0a3}/PKG-INFO +1 -1
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_admin_server.py +3 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_client.py +12 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_conductor/conductor.py +6 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_conductor/protocol.py +5 -2
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_context.py +37 -12
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_dbos.py +6 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_error.py +1 -1
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_sys_db.py +50 -23
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_workflow_commands.py +9 -2
- {dbos-1.7.0a5 → dbos-1.8.0a3}/pyproject.toml +1 -1
- {dbos-1.7.0a5 → dbos-1.8.0a3}/tests/test_admin_server.py +32 -9
- {dbos-1.7.0a5 → dbos-1.8.0a3}/tests/test_client.py +20 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/tests/test_queue.py +7 -0
- dbos-1.8.0a3/tests/test_spans.py +272 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/tests/test_workflow_introspection.py +70 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/tests/test_workflow_management.py +1 -1
- dbos-1.7.0a5/tests/test_spans.py +0 -147
- {dbos-1.7.0a5 → dbos-1.8.0a3}/LICENSE +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/README.md +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/__init__.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/__main__.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_app_db.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_classproperty.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_core.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_croniter.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_dbos_config.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_debug.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_docker_pg_helper.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_event_loop.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_fastapi.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_flask.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_kafka.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_kafka_message.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_logger.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_migrations/env.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_migrations/script.py.mako +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_migrations/versions/04ca4f231047_workflow_queues_executor_id.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_migrations/versions/27ac6900c6ad_add_queue_dedup.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_migrations/versions/50f3227f0b4b_fix_job_queue.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_migrations/versions/5c361fc04708_added_system_tables.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_migrations/versions/66478e1b95e5_consolidate_queues.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_migrations/versions/83f3732ae8e7_workflow_timeout.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_migrations/versions/933e86bdac6a_add_queue_priority.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_migrations/versions/a3b18ad34abe_added_triggers.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_migrations/versions/d76646551a6b_job_queue_limiter.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_migrations/versions/d76646551a6c_workflow_queue.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_migrations/versions/d994145b47b6_consolidate_inputs.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_migrations/versions/eab0cc1d9a14_job_queue.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_outcome.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_queue.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_recovery.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_registrations.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_roles.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_scheduler.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_schemas/__init__.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_schemas/application_database.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_schemas/system_database.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_serialization.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_templates/dbos-db-starter/README.md +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_templates/dbos-db-starter/__package/__init__.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_templates/dbos-db-starter/__package/main.py.dbos +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_templates/dbos-db-starter/__package/schema.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_templates/dbos-db-starter/alembic.ini +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_templates/dbos-db-starter/migrations/env.py.dbos +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_templates/dbos-db-starter/migrations/script.py.mako +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_tracer.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_utils.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/cli/_github_init.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/cli/_template_init.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/cli/cli.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/dbos-config.schema.json +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/py.typed +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/tests/__init__.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/tests/atexit_no_ctor.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/tests/atexit_no_launch.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/tests/classdefs.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/tests/client_collateral.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/tests/client_worker.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/tests/conftest.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/tests/dupname_classdefs1.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/tests/dupname_classdefsa.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/tests/more_classdefs.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/tests/queuedworkflow.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/tests/test_async.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/tests/test_classdecorators.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/tests/test_cli.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/tests/test_concurrency.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/tests/test_config.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/tests/test_croniter.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/tests/test_dbos.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/tests/test_debug.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/tests/test_docker_secrets.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/tests/test_failures.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/tests/test_fastapi.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/tests/test_fastapi_roles.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/tests/test_flask.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/tests/test_kafka.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/tests/test_outcome.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/tests/test_package.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/tests/test_scheduler.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/tests/test_schema_migration.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/tests/test_singleton.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/tests/test_sqlalchemy.py +0 -0
- {dbos-1.7.0a5 → dbos-1.8.0a3}/version/__init__.py +0 -0
{dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_admin_server.py

@@ -343,6 +343,8 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
                 offset=filters.get("offset"),
                 sort_desc=filters.get("sort_desc", False),
                 workflow_id_prefix=filters.get("workflow_id_prefix"),
+                load_input=filters.get("load_input", False),
+                load_output=filters.get("load_output", False),
             )
             workflows_output = [
                 conductor_protocol.WorkflowsOutput.from_workflow_information(i)
@@ -367,6 +369,7 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
                 offset=filters.get("offset"),
                 queue_name=filters.get("queue_name"),
                 sort_desc=filters.get("sort_desc", False),
+                load_input=filters.get("load_input", False),
             )
             workflows_output = [
                 conductor_protocol.WorkflowsOutput.from_workflow_information(i)
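Both admin list endpoints now accept the new flags in their JSON filter body, defaulting to False. A minimal request sketch (the localhost:3001 address and the response keys mirror the tests further down; everything else here is illustrative):

import requests

# load_input/load_output default to False on these endpoints, so serialized
# inputs and outputs are omitted unless explicitly requested.
filters = {"load_input": True, "load_output": True}
resp = requests.post("http://localhost:3001/workflows", json=filters, timeout=5)
for wf in resp.json():
    print(wf["WorkflowUUID"], wf["Input"], wf["Output"])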
{dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_client.py

@@ -294,6 +294,8 @@ class DBOSClient:
         offset: Optional[int] = None,
         sort_desc: bool = False,
         workflow_id_prefix: Optional[str] = None,
+        load_input: bool = True,
+        load_output: bool = True,
     ) -> List[WorkflowStatus]:
         return list_workflows(
             self._sys_db,
@@ -308,6 +310,8 @@ class DBOSClient:
             offset=offset,
             sort_desc=sort_desc,
             workflow_id_prefix=workflow_id_prefix,
+            load_input=load_input,
+            load_output=load_output,
         )

     async def list_workflows_async(
@@ -324,6 +328,8 @@ class DBOSClient:
         offset: Optional[int] = None,
         sort_desc: bool = False,
         workflow_id_prefix: Optional[str] = None,
+        load_input: bool = True,
+        load_output: bool = True,
     ) -> List[WorkflowStatus]:
         return await asyncio.to_thread(
             self.list_workflows,
@@ -338,6 +344,8 @@ class DBOSClient:
             offset=offset,
             sort_desc=sort_desc,
             workflow_id_prefix=workflow_id_prefix,
+            load_input=load_input,
+            load_output=load_output,
         )

     def list_queued_workflows(
@@ -351,6 +359,7 @@ class DBOSClient:
         limit: Optional[int] = None,
         offset: Optional[int] = None,
         sort_desc: bool = False,
+        load_input: bool = True,
     ) -> List[WorkflowStatus]:
         return list_queued_workflows(
             self._sys_db,
@@ -362,6 +371,7 @@ class DBOSClient:
             limit=limit,
             offset=offset,
             sort_desc=sort_desc,
+            load_input=load_input,
         )

     async def list_queued_workflows_async(
@@ -375,6 +385,7 @@ class DBOSClient:
         limit: Optional[int] = None,
         offset: Optional[int] = None,
         sort_desc: bool = False,
+        load_input: bool = True,
     ) -> List[WorkflowStatus]:
         return await asyncio.to_thread(
             self.list_queued_workflows,
@@ -386,6 +397,7 @@ class DBOSClient:
             limit=limit,
             offset=offset,
             sort_desc=sort_desc,
+            load_input=load_input,
         )

     def list_workflow_steps(self, workflow_id: str) -> List[StepInfo]:
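Taken together, the _client.py changes let callers skip fetching and deserializing workflow inputs and outputs, which can be large. A minimal usage sketch (the connection string is a placeholder; DBOSClient and the WorkflowStatus fields are as patched above):

from dbos import DBOSClient

client = DBOSClient("postgresql://postgres:dbos@localhost:5432/app")  # hypothetical DSN

# Defaults preserve the old behavior: inputs and outputs are loaded.
full = client.list_workflows(limit=10)

# Opt out of loading the serialized input/output columns entirely.
slim = client.list_workflows(limit=10, load_input=False, load_output=False)
assert all(w.input is None and w.output is None for w in slim)

# Outputs are never loaded for queued workflows, so only the input load is optional.
queued = client.list_queued_workflows(load_input=False)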
{dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_conductor/conductor.py

@@ -223,6 +223,8 @@ class ConductorWebsocket(threading.Thread):
                 body = list_workflows_message.body
                 infos = []
                 try:
+                    load_input = body.get("load_input", False)
+                    load_output = body.get("load_output", False)
                     infos = list_workflows(
                         self.dbos._sys_db,
                         workflow_ids=body["workflow_uuids"],
@@ -235,6 +237,8 @@ class ConductorWebsocket(threading.Thread):
                         limit=body["limit"],
                         offset=body["offset"],
                         sort_desc=body["sort_desc"],
+                        load_input=load_input,
+                        load_output=load_output,
                     )
                 except Exception as e:
                     error_message = f"Exception encountered when listing workflows: {traceback.format_exc()}"
@@ -257,6 +261,7 @@ class ConductorWebsocket(threading.Thread):
                 q_body = list_queued_workflows_message.body
                 infos = []
                 try:
+                    q_load_input = q_body.get("load_input", False)
                     infos = list_queued_workflows(
                         self.dbos._sys_db,
                         start_time=q_body["start_time"],
@@ -267,6 +272,7 @@ class ConductorWebsocket(threading.Thread):
                         offset=q_body["offset"],
                         queue_name=q_body["queue_name"],
                         sort_desc=q_body["sort_desc"],
+                        load_input=q_load_input,
                     )
                 except Exception as e:
                     error_message = f"Exception encountered when listing queued workflows: {traceback.format_exc()}"
{dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_conductor/protocol.py

@@ -110,7 +110,7 @@ class RestartResponse(BaseMessage):
     error_message: Optional[str] = None


-class ListWorkflowsBody(TypedDict):
+class ListWorkflowsBody(TypedDict, total=False):
     workflow_uuids: List[str]
     workflow_name: Optional[str]
     authenticated_user: Optional[str]
@@ -121,6 +121,8 @@ class ListWorkflowsBody(TypedDict):
     limit: Optional[int]
     offset: Optional[int]
     sort_desc: bool
+    load_input: bool
+    load_output: bool


 @dataclass
@@ -209,7 +211,7 @@ class ListWorkflowsResponse(BaseMessage):
     error_message: Optional[str] = None


-class ListQueuedWorkflowsBody(TypedDict):
+class ListQueuedWorkflowsBody(TypedDict, total=False):
     workflow_name: Optional[str]
     start_time: Optional[str]
     end_time: Optional[str]
@@ -218,6 +220,7 @@ class ListQueuedWorkflowsBody(TypedDict):
     limit: Optional[int]
     offset: Optional[int]
     sort_desc: bool
+    load_input: bool


 @dataclass
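Switching the request bodies to total=False makes every key optional, which is what lets an older conductor omit the new load_input/load_output fields while the server supplies defaults with .get(...) (as in the conductor.py hunks above). A trimmed-down sketch of the semantics:

from typing import TypedDict

class ListWorkflowsBody(TypedDict, total=False):
    sort_desc: bool
    load_input: bool
    load_output: bool

body: ListWorkflowsBody = {"sort_desc": True}  # valid: the other keys may be absent
load_input = body.get("load_input", False)     # False whenever the key is missing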
{dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_context.py

@@ -10,7 +10,7 @@ from enum import Enum
 from types import TracebackType
 from typing import List, Literal, Optional, Type, TypedDict

-from opentelemetry.trace import Span, Status, StatusCode
+from opentelemetry.trace import Span, Status, StatusCode, use_span
 from sqlalchemy.orm import Session

 from dbos._utils import GlobalParams
@@ -68,6 +68,20 @@ class StepStatus:
     max_attempts: Optional[int]


+@dataclass
+class ContextSpan:
+    """
+    A span that is used to track the context of a workflow or step execution.
+
+    Attributes:
+        span: The OpenTelemetry span object.
+        context_manager: The context manager that is used to manage the span's lifecycle.
+    """
+
+    span: Span
+    context_manager: AbstractContextManager[Span]
+
+
 class DBOSContext:
     def __init__(self) -> None:
         self.executor_id = GlobalParams.executor_id
@@ -86,7 +100,7 @@ class DBOSContext:
         self.curr_step_function_id: int = -1
         self.curr_tx_function_id: int = -1
         self.sql_session: Optional[Session] = None
-        self.spans: list[Span] = []
+        self.context_spans: list[ContextSpan] = []

         self.authenticated_user: Optional[str] = None
         self.authenticated_roles: Optional[List[str]] = None
@@ -202,8 +216,8 @@ class DBOSContext:
         self._end_span(exc_value)

     def get_current_span(self) -> Optional[Span]:
-        if len(self.spans) > 0:
-            return self.spans[-1]
+        if len(self.context_spans) > 0:
+            return self.context_spans[-1].span
         return None

     def _start_span(self, attributes: TracedAttributes) -> None:
@@ -218,27 +232,38 @@ class DBOSContext:
             )
             attributes["authenticatedUserAssumedRole"] = self.assumed_role
         span = dbos_tracer.start_span(
-            attributes,
+            attributes,
+            parent=self.context_spans[-1].span if len(self.context_spans) > 0 else None,
+        )
+        # Activate the current span
+        cm = use_span(
+            span,
+            end_on_exit=False,
+            record_exception=False,
+            set_status_on_exception=False,
         )
-        self.spans.append(span)
+        self.context_spans.append(ContextSpan(span, cm))
+        cm.__enter__()

     def _end_span(self, exc_value: Optional[BaseException]) -> None:
+        context_span = self.context_spans.pop()
         if exc_value is None:
-            self.spans[-1].set_status(Status(StatusCode.OK))
+            context_span.span.set_status(Status(StatusCode.OK))
         else:
-            self.spans[-1].set_status(
+            context_span.span.set_status(
                 Status(StatusCode.ERROR, description=str(exc_value))
             )
-        dbos_tracer.end_span(self.spans.pop())
+        dbos_tracer.end_span(context_span.span)
+        context_span.context_manager.__exit__(None, None, None)

     def set_authentication(
         self, user: Optional[str], roles: Optional[List[str]]
     ) -> None:
         self.authenticated_user = user
         self.authenticated_roles = roles
-        if user is not None and len(self.spans) > 0:
-            self.spans[-1].set_attribute("authenticatedUser", user)
-            self.spans[-1].set_attribute(
+        if user is not None and len(self.context_spans) > 0:
+            self.context_spans[-1].span.set_attribute("authenticatedUser", user)
+            self.context_spans[-1].span.set_attribute(
                 "authenticatedUserRoles", json.dumps(roles) if roles is not None else ""
             )
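The _context.py change does two things: it parents each new span on the innermost DBOS span, and it activates the span in the OpenTelemetry context via use_span so user code and instrumented libraries see it as the current span. A minimal sketch of that activation pattern using only public OpenTelemetry APIs (the tracer setup is illustrative, not DBOS's own):

from opentelemetry import trace
from opentelemetry.sdk.trace import TracerProvider

tracer = TracerProvider().get_tracer("example")

span = tracer.start_span("workflow")          # created, but not yet "current"
cm = trace.use_span(span, end_on_exit=False)  # activate without tying its lifetime to the context
cm.__enter__()
assert trace.get_current_span() is span       # callers now see the workflow span
with tracer.start_as_current_span("step"):    # automatically parented under "workflow"
    pass
cm.__exit__(None, None, None)
span.end()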
{dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_dbos.py

@@ -1032,6 +1032,8 @@ class DBOS:
         offset: Optional[int] = None,
         sort_desc: bool = False,
         workflow_id_prefix: Optional[str] = None,
+        load_input: bool = True,
+        load_output: bool = True,
     ) -> List[WorkflowStatus]:
         def fn() -> List[WorkflowStatus]:
             return list_workflows(
@@ -1047,6 +1049,8 @@ class DBOS:
                 offset=offset,
                 sort_desc=sort_desc,
                 workflow_id_prefix=workflow_id_prefix,
+                load_input=load_input,
+                load_output=load_output,
             )

         return _get_dbos_instance()._sys_db.call_function_as_step(
@@ -1065,6 +1069,7 @@ class DBOS:
         limit: Optional[int] = None,
         offset: Optional[int] = None,
         sort_desc: bool = False,
+        load_input: bool = True,
     ) -> List[WorkflowStatus]:
         def fn() -> List[WorkflowStatus]:
             return list_queued_workflows(
@@ -1077,6 +1082,7 @@ class DBOS:
                 limit=limit,
                 offset=offset,
                 sort_desc=sort_desc,
+                load_input=load_input,
             )

         return _get_dbos_instance()._sys_db.call_function_as_step(
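DBOS.list_workflows and DBOS.list_queued_workflows gain the same flags; since they run through call_function_as_step, the (possibly slimmer) result is checkpointed like any other step when called inside a workflow. A short sketch, assuming an application that has already called DBOS.launch():

from dbos import DBOS

# List recent workflows without pulling serialized inputs/outputs from Postgres.
statuses = DBOS.list_workflows(limit=50, load_input=False, load_output=False)
for s in statuses:
    print(s.workflow_id, s.status, s.input)  # s.input is None when loading is skipped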
{dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_error.py

@@ -126,7 +126,7 @@ class DBOSDeadLetterQueueError(DBOSException):

     def __init__(self, wf_id: str, max_retries: int):
         super().__init__(
-            f"Workflow {wf_id} has been moved to the dead-letter queue after exceeding the maximum of
+            f"Workflow {wf_id} has been moved to the dead-letter queue after exceeding the maximum of {max_retries} retries",
             dbos_error_code=DBOSErrorCode.DeadLetterQueueError.value,
         )
{dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_sys_db.py

@@ -437,7 +437,14 @@ class SystemDatabase:

         # Values to update when a row already exists for this workflow
         update_values: dict[str, Any] = {
-            "recovery_attempts": SystemSchema.workflow_status.c.recovery_attempts + 1,
+            "recovery_attempts": sa.case(
+                (
+                    SystemSchema.workflow_status.c.status
+                    != WorkflowStatusString.ENQUEUED.value,
+                    SystemSchema.workflow_status.c.recovery_attempts + 1,
+                ),
+                else_=SystemSchema.workflow_status.c.recovery_attempts,
+            ),
             "updated_at": func.extract("epoch", func.now()) * 1000,
         }
         # Don't update an existing executor ID when enqueueing a workflow.
@@ -788,11 +795,17 @@ class SystemDatabase:
                 pass  # CB: I guess we're assuming the WF will show up eventually.
             time.sleep(1)

-    def get_workflows(self, input: GetWorkflowsInput) -> List[WorkflowStatus]:
+    def get_workflows(
+        self,
+        input: GetWorkflowsInput,
+        *,
+        load_input: bool = True,
+        load_output: bool = True,
+    ) -> List[WorkflowStatus]:
         """
         Retrieve a list of workflows result and inputs based on the input criteria. The result is a list of external-facing workflow status objects.
         """
-        query = sa.select(
+        load_columns = [
             SystemSchema.workflow_status.c.workflow_uuid,
             SystemSchema.workflow_status.c.status,
             SystemSchema.workflow_status.c.name,
@@ -808,12 +821,16 @@ class SystemDatabase:
             SystemSchema.workflow_status.c.updated_at,
             SystemSchema.workflow_status.c.application_version,
             SystemSchema.workflow_status.c.application_id,
-            SystemSchema.workflow_status.c.inputs,
-            SystemSchema.workflow_status.c.output,
-            SystemSchema.workflow_status.c.error,
             SystemSchema.workflow_status.c.workflow_deadline_epoch_ms,
             SystemSchema.workflow_status.c.workflow_timeout_ms,
-        )
+        ]
+        if load_input:
+            load_columns.append(SystemSchema.workflow_status.c.inputs)
+        if load_output:
+            load_columns.append(SystemSchema.workflow_status.c.output)
+            load_columns.append(SystemSchema.workflow_status.c.error)
+
+        query = sa.select(*load_columns)
         if input.sort_desc:
             query = query.order_by(SystemSchema.workflow_status.c.created_at.desc())
         else:
@@ -880,29 +897,35 @@ class SystemDatabase:
             info.updated_at = row[12]
             info.app_version = row[13]
             info.app_id = row[14]
+            info.workflow_deadline_epoch_ms = row[15]
+            info.workflow_timeout_ms = row[16]

+            raw_input = row[17] if load_input else None
+            raw_output = row[18] if load_output else None
+            raw_error = row[19] if load_output else None
             inputs, output, exception = _serialization.safe_deserialize(
                 info.workflow_id,
-                serialized_input=row[15],
-                serialized_output=row[16],
-                serialized_exception=row[17],
+                serialized_input=raw_input,
+                serialized_output=raw_output,
+                serialized_exception=raw_error,
             )
             info.input = inputs
             info.output = output
             info.error = exception
-            info.workflow_deadline_epoch_ms = row[18]
-            info.workflow_timeout_ms = row[19]

             infos.append(info)
         return infos

     def get_queued_workflows(
-        self, input: GetQueuedWorkflowsInput
+        self,
+        input: GetQueuedWorkflowsInput,
+        *,
+        load_input: bool = True,
     ) -> List[WorkflowStatus]:
         """
         Retrieve a list of queued workflows result and inputs based on the input criteria. The result is a list of external-facing workflow status objects.
         """
-        query = sa.select(
+        load_columns = [
             SystemSchema.workflow_status.c.workflow_uuid,
             SystemSchema.workflow_status.c.status,
             SystemSchema.workflow_status.c.name,
@@ -918,12 +941,13 @@ class SystemDatabase:
             SystemSchema.workflow_status.c.updated_at,
             SystemSchema.workflow_status.c.application_version,
             SystemSchema.workflow_status.c.application_id,
-            SystemSchema.workflow_status.c.inputs,
-            SystemSchema.workflow_status.c.output,
-            SystemSchema.workflow_status.c.error,
             SystemSchema.workflow_status.c.workflow_deadline_epoch_ms,
             SystemSchema.workflow_status.c.workflow_timeout_ms,
-        ).where(
+        ]
+        if load_input:
+            load_columns.append(SystemSchema.workflow_status.c.inputs)
+
+        query = sa.select(*load_columns).where(
             sa.and_(
                 SystemSchema.workflow_status.c.queue_name.isnot(None),
                 SystemSchema.workflow_status.c.status.in_(["ENQUEUED", "PENDING"]),
@@ -984,18 +1008,21 @@ class SystemDatabase:
             info.updated_at = row[12]
             info.app_version = row[13]
             info.app_id = row[14]
+            info.workflow_deadline_epoch_ms = row[15]
+            info.workflow_timeout_ms = row[16]
+
+            raw_input = row[17] if load_input else None

+            # Error and Output are not loaded because they should always be None for queued workflows.
             inputs, output, exception = _serialization.safe_deserialize(
                 info.workflow_id,
-                serialized_input=row[15],
-                serialized_output=row[16],
-                serialized_exception=row[17],
+                serialized_input=raw_input,
+                serialized_output=None,
+                serialized_exception=None,
             )
             info.input = inputs
             info.output = output
             info.error = exception
-            info.workflow_deadline_epoch_ms = row[18]
-            info.workflow_timeout_ms = row[19]

             infos.append(info)
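The sa.case expression in the first _sys_db.py hunk stops counting recovery attempts while a workflow is still ENQUEUED, so re-enqueueing a deduplicated workflow no longer pushes it toward the dead-letter queue (exercised by the test_queue.py addition below). A standalone sketch of what such an expression compiles to (column names abbreviated for illustration):

import sqlalchemy as sa

status = sa.column("status")
recovery_attempts = sa.column("recovery_attempts")

# Only bump the counter once the workflow has left the ENQUEUED state.
expr = sa.case(
    (status != "ENQUEUED", recovery_attempts + 1),
    else_=recovery_attempts,
)
print(expr.compile(compile_kwargs={"literal_binds": True}))
# Roughly: CASE WHEN status != 'ENQUEUED' THEN recovery_attempts + 1 ELSE recovery_attempts END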
{dbos-1.7.0a5 → dbos-1.8.0a3}/dbos/_workflow_commands.py

@@ -33,6 +33,8 @@ def list_workflows(
     offset: Optional[int] = None,
     sort_desc: bool = False,
     workflow_id_prefix: Optional[str] = None,
+    load_input: bool = True,
+    load_output: bool = True,
 ) -> List[WorkflowStatus]:
     input = GetWorkflowsInput()
     input.workflow_ids = workflow_ids
@@ -47,7 +49,9 @@ def list_workflows(
     input.sort_desc = sort_desc
     input.workflow_id_prefix = workflow_id_prefix

-    infos: List[WorkflowStatus] = sys_db.get_workflows(input)
+    infos: List[WorkflowStatus] = sys_db.get_workflows(
+        input, load_input=load_input, load_output=load_output
+    )

     return infos

@@ -63,6 +67,7 @@ def list_queued_workflows(
     limit: Optional[int] = None,
     offset: Optional[int] = None,
     sort_desc: bool = False,
+    load_input: bool = True,
 ) -> List[WorkflowStatus]:
     input: GetQueuedWorkflowsInput = {
         "queue_name": queue_name,
@@ -75,7 +80,9 @@ def list_queued_workflows(
         "sort_desc": sort_desc,
     }

-    infos: List[WorkflowStatus] = sys_db.get_queued_workflows(input)
+    infos: List[WorkflowStatus] = sys_db.get_queued_workflows(
+        input, load_input=load_input
+    )
     return infos

{dbos-1.7.0a5 → dbos-1.8.0a3}/tests/test_admin_server.py

@@ -94,7 +94,7 @@ def test_deactivate(dbos: DBOS, config: DBOSConfig) -> None:
         return 5

     # Let the scheduled workflow run
-    time.sleep(
+    time.sleep(5)
     val = wf_counter
     assert val > 0
     # Deactivate--scheduled workflow should stop
@@ -103,7 +103,7 @@ def test_deactivate(dbos: DBOS, config: DBOSConfig) -> None:
     for event in dbos.poller_stop_events:
         assert event.is_set()
     # Verify the scheduled workflow does not run anymore
-    time.sleep(
+    time.sleep(5)
     assert wf_counter <= val + 1
     # Enqueue a workflow, verify it still runs
     assert queue.enqueue(regular_workflow).get_result() == 5
@@ -512,8 +512,9 @@ def test_list_workflows(dbos: DBOS) -> None:
     assert workflows[0]["AuthenticatedUser"] is None
     assert workflows[0]["AssumedRole"] is None
     assert workflows[0]["AuthenticatedRoles"] is None
-
-    assert workflows[0]["
+    # By default, input and output are not loaded
+    assert workflows[0]["Input"] is None
+    assert workflows[0]["Output"] is None
     assert workflows[0]["Error"] is None
     assert workflows[0]["CreatedAt"] is not None and len(workflows[0]["CreatedAt"]) > 0
     assert workflows[0]["UpdatedAt"] is not None and len(workflows[0]["UpdatedAt"]) > 0
@@ -521,6 +522,21 @@ def test_list_workflows(dbos: DBOS) -> None:
     assert workflows[0]["ApplicationVersion"] == GlobalParams.app_version
     assert workflows[0]["ExecutorID"] == GlobalParams.executor_id

+    # Only load input and output as requested
+    filters = {
+        "workflow_uuids": workflow_ids,
+        "start_time": start_time_filter,
+        "load_input": True,
+        "load_output": True,
+    }
+    response = requests.post("http://localhost:3001/workflows", json=filters, timeout=5)
+    assert response.status_code == 200
+
+    workflows = response.json()
+    assert len(workflows) == 1
+    assert workflows[0]["Output"] is not None and len(workflows[0]["Output"]) > 0
+    assert workflows[0]["Input"] is not None and len(workflows[0]["Input"]) > 0
+
     # Test POST /workflows without filters
     response = requests.post("http://localhost:3001/workflows", json={}, timeout=5)
     assert response.status_code == 200
@@ -777,11 +793,7 @@ def test_queued_workflows_endpoint(dbos: DBOS) -> None:
     assert queued_workflows[0]["AuthenticatedUser"] is None
     assert queued_workflows[0]["AssumedRole"] is None
     assert queued_workflows[0]["AuthenticatedRoles"] is None
-    assert (
-        queued_workflows[0]["Input"] is not None
-        and len(queued_workflows[0]["Input"]) > 0
-    )
-    assert "1" in queued_workflows[0]["Input"]
+    assert queued_workflows[0]["Input"] is None
     assert queued_workflows[0]["Output"] is None
     assert queued_workflows[0]["Error"] is None
     assert (
@@ -808,6 +820,17 @@ def test_queued_workflows_endpoint(dbos: DBOS) -> None:
         filtered_workflows[0]["WorkflowUUID"] == handles[2].workflow_id
     ), "First workflow should be the last one enqueued"

+    # Only load input as requested
+    filters = {
+        "load_input": True,
+    }
+    response = requests.post("http://localhost:3001/queues", json=filters, timeout=5)
+    assert response.status_code == 200
+    filtered_workflows = response.json()
+    assert len(filtered_workflows) == len(handles)
+    assert filtered_workflows[0]["Input"] is not None
+    assert "1" in filtered_workflows[0]["Input"]
+
     # Test all filters
     filters = {
         "workflow_name": blocking_workflow.__qualname__,
{dbos-1.7.0a5 → dbos-1.8.0a3}/tests/test_client.py

@@ -66,6 +66,16 @@ def test_client_enqueue_and_get_result(dbos: DBOS, client: DBOSClient) -> None:
     assert len(list_results) == 1
     assert list_results[0].workflow_id == wfid
     assert list_results[0].status == "SUCCESS"
+    assert list_results[0].output == result
+    assert list_results[0].input is not None
+
+    # Skip loading input and output
+    list_results = client.list_workflows(load_input=False, load_output=False)
+    assert len(list_results) == 1
+    assert list_results[0].workflow_id == wfid
+    assert list_results[0].status == "SUCCESS"
+    assert list_results[0].output is None
+    assert list_results[0].input is None


 def test_enqueue_with_timeout(dbos: DBOS, client: DBOSClient) -> None:
@@ -529,6 +539,16 @@ def test_enqueue_with_deduplication(dbos: DBOS, client: DBOSClient) -> None:
     assert len(list_results) == 1
     assert list_results[0].workflow_id == wfid
     assert list_results[0].status in ["PENDING", "ENQUEUED"]
+    assert list_results[0].input is not None
+    assert list_results[0].output is None
+
+    # Skip loading input
+    list_results = client.list_queued_workflows(load_input=False)
+    assert len(list_results) == 1
+    assert list_results[0].workflow_id == wfid
+    assert list_results[0].status in ["PENDING", "ENQUEUED"]
+    assert list_results[0].input is None
+    assert list_results[0].output is None

     assert handle.get_result() == "abc"
     assert handle2.get_result() == "abc"
{dbos-1.7.0a5 → dbos-1.8.0a3}/tests/test_queue.py

@@ -1016,6 +1016,13 @@ def test_dlq_enqueued_workflows(dbos: DBOS) -> None:
     blocked_handle = queue.enqueue(blocked_workflow)
     regular_handle = queue.enqueue(regular_workflow)

+    # Enqueue the blocked workflow repeatedly, verify recovery attempts is not increased
+    for _ in range(max_recovery_attempts):
+        with SetWorkflowID(blocked_handle.workflow_id):
+            queue.enqueue(blocked_workflow)
+    recovery_attempts = blocked_handle.get_status().recovery_attempts
+    assert recovery_attempts is not None and recovery_attempts <= 1
+
     # Verify that the blocked workflow starts and is PENDING while the regular workflow remains ENQUEUED.
     start_event.wait()
     assert blocked_handle.get_status().status == WorkflowStatusString.PENDING.value