dbos 1.6.0a5__tar.gz → 1.7.0a3__tar.gz
This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
- {dbos-1.6.0a5 → dbos-1.7.0a3}/PKG-INFO +1 -1
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_admin_server.py +17 -8
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_dbos.py +11 -1
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_queue.py +18 -3
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_sys_db.py +7 -5
- {dbos-1.6.0a5 → dbos-1.7.0a3}/pyproject.toml +1 -1
- {dbos-1.6.0a5 → dbos-1.7.0a3}/tests/test_admin_server.py +217 -15
- {dbos-1.6.0a5 → dbos-1.7.0a3}/tests/test_queue.py +8 -8
- {dbos-1.6.0a5 → dbos-1.7.0a3}/tests/test_scheduler.py +8 -8
- {dbos-1.6.0a5 → dbos-1.7.0a3}/LICENSE +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/README.md +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/__init__.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/__main__.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_app_db.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_classproperty.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_client.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_conductor/conductor.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_conductor/protocol.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_context.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_core.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_croniter.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_dbos_config.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_debug.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_docker_pg_helper.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_error.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_event_loop.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_fastapi.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_flask.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_kafka.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_kafka_message.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_logger.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_migrations/env.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_migrations/script.py.mako +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_migrations/versions/04ca4f231047_workflow_queues_executor_id.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_migrations/versions/27ac6900c6ad_add_queue_dedup.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_migrations/versions/50f3227f0b4b_fix_job_queue.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_migrations/versions/5c361fc04708_added_system_tables.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_migrations/versions/66478e1b95e5_consolidate_queues.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_migrations/versions/83f3732ae8e7_workflow_timeout.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_migrations/versions/933e86bdac6a_add_queue_priority.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_migrations/versions/a3b18ad34abe_added_triggers.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_migrations/versions/d76646551a6b_job_queue_limiter.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_migrations/versions/d76646551a6c_workflow_queue.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_migrations/versions/d994145b47b6_consolidate_inputs.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_migrations/versions/eab0cc1d9a14_job_queue.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_outcome.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_recovery.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_registrations.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_roles.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_scheduler.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_schemas/__init__.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_schemas/application_database.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_schemas/system_database.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_serialization.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_templates/dbos-db-starter/README.md +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_templates/dbos-db-starter/__package/__init__.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_templates/dbos-db-starter/__package/main.py.dbos +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_templates/dbos-db-starter/__package/schema.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_templates/dbos-db-starter/alembic.ini +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_templates/dbos-db-starter/migrations/env.py.dbos +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_templates/dbos-db-starter/migrations/script.py.mako +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_tracer.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_utils.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_workflow_commands.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/cli/_github_init.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/cli/_template_init.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/cli/cli.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/dbos-config.schema.json +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/py.typed +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/tests/__init__.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/tests/atexit_no_ctor.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/tests/atexit_no_launch.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/tests/classdefs.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/tests/client_collateral.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/tests/client_worker.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/tests/conftest.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/tests/dupname_classdefs1.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/tests/dupname_classdefsa.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/tests/more_classdefs.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/tests/queuedworkflow.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/tests/test_async.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/tests/test_classdecorators.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/tests/test_cli.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/tests/test_client.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/tests/test_concurrency.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/tests/test_config.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/tests/test_croniter.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/tests/test_dbos.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/tests/test_debug.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/tests/test_docker_secrets.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/tests/test_failures.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/tests/test_fastapi.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/tests/test_fastapi_roles.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/tests/test_flask.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/tests/test_kafka.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/tests/test_outcome.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/tests/test_package.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/tests/test_schema_migration.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/tests/test_singleton.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/tests/test_spans.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/tests/test_sqlalchemy.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/tests/test_workflow_introspection.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/tests/test_workflow_management.py +0 -0
- {dbos-1.6.0a5 → dbos-1.7.0a3}/version/__init__.py +0 -0
{dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_admin_server.py

@@ -3,12 +3,14 @@ from __future__ import annotations
 import json
 import re
 import threading
+from dataclasses import asdict
 from functools import partial
 from http.server import BaseHTTPRequestHandler, ThreadingHTTPServer
 from typing import TYPE_CHECKING, Any, Dict, List, Optional, TypedDict

 from dbos._workflow_commands import garbage_collect, global_timeout

+from ._conductor import protocol as conductor_protocol
 from ._context import SetWorkflowID
 from ._error import DBOSException
 from ._logger import dbos_logger
@@ -326,20 +328,24 @@ class AdminRequestHandler(BaseHTTPRequestHandler):

     def _handle_workflows(self, filters: Dict[str, Any]) -> None:
         workflows = self.dbos.list_workflows(
-            workflow_ids=filters.get("
-
+            workflow_ids=filters.get("workflow_uuids"),
+            user=filters.get("authenticated_user"),
             start_time=filters.get("start_time"),
             end_time=filters.get("end_time"),
             status=filters.get("status"),
             app_version=filters.get("application_version"),
+            name=filters.get("workflow_name"),
             limit=filters.get("limit"),
             offset=filters.get("offset"),
             sort_desc=filters.get("sort_desc", False),
             workflow_id_prefix=filters.get("workflow_id_prefix"),
         )
-
+        workflows_output = [
+            conductor_protocol.WorkflowsOutput.from_workflow_information(i)
+            for i in workflows
+        ]
         response_body = json.dumps(
-            [workflow.__dict__ for workflow in
+            [workflow.__dict__ for workflow in workflows_output]
         ).encode("utf-8")
         self.send_response(200)
         self.send_header("Content-Type", "application/json")

@@ -349,18 +355,21 @@ class AdminRequestHandler(BaseHTTPRequestHandler):

     def _handle_queued_workflows(self, filters: Dict[str, Any]) -> None:
         workflows = self.dbos.list_queued_workflows(
-            queue_name=filters.get("queue_name"),
-            name=filters.get("name"),
             start_time=filters.get("start_time"),
             end_time=filters.get("end_time"),
             status=filters.get("status"),
+            name=filters.get("workflow_name"),
             limit=filters.get("limit"),
             offset=filters.get("offset"),
+            queue_name=filters.get("queue_name"),
             sort_desc=filters.get("sort_desc", False),
         )
-
+        workflows_output = [
+            conductor_protocol.WorkflowsOutput.from_workflow_information(i)
+            for i in workflows
+        ]
         response_body = json.dumps(
-            [workflow.__dict__ for workflow in
+            [workflow.__dict__ for workflow in workflows_output]
         ).encode("utf-8")
         self.send_response(200)
         self.send_header("Content-Type", "application/json")
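Both handlers now serialize Conductor-shaped records, so a client of the admin API sends the same filter keys the handlers read via filters.get(...). A minimal sketch of querying the endpoint, assuming the admin server is listening on its default port 3001 (as the tests below do); the filter values are illustrative:

    import requests

    # Illustrative filter payload; keys mirror those read by _handle_workflows above
    filters = {
        "workflow_name": "my_app.my_workflow",  # hypothetical workflow name
        "status": ["SUCCESS"],
        "sort_desc": True,
        "limit": 10,
    }
    resp = requests.post("http://localhost:3001/workflows", json=filters, timeout=5)
    resp.raise_for_status()
    for wf in resp.json():
        # Fields follow conductor_protocol.WorkflowsOutput (e.g. WorkflowUUID, Status)
        print(wf["WorkflowUUID"], wf["Status"])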
{dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_dbos.py

@@ -7,7 +7,6 @@ import inspect
 import os
 import sys
 import threading
-import traceback
 import uuid
 from concurrent.futures import ThreadPoolExecutor
 from logging import Logger

@@ -28,6 +27,7 @@ from typing import (
 )

 from opentelemetry.trace import Span
+from rich import print

 from dbos._conductor.conductor import ConductorWebsocket
 from dbos._sys_db import WorkflowStatus

@@ -517,6 +517,16 @@ class DBOS:

         dbos_logger.info("DBOS launched!")

+        if self.conductor_key is None and os.environ.get("DBOS__CLOUD") != "true":
+            # Hint the user to open the URL to register and set up Conductor
+            app_name = self._config["name"]
+            conductor_registration_url = (
+                f"https://console.dbos.dev/self-host?appname={app_name}"
+            )
+            print(
+                f"[bold]To view and manage workflows, connect to DBOS Conductor at:[/bold] [bold blue]{conductor_registration_url}[/bold blue]"
+            )
+
         # Flush handlers and add OTLP to all loggers if enabled
         # to enable their export in DBOS Cloud
         for handler in dbos_logger.handlers:
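The launch hook only prints the Conductor hint when no Conductor key is configured and the process is not running on DBOS Cloud. A standalone sketch of that gating logic, with the config reduced to a plain dict and the helper name invented for illustration; rich's print renders the [bold]/[bold blue] markup tags:

    import os
    from rich import print  # rich's print renders console markup like [bold]

    def maybe_print_conductor_hint(conductor_key, config):  # hypothetical helper
        # Suppressed when Conductor is already configured or the app runs on DBOS Cloud
        if conductor_key is None and os.environ.get("DBOS__CLOUD") != "true":
            url = f"https://console.dbos.dev/self-host?appname={config['name']}"
            print(f"[bold]Connect to DBOS Conductor at:[/bold] [bold blue]{url}[/bold blue]")

    maybe_print_conductor_hint(None, {"name": "my-app"})  # prints the hint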
{dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_queue.py

@@ -1,3 +1,4 @@
+import random
 import threading
 from typing import TYPE_CHECKING, Any, Callable, Coroutine, Optional, TypedDict

@@ -94,8 +95,12 @@ class Queue:


 def queue_thread(stop_event: threading.Event, dbos: "DBOS") -> None:
+    polling_interval = 1.0
+    min_polling_interval = 1.0
+    max_polling_interval = 120.0
     while not stop_event.is_set():
-
+        # Wait for the polling interval with jitter
+        if stop_event.wait(timeout=polling_interval * random.uniform(0.95, 1.05)):
             return
         queues = dict(dbos._registry.queue_info_map)
         for _, queue in queues.items():

@@ -106,12 +111,22 @@ def queue_thread(stop_event: threading.Event, dbos: "DBOS") -> None:
                 for id in wf_ids:
                     execute_workflow_by_id(dbos, id)
             except OperationalError as e:
-
-                if not isinstance(
+                if isinstance(
                     e.orig, (errors.SerializationFailure, errors.LockNotAvailable)
                 ):
+                    # If a serialization error is encountered, increase the polling interval
+                    polling_interval = min(
+                        max_polling_interval,
+                        polling_interval * 2.0,
+                    )
+                    dbos.logger.warning(
+                        f"Contention detected in queue thread for {queue.name}. Increasing polling interval to {polling_interval:.2f}."
+                    )
+                else:
                     dbos.logger.warning(f"Exception encountered in queue thread: {e}")
             except Exception as e:
                 if not stop_event.is_set():
                     # Only print the error if the thread is not stopping
                     dbos.logger.warning(f"Exception encountered in queue thread: {e}")
+        # Attempt to scale back the polling interval on each iteration
+        polling_interval = max(min_polling_interval, polling_interval * 0.9)
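The queue thread now adapts its polling rate: it doubles the interval (capped at 120 s) whenever Postgres reports a serialization failure or an unavailable lock, decays it by 10% on every iteration, and jitters each wait by ±5% so concurrent pollers drift apart. A self-contained sketch of that loop under those assumptions, with the DBOS internals stubbed out:

    import random
    import threading

    class ContentionError(Exception):
        """Hypothetical stand-in for SerializationFailure/LockNotAvailable."""

    MIN_INTERVAL, MAX_INTERVAL = 1.0, 120.0

    def poll_loop(stop_event: threading.Event, poll_once) -> None:
        interval = MIN_INTERVAL
        while not stop_event.is_set():
            # Jittered wait desynchronizes pollers; wait() returns True once stop is set
            if stop_event.wait(timeout=interval * random.uniform(0.95, 1.05)):
                return
            try:
                poll_once()
            except ContentionError:
                # Back off exponentially while the database reports contention
                interval = min(MAX_INTERVAL, interval * 2.0)
            # Decay back toward the floor on every quiet iteration
            interval = max(MIN_INTERVAL, interval * 0.9)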
{dbos-1.6.0a5 → dbos-1.7.0a3}/dbos/_sys_db.py

@@ -1650,7 +1650,7 @@ class SystemDatabase:
             return []

         # Compute max_tasks, the number of workflows that can be dequeued given local and global concurrency limits,
-        max_tasks =
+        max_tasks = 100  # To minimize contention with large queues, never dequeue more than 100 tasks
         if queue.worker_concurrency is not None or queue.concurrency is not None:
             # Count how many workflows on this queue are currently PENDING both locally and globally.
             pending_tasks_query = (

@@ -1694,6 +1694,7 @@ class SystemDatabase:

         # Retrieve the first max_tasks workflows in the queue.
         # Only retrieve workflows of the local version (or without version set)
+        skip_locks = queue.concurrency is None
         query = (
             sa.select(
                 SystemSchema.workflow_status.c.workflow_uuid,

@@ -1711,7 +1712,10 @@ class SystemDatabase:
                     SystemSchema.workflow_status.c.application_version.is_(None),
                 )
             )
-
+            # Unless global concurrency is set, use skip_locked to only select
+            # rows that can be locked. If global concurrency is set, use no_wait
+            # to ensure all processes have a consistent view of the table.
+            .with_for_update(skip_locked=skip_locks, nowait=(not skip_locks))
         )
         if queue.priority_enabled:
             query = query.order_by(

@@ -1720,9 +1724,7 @@ class SystemDatabase:
             )
         else:
             query = query.order_by(SystemSchema.workflow_status.c.created_at.asc())
-
-        if max_tasks != float("inf"):
-            query = query.limit(int(max_tasks))
+        query = query.limit(int(max_tasks))

         rows = c.execute(query).fetchall()

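The dequeue query now picks its row-locking mode from the queue's configuration: SKIP LOCKED when only per-worker limits apply (workers safely grab disjoint rows), NOWAIT when a global concurrency limit means every worker must see a consistent view of the table or fail fast and retry. A minimal SQLAlchemy sketch of the two modes against a hypothetical stand-in table:

    import sqlalchemy as sa

    metadata = sa.MetaData()
    # Hypothetical stand-in for SystemSchema.workflow_status
    workflow_status = sa.Table(
        "workflow_status", metadata,
        sa.Column("workflow_uuid", sa.Text, primary_key=True),
        sa.Column("created_at", sa.BigInteger),
    )

    def dequeue_query(global_concurrency_set: bool, max_tasks: int = 100):
        skip_locks = not global_concurrency_set
        return (
            sa.select(workflow_status.c.workflow_uuid)
            # SKIP LOCKED: each worker takes unlocked rows and moves on.
            # NOWAIT: raise immediately instead of waiting on a lock, so
            # workers retry against a consistent view under global limits.
            .with_for_update(skip_locked=skip_locks, nowait=not skip_locks)
            .order_by(workflow_status.c.created_at.asc())
            .limit(max_tasks)
        )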
{dbos-1.6.0a5 → dbos-1.7.0a3}/tests/test_admin_server.py

@@ -3,7 +3,8 @@ import socket
 import threading
 import time
 import uuid
-from datetime import datetime, timezone
+from datetime import datetime, timedelta, timezone
+from typing import Any, Dict

 import pytest
 import requests

@@ -462,13 +463,13 @@ def test_list_workflows(dbos: DBOS) -> None:
         pass

     @DBOS.workflow()
-    def test_workflow_2() ->
-
+    def test_workflow_2(my_time: datetime) -> str:
+        return DBOS.workflow_id + " completed at " + my_time.isoformat()

     # Start workflows
     handle_1 = DBOS.start_workflow(test_workflow_1)
     time.sleep(2)  # Sleep for 2 seconds between workflows
-    handle_2 = DBOS.start_workflow(test_workflow_2)
+    handle_2 = DBOS.start_workflow(test_workflow_2, datetime.now())

     # Wait for workflows to complete
     handle_1.get_result()

@@ -492,8 +493,8 @@ def test_list_workflows(dbos: DBOS) -> None:
     ).isoformat()

     # Test POST /workflows with filters
-    filters = {
-        "
+    filters: Dict[str, Any] = {
+        "workflow_uuids": workflow_ids,
         "start_time": start_time_filter,
     }
     response = requests.post("http://localhost:3001/workflows", json=filters, timeout=5)

@@ -501,7 +502,24 @@ def test_list_workflows(dbos: DBOS) -> None:

     workflows = response.json()
     assert len(workflows) == 1, f"Expected 1 workflows, but got {len(workflows)}"
-
+
+    # Make sure it contains all the expected fields
+    assert workflows[0]["WorkflowUUID"] == handle_2.workflow_id, "Workflow ID mismatch"
+    assert workflows[0]["WorkflowName"] == test_workflow_2.__qualname__
+    assert workflows[0]["Status"] == "SUCCESS"
+    assert workflows[0]["WorkflowClassName"] is None
+    assert workflows[0]["WorkflowConfigName"] is None
+    assert workflows[0]["AuthenticatedUser"] is None
+    assert workflows[0]["AssumedRole"] is None
+    assert workflows[0]["AuthenticatedRoles"] is None
+    assert workflows[0]["Input"] is not None and len(workflows[0]["Input"]) > 0
+    assert workflows[0]["Output"] is not None and len(workflows[0]["Output"]) > 0
+    assert workflows[0]["Error"] is None
+    assert workflows[0]["CreatedAt"] is not None and len(workflows[0]["CreatedAt"]) > 0
+    assert workflows[0]["UpdatedAt"] is not None and len(workflows[0]["UpdatedAt"]) > 0
+    assert workflows[0]["QueueName"] is None
+    assert workflows[0]["ApplicationVersion"] == GlobalParams.app_version
+    assert workflows[0]["ExecutorID"] == GlobalParams.executor_id

     # Test POST /workflows without filters
     response = requests.post("http://localhost:3001/workflows", json={}, timeout=5)

@@ -512,7 +530,106 @@ def test_list_workflows(dbos: DBOS) -> None:
         workflows_list
     ), f"Expected {len(workflows_list)} workflows, but got {len(workflows)}"
     for workflow in workflows:
-        assert workflow["
+        assert workflow["WorkflowUUID"] in workflow_ids, "Workflow ID mismatch"
+
+    # Verify sort_desc inverts the order
+    filters = {
+        "sort_desc": True,
+    }
+    response = requests.post("http://localhost:3001/workflows", json=filters, timeout=5)
+    assert response.status_code == 200
+    workflows = response.json()
+    assert len(workflows) == len(workflows_list)
+    assert (
+        workflows[0]["WorkflowUUID"] == handle_2.workflow_id
+    ), "First workflow should be the last one started"
+
+    # Test all filters
+    filters = {
+        "workflow_uuids": ["not-a-valid-uuid"],
+    }
+    response = requests.post("http://localhost:3001/workflows", json=filters, timeout=5)
+    assert response.status_code == 200
+    workflows = response.json()
+    assert len(workflows) == 0, "Expected no workflows for invalid UUID"
+
+    filters = {
+        "workflow_uuids": [handle_1.workflow_id, handle_2.workflow_id],
+    }
+    response = requests.post("http://localhost:3001/workflows", json=filters, timeout=5)
+    assert response.status_code == 200
+    workflows = response.json()
+    assert len(workflows) == 2
+
+    filters = {
+        "authenticated_user": "no-user",
+    }
+    response = requests.post("http://localhost:3001/workflows", json=filters, timeout=5)
+    assert response.status_code == 200
+    workflows = response.json()
+    assert len(workflows) == 0
+
+    filters = {
+        "workflow_name": test_workflow_1.__qualname__,
+    }
+    response = requests.post("http://localhost:3001/workflows", json=filters, timeout=5)
+    assert response.status_code == 200
+    workflows = response.json()
+    assert len(workflows) == 1
+    assert workflows[0]["WorkflowUUID"] == handle_1.workflow_id
+
+    filters = {
+        "end_time": (datetime.now(timezone.utc) - timedelta(minutes=10)).isoformat()
+    }
+    response = requests.post("http://localhost:3001/workflows", json=filters, timeout=5)
+    assert response.status_code == 200
+    workflows = response.json()
+    assert len(workflows) == 0
+
+    filters = {
+        "start_time": (datetime.now(timezone.utc) + timedelta(minutes=10)).isoformat(),
+    }
+    response = requests.post("http://localhost:3001/workflows", json=filters, timeout=5)
+    assert response.status_code == 200
+    workflows = response.json()
+    assert len(workflows) == 0
+
+    filters = {
+        "status": ["SUCCESS", "CANCELLED"],
+    }
+    response = requests.post("http://localhost:3001/workflows", json=filters, timeout=5)
+    assert response.status_code == 200
+    workflows = response.json()
+    assert len(workflows) == 2
+
+    filters = {
+        "application_version": GlobalParams.app_version,
+    }
+    response = requests.post("http://localhost:3001/workflows", json=filters, timeout=5)
+    assert response.status_code == 200
+    workflows = response.json()
+    assert len(workflows) == 2
+
+    filters = {
+        "limit": 1,
+        "offset": 1,
+    }
+    response = requests.post("http://localhost:3001/workflows", json=filters, timeout=5)
+    assert response.status_code == 200
+    workflows = response.json()
+    assert len(workflows) == 1
+    assert workflows[0]["WorkflowUUID"] == handle_2.workflow_id
+
+    filters = {
+        "workflow_id_prefix": handle_1.workflow_id[
+            :10
+        ],  # First 10 characters of the workflow name
+    }
+    response = requests.post("http://localhost:3001/workflows", json=filters, timeout=5)
+    assert response.status_code == 200
+    workflows = response.json()
+    assert len(workflows) == 1
+    assert workflows[0]["WorkflowUUID"] == handle_1.workflow_id


 def test_get_workflow_by_id(dbos: DBOS) -> None:

@@ -606,15 +723,15 @@ def test_queued_workflows_endpoint(dbos: DBOS) -> None:
     test_queue2 = Queue("test-queue-2", concurrency=1)

     @DBOS.workflow()
-    def blocking_workflow() -> str:
+    def blocking_workflow(i: int) -> str:
         while True:
             time.sleep(0.1)

     # Enqueue some workflows to create queued entries
-    handles = []
-    handles.append(test_queue1.enqueue(blocking_workflow))
-    handles.append(test_queue1.enqueue(blocking_workflow))
-    handles.append(test_queue2.enqueue(blocking_workflow))
+    handles: list[WorkflowHandle[str]] = []
+    handles.append(test_queue1.enqueue(blocking_workflow, 1))
+    handles.append(test_queue1.enqueue(blocking_workflow, 2))
+    handles.append(test_queue2.enqueue(blocking_workflow, 3))

     # Test basic queued workflows endpoint
     response = requests.post("http://localhost:3001/queues", json={}, timeout=5)

@@ -628,16 +745,101 @@ def test_queued_workflows_endpoint(dbos: DBOS) -> None:
         len(queued_workflows) == 3
     ), f"Expected 3 queued workflows, got {len(queued_workflows)}"

-    #
-
+    # Make sure it contains all the expected fields
+    assert queued_workflows[0]["WorkflowName"] == blocking_workflow.__qualname__
+    assert (
+        queued_workflows[0]["WorkflowUUID"] == handles[0].workflow_id
+    ), "Workflow ID mismatch"
+    assert (
+        queued_workflows[0]["Status"] == "ENQUEUED"
+        or queued_workflows[0]["Status"] == "PENDING"
+    )
+    assert queued_workflows[0]["WorkflowClassName"] is None
+    assert queued_workflows[0]["WorkflowConfigName"] is None
+    assert queued_workflows[0]["AuthenticatedUser"] is None
+    assert queued_workflows[0]["AssumedRole"] is None
+    assert queued_workflows[0]["AuthenticatedRoles"] is None
+    assert (
+        queued_workflows[0]["Input"] is not None
+        and len(queued_workflows[0]["Input"]) > 0
+    )
+    assert "1" in queued_workflows[0]["Input"]
+    assert queued_workflows[0]["Output"] is None
+    assert queued_workflows[0]["Error"] is None
+    assert (
+        queued_workflows[0]["CreatedAt"] is not None
+        and len(queued_workflows[0]["CreatedAt"]) > 0
+    )
+    assert (
+        queued_workflows[0]["UpdatedAt"] is not None
+        and len(queued_workflows[0]["UpdatedAt"]) > 0
+    )
+    assert queued_workflows[0]["QueueName"] == test_queue1.name
+    assert queued_workflows[0]["ApplicationVersion"] == GlobalParams.app_version
+    assert queued_workflows[0]["ExecutorID"] == GlobalParams.executor_id
+
+    # Verify sort_desc inverts the order
+    filters: Dict[str, Any] = {
+        "sort_desc": True,
+    }
+    response = requests.post("http://localhost:3001/queues", json=filters, timeout=5)
+    assert response.status_code == 200
+    filtered_workflows = response.json()
+    assert len(filtered_workflows) == len(handles)
+    assert (
+        filtered_workflows[0]["WorkflowUUID"] == handles[2].workflow_id
+    ), "First workflow should be the last one enqueued"
+
+    # Test all filters
+    filters = {
+        "workflow_name": blocking_workflow.__qualname__,
+    }
+    response = requests.post("http://localhost:3001/queues", json=filters, timeout=5)
+    assert response.status_code == 200
+    filtered_workflows = response.json()
+    assert len(filtered_workflows) == len(handles)
+
+    filters = {
+        "end_time": (datetime.now(timezone.utc) - timedelta(minutes=10)).isoformat(),
+    }
     response = requests.post("http://localhost:3001/queues", json=filters, timeout=5)
     assert response.status_code == 200
+    filtered_workflows = response.json()
+    assert len(filtered_workflows) == 0
+
+    filters = {
+        "start_time": (datetime.now(timezone.utc) + timedelta(minutes=10)).isoformat(),
+    }
+    response = requests.post("http://localhost:3001/queues", json=filters, timeout=5)
+    assert response.status_code == 200
+    filtered_workflows = response.json()
+    assert len(filtered_workflows) == 0

+    filters = {
+        "status": ["PENDING", "ENQUEUED"],
+    }
+    response = requests.post("http://localhost:3001/queues", json=filters, timeout=5)
+    assert response.status_code == 200
+    filtered_workflows = response.json()
+    assert len(filtered_workflows) == len(handles)
+
+    filters = {
+        "queue_name": test_queue1.name,
+    }
+    response = requests.post("http://localhost:3001/queues", json=filters, timeout=5)
+    assert response.status_code == 200
+    filtered_workflows = response.json()
+    assert len(filtered_workflows) == 2
+
+    filters = {"queue_name": test_queue1.name, "limit": 1, "offset": 1}
+    response = requests.post("http://localhost:3001/queues", json=filters, timeout=5)
+    assert response.status_code == 200
     filtered_workflows = response.json()
     assert isinstance(filtered_workflows, list), "Response should be a list"
     assert (
         len(filtered_workflows) == 1
     ), f"Expected 1 workflow, got {len(filtered_workflows)}"
+    assert filtered_workflows[0]["WorkflowUUID"] == handles[1].workflow_id

     # Test with non-existent queue name
     filters = {"queue_name": "non-existent-queue"}
{dbos-1.6.0a5 → dbos-1.7.0a3}/tests/test_queue.py

@@ -215,7 +215,7 @@ def test_limiter(dbos: DBOS) -> None:
         return time.time()

     limit = 5
-    period =
+    period = 1.8
     queue = Queue("test_queue", limiter={"limit": limit, "period": period})

     handles: list[WorkflowHandle[float]] = []

@@ -235,12 +235,12 @@ def test_limiter(dbos: DBOS) -> None:
     # Verify that each "wave" of tasks started at the ~same time.
     for wave in range(num_waves):
         for i in range(wave * limit, (wave + 1) * limit - 1):
-            assert times[i + 1] - times[i] < 0.
+            assert times[i + 1] - times[i] < 0.5

     # Verify that the gap between "waves" is ~equal to the period
     for wave in range(num_waves - 1):
-        assert times[limit * (wave + 1)] - times[limit * wave] > period - 0.
-        assert times[limit * (wave + 1)] - times[limit * wave] < period + 0.
+        assert times[limit * (wave + 1)] - times[limit * wave] > period - 0.5
+        assert times[limit * (wave + 1)] - times[limit * wave] < period + 0.5

     # Verify all workflows get the SUCCESS status eventually
     for h in handles:

@@ -280,7 +280,7 @@ def test_multiple_queues(dbos: DBOS) -> None:
         return time.time()

     limit = 5
-    period =
+    period = 1.8
     limiter_queue = Queue(
         "test_limit_queue", limiter={"limit": limit, "period": period}
     )

@@ -302,12 +302,12 @@ def test_multiple_queues(dbos: DBOS) -> None:
     # Verify that each "wave" of tasks started at the ~same time.
     for wave in range(num_waves):
         for i in range(wave * limit, (wave + 1) * limit - 1):
-            assert times[i + 1] - times[i] < 0.
+            assert times[i + 1] - times[i] < 0.5

     # Verify that the gap between "waves" is ~equal to the period
     for wave in range(num_waves - 1):
-        assert times[limit * (wave + 1)] - times[limit * wave] > period - 0.
-        assert times[limit * (wave + 1)] - times[limit * wave] < period + 0.
+        assert times[limit * (wave + 1)] - times[limit * wave] > period - 0.5
+        assert times[limit * (wave + 1)] - times[limit * wave] < period + 0.5

     # Verify all workflows get the SUCCESS status eventually
     for h in handles:
{dbos-1.6.0a5 → dbos-1.7.0a3}/tests/test_scheduler.py

@@ -101,8 +101,8 @@ def test_scheduled_workflow(dbos: DBOS) -> None:
         nonlocal wf_counter
         wf_counter += 1

-    time.sleep(
-    assert wf_counter > 2 and wf_counter <=
+    time.sleep(5)
+    assert wf_counter > 2 and wf_counter <= 5


 def test_appdb_downtime(dbos: DBOS) -> None:

@@ -152,8 +152,8 @@ def test_scheduled_transaction(dbos: DBOS) -> None:
         nonlocal txn_counter
         txn_counter += 1

-    time.sleep(
-    assert txn_counter > 2 and txn_counter <=
+    time.sleep(5)
+    assert txn_counter > 2 and txn_counter <= 5


 def test_scheduled_step(dbos: DBOS) -> None:

@@ -165,8 +165,8 @@ def test_scheduled_step(dbos: DBOS) -> None:
         nonlocal step_counter
         step_counter += 1

-    time.sleep(
-    assert step_counter > 2 and step_counter <=
+    time.sleep(5)
+    assert step_counter > 2 and step_counter <= 5


 def test_scheduled_workflow_exception(dbos: DBOS) -> None:

@@ -179,8 +179,8 @@ def test_scheduled_workflow_exception(dbos: DBOS) -> None:
         wf_counter += 1
         raise Exception("error")

-    time.sleep(
-    assert wf_counter >= 1 and wf_counter <=
+    time.sleep(4)
+    assert wf_counter >= 1 and wf_counter <= 4


 def test_scheduler_oaoo(dbos: DBOS) -> None: