dbos 1.6.0a4.tar.gz → 1.7.0.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {dbos-1.6.0a4 → dbos-1.7.0}/PKG-INFO +1 -1
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_admin_server.py +22 -9
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_context.py +5 -11
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_core.py +4 -6
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_dbos.py +11 -1
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_queue.py +18 -3
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_sys_db.py +7 -5
- {dbos-1.6.0a4 → dbos-1.7.0}/pyproject.toml +1 -1
- {dbos-1.6.0a4 → dbos-1.7.0}/tests/test_admin_server.py +241 -21
- {dbos-1.6.0a4 → dbos-1.7.0}/tests/test_dbos.py +26 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/tests/test_queue.py +8 -8
- {dbos-1.6.0a4 → dbos-1.7.0}/tests/test_scheduler.py +8 -8
- {dbos-1.6.0a4 → dbos-1.7.0}/LICENSE +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/README.md +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/__init__.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/__main__.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_app_db.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_classproperty.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_client.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_conductor/conductor.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_conductor/protocol.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_croniter.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_dbos_config.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_debug.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_docker_pg_helper.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_error.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_event_loop.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_fastapi.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_flask.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_kafka.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_kafka_message.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_logger.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_migrations/env.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_migrations/script.py.mako +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_migrations/versions/04ca4f231047_workflow_queues_executor_id.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_migrations/versions/27ac6900c6ad_add_queue_dedup.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_migrations/versions/50f3227f0b4b_fix_job_queue.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_migrations/versions/5c361fc04708_added_system_tables.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_migrations/versions/66478e1b95e5_consolidate_queues.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_migrations/versions/83f3732ae8e7_workflow_timeout.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_migrations/versions/933e86bdac6a_add_queue_priority.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_migrations/versions/a3b18ad34abe_added_triggers.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_migrations/versions/d76646551a6b_job_queue_limiter.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_migrations/versions/d76646551a6c_workflow_queue.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_migrations/versions/d994145b47b6_consolidate_inputs.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_migrations/versions/eab0cc1d9a14_job_queue.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_outcome.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_recovery.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_registrations.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_roles.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_scheduler.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_schemas/__init__.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_schemas/application_database.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_schemas/system_database.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_serialization.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_templates/dbos-db-starter/README.md +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_templates/dbos-db-starter/__package/__init__.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_templates/dbos-db-starter/__package/main.py.dbos +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_templates/dbos-db-starter/__package/schema.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_templates/dbos-db-starter/alembic.ini +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_templates/dbos-db-starter/migrations/env.py.dbos +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_templates/dbos-db-starter/migrations/script.py.mako +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_tracer.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_utils.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/_workflow_commands.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/cli/_github_init.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/cli/_template_init.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/cli/cli.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/dbos-config.schema.json +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/dbos/py.typed +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/tests/__init__.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/tests/atexit_no_ctor.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/tests/atexit_no_launch.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/tests/classdefs.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/tests/client_collateral.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/tests/client_worker.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/tests/conftest.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/tests/dupname_classdefs1.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/tests/dupname_classdefsa.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/tests/more_classdefs.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/tests/queuedworkflow.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/tests/test_async.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/tests/test_classdecorators.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/tests/test_cli.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/tests/test_client.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/tests/test_concurrency.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/tests/test_config.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/tests/test_croniter.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/tests/test_debug.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/tests/test_docker_secrets.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/tests/test_failures.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/tests/test_fastapi.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/tests/test_fastapi_roles.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/tests/test_flask.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/tests/test_kafka.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/tests/test_outcome.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/tests/test_package.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/tests/test_schema_migration.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/tests/test_singleton.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/tests/test_spans.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/tests/test_sqlalchemy.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/tests/test_workflow_introspection.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/tests/test_workflow_management.py +0 -0
- {dbos-1.6.0a4 → dbos-1.7.0}/version/__init__.py +0 -0
{dbos-1.6.0a4 → dbos-1.7.0}/dbos/_admin_server.py

@@ -9,6 +9,7 @@ from typing import TYPE_CHECKING, Any, Dict, List, Optional, TypedDict
 
 from dbos._workflow_commands import garbage_collect, global_timeout
 
+from ._conductor import protocol as conductor_protocol
 from ._context import SetWorkflowID
 from ._error import DBOSException
 from ._logger import dbos_logger

@@ -118,7 +119,12 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
                 self.send_response(404)
                 self._end_headers()
                 return
-
+            workflow_output = (
+                conductor_protocol.WorkflowsOutput.from_workflow_information(
+                    workflows[0]
+                )
+            )
+            response_body = json.dumps(workflow_output.__dict__).encode("utf-8")
             self.send_response(200)
             self.send_header("Content-Type", "application/json")
             self.send_header("Content-Length", str(len(response_body)))

@@ -326,20 +332,24 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
 
     def _handle_workflows(self, filters: Dict[str, Any]) -> None:
        workflows = self.dbos.list_workflows(
-            workflow_ids=filters.get("
-
+            workflow_ids=filters.get("workflow_uuids"),
+            user=filters.get("authenticated_user"),
             start_time=filters.get("start_time"),
             end_time=filters.get("end_time"),
             status=filters.get("status"),
             app_version=filters.get("application_version"),
+            name=filters.get("workflow_name"),
             limit=filters.get("limit"),
             offset=filters.get("offset"),
             sort_desc=filters.get("sort_desc", False),
             workflow_id_prefix=filters.get("workflow_id_prefix"),
         )
-
+        workflows_output = [
+            conductor_protocol.WorkflowsOutput.from_workflow_information(i)
+            for i in workflows
+        ]
         response_body = json.dumps(
-            [workflow.__dict__ for workflow in workflows]
+            [workflow.__dict__ for workflow in workflows_output]
         ).encode("utf-8")
         self.send_response(200)
         self.send_header("Content-Type", "application/json")

@@ -349,18 +359,21 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
 
     def _handle_queued_workflows(self, filters: Dict[str, Any]) -> None:
         workflows = self.dbos.list_queued_workflows(
-            queue_name=filters.get("queue_name"),
-            name=filters.get("name"),
             start_time=filters.get("start_time"),
             end_time=filters.get("end_time"),
             status=filters.get("status"),
+            name=filters.get("workflow_name"),
             limit=filters.get("limit"),
             offset=filters.get("offset"),
+            queue_name=filters.get("queue_name"),
             sort_desc=filters.get("sort_desc", False),
         )
-
+        workflows_output = [
+            conductor_protocol.WorkflowsOutput.from_workflow_information(i)
+            for i in workflows
+        ]
         response_body = json.dumps(
-            [workflow.__dict__ for workflow in workflows]
+            [workflow.__dict__ for workflow in workflows_output]
         ).encode("utf-8")
         self.send_response(200)
         self.send_header("Content-Type", "application/json")
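The filter keys these admin endpoints now accept mirror the test payloads further down: workflow_uuids, authenticated_user, workflow_name, application_version, queue_name, start_time, end_time, status, limit, offset, sort_desc, and workflow_id_prefix. A quick usage sketch against the port the tests use (3001); the filter values here are illustrative, not from the package:

import requests

# List recent successful runs of one workflow by name
filters = {"workflow_name": "my_workflow", "status": ["SUCCESS"], "limit": 10}
resp = requests.post("http://localhost:3001/workflows", json=filters, timeout=5)
for wf in resp.json():
    print(wf["WorkflowUUID"], wf["Status"])

# Inspect what is currently sitting on one queue
queued = requests.post(
    "http://localhost:3001/queues", json={"queue_name": "my-queue"}, timeout=5
).json()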
{dbos-1.6.0a4 → dbos-1.7.0}/dbos/_context.py

@@ -140,23 +140,18 @@ class DBOSContext:
         self,
         wfid: Optional[str],
         attributes: TracedAttributes,
-        is_temp_workflow: bool = False,
     ) -> None:
         if wfid is None or len(wfid) == 0:
             wfid = self.assign_workflow_id()
             self.id_assigned_for_next_workflow = ""
         self.workflow_id = wfid
         self.function_id = 0
-        if not is_temp_workflow:
-            self._start_span(attributes)
+        self._start_span(attributes)
 
-    def end_workflow(
-        self, exc_value: Optional[BaseException], is_temp_workflow: bool = False
-    ) -> None:
+    def end_workflow(self, exc_value: Optional[BaseException]) -> None:
         self.workflow_id = ""
         self.function_id = -1
-        if not is_temp_workflow:
-            self._end_span(exc_value)
+        self._end_span(exc_value)
 
     def is_within_workflow(self) -> bool:
         return len(self.workflow_id) > 0

@@ -490,7 +485,6 @@ class EnterDBOSWorkflow(AbstractContextManager[DBOSContext, Literal[False]]):
     def __init__(self, attributes: TracedAttributes) -> None:
         self.created_ctx = False
         self.attributes = attributes
-        self.is_temp_workflow = attributes["name"] == "temp_wf"
         self.saved_workflow_timeout: Optional[int] = None
         self.saved_deduplication_id: Optional[str] = None
         self.saved_priority: Optional[int] = None

@@ -514,7 +508,7 @@ class EnterDBOSWorkflow(AbstractContextManager[DBOSContext, Literal[False]]):
         self.saved_priority = ctx.priority
         ctx.priority = None
         ctx.start_workflow(
-            None, self.attributes, self.is_temp_workflow
+            None, self.attributes
         )  # Will get from the context's next workflow ID
         return ctx
 

@@ -526,7 +520,7 @@ class EnterDBOSWorkflow(AbstractContextManager[DBOSContext, Literal[False]]):
     ) -> Literal[False]:
         ctx = assert_current_dbos_context()
         assert ctx.is_within_workflow()
-        ctx.end_workflow(exc_value, self.is_temp_workflow)
+        ctx.end_workflow(exc_value)
         # Restore the saved workflow timeout
         ctx.workflow_timeout_ms = self.saved_workflow_timeout
         # Clear any propagating timeout
{dbos-1.6.0a4 → dbos-1.7.0}/dbos/_core.py

@@ -1157,13 +1157,16 @@ def decorate_step(
     def wrapper(*args: Any, **kwargs: Any) -> Any:
         rr: Optional[str] = check_required_roles(func, fi)
         # Entering step is allowed:
+        # No DBOS, just call the original function directly
         # In a step already, just call the original function directly.
         # In a workflow (that is not in a step already)
         # Not in a workflow (we will start the single op workflow)
+        if not dbosreg.dbos or not dbosreg.dbos._launched:
+            # Call the original function directly
+            return func(*args, **kwargs)
         ctx = get_local_dbos_context()
         if ctx and ctx.is_step():
             # Call the original function directly
-
             return func(*args, **kwargs)
         if ctx and ctx.is_within_workflow():
             assert ctx.is_workflow(), "Steps must be called from within workflows"

@@ -1187,11 +1190,6 @@ def decorate_step(
     async def temp_wf_async(*args: Any, **kwargs: Any) -> Any:
         return await wrapper(*args, **kwargs)
 
-    # Other code in transact-py depends on the name of temporary workflow functions to be "temp_wf"
-    # so set the name of both sync and async temporary workflow functions explicitly
-    temp_wf_sync.__name__ = "temp_wf"
-    temp_wf_async.__name__ = "temp_wf"
-
     temp_wf = temp_wf_async if inspect.iscoroutinefunction(func) else temp_wf_sync
     wrapped_wf = workflow_wrapper(dbosreg, temp_wf)
     set_dbos_func_name(temp_wf, "<temp>." + step_name)
{dbos-1.6.0a4 → dbos-1.7.0}/dbos/_dbos.py

@@ -7,7 +7,6 @@ import inspect
 import os
 import sys
 import threading
-import traceback
 import uuid
 from concurrent.futures import ThreadPoolExecutor
 from logging import Logger

@@ -28,6 +27,7 @@ from typing import (
 )
 
 from opentelemetry.trace import Span
+from rich import print
 
 from dbos._conductor.conductor import ConductorWebsocket
 from dbos._sys_db import WorkflowStatus

@@ -517,6 +517,16 @@ class DBOS:
 
         dbos_logger.info("DBOS launched!")
 
+        if self.conductor_key is None and os.environ.get("DBOS__CLOUD") != "true":
+            # Hint the user to open the URL to register and set up Conductor
+            app_name = self._config["name"]
+            conductor_registration_url = (
+                f"https://console.dbos.dev/self-host?appname={app_name}"
+            )
+            print(
+                f"[bold]To view and manage workflows, connect to DBOS Conductor at:[/bold] [bold blue]{conductor_registration_url}[/bold blue]"
+            )
+
         # Flush handlers and add OTLP to all loggers if enabled
         # to enable their export in DBOS Cloud
         for handler in dbos_logger.handlers:
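The hint above relies on rich (a new import) to render console markup such as [bold] and [bold blue]. A small standalone sketch of the same gating, with placeholder names:

import os
from typing import Optional

from rich import print  # rich's print renders [bold]...[/bold] markup

def maybe_print_conductor_hint(app_name: str, conductor_key: Optional[str]) -> None:
    # Hint only when no Conductor key is set and we are not running on DBOS Cloud
    if conductor_key is None and os.environ.get("DBOS__CLOUD") != "true":
        url = f"https://console.dbos.dev/self-host?appname={app_name}"
        print(f"[bold]Connect to DBOS Conductor at:[/bold] [bold blue]{url}[/bold blue]")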
{dbos-1.6.0a4 → dbos-1.7.0}/dbos/_queue.py

@@ -1,3 +1,4 @@
+import random
 import threading
 from typing import TYPE_CHECKING, Any, Callable, Coroutine, Optional, TypedDict
 

@@ -94,8 +95,12 @@ class Queue:
 
 
 def queue_thread(stop_event: threading.Event, dbos: "DBOS") -> None:
+    polling_interval = 1.0
+    min_polling_interval = 1.0
+    max_polling_interval = 120.0
     while not stop_event.is_set():
-        if stop_event.wait(timeout=1):
+        # Wait for the polling interval with jitter
+        if stop_event.wait(timeout=polling_interval * random.uniform(0.95, 1.05)):
             return
         queues = dict(dbos._registry.queue_info_map)
         for _, queue in queues.items():

@@ -106,12 +111,22 @@ def queue_thread(stop_event: threading.Event, dbos: "DBOS") -> None:
                 for id in wf_ids:
                     execute_workflow_by_id(dbos, id)
             except OperationalError as e:
-
-                if not isinstance(
+                if isinstance(
                     e.orig, (errors.SerializationFailure, errors.LockNotAvailable)
                 ):
+                    # If a serialization error is encountered, increase the polling interval
+                    polling_interval = min(
+                        max_polling_interval,
+                        polling_interval * 2.0,
+                    )
+                    dbos.logger.warning(
+                        f"Contention detected in queue thread for {queue.name}. Increasing polling interval to {polling_interval:.2f}."
+                    )
+                else:
                     dbos.logger.warning(f"Exception encountered in queue thread: {e}")
             except Exception as e:
                 if not stop_event.is_set():
                     # Only print the error if the thread is not stopping
                     dbos.logger.warning(f"Exception encountered in queue thread: {e}")
+        # Attempt to scale back the polling interval on each iteration
+        polling_interval = max(min_polling_interval, polling_interval * 0.9)
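The queue thread now backs off multiplicatively under contention (doubling up to 120s), decays the interval by 10% on every iteration, and jitters each wait by roughly 5% so concurrent workers drift out of lockstep. A minimal standalone sketch of the same pattern, with hypothetical names (the real logic lives in dbos/_queue.py and reacts to the SerializationFailure/LockNotAvailable errors shown above):

import random
import threading
from typing import Callable

class ContentionError(Exception):
    """Stand-in for the database contention errors handled above."""

def poll_loop(stop_event: threading.Event, dequeue: Callable[[], None]) -> None:
    polling_interval = 1.0
    min_interval, max_interval = 1.0, 120.0
    while not stop_event.is_set():
        # Jittered wait so many workers do not poll in lockstep
        if stop_event.wait(timeout=polling_interval * random.uniform(0.95, 1.05)):
            return
        try:
            dequeue()
        except ContentionError:
            # Contention: double the interval, capped at the maximum
            polling_interval = min(max_interval, polling_interval * 2.0)
        # Decay back toward the minimum on every iteration
        polling_interval = max(min_interval, polling_interval * 0.9)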
{dbos-1.6.0a4 → dbos-1.7.0}/dbos/_sys_db.py

@@ -1650,7 +1650,7 @@ class SystemDatabase:
             return []
 
         # Compute max_tasks, the number of workflows that can be dequeued given local and global concurrency limits,
-        max_tasks = float("inf")
+        max_tasks = 100  # To minimize contention with large queues, never dequeue more than 100 tasks
         if queue.worker_concurrency is not None or queue.concurrency is not None:
             # Count how many workflows on this queue are currently PENDING both locally and globally.
             pending_tasks_query = (

@@ -1694,6 +1694,7 @@ class SystemDatabase:
 
         # Retrieve the first max_tasks workflows in the queue.
         # Only retrieve workflows of the local version (or without version set)
+        skip_locks = queue.concurrency is None
         query = (
             sa.select(
                 SystemSchema.workflow_status.c.workflow_uuid,

@@ -1711,7 +1712,10 @@ class SystemDatabase:
                     SystemSchema.workflow_status.c.application_version.is_(None),
                 )
             )
-
+            # Unless global concurrency is set, use skip_locked to only select
+            # rows that can be locked. If global concurrency is set, use no_wait
+            # to ensure all processes have a consistent view of the table.
+            .with_for_update(skip_locked=skip_locks, nowait=(not skip_locks))
         )
         if queue.priority_enabled:
             query = query.order_by(

@@ -1720,9 +1724,7 @@ class SystemDatabase:
             )
         else:
             query = query.order_by(SystemSchema.workflow_status.c.created_at.asc())
-
-        if max_tasks != float("inf"):
-            query = query.limit(int(max_tasks))
+        query = query.limit(int(max_tasks))
 
         rows = c.execute(query).fetchall()
 
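The dequeue query above picks one of two row-locking modes: with skip_locked, each worker simply skips rows another worker holds locked; with nowait (used when a global concurrency limit is set), a locked row raises immediately, which surfaces as the LockNotAvailable error driving the queue thread's backoff, so all processes keep a consistent view of the table. A sketch of the choice in SQLAlchemy Core, with placeholder table and column names:

import sqlalchemy as sa

def dequeue_query(status_table: sa.Table, global_limit_set: bool) -> sa.Select:
    # Mirrors skip_locks above: lock-skipping unless a global limit is set
    skip_locks = not global_limit_set
    return (
        sa.select(status_table.c.workflow_uuid)
        .where(status_table.c.status == "ENQUEUED")
        # skip_locked: silently pass over rows locked by other workers
        # nowait: raise at once instead of blocking on a locked row
        .with_for_update(skip_locked=skip_locks, nowait=(not skip_locks))
        .order_by(status_table.c.created_at.asc())
        .limit(100)
    )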
{dbos-1.6.0a4 → dbos-1.7.0}/tests/test_admin_server.py

@@ -3,7 +3,8 @@ import socket
 import threading
 import time
 import uuid
-from datetime import datetime, timezone
+from datetime import datetime, timedelta, timezone
+from typing import Any, Dict
 
 import pytest
 import requests

@@ -462,13 +463,13 @@ def test_list_workflows(dbos: DBOS) -> None:
         pass
 
     @DBOS.workflow()
-    def test_workflow_2() ->
-
+    def test_workflow_2(my_time: datetime) -> str:
+        return DBOS.workflow_id + " completed at " + my_time.isoformat()
 
     # Start workflows
     handle_1 = DBOS.start_workflow(test_workflow_1)
     time.sleep(2)  # Sleep for 2 seconds between workflows
-    handle_2 = DBOS.start_workflow(test_workflow_2)
+    handle_2 = DBOS.start_workflow(test_workflow_2, datetime.now())
 
     # Wait for workflows to complete
     handle_1.get_result()

@@ -492,8 +493,8 @@ def test_list_workflows(dbos: DBOS) -> None:
     ).isoformat()
 
     # Test POST /workflows with filters
-    filters = {
-        "
+    filters: Dict[str, Any] = {
+        "workflow_uuids": workflow_ids,
         "start_time": start_time_filter,
     }
     response = requests.post("http://localhost:3001/workflows", json=filters, timeout=5)

@@ -501,7 +502,24 @@ def test_list_workflows(dbos: DBOS) -> None:
 
     workflows = response.json()
     assert len(workflows) == 1, f"Expected 1 workflows, but got {len(workflows)}"
-
+
+    # Make sure it contains all the expected fields
+    assert workflows[0]["WorkflowUUID"] == handle_2.workflow_id, "Workflow ID mismatch"
+    assert workflows[0]["WorkflowName"] == test_workflow_2.__qualname__
+    assert workflows[0]["Status"] == "SUCCESS"
+    assert workflows[0]["WorkflowClassName"] is None
+    assert workflows[0]["WorkflowConfigName"] is None
+    assert workflows[0]["AuthenticatedUser"] is None
+    assert workflows[0]["AssumedRole"] is None
+    assert workflows[0]["AuthenticatedRoles"] is None
+    assert workflows[0]["Input"] is not None and len(workflows[0]["Input"]) > 0
+    assert workflows[0]["Output"] is not None and len(workflows[0]["Output"]) > 0
+    assert workflows[0]["Error"] is None
+    assert workflows[0]["CreatedAt"] is not None and len(workflows[0]["CreatedAt"]) > 0
+    assert workflows[0]["UpdatedAt"] is not None and len(workflows[0]["UpdatedAt"]) > 0
+    assert workflows[0]["QueueName"] is None
+    assert workflows[0]["ApplicationVersion"] == GlobalParams.app_version
+    assert workflows[0]["ExecutorID"] == GlobalParams.executor_id
 
     # Test POST /workflows without filters
     response = requests.post("http://localhost:3001/workflows", json={}, timeout=5)

@@ -512,7 +530,106 @@ def test_list_workflows(dbos: DBOS) -> None:
         workflows_list
     ), f"Expected {len(workflows_list)} workflows, but got {len(workflows)}"
     for workflow in workflows:
-        assert workflow["
+        assert workflow["WorkflowUUID"] in workflow_ids, "Workflow ID mismatch"
+
+    # Verify sort_desc inverts the order
+    filters = {
+        "sort_desc": True,
+    }
+    response = requests.post("http://localhost:3001/workflows", json=filters, timeout=5)
+    assert response.status_code == 200
+    workflows = response.json()
+    assert len(workflows) == len(workflows_list)
+    assert (
+        workflows[0]["WorkflowUUID"] == handle_2.workflow_id
+    ), "First workflow should be the last one started"
+
+    # Test all filters
+    filters = {
+        "workflow_uuids": ["not-a-valid-uuid"],
+    }
+    response = requests.post("http://localhost:3001/workflows", json=filters, timeout=5)
+    assert response.status_code == 200
+    workflows = response.json()
+    assert len(workflows) == 0, "Expected no workflows for invalid UUID"
+
+    filters = {
+        "workflow_uuids": [handle_1.workflow_id, handle_2.workflow_id],
+    }
+    response = requests.post("http://localhost:3001/workflows", json=filters, timeout=5)
+    assert response.status_code == 200
+    workflows = response.json()
+    assert len(workflows) == 2
+
+    filters = {
+        "authenticated_user": "no-user",
+    }
+    response = requests.post("http://localhost:3001/workflows", json=filters, timeout=5)
+    assert response.status_code == 200
+    workflows = response.json()
+    assert len(workflows) == 0
+
+    filters = {
+        "workflow_name": test_workflow_1.__qualname__,
+    }
+    response = requests.post("http://localhost:3001/workflows", json=filters, timeout=5)
+    assert response.status_code == 200
+    workflows = response.json()
+    assert len(workflows) == 1
+    assert workflows[0]["WorkflowUUID"] == handle_1.workflow_id
+
+    filters = {
+        "end_time": (datetime.now(timezone.utc) - timedelta(minutes=10)).isoformat()
+    }
+    response = requests.post("http://localhost:3001/workflows", json=filters, timeout=5)
+    assert response.status_code == 200
+    workflows = response.json()
+    assert len(workflows) == 0
+
+    filters = {
+        "start_time": (datetime.now(timezone.utc) + timedelta(minutes=10)).isoformat(),
+    }
+    response = requests.post("http://localhost:3001/workflows", json=filters, timeout=5)
+    assert response.status_code == 200
+    workflows = response.json()
+    assert len(workflows) == 0
+
+    filters = {
+        "status": ["SUCCESS", "CANCELLED"],
+    }
+    response = requests.post("http://localhost:3001/workflows", json=filters, timeout=5)
+    assert response.status_code == 200
+    workflows = response.json()
+    assert len(workflows) == 2
+
+    filters = {
+        "application_version": GlobalParams.app_version,
+    }
+    response = requests.post("http://localhost:3001/workflows", json=filters, timeout=5)
+    assert response.status_code == 200
+    workflows = response.json()
+    assert len(workflows) == 2
+
+    filters = {
+        "limit": 1,
+        "offset": 1,
+    }
+    response = requests.post("http://localhost:3001/workflows", json=filters, timeout=5)
+    assert response.status_code == 200
+    workflows = response.json()
+    assert len(workflows) == 1
+    assert workflows[0]["WorkflowUUID"] == handle_2.workflow_id
+
+    filters = {
+        "workflow_id_prefix": handle_1.workflow_id[
+            :10
+        ],  # First 10 characters of the workflow name
+    }
+    response = requests.post("http://localhost:3001/workflows", json=filters, timeout=5)
+    assert response.status_code == 200
+    workflows = response.json()
+    assert len(workflows) == 1
+    assert workflows[0]["WorkflowUUID"] == handle_1.workflow_id
 
 
 def test_get_workflow_by_id(dbos: DBOS) -> None:

@@ -522,16 +639,16 @@ def test_get_workflow_by_id(dbos: DBOS) -> None:
         pass
 
     @DBOS.workflow()
-    def test_workflow_2() ->
-
+    def test_workflow_2(my_time: datetime) -> str:
+        return DBOS.workflow_id + " completed at " + my_time.isoformat()
 
     # Start workflows
     handle_1 = DBOS.start_workflow(test_workflow_1)
-    handle_2 = DBOS.start_workflow(test_workflow_2)
+    handle_2 = DBOS.start_workflow(test_workflow_2, datetime.now())
 
     # Wait for workflows to complete
     handle_1.get_result()
-    handle_2.get_result()
+    assert handle_2.get_result() is not None
 
     # Get the workflow ID of the second workflow
     workflow_id = handle_2.workflow_id

@@ -543,10 +660,28 @@ def test_get_workflow_by_id(dbos: DBOS) -> None:
     ), f"Expected status code 200, but got {response.status_code}"
 
     workflow_data = response.json()
-    assert workflow_data["
+    assert workflow_data["WorkflowUUID"] == workflow_id, "Workflow ID mismatch"
     assert (
-        workflow_data["
+        workflow_data["Status"] == "SUCCESS"
     ), "Expected workflow status to be SUCCESS"
+    assert workflow_data["WorkflowName"] == test_workflow_2.__qualname__
+    assert workflow_data["WorkflowClassName"] is None
+    assert workflow_data["WorkflowConfigName"] is None
+    assert workflow_data["AuthenticatedUser"] is None
+    assert workflow_data["AssumedRole"] is None
+    assert workflow_data["AuthenticatedRoles"] is None
+    assert workflow_data["Input"] is not None and len(workflow_data["Input"]) > 0
+    assert workflow_data["Output"] is not None and len(workflow_data["Output"]) > 0
+    assert workflow_data["Error"] is None
+    assert (
+        workflow_data["CreatedAt"] is not None and len(workflow_data["CreatedAt"]) > 0
+    )
+    assert (
+        workflow_data["UpdatedAt"] is not None and len(workflow_data["UpdatedAt"]) > 0
+    )
+    assert workflow_data["QueueName"] is None
+    assert workflow_data["ApplicationVersion"] == GlobalParams.app_version
+    assert workflow_data["ExecutorID"] == GlobalParams.executor_id
 
     # Test GET /workflows/:workflow_id for a non-existing workflow
     non_existing_workflow_id = "non-existing-id"

@@ -606,15 +741,15 @@ def test_queued_workflows_endpoint(dbos: DBOS) -> None:
     test_queue2 = Queue("test-queue-2", concurrency=1)
 
     @DBOS.workflow()
-    def blocking_workflow() -> str:
+    def blocking_workflow(i: int) -> str:
         while True:
             time.sleep(0.1)
 
     # Enqueue some workflows to create queued entries
-    handles = []
-    handles.append(test_queue1.enqueue(blocking_workflow))
-    handles.append(test_queue1.enqueue(blocking_workflow))
-    handles.append(test_queue2.enqueue(blocking_workflow))
+    handles: list[WorkflowHandle[str]] = []
+    handles.append(test_queue1.enqueue(blocking_workflow, 1))
+    handles.append(test_queue1.enqueue(blocking_workflow, 2))
+    handles.append(test_queue2.enqueue(blocking_workflow, 3))
 
     # Test basic queued workflows endpoint
     response = requests.post("http://localhost:3001/queues", json={}, timeout=5)

@@ -628,16 +763,101 @@ def test_queued_workflows_endpoint(dbos: DBOS) -> None:
         len(queued_workflows) == 3
     ), f"Expected 3 queued workflows, got {len(queued_workflows)}"
 
-    #
-
+    # Make sure it contains all the expected fields
+    assert queued_workflows[0]["WorkflowName"] == blocking_workflow.__qualname__
+    assert (
+        queued_workflows[0]["WorkflowUUID"] == handles[0].workflow_id
+    ), "Workflow ID mismatch"
+    assert (
+        queued_workflows[0]["Status"] == "ENQUEUED"
+        or queued_workflows[0]["Status"] == "PENDING"
+    )
+    assert queued_workflows[0]["WorkflowClassName"] is None
+    assert queued_workflows[0]["WorkflowConfigName"] is None
+    assert queued_workflows[0]["AuthenticatedUser"] is None
+    assert queued_workflows[0]["AssumedRole"] is None
+    assert queued_workflows[0]["AuthenticatedRoles"] is None
+    assert (
+        queued_workflows[0]["Input"] is not None
+        and len(queued_workflows[0]["Input"]) > 0
+    )
+    assert "1" in queued_workflows[0]["Input"]
+    assert queued_workflows[0]["Output"] is None
+    assert queued_workflows[0]["Error"] is None
+    assert (
+        queued_workflows[0]["CreatedAt"] is not None
+        and len(queued_workflows[0]["CreatedAt"]) > 0
+    )
+    assert (
+        queued_workflows[0]["UpdatedAt"] is not None
+        and len(queued_workflows[0]["UpdatedAt"]) > 0
+    )
+    assert queued_workflows[0]["QueueName"] == test_queue1.name
+    assert queued_workflows[0]["ApplicationVersion"] == GlobalParams.app_version
+    assert queued_workflows[0]["ExecutorID"] == GlobalParams.executor_id
+
+    # Verify sort_desc inverts the order
+    filters: Dict[str, Any] = {
+        "sort_desc": True,
+    }
+    response = requests.post("http://localhost:3001/queues", json=filters, timeout=5)
+    assert response.status_code == 200
+    filtered_workflows = response.json()
+    assert len(filtered_workflows) == len(handles)
+    assert (
+        filtered_workflows[0]["WorkflowUUID"] == handles[2].workflow_id
+    ), "First workflow should be the last one enqueued"
+
+    # Test all filters
+    filters = {
+        "workflow_name": blocking_workflow.__qualname__,
+    }
+    response = requests.post("http://localhost:3001/queues", json=filters, timeout=5)
+    assert response.status_code == 200
+    filtered_workflows = response.json()
+    assert len(filtered_workflows) == len(handles)
+
+    filters = {
+        "end_time": (datetime.now(timezone.utc) - timedelta(minutes=10)).isoformat(),
+    }
+    response = requests.post("http://localhost:3001/queues", json=filters, timeout=5)
+    assert response.status_code == 200
+    filtered_workflows = response.json()
+    assert len(filtered_workflows) == 0
+
+    filters = {
+        "start_time": (datetime.now(timezone.utc) + timedelta(minutes=10)).isoformat(),
+    }
+    response = requests.post("http://localhost:3001/queues", json=filters, timeout=5)
+    assert response.status_code == 200
+    filtered_workflows = response.json()
+    assert len(filtered_workflows) == 0
+
+    filters = {
+        "status": ["PENDING", "ENQUEUED"],
+    }
     response = requests.post("http://localhost:3001/queues", json=filters, timeout=5)
     assert response.status_code == 200
+    filtered_workflows = response.json()
+    assert len(filtered_workflows) == len(handles)
 
+    filters = {
+        "queue_name": test_queue1.name,
+    }
+    response = requests.post("http://localhost:3001/queues", json=filters, timeout=5)
+    assert response.status_code == 200
+    filtered_workflows = response.json()
+    assert len(filtered_workflows) == 2
+
+    filters = {"queue_name": test_queue1.name, "limit": 1, "offset": 1}
+    response = requests.post("http://localhost:3001/queues", json=filters, timeout=5)
+    assert response.status_code == 200
     filtered_workflows = response.json()
     assert isinstance(filtered_workflows, list), "Response should be a list"
     assert (
         len(filtered_workflows) == 1
     ), f"Expected 1 workflow, got {len(filtered_workflows)}"
+    assert filtered_workflows[0]["WorkflowUUID"] == handles[1].workflow_id
 
     # Test with non-existent queue name
     filters = {"queue_name": "non-existent-queue"}
{dbos-1.6.0a4 → dbos-1.7.0}/tests/test_dbos.py

@@ -1608,3 +1608,29 @@ def test_custom_names(dbos: DBOS) -> None:
     handle = DBOS.start_workflow(workflow, value)  # type: ignore
     assert handle.get_status().name == another_workflow
     assert handle.get_result() == value  # type: ignore
+
+
+@pytest.mark.asyncio
+async def test_step_without_dbos(dbos: DBOS, config: DBOSConfig) -> None:
+    DBOS.destroy(destroy_registry=True)
+
+    @DBOS.step()
+    def step(x: int) -> int:
+        return x
+
+    @DBOS.step()
+    async def async_step(x: int) -> int:
+        return x
+
+    assert step(5) == 5
+    assert await async_step(5) == 5
+
+    DBOS(config=config)
+
+    assert step(5) == 5
+    assert await async_step(5) == 5
+
+    DBOS.launch()
+
+    assert step(5) == 5
+    assert await async_step(5) == 5
{dbos-1.6.0a4 → dbos-1.7.0}/tests/test_queue.py

@@ -215,7 +215,7 @@ def test_limiter(dbos: DBOS) -> None:
         return time.time()
 
     limit = 5
-    period =
+    period = 1.8
     queue = Queue("test_queue", limiter={"limit": limit, "period": period})
 
     handles: list[WorkflowHandle[float]] = []

@@ -235,12 +235,12 @@ def test_limiter(dbos: DBOS) -> None:
     # Verify that each "wave" of tasks started at the ~same time.
     for wave in range(num_waves):
         for i in range(wave * limit, (wave + 1) * limit - 1):
-            assert times[i + 1] - times[i] < 0.
+            assert times[i + 1] - times[i] < 0.5
 
     # Verify that the gap between "waves" is ~equal to the period
     for wave in range(num_waves - 1):
-        assert times[limit * (wave + 1)] - times[limit * wave] > period - 0.
-        assert times[limit * (wave + 1)] - times[limit * wave] < period + 0.
+        assert times[limit * (wave + 1)] - times[limit * wave] > period - 0.5
+        assert times[limit * (wave + 1)] - times[limit * wave] < period + 0.5
 
     # Verify all workflows get the SUCCESS status eventually
     for h in handles:

@@ -280,7 +280,7 @@ def test_multiple_queues(dbos: DBOS) -> None:
         return time.time()
 
     limit = 5
-    period =
+    period = 1.8
     limiter_queue = Queue(
         "test_limit_queue", limiter={"limit": limit, "period": period}
     )

@@ -302,12 +302,12 @@ def test_multiple_queues(dbos: DBOS) -> None:
     # Verify that each "wave" of tasks started at the ~same time.
     for wave in range(num_waves):
         for i in range(wave * limit, (wave + 1) * limit - 1):
-            assert times[i + 1] - times[i] < 0.
+            assert times[i + 1] - times[i] < 0.5
 
     # Verify that the gap between "waves" is ~equal to the period
     for wave in range(num_waves - 1):
-        assert times[limit * (wave + 1)] - times[limit * wave] > period - 0.
-        assert times[limit * (wave + 1)] - times[limit * wave] < period + 0.
+        assert times[limit * (wave + 1)] - times[limit * wave] > period - 0.5
+        assert times[limit * (wave + 1)] - times[limit * wave] < period + 0.5
 
     # Verify all workflows get the SUCCESS status eventually
     for h in handles:
{dbos-1.6.0a4 → dbos-1.7.0}/tests/test_scheduler.py

@@ -101,8 +101,8 @@ def test_scheduled_workflow(dbos: DBOS) -> None:
         nonlocal wf_counter
         wf_counter += 1
 
-    time.sleep(
-    assert wf_counter > 2 and wf_counter <=
+    time.sleep(5)
+    assert wf_counter > 2 and wf_counter <= 5
 
 
 def test_appdb_downtime(dbos: DBOS) -> None:

@@ -152,8 +152,8 @@ def test_scheduled_transaction(dbos: DBOS) -> None:
         nonlocal txn_counter
         txn_counter += 1
 
-    time.sleep(
-    assert txn_counter > 2 and txn_counter <=
+    time.sleep(5)
+    assert txn_counter > 2 and txn_counter <= 5
 
 
 def test_scheduled_step(dbos: DBOS) -> None:

@@ -165,8 +165,8 @@ def test_scheduled_step(dbos: DBOS) -> None:
         nonlocal step_counter
         step_counter += 1
 
-    time.sleep(
-    assert step_counter > 2 and step_counter <=
+    time.sleep(5)
+    assert step_counter > 2 and step_counter <= 5
 
 
 def test_scheduled_workflow_exception(dbos: DBOS) -> None:

@@ -179,8 +179,8 @@ def test_scheduled_workflow_exception(dbos: DBOS) -> None:
         wf_counter += 1
         raise Exception("error")
 
-    time.sleep(
-    assert wf_counter >= 1 and wf_counter <=
+    time.sleep(4)
+    assert wf_counter >= 1 and wf_counter <= 4
 
 
 def test_scheduler_oaoo(dbos: DBOS) -> None: