dbos-1.5.0a3.tar.gz → dbos-1.5.0a5.tar.gz
- {dbos-1.5.0a3 → dbos-1.5.0a5}/PKG-INFO +1 -1
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_admin_server.py +89 -4
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_conductor/conductor.py +37 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_conductor/protocol.py +18 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_workflow_commands.py +1 -2
- {dbos-1.5.0a3 → dbos-1.5.0a5}/pyproject.toml +1 -1
- {dbos-1.5.0a3 → dbos-1.5.0a5}/tests/test_admin_server.py +150 -3
- {dbos-1.5.0a3 → dbos-1.5.0a5}/tests/test_workflow_management.py +2 -1
- {dbos-1.5.0a3 → dbos-1.5.0a5}/LICENSE +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/README.md +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/__init__.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/__main__.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_app_db.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_classproperty.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_client.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_context.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_core.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_croniter.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_dbos.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_dbos_config.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_debug.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_docker_pg_helper.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_error.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_event_loop.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_fastapi.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_flask.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_kafka.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_kafka_message.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_logger.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_migrations/env.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_migrations/script.py.mako +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_migrations/versions/04ca4f231047_workflow_queues_executor_id.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_migrations/versions/27ac6900c6ad_add_queue_dedup.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_migrations/versions/50f3227f0b4b_fix_job_queue.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_migrations/versions/5c361fc04708_added_system_tables.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_migrations/versions/66478e1b95e5_consolidate_queues.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_migrations/versions/83f3732ae8e7_workflow_timeout.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_migrations/versions/933e86bdac6a_add_queue_priority.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_migrations/versions/a3b18ad34abe_added_triggers.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_migrations/versions/d76646551a6b_job_queue_limiter.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_migrations/versions/d76646551a6c_workflow_queue.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_migrations/versions/d994145b47b6_consolidate_inputs.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_migrations/versions/eab0cc1d9a14_job_queue.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_outcome.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_queue.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_recovery.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_registrations.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_roles.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_scheduler.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_schemas/__init__.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_schemas/application_database.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_schemas/system_database.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_serialization.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_sys_db.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_templates/dbos-db-starter/README.md +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_templates/dbos-db-starter/__package/__init__.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_templates/dbos-db-starter/__package/main.py.dbos +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_templates/dbos-db-starter/__package/schema.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_templates/dbos-db-starter/alembic.ini +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_templates/dbos-db-starter/migrations/env.py.dbos +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_templates/dbos-db-starter/migrations/script.py.mako +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_tracer.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_utils.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/cli/_github_init.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/cli/_template_init.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/cli/cli.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/dbos-config.schema.json +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/py.typed +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/tests/__init__.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/tests/atexit_no_ctor.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/tests/atexit_no_launch.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/tests/classdefs.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/tests/client_collateral.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/tests/client_worker.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/tests/conftest.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/tests/dupname_classdefs1.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/tests/dupname_classdefsa.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/tests/more_classdefs.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/tests/queuedworkflow.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/tests/test_async.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/tests/test_classdecorators.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/tests/test_cli.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/tests/test_client.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/tests/test_concurrency.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/tests/test_config.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/tests/test_croniter.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/tests/test_dbos.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/tests/test_debug.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/tests/test_docker_secrets.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/tests/test_failures.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/tests/test_fastapi.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/tests/test_fastapi_roles.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/tests/test_flask.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/tests/test_kafka.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/tests/test_outcome.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/tests/test_package.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/tests/test_queue.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/tests/test_scheduler.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/tests/test_schema_migration.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/tests/test_singleton.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/tests/test_spans.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/tests/test_sqlalchemy.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/tests/test_workflow_introspection.py +0 -0
- {dbos-1.5.0a3 → dbos-1.5.0a5}/version/__init__.py +0 -0
{dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_admin_server.py

```diff
@@ -5,7 +5,7 @@ import re
 import threading
 from functools import partial
 from http.server import BaseHTTPRequestHandler, ThreadingHTTPServer
-from typing import TYPE_CHECKING, Any, List, Optional, TypedDict
+from typing import TYPE_CHECKING, Any, List, Optional, TypedDict, Dict

 from dbos._workflow_commands import garbage_collect, global_timeout

@@ -24,6 +24,8 @@ _deactivate_path = "/deactivate"
 _workflow_queues_metadata_path = "/dbos-workflow-queues-metadata"
 _garbage_collect_path = "/dbos-garbage-collect"
 _global_timeout_path = "/dbos-global-timeout"
+_queued_workflows_path = "/queues"
+_workflows_path = "/workflows"
 # /workflows/:workflow_id/cancel
 # /workflows/:workflow_id/resume
 # /workflows/:workflow_id/restart
@@ -104,10 +106,24 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
         steps_match = re.match(
             r"^/workflows/(?P<workflow_id>[^/]+)/steps$", self.path
         )
+        workflow_match = re.match(r"^/workflows/(?P<workflow_id>[^/]+)$", self.path)

         if steps_match:
             workflow_id = steps_match.group("workflow_id")
             self._handle_steps(workflow_id)
+        elif workflow_match:
+            workflow_id = workflow_match.group("workflow_id")
+            workflows = self.dbos.list_workflows(workflow_ids=[workflow_id])
+            if not workflows:
+                self.send_response(404)
+                self._end_headers()
+                return
+            response_body = json.dumps(workflows[0].__dict__).encode("utf-8")
+            self.send_response(200)
+            self.send_header("Content-Type", "application/json")
+            self.send_header("Content-Length", str(len(response_body)))
+            self._end_headers()
+            self.wfile.write(response_body)
         else:
             self.send_response(404)
             self._end_headers()
@@ -126,6 +142,32 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
             self.send_response(200)
             self._end_headers()
             self.wfile.write(json.dumps(workflow_ids).encode("utf-8"))
+        elif self.path == _workflows_path:
+            try:
+                filters = json.loads(post_data.decode("utf-8")) if post_data else {}
+                self._handle_workflows(filters)
+            except (json.JSONDecodeError, AttributeError) as e:
+                self.send_response(400)
+                self.send_header("Content-Type", "application/json")
+                self.end_headers()
+                self.wfile.write(
+                    json.dumps({"error": f"Invalid JSON input: {str(e)}"}).encode(
+                        "utf-8"
+                    )
+                )
+        elif self.path == _queued_workflows_path:
+            try:
+                filters = json.loads(post_data.decode("utf-8")) if post_data else {}
+                self._handle_queued_workflows(filters)
+            except (json.JSONDecodeError, AttributeError) as e:
+                self.send_response(400)
+                self.send_header("Content-Type", "application/json")
+                self.end_headers()
+                self.wfile.write(
+                    json.dumps({"error": f"Invalid JSON input: {str(e)}"}).encode(
+                        "utf-8"
+                    )
+                )
         elif self.path == _garbage_collect_path:
             inputs = json.loads(post_data.decode("utf-8"))
             cutoff_epoch_timestamp_ms = inputs.get("cutoff_epoch_timestamp_ms", None)
@@ -139,12 +181,11 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
             self._end_headers()
         elif self.path == _global_timeout_path:
             inputs = json.loads(post_data.decode("utf-8"))
-
-            global_timeout(self.dbos,
+            cutoff_epoch_timestamp_ms = inputs.get("cutoff_epoch_timestamp_ms", None)
+            global_timeout(self.dbos, cutoff_epoch_timestamp_ms)
             self.send_response(204)
             self._end_headers()
         else:
-
             restart_match = re.match(
                 r"^/workflows/(?P<workflow_id>[^/]+)/restart$", self.path
             )
@@ -283,6 +324,50 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
             self._end_headers()
             self.wfile.write(json_steps)

+    def _handle_workflows(self, filters: Dict[str, Any]) -> None:
+        workflows = self.dbos.list_workflows(
+            workflow_ids=filters.get("workflow_ids"),
+            name=filters.get("name"),
+            start_time=filters.get("start_time"),
+            end_time=filters.get("end_time"),
+            status=filters.get("status"),
+            app_version=filters.get("application_version"),
+            limit=filters.get("limit"),
+            offset=filters.get("offset"),
+            sort_desc=filters.get("sort_desc", False),
+            workflow_id_prefix=filters.get("workflow_id_prefix"),
+        )
+
+        response_body = json.dumps(
+            [workflow.__dict__ for workflow in workflows]
+        ).encode("utf-8")
+        self.send_response(200)
+        self.send_header("Content-Type", "application/json")
+        self.send_header("Content-Length", str(len(response_body)))
+        self._end_headers()
+        self.wfile.write(response_body)
+
+    def _handle_queued_workflows(self, filters: Dict[str, Any]) -> None:
+        workflows = self.dbos.list_queued_workflows(
+            queue_name=filters.get("queue_name"),
+            name=filters.get("name"),
+            start_time=filters.get("start_time"),
+            end_time=filters.get("end_time"),
+            status=filters.get("status"),
+            limit=filters.get("limit"),
+            offset=filters.get("offset"),
+            sort_desc=filters.get("sort_desc", False),
+        )
+
+        response_body = json.dumps(
+            [workflow.__dict__ for workflow in workflows]
+        ).encode("utf-8")
+        self.send_response(200)
+        self.send_header("Content-Type", "application/json")
+        self.send_header("Content-Length", str(len(response_body)))
+        self._end_headers()
+        self.wfile.write(response_body)
+

 # Be consistent with DBOS-TS response.
 class PerfUtilization(TypedDict):
```
{dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_conductor/conductor.py

```diff
@@ -13,7 +13,9 @@ from websockets.sync.connection import Connection
 from dbos._context import SetWorkflowID
 from dbos._utils import GlobalParams
 from dbos._workflow_commands import (
+    garbage_collect,
     get_workflow,
+    global_timeout,
     list_queued_workflows,
     list_workflow_steps,
     list_workflows,
@@ -356,6 +358,41 @@ class ConductorWebsocket(threading.Thread):
                         error_message=error_message,
                     )
                     websocket.send(list_steps_response.to_json())
+                elif msg_type == p.MessageType.RETENTION:
+                    retention_message = p.RetentionRequest.from_json(message)
+                    success = True
+                    try:
+                        garbage_collect(
+                            self.dbos,
+                            cutoff_epoch_timestamp_ms=retention_message.body[
+                                "gc_cutoff_epoch_ms"
+                            ],
+                            rows_threshold=retention_message.body[
+                                "gc_rows_threshold"
+                            ],
+                        )
+                        if (
+                            retention_message.body["timeout_cutoff_epoch_ms"]
+                            is not None
+                        ):
+                            global_timeout(
+                                self.dbos,
+                                retention_message.body[
+                                    "timeout_cutoff_epoch_ms"
+                                ],
+                            )
+                    except Exception as e:
+                        error_message = f"Exception encountered during enforcing retention policy: {traceback.format_exc()}"
+                        self.dbos.logger.error(error_message)
+                        success = False
+
+                    retention_response = p.RetentionResponse(
+                        type=p.MessageType.RETENTION,
+                        request_id=base_message.request_id,
+                        success=success,
+                        error_message=error_message,
+                    )
+                    websocket.send(retention_response.to_json())
                 else:
                     self.dbos.logger.warning(
                         f"Unexpected message type: {msg_type}"
```
{dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_conductor/protocol.py

```diff
@@ -18,6 +18,7 @@ class MessageType(str, Enum):
     EXIST_PENDING_WORKFLOWS = "exist_pending_workflows"
     LIST_STEPS = "list_steps"
     FORK_WORKFLOW = "fork_workflow"
+    RETENTION = "retention"


 T = TypeVar("T", bound="BaseMessage")
@@ -280,3 +281,20 @@ class ForkWorkflowRequest(BaseMessage):
 class ForkWorkflowResponse(BaseMessage):
     new_workflow_id: Optional[str]
     error_message: Optional[str] = None
+
+
+class RetentionBody(TypedDict):
+    gc_cutoff_epoch_ms: Optional[int]
+    gc_rows_threshold: Optional[int]
+    timeout_cutoff_epoch_ms: Optional[int]
+
+
+@dataclass
+class RetentionRequest(BaseMessage):
+    body: RetentionBody
+
+
+@dataclass
+class RetentionResponse(BaseMessage):
+    success: bool
+    error_message: Optional[str] = None
```
{dbos-1.5.0a3 → dbos-1.5.0a5}/dbos/_workflow_commands.py

```diff
@@ -141,8 +141,7 @@ def garbage_collect(
     dbos._app_db.garbage_collect(cutoff_epoch_timestamp_ms, pending_workflow_ids)


-def global_timeout(dbos: "DBOS",
-    cutoff_epoch_timestamp_ms = int(time.time() * 1000) - timeout_ms
+def global_timeout(dbos: "DBOS", cutoff_epoch_timestamp_ms: int) -> None:
     cutoff_iso = datetime.fromtimestamp(cutoff_epoch_timestamp_ms / 1000).isoformat()
     for workflow in dbos.list_workflows(
         status=WorkflowStatusString.PENDING.value, end_time=cutoff_iso
```
{dbos-1.5.0a3 → dbos-1.5.0a5}/tests/test_admin_server.py

```diff
@@ -3,7 +3,7 @@ import socket
 import threading
 import time
 import uuid
-from datetime import datetime
+from datetime import datetime, timezone

 import pytest
 import requests
@@ -455,6 +455,108 @@ def test_admin_workflow_fork(dbos: DBOS, sys_db: SystemDatabase) -> None:
     assert worked, "Workflow did not finish successfully"


+def test_list_workflows(dbos: DBOS) -> None:
+    # Create workflows for testing
+    @DBOS.workflow()
+    def test_workflow_1() -> None:
+        pass
+
+    @DBOS.workflow()
+    def test_workflow_2() -> None:
+        pass
+
+    # Start workflows
+    handle_1 = DBOS.start_workflow(test_workflow_1)
+    time.sleep(2)  # Sleep for 2 seconds between workflows
+    handle_2 = DBOS.start_workflow(test_workflow_2)
+
+    # Wait for workflows to complete
+    handle_1.get_result()
+    handle_2.get_result()
+
+    # List workflows and dynamically set the filter "name"
+    workflows_list = DBOS.list_workflows()
+    assert (
+        len(workflows_list) >= 2
+    ), f"Expected at least 2 workflows, but got {len(workflows_list)}"
+
+    workflow_ids = [w.workflow_id for w in workflows_list]
+
+    # Convert created_at to ISO 8601 format
+    created_at_second_workflow = workflows_list[1].created_at
+    assert (
+        created_at_second_workflow is not None
+    ), "created_at for the second workflow is None"
+    start_time_filter = datetime.fromtimestamp(
+        created_at_second_workflow / 1000, tz=timezone.utc
+    ).isoformat()
+
+    # Test POST /workflows with filters
+    filters = {
+        "workflow_ids": workflow_ids,
+        "start_time": start_time_filter,
+    }
+    response = requests.post("http://localhost:3001/workflows", json=filters, timeout=5)
+    assert response.status_code == 200
+
+    workflows = response.json()
+    assert len(workflows) == 1, f"Expected 1 workflows, but got {len(workflows)}"
+    assert workflows[0]["workflow_id"] == handle_2.workflow_id, "Workflow ID mismatch"
+
+    # Test POST /workflows without filters
+    response = requests.post("http://localhost:3001/workflows", json={}, timeout=5)
+    assert response.status_code == 200
+
+    workflows = response.json()
+    assert len(workflows) == len(
+        workflows_list
+    ), f"Expected {len(workflows_list)} workflows, but got {len(workflows)}"
+    for workflow in workflows:
+        assert workflow["workflow_id"] in workflow_ids, "Workflow ID mismatch"
+
+
+def test_get_workflow_by_id(dbos: DBOS) -> None:
+    # Create workflows for testing
+    @DBOS.workflow()
+    def test_workflow_1() -> None:
+        pass
+
+    @DBOS.workflow()
+    def test_workflow_2() -> None:
+        pass
+
+    # Start workflows
+    handle_1 = DBOS.start_workflow(test_workflow_1)
+    handle_2 = DBOS.start_workflow(test_workflow_2)
+
+    # Wait for workflows to complete
+    handle_1.get_result()
+    handle_2.get_result()
+
+    # Get the workflow ID of the second workflow
+    workflow_id = handle_2.workflow_id
+
+    # Test GET /workflows/:workflow_id for an existing workflow
+    response = requests.get(f"http://localhost:3001/workflows/{workflow_id}", timeout=5)
+    assert (
+        response.status_code == 200
+    ), f"Expected status code 200, but got {response.status_code}"
+
+    workflow_data = response.json()
+    assert workflow_data["workflow_id"] == workflow_id, "Workflow ID mismatch"
+    assert (
+        workflow_data["status"] == "SUCCESS"
+    ), "Expected workflow status to be SUCCESS"
+
+    # Test GET /workflows/:workflow_id for a non-existing workflow
+    non_existing_workflow_id = "non-existing-id"
+    response = requests.get(
+        f"http://localhost:3001/workflows/{non_existing_workflow_id}", timeout=5
+    )
+    assert (
+        response.status_code == 404
+    ), f"Expected status code 404, but got {response.status_code}"
+
+
 def test_admin_garbage_collect(dbos: DBOS) -> None:

     @DBOS.workflow()
@@ -484,12 +586,57 @@ def test_admin_global_timeout(dbos: DBOS) -> None:

     handle = DBOS.start_workflow(workflow)
     time.sleep(1)
-
+    cutoff_epoch_timestamp_ms = int(time.time() * 1000) - 1000
     response = requests.post(
         f"http://localhost:3001/dbos-global-timeout",
-        json={"
+        json={"cutoff_epoch_timestamp_ms": cutoff_epoch_timestamp_ms},
         timeout=5,
     )
     response.raise_for_status()
     with pytest.raises(DBOSWorkflowCancelledError):
         handle.get_result()
+
+
+def test_queued_workflows_endpoint(dbos: DBOS) -> None:
+    """Test the /queues endpoint with various filters and scenarios."""
+
+    # Set up a queue for testing
+    test_queue1 = Queue("test-queue-1", concurrency=1)
+    test_queue2 = Queue("test-queue-2", concurrency=1)
+
+    @DBOS.workflow()
+    def blocking_workflow() -> str:
+        while True:
+            time.sleep(0.1)
+
+    # Enqueue some workflows to create queued entries
+    handles = []
+    handles.append(test_queue1.enqueue(blocking_workflow))
+    handles.append(test_queue1.enqueue(blocking_workflow))
+    handles.append(test_queue2.enqueue(blocking_workflow))
+
+    # Test basic queued workflows endpoint
+    response = requests.post("http://localhost:3001/queues", json={}, timeout=5)
+    assert response.status_code == 200, f"Expected status 200, got {response.status_code}"
+
+    queued_workflows = response.json()
+    assert isinstance(queued_workflows, list), "Response should be a list"
+    assert len(queued_workflows) == 3, f"Expected 3 queued workflows, got {len(queued_workflows)}"
+
+    # Test with filters
+    filters = {"queue_name": "test-queue-1", "limit": 1}
+    response = requests.post("http://localhost:3001/queues", json=filters, timeout=5)
+    assert response.status_code == 200
+
+    filtered_workflows = response.json()
+    assert isinstance(filtered_workflows, list), "Response should be a list"
+    assert len(filtered_workflows) == 1, f"Expected 1 workflow, got {len(filtered_workflows)}"
+
+    # Test with non-existent queue name
+    filters = {"queue_name": "non-existent-queue"}
+    response = requests.post("http://localhost:3001/queues", json=filters, timeout=5)
+    assert response.status_code == 200
+
+    empty_result = response.json()
+    assert isinstance(empty_result, list), "Response should be a list even for non-existent queue"
+    assert len(empty_result) == 0, "Expected no workflows for non-existent queue"
```
{dbos-1.5.0a3 → dbos-1.5.0a5}/tests/test_workflow_management.py

```diff
@@ -737,7 +737,8 @@ def test_global_timeout(dbos: DBOS) -> None:
     # Wait one second, start one final workflow, then timeout all workflows started more than one second ago
     time.sleep(1)
     final_handle = DBOS.start_workflow(blocked_workflow)
-
+    cutoff_epoch_timestamp_ms = int(time.time() * 1000) - 1000
+    global_timeout(dbos, cutoff_epoch_timestamp_ms)

     # Verify all workflows started before the global timeout are cancelled
     for handle in handles:
```