aethergraph 0.1.0a1__py3-none-any.whl → 0.1.0a3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aethergraph/__init__.py +4 -10
- aethergraph/__main__.py +296 -0
- aethergraph/api/v1/__init__.py +0 -0
- aethergraph/api/v1/agents.py +46 -0
- aethergraph/api/v1/apps.py +70 -0
- aethergraph/api/v1/artifacts.py +415 -0
- aethergraph/api/v1/channels.py +89 -0
- aethergraph/api/v1/deps.py +168 -0
- aethergraph/api/v1/graphs.py +259 -0
- aethergraph/api/v1/identity.py +25 -0
- aethergraph/api/v1/memory.py +353 -0
- aethergraph/api/v1/misc.py +47 -0
- aethergraph/api/v1/pagination.py +29 -0
- aethergraph/api/v1/runs.py +568 -0
- aethergraph/api/v1/schemas.py +535 -0
- aethergraph/api/v1/session.py +323 -0
- aethergraph/api/v1/stats.py +201 -0
- aethergraph/api/v1/viz.py +152 -0
- aethergraph/config/config.py +22 -0
- aethergraph/config/loader.py +3 -2
- aethergraph/config/storage.py +209 -0
- aethergraph/contracts/__init__.py +0 -0
- aethergraph/contracts/services/__init__.py +0 -0
- aethergraph/contracts/services/artifacts.py +27 -14
- aethergraph/contracts/services/memory.py +45 -17
- aethergraph/contracts/services/metering.py +129 -0
- aethergraph/contracts/services/runs.py +50 -0
- aethergraph/contracts/services/sessions.py +87 -0
- aethergraph/contracts/services/state_stores.py +3 -0
- aethergraph/contracts/services/viz.py +44 -0
- aethergraph/contracts/storage/artifact_index.py +88 -0
- aethergraph/contracts/storage/artifact_store.py +99 -0
- aethergraph/contracts/storage/async_kv.py +34 -0
- aethergraph/contracts/storage/blob_store.py +50 -0
- aethergraph/contracts/storage/doc_store.py +35 -0
- aethergraph/contracts/storage/event_log.py +31 -0
- aethergraph/contracts/storage/vector_index.py +48 -0
- aethergraph/core/__init__.py +0 -0
- aethergraph/core/execution/forward_scheduler.py +13 -2
- aethergraph/core/execution/global_scheduler.py +21 -15
- aethergraph/core/execution/step_forward.py +10 -1
- aethergraph/core/graph/__init__.py +0 -0
- aethergraph/core/graph/graph_builder.py +8 -4
- aethergraph/core/graph/graph_fn.py +156 -15
- aethergraph/core/graph/graph_spec.py +8 -0
- aethergraph/core/graph/graphify.py +146 -27
- aethergraph/core/graph/node_spec.py +0 -2
- aethergraph/core/graph/node_state.py +3 -0
- aethergraph/core/graph/task_graph.py +39 -1
- aethergraph/core/runtime/__init__.py +0 -0
- aethergraph/core/runtime/ad_hoc_context.py +64 -4
- aethergraph/core/runtime/base_service.py +28 -4
- aethergraph/core/runtime/execution_context.py +13 -15
- aethergraph/core/runtime/graph_runner.py +222 -37
- aethergraph/core/runtime/node_context.py +510 -6
- aethergraph/core/runtime/node_services.py +12 -5
- aethergraph/core/runtime/recovery.py +15 -1
- aethergraph/core/runtime/run_manager.py +783 -0
- aethergraph/core/runtime/run_manager_local.py +204 -0
- aethergraph/core/runtime/run_registration.py +2 -2
- aethergraph/core/runtime/run_types.py +89 -0
- aethergraph/core/runtime/runtime_env.py +136 -7
- aethergraph/core/runtime/runtime_metering.py +71 -0
- aethergraph/core/runtime/runtime_registry.py +36 -13
- aethergraph/core/runtime/runtime_services.py +194 -6
- aethergraph/core/tools/builtins/toolset.py +1 -1
- aethergraph/core/tools/toolkit.py +5 -0
- aethergraph/plugins/agents/default_chat_agent copy.py +90 -0
- aethergraph/plugins/agents/default_chat_agent.py +171 -0
- aethergraph/plugins/agents/shared.py +81 -0
- aethergraph/plugins/channel/adapters/webui.py +112 -112
- aethergraph/plugins/channel/routes/webui_routes.py +367 -102
- aethergraph/plugins/channel/utils/slack_utils.py +115 -59
- aethergraph/plugins/channel/utils/telegram_utils.py +88 -47
- aethergraph/plugins/channel/websockets/weibui_ws.py +172 -0
- aethergraph/runtime/__init__.py +15 -0
- aethergraph/server/app_factory.py +196 -34
- aethergraph/server/clients/channel_client.py +202 -0
- aethergraph/server/http/channel_http_routes.py +116 -0
- aethergraph/server/http/channel_ws_routers.py +45 -0
- aethergraph/server/loading.py +117 -0
- aethergraph/server/server.py +131 -0
- aethergraph/server/server_state.py +240 -0
- aethergraph/server/start.py +227 -66
- aethergraph/server/ui_static/assets/KaTeX_AMS-Regular-BQhdFMY1.woff2 +0 -0
- aethergraph/server/ui_static/assets/KaTeX_AMS-Regular-DMm9YOAa.woff +0 -0
- aethergraph/server/ui_static/assets/KaTeX_AMS-Regular-DRggAlZN.ttf +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Caligraphic-Bold-ATXxdsX0.ttf +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Caligraphic-Bold-BEiXGLvX.woff +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Caligraphic-Bold-Dq_IR9rO.woff2 +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Caligraphic-Regular-CTRA-rTL.woff +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Caligraphic-Regular-Di6jR-x-.woff2 +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Caligraphic-Regular-wX97UBjC.ttf +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Fraktur-Bold-BdnERNNW.ttf +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Fraktur-Bold-BsDP51OF.woff +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Fraktur-Bold-CL6g_b3V.woff2 +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Fraktur-Regular-CB_wures.ttf +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Fraktur-Regular-CTYiF6lA.woff2 +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Fraktur-Regular-Dxdc4cR9.woff +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Main-Bold-Cx986IdX.woff2 +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Main-Bold-Jm3AIy58.woff +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Main-Bold-waoOVXN0.ttf +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Main-BoldItalic-DxDJ3AOS.woff2 +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Main-BoldItalic-DzxPMmG6.ttf +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Main-BoldItalic-SpSLRI95.woff +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Main-Italic-3WenGoN9.ttf +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Main-Italic-BMLOBm91.woff +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Main-Italic-NWA7e6Wa.woff2 +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Main-Regular-B22Nviop.woff2 +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Main-Regular-Dr94JaBh.woff +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Main-Regular-ypZvNtVU.ttf +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Math-BoldItalic-B3XSjfu4.ttf +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Math-BoldItalic-CZnvNsCZ.woff2 +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Math-BoldItalic-iY-2wyZ7.woff +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Math-Italic-DA0__PXp.woff +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Math-Italic-flOr_0UB.ttf +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Math-Italic-t53AETM-.woff2 +0 -0
- aethergraph/server/ui_static/assets/KaTeX_SansSerif-Bold-CFMepnvq.ttf +0 -0
- aethergraph/server/ui_static/assets/KaTeX_SansSerif-Bold-D1sUS0GD.woff2 +0 -0
- aethergraph/server/ui_static/assets/KaTeX_SansSerif-Bold-DbIhKOiC.woff +0 -0
- aethergraph/server/ui_static/assets/KaTeX_SansSerif-Italic-C3H0VqGB.woff2 +0 -0
- aethergraph/server/ui_static/assets/KaTeX_SansSerif-Italic-DN2j7dab.woff +0 -0
- aethergraph/server/ui_static/assets/KaTeX_SansSerif-Italic-YYjJ1zSn.ttf +0 -0
- aethergraph/server/ui_static/assets/KaTeX_SansSerif-Regular-BNo7hRIc.ttf +0 -0
- aethergraph/server/ui_static/assets/KaTeX_SansSerif-Regular-CS6fqUqJ.woff +0 -0
- aethergraph/server/ui_static/assets/KaTeX_SansSerif-Regular-DDBCnlJ7.woff2 +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Script-Regular-C5JkGWo-.ttf +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Script-Regular-D3wIWfF6.woff2 +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Script-Regular-D5yQViql.woff +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Size1-Regular-C195tn64.woff +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Size1-Regular-Dbsnue_I.ttf +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Size1-Regular-mCD8mA8B.woff2 +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Size2-Regular-B7gKUWhC.ttf +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Size2-Regular-Dy4dx90m.woff2 +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Size2-Regular-oD1tc_U0.woff +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Size3-Regular-CTq5MqoE.woff +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Size3-Regular-DgpXs0kz.ttf +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Size4-Regular-BF-4gkZK.woff +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Size4-Regular-DWFBv043.ttf +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Size4-Regular-Dl5lxZxV.woff2 +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Typewriter-Regular-C0xS9mPB.woff +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Typewriter-Regular-CO6r4hn1.woff2 +0 -0
- aethergraph/server/ui_static/assets/KaTeX_Typewriter-Regular-D3Ib7_Hf.ttf +0 -0
- aethergraph/server/ui_static/assets/index-BR5GtXcZ.css +1 -0
- aethergraph/server/ui_static/assets/index-CQ0HZZ83.js +400 -0
- aethergraph/server/ui_static/index.html +15 -0
- aethergraph/server/ui_static/logo.png +0 -0
- aethergraph/services/artifacts/__init__.py +0 -0
- aethergraph/services/artifacts/facade.py +1239 -132
- aethergraph/services/auth/{dev.py → authn.py} +0 -8
- aethergraph/services/auth/authz.py +100 -0
- aethergraph/services/channel/__init__.py +0 -0
- aethergraph/services/channel/channel_bus.py +19 -1
- aethergraph/services/channel/factory.py +13 -1
- aethergraph/services/channel/ingress.py +311 -0
- aethergraph/services/channel/queue_adapter.py +75 -0
- aethergraph/services/channel/session.py +502 -19
- aethergraph/services/container/default_container.py +122 -43
- aethergraph/services/continuations/continuation.py +6 -0
- aethergraph/services/continuations/stores/fs_store.py +19 -0
- aethergraph/services/eventhub/event_hub.py +76 -0
- aethergraph/services/kv/__init__.py +0 -0
- aethergraph/services/kv/ephemeral.py +244 -0
- aethergraph/services/llm/__init__.py +0 -0
- aethergraph/services/llm/generic_client copy.py +691 -0
- aethergraph/services/llm/generic_client.py +1288 -187
- aethergraph/services/llm/providers.py +3 -1
- aethergraph/services/llm/types.py +47 -0
- aethergraph/services/llm/utils.py +284 -0
- aethergraph/services/logger/std.py +3 -0
- aethergraph/services/mcp/__init__.py +9 -0
- aethergraph/services/mcp/http_client.py +38 -0
- aethergraph/services/mcp/service.py +225 -1
- aethergraph/services/mcp/stdio_client.py +41 -6
- aethergraph/services/mcp/ws_client.py +44 -2
- aethergraph/services/memory/__init__.py +0 -0
- aethergraph/services/memory/distillers/llm_long_term.py +234 -0
- aethergraph/services/memory/distillers/llm_meta_summary.py +398 -0
- aethergraph/services/memory/distillers/long_term.py +225 -0
- aethergraph/services/memory/facade/__init__.py +3 -0
- aethergraph/services/memory/facade/chat.py +440 -0
- aethergraph/services/memory/facade/core.py +447 -0
- aethergraph/services/memory/facade/distillation.py +424 -0
- aethergraph/services/memory/facade/rag.py +410 -0
- aethergraph/services/memory/facade/results.py +315 -0
- aethergraph/services/memory/facade/retrieval.py +139 -0
- aethergraph/services/memory/facade/types.py +77 -0
- aethergraph/services/memory/facade/utils.py +43 -0
- aethergraph/services/memory/facade_dep.py +1539 -0
- aethergraph/services/memory/factory.py +9 -3
- aethergraph/services/memory/utils.py +10 -0
- aethergraph/services/metering/eventlog_metering.py +470 -0
- aethergraph/services/metering/noop.py +25 -4
- aethergraph/services/rag/__init__.py +0 -0
- aethergraph/services/rag/facade.py +279 -23
- aethergraph/services/rag/index_factory.py +2 -2
- aethergraph/services/rag/node_rag.py +317 -0
- aethergraph/services/rate_limit/inmem_rate_limit.py +24 -0
- aethergraph/services/registry/__init__.py +0 -0
- aethergraph/services/registry/agent_app_meta.py +419 -0
- aethergraph/services/registry/registry_key.py +1 -1
- aethergraph/services/registry/unified_registry.py +74 -6
- aethergraph/services/scope/scope.py +159 -0
- aethergraph/services/scope/scope_factory.py +164 -0
- aethergraph/services/state_stores/serialize.py +5 -0
- aethergraph/services/state_stores/utils.py +2 -1
- aethergraph/services/viz/__init__.py +0 -0
- aethergraph/services/viz/facade.py +413 -0
- aethergraph/services/viz/viz_service.py +69 -0
- aethergraph/storage/artifacts/artifact_index_jsonl.py +180 -0
- aethergraph/storage/artifacts/artifact_index_sqlite.py +426 -0
- aethergraph/storage/artifacts/cas_store.py +422 -0
- aethergraph/storage/artifacts/fs_cas.py +18 -0
- aethergraph/storage/artifacts/s3_cas.py +14 -0
- aethergraph/storage/artifacts/utils.py +124 -0
- aethergraph/storage/blob/fs_blob.py +86 -0
- aethergraph/storage/blob/s3_blob.py +115 -0
- aethergraph/storage/continuation_store/fs_cont.py +283 -0
- aethergraph/storage/continuation_store/inmem_cont.py +146 -0
- aethergraph/storage/continuation_store/kvdoc_cont.py +261 -0
- aethergraph/storage/docstore/fs_doc.py +63 -0
- aethergraph/storage/docstore/sqlite_doc.py +31 -0
- aethergraph/storage/docstore/sqlite_doc_sync.py +90 -0
- aethergraph/storage/eventlog/fs_event.py +136 -0
- aethergraph/storage/eventlog/sqlite_event.py +47 -0
- aethergraph/storage/eventlog/sqlite_event_sync.py +178 -0
- aethergraph/storage/factory.py +432 -0
- aethergraph/storage/fs_utils.py +28 -0
- aethergraph/storage/graph_state_store/state_store.py +64 -0
- aethergraph/storage/kv/inmem_kv.py +103 -0
- aethergraph/storage/kv/layered_kv.py +52 -0
- aethergraph/storage/kv/sqlite_kv.py +39 -0
- aethergraph/storage/kv/sqlite_kv_sync.py +98 -0
- aethergraph/storage/memory/event_persist.py +68 -0
- aethergraph/storage/memory/fs_persist.py +118 -0
- aethergraph/{services/memory/hotlog_kv.py → storage/memory/hotlog.py} +8 -2
- aethergraph/{services → storage}/memory/indices.py +31 -7
- aethergraph/storage/metering/meter_event.py +55 -0
- aethergraph/storage/runs/doc_store.py +280 -0
- aethergraph/storage/runs/inmen_store.py +82 -0
- aethergraph/storage/runs/sqlite_run_store.py +403 -0
- aethergraph/storage/sessions/doc_store.py +183 -0
- aethergraph/storage/sessions/inmem_store.py +110 -0
- aethergraph/storage/sessions/sqlite_session_store.py +399 -0
- aethergraph/storage/vector_index/chroma_index.py +138 -0
- aethergraph/storage/vector_index/faiss_index.py +179 -0
- aethergraph/storage/vector_index/sqlite_index.py +187 -0
- {aethergraph-0.1.0a1.dist-info → aethergraph-0.1.0a3.dist-info}/METADATA +138 -31
- aethergraph-0.1.0a3.dist-info/RECORD +356 -0
- aethergraph-0.1.0a3.dist-info/entry_points.txt +3 -0
- aethergraph/services/artifacts/factory.py +0 -35
- aethergraph/services/artifacts/fs_store.py +0 -656
- aethergraph/services/artifacts/jsonl_index.py +0 -123
- aethergraph/services/artifacts/sqlite_index.py +0 -209
- aethergraph/services/memory/distillers/episode.py +0 -116
- aethergraph/services/memory/distillers/rolling.py +0 -74
- aethergraph/services/memory/facade.py +0 -633
- aethergraph/services/memory/persist_fs.py +0 -40
- aethergraph/services/rag/index/base.py +0 -27
- aethergraph/services/rag/index/faiss_index.py +0 -121
- aethergraph/services/rag/index/sqlite_index.py +0 -134
- aethergraph-0.1.0a1.dist-info/RECORD +0 -182
- aethergraph-0.1.0a1.dist-info/entry_points.txt +0 -2
- {aethergraph-0.1.0a1.dist-info → aethergraph-0.1.0a3.dist-info}/WHEEL +0 -0
- {aethergraph-0.1.0a1.dist-info → aethergraph-0.1.0a3.dist-info}/licenses/LICENSE +0 -0
- {aethergraph-0.1.0a1.dist-info → aethergraph-0.1.0a3.dist-info}/licenses/NOTICE +0 -0
- {aethergraph-0.1.0a1.dist-info → aethergraph-0.1.0a3.dist-info}/top_level.txt +0 -0
```diff
--- /dev/null
+++ b/aethergraph/contracts/services/sessions.py
@@ -0,0 +1,87 @@
+from __future__ import annotations
+
+from collections.abc import Sequence
+from datetime import datetime
+from typing import Protocol
+
+from aethergraph.api.v1.schemas import Session
+from aethergraph.core.runtime.run_types import SessionKind
+
+
+class SessionStore(Protocol):
+    async def create(
+        self,
+        *,
+        kind: SessionKind,
+        user_id: str | None = None,
+        org_id: str | None = None,
+        title: str | None = None,
+        source: str = "webui",
+        external_ref: str | None = None,
+    ) -> Session:
+        """
+        Create a new session and return it.
+        """
+
+    async def get(self, session_id: str) -> Session | None:
+        """
+        Get a session by its ID, or None if not found.
+        """
+
+    async def list_for_user(
+        self,
+        *,
+        user_id: str | None,
+        org_id: str | None = None,
+        kind: SessionKind | None = None,
+        limit: int = 50,
+        offset: int = 0,
+    ) -> Sequence[Session]:
+        """
+        List sessions for a specific user, optionally filtered by kind.
+        """
+
+    async def touch(
+        self,
+        session_id: str,
+        *,
+        updated_at: datetime | None = None,
+    ) -> None:
+        """
+        Update session's updated_at (e.g., when new message/run occurs).
+        No-op if session doesn't exist.
+        """
+
+    async def update(
+        self,
+        session_id: str,
+        *,
+        title: str | None = None,
+        external_ref: str | None = None,
+    ) -> Session | None:
+        """
+        Update session metadata, returning the updated session.
+        No-op if session doesn't exist (returns None).
+        """
+
+    async def delete(self, session_id: str) -> None:
+        """
+        Delete a session by its ID.
+        No-op if session doesn't exist.
+        """
+
+    async def record_artifact(
+        self,
+        session_id: str,
+        *,
+        created_at: datetime | None = None,
+    ) -> None:
+        """
+        Update artifact-related stats for a session:
+
+        - increment artifact_count
+        - update last_artifact_at
+
+        No-op if session_id does not exist.
+        """
+        ...
```
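As a reading aid (not part of the released diff), here is a sketch of how a caller might drive the `SessionStore` protocol above; the helper name and the create/touch wiring are illustrative assumptions.

```python
from aethergraph.api.v1.schemas import Session
from aethergraph.contracts.services.sessions import SessionStore
from aethergraph.core.runtime.run_types import SessionKind


async def resume_or_create(
    store: SessionStore,
    *,
    session_id: str | None,
    kind: SessionKind,
    user_id: str,
) -> Session:
    # Reuse an existing session when the caller supplies an id, otherwise create one.
    if session_id is not None:
        existing = await store.get(session_id)
        if existing is not None:
            await store.touch(session_id)  # bump updated_at; no-op if it was deleted meanwhile
            return existing
    return await store.create(kind=kind, user_id=user_id, source="webui")
```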
```diff
--- a/aethergraph/contracts/services/state_stores.py
+++ b/aethergraph/contracts/services/state_stores.py
@@ -2,6 +2,7 @@
 from __future__ import annotations
 
 from dataclasses import dataclass
+from datetime import datetime
 from typing import Any, Protocol
 
 
@@ -13,6 +14,8 @@ class GraphSnapshot:
     created_at: float  # epoch seconds
     spec_hash: str  # detect spec drift
     state: dict[str, Any]  # JSON-serializable TaskGraphState
+    started_at: datetime | None = None
+    finished_at: datetime | None = None
 
 
 @dataclass
```
```diff
--- /dev/null
+++ b/aethergraph/contracts/services/viz.py
@@ -0,0 +1,44 @@
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Any, Literal
+
+VizKind = Literal["scalar", "vector", "matrix", "image"]
+VizMode = Literal["append", "replace"]
+
+
+@dataclass
+class VizEvent:
+    # Provenance
+    run_id: str
+    graph_id: str
+    node_id: str
+    tool_name: str
+    tool_version: str
+
+    # Visualization fields
+    track_id: str  # unique id for the trace (e.g., "loss", "accuracy")
+    figure_id: str  # optional figure id for grouping traces, e.g. "metrics_panel"
+    viz_kind: VizKind
+    step: int  # iteration or step number
+    mode: VizMode = "append"  # append or replace
+
+    # Tenant-ish fields
+    org_id: str | None = None
+    user_id: str | None = None
+    client_id: str | None = None
+    app_id: str | None = None
+    session_id: str | None = None
+
+    # Payload
+    value: float | None = None  # for scalar
+    vector: list[float] | None = None  # for vector
+    matrix: list[list[float]] | None = None  # for matrix
+    artifact_id: str | None = None  # for image or other artifact-based viz
+
+    # Optional metadata
+    meta: dict[str, Any] | None = None  # {"label": "Training Loss", "color": "blue", ...}
+    tags: list[str] | None = None  # arbitrary tags for filtering or grouping
+
+    # Timestamp
+    created_at: str | None = None  # ISO 8601 timestamp
```
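A hypothetical producer of these events might emit one scalar point per training step; the identifiers below are placeholder values, not API constants.

```python
from datetime import datetime, timezone

from aethergraph.contracts.services.viz import VizEvent


def loss_point(step: int, loss: float) -> VizEvent:
    # One "loss" trace inside a shared metrics figure, appended point by point.
    return VizEvent(
        run_id="run-123",
        graph_id="train-graph",
        node_id="train-step",
        tool_name="trainer",
        tool_version="0.1",
        track_id="loss",            # one trace per metric
        figure_id="metrics_panel",  # group several traces in one figure
        viz_kind="scalar",
        step=step,
        mode="append",              # keep history; "replace" would overwrite the trace
        value=loss,
        meta={"label": "Training Loss"},
        created_at=datetime.now(timezone.utc).isoformat(),
    )
```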
```diff
--- /dev/null
+++ b/aethergraph/contracts/storage/artifact_index.py
@@ -0,0 +1,88 @@
+from typing import Any, Literal, Protocol
+
+from aethergraph.contracts.services.artifacts import Artifact
+
+"""
+Artifact index interface for storing and retrieving artifact metadata.
+This is a special index used for tracking artifacts generated during runs.
+
+Typical implementations include:
+- FileSystemArtifactIndex: File system-based artifact index for durable storage
+- DatabaseArtifactIndex: (future) Database-backed artifact index for scalable storage and querying
+
+Note Artifact index is a specialized index for artifacts, different from general document or blob stores.
+"""
+
+
+class AsyncArtifactIndex(Protocol):
+    """Backend-agnostic index for artifact metadata & occurrences."""
+
+    async def upsert(self, a: Artifact) -> None:
+        """Insert or update a single artifact record."""
+        ...
+
+    async def list_for_run(self, run_id: str) -> list[Artifact]:
+        """Return all artifacts for a given run_id."""
+        ...
+
+    async def search(
+        self,
+        *,
+        kind: str | None = None,
+        labels: dict[str, Any] | None = None,
+        metric: str | None = None,
+        mode: Literal["max", "min"] | None = None,
+        limit: int | None = None,
+        offset: int = 0,
+    ) -> list[Artifact]:
+        """
+        Generic search:
+        - kind: filter by artifact.kind
+        - labels: exact-match filter on labels[k] == v
+        - metric: if provided with mode, order by metrics[metric]
+        - mode: "max" or "min" for metric
+        - limit: optional max number of results
+        - offset: pagination offset, i.e. skip this many rows before returning results
+        """
+        ...
+
+    async def best(
+        self,
+        *,
+        kind: str,
+        metric: str,
+        mode: Literal["max", "min"],
+        filters: dict[str, Any] | None = None,
+    ) -> Artifact | None:
+        """Return the single best artifact for metric under optional label filters."""
+        ...
+
+    async def pin(self, artifact_id: str, pinned: bool = True) -> None:
+        """Mark/unmark an artifact as pinned."""
+        ...
+
+    async def record_occurrence(
+        self,
+        a: Artifact,
+        extra_labels: dict | None = None,
+    ) -> None:
+        """
+        Append-only lineage log: "this artifact was used/created here".
+        """
+        ...
+
+    async def get(self, artifact_id: str) -> Artifact | None:
+        """Get artifact by ID."""
+        ...
+
+    # TODO: add cursor-based pagination for listing/searching large sets
+    # e.g.
+    # async def search_paginated(
+    #     self,
+    #     kind: str | None = None,
+    #     labels: dict[str, Any] | None = None,
+    #     metric: str | None = None,
+    #     mode: Literal["max", "min"] | None = None,
+    #     limit: int | None = None,
+    #     cursor: str | None = None,
+    # ) -> list[Artifact]:
```
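For orientation, a sketch of a typical query against this index; the "checkpoint" kind and "val_accuracy" metric are made-up example values, not names defined by the package.

```python
from aethergraph.contracts.services.artifacts import Artifact
from aethergraph.contracts.storage.artifact_index import AsyncArtifactIndex


async def best_checkpoint(index: AsyncArtifactIndex, run_id: str) -> Artifact | None:
    # Prefer the single best artifact by metric; fall back to whatever the run produced.
    top = await index.best(kind="checkpoint", metric="val_accuracy", mode="max")
    if top is not None:
        return top
    recent = await index.list_for_run(run_id)
    return recent[0] if recent else None
```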
```diff
--- /dev/null
+++ b/aethergraph/contracts/storage/artifact_store.py
@@ -0,0 +1,99 @@
+from contextlib import AbstractAsyncContextManager
+from typing import Any, Protocol
+
+from aethergraph.contracts.services.artifacts import Artifact
+
+
+class AsyncArtifactStore(Protocol):
+    @property
+    def base_uri(self) -> str: ...
+
+    # ---------- save / ingest ----------
+    async def save_file(
+        self,
+        *,
+        path: str,
+        kind: str,
+        run_id: str,
+        graph_id: str,
+        node_id: str,
+        tool_name: str,
+        tool_version: str,
+        suggested_uri: str | None = None,
+        pin: bool = False,
+        labels: dict | None = None,
+        metrics: dict | None = None,
+        preview_uri: str | None = None,
+    ) -> Artifact: ...
+
+    async def open_writer(
+        self,
+        *,
+        kind: str,
+        run_id: str,
+        graph_id: str,
+        node_id: str,
+        tool_name: str,
+        tool_version: str,
+        planned_ext: str | None = None,
+        pin: bool = False,
+    ) -> AbstractAsyncContextManager[Any]: ...
+
+    async def plan_staging_path(self, planned_ext: str = "") -> str: ...
+    async def ingest_staged_file(
+        self,
+        *,
+        staged_path: str,
+        kind: str,
+        run_id: str,
+        graph_id: str,
+        node_id: str,
+        tool_name: str,
+        tool_version: str,
+        pin: bool = False,
+        labels: dict | None = None,
+        metrics: dict | None = None,
+        preview_uri: str | None = None,
+        suggested_uri: str | None = None,
+    ) -> Artifact: ...
+
+    async def plan_staging_dir(self, suffix: str = "") -> str: ...
+    async def ingest_directory(
+        self,
+        *,
+        staged_dir: str,
+        kind: str,
+        run_id: str,
+        graph_id: str,
+        node_id: str,
+        tool_name: str,
+        tool_version: str,
+        include: list[str] | None = None,
+        exclude: list[str] | None = None,
+        index_children: bool = False,
+        pin: bool = False,
+        labels: dict | None = None,
+        metrics: dict | None = None,
+        suggested_uri: str | None = None,
+        archive: bool = False,
+        archive_name: str = "bundle.tar.gz",
+        cleanup: bool = True,
+        store: str | None = None,
+    ) -> Artifact: ...
+
+    # ---------- load ----------
+    async def load_bytes(self, uri: str) -> bytes: ...
+    async def load_text(
+        self,
+        uri: str,
+        *,
+        encoding: str = "utf-8",
+        errors: str = "strict",
+    ) -> str: ...
+
+    async def load_artifact(self, uri: str) -> Any: ...
+    async def load_artifact_bytes(self, uri: str) -> bytes: ...
+    async def load_artifact_dir(self, uri: str) -> str: ...
+
+    # ---------- housekeeping ----------
+    async def cleanup_tmp(self, max_age_hours: int = 24) -> None: ...
```
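A sketch of saving a produced file through this store; every identifier string below is a placeholder chosen for illustration.

```python
from aethergraph.contracts.services.artifacts import Artifact
from aethergraph.contracts.storage.artifact_store import AsyncArtifactStore


async def save_report(store: AsyncArtifactStore, path: str) -> Artifact:
    # Record the file with its run/graph/node provenance and a label for later search.
    return await store.save_file(
        path=path,
        kind="report",
        run_id="run-123",
        graph_id="analysis",
        node_id="summarize",
        tool_name="report_writer",
        tool_version="0.1",
        labels={"format": "markdown"},
        pin=True,  # keep it out of routine cleanup
    )
```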
```diff
--- /dev/null
+++ b/aethergraph/contracts/storage/async_kv.py
@@ -0,0 +1,34 @@
+from typing import Any, Protocol
+
+"""
+Used for defining the AsyncKV protocol interface.
+
+Typical implementations include:
+- EphemeralKV: In-memory, transient key-value store.
+- SQLiteKV: Durable key-value store backed by SQLite.
+- LayeredKV: Combines a fast ephemeral cache with a durable backend.
+- RedisKV: (future) Cloud-based key-value store using Redis.
+- Factory function to create KV instances based on environment configuration.
+
+It is used in various parts of the system for transient and durable storage needs.
+- context.kv() for general KV storage.
+- memory hotlog implementation with KV support
+"""
+
+
+class AsyncKV(Protocol):
+    async def get(self, key: str, default: Any = None) -> Any: ...
+    async def set(self, key: str, value: Any, *, ttl_s: int | None = None) -> None: ...
+    async def delete(self, key: str) -> None: ...
+
+    # Optional
+    async def mget(self, keys: list[str]) -> list[Any]: ...  # multiple get
+    async def mset(
+        self, kv: dict[str, Any], *, ttl_s: int | None = None
+    ) -> None: ...  # multiple set
+    async def expire(self, key: str, ttl_s: int) -> None: ...
+    async def purge_expired(self, limit: int = 1000) -> int: ...  # return number purged
+
+    # Optional: if implemented, allows scanning for cleanup and debugging
+    # Should return all keys starting with "prefix"
+    async def scan_keys(self, prefix: str) -> list[str]: ...
```
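To make the contract concrete, here is a toy dict-backed store covering only the required get/set/delete portion; it is a stand-alone sketch, not the package's EphemeralKV or the SQLite/layered stores under aethergraph/services/kv and aethergraph/storage/kv.

```python
import time
from typing import Any


class TinyKV:
    """Minimal in-memory sketch of the AsyncKV surface with optional TTL."""

    def __init__(self) -> None:
        self._data: dict[str, tuple[Any, float | None]] = {}  # key -> (value, expiry)

    async def get(self, key: str, default: Any = None) -> Any:
        value, expires = self._data.get(key, (default, None))
        if expires is not None and time.monotonic() >= expires:
            self._data.pop(key, None)  # lazily drop expired entries
            return default
        return value

    async def set(self, key: str, value: Any, *, ttl_s: int | None = None) -> None:
        expiry = time.monotonic() + ttl_s if ttl_s is not None else None
        self._data[key] = (value, expiry)

    async def delete(self, key: str) -> None:
        self._data.pop(key, None)
```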
```diff
--- /dev/null
+++ b/aethergraph/contracts/storage/blob_store.py
@@ -0,0 +1,50 @@
+from typing import Protocol
+
+"""
+Blob store interface for storing and retrieving binary large objects (blobs).
+Typical implementations include:
+- FSBlobStore: File system-based blob store for persistent storage
+- S3BlobStore: Amazon S3-based blob store for cloud storage
+
+This is usually used for storing large binary data such as images, audio, or other media files.
+For example:
+- Artifact Store for saving generated images or files
+"""
+
+
+class BlobStore(Protocol):
+    @property
+    def base_uri(self) -> str:  # e.g. file:///..., s3://bucket/prefix
+        ...
+
+    async def put_bytes(
+        self,
+        data: bytes,
+        *,
+        key: str | None = None,
+        ext: str | None = None,
+        mime: str | None = None,
+    ) -> str:
+        """Store bytes under an optional key; return full blob URI."""
+        ...
+
+    async def put_file(
+        self,
+        path: str,
+        *,
+        key: str | None = None,
+        mime: str | None = None,
+        keep_source: bool = False,  # whether to keep the source file after storing, only relevant for FSBlobStore
+    ) -> str:
+        """Store a local file; return full blob URI."""
+        ...
+
+    async def load_bytes(self, uri: str) -> bytes: ...
+
+    async def load_text(
+        self,
+        uri: str,
+        *,
+        encoding: str = "utf-8",
+        errors: str = "strict",
+    ) -> str: ...
```
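A minimal usage sketch; the concrete store passed in would be an FSBlobStore- or S3BlobStore-style implementation, and the helper name is illustrative.

```python
from aethergraph.contracts.storage.blob_store import BlobStore


async def store_note(blobs: BlobStore, text: str) -> str:
    # Write the text as a blob and read it back through the returned URI.
    uri = await blobs.put_bytes(text.encode("utf-8"), ext=".txt", mime="text/plain")
    assert (await blobs.load_text(uri)) == text  # round-trip check
    return uri
```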
```diff
--- /dev/null
+++ b/aethergraph/contracts/storage/doc_store.py
@@ -0,0 +1,35 @@
+from typing import Any, Protocol
+
+"""
+Document store interface for storing and retrieving JSON-like documents.
+
+Typical implementations include:
+- InMemoryDocStore: Transient, in-memory document store for testing or ephemeral use cases
+- FileSystemDocStore: File system-based document store for durable storage
+- DatabaseDocStore: (future) Database-backed document store for scalable storage and querying
+
+It is used in various parts of the system for storing structured documents.
+- memory persistence saving summary JSON documents
+- continuation storage for saving intermediate results and tokens
+- graph state store for saving state snapshots
+"""
+
+
+class DocStore(Protocol):
+    """
+    Generic doc_id → JSON dict store.
+
+    NOTE:
+    - This is intentionally low-level and schema-agnostic.
+    - Higher-level stores (RunStore, EventLog, ArtifactIndex, etc.) are responsible
+      for applying domain-specific filtering, sorting, and pagination.
+    - DocStore.list() may scan all docs; it is not intended as a scalable
+      query interface for large, structured datasets.
+    """
+
+    async def put(self, doc_id: str, doc: dict[str, Any]) -> None: ...
+    async def get(self, doc_id: str) -> dict[str, Any] | None: ...
+    async def delete(self, doc_id: str) -> None: ...
+
+    # Optional
+    async def list(self) -> list[str]: ...
```
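A toy dict-backed illustration of this surface (not the package's InMemoryDocStore or FileSystemDocStore):

```python
from typing import Any


class DictDocStore:
    """Stand-alone sketch of the DocStore protocol backed by a plain dict."""

    def __init__(self) -> None:
        self._docs: dict[str, dict[str, Any]] = {}

    async def put(self, doc_id: str, doc: dict[str, Any]) -> None:
        self._docs[doc_id] = dict(doc)  # shallow copy so callers can't mutate the stored doc

    async def get(self, doc_id: str) -> dict[str, Any] | None:
        return self._docs.get(doc_id)

    async def delete(self, doc_id: str) -> None:
        self._docs.pop(doc_id, None)

    async def list(self) -> list[str]:
        return list(self._docs)
```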
```diff
--- /dev/null
+++ b/aethergraph/contracts/storage/event_log.py
@@ -0,0 +1,31 @@
+from datetime import datetime
+from typing import Protocol
+
+"""
+Event log interface for appending and querying events.
+
+Typical implementations include:
+- InMemoryEventLog: Transient, in-memory event log for testing or ephemeral use cases
+- FSPersistenceEventLog: File system-based event log for durable storage
+- DatabaseEventLog: (future) Database-backed event log for scalable storage and querying
+
+It is used in various parts of the system for logging events with metadata.
+- memory persistent implementation for saving events durably
+- graph state store for appending state change events
+"""
+
+
+class EventLog(Protocol):
+    async def append(self, evt: dict) -> None: ...
+
+    async def query(
+        self,
+        *,
+        scope_id: str | None = None,  # filter by scope ID, e.g., run ID, memory ID
+        since: datetime | None = None,  # filter events after this time
+        until: datetime | None = None,  # filter events before this time
+        kinds: list[str] | None = None,  # filter by event kinds
+        limit: int | None = None,  # max number of events to return
+        tags: list[str] | None = None,  # filter by tags
+        offset: int = 0,  # pagination offset
+    ) -> list[dict]: ...
```
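An illustrative query against this log; the scope id and the event kind string are placeholder values, not names the package defines.

```python
from datetime import datetime, timedelta, timezone

from aethergraph.contracts.storage.event_log import EventLog


async def recent_run_events(log: EventLog, run_id: str) -> list[dict]:
    # Last hour of events for one run, filtered to a hypothetical "node_status" kind.
    since = datetime.now(timezone.utc) - timedelta(hours=1)
    return await log.query(scope_id=run_id, since=since, kinds=["node_status"], limit=100)
```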
```diff
--- /dev/null
+++ b/aethergraph/contracts/storage/vector_index.py
@@ -0,0 +1,48 @@
+from typing import Any, Protocol
+
+"""
+Vector index interface for storing and retrieving vector embeddings.
+
+It can be used in rag services or any system that requires vector similarity search.
+"""
+
+
+class VectorIndex(Protocol):
+    async def add(
+        self,
+        corpus_id: str,
+        chunk_ids: list[str],
+        vectors: list[list[float]],
+        metas: list[dict[str, Any]],
+    ) -> None:
+        """
+        Insert or upsert vectors into a corpus.
+
+        - corpus_id: logical collection name
+        - chunk_ids: user IDs for each vector
+        - vectors: len == len(chunk_ids), each a dense float vector
+        - metas: arbitrary metadata (e.g. {"doc_id": ..., "offset": ...})
+        """
+
+    async def delete(
+        self,
+        corpus_id: str,
+        chunk_ids: list[str] | None = None,
+    ) -> None:
+        """
+        Delete entire corpus (chunk_ids=None) or specific chunks.
+        """
+
+    async def search(
+        self,
+        corpus_id: str,
+        query_vec: list[float],
+        k: int,
+    ) -> list[dict[str, Any]]: ...
+
+    # Each dict MUST look like:
+    # {"chunk_id": str, "score": float, "meta": dict[str, Any]}
+
+    # Optional
+    async def list_corpora(self) -> list[str]: ...
+    async def list_chunks(self, corpus_id: str) -> list[str]: ...
```
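A stand-alone brute-force illustration of this contract using cosine similarity; the package's real backends are the Chroma, FAISS, and SQLite indices under aethergraph/storage/vector_index.

```python
import math
from typing import Any


class BruteForceIndex:
    """Toy in-memory index returning {"chunk_id", "score", "meta"} hits per the contract."""

    def __init__(self) -> None:
        # corpus_id -> chunk_id -> (vector, meta)
        self._corpora: dict[str, dict[str, tuple[list[float], dict[str, Any]]]] = {}

    async def add(
        self,
        corpus_id: str,
        chunk_ids: list[str],
        vectors: list[list[float]],
        metas: list[dict[str, Any]],
    ) -> None:
        corpus = self._corpora.setdefault(corpus_id, {})
        for cid, vec, meta in zip(chunk_ids, vectors, metas):
            corpus[cid] = (vec, meta)  # upsert by chunk id

    async def delete(self, corpus_id: str, chunk_ids: list[str] | None = None) -> None:
        if chunk_ids is None:
            self._corpora.pop(corpus_id, None)
            return
        corpus = self._corpora.get(corpus_id, {})
        for cid in chunk_ids:
            corpus.pop(cid, None)

    async def search(
        self, corpus_id: str, query_vec: list[float], k: int
    ) -> list[dict[str, Any]]:
        def cosine(a: list[float], b: list[float]) -> float:
            dot = sum(x * y for x, y in zip(a, b))
            norm = math.sqrt(sum(x * x for x in a)) * math.sqrt(sum(y * y for y in b))
            return dot / norm if norm else 0.0

        hits = [
            {"chunk_id": cid, "score": cosine(query_vec, vec), "meta": meta}
            for cid, (vec, meta) in self._corpora.get(corpus_id, {}).items()
        ]
        hits.sort(key=lambda h: h["score"], reverse=True)
        return hits[:k]
```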
```diff
--- a/aethergraph/core/execution/forward_scheduler.py
+++ b/aethergraph/core/execution/forward_scheduler.py
@@ -200,6 +200,9 @@ class ForwardScheduler(BaseScheduler):
         # logger
         self.logger = logger
 
+        # termination flag
+        self._cancelled = False
+
     def bind_loop(self, loop: asyncio.AbstractEventLoop | None = None):
         """Bind an event loop to this scheduler (for cross-thread resume calls)."""
         self.loop = loop or asyncio.get_running_loop()
@@ -322,6 +325,12 @@ class ForwardScheduler(BaseScheduler):
             if not ctrl.done():
                 ctrl.cancel()
 
+        if self._cancelled:
+            # propagate an explicit cancellation upwards
+            raise asyncio.CancelledError(
+                f"ForwardScheduler for run_id={self.env.run_id} was terminated"
+            )
+
     async def run_from(self, node_ids: list[str]):
         """Run starting from specific nodes (e.g. after external event)."""
         for nid in node_ids:
@@ -340,6 +349,8 @@ class ForwardScheduler(BaseScheduler):
     async def terminate(self):
         """Terminate execution; running tasks will complete but no new tasks will be started."""
        self._terminated = True
+        self._cancelled = True
+
         # cancel backoff tasks
         for task in self._backoff_tasks.values():
             task.cancel()
@@ -700,10 +711,10 @@ class ForwardScheduler(BaseScheduler):
 
         except NotImplementedError:
             # subgraph logic not handled here; escalate to orchestrator
-            await
+            await self.graph.set_node_status(node_id, NodeStatus.FAILED)
         except asyncio.CancelledError:
             # task cancelled (e.g. on terminate);
-            await
+            await self.graph.set_node_status(node_id, NodeStatus.CANCELLED)
         finally:
             # remove from running tasks in caller
             pass
```
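Caller-side sketch of the new behaviour: terminate() now also sets the _cancelled flag and the scheduler re-raises asyncio.CancelledError once its tasks are cancelled, so code awaiting the run should treat that as an orderly shutdown. The run_task wiring below is an assumption for illustration, not API from the package.

```python
import asyncio


async def stop_run(scheduler, run_task: asyncio.Task) -> str:
    # Ask the scheduler to stop; it sets _terminated and the new _cancelled flag.
    await scheduler.terminate()
    try:
        await run_task  # the task awaiting the scheduler's run loop
    except asyncio.CancelledError:
        return "CANCELLED"  # explicit cancellation propagated by the scheduler
    return "FINISHED"
```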
```diff
--- a/aethergraph/core/execution/global_scheduler.py
+++ b/aethergraph/core/execution/global_scheduler.py
@@ -62,6 +62,7 @@ class RunState:
     ready_pending: set[str] = field(default_factory=set)  # nodes explicitly enqueued
     backoff_tasks: dict[str, asyncio.Task] = field(default_factory=dict)  # node_id -> sleeper task
     terminated: bool = False
+    cancelled: bool = False
 
     def capacity(self) -> int:
         return max(0, self.settings.max_concurrency - len(self.running_tasks))
@@ -163,6 +164,7 @@ class GlobalForwardScheduler:
         # mark runs as terminated & cancel sleepers/runners
         for rs in self._runs.values():
             rs.terminated = True
+            rs.cancelled = True
             for t in list(rs.backoff_tasks.values()):
                 t.cancel()
             for t in list(rs.running_tasks.values()):
@@ -185,6 +187,8 @@ class GlobalForwardScheduler:
         if not rs:
             return
         rs.terminated = True
+        rs.cancelled = True
+
         for t in list(rs.backoff_tasks.values()):
             t.cancel()
         for t in list(rs.running_tasks.values()):
@@ -240,20 +244,22 @@ class GlobalForwardScheduler:
                 and not tgt.resume_pending
             ):
                 # compute a simple status
-
-
-
-
-
-
-
-
-
-
-
-
-
+                if tgt.cancelled:
+                    status = "CANCELLED"
+                else:
+                    status = "SUCCESS"
+                    for n in tgt.graph.nodes:
+                        if n.spec.type == "plan":
+                            continue
+                        if n.state.status == NodeStatus.FAILED:
+                            status = "FAILED"
+                            break
+                evt = RunEvent(
+                    run_id=tgt.run_id,
+                    status=status,
+                    timestamp=datetime.utcnow().timestamp(),
                 )
+                await self._emit_run(evt)
                 break
 
             # 4) If nothing is running anywhere and nothing scheduled, decide how to wait
@@ -506,11 +512,11 @@ class GlobalForwardScheduler:
             )
         except asyncio.CancelledError:
             try:
-                await rs.graph.set_node_status(node_id, NodeStatus.
+                await rs.graph.set_node_status(node_id, NodeStatus.CANCELLED)
             except Exception as e:
                 if self._logger:
                     self._logger.warning(
-                        f"[GlobalForwardScheduler._start_node] failed to set node {node_id} as
+                        f"[GlobalForwardScheduler._start_node] failed to set node {node_id} as CANCELLED on cancellation: {e}"
                     )
             finally:
                 pass
```
```diff
--- a/aethergraph/core/execution/step_forward.py
+++ b/aethergraph/core/execution/step_forward.py
@@ -3,6 +3,7 @@ import functools
 import inspect
 from typing import Any
 
+from aethergraph.contracts.services.channel import OutEvent
 from aethergraph.services.continuations.continuation import Continuation
 
 from ..graph.graph_refs import RESERVED_INJECTABLES  # {"context", "resume", "self"}
@@ -397,7 +398,15 @@ async def _enter_wait(
     # 3) Notify only if the tool hasn't already done it
     if not spec.get("notified", False):
        try:
-
+            # TODO: This is a temporary fix. The proper way is to have the channel bus injected into the NodeContext
+            bus = node_ctx.services.channels
+            event = OutEvent(
+                type=cont.kind,
+                channel=cont.channel,
+                text=cont.prompt,
+                meta={"continuation_token": cont.token},
+            )
+            await bus.publish(event)
             if lg:
                 lg.debug("notified channel=%s", cont.channel)
         except Exception as e:
```