aethergraph 0.1.0a2__py3-none-any.whl → 0.1.0a4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (114)
  1. aethergraph/__main__.py +3 -0
  2. aethergraph/api/v1/artifacts.py +23 -4
  3. aethergraph/api/v1/schemas.py +7 -0
  4. aethergraph/api/v1/session.py +123 -4
  5. aethergraph/config/config.py +2 -0
  6. aethergraph/config/search.py +49 -0
  7. aethergraph/contracts/services/channel.py +18 -1
  8. aethergraph/contracts/services/execution.py +58 -0
  9. aethergraph/contracts/services/llm.py +26 -0
  10. aethergraph/contracts/services/memory.py +10 -4
  11. aethergraph/contracts/services/planning.py +53 -0
  12. aethergraph/contracts/storage/event_log.py +8 -0
  13. aethergraph/contracts/storage/search_backend.py +47 -0
  14. aethergraph/contracts/storage/vector_index.py +73 -0
  15. aethergraph/core/graph/action_spec.py +76 -0
  16. aethergraph/core/graph/graph_fn.py +75 -2
  17. aethergraph/core/graph/graphify.py +74 -2
  18. aethergraph/core/runtime/graph_runner.py +2 -1
  19. aethergraph/core/runtime/node_context.py +66 -3
  20. aethergraph/core/runtime/node_services.py +8 -0
  21. aethergraph/core/runtime/run_manager.py +263 -271
  22. aethergraph/core/runtime/run_types.py +54 -1
  23. aethergraph/core/runtime/runtime_env.py +35 -14
  24. aethergraph/core/runtime/runtime_services.py +308 -18
  25. aethergraph/plugins/agents/default_chat_agent.py +266 -74
  26. aethergraph/plugins/agents/default_chat_agent_v2.py +487 -0
  27. aethergraph/plugins/channel/adapters/webui.py +69 -21
  28. aethergraph/plugins/channel/routes/webui_routes.py +8 -48
  29. aethergraph/runtime/__init__.py +12 -0
  30. aethergraph/server/app_factory.py +10 -1
  31. aethergraph/server/ui_static/assets/index-CFktGdbW.js +4913 -0
  32. aethergraph/server/ui_static/assets/index-DcfkFlTA.css +1 -0
  33. aethergraph/server/ui_static/index.html +2 -2
  34. aethergraph/services/artifacts/facade.py +157 -21
  35. aethergraph/services/artifacts/types.py +35 -0
  36. aethergraph/services/artifacts/utils.py +42 -0
  37. aethergraph/services/channel/channel_bus.py +3 -1
  38. aethergraph/services/channel/event_hub copy.py +55 -0
  39. aethergraph/services/channel/event_hub.py +81 -0
  40. aethergraph/services/channel/factory.py +3 -2
  41. aethergraph/services/channel/session.py +709 -74
  42. aethergraph/services/container/default_container.py +69 -7
  43. aethergraph/services/execution/__init__.py +0 -0
  44. aethergraph/services/execution/local_python.py +118 -0
  45. aethergraph/services/indices/__init__.py +0 -0
  46. aethergraph/services/indices/global_indices.py +21 -0
  47. aethergraph/services/indices/scoped_indices.py +292 -0
  48. aethergraph/services/llm/generic_client.py +342 -46
  49. aethergraph/services/llm/generic_embed_client.py +359 -0
  50. aethergraph/services/llm/types.py +3 -1
  51. aethergraph/services/memory/distillers/llm_long_term.py +60 -109
  52. aethergraph/services/memory/distillers/llm_long_term_v1.py +180 -0
  53. aethergraph/services/memory/distillers/llm_meta_summary.py +57 -266
  54. aethergraph/services/memory/distillers/llm_meta_summary_v1.py +342 -0
  55. aethergraph/services/memory/distillers/long_term.py +48 -131
  56. aethergraph/services/memory/distillers/long_term_v1.py +170 -0
  57. aethergraph/services/memory/facade/chat.py +18 -8
  58. aethergraph/services/memory/facade/core.py +159 -19
  59. aethergraph/services/memory/facade/distillation.py +86 -31
  60. aethergraph/services/memory/facade/retrieval.py +100 -1
  61. aethergraph/services/memory/factory.py +4 -1
  62. aethergraph/services/planning/__init__.py +0 -0
  63. aethergraph/services/planning/action_catalog.py +271 -0
  64. aethergraph/services/planning/bindings.py +56 -0
  65. aethergraph/services/planning/dependency_index.py +65 -0
  66. aethergraph/services/planning/flow_validator.py +263 -0
  67. aethergraph/services/planning/graph_io_adapter.py +150 -0
  68. aethergraph/services/planning/input_parser.py +312 -0
  69. aethergraph/services/planning/missing_inputs.py +28 -0
  70. aethergraph/services/planning/node_planner.py +613 -0
  71. aethergraph/services/planning/orchestrator.py +112 -0
  72. aethergraph/services/planning/plan_executor.py +506 -0
  73. aethergraph/services/planning/plan_types.py +321 -0
  74. aethergraph/services/planning/planner.py +617 -0
  75. aethergraph/services/planning/planner_service.py +369 -0
  76. aethergraph/services/planning/planning_context_builder.py +43 -0
  77. aethergraph/services/planning/quick_actions.py +29 -0
  78. aethergraph/services/planning/routers/__init__.py +0 -0
  79. aethergraph/services/planning/routers/simple_router.py +26 -0
  80. aethergraph/services/rag/facade.py +0 -3
  81. aethergraph/services/scope/scope.py +30 -30
  82. aethergraph/services/scope/scope_factory.py +15 -7
  83. aethergraph/services/skills/__init__.py +0 -0
  84. aethergraph/services/skills/skill_registry.py +465 -0
  85. aethergraph/services/skills/skills.py +220 -0
  86. aethergraph/services/skills/utils.py +194 -0
  87. aethergraph/storage/artifacts/artifact_index_jsonl.py +16 -10
  88. aethergraph/storage/artifacts/artifact_index_sqlite.py +12 -2
  89. aethergraph/storage/docstore/sqlite_doc_sync.py +1 -1
  90. aethergraph/storage/memory/event_persist.py +42 -2
  91. aethergraph/storage/memory/fs_persist.py +32 -2
  92. aethergraph/storage/search_backend/__init__.py +0 -0
  93. aethergraph/storage/search_backend/generic_vector_backend.py +230 -0
  94. aethergraph/storage/search_backend/null_backend.py +34 -0
  95. aethergraph/storage/search_backend/sqlite_lexical_backend.py +387 -0
  96. aethergraph/storage/search_backend/utils.py +31 -0
  97. aethergraph/storage/search_factory.py +75 -0
  98. aethergraph/storage/vector_index/faiss_index.py +72 -4
  99. aethergraph/storage/vector_index/sqlite_index.py +521 -52
  100. aethergraph/storage/vector_index/sqlite_index_vanila.py +311 -0
  101. aethergraph/storage/vector_index/utils.py +22 -0
  102. {aethergraph-0.1.0a2.dist-info → aethergraph-0.1.0a4.dist-info}/METADATA +1 -1
  103. {aethergraph-0.1.0a2.dist-info → aethergraph-0.1.0a4.dist-info}/RECORD +108 -64
  104. {aethergraph-0.1.0a2.dist-info → aethergraph-0.1.0a4.dist-info}/WHEEL +1 -1
  105. aethergraph/plugins/agents/default_chat_agent copy.py +0 -90
  106. aethergraph/server/ui_static/assets/index-BR5GtXcZ.css +0 -1
  107. aethergraph/server/ui_static/assets/index-CQ0HZZ83.js +0 -400
  108. aethergraph/services/eventhub/event_hub.py +0 -76
  109. aethergraph/services/llm/generic_client copy.py +0 -691
  110. aethergraph/services/prompts/file_store.py +0 -41
  111. {aethergraph-0.1.0a2.dist-info → aethergraph-0.1.0a4.dist-info}/entry_points.txt +0 -0
  112. {aethergraph-0.1.0a2.dist-info → aethergraph-0.1.0a4.dist-info}/licenses/LICENSE +0 -0
  113. {aethergraph-0.1.0a2.dist-info → aethergraph-0.1.0a4.dist-info}/licenses/NOTICE +0 -0
  114. {aethergraph-0.1.0a2.dist-info → aethergraph-0.1.0a4.dist-info}/top_level.txt +0 -0
aethergraph/__main__.py CHANGED
@@ -191,6 +191,9 @@ def main(argv: list[str] | None = None) -> int:
 
     # Export them to environment so the worker factory can read them
     os.environ["AETHERGRAPH_WORKSPACE"] = args.workspace
+    os.environ.setdefault(
+        "AETHERGRAPH_ROOT", args.workspace
+    ) # AETHERGRAPH_ROOT is the workspace root in env
     os.environ["AETHERGRAPH_PROJECT_ROOT"] = str(project_root)
     os.environ["AETHERGRAPH_LOAD_MODULES"] = ",".join(modules)
     os.environ["AETHERGRAPH_LOAD_PATHS"] = os.pathsep.join(paths)
aethergraph/api/v1/artifacts.py CHANGED
@@ -128,6 +128,8 @@ def _artifact_to_meta(a: Artifact) -> ArtifactMeta:
 @router.get("/artifacts", response_model=ArtifactListResponse)
 async def list_artifacts(
     scope_id: Annotated[str | None, Query()] = None,
+    run_id: Annotated[str | None, Query()] = None,
+    session_id: Annotated[str | None, Query()] = None,
     kind: Annotated[str | None, Query()] = None,
     tags: Annotated[str | None, Query()] = None,
     cursor: Annotated[str | None, Query()] = None,
@@ -141,17 +143,34 @@ async def list_artifacts(
 
     offset = decode_cursor(cursor.strip() if cursor else None)
 
+    # label_filters: dict[str, Any] = {}
+
+    # if scope_id and scope_id.strip():
+    #     label_filters["scope_id"] = scope_id.strip()
+
+    # if tags and tags.strip():
+    #     tag_list = [t.strip() for t in tags.split(",") if t.strip()]
+    #     if tag_list:
+    #         label_filters["tags"] = tag_list
+
+    # # 🔹 Tenant scoping: org_id + user_id
+    # label_filters.update(_tenant_label_filters(identity))
+
     label_filters: dict[str, Any] = {}
 
+    # execution scopes
+    if run_id and run_id.strip():
+        label_filters["run_id"] = run_id.strip()
+    if session_id and session_id.strip():
+        label_filters["session_id"] = session_id.strip()
+
+    # memory scope (keep for “overview” / RAG-style scoping)
     if scope_id and scope_id.strip():
         label_filters["scope_id"] = scope_id.strip()
 
     if tags and tags.strip():
-        tag_list = [t.strip() for t in tags.split(",") if t.strip()]
-        if tag_list:
-            label_filters["tags"] = tag_list
+        label_filters["tags"] = [t.strip() for t in tags.split(",") if t.strip()]
 
-    # 🔹 Tenant scoping: org_id + user_id
     label_filters.update(_tenant_label_filters(identity))
 
     artifacts = await index.search(
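For reference, a minimal sketch of hitting the updated listing endpoint with the new execution-scope filters. Only the query parameter names come from this diff; the base URL and /api/v1 prefix are assumptions.

import httpx

async def list_run_artifacts(run_id: str, session_id: str) -> dict:
    # Base URL and API prefix are assumed; run_id/session_id/tags params are from the diff above.
    async with httpx.AsyncClient(base_url="http://localhost:8000/api/v1") as client:
        resp = await client.get(
            "/artifacts",
            params={"run_id": run_id, "session_id": session_id, "tags": "plot,report"},
        )
        resp.raise_for_status()
        return resp.json()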
aethergraph/api/v1/schemas.py CHANGED
@@ -488,6 +488,11 @@ class SessionChatFile(BaseModel):
     mimetype: str | None = None
     size: int | None = None
     uri: str | None = None # optional, useful for artifact URIs
+    renderer: Literal["image", "download", "vega", "plotly"] | None = None # optional, for UI hints
+
+    class Config:
+        # Optional but nice: future-proof against extra keys
+        extra = "allow"
 
 
 class SessionChatEvent(BaseModel):
@@ -498,10 +503,12 @@ class SessionChatEvent(BaseModel):
     buttons: list[dict[str, Any]]
     file: SessionChatFile | None = None # legacy/single
     files: list[SessionChatFile] | None = None # NEW: multi
+    attachments: list[SessionChatFile] | None = None # 🔹 placeholder for future richer usage
     meta: dict[str, Any]
     ts: float
     agent_id: str | None = None
     upsert_key: str | None = None # for idempotent updates
+    rich: dict[str, Any] | None = None # phase/progress payloads
 
 
 class SessionUpdateRequest(BaseModel):
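A small sketch of a chat event carrying the new fields. The field set mirrors the construction in session.py later in this diff; which fields are required, and any extra SessionChatFile fields not visible here (e.g. a name), are assumptions.

from aethergraph.api.v1.schemas import SessionChatEvent, SessionChatFile

event = SessionChatEvent(
    id="evt_001",
    session_id="sess_001",
    type="agent.message",
    text="Here is the chart you asked for.",
    buttons=[],
    file=None,
    files=[
        SessionChatFile(
            uri="artifact://art_123/chart.png",
            mimetype="image/png",
            size=48210,
            renderer="image",  # new UI hint
        )
    ],
    rich={"kind": "phase", "phase": "reply", "status": "done"},  # new phase/progress payload
    meta={},
    ts=1717243200.0,
    agent_id="default_chat_agent",
)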
aethergraph/api/v1/session.py CHANGED
@@ -1,6 +1,17 @@
+import asyncio
+from contextlib import suppress
 from datetime import datetime, timezone
-
-from fastapi import APIRouter, Depends, HTTPException, Query, Request
+import logging
+
+from fastapi import (
+    APIRouter,
+    Depends,
+    HTTPException,
+    Query,
+    Request,
+    WebSocket,
+    WebSocketDisconnect,
+)
 
 from aethergraph.api.v1.deps import RequestIdentity, get_identity
 from aethergraph.api.v1.pagination import decode_cursor, encode_cursor
@@ -19,6 +30,7 @@ from aethergraph.core.runtime.runtime_registry import current_registry
 from aethergraph.core.runtime.runtime_services import current_services
 
 router = APIRouter(tags=["sessions"])
+logger = logging.getLogger(__name__)
 
 
 @router.post("/sessions", response_model=Session)
@@ -74,8 +86,6 @@ async def list_sessions(
         limit=limit,
         offset=offset,
     )
-    # print(f"Listed {len(sessions)} sessions for user_id={identity.user_id} org_id={identity.org_id} offset={offset} limit={limit}")
-    # print(f"Sessions: {[s for s in sessions]}")
     next_cursor = encode_cursor(offset + limit) if len(sessions) == limit else None
     return SessionListResponse(items=sessions, next_cursor=next_cursor)
 
@@ -206,6 +216,109 @@ async def get_session_runs(
     return SessionRunsResponse(items=summaries)
 
 
+def _row_to_session_chat_event(row: dict, session_id: str) -> SessionChatEvent:
+    payload = row.get("payload", {}) or {}
+    return SessionChatEvent(
+        id=row.get("id"),
+        session_id=session_id,
+        ts=row.get("ts"),
+        type=payload.get("type") or "agent.message",
+        text=payload.get("text"),
+        buttons=payload.get("buttons", []),
+        file=payload.get("file"),
+        files=payload.get("files") or None,
+        rich=payload.get("rich") or None,
+        meta=payload.get("meta", {}) or {},
+        agent_id=payload.get("agent_id"),
+        upsert_key=payload.get("upsert_key"),
+    )
+
+
+@router.websocket("/ws/sessions/{session_id}/chat")
+async def ws_session_chat(websocket: WebSocket, session_id: str):
+    DROP_FROM_HISTORY = {"agent.stream.start", "agent.stream.delta"}
+
+    container = current_services()
+    event_log = container.eventlog
+    hub = getattr(container, "eventhub", None)
+
+    if hub is None or event_log is None:
+        await websocket.close(code=1011)
+        return
+
+    await websocket.accept()
+
+    async def send_snapshot() -> None:
+        events = await event_log.query(
+            scope_id=session_id,
+            kinds=["session_chat"],
+            since=None,
+            limit=200,
+        )
+        filtered = []
+        for ev in events:
+            payload = ev.get("payload") or {}
+            t = payload.get("type") or "agent.message"
+            if t in DROP_FROM_HISTORY:
+                continue
+            filtered.append(ev)
+
+        filtered.sort(key=lambda ev: ev.get("ts") or 0)
+        initial_payload = [
+            _row_to_session_chat_event(ev, session_id).model_dump() for ev in filtered
+        ]
+        await websocket.send_json({"kind": "snapshot", "events": initial_payload})
+
+    async def recv_until_disconnect() -> None:
+        # Blocks until disconnect; does not require the client to send meaningful messages.
+        while True:
+            msg = await websocket.receive()
+            if msg.get("type") == "websocket.disconnect":
+                return
+
+    async def send_live() -> None:
+        # If you kept old hub shape, you'd do async for row in hub.subscribe(scope_id=session_id)
+        async for row in hub.subscribe(scope_id=session_id, kind="session_chat"):
+            ev = _row_to_session_chat_event(row, session_id)
+            await websocket.send_json({"kind": "event", "event": ev.model_dump()})
+
+    recv_task = send_task = None
+    try:
+        await send_snapshot()
+
+        recv_task = asyncio.create_task(recv_until_disconnect())
+        send_task = asyncio.create_task(send_live())
+
+        done, pending = await asyncio.wait(
+            {recv_task, send_task},
+            return_when=asyncio.FIRST_COMPLETED,
+        )
+
+        # Cancel the other task (this is what prevents idle hangs)
+        for t in pending:
+            t.cancel()
+            with suppress(asyncio.CancelledError):
+                await t
+
+    except WebSocketDisconnect:
+        # can happen from send_json
+        return
+    except asyncio.CancelledError:
+        # critical for uvicorn --reload
+        with suppress(Exception):
+            await websocket.close(code=1001)
+        raise
+    except Exception as e:
+        with suppress(Exception):
+            await websocket.close(code=1011, reason=str(e)[:120])
+    finally:
+        for t in (recv_task, send_task):
+            if t and not t.done():
+                t.cancel()
                with suppress(asyncio.CancelledError):
+                    await t
+
+
 @router.get("/sessions/{session_id}/chat/events", response_model=list[SessionChatEvent])
 async def get_session_chat_events(
     session_id: str,
@@ -213,6 +326,8 @@ async def get_session_chat_events(
     since_ts: float | None = Query(None), # noqa: B008
     identity: RequestIdentity = Depends(get_identity), # noqa: B008
 ) -> list[SessionChatEvent]:
+    DROP_FROM_HISTORY = {"agent.stream.start", "agent.stream.delta"}
+
     container = current_services()
     event_log = container.eventlog
 
@@ -234,6 +349,9 @@ async def get_session_chat_events(
     # make cursor exclusive -- only return events after since_ts to avoid duplicates
     events = [ev for ev in events if (ev.get("ts") or 0) > since_ts]
 
+    # Filter legacy persisted deltas/start
+    events = [ev for ev in events if (ev.get("payload") or {}).get("type") not in DROP_FROM_HISTORY]
+
     out: list[SessionChatEvent] = []
     for ev in events:
         payload = ev.get("payload", {}) or {}
@@ -250,6 +368,7 @@ async def get_session_chat_events(
                 meta=payload.get("meta", {}) or {},
                 agent_id=payload.get("agent_id"),
                 upsert_key=payload.get("upsert_key"), # forward idempotent key
+                rich=payload.get("rich") or None, # forward rich content
             )
         )
     out.sort(key=lambda e: e.ts)
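A rough client-side sketch of consuming the new chat WebSocket. The message shapes ({"kind": "snapshot", ...} and {"kind": "event", ...}) mirror what ws_session_chat sends above; the host, port, and any route prefix are assumptions, and the websockets package is not part of this release.

import asyncio
import json
import websockets

async def follow_chat(session_id: str) -> None:
    url = f"ws://localhost:8000/ws/sessions/{session_id}/chat"  # prefix/port assumed
    async with websockets.connect(url) as ws:
        async for raw in ws:
            msg = json.loads(raw)
            if msg["kind"] == "snapshot":
                print(f"history: {len(msg['events'])} events")
            elif msg["kind"] == "event":
                print("live:", msg["event"].get("type"), msg["event"].get("text"))

# asyncio.run(follow_chat("sess_001"))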
aethergraph/config/config.py CHANGED
@@ -5,6 +5,7 @@ from pydantic import BaseModel, Field, SecretStr
 from pydantic_settings import BaseSettings, SettingsConfigDict
 
 from .llm import LLMSettings
+from .search import SearchBackendSettings
 from .storage import StorageSettings
 
 
@@ -136,6 +137,7 @@ class AppSettings(BaseSettings):
     channels: ChannelSettings = ChannelSettings()
     rag: RAGSettings = RAGSettings()
     storage: StorageSettings = StorageSettings()
+    search: SearchBackendSettings = SearchBackendSettings()
 
     # Future fields:
     # authn: ...
aethergraph/config/search.py ADDED
@@ -0,0 +1,49 @@
+# search_settings.py (or wherever you keep config models)
+
+from __future__ import annotations
+
+from typing import Literal
+
+from pydantic import BaseModel
+
+from .storage import FAISSVectorIndexSettings, SQLiteVectorIndexSettings
+
+# ^ or wherever those two are defined
+
+
+class SQLiteLexicalSearchSettings(BaseModel):
+    """
+    Settings for SQLite-based lexical search backend.
+    Paths are relative to AppSettings.root.
+    """
+
+    dir: str = "search/sqlite_lexical"
+    filename: str = "index.sqlite"
+
+
+class SearchBackendSettings(BaseModel):
+    """
+    Config for the high-level SearchBackend used by ScopedIndices.
+
+    backend:
+      - "none"          -> NullSearchBackend (no search at all)
+      - "sqlite_vector" -> VectorSearchBackend + SQLiteVectorIndex
+      - "faiss_vector"  -> VectorSearchBackend + FAISSVectorIndex
+      - "sqlite_lexical"-> SQLiteLexicalSearchBackend (no embeddings)
+    """
+
+    backend: Literal["none", "sqlite_vector", "faiss_vector", "sqlite_lexical"] = "sqlite_vector"
+
+    # Vector search backends (reuse your existing index settings types,
+    # but point them to search-specific directories by default).
+    sqlite_vector: SQLiteVectorIndexSettings = SQLiteVectorIndexSettings(
+        dir="search/vector_sqlite",
+        filename="index.sqlite",
+    )
+    faiss_vector: FAISSVectorIndexSettings = FAISSVectorIndexSettings(
+        dir="search/vector_faiss",
+        dim=None,
+    )
+
+    # Lexical search backend
+    sqlite_lexical: SQLiteLexicalSearchSettings = SQLiteLexicalSearchSettings()
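A quick sketch of overriding the new settings block; the actual backend construction lives in aethergraph/storage/search_factory.py (added in this release), whose API this diff does not show.

from aethergraph.config.search import SearchBackendSettings, SQLiteLexicalSearchSettings

# Pick the embeddings-free lexical backend and move its files to a custom directory.
settings = SearchBackendSettings(
    backend="sqlite_lexical",
    sqlite_lexical=SQLiteLexicalSearchSettings(dir="search/lexical", filename="idx.sqlite"),
)
assert settings.backend == "sqlite_lexical"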
aethergraph/contracts/services/channel.py CHANGED
@@ -18,6 +18,23 @@ EventType = Literal[
 ]
 
 
+class PhaseRich(TypedDict, total=False):
+    kind: Literal["phase"]
+    phase: str # "routing", "planning", "reasoning", "tools", "reply"
+    status: Literal["pending", "active", "done", "failed", "skipped"]
+    label: str | None # short human label
+    detail: str | None # optional extra text
+    code: str | None # internal code like "routing.planning"
+
+
+class ProgressRich(TypedDict, total=False):
+    kind: Literal["progress"]
+    label: str | None
+    current: float | int | None
+    total: float | int | None
+    unit: str | None # "%", "steps", etc.
+
+
 class FileRef(TypedDict, total=False):
     id: str # platform file id (e.g., Slack file ID)
     name: str # suggested filename
@@ -46,7 +63,7 @@ class OutEvent:
     rich: dict[str, Any] | None = None
     meta: dict[str, Any] | None = None
     # Optional structured extras most adapters can use, e.g., for buttons, attachments, files, etc.
-    buttons: dict[str, Button] | None = None # for approvals or link actions
+    buttons: list[Button] | None = None
     image: dict[str, Any] | None = None # e.g., {"url": "...", "alt": "...", "title": "..."}
     file: dict[str, Any] | None = (
         None # e.g., {"bytes" b"...", "filename": "...", "mimetype": "..."} or {"url": "...", "filename": "...", "mimetype": "..."}
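For orientation, payloads conforming to the two new TypedDicts might look like this (values are illustrative); either dict can travel on OutEvent.rich, which remains a plain dict[str, Any].

from aethergraph.contracts.services.channel import PhaseRich, ProgressRich

planning_phase: PhaseRich = {
    "kind": "phase",
    "phase": "planning",
    "status": "active",
    "label": "Planning",
    "code": "routing.planning",
}

indexing_progress: ProgressRich = {
    "kind": "progress",
    "label": "Indexing documents",
    "current": 3,
    "total": 10,
    "unit": "steps",
}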
aethergraph/contracts/services/execution.py ADDED
@@ -0,0 +1,58 @@
+# aethergraph/contracts/services/execution.py
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+from typing import Any, Literal, Protocol
+
+Language = Literal["python"] # later: "bash", "r", etc.
+
+
+@dataclass
+class CodeExecutionRequest:
+    """
+    Generic request to execute code.
+
+    For v0 we only support Python, but keep `language` so we can grow.
+    """
+
+    language: Language
+    code: str
+
+    # CLI-style args if you want to support them later
+    args: list[str] = field(default_factory=list)
+
+    # Execution constraints
+    timeout_s: float = 30.0
+
+    # Optional working directory and env (for future use)
+    workdir: str | None = None
+    env: dict[str, str] | None = None
+
+
+@dataclass
+class CodeExecutionResult:
+    """
+    Result of a code execution.
+
+    - stdout / stderr: what the script printed
+    - exit_code: OS-level exit code
+    - error: high-level error if our runner failed (timeout, spawn failure, etc.)
+    - metadata: free-form, e.g. timing info
+    """
+
+    stdout: str
+    stderr: str
+    exit_code: int
+    error: str | None = None
+    metadata: dict[str, Any] = field(default_factory=dict)
+
+
+class ExecutionService(Protocol):
+    """
+    Abstract interface for code execution backends.
+
+    This lets us later plug in Docker, VM, remote HTTP, etc,
+    without changing NodeContext or node code.
+    """
+
+    async def execute(self, request: CodeExecutionRequest) -> CodeExecutionResult: ...
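A minimal sketch of a backend satisfying this protocol, built on asyncio subprocesses. The shipped implementation is aethergraph/services/execution/local_python.py (also in this release) and may differ; only the request/result shapes here come from the contract above.

import asyncio
import sys
import time

from aethergraph.contracts.services.execution import CodeExecutionRequest, CodeExecutionResult

class SubprocessPythonExecutor:
    """Illustrative ExecutionService backend; handles only language == "python"."""

    async def execute(self, request: CodeExecutionRequest) -> CodeExecutionResult:
        start = time.monotonic()
        proc = await asyncio.create_subprocess_exec(
            sys.executable, "-c", request.code, *request.args,
            cwd=request.workdir,
            env=request.env,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
        )
        try:
            out, err = await asyncio.wait_for(proc.communicate(), timeout=request.timeout_s)
        except asyncio.TimeoutError:
            proc.kill()
            await proc.wait()
            return CodeExecutionResult(stdout="", stderr="", exit_code=-1, error="timeout")
        return CodeExecutionResult(
            stdout=out.decode(errors="replace"),
            stderr=err.decode(errors="replace"),
            exit_code=proc.returncode if proc.returncode is not None else 0,
            metadata={"duration_s": time.monotonic() - start},
        )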
aethergraph/contracts/services/llm.py CHANGED
@@ -1,3 +1,4 @@
+from collections.abc import Sequence
 from typing import Any, Protocol
 
 
@@ -15,3 +16,28 @@ class LLMClientProtocol(Protocol):
         headers: dict[str, str] | None = None,
         return_response: bool = False,
     ) -> Any: ...
+
+
+class EmbeddingClientProtocol(Protocol):
+    async def embed(
+        self,
+        texts: Sequence[str],
+        *,
+        model: str | None = None,
+        **kwargs,
+    ) -> list[list[float]]:
+        """
+        Batch-embed texts. Returns one vector per text.
+        """
+
+    async def embed_one(
+        self,
+        text: str,
+        *,
+        model: str | None = None,
+        **kwargs,
+    ) -> list[float]:
+        """
+        Convenience method: embed a single string.
+        Default implementation can call embed([text])[0].
+        """
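A toy client conforming to the new protocol, useful for tests; real clients (such as the generic_embed_client.py added in this release) would call an embeddings API instead of hashing.

from collections.abc import Sequence

class HashEmbeddingClient:
    """Deterministic stand-in that satisfies EmbeddingClientProtocol."""

    def __init__(self, dim: int = 8) -> None:
        self.dim = dim

    async def embed(
        self, texts: Sequence[str], *, model: str | None = None, **kwargs
    ) -> list[list[float]]:
        # Pseudo-embeddings derived from the text hash; one vector per input text.
        return [
            [((hash(text) >> shift) % 1000) / 1000.0 for shift in range(self.dim)]
            for text in texts
        ]

    async def embed_one(self, text: str, *, model: str | None = None, **kwargs) -> list[float]:
        # As the protocol docstring suggests: delegate to embed() and take the first vector.
        return (await self.embed([text], model=model, **kwargs))[0]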
aethergraph/contracts/services/memory.py CHANGED
@@ -32,6 +32,7 @@ class Event:
     org_id: str | None = None
     client_id: str | None = None
     app_id: str | None = None
+    agent_id: str | None = None
     session_id: str | None = None
 
     # --------- Core semantics ---------
@@ -63,16 +64,21 @@
 
 
 class HotLog(Protocol):
-    async def append(self, run_id: str, evt: Event, *, ttl_s: int, limit: int) -> None: ...
+    async def append(self, scope_id: str, evt: Event, *, ttl_s: int, limit: int) -> None: ...
     async def recent(
-        self, run_id: str, *, kinds: list[str] | None = None, limit: int = 50
+        self, scope_id: str, *, kinds: list[str] | None = None, limit: int = 50
     ) -> list[Event]: ...
 
 
 class Persistence(Protocol):
-    async def append_event(self, run_id: str, evt: Event) -> None: ...
+    async def append_event(self, scope_id: str, evt: Event) -> None: ...
     async def save_json(self, uri: str, obj: dict[str, Any]) -> None: ...
     async def load_json(self, uri: str) -> dict[str, Any]: ...
+    async def get_events_by_ids(
+        self,
+        scope_id: str,
+        event_ids: list[str],
+    ) -> list[Event]: ...
 
 
 class Indices(Protocol):
@@ -87,7 +93,7 @@ class Indices(Protocol):
 class Distiller(Protocol):
     async def distill(
         self,
-        run_id: str,
+        scope_id: str,
         *,
         hotlog: HotLog,
         persistence: Persistence,
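The run_id → scope_id rename generalizes these protocols beyond single runs (sessions, scopes, etc.). A toy in-memory HotLog keyed by scope_id could look like the sketch below; Event's kind field is assumed from the "Core semantics" block not shown in this hunk, and TTL handling is omitted.

from collections import defaultdict, deque

from aethergraph.contracts.services.memory import Event

class InMemoryHotLog:
    """Illustrative HotLog keyed by scope_id; not the shipped storage backend."""

    def __init__(self) -> None:
        self._buffers: dict[str, deque[Event]] = defaultdict(deque)

    async def append(self, scope_id: str, evt: Event, *, ttl_s: int, limit: int) -> None:
        buf = self._buffers[scope_id]
        buf.append(evt)  # ttl_s is ignored in this sketch
        while len(buf) > limit:
            buf.popleft()

    async def recent(
        self, scope_id: str, *, kinds: list[str] | None = None, limit: int = 50
    ) -> list[Event]:
        events = list(self._buffers[scope_id])
        if kinds:
            # `kind` on Event is assumed here.
            events = [e for e in events if getattr(e, "kind", None) in kinds]
        return events[-limit:]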
aethergraph/contracts/services/planning.py ADDED
@@ -0,0 +1,53 @@
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+from typing import Any, Literal, Protocol
+
+IntentMode = Literal["chat_only", "quick_action", "plan_and_execute"]
+
+
+@dataclass
+class RoutedIntent:
+    """
+    Result of routing a user turn:
+    - How should we handle this? (mode)
+    - If planning: which flows are in scope?
+    - If quick_action: which quick action?
+    """
+
+    mode: IntentMode
+
+    # For planning
+    flow_ids: list[str] | None = None
+
+    # For quick action, e.g. `list_recent_runs`
+    quick_action_id: str | None = None
+
+    # Freeform extention field.
+    # safety flags, strategy hints, etc. without changing the dataclass structure.
+    metadata: dict[str, Any] = field(default_factory=dict)
+
+
+@dataclass
+class SessionState:
+    # placeholder for future session-level state
+    last_flow_ids: list[str] | None = None
+
+
+class IntentRouter(Protocol):
+    async def route(
+        self,
+        *,
+        user_message: str,
+        session_state: SessionState,
+    ) -> RoutedIntent: ...
+
+
+class PlanningContextBuilderProtocol:
+    async def build(
+        self,
+        *,
+        user_message: str,
+        routed: RoutedIntent,
+        session_state: SessionState,
+    ) -> Any: ...
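An illustrative router conforming to IntentRouter, routing on keyword rules. The shipped routers live under aethergraph/services/planning/routers/ (e.g. simple_router.py) and are not shown here; the specific keywords are made up, though list_recent_runs is the quick-action example named in the comment above.

from aethergraph.contracts.services.planning import RoutedIntent, SessionState

class KeywordIntentRouter:
    """Sketch of an IntentRouter; not the shipped implementation."""

    async def route(self, *, user_message: str, session_state: SessionState) -> RoutedIntent:
        msg = user_message.lower()
        if msg.startswith("/runs"):
            return RoutedIntent(mode="quick_action", quick_action_id="list_recent_runs")
        if any(word in msg for word in ("run", "execute", "plan")):
            # Reuse the flows from the previous turn when the user doesn't name new ones.
            return RoutedIntent(mode="plan_and_execute", flow_ids=session_state.last_flow_ids)
        return RoutedIntent(mode="chat_only", metadata={"reason": "no action keywords"})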
aethergraph/contracts/storage/event_log.py CHANGED
@@ -29,3 +29,11 @@ class EventLog(Protocol):
         tags: list[str] | None = None, # filter by tags
         offset: int = 0, # pagination offset
     ) -> list[dict]: ...
+
+    async def get_many(
+        self,
+        scope_id: str,
+        event_ids: list[str],
+    ) -> list[dict]: ...
+
+    """Fetch events for a given scope_id (timeline) by event_id."""
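A hypothetical helper showing how the new batch lookup might be used, for example to resolve event ids referenced by a distilled summary back to full rows.

from aethergraph.contracts.storage.event_log import EventLog

async def load_cited_events(log: EventLog, scope_id: str, event_ids: list[str]) -> list[dict]:
    # One round trip instead of querying the whole timeline and filtering client-side.
    return await log.get_many(scope_id, event_ids)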
aethergraph/contracts/storage/search_backend.py ADDED
@@ -0,0 +1,47 @@
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Any, Protocol
+
+
+@dataclass
+class ScoredItem:
+    item_id: str
+    corpus: str
+    score: float
+    metadata: dict[str, Any]
+
+
+class SearchBackend(Protocol):
+    async def upsert(
+        self,
+        *,
+        corpus: str,
+        item_id: str,
+        text: str,
+        metadata: dict[str, Any],
+    ) -> None:
+        """
+        Insert or update an indexed item.
+
+        - corpus: logical collection ("event", "artifact", "run", "doc_*")
+        - item_id: stable identifier (event_id, artifact_id, run_id, etc.)
+        - text: main text used for embedding / lexical search
+        - metadata: arbitrary JSON metadata for filters and recency
+        """
+        ...
+
+    async def search(
+        self,
+        *,
+        corpus: str,
+        query: str,
+        top_k: int = 10,
+        filters: dict[str, Any] | None = None,
+    ) -> list[ScoredItem]:
+        """
+        Semantic/lexical search.
+
+        - filters: AND filters over metadata (None values are treated as wildcards).
+        """
+        ...
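A short usage sketch against this protocol; the backend instance itself would come from the search factory (vector, lexical, or null), which is not shown in this hunk, and the corpus/metadata values are illustrative.

from aethergraph.contracts.storage.search_backend import SearchBackend

async def index_and_find(backend: SearchBackend) -> None:
    await backend.upsert(
        corpus="artifact",
        item_id="art_123",
        text="Quarterly revenue chart for the demo workspace",
        metadata={"scope_id": "sess_001", "kind": "artifact"},
    )
    hits = await backend.search(
        corpus="artifact",
        query="revenue chart",
        top_k=5,
        filters={"scope_id": "sess_001"},
    )
    for hit in hits:
        print(hit.item_id, round(hit.score, 3), hit.metadata.get("kind"))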
aethergraph/contracts/storage/vector_index.py CHANGED
@@ -1,3 +1,4 @@
+from dataclasses import dataclass, field
 from typing import Any, Protocol
 
 """
@@ -38,11 +39,83 @@ class VectorIndex(Protocol):
         corpus_id: str,
         query_vec: list[float],
         k: int,
+        where: dict[str, Any] | None = None,
+        max_candidates: int | None = None,
     ) -> list[dict[str, Any]]: ...
 
+    """
+    where: equality filters on *promoted* fields (org_id, user_id, scope_id, etc.)
+    max_candidates: limit number of candidate rows to score (after SQL WHERE, before cosine).
+    """
+
    # Each dict MUST look like:
    # {"chunk_id": str, "score": float, "meta": dict[str, Any]}
 
    # Optional
    async def list_corpora(self) -> list[str]: ...
    async def list_chunks(self, corpus_id: str) -> list[str]: ...
+
+
+PROMOTED_FIELDS = {
+    "scope_id",
+    "user_id",
+    "org_id",
+    "client_id",
+    "session_id",
+    "run_id",
+    "graph_id",
+    "node_id",
+    "kind",
+    "source",
+}
+
+
+@dataclass
+class IndexMeta:
+    """
+    NOTE: When used with ScopedIndices, identity fields (scope_id, user_id, org_id, etc.)
+    are usually left as None here and provided instead by scope.rag_labels(). In that
+    common path, this struct is primarily for type-specific metadata (kind, source, ts, ...).
+    """
+
+    # tenant / scope
+    scope_id: str | None = None
+    user_id: str | None = None
+    org_id: str | None = None
+    client_id: str | None = None
+    session_id: str | None = None
+
+    # run / graph context
+    run_id: str | None = None
+    graph_id: str | None = None
+    node_id: str | None = None
+
+    # content type
+    kind: str | None = None # e.g. "artifact", "memory_event"
+    source: str | None = None # e.g. "hotlog", "artifact_index"
+
+    # time
+    ts: str | None = None # human-readable ISO
+    created_at_ts: float | None = None # numeric, for DB index
+
+    # free-form / extra labels
+    extra: dict[str, Any] = field(default_factory=dict)
+
+    def to_dict(self) -> dict[str, Any]:
+        d: dict[str, Any] = {
+            "scope_id": self.scope_id,
+            "user_id": self.user_id,
+            "org_id": self.org_id,
+            "client_id": self.client_id,
+            "session_id": self.session_id,
+            "run_id": self.run_id,
+            "graph_id": self.graph_id,
+            "node_id": self.node_id,
+            "kind": self.kind,
+            "source": self.source,
+            "ts": self.ts,
+            "created_at_ts": self.created_at_ts,
+        }
+        d.update(self.extra)
+        # Strip Nones so meta stays compact
+        return {k: v for k, v in d.items() if v is not None}
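A small example of the IndexMeta helper as defined above; per its docstring, identity fields are usually supplied by scope.rag_labels() and left as None here, so to_dict() keeps only the populated keys plus anything in extra.

from aethergraph.contracts.storage.vector_index import IndexMeta

meta = IndexMeta(
    kind="memory_event",
    source="hotlog",
    ts="2024-06-01T12:00:00Z",
    created_at_ts=1717243200.0,
    extra={"topic": "billing"},
)
# None-valued identity fields are stripped, so only the populated keys survive:
print(meta.to_dict())
# {'kind': 'memory_event', 'source': 'hotlog', 'ts': '2024-06-01T12:00:00Z',
#  'created_at_ts': 1717243200.0, 'topic': 'billing'}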