remdb 0.3.226__py3-none-any.whl → 0.3.245__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of remdb might be problematic. Click here for more details.
- rem/agentic/README.md +22 -248
- rem/agentic/context.py +13 -2
- rem/agentic/context_builder.py +39 -33
- rem/agentic/providers/pydantic_ai.py +67 -50
- rem/api/mcp_router/resources.py +223 -0
- rem/api/mcp_router/tools.py +25 -9
- rem/api/routers/auth.py +112 -9
- rem/api/routers/chat/child_streaming.py +394 -0
- rem/api/routers/chat/streaming.py +166 -357
- rem/api/routers/chat/streaming_utils.py +327 -0
- rem/api/routers/query.py +5 -14
- rem/cli/commands/ask.py +144 -33
- rem/cli/commands/process.py +9 -1
- rem/cli/commands/query.py +109 -0
- rem/cli/commands/session.py +117 -0
- rem/cli/main.py +2 -0
- rem/models/entities/session.py +1 -0
- rem/services/postgres/repository.py +7 -17
- rem/services/rem/service.py +47 -0
- rem/services/session/compression.py +7 -3
- rem/services/session/pydantic_messages.py +45 -11
- rem/services/session/reload.py +2 -1
- rem/settings.py +43 -0
- rem/sql/migrations/004_cache_system.sql +3 -1
- rem/utils/schema_loader.py +99 -99
- {remdb-0.3.226.dist-info → remdb-0.3.245.dist-info}/METADATA +2 -2
- {remdb-0.3.226.dist-info → remdb-0.3.245.dist-info}/RECORD +29 -26
- {remdb-0.3.226.dist-info → remdb-0.3.245.dist-info}/WHEEL +0 -0
- {remdb-0.3.226.dist-info → remdb-0.3.245.dist-info}/entry_points.txt +0 -0
|
@@ -0,0 +1,394 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Child Agent Event Handling.
|
|
3
|
+
|
|
4
|
+
Handles events from child agents during multi-agent orchestration.
|
|
5
|
+
|
|
6
|
+
Event Flow:
|
|
7
|
+
```
|
|
8
|
+
Parent Agent (Siggy)
|
|
9
|
+
│
|
|
10
|
+
▼
|
|
11
|
+
ask_agent tool
|
|
12
|
+
│
|
|
13
|
+
├──────────────────────────────────┐
|
|
14
|
+
▼ │
|
|
15
|
+
Child Agent (intake_diverge) │
|
|
16
|
+
│ │
|
|
17
|
+
├── child_tool_start ──────────────┼──► Event Sink (Queue)
|
|
18
|
+
├── child_content ─────────────────┤
|
|
19
|
+
└── child_tool_result ─────────────┘
|
|
20
|
+
│
|
|
21
|
+
▼
|
|
22
|
+
drain_child_events()
|
|
23
|
+
│
|
|
24
|
+
├── SSE to client
|
|
25
|
+
└── DB persistence
|
|
26
|
+
```
|
|
27
|
+
|
|
28
|
+
IMPORTANT: When child_content is streamed, parent text output should be SKIPPED
|
|
29
|
+
to prevent content duplication.
|
|
30
|
+
"""
|
|
31
|
+
|
|
32
|
+
from __future__ import annotations
|
|
33
|
+
|
|
34
|
+
import asyncio
|
|
35
|
+
import json
|
|
36
|
+
import uuid
|
|
37
|
+
from typing import TYPE_CHECKING, Any, AsyncGenerator
|
|
38
|
+
|
|
39
|
+
from loguru import logger
|
|
40
|
+
|
|
41
|
+
from .streaming_utils import StreamingState, build_content_chunk
|
|
42
|
+
from .sse_events import MetadataEvent, ToolCallEvent, format_sse_event
|
|
43
|
+
from ....services.session import SessionMessageStore
|
|
44
|
+
from ....settings import settings
|
|
45
|
+
from ....utils.date_utils import to_iso, utc_now
|
|
46
|
+
|
|
47
|
+
if TYPE_CHECKING:
|
|
48
|
+
from ....agentic.context import AgentContext
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
async def handle_child_tool_start(
    state: StreamingState,
    child_agent: str,
    tool_name: str,
    arguments: dict | str | None,
    session_id: str | None,
    user_id: str | None,
) -> AsyncGenerator[str, None]:
    """
    Process a child_tool_start event.

    Logs the call, emits a "started" ToolCallEvent over SSE, and persists
    a tool-role message so the call survives session reload. The call
    arguments are stored as JSON in the message content — the same layout
    parent tool calls use, and what pydantic_messages.py parses back into
    ToolCallPart.args.
    """
    qualified_name = f"{child_agent}:{tool_name}"
    call_id = f"call_{uuid.uuid4().hex[:8]}"

    # ToolCallPart.args sometimes arrives serialized as a JSON string;
    # decode it, and null out anything that is neither a string nor a dict.
    if isinstance(arguments, str):
        try:
            arguments = json.loads(arguments)
        except json.JSONDecodeError:
            arguments = None
    elif not isinstance(arguments, dict):
        arguments = None

    logger.info(f"🔧 {qualified_name}")

    yield format_sse_event(ToolCallEvent(
        tool_name=qualified_name,
        tool_id=call_id,
        status="started",
        arguments=arguments,
    ))

    # Persist only when a session exists and Postgres is configured.
    if session_id and settings.postgres.enabled:
        try:
            store = SessionMessageStore(
                user_id=user_id or settings.test.effective_user_id
            )
            record = {
                "role": "tool",
                # The agent re-reads this content on reload, so it must be
                # the raw call arguments serialized as JSON.
                "content": json.dumps(arguments) if arguments else "",
                "timestamp": to_iso(utc_now()),
                "tool_call_id": call_id,
                "tool_name": qualified_name,
            }
            await store.store_session_messages(
                session_id=session_id,
                messages=[record],
                user_id=user_id,
                compress=False,
            )
        except Exception as e:
            logger.warning(f"Failed to save child tool call: {e}")
114
|
+
|
|
115
|
+
def handle_child_content(
    state: StreamingState,
    child_agent: str,
    content: str,
) -> str | None:
    """
    Process a child_content event and build its SSE content chunk.

    Side effects on *state*: marks child_content_streamed (callers must
    then suppress the parent's own text output to avoid duplicating the
    child's content) and records the child as the responding agent.

    Returns:
        The formatted SSE chunk, or None when content is empty.
    """
    if not content:
        return None

    # Upstream checks this flag and skips parent text output when set,
    # preventing the same content from reaching the client twice.
    state.child_content_streamed = True
    state.responding_agent = child_agent

    return build_content_chunk(state, content)
|
+
|
|
139
|
+
|
|
140
|
+
async def handle_child_tool_result(
    state: StreamingState,
    child_agent: str,
    result: Any,
    message_id: str | None,
    session_id: str | None,
    agent_schema: str | None,
) -> AsyncGenerator[str, None]:
    """
    Process a child_tool_result event.

    When the result carries a metadata registration (a dict with a
    _metadata_event key) a MetadataEvent is emitted first; a "completed"
    ToolCallEvent is always emitted afterwards.
    """
    result_is_dict = isinstance(result, dict)

    # Metadata registration path (risk/confidence reported by the child).
    if result_is_dict and result.get("_metadata_event"):
        risk_level = result.get("risk_level", "")
        confidence = result.get("confidence", "")
        logger.info(f"📊 {child_agent} metadata: risk={risk_level}, confidence={confidence}")

        # Child may override which agent is reported as responding.
        schema_from_child = result.get("agent_schema")
        if schema_from_child:
            state.responding_agent = schema_from_child

        # Only include risk in the extra payload when it is non-empty.
        extras = {"risk_level": risk_level} if risk_level else None

        yield format_sse_event(MetadataEvent(
            message_id=message_id,
            session_id=session_id,
            agent_schema=agent_schema,
            responding_agent=state.responding_agent,
            confidence=result.get("confidence"),
            extra=extras,
        ))

    # Completion event. Keep the full dict when it carries an artifact
    # (e.g. finalize_intake) so the frontend can extract artifact URLs;
    # otherwise send a truncated string form.
    if result_is_dict and result.get("artifact"):
        payload = result
    else:
        payload = str(result)[:200] if result else None

    yield format_sse_event(ToolCallEvent(
        tool_name=f"{child_agent}:tool",
        tool_id=f"call_{uuid.uuid4().hex[:8]}",
        status="completed",
        result=payload,
    ))
|
|
195
|
+
|
|
196
|
+
async def drain_child_events(
    event_sink: asyncio.Queue,
    state: StreamingState,
    session_id: str | None = None,
    user_id: str | None = None,
    message_id: str | None = None,
    agent_schema: str | None = None,
) -> AsyncGenerator[str, None]:
    """
    Drain and process every child event currently queued in the sink.

    Called during tool execution to forward events that child agents
    pushed via ask_agent, yielding SSE chunks for each.

    NOTE: processing child_content events sets
    state.child_content_streamed = True; callers should then skip the
    parent's own text output to avoid duplicating content.
    """
    while not event_sink.empty():
        try:
            event = event_sink.get_nowait()
            processed = process_child_event(
                event, state, session_id, user_id, message_id, agent_schema
            )
            async for sse_chunk in processed:
                yield sse_chunk
        except Exception as e:
            # Best-effort: a malformed event must not kill the stream.
            logger.warning(f"Error processing child event: {e}")
|
|
224
|
+
|
|
225
|
+
async def process_child_event(
    child_event: dict,
    state: StreamingState,
    session_id: str | None = None,
    user_id: str | None = None,
    message_id: str | None = None,
    agent_schema: str | None = None,
) -> AsyncGenerator[str, None]:
    """Dispatch one child event to its handler and yield its SSE chunks."""
    kind = child_event.get("type", "")
    agent_name = child_event.get("agent_name", "child")

    if kind == "child_tool_start":
        started = handle_child_tool_start(
            state=state,
            child_agent=agent_name,
            tool_name=child_event.get("tool_name", "tool"),
            arguments=child_event.get("arguments"),
            session_id=session_id,
            user_id=user_id,
        )
        async for sse in started:
            yield sse

    elif kind == "child_content":
        # Content handler is synchronous and returns at most one chunk.
        sse = handle_child_content(
            state=state,
            child_agent=agent_name,
            content=child_event.get("content", ""),
        )
        if sse:
            yield sse

    elif kind == "child_tool_result":
        completed = handle_child_tool_result(
            state=state,
            child_agent=agent_name,
            result=child_event.get("result"),
            message_id=message_id,
            session_id=session_id,
            agent_schema=agent_schema,
        )
        async for sse in completed:
            yield sse
|
268
|
+
|
|
269
|
+
async def stream_with_child_events(
    tools_stream,
    child_event_sink: asyncio.Queue,
    state: StreamingState,
    session_id: str | None = None,
    user_id: str | None = None,
    message_id: str | None = None,
    agent_schema: str | None = None,
) -> AsyncGenerator[tuple[str, Any], None]:
    """
    Multiplex tool events with child events using asyncio.wait().

    Instead of draining the queue synchronously during tool event
    iteration, concurrently listen to both sources and yield events as
    they arrive.

    Yields:
        Tuples of (event_type, event_data) where event_type is either
        "tool" or "child", allowing the caller to handle each appropriately.

    Fixes over the previous version:
    - Removed dead ``try/except StopAsyncIteration`` around
      ``asyncio.create_task(tool_iter.__anext__())``: creating the task
      never raises the iterator's exception — it surfaces only when the
      task result is retrieved, which is handled below.
    - Removed dead ``except asyncio.TimeoutError`` on ``task.result()``:
      ``_get_child_event_with_timeout`` catches the timeout itself and
      returns None.
    - When the tool stream ends, a child-listener task that has already
      completed (and therefore already popped an event off the queue) is
      flushed instead of cancelled, so that event is no longer dropped.
    """
    tool_iter = tools_stream.__aiter__()

    # NOTE: create_task() does not raise StopAsyncIteration itself; the
    # iterator's exhaustion surfaces when task.result() is called below.
    pending_tool: asyncio.Task | None = asyncio.create_task(tool_iter.__anext__())
    # Child listener uses a short timeout (returning None) so the
    # multiplexer re-checks both sources regularly.
    pending_child: asyncio.Task | None = asyncio.create_task(
        _get_child_event_with_timeout(child_event_sink, timeout=0.05)
    )

    try:
        while True:
            tasks = {t for t in (pending_tool, pending_child) if t is not None}
            if not tasks:
                break

            # Wait for either source to produce an event.
            done, _ = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED)

            for task in done:
                try:
                    result = task.result()
                except StopAsyncIteration:
                    if task is not pending_tool:
                        continue
                    # Tool stream exhausted.
                    pending_tool = None
                    if pending_child is not None:
                        if pending_child.done():
                            # The listener already popped an event off the
                            # queue; flush it so it is not lost (cancelling
                            # a done task would be a no-op and the event is
                            # no longer in the queue for the drain below).
                            held = pending_child.result()
                            if held is not None:
                                yield ("child", held)
                        else:
                            pending_child.cancel()
                            try:
                                await pending_child
                            except asyncio.CancelledError:
                                pass
                        pending_child = None
                    # Final drain of any remaining queued child events.
                    while not child_event_sink.empty():
                        try:
                            yield ("child", child_event_sink.get_nowait())
                        except asyncio.QueueEmpty:
                            break
                    return

                if task is pending_child:
                    if result is not None:
                        yield ("child", result)
                    # Restart the listener whether we got an event or a
                    # timeout (None) — both mean "listen again".
                    pending_child = asyncio.create_task(
                        _get_child_event_with_timeout(child_event_sink, timeout=0.05)
                    )
                elif task is pending_tool:
                    yield ("tool", result)
                    pending_tool = asyncio.create_task(tool_iter.__anext__())
    finally:
        # Cleanup any tasks still in flight (e.g. generator closed early).
        for task in (pending_tool, pending_child):
            if task and not task.done():
                task.cancel()
                try:
                    await task
                except asyncio.CancelledError:
                    pass
+
|
|
381
|
+
|
|
382
|
+
async def _get_child_event_with_timeout(
|
|
383
|
+
queue: asyncio.Queue, timeout: float = 0.05
|
|
384
|
+
) -> dict | None:
|
|
385
|
+
"""
|
|
386
|
+
Get an event from the queue with a timeout.
|
|
387
|
+
|
|
388
|
+
Returns None on timeout (no event available).
|
|
389
|
+
This allows the multiplexer to check for tool events regularly.
|
|
390
|
+
"""
|
|
391
|
+
try:
|
|
392
|
+
return await asyncio.wait_for(queue.get(), timeout=timeout)
|
|
393
|
+
except asyncio.TimeoutError:
|
|
394
|
+
return None
|