lite-agent 0.8.0__py3-none-any.whl → 0.10.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
- lite_agent/agent.py +17 -263
- lite_agent/chat_display.py +304 -41
- lite_agent/client.py +15 -8
- lite_agent/message_transfers.py +21 -2
- lite_agent/processors/response_event_processor.py +4 -2
- lite_agent/runner.py +90 -104
- lite_agent/types/__init__.py +1 -1
- lite_agent/types/messages.py +2 -1
- lite_agent/utils/advanced_message_builder.py +201 -0
- lite_agent/utils/message_builder.py +4 -2
- lite_agent/utils/message_converter.py +232 -0
- lite_agent/utils/message_state_manager.py +152 -0
- {lite_agent-0.8.0.dist-info → lite_agent-0.10.0.dist-info}/METADATA +2 -2
- {lite_agent-0.8.0.dist-info → lite_agent-0.10.0.dist-info}/RECORD +15 -12
- {lite_agent-0.8.0.dist-info → lite_agent-0.10.0.dist-info}/WHEEL +0 -0
lite_agent/runner.py
CHANGED
@@ -1,5 +1,4 @@
 import json
-import warnings
 from collections.abc import AsyncGenerator, Sequence
 from datetime import datetime, timedelta, timezone
 from os import PathLike
@@ -30,6 +29,7 @@ from lite_agent.types import (
 )
 from lite_agent.types.events import AssistantMessageEvent, FunctionCallOutputEvent
 from lite_agent.utils.message_builder import MessageBuilder
+from lite_agent.utils.message_state_manager import MessageStateManager


 class Runner:
@@ -38,45 +38,40 @@ class Runner:
         self.messages: list[FlexibleRunnerMessage] = []
         self.api = api
         self.streaming = streaming
-        self.
+        self._message_state_manager = MessageStateManager()
         self.usage = MessageUsage(input_tokens=0, output_tokens=0, total_tokens=0)

-    def _start_assistant_message(self, content: str = "", meta: AssistantMessageMeta | None = None) -> None:
+    async def _start_assistant_message(self, content: str = "", meta: AssistantMessageMeta | None = None) -> None:
         """Start a new assistant message."""
-
-
-        meta=
-
-
-
+        # Create meta with model information if not provided
+        if meta is None:
+            meta = AssistantMessageMeta()
+            if hasattr(self.agent.client, "model"):
+                meta.model = self.agent.client.model
+        await self._message_state_manager.start_message(content, meta)
+
+    async def _ensure_current_assistant_message(self) -> NewAssistantMessage:
         """Ensure current assistant message exists and return it."""
-
-        self._start_assistant_message()
-        if self._current_assistant_message is None:
-            msg = "Failed to create current assistant message"
-            raise RuntimeError(msg)
-        return self._current_assistant_message
-
-    def _add_to_current_assistant_message(self, content_item: AssistantTextContent | AssistantToolCall | AssistantToolCallResult) -> None:
-        """Add content to the current assistant message."""
-        self._ensure_current_assistant_message().content.append(content_item)
+        return await self._message_state_manager.ensure_message_exists()

-    def
+    async def _add_to_current_assistant_message(self, content_item: AssistantTextContent | AssistantToolCall | AssistantToolCallResult) -> None:
+        """Add content to the current assistant message."""
+        if isinstance(content_item, AssistantTextContent):
+            await self._message_state_manager.add_text_delta(content_item.text)
+        elif isinstance(content_item, AssistantToolCall):
+            await self._message_state_manager.add_tool_call(content_item)
+        elif isinstance(content_item, AssistantToolCallResult):
+            await self._message_state_manager.add_tool_result(content_item)
+
+    async def _add_text_content_to_current_assistant_message(self, delta: str) -> None:
         """Add text delta to the current assistant message's text content."""
-
-        # Find the first text content item and append the delta
-        for content_item in message.content:
-            if content_item.type == "text":
-                content_item.text += delta
-                return
-        # If no text content found, add new text content
-        message.content.append(AssistantTextContent(text=delta))
+        await self._message_state_manager.add_text_delta(delta)

-    def _finalize_assistant_message(self) -> None:
+    async def _finalize_assistant_message(self) -> None:
         """Finalize the current assistant message and add it to messages."""
-
-
-        self.
+        finalized_message = await self._message_state_manager.finalize_message()
+        if finalized_message is not None:
+            self.messages.append(finalized_message)

     def _add_tool_call_result(self, call_id: str, output: str, execution_time_ms: int | None = None) -> None:
         """Add a tool call result to the last assistant message, or create a new one if needed."""
@@ -90,9 +85,16 @@
             # Add to existing assistant message
             last_message = cast("NewAssistantMessage", self.messages[-1])
             last_message.content.append(result)
+            # Ensure model information is set if not already present
+            if last_message.meta.model is None and hasattr(self.agent.client, "model"):
+                last_message.meta.model = self.agent.client.model
         else:
             # Create new assistant message with just the tool result
-
+            # Include model information if available
+            meta = AssistantMessageMeta()
+            if hasattr(self.agent.client, "model"):
+                meta.model = self.agent.client.model
+            assistant_message = NewAssistantMessage(content=[result], meta=meta)
             self.messages.append(assistant_message)

         # For completion API compatibility, create a separate assistant message
@@ -115,10 +117,12 @@
         # Check for transfer_to_agent calls first
         transfer_calls = [tc for tc in tool_calls if tc.function.name == ToolName.TRANSFER_TO_AGENT]
         if transfer_calls:
+            logger.info(f"Processing {len(transfer_calls)} transfer_to_agent calls")
             # Handle all transfer calls but only execute the first one
             for i, tool_call in enumerate(transfer_calls):
                 if i == 0:
                     # Execute the first transfer
+                    logger.info(f"Executing agent transfer: {tool_call.function.arguments}")
                     call_id, output = await self._handle_agent_transfer(tool_call)
                     # Generate function_call_output event if in includes
                     if "function_call_output" in includes:
@@ -193,8 +197,6 @@
             last_message = cast("NewAssistantMessage", self.messages[-1])
             last_message.content.append(tool_result)

-        # Note: For completion API compatibility, the conversion happens when sending to LLM
-
     async def _collect_all_chunks(self, stream: AsyncGenerator[AgentChunk, None]) -> list[AgentChunk]:
         """Collect all chunks from an async generator into a list."""
         return [chunk async for chunk in stream]
@@ -206,7 +208,6 @@
         includes: Sequence[AgentChunkType] | None = None,
         context: "Any | None" = None,  # noqa: ANN401
         record_to: PathLike | str | None = None,
-        agent_kwargs: dict[str, Any] | None = None,
     ) -> AsyncGenerator[AgentChunk, None]:
         """Run the agent and return a RunResponse object that can be asynchronously iterated for each chunk.

@@ -241,7 +242,7 @@
             # Handle single message (BaseModel, TypedDict, or dict)
             self.append_message(user_input)  # type: ignore[arg-type]
         logger.debug("Messages prepared, calling _run")
-        return self._run(max_steps, includes, self._normalize_record_path(record_to), context=context
+        return self._run(max_steps, includes, self._normalize_record_path(record_to), context=context)

    async def _run(
        self,
@@ -249,7 +250,6 @@
        includes: Sequence[AgentChunkType],
        record_to: Path | None = None,
        context: Any | None = None,  # noqa: ANN401
-        agent_kwargs: dict[str, Any] | None = None,
    ) -> AsyncGenerator[AgentChunk, None]:
        """Run the agent and return a RunResponse object that can be asynchronously iterated for each chunk."""
        logger.debug(f"Running agent with messages: {self.messages}")
@@ -281,21 +281,13 @@

         while not is_finish() and steps < max_steps:
             logger.debug(f"Step {steps}: finish_reason={finish_reason}, is_finish()={is_finish()}")
-
-            # This allows us to keep the new format internally but ensures compatibility
-            # Extract agent kwargs for reasoning configuration
-            reasoning = None
-            if agent_kwargs:
-                reasoning = agent_kwargs.get("reasoning")
-
-            logger.debug(f"Using API: {self.api}, streaming: {self.streaming}")
+            logger.info(f"Making LLM request: API={self.api}, streaming={self.streaming}, messages={len(self.messages)}")
             match self.api:
                 case "completion":
                     logger.debug("Calling agent.completion")
                     resp = await self.agent.completion(
                         self.messages,
                         record_to_file=record_to,
-                        reasoning=reasoning,
                         streaming=self.streaming,
                     )
                 case "responses":
@@ -303,63 +295,75 @@
                     resp = await self.agent.responses(
                         self.messages,
                         record_to_file=record_to,
-                        reasoning=reasoning,
                         streaming=self.streaming,
                     )
                 case _:
                     msg = f"Unknown API type: {self.api}"
                     raise ValueError(msg)
-            logger.debug(
+            logger.debug("Received response stream from agent, processing chunks...")
             async for chunk in resp:
+                # Only log important chunk types to reduce noise
+                if chunk.type not in ["response_raw", "content_delta"]:
+                    logger.debug(f"Processing chunk: {chunk.type}")
                 match chunk.type:
                     case "assistant_message":
+                        logger.debug(f"Assistant message chunk: {len(chunk.message.content) if chunk.message.content else 0} content items")
                         # Start or update assistant message in new format
                         # If we already have a current assistant message, just update its metadata
-
+                        current_message = await self._message_state_manager.get_current_message()
+                        if current_message is not None:
                             # Preserve all existing metadata and only update specific fields
-
-
-
-                            original_meta.latency_ms = chunk.message.meta.latency_ms
-                            if hasattr(chunk.message.meta, "output_time_ms"):
-                                original_meta.total_time_ms = chunk.message.meta.output_time_ms
+                            meta_updates = {"sent_at": chunk.message.meta.sent_at}
+                            # Only include fields of type datetime in meta_updates
+                            # Update int fields separately after update_meta
                             # Preserve other metadata fields like model, usage, etc.
                             for attr in ["model", "usage", "input_tokens", "output_tokens"]:
                                 if hasattr(chunk.message.meta, attr):
-
+                                    meta_updates[attr] = getattr(chunk.message.meta, attr)
+                            await self._message_state_manager.update_meta(**meta_updates)
+                            # Now update int fields directly if present
+                            if hasattr(chunk.message.meta, "latency_ms"):
+                                await self._message_state_manager.update_meta(latency_ms=chunk.message.meta.latency_ms)
+                            if hasattr(chunk.message.meta, "output_time_ms"):
+                                await self._message_state_manager.update_meta(total_time_ms=chunk.message.meta.output_time_ms)
                         else:
-                            # For non-streaming mode,
-                            self.
+                            # For non-streaming mode, start with complete message
+                            await self._start_assistant_message(meta=chunk.message.meta)
+                            # Add all content from the chunk message
+                            for content_item in chunk.message.content:
+                                await self._add_to_current_assistant_message(content_item)

                         # If model is None, try to get it from agent client
-
-
+                        current_message = await self._message_state_manager.get_current_message()
+                        if current_message is not None and current_message.meta.model is None and hasattr(self.agent.client, "model"):
+                            await self._message_state_manager.update_meta(model=self.agent.client.model)
                         # Only yield assistant_message chunk if it's in includes and has content
-                        if chunk.type in includes and
+                        if chunk.type in includes and current_message is not None:
                             # Create a new chunk with the current assistant message content
                             updated_chunk = AssistantMessageEvent(
-                                message=
+                                message=current_message,
                             )
                             yield updated_chunk
                     case "content_delta":
                         # Accumulate text content to current assistant message
-                        self._add_text_content_to_current_assistant_message(chunk.delta)
+                        await self._add_text_content_to_current_assistant_message(chunk.delta)
                         # Always yield content_delta chunk if it's in includes
                         if chunk.type in includes:
                             yield chunk
                     case "function_call":
+                        logger.debug(f"Function call: {chunk.name}({chunk.arguments or '{}'})")
                        # Add tool call to current assistant message
-                        # Keep arguments as string for compatibility with funcall library
                        tool_call = AssistantToolCall(
                            call_id=chunk.call_id,
                            name=chunk.name,
                            arguments=chunk.arguments or "{}",
                        )
-                        self._add_to_current_assistant_message(tool_call)
+                        await self._add_to_current_assistant_message(tool_call)
                        # Always yield function_call chunk if it's in includes
                        if chunk.type in includes:
                            yield chunk
                    case "usage":
+                        logger.debug(f"Usage: {chunk.usage.input_tokens} input, {chunk.usage.output_tokens} output tokens")
                        # Update the current or last assistant message with usage data and output_time_ms
                        usage_time = datetime.now(timezone.utc)

@@ -372,9 +376,8 @@
                        target_message = None

                        # First check if we have a current assistant message
-
-
-                        else:
+                        target_message = await self._message_state_manager.get_current_message()
+                        if target_message is None:
                            # Otherwise, look for the last assistant message in the list
                            for i in range(len(self.messages) - 1, -1, -1):
                                current_message = self.messages[i]
@@ -402,11 +405,24 @@
                        # Always yield usage chunk if it's in includes
                        if chunk.type in includes:
                            yield chunk
+                    case "timing":
+                        # Update timing information in current assistant message
+                        current_message = await self._message_state_manager.get_current_message()
+                        if current_message is not None:
+                            await self._message_state_manager.update_meta(latency_ms=chunk.timing.latency_ms, total_time_ms=chunk.timing.output_time_ms)
+                        # Also try to update the last assistant message if no current message
+                        elif self.messages and isinstance(self.messages[-1], NewAssistantMessage):
+                            last_message = cast("NewAssistantMessage", self.messages[-1])
+                            last_message.meta.latency_ms = chunk.timing.latency_ms
+                            last_message.meta.total_time_ms = chunk.timing.output_time_ms
+                        # Always yield timing chunk if it's in includes
+                        if chunk.type in includes:
+                            yield chunk
                    case _ if chunk.type in includes:
                        yield chunk

            # Finalize assistant message so it can be found in pending function calls
-            self._finalize_assistant_message()
+            await self._finalize_assistant_message()

            # Check for pending tool calls after processing current assistant message
            pending_tool_calls = self._find_pending_tool_calls()
@@ -432,36 +448,6 @@
        require_confirm_tools = await self.agent.list_require_confirm_tools(tool_calls)
        return bool(require_confirm_tools)

-    async def run_continue_until_complete(
-        self,
-        max_steps: int = 20,
-        includes: list[AgentChunkType] | None = None,
-        record_to: PathLike | str | None = None,
-    ) -> list[AgentChunk]:
-        """Deprecated: Use run_until_complete(None) instead."""
-        warnings.warn(
-            "run_continue_until_complete is deprecated. Use run_until_complete(None) instead.",
-            DeprecationWarning,
-            stacklevel=2,
-        )
-        resp = self.run_continue_stream(max_steps, includes, record_to=record_to)
-        return await self._collect_all_chunks(resp)
-
-    def run_continue_stream(
-        self,
-        max_steps: int = 20,
-        includes: list[AgentChunkType] | None = None,
-        record_to: PathLike | str | None = None,
-        context: "Any | None" = None,  # noqa: ANN401
-    ) -> AsyncGenerator[AgentChunk, None]:
-        """Deprecated: Use run(None) instead."""
-        warnings.warn(
-            "run_continue_stream is deprecated. Use run(None) instead.",
-            DeprecationWarning,
-            stacklevel=2,
-        )
-        return self._run_continue_stream(max_steps, includes, record_to=record_to, context=context)
-
    async def _run_continue_stream(
        self,
        max_steps: int = 20,
|
|
|
832
818
|
logger.info("Transferring conversation from %s to %s", self.agent.name, target_agent_name)
|
|
833
819
|
self.agent = target_agent
|
|
834
820
|
|
|
835
|
-
return tool_call.id, output
|
|
836
|
-
|
|
837
821
|
except Exception as e:
|
|
838
822
|
logger.exception("Failed to execute transfer_to_agent tool call")
|
|
839
823
|
output = f"Transfer failed: {e!s}"
|
|
@@ -843,6 +827,8 @@ class Runner:
|
|
|
843
827
|
output=output,
|
|
844
828
|
)
|
|
845
829
|
return tool_call.id, output
|
|
830
|
+
else:
|
|
831
|
+
return tool_call.id, output
|
|
846
832
|
|
|
847
833
|
async def _handle_parent_transfer(self, tool_call: ToolCall) -> tuple[str, str]:
|
|
848
834
|
"""Handle parent transfer when transfer_to_parent tool is called.
|
|
@@ -883,8 +869,6 @@ class Runner:
|
|
|
883
869
|
logger.info("Transferring conversation from %s back to parent %s", self.agent.name, self.agent.parent.name)
|
|
884
870
|
self.agent = self.agent.parent
|
|
885
871
|
|
|
886
|
-
return tool_call.id, output
|
|
887
|
-
|
|
888
872
|
except Exception as e:
|
|
889
873
|
logger.exception("Failed to execute transfer_to_parent tool call")
|
|
890
874
|
output = f"Transfer to parent failed: {e!s}"
|
|
@@ -894,3 +878,5 @@ class Runner:
|
|
|
894
878
|
output=output,
|
|
895
879
|
)
|
|
896
880
|
return tool_call.id, output
|
|
881
|
+
else:
|
|
882
|
+
return tool_call.id, output
|
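Taken together, these hunks replace the Runner's hand-rolled assistant-message bookkeeping with the new MessageStateManager (added as lite_agent/utils/message_state_manager.py, +152 lines, not expanded in this diff). A rough sketch of the manager's surface as the Runner exercises it above; the method names and signatures are inferred from the call sites only, and the concrete values are illustrative:

```python
from lite_agent.types import AssistantMessageMeta, AssistantToolCall, AssistantToolCallResult
from lite_agent.utils.message_state_manager import MessageStateManager


async def sketch() -> None:
    manager = MessageStateManager()

    # The same sequence Runner._run drives while consuming a response stream:
    # open a message, feed text deltas and tool activity, patch metadata.
    await manager.start_message("", AssistantMessageMeta())
    await manager.add_text_delta("Hello, ")
    await manager.add_text_delta("world!")
    await manager.add_tool_call(AssistantToolCall(call_id="call_1", name="lookup", arguments="{}"))
    await manager.add_tool_result(AssistantToolCallResult(call_id="call_1", output="done"))
    await manager.update_meta(latency_ms=120, total_time_ms=480)

    # get_current_message() returns the in-progress message (or None).
    current = await manager.get_current_message()
    print(current is not None)

    # finalize_message() hands back the assembled NewAssistantMessage (or None if
    # nothing was started); Runner appends that to self.messages.
    message = await manager.finalize_message()
    if message is not None:
        print(len(message.content))
```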
lite_agent/types/__init__.py
CHANGED
lite_agent/types/messages.py
CHANGED
@@ -211,7 +211,7 @@ class UserMessageContentItemImageURL(BaseModel):
    image_url: UserMessageContentItemImageURLImageURL


-#
+# Message wrapper classes for backward compatibility
 class AgentUserMessage(NewUserMessage):
    def __init__(
        self,
@@ -250,6 +250,7 @@ class AgentAssistantMessage(NewAssistantMessage):
        )


+# AgentSystemMessage is now an alias to NewSystemMessage
 AgentSystemMessage = NewSystemMessage
 RunnerMessage = NewMessage

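The alias itself already existed; the new comment only documents it. The backward-compatibility contract it describes is simply the following (a sketch, assuming both names remain importable as shown in this file):

```python
from lite_agent.types import NewSystemMessage
from lite_agent.types.messages import AgentSystemMessage

# AgentSystemMessage is a plain alias, not a subclass: both names are the same class.
assert AgentSystemMessage is NewSystemMessage
```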
lite_agent/utils/advanced_message_builder.py
ADDED
@@ -0,0 +1,201 @@
+"""Advanced message builder with fluent interface for complex message construction."""
+
+from datetime import datetime, timezone
+from typing import Any
+
+from lite_agent.loggers import logger
+from lite_agent.types import (
+    AssistantMessageMeta,
+    AssistantTextContent,
+    AssistantToolCall,
+    AssistantToolCallResult,
+    MessageMeta,
+    MessageUsage,
+    NewAssistantMessage,
+    NewSystemMessage,
+    NewUserMessage,
+    UserImageContent,
+    UserTextContent,
+)
+
+
+class FluentMessageBuilder:
+    """Fluent interface for building complex messages step by step."""
+
+    def __init__(self):
+        self._reset()
+
+    def _reset(self) -> None:
+        """Reset builder state."""
+        self._message_type = None
+        self._content_items = []
+        self._meta = None
+
+    def user_message(self) -> "FluentMessageBuilder":
+        """Start building a user message."""
+        self._reset()
+        self._message_type = "user"
+        self._meta = MessageMeta()
+        logger.debug("Started building user message")
+        return self
+
+    def assistant_message(self, model: str | None = None) -> "FluentMessageBuilder":
+        """Start building an assistant message."""
+        self._reset()
+        self._message_type = "assistant"
+        self._meta = AssistantMessageMeta(model=model)
+        logger.debug(f"Started building assistant message (model: {model})")
+        return self
+
+    def system_message(self) -> "FluentMessageBuilder":
+        """Start building a system message."""
+        self._reset()
+        self._message_type = "system"
+        self._meta = MessageMeta()
+        self._content = ""
+        logger.debug("Started building system message")
+        return self
+
+    def add_text(self, text: str) -> "FluentMessageBuilder":
+        """Add text content."""
+        if self._message_type == "user":
+            self._content_items.append(UserTextContent(text=text))
+        elif self._message_type == "assistant":
+            self._content_items.append(AssistantTextContent(text=text))
+        elif self._message_type == "system":
+            self._content = text
+        else:
+            msg = "Message type not set. Call user_message(), assistant_message(), or system_message() first."
+            raise ValueError(msg)
+
+        logger.debug(f"Added text content (length: {len(text)})")
+        return self
+
+    def add_image(self, image_url: str | None = None, file_id: str | None = None, detail: str = "auto") -> "FluentMessageBuilder":
+        """Add image content to user message."""
+        if self._message_type != "user":
+            msg = "Images can only be added to user messages"
+            raise ValueError(msg)
+
+        self._content_items.append(UserImageContent(image_url=image_url, file_id=file_id, detail=detail))
+        logger.debug(f"Added image content (url: {bool(image_url)}, file_id: {bool(file_id)})")
+        return self
+
+    def add_tool_call(self, call_id: str, name: str, arguments: dict[str, Any] | str) -> "FluentMessageBuilder":
+        """Add tool call to assistant message."""
+        if self._message_type != "assistant":
+            msg = "Tool calls can only be added to assistant messages"
+            raise ValueError(msg)
+
+        self._content_items.append(AssistantToolCall(call_id=call_id, name=name, arguments=arguments))
+        logger.debug(f"Added tool call: {name} (call_id: {call_id})")
+        return self
+
+    def add_tool_result(self, call_id: str, output: str, execution_time_ms: int | None = None) -> "FluentMessageBuilder":
+        """Add tool call result to assistant message."""
+        if self._message_type != "assistant":
+            msg = "Tool results can only be added to assistant messages"
+            raise ValueError(msg)
+
+        self._content_items.append(AssistantToolCallResult(call_id=call_id, output=output, execution_time_ms=execution_time_ms))
+        logger.debug(f"Added tool result for call: {call_id}")
+        return self
+
+    def with_timestamp(self, timestamp: datetime | None = None) -> "FluentMessageBuilder":
+        """Set message timestamp."""
+        if self._meta is None:
+            msg = "Message type not set. Call user_message(), assistant_message(), or system_message() first."
+            raise ValueError(msg)
+        if timestamp is None:
+            timestamp = datetime.now(timezone.utc)
+        self._meta.sent_at = timestamp
+        return self
+
+    def with_usage(self, input_tokens: int | None = None, output_tokens: int | None = None) -> "FluentMessageBuilder":
+        """Set usage information (assistant messages only)."""
+        if self._message_type != "assistant":
+            msg = "Usage information can only be set for assistant messages"
+            raise ValueError(msg)
+
+        if self._meta is None:
+            msg = "Message type not set. Call user_message(), assistant_message(), or system_message() first."
+            raise ValueError(msg)
+
+        if not hasattr(self._meta, "usage"):
+            self._meta.usage = MessageUsage()
+
+        if self._meta.usage is not None:
+            if input_tokens is not None:
+                self._meta.usage.input_tokens = input_tokens
+            if output_tokens is not None:
+                self._meta.usage.output_tokens = output_tokens
+            if input_tokens is not None and output_tokens is not None:
+                self._meta.usage.total_tokens = input_tokens + output_tokens
+
+        return self
+
+    def with_timing(self, latency_ms: int | None = None, total_time_ms: int | None = None) -> "FluentMessageBuilder":
+        """Set timing information (assistant messages only)."""
+        if self._message_type != "assistant":
+            msg = "Timing information can only be set for assistant messages"
+            raise ValueError(msg)
+
+        if self._meta is None:
+            msg = "Message type not set. Call user_message(), assistant_message(), or system_message() first."
+            raise ValueError(msg)
+
+        if latency_ms is not None:
+            self._meta.latency_ms = latency_ms  # type: ignore[attr-defined]
+        if total_time_ms is not None:
+            self._meta.total_time_ms = total_time_ms  # type: ignore[attr-defined]
+
+        return self
+
+    def build(self) -> NewUserMessage | NewAssistantMessage | NewSystemMessage:
+        """Build the final message."""
+        if self._message_type == "user":
+            message = NewUserMessage(content=self._content_items, meta=self._meta)
+        elif self._message_type == "assistant":
+            message = NewAssistantMessage(content=self._content_items, meta=self._meta)
+        elif self._message_type == "system":
+            message = NewSystemMessage(content=self._content, meta=self._meta)
+        else:
+            msg = "Message type not set"
+            raise ValueError(msg)
+
+        logger.debug(f"Built {self._message_type} message with {len(getattr(self, '_content_items', []))} content items")
+        return message
+
+
+class MessageBuilderFactory:
+    """Factory for creating common message types quickly."""
+
+    @staticmethod
+    def create_simple_user_message(text: str) -> NewUserMessage:
+        """Create a simple user text message."""
+        return FluentMessageBuilder().user_message().add_text(text).build()
+
+    @staticmethod
+    def create_simple_assistant_message(text: str, model: str | None = None) -> NewAssistantMessage:
+        """Create a simple assistant text message."""
+        return FluentMessageBuilder().assistant_message(model).add_text(text).build()
+
+    @staticmethod
+    def create_system_message(text: str) -> NewSystemMessage:
+        """Create a system message."""
+        return FluentMessageBuilder().system_message().add_text(text).build()
+
+    @staticmethod
+    def create_user_message_with_image(text: str, image_url: str) -> NewUserMessage:
+        """Create a user message with text and image."""
+        return FluentMessageBuilder().user_message().add_text(text).add_image(image_url=image_url).build()
+
+    @staticmethod
+    def create_assistant_with_tool_call(text: str, call_id: str, tool_name: str, arguments: dict[str, Any], model: str | None = None) -> NewAssistantMessage:
+        """Create an assistant message with text and a tool call."""
+        return FluentMessageBuilder().assistant_message(model).add_text(text).add_tool_call(call_id, tool_name, arguments).build()
+
+    @staticmethod
+    def create_assistant_with_tool_result(call_id: str, result: str, execution_time_ms: int | None = None, model: str | None = None) -> NewAssistantMessage:
+        """Create an assistant message with just a tool result."""
+        return FluentMessageBuilder().assistant_message(model).add_tool_result(call_id, result, execution_time_ms).build()
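A short usage sketch of the new builder, using only the methods defined in the file above; the concrete values (model name, call IDs, token counts) are illustrative:

```python
from lite_agent.utils.advanced_message_builder import FluentMessageBuilder, MessageBuilderFactory

# Fluent construction of an assistant message carrying text, a tool call,
# its result, and usage/timing metadata.
assistant = (
    FluentMessageBuilder()
    .assistant_message(model="gpt-4o-mini")  # illustrative model name
    .add_text("Checking the weather...")
    .add_tool_call(call_id="call_1", name="get_weather", arguments='{"city": "Paris"}')
    .add_tool_result(call_id="call_1", output="22C and sunny", execution_time_ms=120)
    .with_usage(input_tokens=15, output_tokens=42)
    .with_timing(latency_ms=300, total_time_ms=900)
    .build()
)

# Factory shortcuts cover the common one-liners.
user = MessageBuilderFactory.create_simple_user_message("What's the weather in Paris?")
system = MessageBuilderFactory.create_system_message("You are a helpful assistant.")
print(type(assistant).__name__, type(user).__name__, type(system).__name__)
```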
lite_agent/utils/message_builder.py
CHANGED
@@ -180,7 +180,9 @@ class MessageBuilder:
                                execution_time_ms=item.get("execution_time_ms"),
                            ),
                        )
-
+                    else:
+                        # Unknown dict type - convert to text
+                        assistant_content_items.append(AssistantTextContent(text=str(item)))
                else:
                    # Fallback for unknown item format
                    assistant_content_items.append(AssistantTextContent(text=str(item)))
@@ -188,7 +190,7 @@
            # Fallback for other content types
            assistant_content_items = [AssistantTextContent(text=str(content))]

-            # Handle tool calls if present
+        # Handle tool calls if present
        if "tool_calls" in message:
            for tool_call in message.get("tool_calls", []):
                try:
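The behavioural change in MessageBuilder is small: a dict content item whose shape is not recognized is now stringified into AssistantTextContent instead of being dropped. A minimal illustration of that fallback (the dict itself is hypothetical):

```python
from lite_agent.types import AssistantTextContent

assistant_content_items: list[AssistantTextContent] = []
item = {"type": "something_unrecognized", "payload": 123}  # hypothetical unknown dict shape

# Mirrors the new else-branch above: unknown dicts become plain text content.
assistant_content_items.append(AssistantTextContent(text=str(item)))
print(assistant_content_items[0].text)
```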
|