agent-dev-cli 0.0.1b251223 (agent_dev_cli-0.0.1b251223-py3-none-any.whl)
This diff represents the content of publicly available package versions released to one of the supported registries. The information in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agent_dev_cli-0.0.1b251223.dist-info/METADATA +143 -0
- agent_dev_cli-0.0.1b251223.dist-info/RECORD +20 -0
- agent_dev_cli-0.0.1b251223.dist-info/WHEEL +4 -0
- agent_dev_cli-0.0.1b251223.dist-info/entry_points.txt +3 -0
- agentdev/__init__.py +22 -0
- agentdev/__main__.py +5 -0
- agentdev/_bootstrap.py +70 -0
- agentdev/_hooks.py +275 -0
- agentdev/backend/__init__.py +10 -0
- agentdev/backend/_conversations.py +473 -0
- agentdev/backend/_utils.py +86 -0
- agentdev/backend/code_analyzer.py +41 -0
- agentdev/backend/errors.py +2 -0
- agentdev/backend/event_mapper.py +302 -0
- agentdev/backend/server.py +262 -0
- agentdev/backend/structs/__init__.py +10 -0
- agentdev/backend/structs/entity_response.py +30 -0
- agentdev/backend/structs/request.py +57 -0
- agentdev/cli.py +165 -0
- agentdev/localdebug.py +65 -0
agentdev/backend/_conversations.py
@@ -0,0 +1,473 @@
# Copyright (c) Microsoft. All rights reserved.

"""Conversation storage abstraction for OpenAI Conversations API.

This module provides a clean abstraction layer for managing conversations
while wrapping AgentFramework's AgentThread underneath.
"""

import time
import uuid
from abc import ABC, abstractmethod
from typing import Any, Literal, cast

from agent_framework import AgentThread, ChatMessage
from openai.types.conversations import Conversation, ConversationDeletedResource
from openai.types.conversations.conversation_item import ConversationItem
from openai.types.conversations.message import Message
from openai.types.conversations.text_content import TextContent
from openai.types.responses import (
    ResponseFunctionToolCallItem,
    ResponseFunctionToolCallOutputItem,
    ResponseInputFile,
    ResponseInputImage,
)

# Type alias for OpenAI Message role literals
MessageRole = Literal["unknown", "user", "assistant", "system", "critic", "discriminator", "developer", "tool"]


class ConversationStore(ABC):
    """Abstract base class for conversation storage.

    Provides OpenAI Conversations API interface while managing
    AgentThread instances underneath.
    """

    @abstractmethod
    def create_conversation(self, metadata: dict[str, str] | None = None) -> Conversation:
        """Create a new conversation (wraps AgentThread creation).

        Args:
            metadata: Optional metadata dict (e.g., {"agent_id": "weather_agent"})

        Returns:
            Conversation object with generated ID
        """
        pass

    @abstractmethod
    def get_conversation(self, conversation_id: str) -> Conversation | None:
        """Retrieve conversation metadata.

        Args:
            conversation_id: Conversation ID

        Returns:
            Conversation object or None if not found
        """
        pass

    @abstractmethod
    def update_conversation(self, conversation_id: str, metadata: dict[str, str]) -> Conversation:
        """Update conversation metadata.

        Args:
            conversation_id: Conversation ID
            metadata: New metadata dict

        Returns:
            Updated Conversation object

        Raises:
            ValueError: If conversation not found
        """
        pass

    @abstractmethod
    def delete_conversation(self, conversation_id: str) -> ConversationDeletedResource:
        """Delete conversation (including AgentThread).

        Args:
            conversation_id: Conversation ID

        Returns:
            ConversationDeletedResource object

        Raises:
            ValueError: If conversation not found
        """
        pass

    @abstractmethod
    async def add_items(self, conversation_id: str, items: list[dict[str, Any]]) -> list[ConversationItem]:
        """Add items to conversation (syncs to AgentThread.message_store).

        Args:
            conversation_id: Conversation ID
            items: List of conversation items to add

        Returns:
            List of added ConversationItem objects

        Raises:
            ValueError: If conversation not found
        """
        pass

    @abstractmethod
    async def list_items(
        self, conversation_id: str, limit: int = 100, after: str | None = None, order: str = "asc"
    ) -> tuple[list[ConversationItem], bool]:
        """List conversation items from AgentThread.message_store.

        Args:
            conversation_id: Conversation ID
            limit: Maximum number of items to return
            after: Cursor for pagination (item_id)
            order: Sort order ("asc" or "desc")

        Returns:
            Tuple of (items list, has_more boolean)

        Raises:
            ValueError: If conversation not found
        """
        pass

    @abstractmethod
    def get_item(self, conversation_id: str, item_id: str) -> ConversationItem | None:
        """Get specific conversation item.

        Args:
            conversation_id: Conversation ID
            item_id: Item ID

        Returns:
            ConversationItem or None if not found
        """
        pass

    @abstractmethod
    def get_thread(self, conversation_id: str) -> AgentThread | None:
        """Get underlying AgentThread for execution (internal use).

        This is the critical method that allows the executor to get the
        AgentThread for running agents with conversation context.

        Args:
            conversation_id: Conversation ID

        Returns:
            AgentThread object or None if not found
        """
        pass

    @abstractmethod
    def list_conversations_by_metadata(self, metadata_filter: dict[str, str]) -> list[Conversation]:
        """Filter conversations by metadata (e.g., agent_id).

        Args:
            metadata_filter: Metadata key-value pairs to match

        Returns:
            List of matching Conversation objects
        """
        pass


class InMemoryConversationStore(ConversationStore):
    """In-memory conversation storage wrapping AgentThread.

    This implementation stores conversations in memory with their
    underlying AgentThread instances for execution.
    """

    def __init__(self) -> None:
        """Initialize in-memory conversation storage.

        Storage structure maps conversation IDs to conversation data including
        the underlying AgentThread, metadata, and cached ConversationItems.
        """
        self._conversations: dict[str, dict[str, Any]] = {}

        # Item index for O(1) lookup: {conversation_id: {item_id: ConversationItem}}
        self._item_index: dict[str, dict[str, ConversationItem]] = {}

    def create_conversation(self, metadata: dict[str, str] | None = None) -> Conversation:
        """Create a new conversation with underlying AgentThread."""
        conv_id = f"conv_{uuid.uuid4().hex}"
        created_at = int(time.time())

        # Create AgentThread with default ChatMessageStore
        thread = AgentThread()

        self._conversations[conv_id] = {
            "id": conv_id,
            "thread": thread,
            "metadata": metadata or {},
            "created_at": created_at,
            "items": [],
        }

        # Initialize item index for this conversation
        self._item_index[conv_id] = {}

        return Conversation(id=conv_id, object="conversation", created_at=created_at, metadata=metadata)

    def get_conversation(self, conversation_id: str) -> Conversation | None:
        """Retrieve conversation metadata."""
        conv_data = self._conversations.get(conversation_id)
        if not conv_data:
            return None

        return Conversation(
            id=conv_data["id"],
            object="conversation",
            created_at=conv_data["created_at"],
            metadata=conv_data.get("metadata"),
        )

    def update_conversation(self, conversation_id: str, metadata: dict[str, str]) -> Conversation:
        """Update conversation metadata."""
        conv_data = self._conversations.get(conversation_id)
        if not conv_data:
            raise ValueError(f"Conversation {conversation_id} not found")

        conv_data["metadata"] = metadata

        return Conversation(
            id=conv_data["id"],
            object="conversation",
            created_at=conv_data["created_at"],
            metadata=metadata,
        )

    def delete_conversation(self, conversation_id: str) -> ConversationDeletedResource:
        """Delete conversation and its AgentThread."""
        if conversation_id not in self._conversations:
            raise ValueError(f"Conversation {conversation_id} not found")

        del self._conversations[conversation_id]
        # Cleanup item index
        self._item_index.pop(conversation_id, None)

        return ConversationDeletedResource(id=conversation_id, object="conversation.deleted", deleted=True)

    async def add_items(self, conversation_id: str, items: list[dict[str, Any]]) -> list[ConversationItem]:
        """Add items to conversation and sync to AgentThread."""
        conv_data = self._conversations.get(conversation_id)
        if not conv_data:
            raise ValueError(f"Conversation {conversation_id} not found")

        thread: AgentThread = conv_data["thread"]

        # Convert items to ChatMessages and add to thread
        chat_messages = []
        for item in items:
            # Simple conversion - assume text content for now
            role = item.get("role", "user")
            content = item.get("content", [])
            text = content[0].get("text", "") if content else ""

            chat_msg = ChatMessage(role=role, contents=[{"type": "text", "text": text}])
            chat_messages.append(chat_msg)

        # Add messages to AgentThread
        await thread.on_new_messages(chat_messages)

        # Create Message objects (ConversationItem is a Union - use concrete Message type)
        conv_items: list[ConversationItem] = []
        for msg in chat_messages:
            item_id = f"item_{uuid.uuid4().hex}"

            # Extract role - handle both string and enum
            role_str = msg.role.value if hasattr(msg.role, "value") else str(msg.role)
            role = cast(MessageRole, role_str)  # Safe: Agent Framework roles match OpenAI roles

            # Convert ChatMessage contents to OpenAI TextContent format
            message_content = []
            for content_item in msg.contents:
                if hasattr(content_item, "type") and content_item.type == "text":
                    # Extract text from TextContent object
                    text_value = getattr(content_item, "text", "")
                    message_content.append(TextContent(type="text", text=text_value))

            # Create Message object (concrete type from ConversationItem union)
            message = Message(
                id=item_id,
                type="message",  # Required discriminator for union
                role=role,
                content=message_content,
                status="completed",  # Required field
            )
            conv_items.append(message)

        # Cache items
        conv_data["items"].extend(conv_items)

        # Update item index for O(1) lookup
        if conversation_id not in self._item_index:
            self._item_index[conversation_id] = {}

        for conv_item in conv_items:
            if conv_item.id:  # Guard against None
                self._item_index[conversation_id][conv_item.id] = conv_item

        return conv_items

    async def list_items(
        self, conversation_id: str, limit: int = 100, after: str | None = None, order: str = "asc"
    ) -> tuple[list[ConversationItem], bool]:
        """List conversation items from AgentThread message store.

        Converts AgentFramework ChatMessages to proper OpenAI ConversationItem types:
        - Messages with text/images/files → Message
        - Function calls → ResponseFunctionToolCallItem
        - Function results → ResponseFunctionToolCallOutputItem
        """
        conv_data = self._conversations.get(conversation_id)
        if not conv_data:
            raise ValueError(f"Conversation {conversation_id} not found")

        thread: AgentThread = conv_data["thread"]

        # Get messages from thread's message store
        items: list[ConversationItem] = []
        if thread.message_store:
            af_messages = await thread.message_store.list_messages()

            # Convert each AgentFramework ChatMessage to appropriate ConversationItem type(s)
            for i, msg in enumerate(af_messages):
                item_id = f"item_{i}"
                role_str = msg.role.value if hasattr(msg.role, "value") else str(msg.role)
                role = cast(MessageRole, role_str)  # Safe: Agent Framework roles match OpenAI roles

                # Process each content item in the message
                # A single ChatMessage may produce multiple ConversationItems
                # (e.g., a message with both text and a function call)
                message_contents: list[TextContent | ResponseInputImage | ResponseInputFile] = []
                function_calls = []
                function_results = []

                for content in msg.contents:
                    content_type = getattr(content, "type", None)

                    if content_type == "text":
                        # Text content for Message
                        text_value = getattr(content, "text", "")
                        message_contents.append(TextContent(type="text", text=text_value))

                    elif content_type == "data":
                        # Data content (images, files, PDFs)
                        uri = getattr(content, "uri", "")
                        media_type = getattr(content, "media_type", None)

                        if media_type and media_type.startswith("image/"):
                            # Convert to ResponseInputImage
                            message_contents.append(
                                ResponseInputImage(type="input_image", image_url=uri, detail="auto")
                            )
                        else:
                            # Convert to ResponseInputFile
                            # Extract filename from URI if possible
                            filename = None
                            if media_type == "application/pdf":
                                filename = "document.pdf"

                            message_contents.append(
                                ResponseInputFile(type="input_file", file_url=uri, filename=filename)
                            )

                    elif content_type == "function_call":
                        # Function call - create separate ConversationItem
                        call_id = getattr(content, "call_id", None)
                        name = getattr(content, "name", "")
                        arguments = getattr(content, "arguments", "")

                        if call_id and name:
                            function_calls.append(
                                ResponseFunctionToolCallItem(
                                    id=f"{item_id}_call_{call_id}",
                                    call_id=call_id,
                                    name=name,
                                    arguments=arguments,
                                    type="function_call",
                                    status="completed",
                                )
                            )

                    elif content_type == "function_result":
                        # Function result - create separate ConversationItem
                        call_id = getattr(content, "call_id", None)
                        # Output is stored in additional_properties
                        output = ""
                        if hasattr(content, "additional_properties"):
                            output = content.additional_properties.get("output", "")

                        if call_id:
                            function_results.append(
                                ResponseFunctionToolCallOutputItem(
                                    id=f"{item_id}_result_{call_id}",
                                    call_id=call_id,
                                    output=output,
                                    type="function_call_output",
                                    status="completed",
                                )
                            )

                # Create ConversationItems based on what we found
                # If message has text/images/files, create a Message item
                if message_contents:
                    message = Message(
                        id=item_id,
                        type="message",
                        role=role,  # type: ignore
                        content=message_contents,  # type: ignore
                        status="completed",
                    )
                    items.append(message)

                # Add function call items
                items.extend(function_calls)

                # Add function result items
                items.extend(function_results)

        # Apply pagination
        if order == "desc":
            items = items[::-1]

        start_idx = 0
        if after:
            # Find the index after the cursor
            for i, item in enumerate(items):
                if item.id == after:
                    start_idx = i + 1
                    break

        paginated_items = items[start_idx : start_idx + limit]
        has_more = len(items) > start_idx + limit

        return paginated_items, has_more

    def get_item(self, conversation_id: str, item_id: str) -> ConversationItem | None:
        """Get specific conversation item - O(1) lookup via index."""
        # Use index for O(1) lookup instead of linear search
        conv_items = self._item_index.get(conversation_id)
        if not conv_items:
            return None

        return conv_items.get(item_id)

    def get_thread(self, conversation_id: str) -> AgentThread | None:
        """Get AgentThread for execution - CRITICAL for agent.run_stream()."""
        conv_data = self._conversations.get(conversation_id)
        return conv_data["thread"] if conv_data else None

    def list_conversations_by_metadata(self, metadata_filter: dict[str, str]) -> list[Conversation]:
        """Filter conversations by metadata (e.g., agent_id)."""
        results = []
        for conv_data in self._conversations.values():
            conv_meta = conv_data.get("metadata", {})
            # Check if all filter items match
            if all(conv_meta.get(k) == v for k, v in metadata_filter.items()):
                results.append(
                    Conversation(
                        id=conv_data["id"],
                        object="conversation",
                        created_at=conv_data["created_at"],
                        metadata=conv_meta,
                    )
                )
        return results
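For orientation, here is a minimal usage sketch of the in-memory store above. It is illustrative only (not shipped in the package) and assumes the agent_framework and openai dependencies are installed and accept the dict-shaped content that add_items builds internally.

import asyncio

from agentdev.backend._conversations import InMemoryConversationStore


async def demo() -> None:
    store = InMemoryConversationStore()
    conv = store.create_conversation(metadata={"agent_id": "weather_agent"})

    # add_items only reads "role" and the first content part's "text".
    await store.add_items(
        conv.id,
        [{"role": "user", "content": [{"type": "text", "text": "What's the weather in Seattle?"}]}],
    )

    items, has_more = await store.list_items(conv.id, limit=10, order="asc")
    print(len(items), has_more)

    # The wrapped AgentThread is what an executor would hand to an agent run.
    thread = store.get_thread(conv.id)
    print(type(thread).__name__)


asyncio.run(demo())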
agentdev/backend/_utils.py
@@ -0,0 +1,86 @@
from agent_framework import ChatMessage, AgentExecutorResponse
from typing import Any

# Helper function to serialize executor input & output to JSON for frontend visualization
def serialize_data(data: Any, max_depth: int = 10) -> Any:
    """Serialize executor event data to JSON-serializable format.

    Handles complex types like ChatMessage, AgentExecutorResponse, dataclasses,
    enums, and objects with to_dict() or model_dump() methods.

    Args:
        data: The data to serialize
        max_depth: Maximum recursion depth to prevent infinite loops (default: 10)
    """
    if data is None:
        return None

    # Prevent infinite recursion
    if max_depth <= 0:
        return f"<max_depth_reached: {type(data).__name__}>"

    # Primitive types
    if isinstance(data, (str, int, float, bool)):
        return data

    # Handle Enum types (like Role)
    from enum import Enum
    if isinstance(data, Enum):
        return data.value if hasattr(data, 'value') else str(data)

    # Handle list
    if isinstance(data, list):
        return [serialize_data(item, max_depth - 1) for item in data]

    # Handle dict
    if isinstance(data, dict):
        return {k: serialize_data(v, max_depth - 1) for k, v in data.items()}

    # Handle ChatMessage - show as {"role":"xxx", "text":"xxx"}
    if isinstance(data, ChatMessage):
        role_str = data.role.value if hasattr(data.role, 'value') else str(data.role)
        return {"role": role_str, "text": data.text}

    # Handle AgentExecutorResponse
    if isinstance(data, AgentExecutorResponse):
        result = {}
        if hasattr(data, "agent_run_response") and data.agent_run_response:
            result["agent_run_response"] = serialize_data(data.agent_run_response, max_depth - 1)
        if hasattr(data, "full_conversation") and data.full_conversation:
            result["full_conversation"] = serialize_data(data.full_conversation, max_depth - 1)
        return result if result else {"type": type(data).__name__}

    # Handle objects with to_dict() method
    if hasattr(data, "to_dict") and callable(data.to_dict):
        try:
            return data.to_dict()
        except Exception:
            pass

    # Handle Pydantic models with model_dump()
    if hasattr(data, "model_dump") and callable(data.model_dump):
        try:
            return data.model_dump()
        except Exception:
            pass

    # Handle dataclasses
    if hasattr(data, "__dataclass_fields__"):
        try:
            from dataclasses import asdict
            return asdict(data)
        except Exception:
            pass

    # Handle objects with __dict__
    if hasattr(data, "__dict__"):
        try:
            return {k: serialize_data(v, max_depth - 1) for k, v in data.__dict__.items() if not k.startswith("_")}
        except Exception:
            pass

    # Fallback: convert to string representation
    try:
        return str(data)
    except Exception:
        return f"<{type(data).__name__}>"
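A short, hypothetical illustration of the branches serialize_data takes for a few common inputs; the Status and Step types below are made up for the example, and the module still needs agent_framework importable for its own imports.

from dataclasses import dataclass
from enum import Enum

from agentdev.backend._utils import serialize_data


class Status(Enum):
    OK = "ok"


@dataclass
class Step:
    name: str
    count: int


print(serialize_data(Status.OK))           # 'ok' via the Enum branch
print(serialize_data([Step("fetch", 3)]))  # [{'name': 'fetch', 'count': 3}] via dataclasses.asdict
print(serialize_data({"nested": {"deep": 1}}, max_depth=1))
# {'nested': '<max_depth_reached: dict>'} because the recursion guard trips one level down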
agentdev/backend/code_analyzer.py
@@ -0,0 +1,41 @@
import inspect
from pydantic import BaseModel
from typing import Callable, Any
from agent_framework import FunctionExecutor, Executor
import os


class CodeLocation(BaseModel):
    file_path: str
    line_number: int

def get_cls_location(obj: object) -> CodeLocation | None:
    cls = obj.__class__
    try:
        file_path = inspect.getfile(cls)
        line_number = inspect.getsourcelines(cls)[1]
        print(f"Class {cls} defined in {file_path} at line {line_number}")
        abs_path = os.path.abspath(file_path)
        return CodeLocation(file_path=abs_path, line_number=line_number)
    except Exception as e:
        print(f"Could not get location for class {cls}: {e}")
        return None

def get_func_location(func: Callable) -> CodeLocation | None:
    try:
        file_path = inspect.getfile(func)
        line_number = inspect.getsourcelines(func)[1]
        print(f"Function {func.__name__} defined in {file_path} at line {line_number}")
        abs_path = os.path.abspath(file_path)
        return CodeLocation(file_path=abs_path, line_number=line_number)
    except Exception as e:
        print(f"Could not get location for function {func}: {e}")
        return None

def get_executor_location(executor: Any) -> CodeLocation | None:
    if isinstance(executor, FunctionExecutor):
        return get_func_location(executor._original_func)
    elif isinstance(executor, Executor):
        return get_cls_location(executor)
    else:
        return None
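As a quick, hypothetical illustration: get_func_location resolves a plain function to the source file and starting line that inspect reports (so it only works for code defined in a real file, not a REPL), while get_executor_location dispatches between function-based and class-based executors. The my_tool function below is invented for the example.

from agentdev.backend.code_analyzer import get_func_location


def my_tool(city: str) -> str:
    return f"Weather for {city}"


loc = get_func_location(my_tool)  # also prints a diagnostic line
if loc is not None:
    print(loc.file_path, loc.line_number)  # absolute path and 1-based line of the def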