django_agent_runtime-0.3.6-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- django_agent_runtime/__init__.py +25 -0
- django_agent_runtime/admin.py +155 -0
- django_agent_runtime/api/__init__.py +26 -0
- django_agent_runtime/api/permissions.py +109 -0
- django_agent_runtime/api/serializers.py +114 -0
- django_agent_runtime/api/views.py +472 -0
- django_agent_runtime/apps.py +26 -0
- django_agent_runtime/conf.py +241 -0
- django_agent_runtime/examples/__init__.py +10 -0
- django_agent_runtime/examples/langgraph_adapter.py +164 -0
- django_agent_runtime/examples/langgraph_tools.py +179 -0
- django_agent_runtime/examples/simple_chat.py +69 -0
- django_agent_runtime/examples/tool_agent.py +157 -0
- django_agent_runtime/management/__init__.py +2 -0
- django_agent_runtime/management/commands/__init__.py +2 -0
- django_agent_runtime/management/commands/runagent.py +419 -0
- django_agent_runtime/migrations/0001_initial.py +117 -0
- django_agent_runtime/migrations/0002_persistence_models.py +129 -0
- django_agent_runtime/migrations/0003_persistenceconversation_active_branch_id_and_more.py +212 -0
- django_agent_runtime/migrations/0004_add_anonymous_session_id.py +18 -0
- django_agent_runtime/migrations/__init__.py +2 -0
- django_agent_runtime/models/__init__.py +54 -0
- django_agent_runtime/models/base.py +450 -0
- django_agent_runtime/models/concrete.py +146 -0
- django_agent_runtime/persistence/__init__.py +60 -0
- django_agent_runtime/persistence/helpers.py +148 -0
- django_agent_runtime/persistence/models.py +506 -0
- django_agent_runtime/persistence/stores.py +1191 -0
- django_agent_runtime/runtime/__init__.py +23 -0
- django_agent_runtime/runtime/events/__init__.py +65 -0
- django_agent_runtime/runtime/events/base.py +135 -0
- django_agent_runtime/runtime/events/db.py +129 -0
- django_agent_runtime/runtime/events/redis.py +228 -0
- django_agent_runtime/runtime/events/sync.py +140 -0
- django_agent_runtime/runtime/interfaces.py +475 -0
- django_agent_runtime/runtime/llm/__init__.py +91 -0
- django_agent_runtime/runtime/llm/anthropic.py +249 -0
- django_agent_runtime/runtime/llm/litellm_adapter.py +173 -0
- django_agent_runtime/runtime/llm/openai.py +230 -0
- django_agent_runtime/runtime/queue/__init__.py +75 -0
- django_agent_runtime/runtime/queue/base.py +158 -0
- django_agent_runtime/runtime/queue/postgres.py +248 -0
- django_agent_runtime/runtime/queue/redis_streams.py +336 -0
- django_agent_runtime/runtime/queue/sync.py +277 -0
- django_agent_runtime/runtime/registry.py +186 -0
- django_agent_runtime/runtime/runner.py +540 -0
- django_agent_runtime/runtime/tracing/__init__.py +48 -0
- django_agent_runtime/runtime/tracing/langfuse.py +117 -0
- django_agent_runtime/runtime/tracing/noop.py +36 -0
- django_agent_runtime/urls.py +39 -0
- django_agent_runtime-0.3.6.dist-info/METADATA +723 -0
- django_agent_runtime-0.3.6.dist-info/RECORD +55 -0
- django_agent_runtime-0.3.6.dist-info/WHEEL +5 -0
- django_agent_runtime-0.3.6.dist-info/licenses/LICENSE +22 -0
- django_agent_runtime-0.3.6.dist-info/top_level.txt +1 -0
@@ -0,0 +1,1191 @@
+"""
+Django implementations of agent-runtime-core persistence stores.
+
+These stores use Django's async ORM to provide database-backed persistence
+for agent memory, conversations, tasks, preferences, knowledge, and audit.
+"""
+
+from datetime import datetime
+from typing import Any, Optional
+from uuid import UUID
+
+from agent_runtime_core.persistence import (
+    MemoryStore,
+    ConversationStore,
+    TaskStore,
+    PreferencesStore,
+    KnowledgeStore,
+    AuditStore,
+    Scope,
+    Conversation,
+    ConversationMessage,
+    Task,
+    TaskList,
+    TaskState,
+    ToolCall,
+    ToolResult,
+    Fact,
+    FactType,
+    Summary as CoreSummary,
+    Embedding as CoreEmbedding,
+    AuditEntry as CoreAuditEntry,
+    AuditEventType,
+    ErrorRecord as CoreErrorRecord,
+    ErrorSeverity,
+    PerformanceMetric as CorePerformanceMetric,
+)
+
+from django_agent_runtime.persistence.models import (
+    Memory,
+    PersistenceConversation,
+    PersistenceMessage,
+    PersistenceTaskList,
+    PersistenceTask,
+    Preferences,
+    TaskStateChoices,
+    Fact as FactModel,
+    FactTypeChoices,
+    Summary as SummaryModel,
+    Embedding as EmbeddingModel,
+    AuditEntry as AuditEntryModel,
+    AuditEventTypeChoices,
+    ErrorRecord as ErrorRecordModel,
+    ErrorSeverityChoices,
+    PerformanceMetric as PerformanceMetricModel,
+)
+
+
+class DjangoMemoryStore(MemoryStore):
+    """
+    Django-backed memory store.
+
+    Stores key-value pairs scoped to a user.
+    The scope parameter is ignored - user context provides scoping.
+    """
+
+    def __init__(self, user):
+        self.user = user
+
+    async def get(self, key: str, scope: Scope = Scope.PROJECT) -> Optional[Any]:
+        """Get a value by key."""
+        try:
+            entry = await Memory.objects.aget(user=self.user, key=key)
+            return entry.value
+        except Memory.DoesNotExist:
+            return None
+
+    async def set(self, key: str, value: Any, scope: Scope = Scope.PROJECT) -> None:
+        """Set a value by key."""
+        try:
+            entry = await Memory.objects.aget(user=self.user, key=key)
+            entry.value = value
+            await entry.asave(update_fields=["value", "updated_at"])
+        except Memory.DoesNotExist:
+            await Memory.objects.acreate(user=self.user, key=key, value=value)
+
+    async def delete(self, key: str, scope: Scope = Scope.PROJECT) -> bool:
+        """Delete a key. Returns True if key existed."""
+        deleted, _ = await Memory.objects.filter(user=self.user, key=key).adelete()
+        return deleted > 0
+
+    async def list_keys(
+        self, scope: Scope = Scope.PROJECT, prefix: Optional[str] = None
+    ) -> list[str]:
+        """List all keys, optionally filtered by prefix."""
+        qs = Memory.objects.filter(user=self.user)
+        if prefix:
+            qs = qs.filter(key__startswith=prefix)
+        keys = []
+        async for entry in qs.values_list("key", flat=True):
+            keys.append(entry)
+        return keys
+
+    async def clear(self, scope: Scope = Scope.PROJECT) -> None:
+        """Clear all keys."""
+        await Memory.objects.filter(user=self.user).adelete()
+
+
+class DjangoConversationStore(ConversationStore):
+    """
+    Django-backed conversation store.
+
+    Stores conversations and messages scoped to a user.
+    """
+
+    def __init__(self, user):
+        self.user = user
+
+    def _message_to_db(self, msg: ConversationMessage) -> dict:
+        """Convert ConversationMessage to database fields."""
+        tool_calls_data = []
+        for tc in msg.tool_calls:
+            tool_calls_data.append({
+                "id": tc.id,
+                "name": tc.name,
+                "arguments": tc.arguments,
+                "timestamp": tc.timestamp.isoformat() if tc.timestamp else None,
+            })
+
+        return {
+            "id": msg.id,
+            "role": msg.role,
+            "content": msg.content,
+            "tool_calls": tool_calls_data,
+            "tool_call_id": msg.tool_call_id or "",
+            "model": msg.model or "",
+            "usage": msg.usage,
+            "metadata": msg.metadata,
+            "timestamp": msg.timestamp,
+            "parent_message_id": msg.parent_message_id,
+            "branch_id": msg.branch_id,
+        }
+
+    def _db_to_message(self, db_msg: PersistenceMessage) -> ConversationMessage:
+        """Convert database message to ConversationMessage."""
+        tool_calls = []
+        for tc_data in db_msg.tool_calls or []:
+            ts = tc_data.get("timestamp")
+            tool_calls.append(ToolCall(
+                id=tc_data["id"],
+                name=tc_data["name"],
+                arguments=tc_data.get("arguments", {}),
+                timestamp=datetime.fromisoformat(ts) if ts else datetime.utcnow(),
+            ))
+
+        return ConversationMessage(
+            id=db_msg.id,
+            role=db_msg.role,
+            content=db_msg.content,
+            timestamp=db_msg.timestamp,
+            tool_calls=tool_calls,
+            tool_call_id=db_msg.tool_call_id or None,
+            model=db_msg.model or None,
+            usage=db_msg.usage or {},
+            metadata=db_msg.metadata or {},
+            parent_message_id=db_msg.parent_message_id,
+            branch_id=db_msg.branch_id,
+        )
+
+    async def save(self, conversation: Conversation, scope: Scope = Scope.PROJECT) -> None:
+        """Save or update a conversation."""
+        try:
+            db_conv = await PersistenceConversation.objects.aget(id=conversation.id)
+            db_conv.title = conversation.title or ""
+            db_conv.agent_key = conversation.agent_key or ""
+            db_conv.summary = conversation.summary or ""
+            db_conv.metadata = conversation.metadata
+            db_conv.parent_conversation_id = conversation.parent_conversation_id
+            db_conv.active_branch_id = conversation.active_branch_id
+            await db_conv.asave(update_fields=[
+                "title", "agent_key", "summary", "metadata",
+                "parent_conversation_id", "active_branch_id", "updated_at"
+            ])
+            created = False
+        except PersistenceConversation.DoesNotExist:
+            db_conv = await PersistenceConversation.objects.acreate(
+                id=conversation.id,
+                user=self.user,
+                title=conversation.title or "",
+                agent_key=conversation.agent_key or "",
+                summary=conversation.summary or "",
+                metadata=conversation.metadata,
+                parent_conversation_id=conversation.parent_conversation_id,
+                active_branch_id=conversation.active_branch_id,
+            )
+            created = True
+
+        # Save messages if this is a new conversation or we're doing a full save
+        if created and conversation.messages:
+            for msg in conversation.messages:
+                msg_data = self._message_to_db(msg)
+                await PersistenceMessage.objects.acreate(
+                    conversation=db_conv,
+                    **msg_data,
+                )
+
+    async def get(
+        self, conversation_id: UUID, scope: Scope = Scope.PROJECT
+    ) -> Optional[Conversation]:
+        """Get a conversation by ID."""
+        try:
+            db_conv = await PersistenceConversation.objects.aget(
+                id=conversation_id, user=self.user
+            )
+        except PersistenceConversation.DoesNotExist:
+            return None
+
+        messages = []
+        async for db_msg in db_conv.messages.all().order_by("timestamp"):
+            messages.append(self._db_to_message(db_msg))
+
+        return Conversation(
+            id=db_conv.id,
+            title=db_conv.title or None,
+            messages=messages,
+            created_at=db_conv.created_at,
+            updated_at=db_conv.updated_at,
+            metadata=db_conv.metadata or {},
+            agent_key=db_conv.agent_key or None,
+            summary=db_conv.summary or None,
+            parent_conversation_id=db_conv.parent_conversation_id,
+            active_branch_id=db_conv.active_branch_id,
+        )
+
+    async def delete(self, conversation_id: UUID, scope: Scope = Scope.PROJECT) -> bool:
+        """Delete a conversation. Returns True if it existed."""
+        deleted, _ = await PersistenceConversation.objects.filter(
+            id=conversation_id, user=self.user
+        ).adelete()
+        return deleted > 0
+
+    async def list_conversations(
+        self,
+        scope: Scope = Scope.PROJECT,
+        limit: int = 100,
+        offset: int = 0,
+        agent_key: Optional[str] = None,
+    ) -> list[Conversation]:
+        """List conversations, optionally filtered by agent."""
+        qs = PersistenceConversation.objects.filter(user=self.user)
+        if agent_key:
+            qs = qs.filter(agent_key=agent_key)
+        qs = qs.order_by("-updated_at")[offset : offset + limit]
+
+        conversations = []
+        async for db_conv in qs:
+            conversations.append(
+                Conversation(
+                    id=db_conv.id,
+                    title=db_conv.title or None,
+                    messages=[],  # Don't load messages for list
+                    created_at=db_conv.created_at,
+                    updated_at=db_conv.updated_at,
+                    metadata=db_conv.metadata or {},
+                    agent_key=db_conv.agent_key or None,
+                    summary=db_conv.summary or None,
+                    parent_conversation_id=db_conv.parent_conversation_id,
+                    active_branch_id=db_conv.active_branch_id,
+                )
+            )
+        return conversations
+
+    async def add_message(
+        self,
+        conversation_id: UUID,
+        message: ConversationMessage,
+        scope: Scope = Scope.PROJECT,
+    ) -> None:
+        """Add a message to an existing conversation."""
+        try:
+            db_conv = await PersistenceConversation.objects.aget(
+                id=conversation_id, user=self.user
+            )
+        except PersistenceConversation.DoesNotExist:
+            raise ValueError(f"Conversation {conversation_id} not found")
+
+        msg_data = self._message_to_db(message)
+        await PersistenceMessage.objects.acreate(conversation=db_conv, **msg_data)
+
+        # Update conversation timestamp
+        db_conv.updated_at = datetime.utcnow()
+        await db_conv.asave(update_fields=["updated_at"])
+
+    async def get_messages(
+        self,
+        conversation_id: UUID,
+        scope: Scope = Scope.PROJECT,
+        limit: Optional[int] = None,
+        before: Optional[datetime] = None,
+    ) -> list[ConversationMessage]:
+        """Get messages from a conversation."""
+        try:
+            db_conv = await PersistenceConversation.objects.aget(
+                id=conversation_id, user=self.user
+            )
+        except PersistenceConversation.DoesNotExist:
+            return []
+
+        qs = db_conv.messages.all().order_by("timestamp")
+        if before:
+            qs = qs.filter(timestamp__lt=before)
+        if limit:
+            qs = qs[:limit]
+
+        messages = []
+        async for db_msg in qs:
+            messages.append(self._db_to_message(db_msg))
+        return messages
+
+
+def _task_state_to_db(state: TaskState) -> str:
+    """Convert TaskState to database choice."""
+    mapping = {
+        TaskState.NOT_STARTED: TaskStateChoices.NOT_STARTED,
+        TaskState.IN_PROGRESS: TaskStateChoices.IN_PROGRESS,
+        TaskState.COMPLETE: TaskStateChoices.COMPLETE,
+        TaskState.CANCELLED: TaskStateChoices.CANCELLED,
+    }
+    return mapping.get(state, TaskStateChoices.NOT_STARTED)
+
+
+def _db_to_task_state(db_state: str) -> TaskState:
+    """Convert database choice to TaskState."""
+    mapping = {
+        TaskStateChoices.NOT_STARTED: TaskState.NOT_STARTED,
+        TaskStateChoices.IN_PROGRESS: TaskState.IN_PROGRESS,
+        TaskStateChoices.COMPLETE: TaskState.COMPLETE,
+        TaskStateChoices.CANCELLED: TaskState.CANCELLED,
+    }
+    return mapping.get(db_state, TaskState.NOT_STARTED)
+
+
+class DjangoTaskStore(TaskStore):
+    """
+    Django-backed task store.
+
+    Stores task lists and tasks scoped to a user.
+    """
+
+    def __init__(self, user):
+        self.user = user
+
+    def _db_to_task(self, db_task: PersistenceTask) -> Task:
+        """Convert database task to Task."""
+        # Convert dependencies from JSON list of strings to list of UUIDs
+        dependencies = []
+        for dep in db_task.dependencies or []:
+            if isinstance(dep, str):
+                dependencies.append(UUID(dep))
+            else:
+                dependencies.append(dep)
+
+        return Task(
+            id=db_task.id,
+            name=db_task.name,
+            description=db_task.description,
+            state=_db_to_task_state(db_task.state),
+            parent_id=db_task.parent_id,
+            created_at=db_task.created_at,
+            updated_at=db_task.updated_at,
+            metadata=db_task.metadata or {},
+            dependencies=dependencies,
+            priority=db_task.priority,
+            due_at=db_task.due_at,
+            completed_at=db_task.completed_at,
+            checkpoint_data=db_task.checkpoint_data or {},
+            checkpoint_at=db_task.checkpoint_at,
+            attempts=db_task.attempts,
+            last_error=db_task.last_error or None,
+        )
+
+    async def save(self, task_list: TaskList, scope: Scope = Scope.PROJECT) -> None:
+        """Save or update a task list."""
+        try:
+            db_list = await PersistenceTaskList.objects.aget(id=task_list.id)
+            db_list.name = task_list.name
+            db_list.conversation_id = task_list.conversation_id
+            db_list.run_id = task_list.run_id
+            await db_list.asave(update_fields=["name", "conversation_id", "run_id", "updated_at"])
+            created = False
+        except PersistenceTaskList.DoesNotExist:
+            db_list = await PersistenceTaskList.objects.acreate(
+                id=task_list.id,
+                user=self.user,
+                name=task_list.name,
+                conversation_id=task_list.conversation_id,
+                run_id=task_list.run_id,
+            )
+            created = True
+
+        # If new, create all tasks
+        if created and task_list.tasks:
+            for task in task_list.tasks:
+                # Convert dependencies to list of strings for JSON storage
+                deps = [str(d) for d in task.dependencies] if task.dependencies else []
+                await PersistenceTask.objects.acreate(
+                    id=task.id,
+                    task_list=db_list,
+                    name=task.name,
+                    description=task.description,
+                    state=_task_state_to_db(task.state),
+                    parent_id=task.parent_id,
+                    metadata=task.metadata,
+                    dependencies=deps,
+                    priority=task.priority,
+                    due_at=task.due_at,
+                    completed_at=task.completed_at,
+                    checkpoint_data=task.checkpoint_data,
+                    checkpoint_at=task.checkpoint_at,
+                    attempts=task.attempts,
+                    last_error=task.last_error or "",
+                )
+
+    async def get(
+        self, task_list_id: UUID, scope: Scope = Scope.PROJECT
+    ) -> Optional[TaskList]:
+        """Get a task list by ID."""
+        try:
+            db_list = await PersistenceTaskList.objects.aget(
+                id=task_list_id, user=self.user
+            )
+        except PersistenceTaskList.DoesNotExist:
+            return None
+
+        tasks = []
+        async for db_task in db_list.tasks.all().order_by("created_at"):
+            tasks.append(self._db_to_task(db_task))
+
+        return TaskList(
+            id=db_list.id,
+            name=db_list.name,
+            tasks=tasks,
+            created_at=db_list.created_at,
+            updated_at=db_list.updated_at,
+            conversation_id=db_list.conversation_id,
+            run_id=db_list.run_id,
+        )
+
+    async def delete(self, task_list_id: UUID, scope: Scope = Scope.PROJECT) -> bool:
+        """Delete a task list. Returns True if it existed."""
+        deleted, _ = await PersistenceTaskList.objects.filter(
+            id=task_list_id, user=self.user
+        ).adelete()
+        return deleted > 0
+
+    async def get_by_conversation(
+        self, conversation_id: UUID, scope: Scope = Scope.PROJECT
+    ) -> Optional[TaskList]:
+        """Get the task list associated with a conversation."""
+        try:
+            db_list = await PersistenceTaskList.objects.aget(
+                conversation_id=conversation_id, user=self.user
+            )
+        except PersistenceTaskList.DoesNotExist:
+            return None
+
+        tasks = []
+        async for db_task in db_list.tasks.all().order_by("created_at"):
+            tasks.append(self._db_to_task(db_task))
+
+        return TaskList(
+            id=db_list.id,
+            name=db_list.name,
+            tasks=tasks,
+            created_at=db_list.created_at,
+            updated_at=db_list.updated_at,
+            conversation_id=db_list.conversation_id,
+            run_id=db_list.run_id,
+        )
+
+    async def update_task(
+        self,
+        task_list_id: UUID,
+        task_id: UUID,
+        state: Optional[TaskState] = None,
+        name: Optional[str] = None,
+        description: Optional[str] = None,
+        scope: Scope = Scope.PROJECT,
+    ) -> None:
+        """Update a specific task in a task list."""
+        try:
+            db_list = await PersistenceTaskList.objects.aget(
+                id=task_list_id, user=self.user
+            )
+        except PersistenceTaskList.DoesNotExist:
+            raise ValueError(f"Task list {task_list_id} not found")
+
+        try:
+            db_task = await PersistenceTask.objects.aget(id=task_id, task_list=db_list)
+        except PersistenceTask.DoesNotExist:
+            raise ValueError(f"Task {task_id} not found in list {task_list_id}")
+
+        update_fields = []
+        if state is not None:
+            db_task.state = _task_state_to_db(state)
+            update_fields.append("state")
+        if name is not None:
+            db_task.name = name
+            update_fields.append("name")
+        if description is not None:
+            db_task.description = description
+            update_fields.append("description")
+
+        if update_fields:
+            update_fields.append("updated_at")
+            await db_task.asave(update_fields=update_fields)
+
+
+class DjangoPreferencesStore(PreferencesStore):
+    """
+    Django-backed preferences store.
+
+    Stores user preferences as key-value pairs.
+    """
+
+    def __init__(self, user):
+        self.user = user
+
+    async def get(self, key: str, scope: Scope = Scope.GLOBAL) -> Optional[Any]:
+        """Get a preference value."""
+        try:
+            entry = await Preferences.objects.aget(user=self.user, key=key)
+            return entry.value
+        except Preferences.DoesNotExist:
+            return None
+
+    async def set(self, key: str, value: Any, scope: Scope = Scope.GLOBAL) -> None:
+        """Set a preference value."""
+        try:
+            entry = await Preferences.objects.aget(user=self.user, key=key)
+            entry.value = value
+            await entry.asave(update_fields=["value", "updated_at"])
+        except Preferences.DoesNotExist:
+            await Preferences.objects.acreate(user=self.user, key=key, value=value)
+
+    async def delete(self, key: str, scope: Scope = Scope.GLOBAL) -> bool:
+        """Delete a preference. Returns True if it existed."""
+        deleted, _ = await Preferences.objects.filter(user=self.user, key=key).adelete()
+        return deleted > 0
+
+    async def get_all(self, scope: Scope = Scope.GLOBAL) -> dict[str, Any]:
+        """Get all preferences."""
+        prefs = {}
+        async for entry in Preferences.objects.filter(user=self.user):
+            prefs[entry.key] = entry.value
+        return prefs
+
+
+# =============================================================================
+# Knowledge Store Helper Functions
+# =============================================================================
+
+
+def _fact_type_to_db(fact_type: FactType) -> str:
+    """Convert FactType to database choice."""
+    mapping = {
+        FactType.USER: FactTypeChoices.USER,
+        FactType.PROJECT: FactTypeChoices.PROJECT,
+        FactType.PREFERENCE: FactTypeChoices.PREFERENCE,
+        FactType.CONTEXT: FactTypeChoices.CONTEXT,
+        FactType.CUSTOM: FactTypeChoices.CUSTOM,
+    }
+    return mapping.get(fact_type, FactTypeChoices.CUSTOM)
+
+
+def _db_to_fact_type(db_type: str) -> FactType:
+    """Convert database choice to FactType."""
+    mapping = {
+        FactTypeChoices.USER: FactType.USER,
+        FactTypeChoices.PROJECT: FactType.PROJECT,
+        FactTypeChoices.PREFERENCE: FactType.PREFERENCE,
+        FactTypeChoices.CONTEXT: FactType.CONTEXT,
+        FactTypeChoices.CUSTOM: FactType.CUSTOM,
+    }
+    return mapping.get(db_type, FactType.CUSTOM)
+
+
+class DjangoKnowledgeStore(KnowledgeStore):
+    """
+    Django-backed knowledge store.
+
+    Stores facts, summaries, and optionally embeddings scoped to a user.
+    """
+
+    def __init__(self, user):
+        self.user = user
+
+    def _db_to_fact(self, db_fact: FactModel) -> Fact:
+        """Convert database fact to Fact."""
+        return Fact(
+            id=db_fact.id,
+            key=db_fact.key,
+            value=db_fact.value,
+            fact_type=_db_to_fact_type(db_fact.fact_type),
+            confidence=db_fact.confidence,
+            source=db_fact.source or None,
+            created_at=db_fact.created_at,
+            updated_at=db_fact.updated_at,
+            expires_at=db_fact.expires_at,
+            metadata=db_fact.metadata or {},
+        )
+
+    def _db_to_summary(self, db_summary: SummaryModel) -> CoreSummary:
+        """Convert database summary to Summary."""
+        # Convert conversation_ids from JSON list of strings to list of UUIDs
+        conv_ids = []
+        for cid in db_summary.conversation_ids or []:
+            if isinstance(cid, str):
+                conv_ids.append(UUID(cid))
+            else:
+                conv_ids.append(cid)
+
+        return CoreSummary(
+            id=db_summary.id,
+            content=db_summary.content,
+            conversation_id=db_summary.conversation_id,
+            conversation_ids=conv_ids,
+            start_time=db_summary.start_time,
+            end_time=db_summary.end_time,
+            created_at=db_summary.created_at,
+            metadata=db_summary.metadata or {},
+        )
+
+    def _db_to_embedding(self, db_emb: EmbeddingModel) -> CoreEmbedding:
+        """Convert database embedding to Embedding."""
+        return CoreEmbedding(
+            id=db_emb.id,
+            vector=db_emb.vector,
+            content=db_emb.content,
+            content_type=db_emb.content_type,
+            source_id=db_emb.source_id,
+            model=db_emb.model or None,
+            dimensions=db_emb.dimensions,
+            created_at=db_emb.created_at,
+            metadata=db_emb.metadata or {},
+        )
+
+    # Fact operations
+    async def save_fact(self, fact: Fact, scope: Scope = Scope.PROJECT) -> None:
+        """Save or update a fact."""
+        try:
+            db_fact = await FactModel.objects.aget(id=fact.id)
+            db_fact.key = fact.key
+            db_fact.value = fact.value
+            db_fact.fact_type = _fact_type_to_db(fact.fact_type)
+            db_fact.confidence = fact.confidence
+            db_fact.source = fact.source or ""
+            db_fact.expires_at = fact.expires_at
+            db_fact.metadata = fact.metadata
+            await db_fact.asave(update_fields=[
+                "key", "value", "fact_type", "confidence", "source",
+                "expires_at", "metadata", "updated_at"
+            ])
+        except FactModel.DoesNotExist:
+            await FactModel.objects.acreate(
+                id=fact.id,
+                user=self.user,
+                key=fact.key,
+                value=fact.value,
+                fact_type=_fact_type_to_db(fact.fact_type),
+                confidence=fact.confidence,
+                source=fact.source or "",
+                expires_at=fact.expires_at,
+                metadata=fact.metadata,
+            )
+
+    async def get_fact(self, fact_id: UUID, scope: Scope = Scope.PROJECT) -> Optional[Fact]:
+        """Get a fact by ID."""
+        try:
+            db_fact = await FactModel.objects.aget(id=fact_id, user=self.user)
+            return self._db_to_fact(db_fact)
+        except FactModel.DoesNotExist:
+            return None
+
+    async def get_fact_by_key(self, key: str, scope: Scope = Scope.PROJECT) -> Optional[Fact]:
+        """Get a fact by its key."""
+        try:
+            db_fact = await FactModel.objects.aget(user=self.user, key=key)
+            return self._db_to_fact(db_fact)
+        except FactModel.DoesNotExist:
+            return None
+
+    async def list_facts(
+        self,
+        scope: Scope = Scope.PROJECT,
+        fact_type: Optional[FactType] = None,
+        limit: int = 100,
+    ) -> list[Fact]:
+        """List facts, optionally filtered by type."""
+        qs = FactModel.objects.filter(user=self.user)
+        if fact_type:
+            qs = qs.filter(fact_type=_fact_type_to_db(fact_type))
+        qs = qs.order_by("-updated_at")[:limit]
+
+        facts = []
+        async for db_fact in qs:
+            facts.append(self._db_to_fact(db_fact))
+        return facts
+
+    async def delete_fact(self, fact_id: UUID, scope: Scope = Scope.PROJECT) -> bool:
+        """Delete a fact. Returns True if it existed."""
+        deleted, _ = await FactModel.objects.filter(id=fact_id, user=self.user).adelete()
+        return deleted > 0
+
+    # Summary operations
+    async def save_summary(self, summary: CoreSummary, scope: Scope = Scope.PROJECT) -> None:
+        """Save or update a summary."""
+        conv_ids = [str(cid) for cid in summary.conversation_ids] if summary.conversation_ids else []
+        try:
+            db_summary = await SummaryModel.objects.aget(id=summary.id)
+            db_summary.content = summary.content
+            db_summary.conversation_id = summary.conversation_id
+            db_summary.conversation_ids = conv_ids
+            db_summary.start_time = summary.start_time
+            db_summary.end_time = summary.end_time
+            db_summary.metadata = summary.metadata
+            await db_summary.asave(update_fields=[
+                "content", "conversation_id", "conversation_ids",
+                "start_time", "end_time", "metadata"
+            ])
+        except SummaryModel.DoesNotExist:
+            await SummaryModel.objects.acreate(
+                id=summary.id,
+                user=self.user,
+                content=summary.content,
+                conversation_id=summary.conversation_id,
+                conversation_ids=conv_ids,
+                start_time=summary.start_time,
+                end_time=summary.end_time,
+                metadata=summary.metadata,
+            )
+
+    async def get_summary(self, summary_id: UUID, scope: Scope = Scope.PROJECT) -> Optional[CoreSummary]:
+        """Get a summary by ID."""
+        try:
+            db_summary = await SummaryModel.objects.aget(id=summary_id, user=self.user)
+            return self._db_to_summary(db_summary)
+        except SummaryModel.DoesNotExist:
+            return None
+
+    async def get_summaries_for_conversation(
+        self,
+        conversation_id: UUID,
+        scope: Scope = Scope.PROJECT,
+    ) -> list[CoreSummary]:
+        """Get all summaries for a conversation."""
+        qs = SummaryModel.objects.filter(
+            user=self.user,
+            conversation_id=conversation_id,
+        ).order_by("-created_at")
+
+        summaries = []
+        async for db_summary in qs:
+            summaries.append(self._db_to_summary(db_summary))
+        return summaries
+
+    async def delete_summary(self, summary_id: UUID, scope: Scope = Scope.PROJECT) -> bool:
+        """Delete a summary. Returns True if it existed."""
+        deleted, _ = await SummaryModel.objects.filter(id=summary_id, user=self.user).adelete()
+        return deleted > 0
+
+    # Embedding operations (optional)
+    async def save_embedding(self, embedding: CoreEmbedding, scope: Scope = Scope.PROJECT) -> None:
+        """Save an embedding."""
+        try:
+            db_emb = await EmbeddingModel.objects.aget(id=embedding.id)
+            db_emb.vector = embedding.vector
+            db_emb.content = embedding.content
+            db_emb.content_type = embedding.content_type
+            db_emb.source_id = embedding.source_id
+            db_emb.model = embedding.model or ""
+            db_emb.dimensions = embedding.dimensions
+            db_emb.metadata = embedding.metadata
+            await db_emb.asave(update_fields=[
+                "vector", "content", "content_type", "source_id",
+                "model", "dimensions", "metadata"
+            ])
+        except EmbeddingModel.DoesNotExist:
+            await EmbeddingModel.objects.acreate(
+                id=embedding.id,
+                user=self.user,
+                vector=embedding.vector,
+                content=embedding.content,
+                content_type=embedding.content_type,
+                source_id=embedding.source_id,
+                model=embedding.model or "",
+                dimensions=embedding.dimensions,
+                metadata=embedding.metadata,
+            )
+
+    async def search_similar(
+        self,
+        query_vector: list[float],
+        limit: int = 10,
+        scope: Scope = Scope.PROJECT,
+        content_type: Optional[str] = None,
+    ) -> list[tuple[CoreEmbedding, float]]:
+        """
+        Search for similar embeddings using cosine similarity.
+
+        Note: This is a basic implementation using Python. For production use
+        with large datasets, consider using pgvector extension for PostgreSQL.
+        """
+        import math
+
+        def cosine_similarity(v1: list[float], v2: list[float]) -> float:
+            """Calculate cosine similarity between two vectors."""
+            if len(v1) != len(v2):
+                return 0.0
+            dot_product = sum(a * b for a, b in zip(v1, v2))
+            norm1 = math.sqrt(sum(a * a for a in v1))
+            norm2 = math.sqrt(sum(b * b for b in v2))
+            if norm1 == 0 or norm2 == 0:
+                return 0.0
+            return dot_product / (norm1 * norm2)
+
+        qs = EmbeddingModel.objects.filter(user=self.user)
+        if content_type:
+            qs = qs.filter(content_type=content_type)
+
+        results = []
+        async for db_emb in qs:
+            score = cosine_similarity(query_vector, db_emb.vector)
+            results.append((self._db_to_embedding(db_emb), score))
+
+        # Sort by score descending and limit
+        results.sort(key=lambda x: x[1], reverse=True)
+        return results[:limit]
+
+    async def delete_embedding(self, embedding_id: UUID, scope: Scope = Scope.PROJECT) -> bool:
+        """Delete an embedding. Returns True if it existed."""
+        deleted, _ = await EmbeddingModel.objects.filter(id=embedding_id, user=self.user).adelete()
+        return deleted > 0
+
+
+# =============================================================================
+# Audit Store Helper Functions
+# =============================================================================
+
+
+def _event_type_to_db(event_type: AuditEventType) -> str:
+    """Convert AuditEventType to database choice."""
+    mapping = {
+        AuditEventType.CONVERSATION_START: AuditEventTypeChoices.CONVERSATION_START,
+        AuditEventType.CONVERSATION_END: AuditEventTypeChoices.CONVERSATION_END,
+        AuditEventType.MESSAGE_SENT: AuditEventTypeChoices.MESSAGE_SENT,
+        AuditEventType.MESSAGE_RECEIVED: AuditEventTypeChoices.MESSAGE_RECEIVED,
+        AuditEventType.TOOL_CALL: AuditEventTypeChoices.TOOL_CALL,
+        AuditEventType.TOOL_RESULT: AuditEventTypeChoices.TOOL_RESULT,
+        AuditEventType.TOOL_ERROR: AuditEventTypeChoices.TOOL_ERROR,
+        AuditEventType.AGENT_START: AuditEventTypeChoices.AGENT_START,
+        AuditEventType.AGENT_END: AuditEventTypeChoices.AGENT_END,
+        AuditEventType.AGENT_ERROR: AuditEventTypeChoices.AGENT_ERROR,
+        AuditEventType.CHECKPOINT_SAVED: AuditEventTypeChoices.CHECKPOINT_SAVED,
+        AuditEventType.CHECKPOINT_RESTORED: AuditEventTypeChoices.CHECKPOINT_RESTORED,
+        AuditEventType.CUSTOM: AuditEventTypeChoices.CUSTOM,
+    }
+    return mapping.get(event_type, AuditEventTypeChoices.CUSTOM)
+
+
+def _db_to_event_type(db_type: str) -> AuditEventType:
+    """Convert database choice to AuditEventType."""
+    mapping = {
+        AuditEventTypeChoices.CONVERSATION_START: AuditEventType.CONVERSATION_START,
+        AuditEventTypeChoices.CONVERSATION_END: AuditEventType.CONVERSATION_END,
+        AuditEventTypeChoices.MESSAGE_SENT: AuditEventType.MESSAGE_SENT,
+        AuditEventTypeChoices.MESSAGE_RECEIVED: AuditEventType.MESSAGE_RECEIVED,
+        AuditEventTypeChoices.TOOL_CALL: AuditEventType.TOOL_CALL,
+        AuditEventTypeChoices.TOOL_RESULT: AuditEventType.TOOL_RESULT,
+        AuditEventTypeChoices.TOOL_ERROR: AuditEventType.TOOL_ERROR,
+        AuditEventTypeChoices.AGENT_START: AuditEventType.AGENT_START,
+        AuditEventTypeChoices.AGENT_END: AuditEventType.AGENT_END,
+        AuditEventTypeChoices.AGENT_ERROR: AuditEventType.AGENT_ERROR,
+        AuditEventTypeChoices.CHECKPOINT_SAVED: AuditEventType.CHECKPOINT_SAVED,
+        AuditEventTypeChoices.CHECKPOINT_RESTORED: AuditEventType.CHECKPOINT_RESTORED,
+        AuditEventTypeChoices.CUSTOM: AuditEventType.CUSTOM,
+    }
+    return mapping.get(db_type, AuditEventType.CUSTOM)
+
+
+def _severity_to_db(severity: ErrorSeverity) -> str:
+    """Convert ErrorSeverity to database choice."""
+    mapping = {
+        ErrorSeverity.DEBUG: ErrorSeverityChoices.DEBUG,
+        ErrorSeverity.INFO: ErrorSeverityChoices.INFO,
+        ErrorSeverity.WARNING: ErrorSeverityChoices.WARNING,
+        ErrorSeverity.ERROR: ErrorSeverityChoices.ERROR,
+        ErrorSeverity.CRITICAL: ErrorSeverityChoices.CRITICAL,
+    }
+    return mapping.get(severity, ErrorSeverityChoices.ERROR)
+
+
+def _db_to_severity(db_severity: str) -> ErrorSeverity:
+    """Convert database choice to ErrorSeverity."""
+    mapping = {
+        ErrorSeverityChoices.DEBUG: ErrorSeverity.DEBUG,
+        ErrorSeverityChoices.INFO: ErrorSeverity.INFO,
+        ErrorSeverityChoices.WARNING: ErrorSeverity.WARNING,
+        ErrorSeverityChoices.ERROR: ErrorSeverity.ERROR,
+        ErrorSeverityChoices.CRITICAL: ErrorSeverity.CRITICAL,
+    }
+    return mapping.get(db_severity, ErrorSeverity.ERROR)
+
+
+class DjangoAuditStore(AuditStore):
+    """
+    Django-backed audit store.
+
+    Stores audit entries, error records, and performance metrics scoped to a user.
+    """
+
+    def __init__(self, user):
+        self.user = user
+
+    def _db_to_audit_entry(self, db_entry: AuditEntryModel) -> CoreAuditEntry:
+        """Convert database audit entry to AuditEntry."""
+        return CoreAuditEntry(
+            id=db_entry.id,
+            event_type=_db_to_event_type(db_entry.event_type),
+            timestamp=db_entry.timestamp,
+            conversation_id=db_entry.conversation_id,
+            run_id=db_entry.run_id,
+            agent_key=db_entry.agent_key or None,
+            action=db_entry.action or None,
+            details=db_entry.details or {},
+            actor_type=db_entry.actor_type,
+            actor_id=db_entry.actor_id or None,
+            request_id=db_entry.request_id or None,
+            parent_event_id=db_entry.parent_event_id,
+            metadata=db_entry.metadata or {},
+        )
+
+    def _db_to_error_record(self, db_error: ErrorRecordModel) -> CoreErrorRecord:
+        """Convert database error record to ErrorRecord."""
+        return CoreErrorRecord(
+            id=db_error.id,
+            timestamp=db_error.timestamp,
+            severity=_db_to_severity(db_error.severity),
+            error_type=db_error.error_type or None,
+            message=db_error.message or None,
+            stack_trace=db_error.stack_trace or None,
+            conversation_id=db_error.conversation_id,
+            run_id=db_error.run_id,
+            agent_key=db_error.agent_key or None,
+            context=db_error.context or {},
+            resolved=db_error.resolved,
+            resolved_at=db_error.resolved_at,
+            resolution_notes=db_error.resolution_notes or None,
+            metadata=db_error.metadata or {},
+        )
+
+    def _db_to_metric(self, db_metric: PerformanceMetricModel) -> CorePerformanceMetric:
+        """Convert database metric to PerformanceMetric."""
+        return CorePerformanceMetric(
+            id=db_metric.id,
+            name=db_metric.name,
+            value=db_metric.value,
+            unit=db_metric.unit or None,
+            timestamp=db_metric.timestamp,
+            conversation_id=db_metric.conversation_id,
+            run_id=db_metric.run_id,
+            agent_key=db_metric.agent_key or None,
+            tags=db_metric.tags or {},
+            metadata=db_metric.metadata or {},
+        )
+
+    # Audit entry operations
+    async def log_event(self, entry: CoreAuditEntry, scope: Scope = Scope.PROJECT) -> None:
+        """Log an audit event."""
+        await AuditEntryModel.objects.acreate(
+            id=entry.id,
+            user=self.user,
+            event_type=_event_type_to_db(entry.event_type),
+            conversation_id=entry.conversation_id,
+            run_id=entry.run_id,
+            agent_key=entry.agent_key or "",
+            action=entry.action or "",
+            details=entry.details,
+            actor_type=entry.actor_type,
+            actor_id=entry.actor_id or "",
+            request_id=entry.request_id or "",
+            parent_event_id=entry.parent_event_id,
+            metadata=entry.metadata,
+        )
+
+    async def get_events(
+        self,
+        scope: Scope = Scope.PROJECT,
+        conversation_id: Optional[UUID] = None,
+        run_id: Optional[UUID] = None,
+        event_types: Optional[list[AuditEventType]] = None,
+        start_time: Optional[datetime] = None,
+        end_time: Optional[datetime] = None,
+        limit: int = 100,
+    ) -> list[CoreAuditEntry]:
+        """Get audit events with optional filters."""
+        qs = AuditEntryModel.objects.filter(user=self.user)
+        if event_types:
+            db_types = [_event_type_to_db(et) for et in event_types]
+            qs = qs.filter(event_type__in=db_types)
+        if conversation_id:
+            qs = qs.filter(conversation_id=conversation_id)
+        if run_id:
+            qs = qs.filter(run_id=run_id)
+        if start_time:
+            qs = qs.filter(timestamp__gte=start_time)
+        if end_time:
+            qs = qs.filter(timestamp__lte=end_time)
+        qs = qs.order_by("-timestamp")[:limit]
+
+        entries = []
+        async for db_entry in qs:
+            entries.append(self._db_to_audit_entry(db_entry))
+        return entries
+
+    # Error record operations
+    async def log_error(self, error: CoreErrorRecord, scope: Scope = Scope.PROJECT) -> None:
+        """Log an error record."""
+        await ErrorRecordModel.objects.acreate(
+            id=error.id,
+            user=self.user,
+            severity=_severity_to_db(error.severity),
+            error_type=error.error_type or "",
+            message=error.message or "",
+            stack_trace=error.stack_trace or "",
+            conversation_id=error.conversation_id,
+            run_id=error.run_id,
+            agent_key=error.agent_key or "",
+            context=error.context,
+            resolved=error.resolved,
+            resolved_at=error.resolved_at,
+            resolution_notes=error.resolution_notes or "",
+            metadata=error.metadata,
+        )
+
+    async def get_errors(
+        self,
+        scope: Scope = Scope.PROJECT,
+        severity: Optional[ErrorSeverity] = None,
+        resolved: Optional[bool] = None,
+        conversation_id: Optional[UUID] = None,
+        start_time: Optional[datetime] = None,
+        end_time: Optional[datetime] = None,
+        limit: int = 100,
+    ) -> list[CoreErrorRecord]:
+        """Get error records with optional filters."""
+        qs = ErrorRecordModel.objects.filter(user=self.user)
+        if severity:
+            qs = qs.filter(severity=_severity_to_db(severity))
+        if resolved is not None:
+            qs = qs.filter(resolved=resolved)
+        if conversation_id:
+            qs = qs.filter(conversation_id=conversation_id)
+        if start_time:
+            qs = qs.filter(timestamp__gte=start_time)
+        if end_time:
+            qs = qs.filter(timestamp__lte=end_time)
+        qs = qs.order_by("-timestamp")[:limit]
+
+        errors = []
+        async for db_error in qs:
+            errors.append(self._db_to_error_record(db_error))
+        return errors
+
+    async def resolve_error(
+        self,
+        error_id: UUID,
+        resolution_notes: Optional[str] = None,
+        scope: Scope = Scope.PROJECT,
+    ) -> bool:
+        """Mark an error as resolved. Returns True if it existed."""
+        try:
+            db_error = await ErrorRecordModel.objects.aget(id=error_id, user=self.user)
+            db_error.resolved = True
+            db_error.resolved_at = datetime.utcnow()
+            db_error.resolution_notes = resolution_notes or ""
+            await db_error.asave(update_fields=["resolved", "resolved_at", "resolution_notes"])
+            return True
+        except ErrorRecordModel.DoesNotExist:
+            return False
+
+    # Performance metric operations
+    async def record_metric(self, metric: CorePerformanceMetric, scope: Scope = Scope.PROJECT) -> None:
+        """Record a performance metric."""
+        await PerformanceMetricModel.objects.acreate(
+            id=metric.id,
+            user=self.user,
+            name=metric.name,
+            value=metric.value,
+            unit=metric.unit or "",
+            conversation_id=metric.conversation_id,
+            run_id=metric.run_id,
+            agent_key=metric.agent_key or "",
+            tags=metric.tags,
+            metadata=metric.metadata,
+        )
+
+    async def get_metrics(
+        self,
+        name: str,
+        scope: Scope = Scope.PROJECT,
+        start_time: Optional[datetime] = None,
+        end_time: Optional[datetime] = None,
+        tags: Optional[dict] = None,
+        limit: int = 1000,
+    ) -> list[CorePerformanceMetric]:
+        """Get metrics by name with optional filters."""
+        qs = PerformanceMetricModel.objects.filter(user=self.user, name=name)
+        if start_time:
+            qs = qs.filter(timestamp__gte=start_time)
+        if end_time:
+            qs = qs.filter(timestamp__lte=end_time)
+        # Note: tags filtering would require JSON field querying
+        # For now, we filter in Python if tags are specified
+        qs = qs.order_by("-timestamp")[:limit]
+
+        metrics = []
+        async for db_metric in qs:
+            if tags:
+                # Filter by tags in Python
+                db_tags = db_metric.tags or {}
+                if not all(db_tags.get(k) == v for k, v in tags.items()):
+                    continue
+            metrics.append(self._db_to_metric(db_metric))
+        return metrics
+
+    async def get_metric_summary(
+        self,
+        name: str,
+        scope: Scope = Scope.PROJECT,
+        start_time: Optional[datetime] = None,
+        end_time: Optional[datetime] = None,
+    ) -> dict:
+        """
+        Get summary statistics for a metric.
+        Returns: {count, min, max, avg, sum, p50, p95, p99}
+        """
+        qs = PerformanceMetricModel.objects.filter(user=self.user, name=name)
+        if start_time:
+            qs = qs.filter(timestamp__gte=start_time)
+        if end_time:
+            qs = qs.filter(timestamp__lte=end_time)
+
+        values = []
+        async for db_metric in qs:
+            values.append(db_metric.value)
+
+        if not values:
+            return {
+                "count": 0,
+                "min": None,
+                "max": None,
+                "avg": None,
+                "sum": None,
+                "p50": None,
+                "p95": None,
+                "p99": None,
+            }
+
+        values.sort()
+        count = len(values)
+        total = sum(values)
+
+        def percentile(data: list[float], p: float) -> float:
+            """Calculate percentile."""
+            if not data:
+                return 0.0
+            k = (len(data) - 1) * p / 100
+            f = int(k)
+            c = f + 1 if f + 1 < len(data) else f
+            return data[f] + (k - f) * (data[c] - data[f]) if c != f else data[f]
+
+        return {
+            "count": count,
+            "min": min(values),
+            "max": max(values),
+            "avg": total / count,
+            "sum": total,
+            "p50": percentile(values, 50),
+            "p95": percentile(values, 95),
+            "p99": percentile(values, 99),
+        }