agnt5-0.1.0-cp39-abi3-macosx_11_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agnt5/__init__.py +307 -0
- agnt5/__pycache__/__init__.cpython-311.pyc +0 -0
- agnt5/__pycache__/agent.cpython-311.pyc +0 -0
- agnt5/__pycache__/context.cpython-311.pyc +0 -0
- agnt5/__pycache__/durable.cpython-311.pyc +0 -0
- agnt5/__pycache__/extraction.cpython-311.pyc +0 -0
- agnt5/__pycache__/memory.cpython-311.pyc +0 -0
- agnt5/__pycache__/reflection.cpython-311.pyc +0 -0
- agnt5/__pycache__/runtime.cpython-311.pyc +0 -0
- agnt5/__pycache__/task.cpython-311.pyc +0 -0
- agnt5/__pycache__/tool.cpython-311.pyc +0 -0
- agnt5/__pycache__/tracing.cpython-311.pyc +0 -0
- agnt5/__pycache__/types.cpython-311.pyc +0 -0
- agnt5/__pycache__/workflow.cpython-311.pyc +0 -0
- agnt5/_core.abi3.so +0 -0
- agnt5/agent.py +1086 -0
- agnt5/context.py +406 -0
- agnt5/durable.py +1050 -0
- agnt5/extraction.py +410 -0
- agnt5/llm/__init__.py +179 -0
- agnt5/llm/__pycache__/__init__.cpython-311.pyc +0 -0
- agnt5/llm/__pycache__/anthropic.cpython-311.pyc +0 -0
- agnt5/llm/__pycache__/azure.cpython-311.pyc +0 -0
- agnt5/llm/__pycache__/base.cpython-311.pyc +0 -0
- agnt5/llm/__pycache__/google.cpython-311.pyc +0 -0
- agnt5/llm/__pycache__/mistral.cpython-311.pyc +0 -0
- agnt5/llm/__pycache__/openai.cpython-311.pyc +0 -0
- agnt5/llm/__pycache__/together.cpython-311.pyc +0 -0
- agnt5/llm/anthropic.py +319 -0
- agnt5/llm/azure.py +348 -0
- agnt5/llm/base.py +315 -0
- agnt5/llm/google.py +373 -0
- agnt5/llm/mistral.py +330 -0
- agnt5/llm/model_registry.py +467 -0
- agnt5/llm/models.json +227 -0
- agnt5/llm/openai.py +334 -0
- agnt5/llm/together.py +377 -0
- agnt5/memory.py +746 -0
- agnt5/reflection.py +514 -0
- agnt5/runtime.py +699 -0
- agnt5/task.py +476 -0
- agnt5/testing.py +451 -0
- agnt5/tool.py +516 -0
- agnt5/tracing.py +624 -0
- agnt5/types.py +210 -0
- agnt5/workflow.py +897 -0
- agnt5-0.1.0.dist-info/METADATA +93 -0
- agnt5-0.1.0.dist-info/RECORD +49 -0
- agnt5-0.1.0.dist-info/WHEEL +4 -0
agnt5/memory.py
ADDED
@@ -0,0 +1,746 @@
"""
Memory systems for the AGNT5 SDK.

Provides various memory implementations for agents including short-term,
long-term, and semantic memory with vector search capabilities.
"""

from typing import Any, Dict, List, Optional, Union, Callable, AsyncIterator
from abc import ABC, abstractmethod
import asyncio
import json
from datetime import datetime, timedelta
from dataclasses import dataclass, field
import logging
from collections import OrderedDict

from .types import MemoryEntry, MemoryQuery, Message, MessageRole
from .durable import durable, DurableObject


logger = logging.getLogger(__name__)


class MemoryStore(ABC):
    """
    Abstract base class for memory stores.

    Implementations can provide different backends like in-memory,
    database, or vector stores.
    """

    @abstractmethod
    async def add(self, content: Any, metadata: Optional[Dict[str, Any]] = None) -> str:
        """Add an entry to memory."""
        pass

    @abstractmethod
    async def get(self, entry_id: str) -> Optional[MemoryEntry]:
        """Get a specific memory entry."""
        pass

    @abstractmethod
    async def search(self, query: MemoryQuery) -> List[MemoryEntry]:
        """Search memory with a query."""
        pass

    @abstractmethod
    async def update(self, entry_id: str, content: Any, metadata: Optional[Dict[str, Any]] = None) -> bool:
        """Update a memory entry."""
        pass

    @abstractmethod
    async def delete(self, entry_id: str) -> bool:
        """Delete a memory entry."""
        pass

    @abstractmethod
    async def clear(self) -> None:
        """Clear all memory entries."""
        pass

    @abstractmethod
    async def size(self) -> int:
        """Get the number of entries in memory."""
        pass


class InMemoryStore(MemoryStore):
    """
    Simple in-memory storage implementation.

    Good for development and testing, not suitable for production
    with large amounts of data.
    """

    def __init__(self, max_size: Optional[int] = None):
        """Initialize in-memory store."""
        self.max_size = max_size
        self._entries: OrderedDict[str, MemoryEntry] = OrderedDict()
        self._embeddings: Dict[str, List[float]] = {}

    async def add(self, content: Any, metadata: Optional[Dict[str, Any]] = None) -> str:
        """Add an entry to memory."""
        entry = MemoryEntry(
            content=content,
            metadata=metadata or {},
        )

        # Enforce max size
        if self.max_size and len(self._entries) >= self.max_size:
            # Remove oldest entry (FIFO)
            oldest_id = next(iter(self._entries))
            await self.delete(oldest_id)

        self._entries[entry.id] = entry
        return entry.id

    async def get(self, entry_id: str) -> Optional[MemoryEntry]:
        """Get a specific memory entry."""
        entry = self._entries.get(entry_id)
        if entry:
            entry.access()
        return entry

    async def search(self, query: MemoryQuery) -> List[MemoryEntry]:
        """Search memory with a query."""
        results = []

        for entry in self._entries.values():
            # Simple text search
            if query.query and isinstance(entry.content, str):
                if query.query.lower() in entry.content.lower():
                    results.append(entry)

            # Filter by metadata
            if query.filters:
                match = all(
                    entry.metadata.get(k) == v
                    for k, v in query.filters.items()
                )
                if match and entry not in results:
                    results.append(entry)

        # No text query and no metadata filters: return everything, so callers
        # such as get_recent(), forget_old() and export() can enumerate the store
        if not query.query and not query.filters:
            results = list(self._entries.values())

        # Apply limit
        results = results[:query.limit]

        # Mark as accessed
        for entry in results:
            entry.access()

        return results

    async def update(self, entry_id: str, content: Any, metadata: Optional[Dict[str, Any]] = None) -> bool:
        """Update a memory entry."""
        entry = self._entries.get(entry_id)
        if not entry:
            return False

        entry.content = content
        if metadata:
            entry.metadata.update(metadata)
        entry.accessed_at = datetime.utcnow()

        return True

    async def delete(self, entry_id: str) -> bool:
        """Delete a memory entry."""
        if entry_id in self._entries:
            del self._entries[entry_id]
            if entry_id in self._embeddings:
                del self._embeddings[entry_id]
            return True
        return False

    async def clear(self) -> None:
        """Clear all memory entries."""
        self._entries.clear()
        self._embeddings.clear()

    async def size(self) -> int:
        """Get the number of entries in memory."""
        return len(self._entries)
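
The `max_size` handling in `add()` above makes `InMemoryStore` behave like a FIFO cache: once the limit is reached, the oldest entry is evicted before a new one is stored. A minimal, illustrative sketch of that behaviour (the `_demo_fifo_eviction` helper is hypothetical, not part of the package):

```python
import asyncio

from agnt5.memory import InMemoryStore


async def _demo_fifo_eviction() -> None:
    store = InMemoryStore(max_size=2)

    first_id = await store.add("oldest entry")
    await store.add("second entry")
    await store.add("third entry")  # evicts "oldest entry"

    assert await store.get(first_id) is None  # the oldest entry is gone
    assert await store.size() == 2


asyncio.run(_demo_fifo_eviction())
```
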


class VectorMemoryStore(InMemoryStore):
    """
    Memory store with vector search capabilities.

    Requires an embedding function to convert content to vectors.
    """

    def __init__(
        self,
        embedding_fn: Callable[[str], List[float]],
        max_size: Optional[int] = None,
        similarity_metric: str = "cosine",
    ):
        """Initialize vector memory store."""
        super().__init__(max_size)
        self.embedding_fn = embedding_fn
        self.similarity_metric = similarity_metric

    async def add(self, content: Any, metadata: Optional[Dict[str, Any]] = None) -> str:
        """Add an entry with embedding."""
        entry_id = await super().add(content, metadata)

        # Generate embedding if content is text
        if isinstance(content, str):
            embedding = await self._get_embedding(content)
            self._embeddings[entry_id] = embedding
            self._entries[entry_id].embedding = embedding

        return entry_id

    async def search(self, query: MemoryQuery) -> List[MemoryEntry]:
        """Search with vector similarity."""
        if query.embedding:
            # Vector search
            return await self._vector_search(query)
        elif query.query:
            # Convert query to embedding
            query_embedding = await self._get_embedding(query.query)
            query.embedding = query_embedding
            return await self._vector_search(query)
        else:
            # Fallback to regular search
            return await super().search(query)

    async def _get_embedding(self, text: str) -> List[float]:
        """Get embedding for text."""
        if asyncio.iscoroutinefunction(self.embedding_fn):
            return await self.embedding_fn(text)
        else:
            loop = asyncio.get_event_loop()
            return await loop.run_in_executor(None, self.embedding_fn, text)

    async def _vector_search(self, query: MemoryQuery) -> List[MemoryEntry]:
        """Perform vector similarity search."""
        if not query.embedding:
            return []

        # Calculate similarities
        similarities = []

        for entry_id, embedding in self._embeddings.items():
            similarity = self._calculate_similarity(query.embedding, embedding)
            if similarity >= query.similarity_threshold:
                similarities.append((entry_id, similarity))

        # Sort by similarity
        similarities.sort(key=lambda x: x[1], reverse=True)

        # Get entries
        results = []
        for entry_id, similarity in similarities[:query.limit]:
            entry = self._entries.get(entry_id)
            if entry:
                # Add similarity score to metadata
                entry.metadata["similarity_score"] = similarity
                entry.access()
                results.append(entry)

        return results

    def _calculate_similarity(self, vec1: List[float], vec2: List[float]) -> float:
        """Calculate similarity between two vectors."""
        if self.similarity_metric != "cosine":
            # Only cosine is implemented; fall back to it instead of recursing
            logger.warning("Unsupported similarity metric %r; using cosine", self.similarity_metric)

        # Cosine similarity implementation without numpy
        if len(vec1) != len(vec2):
            return 0.0

        dot_product = sum(a * b for a, b in zip(vec1, vec2))
        norm1 = sum(a * a for a in vec1) ** 0.5
        norm2 = sum(b * b for b in vec2) ** 0.5

        if norm1 == 0 or norm2 == 0:
            return 0.0

        return float(dot_product / (norm1 * norm2))
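
`VectorMemoryStore` only needs a callable that turns text into a vector, and `_get_embedding()` above accepts either a sync or an async function. A hedged usage sketch with a toy, deterministic embedding (the `toy_embed` and `_demo_vector_search` names are made up for illustration; a real deployment would call an embedding model, and whether a given entry is returned depends on `MemoryQuery`'s default `similarity_threshold`):

```python
import asyncio
from typing import List

from agnt5.memory import VectorMemoryStore
from agnt5.types import MemoryQuery


def toy_embed(text: str) -> List[float]:
    # Character-frequency "embedding" over a-z: deterministic and dependency-free.
    vec = [0.0] * 26
    for ch in text.lower():
        if "a" <= ch <= "z":
            vec[ord(ch) - ord("a")] += 1.0
    return vec


async def _demo_vector_search() -> None:
    store = VectorMemoryStore(embedding_fn=toy_embed)
    await store.add("Alice prefers short answers")
    await store.add("The billing cycle resets monthly")

    hits = await store.search(MemoryQuery(query="what does alice prefer", limit=1))
    for hit in hits:
        print(hit.content, hit.metadata.get("similarity_score"))


asyncio.run(_demo_vector_search())
```
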


class Memory:
    """
    High-level memory interface for agents.

    Combines different memory types and provides a unified interface.

    Example:
        ```python
        from agnt5 import Memory

        # Create memory with custom store
        memory = Memory(
            store=VectorMemoryStore(embedding_fn=my_embed_fn),
            retention_days=30,
        )

        # Add memories
        await memory.add("User's name is Alice")
        await memory.add({"event": "user_login", "user": "alice"})

        # Search memories
        results = await memory.search("What is the user's name?")

        # Get recent memories
        recent = await memory.get_recent(limit=10)
        ```
    """

    def __init__(
        self,
        store: Optional[MemoryStore] = None,
        retention_days: Optional[int] = None,
        auto_summarize: bool = False,
        summarize_threshold: int = 100,
    ):
        """Initialize memory."""
        self.store = store or InMemoryStore()
        self.retention_days = retention_days
        self.auto_summarize = auto_summarize
        self.summarize_threshold = summarize_threshold

        # Message history for agents
        self._message_history: List[Message] = []

    async def add(self, content: Union[Any, Message]) -> str:
        """Add content to memory."""
        # Handle messages specially
        if isinstance(content, Message):
            self._message_history.append(content)

            # Store message content
            metadata = {
                "type": "message",
                "role": content.role.value,
                "timestamp": content.timestamp.isoformat(),
            }
            if content.name:
                metadata["name"] = content.name

            return await self.store.add(content.content, metadata)

        # Regular content
        return await self.store.add(content)

    async def search(
        self,
        query: Union[str, MemoryQuery],
        limit: int = 10,
    ) -> List[MemoryEntry]:
        """Search memory."""
        if isinstance(query, str):
            query = MemoryQuery(query=query, limit=limit)
        else:
            query.limit = limit

        return await self.store.search(query)

    async def get_recent(
        self,
        limit: int = 10,
        content_type: Optional[str] = None,
    ) -> List[MemoryEntry]:
        """Get recent memories."""
        query = MemoryQuery(limit=limit)

        if content_type:
            query.filters["type"] = content_type

        # Fetch everything matching the (optional) type filter and sort by access time.
        # This is inefficient for large stores, but works for the interface.
        query.limit = await self.store.size()
        results = await self.store.search(query)

        # Sort by accessed_at
        results.sort(key=lambda e: e.accessed_at, reverse=True)

        return results[:limit]

    async def get_messages(
        self,
        limit: Optional[int] = None,
        role: Optional[MessageRole] = None,
    ) -> List[Message]:
        """Get message history."""
        messages = self._message_history

        if role:
            messages = [m for m in messages if m.role == role]

        if limit:
            messages = messages[-limit:]

        return messages

    async def forget_old(self) -> int:
        """Remove old memories based on retention policy."""
        if not self.retention_days:
            return 0

        cutoff = datetime.utcnow() - timedelta(days=self.retention_days)
        deleted = 0

        # Get all entries (inefficient, but works for the interface)
        size = await self.store.size()
        all_entries = await self.store.search(MemoryQuery(limit=size))

        for entry in all_entries:
            if entry.accessed_at < cutoff:
                if await self.store.delete(entry.id):
                    deleted += 1

        return deleted

    async def summarize(self) -> Optional[str]:
        """
        Summarize memory contents.

        This is a placeholder - actual implementation would use
        an LLM to generate summaries.
        """
        size = await self.store.size()

        if size < self.summarize_threshold:
            return None

        # TODO: Implement actual summarization
        return f"Memory contains {size} entries"

    async def clear(self) -> None:
        """Clear all memories."""
        await self.store.clear()
        self._message_history.clear()

    async def export(self) -> Dict[str, Any]:
        """Export memory contents."""
        size = await self.store.size()
        all_entries = await self.store.search(MemoryQuery(limit=size))

        return {
            "entries": [
                {
                    "id": entry.id,
                    "content": entry.content,
                    "metadata": entry.metadata,
                    "created_at": entry.created_at.isoformat(),
                    "accessed_at": entry.accessed_at.isoformat(),
                    "access_count": entry.access_count,
                }
                for entry in all_entries
            ],
            "messages": [
                {
                    "role": msg.role.value,
                    "content": msg.content,
                    "name": msg.name,
                    "timestamp": msg.timestamp.isoformat(),
                }
                for msg in self._message_history
            ],
        }

    async def import_data(self, data: Dict[str, Any]) -> None:
        """Import memory contents."""
        # Clear existing data
        await self.clear()

        # Import entries
        for entry_data in data.get("entries", []):
            await self.store.add(
                entry_data["content"],
                entry_data.get("metadata", {}),
            )

        # Import messages
        for msg_data in data.get("messages", []):
            msg = Message(
                role=MessageRole(msg_data["role"]),
                content=msg_data["content"],
                name=msg_data.get("name"),
            )
            self._message_history.append(msg)
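
`export()` and `import_data()` are symmetric, and for string content the exported dict is JSON-serializable, so a session's memory can be written to disk and reloaded later. A minimal sketch of that round trip (the `_demo_export_roundtrip` helper and the snapshot file name are hypothetical):

```python
import asyncio
import json

from agnt5.memory import Memory


async def _demo_export_roundtrip() -> None:
    memory = Memory()
    await memory.add("User's name is Alice")

    # Persist to disk...
    with open("memory_snapshot.json", "w") as fh:
        json.dump(await memory.export(), fh)

    # ...and restore into a fresh instance later.
    restored = Memory()
    with open("memory_snapshot.json") as fh:
        await restored.import_data(json.load(fh))

    print(await restored.search("Alice", limit=5))


asyncio.run(_demo_export_roundtrip())
```
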


@durable.object
class DurableMemoryStore(DurableObject):
    """
    Durable memory store that persists memories across restarts.

    This provides a durable, persistent memory system that:
    - Automatically persists all memory entries
    - Survives process restarts and failures
    - Provides efficient search and retrieval
    - Integrates seamlessly with agents and workflows
    """

    def __init__(self, memory_id: str):
        """Initialize a durable memory store."""
        super().__init__(memory_id)
        self.memory_id = memory_id
        self.entries: Dict[str, MemoryEntry] = {}
        self.entry_counter = 0

    async def add(self, content: Any, metadata: Optional[Dict[str, Any]] = None) -> str:
        """Add an entry to durable memory."""
        entry_id = f"mem_{self.memory_id}_{self.entry_counter}"
        self.entry_counter += 1

        entry = MemoryEntry(
            id=entry_id,
            content=content,
            metadata=metadata or {},
        )

        self.entries[entry_id] = entry
        await self.save()  # Persist to durable storage

        logger.debug(f"Added memory entry {entry_id} to durable store {self.memory_id}")
        return entry_id

    async def get(self, entry_id: str) -> Optional[MemoryEntry]:
        """Get a specific memory entry from durable storage."""
        return self.entries.get(entry_id)

    async def search(self, query: MemoryQuery) -> List[MemoryEntry]:
        """Search memory with a query."""
        results = []

        for entry in self.entries.values():
            # Simple text-based search
            if query.query:
                content_str = str(entry.content).lower()
                if query.query.lower() in content_str:
                    results.append(entry)
                    continue

            # Metadata filtering
            if query.filters:
                matches_metadata = all(
                    entry.metadata.get(key) == value
                    for key, value in query.filters.items()
                )
                if matches_metadata:
                    results.append(entry)
                    continue

        # Sort by relevance/timestamp
        results.sort(key=lambda x: x.accessed_at, reverse=True)

        # Apply limit
        if query.limit:
            results = results[:query.limit]

        return results

    async def update(self, entry_id: str, content: Any, metadata: Optional[Dict[str, Any]] = None) -> bool:
        """Update a memory entry in durable storage."""
        if entry_id not in self.entries:
            return False

        entry = self.entries[entry_id]
        entry.content = content
        if metadata:
            entry.metadata.update(metadata)
        entry.access()  # Update access time

        await self.save()  # Persist changes
        logger.debug(f"Updated memory entry {entry_id} in durable store {self.memory_id}")
        return True

    async def delete(self, entry_id: str) -> bool:
        """Delete a memory entry from durable storage."""
        if entry_id in self.entries:
            del self.entries[entry_id]
            await self.save()  # Persist changes
            logger.debug(f"Deleted memory entry {entry_id} from durable store {self.memory_id}")
            return True
        return False

    async def clear(self) -> None:
        """Clear all memory entries."""
        self.entries.clear()
        self.entry_counter = 0
        await self.save()
        logger.debug(f"Cleared all entries from durable store {self.memory_id}")

    async def size(self) -> int:
        """Get the number of entries in memory."""
        return len(self.entries)

    async def get_stats(self) -> Dict[str, Any]:
        """Get memory store statistics."""
        total_entries = len(self.entries)
        memory_types = {}

        for entry in self.entries.values():
            memory_type = entry.metadata.get("type", "unknown")
            memory_types[memory_type] = memory_types.get(memory_type, 0) + 1

        oldest_entry = min(self.entries.values(), key=lambda x: x.created_at) if self.entries else None
        newest_entry = max(self.entries.values(), key=lambda x: x.created_at) if self.entries else None

        return {
            "memory_id": self.memory_id,
            "total_entries": total_entries,
            "memory_types": memory_types,
            "oldest_entry": oldest_entry.created_at.isoformat() if oldest_entry else None,
            "newest_entry": newest_entry.created_at.isoformat() if newest_entry else None,
            "version": self._version,
            "last_saved": self._last_saved.isoformat(),
        }

    async def _get_state(self) -> Dict[str, Any]:
        """Get memory store state for durable persistence."""
        base_state = await super()._get_state()

        # Serialize entries
        serialized_entries = {}
        for entry_id, entry in self.entries.items():
            serialized_entries[entry_id] = {
                "id": entry.id,
                "content": entry.content,
                "metadata": entry.metadata,
                "created_at": entry.created_at.isoformat(),
                "accessed_at": entry.accessed_at.isoformat(),
                "access_count": entry.access_count,
            }

        memory_state = {
            "memory_id": self.memory_id,
            "entries": serialized_entries,
            "entry_counter": self.entry_counter,
        }

        return {**base_state, **memory_state}

    async def _restore_state(self, state: Dict[str, Any]) -> None:
        """Restore memory store state from durable persistence."""
        await super()._restore_state(state)

        self.memory_id = state.get("memory_id", self.object_id)
        self.entry_counter = state.get("entry_counter", 0)

        # Restore entries
        self.entries = {}
        for entry_id, entry_data in state.get("entries", {}).items():
            entry = MemoryEntry(
                id=entry_data["id"],
                content=entry_data["content"],
                metadata=entry_data["metadata"],
            )
            # Restore timestamps and access count
            entry.created_at = datetime.fromisoformat(entry_data["created_at"])
            entry.accessed_at = datetime.fromisoformat(entry_data["accessed_at"])
            entry.access_count = entry_data["access_count"]

            self.entries[entry_id] = entry

        logger.info(f"Restored durable memory store {self.memory_id} with {len(self.entries)} entries")


class DurableMemory(Memory):
    """
    Enhanced durable memory interface that uses durable storage by default.

    This provides a seamless upgrade path for agents to use durable memory
    while maintaining backward compatibility with the Memory interface.
    """

    def __init__(
        self,
        memory_id: Optional[str] = None,
        retention_days: Optional[int] = None,
        auto_summarize: bool = False,
        summarize_threshold: int = 100,
    ):
        """Initialize durable memory."""
        import uuid
        self._memory_id = memory_id or str(uuid.uuid4())
        self._durable_store = None

        # Don't call super().__init__ yet - we'll set up the store first
        self.retention_days = retention_days
        self.auto_summarize = auto_summarize
        self.summarize_threshold = summarize_threshold
        self._message_history: List[Message] = []

    async def _ensure_durable_store(self) -> DurableMemoryStore:
        """Ensure durable store is initialized."""
        if self._durable_store is None:
            self._durable_store = await DurableMemoryStore.get_or_create(self._memory_id)
        return self._durable_store

    async def add(self, content: Union[Any, Message]) -> str:
        """Add content to durable memory."""
        store = await self._ensure_durable_store()

        # Handle messages specially
        if isinstance(content, Message):
            self._message_history.append(content)

            # Store message content with enhanced metadata
            metadata = {
                "type": "message",
                "role": content.role.value,
                "timestamp": content.timestamp.isoformat(),
                "message_id": getattr(content, 'id', None),
            }
            if content.name:
                metadata["name"] = content.name
            if hasattr(content, 'tool_calls') and content.tool_calls:
                metadata["has_tool_calls"] = True
                metadata["tool_count"] = len(content.tool_calls)

            return await store.add(content.content, metadata)

        # Regular content
        return await store.add(content)

    async def search(
        self,
        query: Union[str, MemoryQuery],
        limit: int = 10,
    ) -> List[MemoryEntry]:
        """Search durable memory."""
        store = await self._ensure_durable_store()

        if isinstance(query, str):
            query = MemoryQuery(query=query, limit=limit)
        else:
            query.limit = limit

        return await store.search(query)

    async def get_stats(self) -> Dict[str, Any]:
        """Get comprehensive memory statistics."""
        store = await self._ensure_durable_store()
        return await store.get_stats()

    async def clear(self) -> None:
        """Clear all durable memories."""
        store = await self._ensure_durable_store()
        await store.clear()
        self._message_history.clear()

    @property
    def memory_id(self) -> str:
        """Get the memory ID."""
        return self._memory_id

    @property
    def is_durable(self) -> bool:
        """Always returns True for durable memory."""
        return True

    @classmethod
    async def create(cls, memory_id: Optional[str] = None) -> "DurableMemory":
        """Create and initialize a durable memory instance."""
        memory = cls(memory_id=memory_id)
        await memory._ensure_durable_store()  # Initialize immediately
        return memory
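
Taken together, `DurableMemory.create()` is the intended entry point for the durable path: it builds the instance and eagerly resolves the backing `DurableMemoryStore` via `get_or_create()`, so re-using the same `memory_id` after a restart reattaches to the persisted entries. A hedged usage sketch, assuming the AGNT5 runtime that backs `durable.object` persistence is available in the environment (the `memory_id` value and the stored text are arbitrary examples):

```python
import asyncio

from agnt5.memory import DurableMemory


async def main() -> None:
    # Same memory_id across runs -> same persisted store.
    memory = await DurableMemory.create(memory_id="support-agent-42")

    await memory.add("Customer reported a login issue")
    hits = await memory.search("login issue", limit=5)
    stats = await memory.get_stats()

    print(len(hits), stats["total_entries"], stats["memory_id"])


asyncio.run(main())
```
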
|