memorisdk 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of memorisdk might be problematic. Click here for more details.
- memoriai/__init__.py +140 -0
- memoriai/agents/__init__.py +7 -0
- memoriai/agents/conscious_agent.py +506 -0
- memoriai/agents/memory_agent.py +322 -0
- memoriai/agents/retrieval_agent.py +579 -0
- memoriai/config/__init__.py +14 -0
- memoriai/config/manager.py +281 -0
- memoriai/config/settings.py +287 -0
- memoriai/core/__init__.py +6 -0
- memoriai/core/database.py +966 -0
- memoriai/core/memory.py +1349 -0
- memoriai/database/__init__.py +5 -0
- memoriai/database/connectors/__init__.py +9 -0
- memoriai/database/connectors/mysql_connector.py +159 -0
- memoriai/database/connectors/postgres_connector.py +158 -0
- memoriai/database/connectors/sqlite_connector.py +148 -0
- memoriai/database/queries/__init__.py +15 -0
- memoriai/database/queries/base_queries.py +204 -0
- memoriai/database/queries/chat_queries.py +157 -0
- memoriai/database/queries/entity_queries.py +236 -0
- memoriai/database/queries/memory_queries.py +178 -0
- memoriai/database/templates/__init__.py +0 -0
- memoriai/database/templates/basic_template.py +0 -0
- memoriai/database/templates/schemas/__init__.py +0 -0
- memoriai/integrations/__init__.py +68 -0
- memoriai/integrations/anthropic_integration.py +194 -0
- memoriai/integrations/litellm_integration.py +11 -0
- memoriai/integrations/openai_integration.py +273 -0
- memoriai/scripts/llm_text.py +50 -0
- memoriai/tools/__init__.py +5 -0
- memoriai/tools/memory_tool.py +544 -0
- memoriai/utils/__init__.py +89 -0
- memoriai/utils/exceptions.py +418 -0
- memoriai/utils/helpers.py +433 -0
- memoriai/utils/logging.py +204 -0
- memoriai/utils/pydantic_models.py +258 -0
- memoriai/utils/schemas.py +0 -0
- memoriai/utils/validators.py +339 -0
- memorisdk-1.0.0.dist-info/METADATA +386 -0
- memorisdk-1.0.0.dist-info/RECORD +44 -0
- memorisdk-1.0.0.dist-info/WHEEL +5 -0
- memorisdk-1.0.0.dist-info/entry_points.txt +2 -0
- memorisdk-1.0.0.dist-info/licenses/LICENSE +203 -0
- memorisdk-1.0.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,966 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Database Manager - Pydantic-based memory storage with entity indexing
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
import sqlite3
|
|
7
|
+
import uuid
|
|
8
|
+
from datetime import datetime, timedelta
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
from typing import Any, Dict, List, Optional
|
|
11
|
+
|
|
12
|
+
from loguru import logger
|
|
13
|
+
|
|
14
|
+
from ..utils.exceptions import DatabaseError
|
|
15
|
+
from ..utils.pydantic_models import MemoryCategoryType, ProcessedMemory, RetentionType
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class DatabaseManager:
|
|
19
|
+
"""Manages Pydantic-based memory storage with entity indexing and FTS search"""
|
|
20
|
+
|
|
21
|
+
def __init__(self, database_connect: str, template: str = "basic"):
|
|
22
|
+
self.database_connect = database_connect
|
|
23
|
+
self.template = template
|
|
24
|
+
self.db_path = self._parse_connection_string(database_connect)
|
|
25
|
+
|
|
26
|
+
def _parse_connection_string(self, connect_str: str) -> str:
|
|
27
|
+
"""Parse database connection string"""
|
|
28
|
+
if connect_str.startswith("sqlite:///"):
|
|
29
|
+
return connect_str.replace("sqlite:///", "")
|
|
30
|
+
elif connect_str.startswith("sqlite://"):
|
|
31
|
+
return connect_str.replace("sqlite://", "")
|
|
32
|
+
else:
|
|
33
|
+
# For now, only SQLite is implemented
|
|
34
|
+
raise DatabaseError(f"Unsupported database type: {connect_str}")
|
|
35
|
+
|
|
36
|
+
def _get_connection(self) -> sqlite3.Connection:
|
|
37
|
+
"""Get database connection with FTS5 support"""
|
|
38
|
+
try:
|
|
39
|
+
# Ensure directory exists
|
|
40
|
+
db_dir = Path(self.db_path).parent
|
|
41
|
+
db_dir.mkdir(parents=True, exist_ok=True)
|
|
42
|
+
|
|
43
|
+
conn = sqlite3.connect(self.db_path)
|
|
44
|
+
conn.row_factory = sqlite3.Row # Enable dict-like access
|
|
45
|
+
|
|
46
|
+
# Enable FTS features
|
|
47
|
+
conn.execute("PRAGMA enable_fts3_tokenizer=1")
|
|
48
|
+
# Set up other performance optimizations
|
|
49
|
+
conn.execute("PRAGMA journal_mode=WAL")
|
|
50
|
+
conn.execute("PRAGMA synchronous=NORMAL")
|
|
51
|
+
conn.execute("PRAGMA cache_size=10000")
|
|
52
|
+
|
|
53
|
+
return conn
|
|
54
|
+
except Exception as e:
|
|
55
|
+
raise DatabaseError(f"Failed to connect to database: {e}")
|
|
56
|
+
|
|
57
|
+
    def initialize_schema(self):
        """Initialize database schema based on template.

        Resolution order:
          1. Load ``<package>/database/templates/schemas/<template>.sql`` and run
             it with ``executescript`` (handles multi-statement scripts).
          2. On a sqlite error, retry statement-by-statement
             (``_execute_schema_statements``), which tolerates partial failures.
          3. If the SQL file is missing, or anything above raises, fall back to
             the minimal hard-coded schema (``_create_basic_schema``).

        Never raises: all failures degrade to the basic schema.
        """
        try:
            # Check if FTS5 is available; search degrades to LIKE queries without it.
            if not self._check_fts5_support():
                logger.warning(
                    "FTS5 not available, search functionality will be limited"
                )

            # Read and execute schema from file shipped with the package.
            schema_path = (
                Path(__file__).parent.parent
                / "database"
                / "templates"
                / "schemas"
                / f"{self.template}.sql"
            )

            if schema_path.exists():
                with open(schema_path) as f:
                    schema_sql = f.read()

                with self._get_connection() as conn:
                    # Execute schema using executescript for better multi-statement handling
                    try:
                        conn.executescript(schema_sql)
                        logger.info("Database schema initialized successfully")
                    except sqlite3.Error as e:
                        logger.warning(
                            f"Schema execution issue: {e}, falling back to statement-by-statement"
                        )
                        # Fallback to statement-by-statement execution
                        self._execute_schema_statements(conn, schema_sql)

            else:
                # Fallback to basic schema
                self._create_basic_schema()

        except Exception as e:
            logger.error(f"Failed to initialize schema: {e}")
            # Fallback to basic schema
            self._create_basic_schema()
|
|
99
|
+
|
|
100
|
+
def _check_fts5_support(self) -> bool:
|
|
101
|
+
"""Check if FTS5 is supported in this SQLite installation"""
|
|
102
|
+
try:
|
|
103
|
+
with self._get_connection() as conn:
|
|
104
|
+
cursor = conn.cursor()
|
|
105
|
+
cursor.execute("CREATE VIRTUAL TABLE fts_test USING fts5(content)")
|
|
106
|
+
cursor.execute("DROP TABLE fts_test")
|
|
107
|
+
return True
|
|
108
|
+
except sqlite3.OperationalError:
|
|
109
|
+
return False
|
|
110
|
+
|
|
111
|
+
    def _execute_schema_statements(self, conn: sqlite3.Connection, schema_sql: str):
        """Execute schema statements one by one with better error handling.

        Splits ``schema_sql`` on terminating semicolons, with special handling
        for ``CREATE TRIGGER ... END;`` bodies (which contain internal
        semicolons). Each statement is executed independently; failures are
        logged at debug level and skipped so one bad statement does not abort
        the rest of the schema.

        NOTE(review): a trailing statement without a terminating ``;`` is
        silently dropped, and the trigger detection relies on ``END;`` starting
        a line — acceptable for the package's own schema files, but not a
        general SQL parser.
        """
        # Split by semicolons but be smarter about it: accumulate lines per
        # statement and only terminate on ';' outside a trigger body.
        statements = []
        current_statement = []
        in_trigger = False

        for line in schema_sql.split("\n"):
            line = line.strip()
            # Skip blank lines and line comments.
            if not line or line.startswith("--"):
                continue

            if line.upper().startswith("CREATE TRIGGER"):
                in_trigger = True
            elif line.upper().startswith("END;") and in_trigger:
                # 'END;' closes the trigger body: flush the whole trigger.
                current_statement.append(line)
                statements.append("\n".join(current_statement))
                current_statement = []
                in_trigger = False
                continue

            current_statement.append(line)

            # A ';' outside a trigger terminates a normal statement.
            if line.endswith(";") and not in_trigger:
                statements.append("\n".join(current_statement))
                current_statement = []

        # Execute each statement; tolerate individual failures.
        for statement in statements:
            if statement.strip():
                try:
                    conn.execute(statement)
                except sqlite3.Error as e:
                    logger.debug(f"Schema statement warning: {e}")

        conn.commit()
|
|
147
|
+
|
|
148
|
+
    def _create_basic_schema(self):
        """Create basic schema if SQL file is not available.

        Minimal fallback: ``chat_history``, ``short_term_memory``,
        ``long_term_memory`` and ``memory_entities`` tables only — no FTS
        virtual table, no indexes, no ``rules_memory`` (so storing rule-type
        memories will fail under this fallback schema — TODO confirm intended).
        All statements use IF NOT EXISTS, so this is safe to re-run.
        """
        with self._get_connection() as conn:
            cursor = conn.cursor()

            # Basic tables for fallback
            cursor.execute(
                """
                CREATE TABLE IF NOT EXISTS chat_history (
                    chat_id TEXT PRIMARY KEY,
                    user_input TEXT NOT NULL,
                    ai_output TEXT NOT NULL,
                    model TEXT NOT NULL,
                    timestamp TIMESTAMP NOT NULL,
                    session_id TEXT NOT NULL,
                    namespace TEXT NOT NULL DEFAULT 'default',
                    tokens_used INTEGER DEFAULT 0,
                    metadata TEXT DEFAULT '{}'
                )
            """
            )

            cursor.execute(
                """
                CREATE TABLE IF NOT EXISTS short_term_memory (
                    memory_id TEXT PRIMARY KEY,
                    chat_id TEXT,
                    processed_data TEXT NOT NULL,
                    importance_score REAL NOT NULL DEFAULT 0.5,
                    category_primary TEXT NOT NULL,
                    retention_type TEXT NOT NULL DEFAULT 'short_term',
                    namespace TEXT NOT NULL DEFAULT 'default',
                    created_at TIMESTAMP NOT NULL,
                    expires_at TIMESTAMP,
                    access_count INTEGER DEFAULT 0,
                    last_accessed TIMESTAMP,
                    searchable_content TEXT NOT NULL,
                    summary TEXT NOT NULL
                )
            """
            )

            cursor.execute(
                """
                CREATE TABLE IF NOT EXISTS long_term_memory (
                    memory_id TEXT PRIMARY KEY,
                    original_chat_id TEXT,
                    processed_data TEXT NOT NULL,
                    importance_score REAL NOT NULL DEFAULT 0.5,
                    category_primary TEXT NOT NULL,
                    retention_type TEXT NOT NULL DEFAULT 'long_term',
                    namespace TEXT NOT NULL DEFAULT 'default',
                    created_at TIMESTAMP NOT NULL,
                    access_count INTEGER DEFAULT 0,
                    last_accessed TIMESTAMP,
                    searchable_content TEXT NOT NULL,
                    summary TEXT NOT NULL,
                    novelty_score REAL DEFAULT 0.5,
                    relevance_score REAL DEFAULT 0.5,
                    actionability_score REAL DEFAULT 0.5
                )
            """
            )

            cursor.execute(
                """
                CREATE TABLE IF NOT EXISTS memory_entities (
                    entity_id TEXT PRIMARY KEY,
                    memory_id TEXT NOT NULL,
                    memory_type TEXT NOT NULL,
                    entity_type TEXT NOT NULL,
                    entity_value TEXT NOT NULL,
                    relevance_score REAL NOT NULL DEFAULT 0.5,
                    entity_context TEXT,
                    namespace TEXT NOT NULL DEFAULT 'default',
                    created_at TIMESTAMP NOT NULL
                )
            """
            )

            conn.commit()
            logger.info("Basic database schema created")
|
|
230
|
+
|
|
231
|
+
    def store_chat_history(
        self,
        chat_id: str,
        user_input: str,
        ai_output: str,
        model: str,
        timestamp: datetime,
        session_id: str,
        namespace: str = "default",
        tokens_used: int = 0,
        metadata: Optional[Dict[str, Any]] = None,
    ):
        """Store chat history.

        Upserts one conversation turn into ``chat_history``; an existing row
        with the same ``chat_id`` is replaced (INSERT OR REPLACE).

        Args:
            chat_id: Primary key for the conversation turn.
            user_input: Raw user message.
            ai_output: Model response text.
            model: Identifier of the model that produced the response.
            timestamp: When the exchange happened.
            session_id: Conversation/session grouping key.
            namespace: Multi-tenant partition key.
            tokens_used: Token count for the exchange; 0 when unknown.
            metadata: Optional extra fields, JSON-serialized for storage.
        """
        with self._get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute(
                """
                INSERT OR REPLACE INTO chat_history
                (chat_id, user_input, ai_output, model, timestamp, session_id, namespace, tokens_used, metadata)
                VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
            """,
                (
                    chat_id,
                    user_input,
                    ai_output,
                    model,
                    timestamp,
                    session_id,
                    namespace,
                    tokens_used,
                    # metadata is stored as a JSON string; None becomes "{}".
                    json.dumps(metadata or {}),
                ),
            )
            conn.commit()
|
|
265
|
+
|
|
266
|
+
    def get_chat_history(
        self,
        namespace: str = "default",
        session_id: Optional[str] = None,
        limit: int = 10,
    ) -> List[Dict[str, Any]]:
        """Get chat history with optional session filtering.

        Returns the most recent ``limit`` turns (newest first) for the
        namespace, optionally restricted to one session. The JSON ``metadata``
        column is decoded to a dict; undecodable values fall back to an empty
        dict rather than raising.
        """
        with self._get_connection() as conn:
            cursor = conn.cursor()

            if session_id:
                cursor.execute(
                    """
                    SELECT chat_id, user_input, ai_output, model, timestamp,
                           session_id, namespace, tokens_used, metadata
                    FROM chat_history
                    WHERE namespace = ? AND session_id = ?
                    ORDER BY timestamp DESC
                    LIMIT ?
                """,
                    (namespace, session_id, limit),
                )
            else:
                cursor.execute(
                    """
                    SELECT chat_id, user_input, ai_output, model, timestamp,
                           session_id, namespace, tokens_used, metadata
                    FROM chat_history
                    WHERE namespace = ?
                    ORDER BY timestamp DESC
                    LIMIT ?
                """,
                    (namespace, limit),
                )

            rows = cursor.fetchall()

            # Convert to list of dictionaries and parse metadata JSON
            result = []
            for row in rows:
                row_dict = dict(row)
                try:
                    row_dict["metadata"] = json.loads(row_dict["metadata"] or "{}")
                except (json.JSONDecodeError, TypeError):
                    # Corrupt or non-string metadata: degrade to empty dict.
                    row_dict["metadata"] = {}
                result.append(row_dict)

            return result
|
|
314
|
+
|
|
315
|
+
    def store_processed_memory(
        self, memory: ProcessedMemory, chat_id: str, namespace: str = "default"
    ) -> str:
        """Store a processed memory with entity indexing.

        Routes the memory to short-term, long-term, or rules storage based on
        its category/retention (via ``_determine_storage_location``) and then
        indexes its extracted entities, all in one transaction.

        Returns:
            str: The new memory_id, or "" when ``memory.should_store`` is False.

        Raises:
            DatabaseError: If any insert fails (the transaction is rolled back).
        """

        # Respect the upstream agent's decision not to persist this memory.
        if not memory.should_store:
            logger.debug(f"Memory not stored: {memory.storage_reasoning}")
            return ""

        memory_id = str(uuid.uuid4())
        storage_location = self._determine_storage_location(memory)

        with self._get_connection() as conn:
            cursor = conn.cursor()

            try:
                if storage_location == "short_term_memory":
                    self._store_short_term_memory(
                        cursor, memory_id, memory, chat_id, namespace
                    )
                elif storage_location == "long_term_memory":
                    self._store_long_term_memory(
                        cursor, memory_id, memory, chat_id, namespace
                    )
                elif storage_location == "rules_memory":
                    self._store_rules_memory(cursor, memory_id, memory, namespace)

                # Store entities for indexing (same transaction as the memory row).
                self._store_entities(
                    cursor, memory_id, memory, storage_location, namespace
                )

                conn.commit()
                logger.debug(f"Stored memory {memory_id} in {storage_location}")
                return memory_id

            except Exception as e:
                # Keep memory row and entity rows atomic: undo both on failure.
                conn.rollback()
                logger.error(f"Failed to store memory: {e}")
                raise DatabaseError(f"Failed to store memory: {e}")
|
|
355
|
+
|
|
356
|
+
    def _store_short_term_memory(
        self,
        cursor: sqlite3.Cursor,
        memory_id: str,
        memory: ProcessedMemory,
        chat_id: str,
        namespace: str,
    ):
        """Store memory in short-term table.

        The full ProcessedMemory is serialized to JSON in ``processed_data``;
        the scalar columns are denormalized copies used for filtering/sorting.
        Does not commit — the caller owns the transaction.
        """
        # Ensure we have a valid timestamp (timezone-naive for SQLite compatibility)
        created_at = memory.timestamp
        if created_at is None:
            created_at = datetime.now()
        elif hasattr(created_at, "replace"):
            # Make timezone-naive if timezone-aware
            created_at = created_at.replace(tzinfo=None)

        # Short-term rows carry a hard-coded 7-day TTL; expired rows are
        # filtered out by the LIKE search. TODO confirm 7 days is intended.
        expires_at = datetime.now() + timedelta(days=7)  # 7-day expiration

        cursor.execute(
            """
            INSERT INTO short_term_memory
            (memory_id, chat_id, processed_data, importance_score, category_primary,
             retention_type, namespace, created_at, expires_at, access_count,
             searchable_content, summary)
            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        """,
            (
                memory_id,
                chat_id,
                memory.model_dump_json(),
                memory.importance.importance_score,
                memory.category.primary_category.value,
                memory.importance.retention_type.value,
                namespace,
                created_at,
                expires_at,
                0,
                memory.searchable_content,
                memory.summary,
            ),
        )
|
|
398
|
+
|
|
399
|
+
    def _store_long_term_memory(
        self,
        cursor: sqlite3.Cursor,
        memory_id: str,
        memory: ProcessedMemory,
        chat_id: str,
        namespace: str,
    ):
        """Store memory in long-term table.

        Like the short-term variant but with no expiry and with the extra
        novelty/relevance/actionability score columns denormalized for
        ranking. Does not commit — the caller owns the transaction.
        """
        # Ensure we have a valid timestamp (timezone-naive for SQLite compatibility)
        created_at = memory.timestamp
        if created_at is None:
            created_at = datetime.now()
        elif hasattr(created_at, "replace"):
            # Make timezone-naive if timezone-aware
            created_at = created_at.replace(tzinfo=None)

        cursor.execute(
            """
            INSERT INTO long_term_memory
            (memory_id, original_chat_id, processed_data, importance_score, category_primary,
             retention_type, namespace, created_at, access_count, searchable_content, summary,
             novelty_score, relevance_score, actionability_score)
            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        """,
            (
                memory_id,
                chat_id,
                memory.model_dump_json(),
                memory.importance.importance_score,
                memory.category.primary_category.value,
                memory.importance.retention_type.value,
                namespace,
                created_at,
                0,
                memory.searchable_content,
                memory.summary,
                memory.importance.novelty_score,
                memory.importance.relevance_score,
                memory.importance.actionability_score,
            ),
        )
|
|
441
|
+
|
|
442
|
+
    def _store_rules_memory(
        self,
        cursor: sqlite3.Cursor,
        memory_id: str,
        memory: ProcessedMemory,
        namespace: str,
    ):
        """Store rule-type memory in rules table.

        The memory's summary becomes the rule text; type, priority and active
        flag are fixed defaults ('rule', 5, True). NOTE(review): the
        ``rules_memory`` table only exists in the template SQL schema, not in
        the basic fallback schema — this insert fails under the fallback.
        Does not commit — the caller owns the transaction.
        """
        # Ensure we have a valid timestamp (timezone-naive for SQLite compatibility)
        created_at = memory.timestamp
        if created_at is None:
            created_at = datetime.now()
        elif hasattr(created_at, "replace"):
            # Make timezone-naive if timezone-aware
            created_at = created_at.replace(tzinfo=None)

        cursor.execute(
            """
            INSERT INTO rules_memory
            (rule_id, rule_text, rule_type, priority, active, namespace,
             created_at, updated_at, processed_data)
            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
        """,
            (
                memory_id,
                memory.summary,
                "rule",
                5,
                True,
                namespace,
                created_at,
                created_at,
                memory.model_dump_json(),
            ),
        )
|
|
477
|
+
|
|
478
|
+
    def _store_entities(
        self,
        cursor: sqlite3.Cursor,
        memory_id: str,
        memory: ProcessedMemory,
        memory_type: str,
        namespace: str,
    ):
        """Store extracted entities for indexing.

        Writes one ``memory_entities`` row per entity so searches can join
        back to the memory. Plain-list entities get a fixed relevance of 0.8
        and no context; structured entities carry their own relevance/context.
        ``memory_type`` arrives as a table name ('short_term_memory', ...) and
        is stored with the '_memory' suffix stripped to match the search joins.
        Does not commit — the caller owns the transaction.
        """

        # Simple entities (lists), In future we can make it to dynamically handle more entity types

        entity_mappings = [
            (memory.entities.people, "person"),
            (memory.entities.technologies, "technology"),
            (memory.entities.topics, "topic"),
            (memory.entities.skills, "skill"),
            (memory.entities.projects, "project"),
            (memory.entities.keywords, "keyword"),
        ]

        for entity_list, entity_type in entity_mappings:
            for entity_value in entity_list:
                entity_id = str(uuid.uuid4())
                cursor.execute(
                    """
                    INSERT INTO memory_entities
                    (entity_id, memory_id, memory_type, entity_type, entity_value,
                     relevance_score, namespace, created_at)
                    VALUES (?, ?, ?, ?, ?, ?, ?, ?)
                """,
                    (
                        entity_id,
                        memory_id,
                        # 'short_term_memory' -> 'short_term' etc.
                        memory_type.replace("_memory", ""),
                        entity_type,
                        entity_value,
                        0.8,
                        namespace,
                        datetime.now(),
                    ),
                )

        # Structured entities (with metadata)
        for structured_entity in memory.entities.structured_entities:
            entity_id = str(uuid.uuid4())
            cursor.execute(
                """
                INSERT INTO memory_entities
                (entity_id, memory_id, memory_type, entity_type, entity_value,
                 relevance_score, entity_context, namespace, created_at)
                VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
            """,
                (
                    entity_id,
                    memory_id,
                    memory_type.replace("_memory", ""),
                    structured_entity.entity_type.value,
                    structured_entity.value,
                    structured_entity.relevance_score,
                    structured_entity.context,
                    namespace,
                    datetime.now(),
                ),
            )
|
|
543
|
+
|
|
544
|
+
    def search_memories(
        self,
        query: str,
        namespace: str = "default",
        category_filter: Optional[List[str]] = None,
        limit: int = 10,
    ) -> List[Dict[str, Any]]:
        """Advanced memory search with hybrid approach: FTS + Entity + Category filtering.

        Strategies are run in decreasing confidence order, each tagging its
        rows with ``search_strategy`` and a fixed ``search_score`` (FTS 1.0,
        entity 0.8, category 0.6, LIKE-fallback 0.4). Duplicates (same
        memory_id) keep only the highest-scoring hit, and the merged set is
        re-ranked by a composite of search score (40%), stored importance
        (40%) and recency (20%).
        """
        all_results = []

        with self._get_connection() as conn:
            cursor = conn.cursor()

            # 1. Try FTS search first (most relevant)
            fts_results = self._execute_fts_search(
                cursor, query, namespace, category_filter, limit
            )
            if fts_results:
                for result in fts_results:
                    result["search_strategy"] = "fts_search"
                    result["search_score"] = 1.0
                    all_results.append(result)

            # 2. Entity-based search for better context matching
            entity_results = self._execute_entity_search(
                cursor, query, namespace, category_filter, limit
            )
            for result in entity_results:
                result["search_strategy"] = "entity_search"
                result["search_score"] = 0.8
                all_results.append(result)

            # 3. Category-based search if specified
            if category_filter:
                category_results = self._execute_category_search(
                    cursor, query, namespace, category_filter, limit
                )
                for result in category_results:
                    result["search_strategy"] = "category_search"
                    result["search_score"] = 0.6
                    all_results.append(result)

            # 4. Fallback to LIKE search if no other results
            if not all_results:
                like_results = self._execute_like_search(
                    cursor, query, namespace, category_filter, limit
                )
                for result in like_results:
                    result["search_strategy"] = "like_search"
                    result["search_score"] = 0.4
                    all_results.append(result)

            # Remove duplicates while preserving the best search score
            unique_results = {}
            for result in all_results:
                memory_id = result.get("memory_id")
                if (
                    memory_id not in unique_results
                    or result["search_score"]
                    > unique_results[memory_id]["search_score"]
                ):
                    unique_results[memory_id] = result

            # Sort by composite score (search_score + importance + recency)
            final_results = list(unique_results.values())
            final_results.sort(
                key=lambda x: (
                    x.get("search_score", 0) * 0.4
                    + x.get("importance_score", 0) * 0.4
                    + self._calculate_recency_score(x.get("created_at")) * 0.2
                ),
                reverse=True,
            )

            return final_results[:limit]
|
|
619
|
+
|
|
620
|
+
    def _execute_fts_search(
        self,
        cursor,
        query: str,
        namespace: str,
        category_filter: Optional[List[str]],
        limit: int,
    ):
        """Execute FTS5 search.

        Queries the ``memory_search_fts`` virtual table (assumed to be
        created by the template schema — not present in the basic fallback)
        and joins back to the concrete memory tables for scores and payloads.
        Returns [] when FTS is unavailable instead of raising.
        """
        try:
            # Build FTS query with category filter; quoting treats the query
            # as a phrase, '*' matches everything when the query is empty.
            fts_query = f'"{query}"' if query else "*"
            category_clause = ""
            params = [fts_query, namespace]

            if category_filter:
                # Placeholder count must track the filter length; params are
                # appended in the same order the SQL consumes them.
                category_placeholders = ",".join("?" * len(category_filter))
                category_clause = (
                    f"AND fts.category_primary IN ({category_placeholders})"
                )
                params.extend(category_filter)

            params.append(limit)

            cursor.execute(
                f"""
                SELECT
                    fts.memory_id, fts.memory_type, fts.category_primary,
                    CASE
                        WHEN fts.memory_type = 'short_term' THEN st.processed_data
                        WHEN fts.memory_type = 'long_term' THEN lt.processed_data
                        WHEN fts.memory_type = 'rules' THEN r.processed_data
                    END as processed_data,
                    CASE
                        WHEN fts.memory_type = 'short_term' THEN st.importance_score
                        WHEN fts.memory_type = 'long_term' THEN lt.importance_score
                        ELSE 0.5
                    END as importance_score,
                    CASE
                        WHEN fts.memory_type = 'short_term' THEN st.created_at
                        WHEN fts.memory_type = 'long_term' THEN lt.created_at
                        WHEN fts.memory_type = 'rules' THEN r.created_at
                    END as created_at,
                    fts.summary,
                    rank
                FROM memory_search_fts fts
                LEFT JOIN short_term_memory st ON fts.memory_id = st.memory_id AND fts.memory_type = 'short_term'
                LEFT JOIN long_term_memory lt ON fts.memory_id = lt.memory_id AND fts.memory_type = 'long_term'
                LEFT JOIN rules_memory r ON fts.memory_id = r.rule_id AND fts.memory_type = 'rules'
                WHERE memory_search_fts MATCH ? AND fts.namespace = ? {category_clause}
                ORDER BY rank, importance_score DESC
                LIMIT ?
            """,
                params,
            )

            return [dict(row) for row in cursor.fetchall()]

        except sqlite3.OperationalError as e:
            # Missing FTS table / no FTS5 support: treat as "no results".
            logger.debug(f"FTS not available: {e}")
            return []
|
|
681
|
+
|
|
682
|
+
    def _execute_entity_search(
        self,
        cursor,
        query: str,
        namespace: str,
        category_filter: Optional[List[str]],
        limit: int,
    ):
        """Execute entity-based search.

        Matches the query as a substring of indexed entity values and returns
        the owning long-term memories. NOTE(review): only long-term memory is
        joined here — entity hits on short-term memories are not surfaced by
        this strategy (the LIKE fallback covers short-term).
        """
        category_clause = ""
        params = [f"%{query}%", namespace]

        if category_filter:
            category_placeholders = ",".join("?" * len(category_filter))
            category_clause = f"AND m.category_primary IN ({category_placeholders})"
            params.extend(category_filter)

        params.append(limit)

        cursor.execute(
            f"""
            SELECT DISTINCT m.memory_id, m.processed_data, m.importance_score, m.created_at,
                   m.summary, m.category_primary, 'long_term' as memory_type,
                   e.entity_type, e.entity_value, e.relevance_score
            FROM long_term_memory m
            JOIN memory_entities e ON m.memory_id = e.memory_id
            WHERE e.entity_value LIKE ? AND m.namespace = ? {category_clause}
            ORDER BY e.relevance_score DESC, m.importance_score DESC
            LIMIT ?
        """,
            params,
        )

        return [dict(row) for row in cursor.fetchall()]
|
|
716
|
+
|
|
717
|
+
    def _execute_category_search(
        self, cursor, query: str, namespace: str, category_filter: List[str], limit: int
    ):
        """Execute category-based search.

        Restricts long-term memories to the given categories, then applies a
        LIKE match over searchable content and summary. Only called when a
        category filter is supplied.
        """
        category_placeholders = ",".join("?" * len(category_filter))
        # Param order mirrors the SQL: namespace, categories, two LIKE terms, limit.
        params = [namespace] + category_filter + [f"%{query}%", f"%{query}%", limit]

        cursor.execute(
            f"""
            SELECT memory_id, processed_data, importance_score, created_at, summary,
                   category_primary, 'long_term' as memory_type
            FROM long_term_memory
            WHERE namespace = ? AND category_primary IN ({category_placeholders})
            AND (searchable_content LIKE ? OR summary LIKE ?)
            ORDER BY importance_score DESC, created_at DESC
            LIMIT ?
        """,
            params,
        )

        return [dict(row) for row in cursor.fetchall()]
|
|
738
|
+
|
|
739
|
+
    def _execute_like_search(
        self,
        cursor,
        query: str,
        namespace: str,
        category_filter: Optional[List[str]],
        limit: int,
    ):
        """Execute fallback LIKE search.

        Substring-matches searchable content and summary in both short-term
        (skipping expired rows) and long-term memory, returning up to
        ``limit`` rows from each (so the combined list may exceed ``limit``;
        the caller truncates).
        """
        results = []

        # Search short-term memory
        category_clause = ""
        params = [namespace, f"%{query}%", f"%{query}%", datetime.now()]

        if category_filter:
            category_placeholders = ",".join("?" * len(category_filter))
            category_clause = f"AND category_primary IN ({category_placeholders})"
            params.extend(category_filter)

        params.append(limit)

        cursor.execute(
            f"""
            SELECT *, 'short_term' as memory_type FROM short_term_memory
            WHERE namespace = ? AND (searchable_content LIKE ? OR summary LIKE ?)
            AND (expires_at IS NULL OR expires_at > ?) {category_clause}
            ORDER BY importance_score DESC, created_at DESC
            LIMIT ?
        """,
            params,
        )
        results.extend([dict(row) for row in cursor.fetchall()])

        # Search long-term memory; category_clause from above is reused, so the
        # params below intentionally repeat the category values when filtering.
        params = [namespace, f"%{query}%", f"%{query}%"]
        if category_filter:
            params.extend(category_filter)
        params.append(limit)

        cursor.execute(
            f"""
            SELECT *, 'long_term' as memory_type FROM long_term_memory
            WHERE namespace = ? AND (searchable_content LIKE ? OR summary LIKE ?) {category_clause}
            ORDER BY importance_score DESC, created_at DESC
            LIMIT ?
        """,
            params,
        )
        results.extend([dict(row) for row in cursor.fetchall()])

        return results
|
|
791
|
+
|
|
792
|
+
def _calculate_recency_score(self, created_at_str: str) -> float:
|
|
793
|
+
"""Calculate recency score (0-1, newer = higher)"""
|
|
794
|
+
try:
|
|
795
|
+
if not created_at_str:
|
|
796
|
+
return 0.0
|
|
797
|
+
created_at = datetime.fromisoformat(created_at_str).replace(tzinfo=None)
|
|
798
|
+
days_old = (datetime.now() - created_at).days
|
|
799
|
+
# Exponential decay: score decreases as days increase
|
|
800
|
+
return max(0, 1 - (days_old / 30)) # Full score for recent, 0 after 30 days
|
|
801
|
+
except:
|
|
802
|
+
return 0.0
|
|
803
|
+
|
|
804
|
+
def _search_by_entities(
|
|
805
|
+
self, cursor: sqlite3.Cursor, query: str, namespace: str, limit: int
|
|
806
|
+
) -> List[Dict[str, Any]]:
|
|
807
|
+
"""Search memories by entity matching"""
|
|
808
|
+
cursor.execute(
|
|
809
|
+
"""
|
|
810
|
+
SELECT
|
|
811
|
+
e.memory_id, e.memory_type, e.relevance_score,
|
|
812
|
+
CASE
|
|
813
|
+
WHEN e.memory_type = 'short_term' THEN st.processed_data
|
|
814
|
+
WHEN e.memory_type = 'long_term' THEN lt.processed_data
|
|
815
|
+
END as processed_data,
|
|
816
|
+
CASE
|
|
817
|
+
WHEN e.memory_type = 'short_term' THEN st.importance_score
|
|
818
|
+
WHEN e.memory_type = 'long_term' THEN lt.importance_score
|
|
819
|
+
END as importance_score,
|
|
820
|
+
CASE
|
|
821
|
+
WHEN e.memory_type = 'short_term' THEN st.category_primary
|
|
822
|
+
WHEN e.memory_type = 'long_term' THEN lt.category_primary
|
|
823
|
+
END as category_primary,
|
|
824
|
+
CASE
|
|
825
|
+
WHEN e.memory_type = 'short_term' THEN st.created_at
|
|
826
|
+
WHEN e.memory_type = 'long_term' THEN lt.created_at
|
|
827
|
+
END as created_at
|
|
828
|
+
FROM memory_entities e
|
|
829
|
+
LEFT JOIN short_term_memory st ON e.memory_id = st.memory_id AND e.memory_type = 'short_term'
|
|
830
|
+
LEFT JOIN long_term_memory lt ON e.memory_id = lt.memory_id AND e.memory_type = 'long_term'
|
|
831
|
+
WHERE e.namespace = ? AND e.entity_value LIKE ?
|
|
832
|
+
ORDER BY e.relevance_score DESC, importance_score DESC
|
|
833
|
+
LIMIT ?
|
|
834
|
+
""",
|
|
835
|
+
(namespace, f"%{query}%", limit),
|
|
836
|
+
)
|
|
837
|
+
|
|
838
|
+
return [dict(row) for row in cursor.fetchall()]
|
|
839
|
+
|
|
840
|
+
def _determine_storage_location(self, memory: ProcessedMemory) -> str:
    """Pick the destination table name for a processed memory.

    Rules always go to rules_memory; memories marked for long-term or
    permanent retention go to long_term_memory; everything else lands in
    short_term_memory.
    """
    if memory.category.primary_category == MemoryCategoryType.rule:
        return "rules_memory"
    durable_retention = (RetentionType.long_term, RetentionType.permanent)
    if memory.importance.retention_type in durable_retention:
        return "long_term_memory"
    return "short_term_memory"
|
|
851
|
+
|
|
852
|
+
def get_memory_stats(self, namespace: str = "default") -> Dict[str, Any]:
    """Get comprehensive memory statistics for a namespace.

    Returns a dict with per-table row counts, a per-category breakdown of
    stored memories, and the average importance score across both the
    short-term and long-term stores.
    """
    # (table, stats key) pairs for the simple per-table row counts.
    count_targets = [
        ("chat_history", "chat_history_count"),
        ("short_term_memory", "short_term_count"),
        ("long_term_memory", "long_term_count"),
        ("rules_memory", "rules_count"),
        ("memory_entities", "total_entities"),
    ]

    with self._get_connection() as conn:
        cursor = conn.cursor()

        stats: Dict[str, Any] = {}

        for table, key in count_targets:
            cursor.execute(
                f"SELECT COUNT(*) FROM {table} WHERE namespace = ?",
                (namespace,),
            )
            stats[key] = cursor.fetchone()[0]

        # Breakdown of memories per primary category across both stores.
        cursor.execute(
            """
            SELECT category_primary, COUNT(*) as count
            FROM (
                SELECT category_primary FROM short_term_memory WHERE namespace = ?
                UNION ALL
                SELECT category_primary FROM long_term_memory WHERE namespace = ?
            )
            GROUP BY category_primary
            """,
            (namespace, namespace),
        )
        stats["memories_by_category"] = {
            row[0]: row[1] for row in cursor.fetchall()
        }

        # Mean importance across every stored memory; 0.0 when empty
        # (AVG over zero rows yields NULL).
        cursor.execute(
            """
            SELECT AVG(importance_score) FROM (
                SELECT importance_score FROM short_term_memory WHERE namespace = ?
                UNION ALL
                SELECT importance_score FROM long_term_memory WHERE namespace = ?
            )
            """,
            (namespace, namespace),
        )
        mean_importance = cursor.fetchone()[0]
        stats["average_importance"] = mean_importance if mean_importance else 0.0

        return stats
|
|
921
|
+
|
|
922
|
+
def clear_memory(
    self, namespace: str = "default", memory_type: Optional[str] = None
):
    """Clear memory data with entity cleanup.

    When memory_type is 'short_term' or 'long_term', deletes that store's
    rows plus its entity-index entries; 'chat_history' deletes only the
    conversation log; any other value (including None) wipes everything
    for the namespace.
    """
    with self._get_connection() as conn:
        cursor = conn.cursor()

        if memory_type in ("short_term", "long_term"):
            # memory_type is one of two known constants here, so the
            # f-string interpolation cannot inject arbitrary SQL.
            cursor.execute(
                f"DELETE FROM memory_entities WHERE namespace = ? AND memory_type = '{memory_type}'",
                (namespace,),
            )
            cursor.execute(
                f"DELETE FROM {memory_type}_memory WHERE namespace = ?",
                (namespace,),
            )
        elif memory_type == "chat_history":
            cursor.execute(
                "DELETE FROM chat_history WHERE namespace = ?", (namespace,)
            )
        else:  # Clear all
            all_tables = (
                "memory_entities",
                "short_term_memory",
                "long_term_memory",
                "rules_memory",
                "chat_history",
            )
            for table in all_tables:
                cursor.execute(
                    f"DELETE FROM {table} WHERE namespace = ?", (namespace,)
                )

        conn.commit()
|