mem-llm 2.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mem_llm/__init__.py +98 -0
- mem_llm/api_server.py +595 -0
- mem_llm/base_llm_client.py +201 -0
- mem_llm/builtin_tools.py +311 -0
- mem_llm/cli.py +254 -0
- mem_llm/clients/__init__.py +22 -0
- mem_llm/clients/lmstudio_client.py +393 -0
- mem_llm/clients/ollama_client.py +354 -0
- mem_llm/config.yaml.example +52 -0
- mem_llm/config_from_docs.py +180 -0
- mem_llm/config_manager.py +231 -0
- mem_llm/conversation_summarizer.py +372 -0
- mem_llm/data_export_import.py +640 -0
- mem_llm/dynamic_prompt.py +298 -0
- mem_llm/knowledge_loader.py +88 -0
- mem_llm/llm_client.py +225 -0
- mem_llm/llm_client_factory.py +260 -0
- mem_llm/logger.py +129 -0
- mem_llm/mem_agent.py +1611 -0
- mem_llm/memory_db.py +612 -0
- mem_llm/memory_manager.py +321 -0
- mem_llm/memory_tools.py +253 -0
- mem_llm/prompt_security.py +304 -0
- mem_llm/response_metrics.py +221 -0
- mem_llm/retry_handler.py +193 -0
- mem_llm/thread_safe_db.py +301 -0
- mem_llm/tool_system.py +429 -0
- mem_llm/vector_store.py +278 -0
- mem_llm/web_launcher.py +129 -0
- mem_llm/web_ui/README.md +44 -0
- mem_llm/web_ui/__init__.py +7 -0
- mem_llm/web_ui/index.html +641 -0
- mem_llm/web_ui/memory.html +569 -0
- mem_llm/web_ui/metrics.html +75 -0
- mem_llm-2.0.0.dist-info/METADATA +667 -0
- mem_llm-2.0.0.dist-info/RECORD +39 -0
- mem_llm-2.0.0.dist-info/WHEEL +5 -0
- mem_llm-2.0.0.dist-info/entry_points.txt +3 -0
- mem_llm-2.0.0.dist-info/top_level.txt +1 -0
mem_llm/memory_db.py
ADDED
@@ -0,0 +1,612 @@
"""
SQL Database Memory Management
Stores memory data using SQLite - Production-ready
"""

import sqlite3
import json
import threading
from datetime import datetime
from typing import Dict, List, Optional, Tuple
from pathlib import Path
import logging

logger = logging.getLogger(__name__)

# Optional vector store support
try:
    from .vector_store import create_vector_store, VectorStore
    VECTOR_STORE_AVAILABLE = True
except ImportError:
    VECTOR_STORE_AVAILABLE = False
    VectorStore = None


class SQLMemoryManager:
    """SQLite-based memory management system with thread-safety"""

    def __init__(self, db_path: str = "memories/memories.db",
                 enable_vector_search: bool = False,
                 vector_store_type: str = "chroma",
                 embedding_model: str = "all-MiniLM-L6-v2"):
        """
        Args:
            db_path: SQLite database file path
            enable_vector_search: Enable vector/semantic search (optional)
            vector_store_type: Type of vector store ('chroma', etc.)
            embedding_model: Embedding model name (sentence-transformers)
        """
        self.db_path = Path(db_path)

        # Ensure directory exists
        db_dir = self.db_path.parent
        if not db_dir.exists():
            db_dir.mkdir(parents=True, exist_ok=True)

        self.conn = None
        self._lock = threading.RLock()  # Reentrant lock for thread safety
        self._init_database()

        # Vector store (optional)
        self.enable_vector_search = enable_vector_search
        self.vector_store: Optional[VectorStore] = None

        if enable_vector_search:
            if not VECTOR_STORE_AVAILABLE:
                logger.warning(
                    "Vector search requested but dependencies not available. "
                    "Install with: pip install chromadb sentence-transformers"
                )
                self.enable_vector_search = False
            else:
                try:
                    persist_dir = str(db_dir / "vector_store")
                    self.vector_store = create_vector_store(
                        store_type=vector_store_type,
                        collection_name="knowledge_base",
                        persist_directory=persist_dir,
                        embedding_model=embedding_model
                    )
                    if self.vector_store:
                        logger.info(f"Vector search enabled: {vector_store_type}")
                    else:
                        logger.warning("Failed to initialize vector store, falling back to keyword search")
                        self.enable_vector_search = False
                except Exception as e:
                    logger.error(f"Error initializing vector store: {e}")
                    self.enable_vector_search = False
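
A rough usage sketch for this constructor, assuming the class is importable from the installed wheel as `mem_llm.memory_db.SQLMemoryManager` (the path suggested by this file listing):

```python
# Illustrative sketch only; the import path is assumed from the package layout.
from mem_llm.memory_db import SQLMemoryManager

# Keyword-search-only setup: just the SQLite file is created.
memory = SQLMemoryManager(db_path="memories/memories.db")

# Semantic-search setup: needs the optional chromadb and sentence-transformers
# dependencies; the constructor falls back to keyword search if they are missing.
semantic_memory = SQLMemoryManager(
    db_path="memories/memories.db",
    enable_vector_search=True,
    vector_store_type="chroma",
    embedding_model="all-MiniLM-L6-v2",
)
```
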
    def _init_database(self) -> None:
        """Create database and tables"""
        self.conn = sqlite3.connect(
            str(self.db_path),
            check_same_thread=False,
            timeout=30.0,  # 30 second timeout for busy database
            isolation_level=None  # Autocommit mode
        )
        self.conn.row_factory = sqlite3.Row

        # Enable WAL mode for better concurrency
        self.conn.execute("PRAGMA journal_mode=WAL")
        self.conn.execute("PRAGMA synchronous=NORMAL")
        self.conn.execute("PRAGMA cache_size=-64000")  # 64MB cache
        self.conn.execute("PRAGMA busy_timeout=30000")  # 30 second busy timeout

        cursor = self.conn.cursor()

        # User profiles table
        cursor.execute("""
            CREATE TABLE IF NOT EXISTS users (
                user_id TEXT PRIMARY KEY,
                name TEXT,
                first_seen TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                last_interaction TIMESTAMP,
                preferences TEXT,
                summary TEXT,
                metadata TEXT
            )
        """)

        # Conversations table
        cursor.execute("""
            CREATE TABLE IF NOT EXISTS conversations (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                user_id TEXT NOT NULL,
                timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                user_message TEXT NOT NULL,
                bot_response TEXT NOT NULL,
                metadata TEXT,
                sentiment TEXT,
                resolved BOOLEAN DEFAULT 0,
                FOREIGN KEY (user_id) REFERENCES users(user_id)
            )
        """)

        # Indexes for query performance
        cursor.execute("""
            CREATE INDEX IF NOT EXISTS idx_user_timestamp
            ON conversations(user_id, timestamp DESC)
        """)

        cursor.execute("""
            CREATE INDEX IF NOT EXISTS idx_resolved
            ON conversations(user_id, resolved)
        """)

        # Scenario templates table
        cursor.execute("""
            CREATE TABLE IF NOT EXISTS scenario_templates (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                name TEXT NOT NULL UNIQUE,
                description TEXT,
                system_prompt TEXT NOT NULL,
                example_interactions TEXT,
                metadata TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
        """)

        # Problem/FAQ knowledge base
        cursor.execute("""
            CREATE TABLE IF NOT EXISTS knowledge_base (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                category TEXT NOT NULL,
                question TEXT NOT NULL,
                answer TEXT NOT NULL,
                keywords TEXT,
                priority INTEGER DEFAULT 0,
                active BOOLEAN DEFAULT 1,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
        """)

        cursor.execute("""
            CREATE INDEX IF NOT EXISTS idx_category
            ON knowledge_base(category, active)
        """)

        self.conn.commit()
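
To sanity-check the schema created above, a small standalone snippet using only the standard `sqlite3` module can list the tables; the expected names are taken from the `CREATE TABLE` statements in this method:

```python
import sqlite3

# Inspect the schema produced by _init_database(); read-only check.
conn = sqlite3.connect("memories/memories.db")
tables = [row[0] for row in conn.execute(
    "SELECT name FROM sqlite_master WHERE type = 'table' ORDER BY name"
)]
# Expected to include: conversations, knowledge_base, scenario_templates, users
print(tables)
conn.close()
```
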
    def add_user(self, user_id: str, name: Optional[str] = None,
                 metadata: Optional[Dict] = None) -> None:
        """
        Add new user or update existing (thread-safe)

        Args:
            user_id: User ID
            name: User name
            metadata: Additional information
        """
        with self._lock:
            cursor = self.conn.cursor()
            cursor.execute("""
                INSERT INTO users (user_id, name, metadata)
                VALUES (?, ?, ?)
                ON CONFLICT(user_id) DO UPDATE SET
                    name = COALESCE(excluded.name, users.name),
                    metadata = COALESCE(excluded.metadata, users.metadata)
            """, (user_id, name, json.dumps(metadata or {})))

    def add_interaction(self, user_id: str, user_message: str,
                        bot_response: str, metadata: Optional[Dict] = None,
                        resolved: bool = False) -> int:
        """
        Record new interaction (thread-safe)

        Args:
            user_id: User ID
            user_message: User's message
            bot_response: Bot's response
            metadata: Additional information
            resolved: Is the issue resolved?

        Returns:
            ID of the added record
        """
        if not user_message or not bot_response:
            raise ValueError("user_message and bot_response cannot be None or empty")

        with self._lock:
            cursor = self.conn.cursor()

            # Create user if not exists
            self.add_user(user_id)

            # Record interaction
            cursor.execute("""
                INSERT INTO conversations
                (user_id, user_message, bot_response, metadata, resolved)
                VALUES (?, ?, ?, ?, ?)
            """, (user_id, user_message, bot_response,
                  json.dumps(metadata or {}), resolved))

            interaction_id = cursor.lastrowid

            # Update user's last interaction time
            cursor.execute("""
                UPDATE users
                SET last_interaction = CURRENT_TIMESTAMP
                WHERE user_id = ?
            """, (user_id,))

            return interaction_id

    # Alias for compatibility
    def add_conversation(self, user_id: str, user_message: str, bot_response: str,
                         metadata: Optional[Dict] = None) -> int:
        """Alias for add_interaction"""
        return self.add_interaction(user_id, user_message, bot_response, metadata)
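
A short sketch of the write path above (import path assumed as before; the user data is made up for illustration):

```python
from mem_llm.memory_db import SQLMemoryManager

memory = SQLMemoryManager(db_path="memories/memories.db")

# Upsert a user profile, then log one exchange for that user.
memory.add_user("user-42", name="Alice", metadata={"tier": "pro"})
interaction_id = memory.add_interaction(
    user_id="user-42",
    user_message="Where is my order?",
    bot_response="Your order shipped yesterday.",
    metadata={"channel": "web"},
    resolved=True,
)
print(interaction_id)  # autoincrement id from the conversations table
```
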
    def get_recent_conversations(self, user_id: str, limit: int = 10) -> List[Dict]:
        """
        Return the user's most recent conversations (thread-safe)

        Args:
            user_id: User ID
            limit: Number of conversations to fetch

        Returns:
            List of conversations
        """
        with self._lock:
            cursor = self.conn.cursor()
            cursor.execute("""
                SELECT timestamp, user_message, bot_response, metadata, resolved
                FROM conversations
                WHERE user_id = ?
                ORDER BY timestamp DESC
                LIMIT ?
            """, (user_id, limit))

            rows = cursor.fetchall()
            return [dict(row) for row in rows]

    def search_conversations(self, user_id: str, keyword: str) -> List[Dict]:
        """
        Search the user's conversations for a keyword (thread-safe)

        Args:
            user_id: User ID
            keyword: Keyword to search for

        Returns:
            Matching conversations
        """
        with self._lock:
            cursor = self.conn.cursor()
            cursor.execute("""
                SELECT timestamp, user_message, bot_response, metadata, resolved
                FROM conversations
                WHERE user_id = ?
                AND (user_message LIKE ? OR bot_response LIKE ? OR metadata LIKE ?)
                ORDER BY timestamp DESC
            """, (user_id, f"%{keyword}%", f"%{keyword}%", f"%{keyword}%"))

            rows = cursor.fetchall()
            return [dict(row) for row in rows]
    def get_user_profile(self, user_id: str) -> Optional[Dict]:
        """
        Return the user's profile

        Args:
            user_id: User ID

        Returns:
            User profile or None
        """
        cursor = self.conn.cursor()
        cursor.execute("""
            SELECT * FROM users WHERE user_id = ?
        """, (user_id,))

        row = cursor.fetchone()
        if row:
            return dict(row)
        return None

    def update_user_profile(self, user_id: str, updates: Dict) -> None:
        """
        Update the user's profile

        Args:
            user_id: User ID
            updates: Fields to update
        """
        allowed_fields = ['name', 'preferences', 'summary', 'metadata']
        set_clause = []
        values = []

        for field, value in updates.items():
            if field in allowed_fields:
                set_clause.append(f"{field} = ?")
                if isinstance(value, (dict, list)):
                    values.append(json.dumps(value))
                else:
                    values.append(value)

        if set_clause:
            values.append(user_id)
            cursor = self.conn.cursor()
            cursor.execute(f"""
                UPDATE users
                SET {', '.join(set_clause)}
                WHERE user_id = ?
            """, values)
            self.conn.commit()
    def add_knowledge(self, category: str, question: str, answer: str,
                      keywords: Optional[List[str]] = None,
                      priority: int = 0) -> int:
        """
        Add a new entry to the knowledge base

        Args:
            category: Category (e.g. "shipping", "returns", "payment")
            question: Question
            answer: Answer
            keywords: Keywords
            priority: Priority (higher = shown first)

        Returns:
            ID of the new record
        """
        cursor = self.conn.cursor()
        cursor.execute("""
            INSERT INTO knowledge_base
            (category, question, answer, keywords, priority)
            VALUES (?, ?, ?, ?, ?)
        """, (category, question, answer,
              json.dumps(keywords or []), priority))

        kb_id = cursor.lastrowid
        self.conn.commit()

        # Sync to vector store if enabled
        if self.enable_vector_search and self.vector_store:
            try:
                self._sync_to_vector_store(kb_id)
            except Exception as e:
                logger.warning(f"Failed to sync KB entry to vector store: {e}")

        return kb_id
    def search_knowledge(self, query: str, category: Optional[str] = None,
                         limit: int = 5, use_vector_search: Optional[bool] = None) -> List[Dict]:
        """
        Search the knowledge base (keyword matching or semantic search)

        Args:
            query: Search query
            category: Category filter (optional)
            limit: Maximum number of results
            use_vector_search: Force vector search (None = auto-detect)

        Returns:
            Matching records
        """
        # Use vector search if enabled and available
        if use_vector_search is None:
            use_vector_search = self.enable_vector_search

        if use_vector_search and self.vector_store:
            return self._vector_search(query, category, limit)
        else:
            return self._keyword_search(query, category, limit)
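
A sketch of the knowledge-base round trip (import path assumed; the FAQ content below is illustrative only):

```python
from mem_llm.memory_db import SQLMemoryManager

memory = SQLMemoryManager(db_path="memories/memories.db")

memory.add_knowledge(
    category="shipping",
    question="How long does delivery take?",
    answer="Standard delivery takes 2-4 business days.",
    keywords=["delivery", "shipping", "cargo"],
    priority=10,
)

# With vector search disabled this dispatches to _keyword_search();
# with enable_vector_search=True and a working store it uses _vector_search().
for hit in memory.search_knowledge("how long is delivery", category="shipping", limit=3):
    print(hit["priority"], hit["question"], "->", hit["answer"])
```
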
    def _keyword_search(self, query: str, category: Optional[str] = None,
                        limit: int = 5) -> List[Dict]:
        """Traditional keyword-based search"""
        cursor = self.conn.cursor()

        # Extract important keywords from query (remove question words)
        import re
        stopwords = ['ne', 'kadar', 'nedir', 'nasıl', 'için', 'mı', 'mi', 'mu', 'mü',
                     'what', 'how', 'when', 'where', 'is', 'are', 'the', 'a', 'an']

        # Clean query and extract keywords
        query_lower = query.lower()
        words = re.findall(r'\w+', query_lower)
        keywords = [w for w in words if w not in stopwords and len(w) > 2]

        # If no keywords, use original query
        if not keywords:
            keywords = [query_lower]

        # Build search conditions for each keyword
        conditions = []
        params = []

        for keyword in keywords[:5]:  # Max 5 keywords
            conditions.append("(question LIKE ? OR answer LIKE ? OR keywords LIKE ?)")
            params.extend([f"%{keyword}%", f"%{keyword}%", f"%{keyword}%"])

        where_clause = " OR ".join(conditions) if conditions else "1=1"

        if category:
            sql = f"""
                SELECT category, question, answer, priority
                FROM knowledge_base
                WHERE active = 1
                AND category = ?
                AND ({where_clause})
                ORDER BY priority DESC, id DESC
                LIMIT ?
            """
            cursor.execute(sql, [category] + params + [limit])
        else:
            sql = f"""
                SELECT category, question, answer, priority
                FROM knowledge_base
                WHERE active = 1
                AND ({where_clause})
                ORDER BY priority DESC, id DESC
                LIMIT ?
            """
            cursor.execute(sql, params + [limit])

        return [dict(row) for row in cursor.fetchall()]

    def _vector_search(self, query: str, category: Optional[str] = None,
                       limit: int = 5) -> List[Dict]:
        """Vector-based semantic search"""
        if not self.vector_store:
            return []

        # Prepare metadata filter
        filter_metadata = None
        if category:
            filter_metadata = {"category": category}

        # Search in vector store
        vector_results = self.vector_store.search(
            query=query,
            limit=limit * 2,  # Get more results to filter by category if needed
            filter_metadata=filter_metadata
        )

        # Map vector results back to KB format
        results = []
        for result in vector_results[:limit]:
            # Extract metadata
            metadata = result.get('metadata', {})

            results.append({
                'category': metadata.get('category', ''),
                'question': metadata.get('question', ''),
                'answer': result.get('text', ''),
                'priority': metadata.get('priority', 0),
                'score': result.get('score', 0.0),  # Similarity score
                'vector_search': True
            })

        return results

    def _sync_to_vector_store(self, kb_id: int) -> None:
        """Sync a single KB entry to vector store"""
        if not self.vector_store:
            return

        cursor = self.conn.cursor()
        cursor.execute("""
            SELECT id, category, question, answer, keywords, priority
            FROM knowledge_base
            WHERE id = ?
        """, (kb_id,))

        row = cursor.fetchone()
        if row:
            doc = {
                'id': str(row['id']),
                'text': f"{row['question']}\n{row['answer']}",  # Combine for better search
                'metadata': {
                    'category': row['category'],
                    'question': row['question'],
                    'answer': row['answer'],
                    'keywords': row['keywords'],
                    'priority': row['priority'],
                    'kb_id': row['id']
                }
            }
            self.vector_store.add_documents([doc])

    def sync_all_kb_to_vector_store(self) -> int:
        """
        Sync all existing KB entries to vector store

        Returns:
            Number of entries synced
        """
        if not self.vector_store:
            return 0

        cursor = self.conn.cursor()
        cursor.execute("""
            SELECT id, category, question, answer, keywords, priority
            FROM knowledge_base
            WHERE active = 1
        """)

        rows = cursor.fetchall()
        documents = []

        for row in rows:
            doc = {
                'id': str(row['id']),
                'text': f"{row['question']}\n{row['answer']}",
                'metadata': {
                    'category': row['category'],
                    'question': row['question'],
                    'answer': row['answer'],
                    'keywords': row['keywords'],
                    'priority': row['priority'],
                    'kb_id': row['id']
                }
            }
            documents.append(doc)

        if documents:
            try:
                # Add in batches for better performance
                batch_size = 100
                for i in range(0, len(documents), batch_size):
                    batch = documents[i:i + batch_size]
                    self.vector_store.add_documents(batch)
                    logger.debug(f"Synced {len(batch)} KB entries to vector store")

                logger.info(f"Synced {len(documents)} KB entries to vector store")
            except Exception as e:
                logger.error(f"Error syncing KB to vector store: {e}")
                return 0

        return len(documents)
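
A back-fill sketch for an existing database, assuming the optional chromadb and sentence-transformers dependencies are installed (import path assumed as before):

```python
from mem_llm.memory_db import SQLMemoryManager

memory = SQLMemoryManager(db_path="memories/memories.db", enable_vector_search=True)

# Push every active knowledge_base row into the vector store, in batches of 100.
synced = memory.sync_all_kb_to_vector_store()  # returns 0 if the store is unavailable
print(f"Synced {synced} knowledge base entries")

# Semantic lookup; results carry a 'score' field when the vector path is used.
for result in memory.search_knowledge("delivery time", use_vector_search=True):
    print(result.get("score"), result["question"])
```
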
    def get_statistics(self) -> Dict:
        """
        Return overall statistics

        Returns:
            Statistics summary
        """
        cursor = self.conn.cursor()

        # Total users
        cursor.execute("SELECT COUNT(*) as count FROM users")
        total_users = cursor.fetchone()['count']

        # Total interactions
        cursor.execute("SELECT COUNT(*) as count FROM conversations")
        total_interactions = cursor.fetchone()['count']

        # Unresolved issues
        cursor.execute("SELECT COUNT(*) as count FROM conversations WHERE resolved = 0")
        unresolved = cursor.fetchone()['count']

        # Knowledge base entry count
        cursor.execute("SELECT COUNT(*) as count FROM knowledge_base WHERE active = 1")
        kb_count = cursor.fetchone()['count']

        return {
            "total_users": total_users,
            "total_interactions": total_interactions,
            "unresolved_issues": unresolved,
            "knowledge_base_entries": kb_count,
            "avg_interactions_per_user": total_interactions / total_users if total_users > 0 else 0
        }

    def clear_memory(self, user_id: str) -> None:
        """Delete all conversations for a user"""
        cursor = self.conn.cursor()
        cursor.execute("DELETE FROM conversations WHERE user_id = ?", (user_id,))
        self.conn.commit()

    def close(self) -> None:
        """Close the database connection"""
        if self.conn:
            self.conn.close()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()
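
Finally, a sketch of the context-manager protocol and the maintenance helpers defined at the end of the class (same assumed import path):

```python
from mem_llm.memory_db import SQLMemoryManager

with SQLMemoryManager(db_path="memories/memories.db") as memory:
    stats = memory.get_statistics()
    print(stats["total_users"], stats["unresolved_issues"], stats["avg_interactions_per_user"])

    # Drop every stored conversation for one user.
    memory.clear_memory("user-42")
# __exit__() closes the SQLite connection automatically.
```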