mem-llm 1.0.2__py3-none-any.whl → 2.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mem-llm might be problematic; see the registry's advisory page for more details.

Files changed (41)
  1. mem_llm/__init__.py +71 -8
  2. mem_llm/api_server.py +595 -0
  3. mem_llm/base_llm_client.py +201 -0
  4. mem_llm/builtin_tools.py +311 -0
  5. mem_llm/builtin_tools_async.py +170 -0
  6. mem_llm/cli.py +254 -0
  7. mem_llm/clients/__init__.py +22 -0
  8. mem_llm/clients/lmstudio_client.py +393 -0
  9. mem_llm/clients/ollama_client.py +354 -0
  10. mem_llm/config.yaml.example +1 -1
  11. mem_llm/config_from_docs.py +1 -1
  12. mem_llm/config_manager.py +5 -3
  13. mem_llm/conversation_summarizer.py +372 -0
  14. mem_llm/data_export_import.py +640 -0
  15. mem_llm/dynamic_prompt.py +298 -0
  16. mem_llm/llm_client.py +77 -14
  17. mem_llm/llm_client_factory.py +260 -0
  18. mem_llm/logger.py +129 -0
  19. mem_llm/mem_agent.py +1178 -87
  20. mem_llm/memory_db.py +290 -59
  21. mem_llm/memory_manager.py +60 -1
  22. mem_llm/prompt_security.py +304 -0
  23. mem_llm/response_metrics.py +221 -0
  24. mem_llm/retry_handler.py +193 -0
  25. mem_llm/thread_safe_db.py +301 -0
  26. mem_llm/tool_system.py +537 -0
  27. mem_llm/vector_store.py +278 -0
  28. mem_llm/web_launcher.py +129 -0
  29. mem_llm/web_ui/README.md +44 -0
  30. mem_llm/web_ui/__init__.py +7 -0
  31. mem_llm/web_ui/index.html +641 -0
  32. mem_llm/web_ui/memory.html +569 -0
  33. mem_llm/web_ui/metrics.html +75 -0
  34. mem_llm-2.1.0.dist-info/METADATA +753 -0
  35. mem_llm-2.1.0.dist-info/RECORD +40 -0
  36. {mem_llm-1.0.2.dist-info → mem_llm-2.1.0.dist-info}/WHEEL +1 -1
  37. mem_llm-2.1.0.dist-info/entry_points.txt +3 -0
  38. mem_llm/prompt_templates.py +0 -244
  39. mem_llm-1.0.2.dist-info/METADATA +0 -382
  40. mem_llm-1.0.2.dist-info/RECORD +0 -15
  41. {mem_llm-1.0.2.dist-info → mem_llm-2.1.0.dist-info}/top_level.txt +0 -0
mem_llm/memory_db.py CHANGED
@@ -5,28 +5,93 @@ Stores memory data using SQLite - Production-ready
5
5
 
6
6
  import sqlite3
7
7
  import json
8
+ import threading
8
9
  from datetime import datetime
9
10
  from typing import Dict, List, Optional, Tuple
10
11
  from pathlib import Path
12
+ import logging
13
+
14
+ logger = logging.getLogger(__name__)
15
+
16
+ # Optional vector store support
17
+ try:
18
+ from .vector_store import create_vector_store, VectorStore
19
+ VECTOR_STORE_AVAILABLE = True
20
+ except ImportError:
21
+ VECTOR_STORE_AVAILABLE = False
22
+ VectorStore = None
11
23
 
12
24
 
13
25
  class SQLMemoryManager:
14
- """SQLite-based memory management system"""
26
+ """SQLite-based memory management system with thread-safety"""
15
27
 
16
- def __init__(self, db_path: str = "memories.db"):
28
+ def __init__(self, db_path: str = "memories/memories.db",
29
+ enable_vector_search: bool = False,
30
+ vector_store_type: str = "chroma",
31
+ embedding_model: str = "all-MiniLM-L6-v2"):
17
32
  """
18
33
  Args:
19
34
  db_path: SQLite database file path
35
+ enable_vector_search: Enable vector/semantic search (optional)
36
+ vector_store_type: Type of vector store ('chroma', etc.)
37
+ embedding_model: Embedding model name (sentence-transformers)
20
38
  """
21
39
  self.db_path = Path(db_path)
40
+
41
+ # Ensure directory exists
42
+ db_dir = self.db_path.parent
43
+ if not db_dir.exists():
44
+ db_dir.mkdir(parents=True, exist_ok=True)
45
+
22
46
  self.conn = None
47
+ self._lock = threading.RLock() # Reentrant lock for thread safety
23
48
  self._init_database()
49
+
50
+ # Vector store (optional)
51
+ self.enable_vector_search = enable_vector_search
52
+ self.vector_store: Optional[VectorStore] = None
53
+
54
+ if enable_vector_search:
55
+ if not VECTOR_STORE_AVAILABLE:
56
+ logger.warning(
57
+ "Vector search requested but dependencies not available. "
58
+ "Install with: pip install chromadb sentence-transformers"
59
+ )
60
+ self.enable_vector_search = False
61
+ else:
62
+ try:
63
+ persist_dir = str(db_dir / "vector_store")
64
+ self.vector_store = create_vector_store(
65
+ store_type=vector_store_type,
66
+ collection_name="knowledge_base",
67
+ persist_directory=persist_dir,
68
+ embedding_model=embedding_model
69
+ )
70
+ if self.vector_store:
71
+ logger.info(f"Vector search enabled: {vector_store_type}")
72
+ else:
73
+ logger.warning("Failed to initialize vector store, falling back to keyword search")
74
+ self.enable_vector_search = False
75
+ except Exception as e:
76
+ logger.error(f"Error initializing vector store: {e}")
77
+ self.enable_vector_search = False
24
78
 
25
79
  def _init_database(self) -> None:
26
80
  """Create database and tables"""
27
- self.conn = sqlite3.connect(str(self.db_path), check_same_thread=False)
81
+ self.conn = sqlite3.connect(
82
+ str(self.db_path),
83
+ check_same_thread=False,
84
+ timeout=30.0, # 30 second timeout for busy database
85
+ isolation_level=None # Autocommit mode
86
+ )
28
87
  self.conn.row_factory = sqlite3.Row
29
88
 
89
+ # Enable WAL mode for better concurrency
90
+ self.conn.execute("PRAGMA journal_mode=WAL")
91
+ self.conn.execute("PRAGMA synchronous=NORMAL")
92
+ self.conn.execute("PRAGMA cache_size=-64000") # 64MB cache
93
+ self.conn.execute("PRAGMA busy_timeout=30000") # 30 second busy timeout
94
+
30
95
  cursor = self.conn.cursor()
31
96
 
32
97
  # User profiles table
@@ -106,28 +171,28 @@ class SQLMemoryManager:
106
171
  def add_user(self, user_id: str, name: Optional[str] = None,
107
172
  metadata: Optional[Dict] = None) -> None:
108
173
  """
109
- Add new user or update existing
174
+ Add new user or update existing (thread-safe)
110
175
 
111
176
  Args:
112
177
  user_id: User ID
113
178
  name: User name
114
179
  metadata: Additional information
115
180
  """
116
- cursor = self.conn.cursor()
117
- cursor.execute("""
118
- INSERT INTO users (user_id, name, metadata)
119
- VALUES (?, ?, ?)
120
- ON CONFLICT(user_id) DO UPDATE SET
121
- name = COALESCE(excluded.name, users.name),
122
- metadata = COALESCE(excluded.metadata, users.metadata)
123
- """, (user_id, name, json.dumps(metadata or {})))
124
- self.conn.commit()
181
+ with self._lock:
182
+ cursor = self.conn.cursor()
183
+ cursor.execute("""
184
+ INSERT INTO users (user_id, name, metadata)
185
+ VALUES (?, ?, ?)
186
+ ON CONFLICT(user_id) DO UPDATE SET
187
+ name = COALESCE(excluded.name, users.name),
188
+ metadata = COALESCE(excluded.metadata, users.metadata)
189
+ """, (user_id, name, json.dumps(metadata or {})))
125
190
 
126
191
  def add_interaction(self, user_id: str, user_message: str,
127
192
  bot_response: str, metadata: Optional[Dict] = None,
128
193
  resolved: bool = False) -> int:
129
194
  """
130
- Record new interaction
195
+ Record new interaction (thread-safe)
131
196
 
132
197
  Args:
133
198
  user_id: User ID
@@ -139,30 +204,33 @@ class SQLMemoryManager:
139
204
  Returns:
140
205
  Added record ID
141
206
  """
142
- cursor = self.conn.cursor()
143
-
144
- # Create user if not exists
145
- self.add_user(user_id)
146
-
147
- # Record interaction
148
- cursor.execute("""
149
- INSERT INTO conversations
150
- (user_id, user_message, bot_response, metadata, resolved)
151
- VALUES (?, ?, ?, ?, ?)
152
- """, (user_id, user_message, bot_response,
153
- json.dumps(metadata or {}), resolved))
154
-
155
- interaction_id = cursor.lastrowid
156
-
157
- # Update user's last interaction time
158
- cursor.execute("""
159
- UPDATE users
160
- SET last_interaction = CURRENT_TIMESTAMP
161
- WHERE user_id = ?
162
- """, (user_id,))
207
+ if not user_message or not bot_response:
208
+ raise ValueError("user_message and bot_response cannot be None or empty")
163
209
 
164
- self.conn.commit()
165
- return interaction_id
210
+ with self._lock:
211
+ cursor = self.conn.cursor()
212
+
213
+ # Create user if not exists
214
+ self.add_user(user_id)
215
+
216
+ # Record interaction
217
+ cursor.execute("""
218
+ INSERT INTO conversations
219
+ (user_id, user_message, bot_response, metadata, resolved)
220
+ VALUES (?, ?, ?, ?, ?)
221
+ """, (user_id, user_message, bot_response,
222
+ json.dumps(metadata or {}), resolved))
223
+
224
+ interaction_id = cursor.lastrowid
225
+
226
+ # Update user's last interaction time
227
+ cursor.execute("""
228
+ UPDATE users
229
+ SET last_interaction = CURRENT_TIMESTAMP
230
+ WHERE user_id = ?
231
+ """, (user_id,))
232
+
233
+ return interaction_id
166
234
 
167
235
  # Alias for compatibility
168
236
  def add_conversation(self, user_id: str, user_message: str, bot_response: str, metadata: Optional[Dict] = None) -> int:
@@ -171,7 +239,7 @@ class SQLMemoryManager:
171
239
 
172
240
  def get_recent_conversations(self, user_id: str, limit: int = 10) -> List[Dict]:
173
241
  """
174
- Kullanıcının son konuşmalarını getirir
242
+ Kullanıcının son konuşmalarını getirir (thread-safe)
175
243
 
176
244
  Args:
177
245
  user_id: Kullanıcı kimliği
@@ -180,21 +248,22 @@ class SQLMemoryManager:
180
248
  Returns:
181
249
  Konuşmalar listesi
182
250
  """
183
- cursor = self.conn.cursor()
184
- cursor.execute("""
185
- SELECT timestamp, user_message, bot_response, metadata, resolved
186
- FROM conversations
187
- WHERE user_id = ?
188
- ORDER BY timestamp DESC
189
- LIMIT ?
190
- """, (user_id, limit))
191
-
192
- rows = cursor.fetchall()
193
- return [dict(row) for row in rows]
251
+ with self._lock:
252
+ cursor = self.conn.cursor()
253
+ cursor.execute("""
254
+ SELECT timestamp, user_message, bot_response, metadata, resolved
255
+ FROM conversations
256
+ WHERE user_id = ?
257
+ ORDER BY timestamp DESC
258
+ LIMIT ?
259
+ """, (user_id, limit))
260
+
261
+ rows = cursor.fetchall()
262
+ return [dict(row) for row in rows]
194
263
 
195
264
  def search_conversations(self, user_id: str, keyword: str) -> List[Dict]:
196
265
  """
197
- Konuşmalarda anahtar kelime arar
266
+ Konuşmalarda anahtar kelime arar (thread-safe)
198
267
 
199
268
  Args:
200
269
  user_id: Kullanıcı kimliği
@@ -289,46 +358,208 @@ class SQLMemoryManager:
289
358
  """, (category, question, answer,
290
359
  json.dumps(keywords or []), priority))
291
360
 
361
+ kb_id = cursor.lastrowid
292
362
  self.conn.commit()
293
- return cursor.lastrowid
363
+
364
+ # Sync to vector store if enabled
365
+ if self.enable_vector_search and self.vector_store:
366
+ try:
367
+ self._sync_to_vector_store(kb_id)
368
+ except Exception as e:
369
+ logger.warning(f"Failed to sync KB entry to vector store: {e}")
370
+
371
+ return kb_id
294
372
 
295
373
  def search_knowledge(self, query: str, category: Optional[str] = None,
296
- limit: int = 5) -> List[Dict]:
374
+ limit: int = 5, use_vector_search: Optional[bool] = None) -> List[Dict]:
297
375
  """
298
- Bilgi bankasında arama yapar
376
+ Bilgi bankasında arama yapar (keyword matching veya semantic search)
299
377
 
300
378
  Args:
301
379
  query: Arama sorgusu
302
380
  category: Kategori filtresi (opsiyonel)
303
381
  limit: Maksimum sonuç sayısı
382
+ use_vector_search: Force vector search (None = auto-detect)
304
383
 
305
384
  Returns:
306
385
  Bulunan kayıtlar
307
386
  """
387
+ # Use vector search if enabled and available
388
+ if use_vector_search is None:
389
+ use_vector_search = self.enable_vector_search
390
+
391
+ if use_vector_search and self.vector_store:
392
+ return self._vector_search(query, category, limit)
393
+ else:
394
+ return self._keyword_search(query, category, limit)
395
+
396
+ def _keyword_search(self, query: str, category: Optional[str] = None,
397
+ limit: int = 5) -> List[Dict]:
398
+ """Traditional keyword-based search"""
308
399
  cursor = self.conn.cursor()
309
400
 
401
+ # Extract important keywords from query (remove question words)
402
+ import re
403
+ stopwords = ['ne', 'kadar', 'nedir', 'nasıl', 'için', 'mı', 'mi', 'mu', 'mü',
404
+ 'what', 'how', 'when', 'where', 'is', 'are', 'the', 'a', 'an']
405
+
406
+ # Clean query and extract keywords
407
+ query_lower = query.lower()
408
+ words = re.findall(r'\w+', query_lower)
409
+ keywords = [w for w in words if w not in stopwords and len(w) > 2]
410
+
411
+ # If no keywords, use original query
412
+ if not keywords:
413
+ keywords = [query_lower]
414
+
415
+ # Build search conditions for each keyword
416
+ conditions = []
417
+ params = []
418
+
419
+ for keyword in keywords[:5]: # Max 5 keywords
420
+ conditions.append("(question LIKE ? OR answer LIKE ? OR keywords LIKE ?)")
421
+ params.extend([f"%{keyword}%", f"%{keyword}%", f"%{keyword}%"])
422
+
423
+ where_clause = " OR ".join(conditions) if conditions else "1=1"
424
+
310
425
  if category:
311
- cursor.execute("""
426
+ sql = f"""
312
427
  SELECT category, question, answer, priority
313
428
  FROM knowledge_base
314
429
  WHERE active = 1
315
430
  AND category = ?
316
- AND (question LIKE ? OR answer LIKE ? OR keywords LIKE ?)
431
+ AND ({where_clause})
317
432
  ORDER BY priority DESC, id DESC
318
433
  LIMIT ?
319
- """, (category, f"%{query}%", f"%{query}%", f"%{query}%", limit))
434
+ """
435
+ cursor.execute(sql, [category] + params + [limit])
320
436
  else:
321
- cursor.execute("""
437
+ sql = f"""
322
438
  SELECT category, question, answer, priority
323
439
  FROM knowledge_base
324
440
  WHERE active = 1
325
- AND (question LIKE ? OR answer LIKE ? OR keywords LIKE ?)
441
+ AND ({where_clause})
326
442
  ORDER BY priority DESC, id DESC
327
443
  LIMIT ?
328
- """, (f"%{query}%", f"%{query}%", f"%{query}%", limit))
444
+ """
445
+ cursor.execute(sql, params + [limit])
329
446
 
330
447
  return [dict(row) for row in cursor.fetchall()]
331
448
 
449
+ def _vector_search(self, query: str, category: Optional[str] = None,
450
+ limit: int = 5) -> List[Dict]:
451
+ """Vector-based semantic search"""
452
+ if not self.vector_store:
453
+ return []
454
+
455
+ # Prepare metadata filter
456
+ filter_metadata = None
457
+ if category:
458
+ filter_metadata = {"category": category}
459
+
460
+ # Search in vector store
461
+ vector_results = self.vector_store.search(
462
+ query=query,
463
+ limit=limit * 2, # Get more results to filter by category if needed
464
+ filter_metadata=filter_metadata
465
+ )
466
+
467
+ # Map vector results back to KB format
468
+ results = []
469
+ for result in vector_results[:limit]:
470
+ # Extract metadata
471
+ metadata = result.get('metadata', {})
472
+
473
+ results.append({
474
+ 'category': metadata.get('category', ''),
475
+ 'question': metadata.get('question', ''),
476
+ 'answer': result.get('text', ''),
477
+ 'priority': metadata.get('priority', 0),
478
+ 'score': result.get('score', 0.0), # Similarity score
479
+ 'vector_search': True
480
+ })
481
+
482
+ return results
483
+
484
+ def _sync_to_vector_store(self, kb_id: int) -> None:
485
+ """Sync a single KB entry to vector store"""
486
+ if not self.vector_store:
487
+ return
488
+
489
+ cursor = self.conn.cursor()
490
+ cursor.execute("""
491
+ SELECT id, category, question, answer, keywords, priority
492
+ FROM knowledge_base
493
+ WHERE id = ?
494
+ """, (kb_id,))
495
+
496
+ row = cursor.fetchone()
497
+ if row:
498
+ doc = {
499
+ 'id': str(row['id']),
500
+ 'text': f"{row['question']}\n{row['answer']}", # Combine for better search
501
+ 'metadata': {
502
+ 'category': row['category'],
503
+ 'question': row['question'],
504
+ 'answer': row['answer'],
505
+ 'keywords': row['keywords'],
506
+ 'priority': row['priority'],
507
+ 'kb_id': row['id']
508
+ }
509
+ }
510
+ self.vector_store.add_documents([doc])
511
+
512
+ def sync_all_kb_to_vector_store(self) -> int:
513
+ """
514
+ Sync all existing KB entries to vector store
515
+
516
+ Returns:
517
+ Number of entries synced
518
+ """
519
+ if not self.vector_store:
520
+ return 0
521
+
522
+ cursor = self.conn.cursor()
523
+ cursor.execute("""
524
+ SELECT id, category, question, answer, keywords, priority
525
+ FROM knowledge_base
526
+ WHERE active = 1
527
+ """)
528
+
529
+ rows = cursor.fetchall()
530
+ documents = []
531
+
532
+ for row in rows:
533
+ doc = {
534
+ 'id': str(row['id']),
535
+ 'text': f"{row['question']}\n{row['answer']}",
536
+ 'metadata': {
537
+ 'category': row['category'],
538
+ 'question': row['question'],
539
+ 'answer': row['answer'],
540
+ 'keywords': row['keywords'],
541
+ 'priority': row['priority'],
542
+ 'kb_id': row['id']
543
+ }
544
+ }
545
+ documents.append(doc)
546
+
547
+ if documents:
548
+ try:
549
+ # Add in batches for better performance
550
+ batch_size = 100
551
+ for i in range(0, len(documents), batch_size):
552
+ batch = documents[i:i + batch_size]
553
+ self.vector_store.add_documents(batch)
554
+ logger.debug(f"Synced {len(batch)} KB entries to vector store")
555
+
556
+ logger.info(f"Synced {len(documents)} KB entries to vector store")
557
+ except Exception as e:
558
+ logger.error(f"Error syncing KB to vector store: {e}")
559
+ return 0
560
+
561
+ return len(documents)
562
+
332
563
  def get_statistics(self) -> Dict:
333
564
  """
334
565
  Genel istatistikleri döndürür
mem_llm/memory_manager.py CHANGED
@@ -43,7 +43,16 @@ class MemoryManager:
43
43
  with open(user_file, 'r', encoding='utf-8') as f:
44
44
  data = json.load(f)
45
45
  self.conversations[user_id] = data.get('conversations', [])
46
- self.user_profiles[user_id] = data.get('profile', {})
46
+ profile = data.get('profile', {})
47
+
48
+ # Parse preferences if it's a JSON string (legacy format)
49
+ if isinstance(profile.get('preferences'), str):
50
+ try:
51
+ profile['preferences'] = json.loads(profile['preferences'])
52
+ except:
53
+ profile['preferences'] = {}
54
+
55
+ self.user_profiles[user_id] = profile
47
56
  return data
48
57
  else:
49
58
  # Create empty memory for new user
@@ -259,4 +268,54 @@ class MemoryManager:
259
268
  self.load_memory(user_id)
260
269
 
261
270
  return self.user_profiles.get(user_id)
271
+
272
+ def update_user_profile(self, user_id: str, updates: Dict) -> None:
273
+ """
274
+ Update user profile (SQL-compatible alias)
275
+
276
+ Args:
277
+ user_id: User ID
278
+ updates: Fields to update
279
+ """
280
+ return self.update_profile(user_id, updates)
281
+
282
+ def add_user(self, user_id: str, name: Optional[str] = None, metadata: Optional[Dict] = None) -> None:
283
+ """
284
+ Add or update user (SQL-compatible method)
285
+
286
+ Args:
287
+ user_id: User ID
288
+ name: User name (optional)
289
+ metadata: Additional metadata (optional)
290
+ """
291
+ self.load_memory(user_id)
292
+ if name and 'name' not in self.user_profiles[user_id]:
293
+ self.user_profiles[user_id]['name'] = name
294
+ if metadata:
295
+ self.user_profiles[user_id].update(metadata)
296
+ self.save_memory(user_id)
297
+
298
+ def get_statistics(self) -> Dict:
299
+ """
300
+ Get general statistics (SQL-compatible method)
301
+
302
+ Returns:
303
+ Statistics dictionary
304
+ """
305
+ all_users = list(self.memory_dir.glob("*.json"))
306
+ total_interactions = 0
307
+
308
+ for user_file in all_users:
309
+ try:
310
+ with open(user_file, 'r', encoding='utf-8') as f:
311
+ data = json.load(f)
312
+ total_interactions += len(data.get('conversations', []))
313
+ except:
314
+ pass
315
+
316
+ return {
317
+ 'total_users': len(all_users),
318
+ 'total_interactions': total_interactions,
319
+ 'knowledge_base_entries': 0 # JSON doesn't have KB
320
+ }
262
321