mem-llm 1.0.2__py3-none-any.whl → 2.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mem-llm might be problematic. Click here for more details.

Files changed (41) hide show
  1. mem_llm/__init__.py +71 -8
  2. mem_llm/api_server.py +595 -0
  3. mem_llm/base_llm_client.py +201 -0
  4. mem_llm/builtin_tools.py +311 -0
  5. mem_llm/builtin_tools_async.py +170 -0
  6. mem_llm/cli.py +254 -0
  7. mem_llm/clients/__init__.py +22 -0
  8. mem_llm/clients/lmstudio_client.py +393 -0
  9. mem_llm/clients/ollama_client.py +354 -0
  10. mem_llm/config.yaml.example +1 -1
  11. mem_llm/config_from_docs.py +1 -1
  12. mem_llm/config_manager.py +5 -3
  13. mem_llm/conversation_summarizer.py +372 -0
  14. mem_llm/data_export_import.py +640 -0
  15. mem_llm/dynamic_prompt.py +298 -0
  16. mem_llm/llm_client.py +77 -14
  17. mem_llm/llm_client_factory.py +260 -0
  18. mem_llm/logger.py +129 -0
  19. mem_llm/mem_agent.py +1178 -87
  20. mem_llm/memory_db.py +290 -59
  21. mem_llm/memory_manager.py +60 -1
  22. mem_llm/prompt_security.py +304 -0
  23. mem_llm/response_metrics.py +221 -0
  24. mem_llm/retry_handler.py +193 -0
  25. mem_llm/thread_safe_db.py +301 -0
  26. mem_llm/tool_system.py +537 -0
  27. mem_llm/vector_store.py +278 -0
  28. mem_llm/web_launcher.py +129 -0
  29. mem_llm/web_ui/README.md +44 -0
  30. mem_llm/web_ui/__init__.py +7 -0
  31. mem_llm/web_ui/index.html +641 -0
  32. mem_llm/web_ui/memory.html +569 -0
  33. mem_llm/web_ui/metrics.html +75 -0
  34. mem_llm-2.1.0.dist-info/METADATA +753 -0
  35. mem_llm-2.1.0.dist-info/RECORD +40 -0
  36. {mem_llm-1.0.2.dist-info → mem_llm-2.1.0.dist-info}/WHEEL +1 -1
  37. mem_llm-2.1.0.dist-info/entry_points.txt +3 -0
  38. mem_llm/prompt_templates.py +0 -244
  39. mem_llm-1.0.2.dist-info/METADATA +0 -382
  40. mem_llm-1.0.2.dist-info/RECORD +0 -15
  41. {mem_llm-1.0.2.dist-info → mem_llm-2.1.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,193 @@
1
+ """
2
+ Retry Logic with Exponential Backoff
3
+ ====================================
4
+ Robust error handling for LLM API calls and database operations.
5
+ """
6
+
7
import functools
import logging
import time
from typing import Any, Callable, Optional, Tuple, Type
11
+
12
+
13
def exponential_backoff_retry(
    max_retries: int = 3,
    initial_delay: float = 1.0,
    exponential_base: float = 2.0,
    max_delay: float = 60.0,
    exceptions: Tuple[Type[Exception], ...] = (Exception,),
    logger: Optional[logging.Logger] = None
):
    """
    Decorator that retries the wrapped function with exponential backoff.

    The function is invoked up to ``max_retries + 1`` times; the delay before
    retry *k* (0-based) is ``min(initial_delay * exponential_base**k, max_delay)``.

    Args:
        max_retries: Maximum number of retry attempts
        initial_delay: Initial delay in seconds
        exponential_base: Base for exponential calculation
        max_delay: Maximum delay between retries
        exceptions: Tuple of exceptions to catch and retry
        logger: Optional logger for retry information

    Example:
        @exponential_backoff_retry(max_retries=3, initial_delay=1.0)
        def unstable_api_call():
            # Your code here
            pass
    """
    def decorator(func: Callable):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            attempt = 0
            while True:
                try:
                    return func(*args, **kwargs)
                except exceptions as exc:
                    # Retries exhausted: surface the original exception.
                    if attempt >= max_retries:
                        if logger:
                            logger.error(
                                f"Function {func.__name__} failed after {max_retries} retries: {str(exc)}"
                            )
                        raise

                    # Exponentially growing, capped wait before the next try.
                    delay = min(initial_delay * (exponential_base ** attempt), max_delay)

                    if logger:
                        logger.warning(
                            f"Function {func.__name__} failed (attempt {attempt + 1}/{max_retries}), "
                            f"retrying in {delay:.2f}s: {str(exc)}"
                        )

                    time.sleep(delay)
                    attempt += 1

        return wrapper
    return decorator
74
+
75
+
76
class SafeExecutor:
    """Safe execution wrapper with error handling and fallbacks.

    All methods log failures and degrade to a caller-supplied fallback
    instead of raising, so callers can treat them as best-effort.
    """

    def __init__(self, logger: Optional[logging.Logger] = None):
        # Fall back to a module-level logger when none is supplied.
        self.logger = logger or logging.getLogger(__name__)

    def execute_with_fallback(self,
                              primary_func: Callable,
                              fallback_func: Optional[Callable] = None,
                              fallback_value: Any = None,
                              error_message: str = "Operation failed"):
        """
        Execute function with fallback on error.

        Args:
            primary_func: Main function to execute
            fallback_func: Fallback function if primary fails
            fallback_value: Value to return if both fail
                (was annotated with the builtin ``any``; fixed to ``typing.Any``)
            error_message: Error message prefix

        Returns:
            Result from primary_func, fallback_func, or fallback_value
        """
        try:
            return primary_func()
        except Exception as e:
            self.logger.error(f"{error_message}: {str(e)}")

            if fallback_func:
                try:
                    self.logger.info("Attempting fallback function")
                    return fallback_func()
                except Exception as fallback_e:
                    self.logger.error(f"Fallback also failed: {str(fallback_e)}")

            return fallback_value

    def safe_json_parse(self, json_string: str, default: Optional[dict] = None) -> dict:
        """
        Safely parse JSON with fallback.

        Args:
            json_string: JSON string to parse
            default: Default value if parsing fails

        Returns:
            Parsed dict or default (an empty dict when no default is given)
        """
        import json

        try:
            return json.loads(json_string)
        except json.JSONDecodeError as e:
            self.logger.error(f"JSON parse error: {str(e)}")

            # Salvage attempt: extract the outermost {...} span and re-parse.
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are no longer swallowed here.
            try:
                start = json_string.find('{')
                end = json_string.rfind('}')
                if start != -1 and end != -1:
                    partial = json_string[start:end + 1]
                    return json.loads(partial)
            except Exception:
                pass

            return default if default is not None else {}

    def safe_db_operation(self,
                          operation: Callable,
                          operation_name: str = "Database operation",
                          default_value: Any = None):
        """
        Safely execute database operation.

        Args:
            operation: Database operation function
            operation_name: Name for logging
            default_value: Value to return on failure
                (was annotated with the builtin ``any``; fixed to ``typing.Any``)

        Returns:
            Operation result or default_value
        """
        try:
            return operation()
        except Exception as e:
            self.logger.error(f"{operation_name} failed: {str(e)}")
            return default_value
164
+
165
+
166
+ # Connection checker with retry
167
def check_connection_with_retry(url: str,
                                max_retries: int = 3,
                                timeout: int = 5) -> bool:
    """
    Probe a URL, retrying with exponential backoff between failed attempts.

    Args:
        url: URL to check
        max_retries: Maximum retry attempts
        timeout: Request timeout

    Returns:
        True if the URL answered with HTTP 200, False otherwise
    """
    import requests

    attempt = 0
    while attempt < max_retries:
        try:
            if requests.get(url, timeout=timeout).status_code == 200:
                return True
        except Exception:
            # Sleep before every retry except after the final attempt.
            if attempt < max_retries - 1:
                time.sleep(1.0 * (2 ** attempt))
        attempt += 1

    return False
@@ -0,0 +1,301 @@
1
+ """
2
+ Thread-Safe Database Connection Pool
3
+ =====================================
4
+ Provides thread-safe SQLite connections with proper transaction management
5
+ """
6
+
7
+ import sqlite3
8
+ import threading
9
+ from contextlib import contextmanager
10
+ from typing import Optional
11
+ from pathlib import Path
12
+ import queue
13
+ import logging
14
+
15
+
16
class ConnectionPool:
    """Thread-safe SQLite connection pool.

    Connections are created eagerly and handed out via a context manager.
    A thread that already holds a checked-out connection gets the same one
    back (reentrant use), so nested ``get_connection()`` calls on one
    thread share a single connection.
    """

    def __init__(self, db_path: str, pool_size: int = 5):
        """
        Initialize connection pool.

        Args:
            db_path: Path to SQLite database
            pool_size: Maximum number of connections
        """
        self.db_path = Path(db_path)
        self.pool_size = pool_size
        self.pool = queue.Queue(maxsize=pool_size)
        self.local = threading.local()  # per-thread checked-out connection
        self._lock = threading.Lock()
        self.logger = logging.getLogger(__name__)

        # Pre-create connections
        for _ in range(pool_size):
            self.pool.put(self._create_connection())

    def _create_connection(self) -> sqlite3.Connection:
        """Create a new connection with proper settings."""
        conn = sqlite3.connect(
            str(self.db_path),
            check_same_thread=False,
            timeout=30.0,  # 30 second timeout
            isolation_level=None  # Autocommit mode for better concurrency
        )
        conn.row_factory = sqlite3.Row

        # Enable WAL mode and optimizations
        conn.execute("PRAGMA journal_mode=WAL")
        conn.execute("PRAGMA synchronous=NORMAL")
        conn.execute("PRAGMA cache_size=-64000")
        conn.execute("PRAGMA busy_timeout=30000")  # 30 second busy timeout

        return conn

    @contextmanager
    def get_connection(self):
        """
        Get a connection from pool (context manager).

        Usage:
            with pool.get_connection() as conn:
                cursor = conn.cursor()
                cursor.execute("SELECT ...")
        """
        # Reentrant case: this thread already checked out a connection.
        if hasattr(self.local, 'conn') and self.local.conn:
            yield self.local.conn
            return

        conn = None
        try:
            try:
                conn = self.pool.get(timeout=10.0)
            except queue.Empty:
                # Pool exhausted: fall back to a temporary connection.
                # It is registered in self.local like a pooled one and
                # returned/closed in the finally block below.  (The previous
                # version never registered it and leaked it entirely for
                # threads that had not used the pool before.)
                self.logger.error("Connection pool exhausted")
                conn = self._create_connection()
            self.local.conn = conn
            yield conn
        finally:
            self.local.conn = None
            if conn is not None:
                try:
                    self.pool.put_nowait(conn)
                except queue.Full:
                    # Overflow (temporary) connection; close it instead.
                    conn.close()

    @contextmanager
    def transaction(self):
        """
        Execute operations in a transaction.

        Usage:
            with pool.transaction() as conn:
                cursor = conn.cursor()
                cursor.execute("INSERT ...")
                cursor.execute("UPDATE ...")
                # Automatically committed

        Note: must not be nested on the same thread — SQLite rejects BEGIN
        while a transaction is already open on the connection.
        """
        with self.get_connection() as conn:
            try:
                # Explicit transaction; the pool runs in autocommit mode.
                conn.execute("BEGIN IMMEDIATE")
                yield conn
                conn.execute("COMMIT")
            except Exception as e:
                conn.execute("ROLLBACK")
                self.logger.error(f"Transaction rolled back: {e}")
                raise

    def close_all(self):
        """Close all connections currently sitting in the pool."""
        while not self.pool.empty():
            try:
                self.pool.get_nowait().close()
            except queue.Empty:
                break
122
+
123
+
124
class ThreadSafeSQLMemory:
    """Thread-safe wrapper for SQL memory operations.

    All reads/writes go through a ConnectionPool; writes run inside
    explicit transactions.
    """

    def __init__(self, db_path: str = "memories/memories.db", pool_size: int = 5):
        """
        Initialize thread-safe SQL memory.

        Args:
            db_path: Database file path
            pool_size: Connection pool size
        """
        self.db_path = Path(db_path)

        # Ensure directory exists
        self.db_path.parent.mkdir(parents=True, exist_ok=True)

        self.pool = ConnectionPool(str(db_path), pool_size)
        self.logger = logging.getLogger(__name__)
        self._init_database()

    def _init_database(self):
        """Create tables and indexes if they do not exist yet."""
        with self.pool.get_connection() as conn:
            cursor = conn.cursor()

            # User profiles table
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS users (
                    user_id TEXT PRIMARY KEY,
                    name TEXT,
                    first_seen TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    last_interaction TIMESTAMP,
                    preferences TEXT,
                    summary TEXT,
                    metadata TEXT
                )
            """)

            # Conversations table
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS conversations (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    user_id TEXT NOT NULL,
                    timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    user_message TEXT NOT NULL,
                    bot_response TEXT NOT NULL,
                    metadata TEXT,
                    sentiment TEXT,
                    resolved BOOLEAN DEFAULT 0,
                    FOREIGN KEY (user_id) REFERENCES users(user_id)
                )
            """)

            # Indexes for performance
            cursor.execute("""
                CREATE INDEX IF NOT EXISTS idx_user_timestamp
                ON conversations(user_id, timestamp DESC)
            """)

            cursor.execute("""
                CREATE INDEX IF NOT EXISTS idx_resolved
                ON conversations(user_id, resolved)
            """)

            # Knowledge base table
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS knowledge_base (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    category TEXT NOT NULL,
                    question TEXT NOT NULL,
                    answer TEXT NOT NULL,
                    keywords TEXT,
                    priority INTEGER DEFAULT 0,
                    active BOOLEAN DEFAULT 1,
                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                )
            """)

            cursor.execute("""
                CREATE INDEX IF NOT EXISTS idx_category
                ON knowledge_base(category, active)
            """)

            conn.commit()  # no-op under autocommit; kept for clarity

    @staticmethod
    def _upsert_user(cursor, user_id: str, name: Optional[str] = None,
                     metadata: Optional[dict] = None):
        """Insert-or-update a user row on the given cursor.

        Performs no transaction management of its own, so it is safe to
        call from inside an already-open transaction.
        """
        import json

        cursor.execute("""
            INSERT INTO users (user_id, name, metadata)
            VALUES (?, ?, ?)
            ON CONFLICT(user_id) DO UPDATE SET
                name = COALESCE(excluded.name, users.name),
                metadata = COALESCE(excluded.metadata, users.metadata)
        """, (user_id, name, json.dumps(metadata or {})))

    def add_user(self, user_id: str, name: Optional[str] = None,
                 metadata: Optional[dict] = None):
        """Thread-safe user addition (insert-or-update)."""
        with self.pool.transaction() as conn:
            self._upsert_user(conn.cursor(), user_id, name, metadata)

    def add_interaction(self, user_id: str, user_message: str,
                        bot_response: str, metadata: Optional[dict] = None,
                        resolved: bool = False) -> int:
        """Thread-safe interaction addition.

        Args:
            user_id: Owner of the conversation
            user_message: User side of the exchange (must be non-empty)
            bot_response: Bot side of the exchange (must be non-empty)
            metadata: Optional metadata dict, stored as JSON
            resolved: Whether the interaction is marked resolved

        Returns:
            Rowid of the inserted conversation row.

        Raises:
            ValueError: If either message is empty or None.
        """
        import json

        if not user_message or not bot_response:
            raise ValueError("Messages cannot be None or empty")

        with self.pool.transaction() as conn:
            cursor = conn.cursor()

            # Ensure the user exists using the CURRENT transaction's cursor.
            # Calling self.add_user() here (as the previous version did)
            # opened a nested transaction on the same reentrant connection,
            # which SQLite rejects ("cannot start a transaction within a
            # transaction").
            self._upsert_user(cursor, user_id)

            # Add interaction
            cursor.execute("""
                INSERT INTO conversations
                (user_id, user_message, bot_response, metadata, resolved)
                VALUES (?, ?, ?, ?, ?)
            """, (user_id, user_message, bot_response,
                  json.dumps(metadata or {}), resolved))

            interaction_id = cursor.lastrowid

            # Update last interaction time
            cursor.execute("""
                UPDATE users
                SET last_interaction = CURRENT_TIMESTAMP
                WHERE user_id = ?
            """, (user_id,))

            return interaction_id

    def get_recent_conversations(self, user_id: str, limit: int = 10) -> list:
        """Return the newest `limit` conversations for a user as dicts."""
        with self.pool.get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute("""
                SELECT timestamp, user_message, bot_response, metadata, resolved
                FROM conversations
                WHERE user_id = ?
                ORDER BY timestamp DESC
                LIMIT ?
            """, (user_id, limit))

            return [dict(row) for row in cursor.fetchall()]

    def search_conversations(self, user_id: str, keyword: str) -> list:
        """Substring search over a user's messages and responses (max 100 hits)."""
        with self.pool.get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute("""
                SELECT timestamp, user_message, bot_response, metadata
                FROM conversations
                WHERE user_id = ?
                AND (user_message LIKE ? OR bot_response LIKE ?)
                ORDER BY timestamp DESC
                LIMIT 100
            """, (user_id, f'%{keyword}%', f'%{keyword}%'))

            return [dict(row) for row in cursor.fetchall()]

    def close(self):
        """Close connection pool."""
        self.pool.close_all()

    def __del__(self):
        """Best-effort cleanup on garbage collection."""
        try:
            self.close()
        except Exception:
            # Interpreter may be shutting down; nothing sensible to do.
            pass