mem-llm 1.0.10__py3-none-any.whl → 1.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of mem-llm has been flagged as potentially problematic; see the registry page for details.
- mem_llm/__init__.py +21 -2
- mem_llm/llm_client.py +27 -8
- mem_llm/logger.py +129 -0
- mem_llm/mem_agent.py +47 -4
- mem_llm/memory_db.py +66 -49
- mem_llm/prompt_security.py +304 -0
- mem_llm/retry_handler.py +193 -0
- mem_llm/thread_safe_db.py +295 -0
- mem_llm-1.1.0.dist-info/METADATA +528 -0
- mem_llm-1.1.0.dist-info/RECORD +21 -0
- mem_llm-1.0.10.dist-info/METADATA +0 -1028
- mem_llm-1.0.10.dist-info/RECORD +0 -17
- {mem_llm-1.0.10.dist-info → mem_llm-1.1.0.dist-info}/WHEEL +0 -0
- {mem_llm-1.0.10.dist-info → mem_llm-1.1.0.dist-info}/entry_points.txt +0 -0
- {mem_llm-1.0.10.dist-info → mem_llm-1.1.0.dist-info}/top_level.txt +0 -0
mem_llm/thread_safe_db.py

@@ -0,0 +1,295 @@
+"""
+Thread-Safe Database Connection Pool
+=====================================
+Provides thread-safe SQLite connections with proper transaction management
+"""
+
+import sqlite3
+import threading
+from contextlib import contextmanager
+from typing import Optional
+from pathlib import Path
+import queue
+import logging
+
+
+class ConnectionPool:
+    """Thread-safe SQLite connection pool"""
+
+    def __init__(self, db_path: str, pool_size: int = 5):
+        """
+        Initialize connection pool
+
+        Args:
+            db_path: Path to SQLite database
+            pool_size: Maximum number of connections
+        """
+        self.db_path = Path(db_path)
+        self.pool_size = pool_size
+        self.pool = queue.Queue(maxsize=pool_size)
+        self.local = threading.local()
+        self._lock = threading.Lock()
+        self.logger = logging.getLogger(__name__)
+
+        # Pre-create connections
+        for _ in range(pool_size):
+            conn = self._create_connection()
+            self.pool.put(conn)
+
+    def _create_connection(self) -> sqlite3.Connection:
+        """Create a new connection with proper settings"""
+        conn = sqlite3.connect(
+            str(self.db_path),
+            check_same_thread=False,
+            timeout=30.0,  # 30 second timeout
+            isolation_level=None  # Autocommit mode for better concurrency
+        )
+        conn.row_factory = sqlite3.Row
+
+        # Enable WAL mode and optimizations
+        conn.execute("PRAGMA journal_mode=WAL")
+        conn.execute("PRAGMA synchronous=NORMAL")
+        conn.execute("PRAGMA cache_size=-64000")
+        conn.execute("PRAGMA busy_timeout=30000")  # 30 second busy timeout
+
+        return conn
+
+    @contextmanager
+    def get_connection(self):
+        """
+        Get a connection from pool (context manager)
+
+        Usage:
+            with pool.get_connection() as conn:
+                cursor = conn.cursor()
+                cursor.execute("SELECT ...")
+        """
+        # Check if thread already has a connection
+        if hasattr(self.local, 'conn') and self.local.conn:
+            yield self.local.conn
+            return
+
+        # Get connection from pool
+        conn = None
+        try:
+            conn = self.pool.get(timeout=10.0)
+            self.local.conn = conn
+            yield conn
+        except queue.Empty:
+            self.logger.error("Connection pool exhausted")
+            # Create temporary connection
+            conn = self._create_connection()
+            yield conn
+        finally:
+            # Return to pool
+            if conn and hasattr(self.local, 'conn'):
+                self.local.conn = None
+                try:
+                    self.pool.put_nowait(conn)
+                except queue.Full:
+                    conn.close()
+
+    @contextmanager
+    def transaction(self):
+        """
+        Execute operations in a transaction
+
+        Usage:
+            with pool.transaction() as conn:
+                cursor = conn.cursor()
+                cursor.execute("INSERT ...")
+                cursor.execute("UPDATE ...")
+                # Automatically committed
+        """
+        with self.get_connection() as conn:
+            try:
+                conn.execute("BEGIN IMMEDIATE")
+                yield conn
+                conn.execute("COMMIT")
+            except Exception as e:
+                conn.execute("ROLLBACK")
+                self.logger.error(f"Transaction rolled back: {e}")
+                raise
+
+    def close_all(self):
+        """Close all connections in pool"""
+        while not self.pool.empty():
+            try:
+                conn = self.pool.get_nowait()
+                conn.close()
+            except queue.Empty:
+                break
+
+
+class ThreadSafeSQLMemory:
+    """Thread-safe wrapper for SQL memory operations"""
+
+    def __init__(self, db_path: str = "memories.db", pool_size: int = 5):
+        """
+        Initialize thread-safe SQL memory
+
+        Args:
+            db_path: Database file path
+            pool_size: Connection pool size
+        """
+        self.db_path = Path(db_path)
+        self.pool = ConnectionPool(str(db_path), pool_size)
+        self.logger = logging.getLogger(__name__)
+        self._init_database()
+
+    def _init_database(self):
+        """Initialize database schema"""
+        with self.pool.get_connection() as conn:
+            cursor = conn.cursor()
+
+            # User profiles table
+            cursor.execute("""
+                CREATE TABLE IF NOT EXISTS users (
+                    user_id TEXT PRIMARY KEY,
+                    name TEXT,
+                    first_seen TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                    last_interaction TIMESTAMP,
+                    preferences TEXT,
+                    summary TEXT,
+                    metadata TEXT
+                )
+            """)
+
+            # Conversations table
+            cursor.execute("""
+                CREATE TABLE IF NOT EXISTS conversations (
+                    id INTEGER PRIMARY KEY AUTOINCREMENT,
+                    user_id TEXT NOT NULL,
+                    timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                    user_message TEXT NOT NULL,
+                    bot_response TEXT NOT NULL,
+                    metadata TEXT,
+                    sentiment TEXT,
+                    resolved BOOLEAN DEFAULT 0,
+                    FOREIGN KEY (user_id) REFERENCES users(user_id)
+                )
+            """)
+
+            # Indexes for performance
+            cursor.execute("""
+                CREATE INDEX IF NOT EXISTS idx_user_timestamp
+                ON conversations(user_id, timestamp DESC)
+            """)
+
+            cursor.execute("""
+                CREATE INDEX IF NOT EXISTS idx_resolved
+                ON conversations(user_id, resolved)
+            """)
+
+            # Knowledge base table
+            cursor.execute("""
+                CREATE TABLE IF NOT EXISTS knowledge_base (
+                    id INTEGER PRIMARY KEY AUTOINCREMENT,
+                    category TEXT NOT NULL,
+                    question TEXT NOT NULL,
+                    answer TEXT NOT NULL,
+                    keywords TEXT,
+                    priority INTEGER DEFAULT 0,
+                    active BOOLEAN DEFAULT 1,
+                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+                )
+            """)
+
+            cursor.execute("""
+                CREATE INDEX IF NOT EXISTS idx_category
+                ON knowledge_base(category, active)
+            """)
+
+            conn.commit()
+
+    def add_user(self, user_id: str, name: Optional[str] = None,
+                 metadata: Optional[dict] = None):
+        """Thread-safe user addition"""
+        import json
+
+        with self.pool.transaction() as conn:
+            cursor = conn.cursor()
+            cursor.execute("""
+                INSERT INTO users (user_id, name, metadata)
+                VALUES (?, ?, ?)
+                ON CONFLICT(user_id) DO UPDATE SET
+                    name = COALESCE(excluded.name, users.name),
+                    metadata = COALESCE(excluded.metadata, users.metadata)
+            """, (user_id, name, json.dumps(metadata or {})))
+
+    def add_interaction(self, user_id: str, user_message: str,
+                        bot_response: str, metadata: Optional[dict] = None,
+                        resolved: bool = False) -> int:
+        """Thread-safe interaction addition"""
+        import json
+
+        if not user_message or not bot_response:
+            raise ValueError("Messages cannot be None or empty")
+
+        with self.pool.transaction() as conn:
+            cursor = conn.cursor()
+
+            # Ensure user exists
+            self.add_user(user_id)
+
+            # Add interaction
+            cursor.execute("""
+                INSERT INTO conversations
+                (user_id, user_message, bot_response, metadata, resolved)
+                VALUES (?, ?, ?, ?, ?)
+            """, (user_id, user_message, bot_response,
+                  json.dumps(metadata or {}), resolved))
+
+            interaction_id = cursor.lastrowid
+
+            # Update last interaction time
+            cursor.execute("""
+                UPDATE users
+                SET last_interaction = CURRENT_TIMESTAMP
+                WHERE user_id = ?
+            """, (user_id,))
+
+            return interaction_id
+
+    def get_recent_conversations(self, user_id: str, limit: int = 10) -> list:
+        """Thread-safe conversation retrieval"""
+        with self.pool.get_connection() as conn:
+            cursor = conn.cursor()
+            cursor.execute("""
+                SELECT timestamp, user_message, bot_response, metadata, resolved
+                FROM conversations
+                WHERE user_id = ?
+                ORDER BY timestamp DESC
+                LIMIT ?
+            """, (user_id, limit))
+
+            rows = cursor.fetchall()
+            return [dict(row) for row in rows]
+
+    def search_conversations(self, user_id: str, keyword: str) -> list:
+        """Thread-safe conversation search"""
+        with self.pool.get_connection() as conn:
+            cursor = conn.cursor()
+            cursor.execute("""
+                SELECT timestamp, user_message, bot_response, metadata
+                FROM conversations
+                WHERE user_id = ?
+                AND (user_message LIKE ? OR bot_response LIKE ?)
+                ORDER BY timestamp DESC
+                LIMIT 100
+            """, (user_id, f'%{keyword}%', f'%{keyword}%'))
+
+            rows = cursor.fetchall()
+            return [dict(row) for row in rows]
+
+    def close(self):
+        """Close connection pool"""
+        self.pool.close_all()
+
+    def __del__(self):
+        """Cleanup on deletion"""
+        try:
+            self.close()
+        except:
+            pass
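For orientation, the sketch below shows how the two classes added in thread_safe_db.py are meant to be used, following the usage notes in their docstrings above. It is a minimal illustration only: the database filenames and the sample user data are placeholders, not values from the package.

from mem_llm.thread_safe_db import ConnectionPool, ThreadSafeSQLMemory

# Low-level pool: connections are borrowed via context managers, and
# transaction() wraps the work in BEGIN IMMEDIATE / COMMIT (ROLLBACK on error).
pool = ConnectionPool("example_pool.db", pool_size=2)   # filename is illustrative
with pool.transaction() as conn:
    conn.execute("CREATE TABLE IF NOT EXISTS notes (body TEXT)")
    conn.execute("INSERT INTO notes (body) VALUES (?)", ("hello",))
pool.close_all()

# Higher-level wrapper: creates the users/conversations/knowledge_base schema
# on first use and exposes thread-safe helpers.
memory = ThreadSafeSQLMemory(db_path="example_memories.db", pool_size=2)
memory.add_user("user-123", name="Alice")               # sample data, illustrative
recent = memory.get_recent_conversations("user-123", limit=5)
print(recent)  # [] for a fresh database
memory.close()

Both helpers hand back the connections configured in _create_connection (sqlite3.Row factory, WAL mode, 30-second busy timeout), which is why the wrapper can convert result rows directly to dicts.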