mcp-sqlite-memory-bank 1.5.1__py3-none-any.whl → 1.6.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mcp_sqlite_memory_bank/__init__.py +3 -3
- mcp_sqlite_memory_bank/__main__.py +8 -7
- mcp_sqlite_memory_bank/database.py +166 -48
- mcp_sqlite_memory_bank/prompts.py +64 -48
- mcp_sqlite_memory_bank/resources.py +218 -144
- mcp_sqlite_memory_bank/semantic.py +25 -13
- mcp_sqlite_memory_bank/server.py +174 -32
- mcp_sqlite_memory_bank/tools/__init__.py +26 -29
- mcp_sqlite_memory_bank/tools/analytics.py +179 -130
- mcp_sqlite_memory_bank/tools/basic.py +417 -4
- mcp_sqlite_memory_bank/tools/discovery.py +549 -360
- mcp_sqlite_memory_bank/tools/search.py +147 -71
- mcp_sqlite_memory_bank/types.py +6 -1
- mcp_sqlite_memory_bank/utils.py +154 -105
- {mcp_sqlite_memory_bank-1.5.1.dist-info → mcp_sqlite_memory_bank-1.6.2.dist-info}/METADATA +54 -6
- mcp_sqlite_memory_bank-1.6.2.dist-info/RECORD +21 -0
- mcp_sqlite_memory_bank-1.5.1.dist-info/RECORD +0 -21
- {mcp_sqlite_memory_bank-1.5.1.dist-info → mcp_sqlite_memory_bank-1.6.2.dist-info}/WHEEL +0 -0
- {mcp_sqlite_memory_bank-1.5.1.dist-info → mcp_sqlite_memory_bank-1.6.2.dist-info}/entry_points.txt +0 -0
- {mcp_sqlite_memory_bank-1.5.1.dist-info → mcp_sqlite_memory_bank-1.6.2.dist-info}/licenses/LICENSE +0 -0
- {mcp_sqlite_memory_bank-1.5.1.dist-info → mcp_sqlite_memory_bank-1.6.2.dist-info}/top_level.txt +0 -0
mcp_sqlite_memory_bank/tools/basic.py

@@ -8,7 +8,7 @@ data operations, and core functionality.
 from typing import Any, Dict, List, Optional, cast

 from ..database import get_database
-from ..types import
+from ..types import ToolResponse
 from ..utils import catch_errors


@@ -19,6 +19,7 @@ def create_table(
 ) -> ToolResponse:
     """Create a new table in the SQLite memory bank."""
     from .. import server
+
     return cast(ToolResponse, get_database(server.DB_PATH).create_table(table_name, columns))


@@ -26,6 +27,7 @@ def create_table(
 def list_tables() -> ToolResponse:
     """List all tables in the SQLite memory bank."""
     from .. import server
+
     return cast(ToolResponse, get_database(server.DB_PATH).list_tables())


@@ -33,6 +35,7 @@ def list_tables() -> ToolResponse:
 def describe_table(table_name: str) -> ToolResponse:
     """Get detailed schema information for a table."""
     from .. import server
+
     return cast(ToolResponse, get_database(server.DB_PATH).describe_table(table_name))


@@ -40,6 +43,7 @@ def describe_table(table_name: str) -> ToolResponse:
 def drop_table(table_name: str) -> ToolResponse:
     """Drop (delete) a table from the SQLite memory bank."""
     from .. import server
+
     return cast(ToolResponse, get_database(server.DB_PATH).drop_table(table_name))


@@ -47,6 +51,7 @@ def drop_table(table_name: str) -> ToolResponse:
 def rename_table(old_name: str, new_name: str) -> ToolResponse:
     """Rename a table in the SQLite memory bank."""
     from .. import server
+
     return cast(ToolResponse, get_database(server.DB_PATH).rename_table(old_name, new_name))


@@ -57,6 +62,7 @@ def create_row(
 ) -> ToolResponse:
     """Insert a new row into any table in the SQLite Memory Bank."""
     from .. import server
+
     return cast(ToolResponse, get_database(server.DB_PATH).insert_row(table_name, data))


@@ -67,6 +73,7 @@ def read_rows(
 ) -> ToolResponse:
     """Read rows from any table in the SQLite memory bank, with optional filtering."""
     from .. import server
+
     return cast(ToolResponse, get_database(server.DB_PATH).read_rows(table_name, where))


@@ -78,6 +85,7 @@ def update_rows(
 ) -> ToolResponse:
     """Update rows in any table in the SQLite Memory Bank, matching the WHERE clause."""
     from .. import server
+
     return cast(ToolResponse, get_database(server.DB_PATH).update_rows(table_name, data, where))


@@ -88,6 +96,7 @@ def delete_rows(
 ) -> ToolResponse:
     """Delete rows from any table in the SQLite Memory Bank, matching the WHERE clause."""
     from .. import server
+
     return cast(ToolResponse, get_database(server.DB_PATH).delete_rows(table_name, where))


@@ -100,13 +109,417 @@ def run_select_query(
 ) -> ToolResponse:
     """Run a safe SELECT query on a table in the SQLite memory bank."""
     from .. import server
-
-
-
+
+    return cast(
+        ToolResponse,
+        get_database(server.DB_PATH).select_query(table_name, columns, where, limit),
+    )


 @catch_errors
 def list_all_columns() -> ToolResponse:
     """List all columns for all tables in the SQLite memory bank."""
     from .. import server
+
     return cast(ToolResponse, get_database(server.DB_PATH).list_all_columns())
+
+
+@catch_errors
+def upsert_memory(table_name: str, data: Dict[str, Any], match_columns: List[str]) -> ToolResponse:
+    """
+    Smart memory upsert: Update existing records or create new ones based on matching columns.
+
+    This is the preferred method for memory management as it prevents duplicates
+    and maintains data consistency.
+
+    Args:
+        table_name (str): Table to upsert into
+        data (Dict[str, Any]): Data to upsert
+        match_columns (List[str]): Columns to use for finding existing records
+
+    Returns:
+        ToolResponse: For updates: {"success": True, "action": "updated", "id": rowid, "updated_fields": {...}}
+                      For creates: {"success": True, "action": "created", "id": rowid}
+    """
+    import os
+
+    db_path = os.environ.get("DB_PATH", "./test.db")
+    db = get_database(db_path)
+
+    try:
+        # Build WHERE clause for matching
+        where_conditions = {col: data[col] for col in match_columns if col in data}
+
+        if not where_conditions:
+            # No match columns provided, just insert
+            return cast(ToolResponse, db.insert_row(table_name, data))
+
+        # Check for existing records
+        existing_result = db.read_rows(table_name, where_conditions)
+        if not existing_result.get("success"):
+            return cast(ToolResponse, existing_result)
+
+        existing_rows = existing_result.get("rows", [])
+
+        if existing_rows:
+            # Update the first matching record
+            row_id = existing_rows[0].get("id")
+            if row_id:
+                # Get the original record to compare changes
+                original_record = existing_rows[0]
+
+                update_result = db.update_rows(table_name, data, {"id": row_id})
+                if update_result.get("success"):
+                    # Determine which fields were actually updated
+                    updated_fields = {}
+                    for key, new_value in data.items():
+                        original_value = original_record.get(key)
+                        if original_value != new_value:
+                            updated_fields[key] = {
+                                "old": original_value,
+                                "new": new_value
+                            }
+
+                    return cast(
+                        ToolResponse,
+                        {
+                            "success": True,
+                            "action": "updated",
+                            "id": row_id,
+                            "rows_affected": update_result.get("rows_affected", 1),
+                            "updated_fields": updated_fields,
+                        },
+                    )
+                return cast(ToolResponse, update_result)
+
+        # No existing record found, create new one
+        insert_result = db.insert_row(table_name, data)
+        if insert_result.get("success"):
+            return cast(
+                ToolResponse,
+                {"success": True, "action": "created", "id": insert_result.get("id")},
+            )
+        return cast(ToolResponse, insert_result)
+
+    except Exception as e:
+        return cast(
+            ToolResponse,
+            {
+                "success": False,
+                "error": f"Memory upsert failed: {str(e)}",
+                "category": "UPSERT_ERROR",
+                "details": {"table": table_name, "match_columns": match_columns},
+            },
+        )
+
+
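A usage sketch, not part of the diff: assuming these hunks belong to mcp_sqlite_memory_bank/tools/basic.py (the +417 -4 entry above), the new upsert_memory tool can be called directly as below. The "notes" table and its "title"/"content" columns are hypothetical, and the function resolves its database from the DB_PATH environment variable (default "./test.db"), as the added code shows.

    # Hypothetical example; table and column names are illustrative only.
    from mcp_sqlite_memory_bank.tools.basic import upsert_memory

    # Updates the existing row whose "title" matches, otherwise inserts a new row.
    result = upsert_memory(
        "notes",
        {"title": "meeting-2024-01-15", "content": "Discussed the 1.6 release"},
        match_columns=["title"],
    )
    # result resembles {"success": True, "action": "created" or "updated", "id": <rowid>, ...}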
+@catch_errors
+def batch_create_memories(
+    table_name: str,
+    data_list: List[Dict[str, Any]],
+    match_columns: Optional[List[str]] = None,
+    use_upsert: bool = True,
+) -> ToolResponse:
+    """
+    Efficiently create multiple memory records in a single operation.
+
+    Supports both batch insert (fast) and batch upsert (prevents duplicates).
+
+    Args:
+        table_name (str): Table to insert records into
+        data_list (List[Dict[str, Any]]): List of records to create
+        match_columns (Optional[List[str]]): Columns to use for duplicate detection (if use_upsert=True)
+        use_upsert (bool): Whether to use upsert logic to prevent duplicates (default: True)
+
+    Returns:
+        ToolResponse: {"success": True, "created": int, "updated": int, "failed": int, "results": List}
+    """
+    if not data_list:
+        return cast(
+            ToolResponse,
+            {
+                "success": True,
+                "created": 0,
+                "updated": 0,
+                "failed": 0,
+                "results": [],
+                "message": "No data provided",
+            },
+        )
+
+    import os
+
+    db_path = os.environ.get("DB_PATH", "./test.db")
+    db = get_database(db_path)
+
+    created_count = 0
+    updated_count = 0
+    failed_count = 0
+    results = []
+
+    try:
+        for i, data in enumerate(data_list):
+            try:
+                if use_upsert and match_columns:
+                    # Use upsert logic to prevent duplicates
+                    result = upsert_memory(table_name, data, match_columns)
+                    if result.get("success"):
+                        action = result.get("action", "unknown")
+                        if action == "created":
+                            created_count += 1
+                        elif action == "updated":
+                            updated_count += 1
+                        results.append(
+                            {
+                                "index": i,
+                                "action": action,
+                                "id": result.get("id"),
+                                "success": True,
+                            }
+                        )
+                    else:
+                        failed_count += 1
+                        results.append(
+                            {
+                                "index": i,
+                                "action": "failed",
+                                "error": result.get("error", "Unknown error"),
+                                "success": False,
+                            }
+                        )
+                else:
+                    # Simple batch insert (faster but no duplicate prevention)
+                    insert_result = db.insert_row(table_name, data)
+                    if insert_result.get("success"):
+                        created_count += 1
+                        results.append(
+                            {
+                                "index": i,
+                                "action": "created",
+                                "id": insert_result.get("id"),
+                                "success": True,
+                            }
+                        )
+                    else:
+                        failed_count += 1
+                        results.append(
+                            {
+                                "index": i,
+                                "action": "failed",
+                                "error": insert_result.get("error", "Unknown error"),
+                                "success": False,
+                            }
+                        )
+
+            except Exception as e:
+                failed_count += 1
+                results.append({"index": i, "action": "failed", "error": str(e), "success": False})
+
+        return cast(
+            ToolResponse,
+            {
+                "success": True,
+                "created": created_count,
+                "updated": updated_count,
+                "failed": failed_count,
+                "total_processed": len(data_list),
+                "results": results,
+                "message": f"Processed {len(data_list)} records: {created_count} created, {updated_count} updated, {failed_count} failed",
+            },
+        )
+
+    except Exception as e:
+        return cast(
+            ToolResponse,
+            {
+                "success": False,
+                "error": f"Batch operation failed: {str(e)}",
+                "category": "BATCH_CREATE_ERROR",
+                "details": {"table": table_name, "records_count": len(data_list)},
+            },
+        )
+
+
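Another sketch, also not part of the diff: batch_create_memories as defined above, against the same hypothetical "notes" table. With use_upsert=True (the default) and match_columns supplied, each record is routed through upsert_memory, so existing rows are updated rather than re-inserted.

    # Hypothetical example; table and column names are illustrative only.
    from mcp_sqlite_memory_bank.tools.basic import batch_create_memories

    records = [
        {"title": "note-1", "content": "first"},
        {"title": "note-2", "content": "second"},
    ]
    summary = batch_create_memories("notes", records, match_columns=["title"])
    # summary resembles {"success": True, "created": int, "updated": int, "failed": int, "results": [...]}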
+@catch_errors
+def batch_delete_memories(
+    table_name: str, where_conditions: List[Dict[str, Any]], match_all: bool = False
+) -> ToolResponse:
+    """
+    Efficiently delete multiple memory records in a single operation.
+
+    Supports both individual record deletion and bulk deletion with shared conditions.
+
+    Args:
+        table_name (str): Table to delete records from
+        where_conditions (List[Dict[str, Any]]): List of WHERE conditions for deletion
+        match_all (bool): If True, delete records matching ALL conditions; if False, delete records matching ANY condition
+
+    Returns:
+        ToolResponse: {"success": True, "deleted": int, "failed": int, "results": List}
+    """
+    if not where_conditions:
+        return cast(
+            ToolResponse,
+            {
+                "success": True,
+                "deleted": 0,
+                "failed": 0,
+                "results": [],
+                "message": "No deletion conditions provided",
+            },
+        )
+
+    import os
+
+    db_path = os.environ.get("DB_PATH", "./test.db")
+    db = get_database(db_path)
+
+    deleted_count = 0
+    failed_count = 0
+    results = []
+
+    try:
+        if match_all and len(where_conditions) == 1:
+            # Single condition - use direct delete
+            condition = where_conditions[0]
+            try:
+                delete_result = db.delete_rows(table_name, condition)
+                if delete_result.get("success"):
+                    rows_affected = delete_result.get("rows_affected", 0)
+                    deleted_count += rows_affected
+                    results.append(
+                        {
+                            "condition_index": 0,
+                            "condition": condition,
+                            "action": "deleted",
+                            "rows_affected": rows_affected,
+                            "success": True,
+                        }
+                    )
+                else:
+                    failed_count += 1
+                    results.append(
+                        {
+                            "condition_index": 0,
+                            "condition": condition,
+                            "action": "failed",
+                            "error": delete_result.get("error", "Unknown error"),
+                            "success": False,
+                        }
+                    )
+            except Exception as e:
+                failed_count += 1
+                results.append(
+                    {
+                        "condition_index": 0,
+                        "condition": condition,
+                        "action": "failed",
+                        "error": str(e),
+                        "success": False,
+                    }
+                )
+
+        elif match_all:
+            # Multiple conditions with AND logic - combine conditions
+            combined_condition = {}
+            for condition in where_conditions:
+                combined_condition.update(condition)
+
+            try:
+                delete_result = db.delete_rows(table_name, combined_condition)
+                if delete_result.get("success"):
+                    rows_affected = delete_result.get("rows_affected", 0)
+                    deleted_count += rows_affected
+                    results.append(
+                        {
+                            "combined_conditions": where_conditions,
+                            "action": "deleted",
+                            "rows_affected": rows_affected,
+                            "success": True,
+                        }
+                    )
+                else:
+                    failed_count += 1
+                    results.append(
+                        {
+                            "combined_conditions": where_conditions,
+                            "action": "failed",
+                            "error": delete_result.get("error", "Unknown error"),
+                            "success": False,
+                        }
+                    )
+            except Exception as e:
+                failed_count += 1
+                results.append(
+                    {
+                        "combined_conditions": where_conditions,
+                        "action": "failed",
+                        "error": str(e),
+                        "success": False,
+                    }
+                )
+        else:
+            # Multiple conditions with OR logic - delete each separately
+            for i, condition in enumerate(where_conditions):
+                try:
+                    delete_result = db.delete_rows(table_name, condition)
+                    if delete_result.get("success"):
+                        rows_affected = delete_result.get("rows_affected", 0)
+                        deleted_count += rows_affected
+                        results.append(
+                            {
+                                "condition_index": i,
+                                "condition": condition,
+                                "action": "deleted",
+                                "rows_affected": rows_affected,
+                                "success": True,
+                            }
+                        )
+                    else:
+                        failed_count += 1
+                        results.append(
+                            {
+                                "condition_index": i,
+                                "condition": condition,
+                                "action": "failed",
+                                "error": delete_result.get("error", "Unknown error"),
+                                "success": False,
+                            }
+                        )
+                except Exception as e:
+                    failed_count += 1
+                    results.append(
+                        {
+                            "condition_index": i,
+                            "condition": condition,
+                            "action": "failed",
+                            "error": str(e),
+                            "success": False,
+                        }
+                    )
+
+        return cast(
+            ToolResponse,
+            {
+                "success": True,
+                "deleted": deleted_count,
+                "failed": failed_count,
+                "total_conditions": len(where_conditions),
+                "results": results,
+                "message": f"Processed {len(where_conditions)} deletion conditions: {deleted_count} records deleted, {failed_count} operations failed",
+            },
+        )
+
+    except Exception as e:
+        return cast(
+            ToolResponse,
+            {
+                "success": False,
+                "error": f"Batch deletion failed: {str(e)}",
+                "category": "BATCH_DELETE_ERROR",
+                "details": {
+                    "table": table_name,
+                    "conditions_count": len(where_conditions),
+                },
+            },
+        )
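A final sketch, not part of the diff, for batch_delete_memories added above. With match_all=False (the default), each condition is applied as its own DELETE, giving OR semantics across conditions; with match_all=True the condition dictionaries are merged into a single combined WHERE clause.

    # Hypothetical example; table and column names are illustrative only.
    from mcp_sqlite_memory_bank.tools.basic import batch_delete_memories

    # Deletes rows matching ANY of the listed conditions (OR semantics).
    summary = batch_delete_memories(
        "notes",
        where_conditions=[{"title": "note-1"}, {"title": "note-2"}],
        match_all=False,
    )
    # summary resembles {"success": True, "deleted": int, "failed": int, "results": [...]}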