memorisdk-2.0.0-py3-none-any.whl → memorisdk-2.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of memorisdk might be problematic.
- memori/__init__.py +3 -3
- memori/agents/conscious_agent.py +289 -77
- memori/agents/memory_agent.py +19 -9
- memori/agents/retrieval_agent.py +138 -63
- memori/config/manager.py +7 -7
- memori/config/memory_manager.py +25 -25
- memori/config/settings.py +13 -6
- memori/core/conversation.py +15 -15
- memori/core/database.py +14 -13
- memori/core/memory.py +438 -123
- memori/core/providers.py +25 -25
- memori/database/__init__.py +11 -0
- memori/database/adapters/__init__.py +11 -0
- memori/database/adapters/mongodb_adapter.py +739 -0
- memori/database/adapters/mysql_adapter.py +8 -8
- memori/database/adapters/postgresql_adapter.py +6 -6
- memori/database/adapters/sqlite_adapter.py +6 -6
- memori/database/auto_creator.py +8 -9
- memori/database/connection_utils.py +5 -5
- memori/database/connectors/__init__.py +11 -0
- memori/database/connectors/base_connector.py +18 -19
- memori/database/connectors/mongodb_connector.py +527 -0
- memori/database/connectors/mysql_connector.py +13 -15
- memori/database/connectors/postgres_connector.py +12 -12
- memori/database/connectors/sqlite_connector.py +11 -11
- memori/database/models.py +2 -2
- memori/database/mongodb_manager.py +1402 -0
- memori/database/queries/base_queries.py +3 -4
- memori/database/queries/chat_queries.py +3 -5
- memori/database/queries/entity_queries.py +3 -5
- memori/database/queries/memory_queries.py +3 -5
- memori/database/query_translator.py +11 -11
- memori/database/schema_generators/__init__.py +11 -0
- memori/database/schema_generators/mongodb_schema_generator.py +666 -0
- memori/database/schema_generators/mysql_schema_generator.py +2 -4
- memori/database/search/__init__.py +11 -0
- memori/database/search/mongodb_search_adapter.py +653 -0
- memori/database/search/mysql_search_adapter.py +8 -8
- memori/database/search/sqlite_search_adapter.py +6 -6
- memori/database/search_service.py +218 -66
- memori/database/sqlalchemy_manager.py +72 -25
- memori/integrations/__init__.py +1 -1
- memori/integrations/anthropic_integration.py +1 -3
- memori/integrations/litellm_integration.py +23 -6
- memori/integrations/openai_integration.py +31 -3
- memori/tools/memory_tool.py +104 -13
- memori/utils/exceptions.py +58 -58
- memori/utils/helpers.py +11 -12
- memori/utils/input_validator.py +10 -12
- memori/utils/logging.py +4 -4
- memori/utils/pydantic_models.py +57 -57
- memori/utils/query_builder.py +20 -20
- memori/utils/security_audit.py +28 -28
- memori/utils/security_integration.py +9 -9
- memori/utils/transaction_manager.py +20 -19
- memori/utils/validators.py +6 -6
- {memorisdk-2.0.0.dist-info → memorisdk-2.1.0.dist-info}/METADATA +36 -20
- memorisdk-2.1.0.dist-info/RECORD +71 -0
- memori/scripts/llm_text.py +0 -50
- memorisdk-2.0.0.dist-info/RECORD +0 -67
- {memorisdk-2.0.0.dist-info → memorisdk-2.1.0.dist-info}/WHEEL +0 -0
- {memorisdk-2.0.0.dist-info → memorisdk-2.1.0.dist-info}/licenses/LICENSE +0 -0
- {memorisdk-2.0.0.dist-info → memorisdk-2.1.0.dist-info}/top_level.txt +0 -0
memori/database/connectors/mongodb_connector.py +527 -0

@@ -0,0 +1,527 @@
+"""
+MongoDB connector for Memori
+Provides MongoDB-specific implementation of the database connector interface
+"""
+
+from __future__ import annotations
+
+import json
+from typing import TYPE_CHECKING, Any
+from urllib.parse import urlparse
+
+from loguru import logger
+
+if TYPE_CHECKING:
+    from pymongo import MongoClient
+    from pymongo.collection import Collection
+    from pymongo.database import Database
+
+try:
+    import pymongo  # noqa: F401
+    from pymongo import MongoClient as _MongoClient
+    from pymongo.collection import Collection as _Collection
+    from pymongo.database import Database as _Database
+    from pymongo.errors import ConnectionFailure, OperationFailure  # noqa: F401
+
+    PYMONGO_AVAILABLE = True
+    MongoClient = _MongoClient
+    Collection = _Collection
+    Database = _Database
+except ImportError:
+    PYMONGO_AVAILABLE = False
+    MongoClient = None  # type: ignore
+    Collection = None  # type: ignore
+    Database = None  # type: ignore
+
+from ...utils.exceptions import DatabaseError
+from .base_connector import BaseDatabaseConnector, DatabaseType
+
+
+class MongoDBConnector(BaseDatabaseConnector):
+    """MongoDB database connector with Atlas Vector Search support"""
+
+    def __init__(self, connection_config):
+        """Initialize MongoDB connector"""
+        if not PYMONGO_AVAILABLE:
+            raise DatabaseError(
+                "pymongo is required for MongoDB support. Install with: pip install pymongo"
+            )
+
+        if isinstance(connection_config, str):
+            self.connection_string = connection_config
+            self.connection_config = {"connection_string": connection_config}
+        else:
+            self.connection_string = connection_config.get(
+                "connection_string", "mongodb://localhost:27017"
+            )
+
+        # Parse MongoDB connection string
+        self._parse_connection_string()
+
+        # MongoDB-specific settings
+        self.client = None
+        self.database = None
+        self._collections = {}
+
+        super().__init__(connection_config)
+
+    def _detect_database_type(self) -> DatabaseType:
+        """Detect database type from connection config"""
+        return DatabaseType.MONGODB
+
+    def _parse_connection_string(self):
+        """Parse MongoDB connection string to extract components"""
+        try:
+            parsed = urlparse(self.connection_string)
+            self.host = parsed.hostname or "localhost"
+            self.port = parsed.port or 27017
+            self.database_name = parsed.path.lstrip("/") or "memori"
+            self.username = parsed.username
+            self.password = parsed.password
+
+            # Extract query parameters
+            self.options = {}
+            if parsed.query:
+                params = parsed.query.split("&")
+                for param in params:
+                    if "=" in param:
+                        key, value = param.split("=", 1)
+                        self.options[key] = value
+
+        except Exception as e:
+            logger.warning(f"Failed to parse MongoDB connection string: {e}")
+            # Set defaults
+            self.host = "localhost"
+            self.port = 27017
+            self.database_name = "memori"
+            self.username = None
+            self.password = None
+            self.options = {}
+
+    def get_connection(self) -> MongoClient:
+        """Get MongoDB client connection"""
+        if self.client is None:
+            try:
+                # Create MongoDB client with appropriate options
+                client_options = {
+                    "serverSelectionTimeoutMS": 5000,  # 5 second timeout
+                    "connectTimeoutMS": 10000,  # 10 second connect timeout
+                    "socketTimeoutMS": 30000,  # 30 second socket timeout
+                    "maxPoolSize": 50,  # Connection pool size
+                    "retryWrites": True,  # Enable retryable writes
+                }
+
+                # Add any additional options from connection string
+                client_options.update(self.options)
+
+                self.client = MongoClient(self.connection_string, **client_options)
+
+                # Test connection
+                self.client.admin.command("ping")
+                logger.info(f"Connected to MongoDB at {self.host}:{self.port}")
+
+            except Exception as e:
+                raise DatabaseError(f"Failed to connect to MongoDB: {e}")
+
+        return self.client
+
+    def get_database(self) -> Database:
+        """Get MongoDB database"""
+        if self.database is None:
+            client = self.get_connection()
+            self.database = client[self.database_name]
+        return self.database
+
+    def get_collection(self, collection_name: str) -> Collection:
+        """Get MongoDB collection with caching"""
+        if collection_name not in self._collections:
+            database = self.get_database()
+            self._collections[collection_name] = database[collection_name]
+        return self._collections[collection_name]
+
+    def execute_query(
+        self, query: str, params: list[Any] | None = None
+    ) -> list[dict[str, Any]]:
+        """
+        Execute a query-like operation in MongoDB
+        Note: MongoDB doesn't use SQL, so this is adapted for MongoDB operations
+        """
+        try:
+            # Parse the "query" as a JSON operation for MongoDB
+            # This is a compatibility layer for the base interface
+            if isinstance(query, str) and query.strip().startswith("{"):
+                # Treat as MongoDB operation
+                operation = json.loads(query)
+                collection_name = operation.get("collection", "memories")
+                operation_type = operation.get("operation", "find")
+                filter_doc = operation.get("filter", {})
+                options = operation.get("options", {})
+
+                collection = self.get_collection(collection_name)
+
+                if operation_type == "find":
+                    cursor = collection.find(filter_doc, **options)
+                    results = list(cursor)
+                    # Convert ObjectId to string for JSON serialization
+                    for result in results:
+                        if "_id" in result:
+                            result["_id"] = str(result["_id"])
+                    return results
+                elif operation_type == "aggregate":
+                    pipeline = operation.get("pipeline", [])
+                    cursor = collection.aggregate(pipeline, **options)
+                    results = list(cursor)
+                    # Convert ObjectId to string for JSON serialization
+                    for result in results:
+                        if "_id" in result:
+                            result["_id"] = str(result["_id"])
+                    return results
+                else:
+                    raise DatabaseError(
+                        f"Unsupported MongoDB operation: {operation_type}"
+                    )
+            else:
+                # Fallback: treat as a collection name and return all documents
+                collection = self.get_collection(query or "memories")
+                cursor = collection.find().limit(100)  # Limit for safety
+                results = list(cursor)
+                # Convert ObjectId to string for JSON serialization
+                for result in results:
+                    if "_id" in result:
+                        result["_id"] = str(result["_id"])
+                return results
+
+        except Exception as e:
+            raise DatabaseError(f"Failed to execute MongoDB query: {e}")
+
+    def execute_insert(self, query: str, params: list[Any] | None = None) -> str:
+        """Execute an insert operation and return the inserted document ID"""
+        try:
+            if isinstance(query, str) and query.strip().startswith("{"):
+                # Parse as MongoDB insert operation
+                operation = json.loads(query)
+                collection_name = operation.get("collection", "memories")
+                document = operation.get("document", {})
+
+                collection = self.get_collection(collection_name)
+                result = collection.insert_one(document)
+                return str(result.inserted_id)
+            else:
+                raise DatabaseError("Invalid insert operation format for MongoDB")
+
+        except Exception as e:
+            raise DatabaseError(f"Failed to execute MongoDB insert: {e}")
+
+    def execute_update(self, query: str, params: list[Any] | None = None) -> int:
+        """Execute an update operation and return number of modified documents"""
+        try:
+            if isinstance(query, str) and query.strip().startswith("{"):
+                # Parse as MongoDB update operation
+                operation = json.loads(query)
+                collection_name = operation.get("collection", "memories")
+                filter_doc = operation.get("filter", {})
+                update_doc = operation.get("update", {})
+                options = operation.get("options", {})
+
+                collection = self.get_collection(collection_name)
+
+                if operation.get("update_many", False):
+                    result = collection.update_many(filter_doc, update_doc, **options)
+                else:
+                    result = collection.update_one(filter_doc, update_doc, **options)
+
+                return result.modified_count
+            else:
+                raise DatabaseError("Invalid update operation format for MongoDB")
+
+        except Exception as e:
+            raise DatabaseError(f"Failed to execute MongoDB update: {e}")
+
+    def execute_delete(self, query: str, params: list[Any] | None = None) -> int:
+        """Execute a delete operation and return number of deleted documents"""
+        try:
+            if isinstance(query, str) and query.strip().startswith("{"):
+                # Parse as MongoDB delete operation
+                operation = json.loads(query)
+                collection_name = operation.get("collection", "memories")
+                filter_doc = operation.get("filter", {})
+                options = operation.get("options", {})
+
+                collection = self.get_collection(collection_name)
+
+                if operation.get("delete_many", False):
+                    result = collection.delete_many(filter_doc, **options)
+                else:
+                    result = collection.delete_one(filter_doc, **options)
+
+                return result.deleted_count
+            else:
+                raise DatabaseError("Invalid delete operation format for MongoDB")
+
+        except Exception as e:
+            raise DatabaseError(f"Failed to execute MongoDB delete: {e}")
+
+    def execute_transaction(self, queries: list[tuple[str, list[Any] | None]]) -> bool:
+        """Execute multiple operations in a MongoDB transaction"""
+        try:
+            client = self.get_connection()
+
+            # Check if transactions are supported (requires replica set or sharded cluster)
+            try:
+                with client.start_session() as session:
+                    with session.start_transaction():
+                        for query, params in queries:
+                            # Execute each operation within the transaction
+                            if "insert" in query.lower():
+                                self.execute_insert(query, params)
+                            elif "update" in query.lower():
+                                self.execute_update(query, params)
+                            elif "delete" in query.lower():
+                                self.execute_delete(query, params)
+
+                        # Transaction commits automatically if no exception is raised
+                        return True
+
+            except OperationFailure as e:
+                if "Transaction numbers" in str(e):
+                    # Transactions not supported, execute operations individually
+                    logger.warning(
+                        "Transactions not supported, executing operations individually"
+                    )
+                    for query, params in queries:
+                        if "insert" in query.lower():
+                            self.execute_insert(query, params)
+                        elif "update" in query.lower():
+                            self.execute_update(query, params)
+                        elif "delete" in query.lower():
+                            self.execute_delete(query, params)
+                    return True
+                else:
+                    raise
+
+        except Exception as e:
+            logger.error(f"Transaction failed: {e}")
+            return False
+
+    def test_connection(self) -> bool:
+        """Test if the MongoDB connection is working"""
+        try:
+            client = self.get_connection()
+            # Ping the server
+            client.admin.command("ping")
+            return True
+        except Exception as e:
+            logger.error(f"MongoDB connection test failed: {e}")
+            return False
+
+    def initialize_schema(self, schema_sql: str | None = None):
+        """Initialize MongoDB collections and indexes"""
+        try:
+            from ..schema_generators.mongodb_schema_generator import (
+                MongoDBSchemaGenerator,
+            )
+
+            schema_generator = MongoDBSchemaGenerator()
+            database = self.get_database()
+
+            # Create collections with validation rules
+            collections_schema = schema_generator.generate_collections_schema()
+            for collection_name, schema in collections_schema.items():
+                if collection_name not in database.list_collection_names():
+                    # Create collection with validation
+                    database.create_collection(
+                        collection_name,
+                        validator=schema.get("validator"),
+                        validationAction=schema.get("validationAction", "error"),
+                        validationLevel=schema.get("validationLevel", "strict"),
+                    )
+                    logger.info(f"Created MongoDB collection: {collection_name}")
+
+            # Create indexes
+            indexes_schema = schema_generator.generate_indexes_schema()
+            for collection_name, indexes in indexes_schema.items():
+                collection = self.get_collection(collection_name)
+                for index in indexes:
+                    try:
+                        collection.create_index(
+                            index["keys"],
+                            name=index.get("name"),
+                            unique=index.get("unique", False),
+                            sparse=index.get("sparse", False),
+                            background=True,  # Create index in background
+                        )
+                        logger.debug(f"Created index on {collection_name}: {index}")
+                    except Exception as e:
+                        logger.warning(
+                            f"Failed to create index on {collection_name}: {e}"
+                        )
+
+            logger.info("MongoDB schema initialization completed")
+
+        except Exception as e:
+            logger.error(f"Failed to initialize MongoDB schema: {e}")
+            raise DatabaseError(f"Failed to initialize MongoDB schema: {e}")
+
+    def supports_full_text_search(self) -> bool:
+        """Check if MongoDB supports text search (always True for MongoDB)"""
+        return True
+
+    def supports_vector_search(self) -> bool:
+        """Check if MongoDB Atlas Vector Search is available"""
+        try:
+            # Check if this is MongoDB Atlas by looking for Atlas-specific features
+            client = self.get_connection()
+            build_info = client.admin.command("buildInfo")
+
+            # Atlas typically includes specific modules or version patterns
+            # This is a heuristic check - in production you might want to configure this explicitly
+            build_info.get("version", "")
+            modules = build_info.get("modules", [])
+
+            # Check if vector search is available (Atlas feature)
+            # This is a simplified check - Atlas vector search availability can be complex
+            return "atlas" in str(modules).lower() or self._is_atlas_connection()
+
+        except Exception:
+            return False
+
+    def _is_atlas_connection(self) -> bool:
+        """Heuristic to detect if this is an Atlas connection"""
+        return (
+            "mongodb.net" in self.connection_string.lower()
+            or "atlas" in self.connection_string.lower()
+            or "cluster" in self.connection_string.lower()
+        )
+
+    def create_full_text_index(
+        self, table: str, columns: list[str], index_name: str
+    ) -> str:
+        """Create MongoDB text index"""
+        try:
+            collection = self.get_collection(table)
+
+            # Create text index specification
+            index_spec = {}
+            for column in columns:
+                index_spec[column] = "text"
+
+            collection.create_index(
+                list(index_spec.items()), name=index_name, background=True
+            )
+
+            return f"Created text index '{index_name}' on collection '{table}'"
+
+        except Exception as e:
+            raise DatabaseError(f"Failed to create text index: {e}")
+
+    def create_vector_index(
+        self,
+        collection_name: str,
+        vector_field: str,
+        dimensions: int,
+        similarity: str = "cosine",
+        index_name: str | None = None,
+    ) -> str:
+        """Create MongoDB Atlas Vector Search index"""
+        try:
+            if not self.supports_vector_search():
+                raise DatabaseError(
+                    "Vector search is not supported in this MongoDB deployment"
+                )
+
+            self.get_collection(collection_name)
+
+            # Vector search index specification for MongoDB Atlas
+
+            index_name = index_name or f"{vector_field}_vector_index"
+
+            # Note: Vector search indexes are typically created via Atlas UI or Atlas Admin API
+            # This is a placeholder for the actual implementation
+            logger.warning(
+                "Vector search indexes should be created via MongoDB Atlas UI or Admin API"
+            )
+
+            return f"Vector index specification created for '{collection_name}.{vector_field}'"
+
+        except Exception as e:
+            raise DatabaseError(f"Failed to create vector index: {e}")
+
+    def get_database_info(self) -> dict[str, Any]:
+        """Get MongoDB database information and capabilities"""
+        try:
+            client = self.get_connection()
+            database = self.get_database()
+
+            info = {}
+
+            # Server information
+            server_info = client.server_info()
+            info["version"] = server_info.get("version", "unknown")
+            info["database_type"] = self.database_type.value
+            info["database_name"] = self.database_name
+            info["connection_string"] = (
+                self.connection_string.replace(
+                    f"{self.username}:{self.password}@", "***:***@"
+                )
+                if self.username and self.password
+                else self.connection_string
+            )
+
+            # Database stats
+            try:
+                stats = database.command("dbStats")
+                info["collections_count"] = stats.get("collections", 0)
+                info["data_size"] = stats.get("dataSize", 0)
+                info["storage_size"] = stats.get("storageSize", 0)
+                info["indexes_count"] = stats.get("indexes", 0)
+            except Exception:
+                pass
+
+            # Capabilities
+            info["full_text_search_support"] = True
+            info["vector_search_support"] = self.supports_vector_search()
+            info["transactions_support"] = self._check_transactions_support()
+
+            # Replica set information
+            try:
+                replica_config = client.admin.command("replSetGetStatus")
+                info["replica_set"] = replica_config.get("set", "Not in replica set")
+            except Exception:
+                info["replica_set"] = "Standalone"
+
+            return info
+
+        except Exception as e:
+            logger.warning(f"Could not get MongoDB database info: {e}")
+            return {
+                "database_type": self.database_type.value,
+                "version": "unknown",
+                "full_text_search_support": True,
+                "vector_search_support": False,
+                "error": str(e),
+            }
+
+    def _check_transactions_support(self) -> bool:
+        """Check if MongoDB deployment supports transactions"""
+        try:
+            client = self.get_connection()
+            with client.start_session() as session:
+                with session.start_transaction():
+                    # Just test if we can start a transaction
+                    pass
+            return True
+        except Exception:
+            return False
+
+    def close(self):
+        """Close MongoDB connection"""
+        if self.client:
+            self.client.close()
+            self.client = None
+            self.database = None
+            self._collections.clear()
+            logger.info("MongoDB connection closed")
+
+    def __del__(self):
+        """Cleanup when connector is destroyed"""
+        self.close()
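The execute_* methods above accept a JSON-encoded operation document in place of SQL. The following is a minimal sketch of that compatibility layer; the connection string, collection name, and document fields are illustrative examples (not values taken from the package) and it assumes a reachable MongoDB instance with pymongo installed.

import json

from memori.database.connectors.mongodb_connector import MongoDBConnector

# Illustrative connection string; the connector also accepts a config dict
# with a "connection_string" key.
connector = MongoDBConnector("mongodb://localhost:27017/memori")

# Insert: {"collection": ..., "document": ...} -> inserted ObjectId as a string
inserted_id = connector.execute_insert(
    json.dumps(
        {
            "collection": "memories",
            "document": {"content": "example memory", "category": "example"},
        }
    )
)

# Find: {"collection": ..., "operation": "find", "filter": ..., "options": ...}
rows = connector.execute_query(
    json.dumps(
        {
            "collection": "memories",
            "operation": "find",
            "filter": {"category": "example"},
            "options": {"limit": 10},
        }
    )
)
print(inserted_id, len(rows))
connector.close()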
memori/database/connectors/mysql_connector.py +13 -15

@@ -3,7 +3,7 @@ MySQL connector for Memori v2.0
 Implements BaseDatabaseConnector interface with FULLTEXT search support
 """
 
-from typing import Any
+from typing import Any
 from urllib.parse import urlparse
 
 from loguru import logger
@@ -15,7 +15,7 @@ from .base_connector import BaseDatabaseConnector, DatabaseType
 class MySQLConnector(BaseDatabaseConnector):
     """MySQL database connector with FULLTEXT search support"""
 
-    def __init__(self, connection_config:
+    def __init__(self, connection_config: dict[str, Any]):
         self._mysql = None
         self._setup_mysql()
         super().__init__(connection_config)
@@ -36,7 +36,7 @@ class MySQLConnector(BaseDatabaseConnector):
         """Detect database type from connection config"""
         return DatabaseType.MYSQL
 
-    def _parse_connection_string(self, connection_string: str) ->
+    def _parse_connection_string(self, connection_string: str) -> dict[str, Any]:
         """Parse MySQL connection string into connection config"""
         if connection_string.startswith("mysql://"):
             parsed = urlparse(connection_string)
@@ -92,8 +92,8 @@ class MySQLConnector(BaseDatabaseConnector):
             raise DatabaseError(f"Failed to connect to MySQL database: {e}")
 
     def execute_query(
-        self, query: str, params:
-    ) ->
+        self, query: str, params: list[Any] | None = None
+    ) -> list[dict[str, Any]]:
         """Execute a query and return results"""
         try:
             with self.get_connection() as conn:
@@ -112,7 +112,7 @@ class MySQLConnector(BaseDatabaseConnector):
         except Exception as e:
             raise DatabaseError(f"Failed to execute query: {e}")
 
-    def execute_insert(self, query: str, params:
+    def execute_insert(self, query: str, params: list[Any] | None = None) -> str:
         """Execute an insert query and return the inserted row ID"""
         try:
             with self.get_connection() as conn:
@@ -132,7 +132,7 @@ class MySQLConnector(BaseDatabaseConnector):
         except Exception as e:
             raise DatabaseError(f"Failed to execute insert: {e}")
 
-    def execute_update(self, query: str, params:
+    def execute_update(self, query: str, params: list[Any] | None = None) -> int:
         """Execute an update query and return number of affected rows"""
         try:
             with self.get_connection() as conn:
@@ -152,7 +152,7 @@ class MySQLConnector(BaseDatabaseConnector):
         except Exception as e:
             raise DatabaseError(f"Failed to execute update: {e}")
 
-    def execute_delete(self, query: str, params:
+    def execute_delete(self, query: str, params: list[Any] | None = None) -> int:
         """Execute a delete query and return number of affected rows"""
         try:
             with self.get_connection() as conn:
@@ -172,9 +172,7 @@ class MySQLConnector(BaseDatabaseConnector):
         except Exception as e:
             raise DatabaseError(f"Failed to execute delete: {e}")
 
-    def execute_transaction(
-        self, queries: List[Tuple[str, Optional[List[Any]]]]
-    ) -> bool:
+    def execute_transaction(self, queries: list[tuple[str, list[Any] | None]]) -> bool:
         """Execute multiple queries in a transaction"""
         try:
             with self.get_connection() as conn:
@@ -215,7 +213,7 @@ class MySQLConnector(BaseDatabaseConnector):
             logger.error(f"MySQL connection test failed: {e}")
             return False
 
-    def initialize_schema(self, schema_sql:
+    def initialize_schema(self, schema_sql: str | None = None):
         """Initialize database schema"""
         try:
             if not schema_sql:
@@ -256,7 +254,7 @@ class MySQLConnector(BaseDatabaseConnector):
             logger.error(f"Failed to initialize MySQL schema: {e}")
             raise DatabaseError(f"Failed to initialize MySQL schema: {e}")
 
-    def _split_mysql_statements(self, schema_sql: str) ->
+    def _split_mysql_statements(self, schema_sql: str) -> list[str]:
         """Split SQL schema into individual statements handling MySQL syntax"""
         statements = []
         current_statement = []
@@ -320,13 +318,13 @@ class MySQLConnector(BaseDatabaseConnector):
        return False
 
     def create_full_text_index(
-        self, table: str, columns:
+        self, table: str, columns: list[str], index_name: str
     ) -> str:
         """Create MySQL FULLTEXT index"""
         columns_str = ", ".join(columns)
         return f"ALTER TABLE {table} ADD FULLTEXT INDEX {index_name} ({columns_str})"
 
-    def get_database_info(self) ->
+    def get_database_info(self) -> dict[str, Any]:
         """Get MySQL database information and capabilities"""
         try:
             with self.get_connection() as conn:
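The MySQL changes are signature modernization (builtin generics and PEP 604 unions); the new execute_transaction signature takes a list of (SQL, params) tuples. A hedged call-site sketch follows; the table name, SQL text, and %s placeholder style are illustrative assumptions, and `connector` stands for an already-constructed MySQLConnector.

# Assumes `connector` is an initialized MySQLConnector; SQL below is illustrative.
queries = [
    ("INSERT INTO example_table (content) VALUES (%s)", ["remember this"]),
    ("DELETE FROM example_table WHERE content = %s", ["forget this"]),
]
ok = connector.execute_transaction(queries)  # returns a bool per the signature above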