memorisdk 1.0.2__py3-none-any.whl → 2.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of memorisdk might be problematic. Click here for more details.

Files changed (48) hide show
  1. memori/__init__.py +24 -8
  2. memori/agents/conscious_agent.py +252 -414
  3. memori/agents/memory_agent.py +487 -224
  4. memori/agents/retrieval_agent.py +491 -68
  5. memori/config/memory_manager.py +323 -0
  6. memori/core/conversation.py +393 -0
  7. memori/core/database.py +386 -371
  8. memori/core/memory.py +1683 -532
  9. memori/core/providers.py +217 -0
  10. memori/database/adapters/__init__.py +10 -0
  11. memori/database/adapters/mysql_adapter.py +331 -0
  12. memori/database/adapters/postgresql_adapter.py +291 -0
  13. memori/database/adapters/sqlite_adapter.py +229 -0
  14. memori/database/auto_creator.py +320 -0
  15. memori/database/connection_utils.py +207 -0
  16. memori/database/connectors/base_connector.py +283 -0
  17. memori/database/connectors/mysql_connector.py +240 -18
  18. memori/database/connectors/postgres_connector.py +277 -4
  19. memori/database/connectors/sqlite_connector.py +178 -3
  20. memori/database/models.py +400 -0
  21. memori/database/queries/base_queries.py +1 -1
  22. memori/database/queries/memory_queries.py +91 -2
  23. memori/database/query_translator.py +222 -0
  24. memori/database/schema_generators/__init__.py +7 -0
  25. memori/database/schema_generators/mysql_schema_generator.py +215 -0
  26. memori/database/search/__init__.py +8 -0
  27. memori/database/search/mysql_search_adapter.py +255 -0
  28. memori/database/search/sqlite_search_adapter.py +180 -0
  29. memori/database/search_service.py +700 -0
  30. memori/database/sqlalchemy_manager.py +888 -0
  31. memori/integrations/__init__.py +36 -11
  32. memori/integrations/litellm_integration.py +340 -6
  33. memori/integrations/openai_integration.py +506 -240
  34. memori/tools/memory_tool.py +94 -4
  35. memori/utils/input_validator.py +395 -0
  36. memori/utils/pydantic_models.py +138 -36
  37. memori/utils/query_builder.py +530 -0
  38. memori/utils/security_audit.py +594 -0
  39. memori/utils/security_integration.py +339 -0
  40. memori/utils/transaction_manager.py +547 -0
  41. {memorisdk-1.0.2.dist-info → memorisdk-2.0.1.dist-info}/METADATA +56 -23
  42. memorisdk-2.0.1.dist-info/RECORD +66 -0
  43. memori/scripts/llm_text.py +0 -50
  44. memorisdk-1.0.2.dist-info/RECORD +0 -44
  45. memorisdk-1.0.2.dist-info/entry_points.txt +0 -2
  46. {memorisdk-1.0.2.dist-info → memorisdk-2.0.1.dist-info}/WHEEL +0 -0
  47. {memorisdk-1.0.2.dist-info → memorisdk-2.0.1.dist-info}/licenses/LICENSE +0 -0
  48. {memorisdk-1.0.2.dist-info → memorisdk-2.0.1.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,888 @@
1
+ """
2
+ SQLAlchemy-based database manager for Memori v2.0
3
+ Replaces the existing database.py with cross-database compatibility
4
+ """
5
+
6
+ import importlib.util
7
+ import json
8
+ import ssl
9
+ import uuid
10
+ from datetime import datetime
11
+ from pathlib import Path
12
+ from typing import Any, Dict, List, Optional
13
+ from urllib.parse import parse_qs, urlparse
14
+
15
+ from loguru import logger
16
+ from sqlalchemy import create_engine, func, text
17
+ from sqlalchemy.exc import SQLAlchemyError
18
+ from sqlalchemy.orm import sessionmaker
19
+
20
+ from ..utils.exceptions import DatabaseError
21
+ from ..utils.pydantic_models import (
22
+ ProcessedLongTermMemory,
23
+ )
24
+ from .auto_creator import DatabaseAutoCreator
25
+ from .models import (
26
+ Base,
27
+ ChatHistory,
28
+ LongTermMemory,
29
+ ShortTermMemory,
30
+ )
31
+ from .query_translator import QueryParameterTranslator
32
+ from .search_service import SearchService
33
+
34
+
35
+ class SQLAlchemyDatabaseManager:
36
+ """SQLAlchemy-based database manager with cross-database support"""
37
+
38
    def __init__(
        self, database_connect: str, template: str = "basic", schema_init: bool = True
    ):
        """Create a manager bound to the given connection URL.

        Args:
            database_connect: SQLAlchemy-style connection URL
                (sqlite/mysql/postgresql). May be rewritten by the
                auto-creator if the target database has to be created.
            template: Schema template name (stored; not interpreted here).
            schema_init: Forwarded to DatabaseAutoCreator; controls whether
                the database may be auto-created.
        """
        self.database_connect = database_connect
        self.template = template
        self.schema_init = schema_init

        # Initialize database auto-creator
        self.auto_creator = DatabaseAutoCreator(schema_init)

        # Ensure database exists (create if necessary).
        # NOTE: may return a normalized/modified URL, so re-assign it.
        self.database_connect = self.auto_creator.ensure_database_exists(
            database_connect
        )

        # Parse connection string and create engine
        # (validates drivers and runs a SELECT 1 smoke test; see _create_engine)
        self.engine = self._create_engine(self.database_connect)
        self.database_type = self.engine.dialect.name

        # Create session factory
        self.SessionLocal = sessionmaker(bind=self.engine)

        # Initialize search service
        # (kept None; fresh instances are created per search in _get_search_service)
        self._search_service = None

        # Initialize query parameter translator for cross-database compatibility
        self.query_translator = QueryParameterTranslator(self.database_type)

        logger.info(f"Initialized SQLAlchemy database manager for {self.database_type}")
67
+
68
+ def _validate_database_dependencies(self, database_connect: str):
69
+ """Validate that required database drivers are installed"""
70
+ if database_connect.startswith("mysql:") or database_connect.startswith(
71
+ "mysql+"
72
+ ):
73
+ # Check for MySQL drivers
74
+ mysql_drivers = []
75
+
76
+ if (
77
+ "mysqlconnector" in database_connect
78
+ or "mysql+mysqlconnector" in database_connect
79
+ ):
80
+ if importlib.util.find_spec("mysql.connector") is not None:
81
+ mysql_drivers.append("mysql-connector-python")
82
+
83
+ if "pymysql" in database_connect:
84
+ if importlib.util.find_spec("pymysql") is not None:
85
+ mysql_drivers.append("PyMySQL")
86
+
87
+ # If using generic mysql:// try both drivers
88
+ if database_connect.startswith("mysql://"):
89
+ if importlib.util.find_spec("mysql.connector") is not None:
90
+ mysql_drivers.append("mysql-connector-python")
91
+ if importlib.util.find_spec("pymysql") is not None:
92
+ mysql_drivers.append("PyMySQL")
93
+
94
+ if not mysql_drivers:
95
+ error_msg = (
96
+ "❌ No MySQL driver found. Install one of the following:\n\n"
97
+ "Option 1 (Recommended): pip install mysql-connector-python\n"
98
+ "Option 2: pip install PyMySQL\n"
99
+ "Option 3: pip install memorisdk[mysql]\n\n"
100
+ "Then update your connection string:\n"
101
+ "- For mysql-connector-python: mysql+mysqlconnector://user:pass@host:port/db\n"
102
+ "- For PyMySQL: mysql+pymysql://user:pass@host:port/db"
103
+ )
104
+ raise DatabaseError(error_msg)
105
+
106
+ elif database_connect.startswith("postgresql:") or database_connect.startswith(
107
+ "postgresql+"
108
+ ):
109
+ # Check for PostgreSQL drivers
110
+ if (
111
+ importlib.util.find_spec("psycopg2") is None
112
+ and importlib.util.find_spec("asyncpg") is None
113
+ ):
114
+ error_msg = (
115
+ "❌ No PostgreSQL driver found. Install one of the following:\n\n"
116
+ "Option 1 (Recommended): pip install psycopg2-binary\n"
117
+ "Option 2: pip install memorisdk[postgres]\n\n"
118
+ "Then use connection string: postgresql://user:pass@host:port/db"
119
+ )
120
+ raise DatabaseError(error_msg)
121
+
122
    def _create_engine(self, database_connect: str):
        """Build a SQLAlchemy engine configured for the URL's backend.

        Validates driver availability, applies backend-specific connect
        arguments (SQLite thread flag, MySQL charset/SSL, PostgreSQL pooling),
        and runs a ``SELECT 1`` smoke test before returning the engine.

        Raises:
            DatabaseError: for missing drivers, unsupported URL schemes, or
                any connection/engine-creation failure.
        """
        try:
            # Validate database driver dependencies first
            self._validate_database_dependencies(database_connect)
            # Parse connection string
            if database_connect.startswith("sqlite:"):
                # Ensure directory exists for SQLite
                if ":///" in database_connect:
                    db_path = database_connect.replace("sqlite:///", "")
                    db_dir = Path(db_path).parent
                    db_dir.mkdir(parents=True, exist_ok=True)

                # SQLite-specific configuration
                engine = create_engine(
                    database_connect,
                    json_serializer=json.dumps,
                    json_deserializer=json.loads,
                    echo=False,
                    # SQLite-specific options
                    connect_args={
                        "check_same_thread": False,  # Allow multiple threads
                    },
                )

            elif database_connect.startswith("mysql:") or database_connect.startswith(
                "mysql+"
            ):
                # MySQL-specific configuration
                connect_args = {"charset": "utf8mb4"}

                # Parse URL for SSL parameters
                parsed = urlparse(database_connect)
                if parsed.query:
                    query_params = parse_qs(parsed.query)

                    # Handle SSL parameters for PyMySQL - enforce secure transport
                    if any(key in query_params for key in ["ssl", "ssl_disabled"]):
                        if query_params.get("ssl", ["false"])[0].lower() == "true":
                            # Enable SSL with secure configuration for required secure transport
                            # NOTE(review): check_hostname=False + CERT_NONE encrypts the
                            # link but does NOT verify the server certificate — confirm
                            # this is acceptable for production deployments.
                            connect_args["ssl"] = {
                                "ssl_disabled": False,
                                "check_hostname": False,
                                "verify_mode": ssl.CERT_NONE,
                            }
                            # Also add ssl_disabled=False for PyMySQL
                            connect_args["ssl_disabled"] = False
                        elif (
                            query_params.get("ssl_disabled", ["true"])[0].lower()
                            == "false"
                        ):
                            # Enable SSL with secure configuration for required secure transport
                            # (same settings as the ssl=true branch above)
                            connect_args["ssl"] = {
                                "ssl_disabled": False,
                                "check_hostname": False,
                                "verify_mode": ssl.CERT_NONE,
                            }
                            # Also add ssl_disabled=False for PyMySQL
                            connect_args["ssl_disabled"] = False

                # Different args for different MySQL drivers
                if "pymysql" in database_connect:
                    # PyMySQL-specific arguments
                    connect_args.update(
                        {
                            "charset": "utf8mb4",
                            "autocommit": False,
                        }
                    )
                elif (
                    "mysqlconnector" in database_connect
                    or "mysql+mysqlconnector" in database_connect
                ):
                    # MySQL Connector/Python-specific arguments
                    connect_args.update(
                        {
                            "charset": "utf8mb4",
                            "use_pure": True,  # pure-Python protocol implementation
                        }
                    )

                engine = create_engine(
                    database_connect,
                    json_serializer=json.dumps,
                    json_deserializer=json.loads,
                    echo=False,
                    connect_args=connect_args,
                    pool_pre_ping=True,  # Validate connections
                    pool_recycle=3600,  # Recycle connections every hour
                )

            elif database_connect.startswith(
                "postgresql:"
            ) or database_connect.startswith("postgresql+"):
                # PostgreSQL-specific configuration
                engine = create_engine(
                    database_connect,
                    json_serializer=json.dumps,
                    json_deserializer=json.loads,
                    echo=False,
                    pool_pre_ping=True,
                    pool_recycle=3600,
                )

            else:
                raise DatabaseError(f"Unsupported database type: {database_connect}")

            # Test connection
            with engine.connect() as conn:
                conn.execute(text("SELECT 1"))

            return engine

        except DatabaseError:
            # Re-raise our custom database errors with helpful messages
            raise
        except ModuleNotFoundError as e:
            # Map missing-driver import errors to actionable install guidance
            if "mysql" in str(e).lower():
                error_msg = (
                    "❌ MySQL driver not found. Install one of the following:\n\n"
                    "Option 1 (Recommended): pip install mysql-connector-python\n"
                    "Option 2: pip install PyMySQL\n"
                    "Option 3: pip install memorisdk[mysql]\n\n"
                    f"Original error: {e}"
                )
                raise DatabaseError(error_msg)
            elif "psycopg" in str(e).lower() or "postgresql" in str(e).lower():
                error_msg = (
                    "❌ PostgreSQL driver not found. Install one of the following:\n\n"
                    "Option 1 (Recommended): pip install psycopg2-binary\n"
                    "Option 2: pip install memorisdk[postgres]\n\n"
                    f"Original error: {e}"
                )
                raise DatabaseError(error_msg)
            else:
                raise DatabaseError(f"Missing required dependency: {e}")
        except SQLAlchemyError as e:
            error_msg = f"Database connection failed: {e}\n\nCheck your connection string and ensure the database server is running."
            raise DatabaseError(error_msg)
        except Exception as e:
            raise DatabaseError(f"Failed to create database engine: {e}")
263
+
264
+ def initialize_schema(self):
265
+ """Initialize database schema"""
266
+ try:
267
+ # Create all tables
268
+ Base.metadata.create_all(bind=self.engine)
269
+
270
+ # Setup database-specific features
271
+ self._setup_database_features()
272
+
273
+ logger.info(
274
+ f"Database schema initialized successfully for {self.database_type}"
275
+ )
276
+
277
+ except Exception as e:
278
+ logger.error(f"Failed to initialize schema: {e}")
279
+ raise DatabaseError(f"Failed to initialize schema: {e}")
280
+
281
+ def _setup_database_features(self):
282
+ """Setup database-specific features like full-text search"""
283
+ try:
284
+ with self.engine.connect() as conn:
285
+ if self.database_type == "sqlite":
286
+ self._setup_sqlite_fts(conn)
287
+ elif self.database_type == "mysql":
288
+ self._setup_mysql_fulltext(conn)
289
+ elif self.database_type == "postgresql":
290
+ self._setup_postgresql_fts(conn)
291
+
292
+ conn.commit()
293
+
294
+ except Exception as e:
295
+ logger.warning(f"Failed to setup database-specific features: {e}")
296
+
297
+ def _setup_sqlite_fts(self, conn):
298
+ """Setup SQLite FTS5"""
299
+ try:
300
+ # Create FTS5 virtual table
301
+ conn.execute(
302
+ text(
303
+ """
304
+ CREATE VIRTUAL TABLE IF NOT EXISTS memory_search_fts USING fts5(
305
+ memory_id,
306
+ memory_type,
307
+ namespace,
308
+ searchable_content,
309
+ summary,
310
+ category_primary,
311
+ content='',
312
+ contentless_delete=1
313
+ )
314
+ """
315
+ )
316
+ )
317
+
318
+ # Create triggers
319
+ conn.execute(
320
+ text(
321
+ """
322
+ CREATE TRIGGER IF NOT EXISTS short_term_memory_fts_insert AFTER INSERT ON short_term_memory
323
+ BEGIN
324
+ INSERT INTO memory_search_fts(memory_id, memory_type, namespace, searchable_content, summary, category_primary)
325
+ VALUES (NEW.memory_id, 'short_term', NEW.namespace, NEW.searchable_content, NEW.summary, NEW.category_primary);
326
+ END
327
+ """
328
+ )
329
+ )
330
+
331
+ conn.execute(
332
+ text(
333
+ """
334
+ CREATE TRIGGER IF NOT EXISTS long_term_memory_fts_insert AFTER INSERT ON long_term_memory
335
+ BEGIN
336
+ INSERT INTO memory_search_fts(memory_id, memory_type, namespace, searchable_content, summary, category_primary)
337
+ VALUES (NEW.memory_id, 'long_term', NEW.namespace, NEW.searchable_content, NEW.summary, NEW.category_primary);
338
+ END
339
+ """
340
+ )
341
+ )
342
+
343
+ logger.info("SQLite FTS5 setup completed")
344
+
345
+ except Exception as e:
346
+ logger.warning(f"SQLite FTS5 setup failed: {e}")
347
+
348
+ def _setup_mysql_fulltext(self, conn):
349
+ """Setup MySQL FULLTEXT indexes"""
350
+ try:
351
+ # Create FULLTEXT indexes
352
+ conn.execute(
353
+ text(
354
+ "ALTER TABLE short_term_memory ADD FULLTEXT INDEX ft_short_term_search (searchable_content, summary)"
355
+ )
356
+ )
357
+ conn.execute(
358
+ text(
359
+ "ALTER TABLE long_term_memory ADD FULLTEXT INDEX ft_long_term_search (searchable_content, summary)"
360
+ )
361
+ )
362
+
363
+ logger.info("MySQL FULLTEXT indexes setup completed")
364
+
365
+ except Exception as e:
366
+ logger.warning(
367
+ f"MySQL FULLTEXT setup failed (indexes may already exist): {e}"
368
+ )
369
+
370
+ def _setup_postgresql_fts(self, conn):
371
+ """Setup PostgreSQL full-text search"""
372
+ try:
373
+ # Add tsvector columns
374
+ conn.execute(
375
+ text(
376
+ "ALTER TABLE short_term_memory ADD COLUMN IF NOT EXISTS search_vector tsvector"
377
+ )
378
+ )
379
+ conn.execute(
380
+ text(
381
+ "ALTER TABLE long_term_memory ADD COLUMN IF NOT EXISTS search_vector tsvector"
382
+ )
383
+ )
384
+
385
+ # Create GIN indexes
386
+ conn.execute(
387
+ text(
388
+ "CREATE INDEX IF NOT EXISTS idx_short_term_search_vector ON short_term_memory USING GIN(search_vector)"
389
+ )
390
+ )
391
+ conn.execute(
392
+ text(
393
+ "CREATE INDEX IF NOT EXISTS idx_long_term_search_vector ON long_term_memory USING GIN(search_vector)"
394
+ )
395
+ )
396
+
397
+ # Create update functions and triggers
398
+ conn.execute(
399
+ text(
400
+ """
401
+ CREATE OR REPLACE FUNCTION update_short_term_search_vector() RETURNS trigger AS $$
402
+ BEGIN
403
+ NEW.search_vector := to_tsvector('english', COALESCE(NEW.searchable_content, '') || ' ' || COALESCE(NEW.summary, ''));
404
+ RETURN NEW;
405
+ END
406
+ $$ LANGUAGE plpgsql;
407
+ """
408
+ )
409
+ )
410
+
411
+ conn.execute(
412
+ text(
413
+ """
414
+ DROP TRIGGER IF EXISTS update_short_term_search_vector_trigger ON short_term_memory;
415
+ CREATE TRIGGER update_short_term_search_vector_trigger
416
+ BEFORE INSERT OR UPDATE ON short_term_memory
417
+ FOR EACH ROW EXECUTE FUNCTION update_short_term_search_vector();
418
+ """
419
+ )
420
+ )
421
+
422
+ logger.info("PostgreSQL FTS setup completed")
423
+
424
+ except Exception as e:
425
+ logger.warning(f"PostgreSQL FTS setup failed: {e}")
426
+
427
+ def _get_search_service(self) -> SearchService:
428
+ """Get search service instance with fresh session and proper error handling"""
429
+ try:
430
+ if not self.SessionLocal:
431
+ logger.error("SessionLocal not available for search service")
432
+ return None
433
+
434
+ # Always create a new session to avoid stale connections
435
+ session = self.SessionLocal()
436
+ if not session:
437
+ logger.error("Failed to create database session")
438
+ return None
439
+
440
+ search_service = SearchService(session, self.database_type)
441
+ logger.debug(
442
+ f"Created new search service instance for database type: {self.database_type}"
443
+ )
444
+ return search_service
445
+
446
+ except Exception as e:
447
+ logger.error(f"Failed to create search service: {e}")
448
+ logger.debug(
449
+ f"Search service creation error: {type(e).__name__}: {str(e)}",
450
+ exc_info=True,
451
+ )
452
+ return None
453
+
454
+ def store_chat_history(
455
+ self,
456
+ chat_id: str,
457
+ user_input: str,
458
+ ai_output: str,
459
+ model: str,
460
+ timestamp: datetime,
461
+ session_id: str,
462
+ namespace: str = "default",
463
+ tokens_used: int = 0,
464
+ metadata: Optional[Dict[str, Any]] = None,
465
+ ):
466
+ """Store chat history"""
467
+ with self.SessionLocal() as session:
468
+ try:
469
+ chat_history = ChatHistory(
470
+ chat_id=chat_id,
471
+ user_input=user_input,
472
+ ai_output=ai_output,
473
+ model=model,
474
+ timestamp=timestamp,
475
+ session_id=session_id,
476
+ namespace=namespace,
477
+ tokens_used=tokens_used,
478
+ metadata_json=metadata or {},
479
+ )
480
+
481
+ session.merge(chat_history) # Use merge for INSERT OR REPLACE behavior
482
+ session.commit()
483
+
484
+ except SQLAlchemyError as e:
485
+ session.rollback()
486
+ raise DatabaseError(f"Failed to store chat history: {e}")
487
+
488
+ def get_chat_history(
489
+ self,
490
+ namespace: str = "default",
491
+ session_id: Optional[str] = None,
492
+ limit: int = 10,
493
+ ) -> List[Dict[str, Any]]:
494
+ """Get chat history with optional session filtering"""
495
+ with self.SessionLocal() as session:
496
+ try:
497
+ query = session.query(ChatHistory).filter(
498
+ ChatHistory.namespace == namespace
499
+ )
500
+
501
+ if session_id:
502
+ query = query.filter(ChatHistory.session_id == session_id)
503
+
504
+ results = (
505
+ query.order_by(ChatHistory.timestamp.desc()).limit(limit).all()
506
+ )
507
+
508
+ # Convert to dictionaries
509
+ return [
510
+ {
511
+ "chat_id": result.chat_id,
512
+ "user_input": result.user_input,
513
+ "ai_output": result.ai_output,
514
+ "model": result.model,
515
+ "timestamp": result.timestamp,
516
+ "session_id": result.session_id,
517
+ "namespace": result.namespace,
518
+ "tokens_used": result.tokens_used,
519
+ "metadata": result.metadata_json or {},
520
+ }
521
+ for result in results
522
+ ]
523
+
524
+ except SQLAlchemyError as e:
525
+ raise DatabaseError(f"Failed to get chat history: {e}")
526
+
527
    def store_long_term_memory_enhanced(
        self, memory: ProcessedLongTermMemory, chat_id: str, namespace: str = "default"
    ) -> str:
        """Store a ProcessedLongTermMemory with enhanced schema.

        Args:
            memory: Fully-processed memory object (pydantic model).
            chat_id: Originating chat exchange this memory was derived from.
            namespace: Logical partition to file the memory under.

        Returns:
            The generated UUID string used as the new row's memory_id.

        Raises:
            DatabaseError: if the INSERT fails (session is rolled back).
        """
        memory_id = str(uuid.uuid4())

        with self.SessionLocal() as session:
            try:
                long_term_memory = LongTermMemory(
                    memory_id=memory_id,
                    original_chat_id=chat_id,
                    # Full model snapshot kept alongside the flattened columns
                    processed_data=memory.model_dump(mode="json"),
                    importance_score=memory.importance_score,
                    category_primary=memory.classification.value,
                    retention_type="long_term",
                    namespace=namespace,
                    created_at=datetime.now(),
                    searchable_content=memory.content,
                    summary=memory.summary,
                    # Fixed mid-range placeholders — presumably refined by a
                    # later scoring pass; TODO confirm with pipeline owners.
                    novelty_score=0.5,
                    relevance_score=0.5,
                    actionability_score=0.5,
                    classification=memory.classification.value,
                    memory_importance=memory.importance.value,
                    topic=memory.topic,
                    entities_json=memory.entities,
                    keywords_json=memory.keywords,
                    is_user_context=memory.is_user_context,
                    is_preference=memory.is_preference,
                    is_skill_knowledge=memory.is_skill_knowledge,
                    is_current_project=memory.is_current_project,
                    promotion_eligible=memory.promotion_eligible,
                    duplicate_of=memory.duplicate_of,
                    supersedes_json=memory.supersedes,
                    related_memories_json=memory.related_memories,
                    confidence_score=memory.confidence_score,
                    extraction_timestamp=memory.extraction_timestamp,
                    classification_reason=memory.classification_reason,
                    # Flags consumed by later pipeline passes (dedup / conscious agent)
                    processed_for_duplicates=False,
                    conscious_processed=False,
                )

                session.add(long_term_memory)
                session.commit()

                logger.debug(f"Stored enhanced long-term memory {memory_id}")
                return memory_id

            except SQLAlchemyError as e:
                session.rollback()
                logger.error(f"Failed to store enhanced long-term memory: {e}")
                raise DatabaseError(f"Failed to store enhanced long-term memory: {e}")
579
+
580
+ def search_memories(
581
+ self,
582
+ query: str,
583
+ namespace: str = "default",
584
+ category_filter: Optional[List[str]] = None,
585
+ limit: int = 10,
586
+ ) -> List[Dict[str, Any]]:
587
+ """Search memories using the cross-database search service"""
588
+ search_service = None
589
+ try:
590
+ logger.debug(
591
+ f"Starting memory search for query '{query}' in namespace '{namespace}' with category_filter={category_filter}"
592
+ )
593
+ search_service = self._get_search_service()
594
+
595
+ if not search_service:
596
+ logger.error("Failed to create search service instance")
597
+ return []
598
+
599
+ results = search_service.search_memories(
600
+ query, namespace, category_filter, limit
601
+ )
602
+ logger.debug(f"Search for '{query}' returned {len(results)} results")
603
+
604
+ # Validate results structure
605
+ if not isinstance(results, list):
606
+ logger.warning(
607
+ f"Search service returned unexpected type: {type(results)}, converting to list"
608
+ )
609
+ results = list(results) if results else []
610
+
611
+ return results
612
+
613
+ except Exception as e:
614
+ logger.error(
615
+ f"Memory search failed for query '{query}' in namespace '{namespace}': {e}"
616
+ )
617
+ logger.debug(
618
+ f"Search error details: {type(e).__name__}: {str(e)}", exc_info=True
619
+ )
620
+ # Return empty list instead of raising exception to avoid breaking auto_ingest
621
+ return []
622
+
623
+ finally:
624
+ # Ensure session is properly closed, even if an exception occurred
625
+ if search_service and hasattr(search_service, "session"):
626
+ try:
627
+ if search_service.session:
628
+ logger.debug("Closing search service session")
629
+ search_service.session.close()
630
+ except Exception as session_e:
631
+ logger.warning(f"Error closing search service session: {session_e}")
632
+
633
+ def get_memory_stats(self, namespace: str = "default") -> Dict[str, Any]:
634
+ """Get comprehensive memory statistics"""
635
+ with self.SessionLocal() as session:
636
+ try:
637
+ stats = {}
638
+
639
+ # Basic counts
640
+ stats["chat_history_count"] = (
641
+ session.query(ChatHistory)
642
+ .filter(ChatHistory.namespace == namespace)
643
+ .count()
644
+ )
645
+
646
+ stats["short_term_count"] = (
647
+ session.query(ShortTermMemory)
648
+ .filter(ShortTermMemory.namespace == namespace)
649
+ .count()
650
+ )
651
+
652
+ stats["long_term_count"] = (
653
+ session.query(LongTermMemory)
654
+ .filter(LongTermMemory.namespace == namespace)
655
+ .count()
656
+ )
657
+
658
+ # Category breakdown
659
+ categories = {}
660
+
661
+ # Short-term categories
662
+ short_categories = (
663
+ session.query(
664
+ ShortTermMemory.category_primary,
665
+ func.count(ShortTermMemory.memory_id).label("count"),
666
+ )
667
+ .filter(ShortTermMemory.namespace == namespace)
668
+ .group_by(ShortTermMemory.category_primary)
669
+ .all()
670
+ )
671
+
672
+ for cat, count in short_categories:
673
+ categories[cat] = categories.get(cat, 0) + count
674
+
675
+ # Long-term categories
676
+ long_categories = (
677
+ session.query(
678
+ LongTermMemory.category_primary,
679
+ func.count(LongTermMemory.memory_id).label("count"),
680
+ )
681
+ .filter(LongTermMemory.namespace == namespace)
682
+ .group_by(LongTermMemory.category_primary)
683
+ .all()
684
+ )
685
+
686
+ for cat, count in long_categories:
687
+ categories[cat] = categories.get(cat, 0) + count
688
+
689
+ stats["memories_by_category"] = categories
690
+
691
+ # Average importance
692
+ short_avg = (
693
+ session.query(func.avg(ShortTermMemory.importance_score))
694
+ .filter(ShortTermMemory.namespace == namespace)
695
+ .scalar()
696
+ or 0
697
+ )
698
+
699
+ long_avg = (
700
+ session.query(func.avg(LongTermMemory.importance_score))
701
+ .filter(LongTermMemory.namespace == namespace)
702
+ .scalar()
703
+ or 0
704
+ )
705
+
706
+ total_memories = stats["short_term_count"] + stats["long_term_count"]
707
+ if total_memories > 0:
708
+ # Weight averages by count
709
+ total_avg = (
710
+ (short_avg * stats["short_term_count"])
711
+ + (long_avg * stats["long_term_count"])
712
+ ) / total_memories
713
+ stats["average_importance"] = float(total_avg) if total_avg else 0.0
714
+ else:
715
+ stats["average_importance"] = 0.0
716
+
717
+ # Database info
718
+ stats["database_type"] = self.database_type
719
+ stats["database_url"] = (
720
+ self.database_connect.split("@")[-1]
721
+ if "@" in self.database_connect
722
+ else self.database_connect
723
+ )
724
+
725
+ return stats
726
+
727
+ except SQLAlchemyError as e:
728
+ raise DatabaseError(f"Failed to get memory stats: {e}")
729
+
730
+ def clear_memory(
731
+ self, namespace: str = "default", memory_type: Optional[str] = None
732
+ ):
733
+ """Clear memory data"""
734
+ with self.SessionLocal() as session:
735
+ try:
736
+ if memory_type == "short_term":
737
+ session.query(ShortTermMemory).filter(
738
+ ShortTermMemory.namespace == namespace
739
+ ).delete()
740
+ elif memory_type == "long_term":
741
+ session.query(LongTermMemory).filter(
742
+ LongTermMemory.namespace == namespace
743
+ ).delete()
744
+ elif memory_type == "chat_history":
745
+ session.query(ChatHistory).filter(
746
+ ChatHistory.namespace == namespace
747
+ ).delete()
748
+ else: # Clear all
749
+ session.query(ShortTermMemory).filter(
750
+ ShortTermMemory.namespace == namespace
751
+ ).delete()
752
+ session.query(LongTermMemory).filter(
753
+ LongTermMemory.namespace == namespace
754
+ ).delete()
755
+ session.query(ChatHistory).filter(
756
+ ChatHistory.namespace == namespace
757
+ ).delete()
758
+
759
+ session.commit()
760
+
761
+ except SQLAlchemyError as e:
762
+ session.rollback()
763
+ raise DatabaseError(f"Failed to clear memory: {e}")
764
+
765
    def execute_with_translation(
        self, query: str, parameters: Optional[Dict[str, Any]] = None
    ):
        """
        Execute a query with automatic parameter translation for cross-database compatibility.

        Args:
            query: SQL query string
            parameters: Query parameters (optional; translated per backend)

        Returns:
            Query result
        """
        if parameters:
            translated_params = self.query_translator.translate_parameters(parameters)
        else:
            translated_params = {}

        # NOTE(review): commits unconditionally (suitable for DML); the result
        # is returned after the connection context closes — confirm callers
        # consume rows eagerly rather than streaming.
        with self.engine.connect() as conn:
            result = conn.execute(text(query), translated_params)
            conn.commit()
            return result
785
+
786
    def _get_connection(self):
        """
        Compatibility method for legacy code that expects raw database connections.

        Returns a context manager that provides a SQLAlchemy connection with
        automatic parameter translation support.

        This is used by memory.py for direct SQL queries.
        """
        from contextlib import contextmanager

        @contextmanager
        def connection_context():
            class TranslatingConnection:
                """Wrapper that adds parameter translation to SQLAlchemy connections"""

                def __init__(self, conn, translator):
                    self._conn = conn
                    self._translator = translator

                def execute(self, query, parameters=None):
                    """Execute query with automatic parameter translation"""
                    if parameters:
                        # Handle both text() queries and raw strings
                        if hasattr(query, "text"):
                            # SQLAlchemy text() object
                            translated_params = self._translator.translate_parameters(
                                parameters
                            )
                            return self._conn.execute(query, translated_params)
                        else:
                            # Raw string query: wrap in text() before executing
                            translated_params = self._translator.translate_parameters(
                                parameters
                            )
                            return self._conn.execute(
                                text(str(query)), translated_params
                            )
                    else:
                        return self._conn.execute(query)

                def commit(self):
                    """Commit transaction"""
                    return self._conn.commit()

                def rollback(self):
                    """Rollback transaction"""
                    return self._conn.rollback()

                def close(self):
                    """Close connection"""
                    return self._conn.close()

                def fetchall(self):
                    """Compatibility method for cursor-like usage"""
                    # This is for backwards compatibility with code that expects cursor.fetchall()
                    # NOTE(review): always returns [] — callers expecting real
                    # rows should use the result of execute(); confirm no
                    # caller relies on this stub for data.
                    return []

                def scalar(self):
                    """Compatibility method for cursor-like usage"""
                    # NOTE(review): stub — always None; see fetchall() note.
                    return None

                def __getattr__(self, name):
                    """Delegate unknown attributes to the underlying connection"""
                    return getattr(self._conn, name)

            conn = self.engine.connect()
            try:
                yield TranslatingConnection(conn, self.query_translator)
            finally:
                # Always return the connection to the pool, even on error
                conn.close()

        return connection_context()
859
+
860
+ def close(self):
861
+ """Close database connections"""
862
+ if self._search_service and hasattr(self._search_service, "session"):
863
+ self._search_service.session.close()
864
+
865
+ if hasattr(self, "engine"):
866
+ self.engine.dispose()
867
+
868
+ def get_database_info(self) -> Dict[str, Any]:
869
+ """Get database information and capabilities"""
870
+ base_info = {
871
+ "database_type": self.database_type,
872
+ "database_url": (
873
+ self.database_connect.split("@")[-1]
874
+ if "@" in self.database_connect
875
+ else self.database_connect
876
+ ),
877
+ "driver": self.engine.dialect.driver,
878
+ "server_version": getattr(self.engine.dialect, "server_version_info", None),
879
+ "supports_fulltext": True, # Assume true for SQLAlchemy managed connections
880
+ "auto_creation_enabled": self.enable_auto_creation,
881
+ }
882
+
883
+ # Add auto-creation specific information
884
+ if hasattr(self, "auto_creator"):
885
+ creation_info = self.auto_creator.get_database_info(self.database_connect)
886
+ base_info.update(creation_info)
887
+
888
+ return base_info