memorisdk 2.1.1__py3-none-any.whl → 2.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

memori/__init__.py CHANGED
@@ -5,7 +5,7 @@ Professional-grade memory layer with comprehensive error handling, configuration
  management, and modular architecture for production AI systems.
  """

- __version__ = "2.1.1"
+ __version__ = "2.3.0"
  __author__ = "Harshal More"
  __email__ = "harshalmore2468@gmail.com"

memori/agents/conscious_agent.py CHANGED
@@ -116,7 +116,7 @@ class ConsciouscAgent:
          return False

      async def initialize_existing_conscious_memories(
-         self, db_manager, namespace: str = "default"
+         self, db_manager, namespace: str = "default", limit: int = 10
      ) -> bool:
          """
          Initialize by copying ALL existing conscious-info memories to short-term memory
@@ -143,16 +143,17 @@ class ConsciouscAgent:
          from sqlalchemy import text

          with db_manager._get_connection() as connection:
-             # Get ALL conscious-info labeled memories from long-term memory
+             # Get top conscious-info labeled memories from long-term memory (limited for performance)
              cursor = connection.execute(
                  text(
                      """SELECT memory_id, processed_data, summary, searchable_content,
                         importance_score, created_at
                      FROM long_term_memory
                      WHERE namespace = :namespace AND classification = 'conscious-info'
-                     ORDER BY importance_score DESC, created_at DESC"""
+                     ORDER BY importance_score DESC, created_at DESC
+                     LIMIT :limit"""
                  ),
-                 {"namespace": namespace},
+                 {"namespace": namespace, "limit": limit},
              )
              existing_conscious_memories = cursor.fetchall()

@@ -237,17 +237,19 @@ CONVERSATION CONTEXT:
              )

              logger.debug(
-                 f"Processed conversation {chat_id}: "
-                 f"classification={processed_memory.classification}, "
-                 f"importance={processed_memory.importance}, "
-                 f"conscious_context={processed_memory.is_user_context}, "
-                 f"promotion_eligible={processed_memory.promotion_eligible}"
+                 f"[AGENT] Processed conversation {chat_id[:8]}... - "
+                 f"classification: {processed_memory.classification.value} | "
+                 f"importance: {processed_memory.importance.value} | "
+                 f"conscious_context: {processed_memory.is_user_context} | "
+                 f"promotion_eligible: {processed_memory.promotion_eligible}"
              )

              return processed_memory

          except Exception as e:
-             logger.error(f"Memory agent processing failed for {chat_id}: {e}")
+             logger.error(
+                 f"[AGENT] Memory processing failed for {chat_id[:8]}... - {type(e).__name__}: {e}"
+             )
              return self._create_empty_long_term_memory(
                  chat_id, f"Processing failed: {str(e)}"
              )
@@ -307,7 +309,7 @@ CONVERSATION CONTEXT:

          if avg_similarity >= similarity_threshold:
              logger.info(
-                 f"Duplicate detected: {avg_similarity:.2f} similarity with {existing.conversation_id}"
+                 f"[AGENT] Duplicate detected - {avg_similarity:.2f} similarity with {existing.conversation_id[:8]}..."
              )
              return existing.conversation_id

memori/agents/retrieval_agent.py CHANGED
@@ -218,15 +218,25 @@ Be strategic and comprehensive in your search planning."""
          all_results = []
          seen_memory_ids = set()

-         # For MongoDB and SQL, use the unified search_memories method as primary strategy
-         # This ensures we use the database's native search capabilities
-         logger.debug(f"Executing unified database search using {db_type} manager")
-         primary_results = db_manager.search_memories(
-             query=search_plan.query_text or query, namespace=namespace, limit=limit
-         )
-         logger.debug(
-             f"Primary database search returned {len(primary_results)} results"
-         )
+         # For MongoDB and SQL, use SearchService directly to avoid recursion
+         # This ensures we use the database's native search capabilities without triggering context injection
+         logger.debug(f"Executing direct SearchService search using {db_type}")
+         try:
+             from ..database.search_service import SearchService
+
+             with db_manager.SessionLocal() as session:
+                 search_service = SearchService(session, db_type)
+                 primary_results = search_service.search_memories(
+                     query=search_plan.query_text or query,
+                     namespace=namespace,
+                     limit=limit,
+                 )
+                 logger.debug(
+                     f"Direct SearchService returned {len(primary_results)} results"
+                 )
+         except Exception as e:
+             logger.error(f"SearchService direct access failed: {e}")
+             primary_results = []

          # Process primary results and add search metadata
          for result in primary_results:
@@ -383,9 +393,17 @@ Be strategic and comprehensive in your search planning."""

          search_terms = " ".join(keywords)
          try:
-             results = db_manager.search_memories(
-                 query=search_terms, namespace=namespace, limit=limit
-             )
+             # Use SearchService directly to avoid recursion
+             from ..database.search_service import SearchService
+
+             db_type = self._detect_database_type(db_manager)
+
+             with db_manager.SessionLocal() as session:
+                 search_service = SearchService(session, db_type)
+                 results = search_service.search_memories(
+                     query=search_terms, namespace=namespace, limit=limit
+                 )
+
              # Ensure results is a list of dictionaries
              if not isinstance(results, list):
                  logger.warning(f"Search returned non-list result: {type(results)}")
@@ -417,14 +435,24 @@ Be strategic and comprehensive in your search planning."""
          if not categories:
              return []

-         # This would need to be implemented in the database manager
-         # For now, get all memories and filter by category
+         # Use SearchService directly to avoid recursion
+         # Get all memories and filter by category
          logger.debug(
              f"Searching memories by categories: {categories} in namespace: {namespace}"
          )
-         all_results = db_manager.search_memories(
-             query="", namespace=namespace, limit=limit * 3
-         )
+         try:
+             from ..database.search_service import SearchService
+
+             db_type = self._detect_database_type(db_manager)
+
+             with db_manager.SessionLocal() as session:
+                 search_service = SearchService(session, db_type)
+                 all_results = search_service.search_memories(
+                     query="", namespace=namespace, limit=limit * 3
+                 )
+         except Exception as e:
+             logger.error(f"Category search failed: {e}")
+             all_results = []

          logger.debug(
              f"Retrieved {len(all_results)} total results for category filtering"
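All three retrieval hunks above replace db_manager.search_memories() with the same pattern: open a session from the manager and query SearchService directly, so retrieval can never re-enter the context-injection path. A minimal standalone sketch of that pattern, for reference only; the hardcoded "sqlite" db_type and the wrapper function are assumptions (the agent resolves the type via _detect_database_type()):

from memori.database.search_service import SearchService

def search_without_context_injection(db_manager, query: str, namespace: str = "default", limit: int = 5):
    """Query the search layer directly, bypassing the memory-injection path."""
    db_type = "sqlite"  # assumed here; derive it from the manager in real code
    with db_manager.SessionLocal() as session:
        search_service = SearchService(session, db_type)
        return search_service.search_memories(query=query, namespace=namespace, limit=limit)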
memori/core/conversation.py CHANGED
@@ -207,7 +207,7 @@ class ConversationManager:
          elif mode == "auto":
              # Auto mode: Search long-term memory database for relevant context
              logger.debug(
-                 f"Auto-ingest: Processing user input for long-term memory search: '{user_input[:50]}...'"
+                 f"[CONTEXT] Auto-ingest processing - Query: '{user_input[:50]}...' | Session: {session_id[:8]}..."
              )
              context = (
                  memori_instance._get_auto_ingest_context(user_input)
@@ -217,11 +217,11 @@ class ConversationManager:
              if context:
                  context_prompt = self._build_auto_context_prompt(context)
                  logger.debug(
-                     f"Auto-ingest: Successfully injected long-term memory context with {len(context)} items for session {session_id}"
+                     f"[CONTEXT] Long-term memory injected - {len(context)} items | Session: {session_id[:8]}..."
                  )
              else:
                  logger.debug(
-                     f"Auto-ingest: No relevant memories found in long-term database for query '{user_input[:50]}...' in session {session_id}"
+                     f"[CONTEXT] No relevant memories found for '{user_input[:30]}...' | Session: {session_id[:8]}..."
                  )

          # Get conversation history
@@ -246,7 +246,7 @@ class ConversationManager:
                  system_content += f"{role_label}: {msg['content']}\n"
              system_content += "--- End History ---\n"
              logger.debug(
-                 f"Added {len(previous_messages)} history messages for session {session_id}"
+                 f"[CONTEXT] Added {len(previous_messages)} history messages | Session: {session_id[:8]}..."
              )

          # Find existing system message or create new one
@@ -267,16 +267,17 @@ class ConversationManager:
                      0, {"role": "system", "content": system_content}
                  )

+             context_status = "yes" if context_prompt else "no"
+             history_status = "yes" if len(history_messages) > 1 else "no"
              logger.debug(
-                 f"Enhanced messages for session {session_id}: context={'yes' if context_prompt else 'no'}, "
-                 f"history={'yes' if len(history_messages) > 1 else 'no'}"
+                 f"[CONTEXT] Enhanced messages for session {session_id[:8]}... - context: {context_status} | history: {history_status}"
              )

              return enhanced_messages

          except Exception as e:
              logger.error(
-                 f"Failed to inject context with history for session {session_id}: {e}"
+                 f"[CONTEXT] Failed to inject context for session {session_id[:8]}... - {type(e).__name__}: {e}"
              )
              return messages

memori/core/memory.py CHANGED
@@ -3,6 +3,7 @@ Main Memori class - Pydantic-based memory interface v1.0
  """

  import asyncio
+ import threading
  import time
  import uuid
  from datetime import datetime
@@ -65,6 +66,7 @@ class Memori:
          schema_init: bool = True,  # Initialize database schema and create tables
          database_prefix: str | None = None,  # Database name prefix
          database_suffix: str | None = None,  # Database name suffix
+         conscious_memory_limit: int = 10,  # Limit for conscious memory processing
      ):
          """
          Initialize Memori memory system v1.0.
@@ -109,6 +111,14 @@ class Memori:
          self.schema_init = schema_init
          self.database_prefix = database_prefix
          self.database_suffix = database_suffix
+         # Validate conscious_memory_limit parameter
+         if not isinstance(conscious_memory_limit, int) or conscious_memory_limit < 1:
+             raise ValueError("conscious_memory_limit must be a positive integer")
+
+         self.conscious_memory_limit = conscious_memory_limit
+
+         # Thread safety for conscious memory initialization
+         self._conscious_init_lock = threading.RLock()

          # Configure provider based on explicit settings ONLY - no auto-detection
          if provider_config:
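The new conscious_memory_limit parameter is validated here and flows down to the LIMIT :limit clauses shown in the other hunks. A usage sketch; the connection string and ingest flags are illustrative assumptions, not prescribed by this diff:

from memori import Memori

memori = Memori(
    database_connect="sqlite:///memori.db",  # assumed example database
    conscious_ingest=True,
    auto_ingest=True,
    conscious_memory_limit=20,  # copy at most the top 20 conscious-info memories at startup
)

# The validation added above runs at construction time, so
# Memori(conscious_memory_limit=0) raises ValueError.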
@@ -452,7 +462,7 @@ class Memori:
              )
              init_success = (
                  await self.conscious_agent.initialize_existing_conscious_memories(
-                     self.db_manager, self.namespace
+                     self.db_manager, self.namespace, self.conscious_memory_limit
                  )
              )
              if init_success:
@@ -478,52 +488,104 @@ class Memori:

      def _run_synchronous_conscious_initialization(self):
          """Run conscious agent initialization synchronously (when no event loop is available)"""
-         try:
-             if not self.conscious_agent:
-                 return
+         with self._conscious_init_lock:
+             try:
+                 if not self.conscious_agent:
+                     return

-             # If both auto_ingest and conscious_ingest are enabled,
-             # initialize by copying ALL existing conscious-info memories first
-             if self.auto_ingest and self.conscious_ingest:
-                 logger.info(
-                     "Conscious-ingest: Both auto_ingest and conscious_ingest enabled - initializing existing conscious memories"
-                 )
+                 # Check if we've already initialized in this session to avoid repeated work
+                 # Use namespace-specific key to prevent conflicts between instances
+                 init_key = f"_conscious_initialized_{self.namespace or 'default'}"
+                 if hasattr(self, init_key) and getattr(self, init_key):
+                     logger.debug(
+                         f"[CONSCIOUS] Already initialized for namespace '{self.namespace or 'default'}', skipping"
+                     )
+                     return

-                 # Run synchronous initialization of existing memories
-                 self._initialize_existing_conscious_memories_sync()
+                 # If both auto_ingest and conscious_ingest are enabled,
+                 # initialize by copying the most important existing conscious-info memories first
+                 if self.auto_ingest and self.conscious_ingest:
+                     logger.info(
+                         "[CONSCIOUS] Both auto_ingest and conscious_ingest enabled - initializing existing conscious memories"
+                     )

-             logger.debug(
-                 "Conscious-ingest: Synchronous conscious context extraction completed"
-             )
+                     # Run optimized synchronous initialization of existing memories
+                     import time

-         except Exception as e:
-             logger.error(f"Synchronous conscious agent initialization failed: {e}")
+                     start_time = time.time()
+
+                     initialized = self._initialize_existing_conscious_memories_sync()
+
+                     elapsed = time.time() - start_time
+                     if initialized:
+                         logger.debug(
+                             f"[CONSCIOUS] Initialization completed in {elapsed:.2f}s"
+                         )
+                     else:
+                         logger.debug(
+                             f"[CONSCIOUS] Initialization skipped (no work needed) in {elapsed:.2f}s"
+                         )
+
+                     # Mark as initialized to avoid repeated work for this specific namespace
+                     init_key = f"_conscious_initialized_{self.namespace or 'default'}"
+                     setattr(self, init_key, True)
+
+                 logger.debug(
+                     "[CONSCIOUS] Synchronous conscious context extraction completed"
+                 )
+
+             except Exception as e:
+                 logger.error(f"Synchronous conscious agent initialization failed: {e}")

      def _initialize_existing_conscious_memories_sync(self):
-         """Synchronously initialize existing conscious-info memories"""
+         """Synchronously initialize existing conscious-info memories with optimization"""
          try:
              from sqlalchemy import text

              with self.db_manager._get_connection() as connection:
-                 # Get ALL conscious-info labeled memories from long-term memory
+                 # First, check if we already have conscious memories in short-term storage
+                 existing_short_term = connection.execute(
+                     text(
+                         """SELECT COUNT(*) FROM short_term_memory
+                         WHERE namespace = :namespace
+                         AND (category_primary = 'conscious_context' OR memory_id LIKE 'conscious_%')"""
+                     ),
+                     {"namespace": self.namespace or "default"},
+                 ).scalar()
+
+                 if existing_short_term > 0:
+                     logger.debug(
+                         f"[CONSCIOUS] {existing_short_term} conscious memories already in short-term storage, skipping initialization"
+                     )
+                     return False
+
+                 # Get only the most important conscious-info memories (limit to 10 for performance)
                  cursor = connection.execute(
                      text(
                          """SELECT memory_id, processed_data, summary, searchable_content,
                             importance_score, created_at
                          FROM long_term_memory
                          WHERE namespace = :namespace AND classification = 'conscious-info'
-                         ORDER BY importance_score DESC, created_at DESC"""
+                         ORDER BY importance_score DESC, created_at DESC
+                         LIMIT :limit"""
                      ),
-                     {"namespace": self.namespace or "default"},
+                     {
+                         "namespace": self.namespace or "default",
+                         "limit": self.conscious_memory_limit,
+                     },
                  )
                  existing_conscious_memories = cursor.fetchall()

                  if not existing_conscious_memories:
                      logger.debug(
-                         "Conscious-ingest: No existing conscious-info memories found for initialization"
+                         "[CONSCIOUS] No conscious-info memories found for initialization"
                      )
                      return False

+                 # Batch process memories for efficiency
+                 logger.debug(
+                     f"[CONSCIOUS] Processing {len(existing_conscious_memories)} conscious memories..."
+                 )
                  copied_count = 0
                  for memory_row in existing_conscious_memories:
                      success = self._copy_memory_to_short_term_sync(memory_row)
@@ -532,12 +594,12 @@ class Memori:

          if copied_count > 0:
              logger.info(
-                 f"Conscious-ingest: Initialized {copied_count} existing conscious-info memories to short-term memory"
+                 f"[CONSCIOUS] Initialized {copied_count} conscious memories to short-term storage"
              )
              return True
          else:
              logger.debug(
-                 "Conscious-ingest: No new conscious memories to initialize (all were duplicates)"
+                 "[CONSCIOUS] No new conscious memories to initialize (all were duplicates)"
              )
              return False

@@ -564,26 +626,25 @@ class Memori:
          from sqlalchemy import text

          with self.db_manager._get_connection() as connection:
-             # Check if similar content already exists in short-term memory
+             # Database-agnostic duplicate check with safer pattern matching
              existing_check = connection.execute(
                  text(
                      """SELECT COUNT(*) FROM short_term_memory
                      WHERE namespace = :namespace
-                     AND category_primary = 'conscious_context'
-                     AND (searchable_content = :searchable_content
-                          OR summary = :summary)"""
+                     AND (memory_id = :exact_id
+                          OR memory_id LIKE :conscious_pattern)"""
                  ),
                  {
                      "namespace": self.namespace or "default",
-                     "searchable_content": searchable_content,
-                     "summary": summary,
+                     "exact_id": memory_id,
+                     "conscious_pattern": f"conscious_{memory_id}_%",
                  },
              )

              existing_count = existing_check.scalar()
              if existing_count > 0:
                  logger.debug(
-                     f"Conscious-ingest: Skipping duplicate memory {memory_id} - similar content already exists in short-term memory"
+                     f"[CONSCIOUS] Skipping duplicate memory {memory_id[:8]}... - already exists in short-term memory"
                  )
                  return False

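The duplicate check now matches on the memory_id naming convention instead of comparing content. A runnable sqlite3 illustration of the LIKE pattern (IDs invented; note that SQL LIKE also treats the literal underscores in the pattern as single-character wildcards):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE short_term_memory (memory_id TEXT, namespace TEXT)")
conn.executemany(
    "INSERT INTO short_term_memory VALUES (?, 'default')",
    [("conscious_a1b2c3d4_1700000000",), ("conscious_ffff0000_1700000000",)],
)
count = conn.execute(
    "SELECT COUNT(*) FROM short_term_memory WHERE memory_id LIKE ?",
    ("conscious_a1b2c3d4_%",),
).fetchone()[0]
print(count)  # 1 -- only the copy derived from memory a1b2c3d4 matches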
@@ -1892,7 +1953,7 @@ class Memori:

          # Debug logging for conversation recording
          logger.info(
-             f"Recording conversation - Input: '{user_input[:100]}...' Model: {model}"
+             f"[MEMORY] Recording conversation - Input: '{user_input[:60]}...' | Model: {model} | Session: {self.session_id[:8]}..."
          )

          # Parse response
@@ -1915,29 +1976,31 @@ class Memori:
                  namespace=self.namespace,
                  metadata=metadata or {},
              )
-             logger.debug(
-                 f"Successfully stored chat history for conversation: {chat_id}"
-             )
+             logger.debug(f"[MEMORY] Chat history stored - ID: {chat_id[:8]}...")

              # Always process into long-term memory when memory agent is available
              if self.memory_agent:
                  self._schedule_memory_processing(
                      chat_id, user_input, response_text, response_model
                  )
-                 logger.debug(f"Scheduled memory processing for conversation: {chat_id}")
+                 logger.debug(f"[MEMORY] Processing scheduled - ID: {chat_id[:8]}...")
              else:
                  logger.warning(
-                     f"Memory agent not available, skipping memory processing for: {chat_id}"
+                     f"[MEMORY] Agent unavailable, skipping processing - ID: {chat_id[:8]}..."
                  )

-             logger.info(f"Recorded conversation successfully: {chat_id}")
+             logger.info(
+                 f"[MEMORY] Conversation recorded successfully - ID: {chat_id[:8]}..."
+             )
              return chat_id

          except Exception as e:
-             logger.error(f"Failed to record conversation {chat_id}: {e}")
+             logger.error(
+                 f"[MEMORY] Failed to record conversation {chat_id[:8]}... - {type(e).__name__}: {e}"
+             )
              import traceback

-             logger.error(f"Recording error details: {traceback.format_exc()}")
+             logger.debug(f"[MEMORY] Recording error details: {traceback.format_exc()}")
              raise

      def _schedule_memory_processing(
memori/database/auto_creator.py CHANGED
@@ -28,6 +28,21 @@ class DatabaseAutoCreator:
          self.schema_init = schema_init
          self.utils = DatabaseConnectionUtils()

+     def _is_gibsonai_temp_connection(self, components: dict[str, str] | None) -> bool:
+         """Detect GibsonAI temporary database credentials to avoid noisy warnings."""
+         if not components:
+             return False
+
+         host = (components.get("host") or "").lower()
+         if "gibsonai.com" not in host:
+             return False
+
+         user = components.get("user") or components.get("username") or ""
+         database = components.get("database") or ""
+
+         # GibsonAI temporary credentials follow predictable us_/db_ prefixes
+         return user.startswith("us_") or database.startswith("db_")
+
      def ensure_database_exists(self, connection_string: str) -> str:
          """
          Ensure target database exists, creating it if necessary.
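The detection rule above is simple enough to exercise in isolation. A standalone copy of the helper with invented credentials, for illustration:

def is_gibsonai_temp_connection(components: dict[str, str] | None) -> bool:
    # Mirrors the new helper: gibsonai.com host plus us_/db_ prefixed credentials.
    if not components:
        return False
    host = (components.get("host") or "").lower()
    if "gibsonai.com" not in host:
        return False
    user = components.get("user") or components.get("username") or ""
    database = components.get("database") or ""
    return user.startswith("us_") or database.startswith("db_")

assert is_gibsonai_temp_connection(
    {"host": "mysql.gibsonai.com", "user": "us_abc123", "database": "db_xyz"}
)
assert not is_gibsonai_temp_connection(
    {"host": "localhost", "user": "root", "database": "memori"}
)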
@@ -45,6 +60,7 @@ class DatabaseAutoCreator:
              logger.debug("Auto-creation disabled, using original connection string")
              return connection_string

+         components = None
          try:
              # Parse connection string
              components = self.utils.parse_connection_string(connection_string)
@@ -56,6 +72,13 @@ class DatabaseAutoCreator:
              )
              return connection_string

+         # Skip noisy warnings for managed GibsonAI temporary databases
+         if self._is_gibsonai_temp_connection(components):
+             logger.debug(
+                 "[DB_SETUP] GibsonAI managed database detected - skipping auto-creation checks"
+             )
+             return connection_string
+
          # Validate database name
          if not self.utils.validate_database_name(components["database"]):
              raise ValueError(f"Invalid database name: {components['database']}")
@@ -70,10 +93,39 @@ class DatabaseAutoCreator:
              logger.info(f"Successfully created database '{components['database']}'")
              return connection_string

+         except PermissionError as e:
+             if components and self._is_gibsonai_temp_connection(components):
+                 logger.debug(
+                     "[DB_SETUP] GibsonAI managed database does not allow auto-creation (permission denied)"
+                 )
+                 return connection_string
+
+             logger.error(f"[DB_SETUP] Permission denied - {e}")
+             if components:
+                 logger.warning(
+                     f"[DB_SETUP] Database '{components['database']}' may need manual creation with proper permissions"
+                 )
+             else:
+                 logger.warning(
+                     "[DB_SETUP] Database may need manual creation with proper permissions"
+                 )
+             return connection_string
+         except RuntimeError as e:
+             logger.error(f"[DB_SETUP] Database creation error - {e}")
+             logger.info(
+                 "[DB_SETUP] Proceeding with original connection string, database may need manual setup"
+             )
+             return connection_string
          except Exception as e:
-             logger.error(f"Database auto-creation failed: {e}")
-             # Don't raise exception - let the original connection attempt proceed
-             # This allows graceful degradation if user has manual setup
+             logger.error(
+                 f"[DB_SETUP] Unexpected database auto-creation failure - {type(e).__name__}: {e}"
+             )
+             if components:
+                 logger.debug(
+                     f"[DB_SETUP] Connection string: {components['engine']}://{components['host']}:{components['port']}/{components['database']}"
+                 )
+             else:
+                 logger.debug(f"[DB_SETUP] Connection string: {connection_string}")
              return connection_string

      def _database_exists(self, components: dict[str, str]) -> bool:
@@ -90,7 +142,12 @@ class DatabaseAutoCreator:
              return False

          except Exception as e:
-             logger.error(f"Failed to check database existence: {e}")
+             if self._is_gibsonai_temp_connection(components):
+                 logger.debug(
+                     "[DB_CONNECTION] Skipping GibsonAI database existence check due to restricted permissions"
+                 )
+             else:
+                 logger.error(f"Failed to check database existence: {e}")
              return False

      def _postgresql_database_exists(self, components: dict[str, str]) -> bool:
@@ -176,7 +233,17 @@ class DatabaseAutoCreator:
              logger.error(error_msg)
              return False
          except Exception as e:
-             logger.error(f"MySQL database existence check failed: {e}")
+             if self._is_gibsonai_temp_connection(components):
+                 logger.debug(
+                     f"[DB_CONNECTION] GibsonAI existence check bypassed for '{components['database']}' ({e})"
+                 )
+             else:
+                 logger.error(
+                     f"[DB_CONNECTION] MySQL database existence check failed for '{components['database']}': {e}"
+                 )
+                 logger.debug(
+                     f"[DB_CONNECTION] Connection details - host: {components.get('host')}, port: {components.get('port')}, user: {components.get('user') or components.get('username')}"
+                 )
              return False

      def _create_database(self, components: dict[str, str]) -> None:
memori/database/search_service.py CHANGED
@@ -42,7 +42,7 @@ class SearchService:
              List of memory dictionaries with search metadata
          """
          logger.debug(
-             f"SearchService.search_memories called - query: '{query}', namespace: '{namespace}', database: {self.database_type}, limit: {limit}"
+             f"[SEARCH] Query initiated - '{query[:50]}{'...' if len(query) > 50 else ''}' | namespace: '{namespace}' | db: {self.database_type} | limit: {limit}"
          )

          if not query or not query.strip():
@@ -58,13 +58,13 @@ class SearchService:
          search_long_term = not memory_types or "long_term" in memory_types

          logger.debug(
-             f"Memory types to search - short_term: {search_short_term}, long_term: {search_long_term}, categories: {category_filter}"
+             f"[SEARCH] Target scope - short_term: {search_short_term} | long_term: {search_long_term} | categories: {category_filter or 'all'}"
          )

          try:
              # Try database-specific full-text search first
              if self.database_type == "sqlite":
-                 logger.debug("Using SQLite FTS5 search strategy")
+                 logger.debug("[SEARCH] Strategy: SQLite FTS5")
                  results = self._search_sqlite_fts(
                      query,
                      namespace,
@@ -74,7 +74,7 @@ class SearchService:
                      search_long_term,
                  )
              elif self.database_type == "mysql":
-                 logger.debug("Using MySQL FULLTEXT search strategy")
+                 logger.debug("[SEARCH] Strategy: MySQL FULLTEXT")
                  results = self._search_mysql_fulltext(
                      query,
                      namespace,
@@ -84,7 +84,7 @@ class SearchService:
                      search_long_term,
                  )
              elif self.database_type == "postgresql":
-                 logger.debug("Using PostgreSQL FTS search strategy")
+                 logger.debug("[SEARCH] Strategy: PostgreSQL FTS")
                  results = self._search_postgresql_fts(
                      query,
                      namespace,
@@ -94,12 +94,12 @@ class SearchService:
                      search_long_term,
                  )

-             logger.debug(f"Primary search strategy returned {len(results)} results")
+             logger.debug(f"[SEARCH] Primary strategy results: {len(results)} matches")

              # If no results or full-text search failed, fall back to LIKE search
              if not results:
                  logger.debug(
-                     "Primary search returned no results, falling back to LIKE search"
+                     "[SEARCH] Primary strategy empty, falling back to LIKE search"
                  )
                  results = self._search_like_fallback(
                      query,
@@ -112,13 +112,10 @@ class SearchService:

          except Exception as e:
              logger.error(
-                 f"Full-text search failed for query '{query}' in namespace '{namespace}': {e}"
+                 f"[SEARCH] Full-text search failed for '{query[:30]}...' in '{namespace}' - {type(e).__name__}: {e}"
              )
-             logger.debug(
-                 f"Full-text search error details: {type(e).__name__}: {str(e)}",
-                 exc_info=True,
-             )
-             logger.warning(f"Falling back to LIKE search for query '{query}'")
+             logger.debug("[SEARCH] Full-text error details", exc_info=True)
+             logger.warning("[SEARCH] Attempting LIKE fallback search")
              try:
                  results = self._search_like_fallback(
                      query,
@@ -128,21 +125,25 @@ class SearchService:
                      search_short_term,
                      search_long_term,
                  )
-                 logger.debug(f"LIKE fallback search returned {len(results)} results")
+                 logger.debug(f"[SEARCH] LIKE fallback results: {len(results)} matches")
              except Exception as fallback_e:
                  logger.error(
-                     f"LIKE fallback search also failed for query '{query}': {fallback_e}"
+                     f"[SEARCH] LIKE fallback also failed - {type(fallback_e).__name__}: {fallback_e}"
                  )
                  results = []

          final_results = self._rank_and_limit_results(results, limit)
          logger.debug(
-             f"SearchService completed - returning {len(final_results)} final results after ranking and limiting"
+             f"[SEARCH] Completed - {len(final_results)} results after ranking and limiting"
          )

          if final_results:
+             top_result = final_results[0]
+             memory_id = str(top_result.get("memory_id", "unknown"))[:8]
+             score = top_result.get("composite_score", 0)
+             strategy = top_result.get("search_strategy", "unknown")
              logger.debug(
-                 f"Top result: memory_id={final_results[0].get('memory_id')}, score={final_results[0].get('composite_score', 0):.3f}, strategy={final_results[0].get('search_strategy')}"
+                 f"[SEARCH] Top result: {memory_id}... | score: {score:.3f} | strategy: {strategy}"
              )

          return final_results
@@ -268,6 +269,36 @@ class SearchService:
          results = []

          try:
+             # First check if there are any records in the database
+             if search_short_term:
+                 short_count = (
+                     self.session.query(ShortTermMemory)
+                     .filter(ShortTermMemory.namespace == namespace)
+                     .count()
+                 )
+                 if short_count == 0:
+                     logger.debug(
+                         "No short-term memories found in database, skipping FULLTEXT search"
+                     )
+                     search_short_term = False
+
+             if search_long_term:
+                 long_count = (
+                     self.session.query(LongTermMemory)
+                     .filter(LongTermMemory.namespace == namespace)
+                     .count()
+                 )
+                 if long_count == 0:
+                     logger.debug(
+                         "No long-term memories found in database, skipping FULLTEXT search"
+                     )
+                     search_long_term = False
+
+             # If no records exist, return empty results
+             if not search_short_term and not search_long_term:
+                 logger.debug("No memories found in database for FULLTEXT search")
+                 return []
+
              # Apply limit proportionally between memory types
              short_limit = (
                  limit // 2 if search_short_term and search_long_term else limit
@@ -278,65 +309,147 @@ class SearchService:

          # Search short-term memory if requested
          if search_short_term:
-             short_query = self.session.query(ShortTermMemory).filter(
-                 ShortTermMemory.namespace == namespace
-             )
-
-             # Add FULLTEXT search
-             fulltext_condition = text(
-                 "MATCH(searchable_content, summary) AGAINST(:query IN NATURAL LANGUAGE MODE)"
-             ).params(query=query)
-             short_query = short_query.filter(fulltext_condition)
-
-             # Add category filter
-             if category_filter:
-                 short_query = short_query.filter(
-                     ShortTermMemory.category_primary.in_(category_filter)
+             try:
+                 # Build category filter clause
+                 category_clause = ""
+                 params = {"query": query}
+                 if category_filter:
+                     category_placeholders = ",".join(
+                         [f":cat_{i}" for i in range(len(category_filter))]
+                     )
+                     category_clause = (
+                         f"AND category_primary IN ({category_placeholders})"
+                     )
+                     for i, cat in enumerate(category_filter):
+                         params[f"cat_{i}"] = cat
+
+                 # Use direct SQL query for more reliable results
+                 sql_query = text(
+                     f"""
+                     SELECT
+                         memory_id,
+                         processed_data,
+                         importance_score,
+                         created_at,
+                         summary,
+                         category_primary,
+                         MATCH(searchable_content, summary) AGAINST(:query IN NATURAL LANGUAGE MODE) as search_score,
+                         'short_term' as memory_type,
+                         'mysql_fulltext' as search_strategy
+                     FROM short_term_memory
+                     WHERE namespace = :namespace
+                     AND MATCH(searchable_content, summary) AGAINST(:query IN NATURAL LANGUAGE MODE)
+                     {category_clause}
+                     ORDER BY search_score DESC
+                     LIMIT :short_limit
+                     """
                  )

-             # Add relevance score and limit
-             short_results = self.session.execute(
-                 short_query.statement.add_columns(
-                     text(
-                         "MATCH(searchable_content, summary) AGAINST(:query IN NATURAL LANGUAGE MODE) as search_score"
-                     ).params(query=query),
-                     text("'short_term' as memory_type"),
-                     text("'mysql_fulltext' as search_strategy"),
-                 ).limit(short_limit)
-             ).fetchall()
-
-             results.extend([dict(row) for row in short_results])
+                 params["namespace"] = namespace
+                 params["short_limit"] = short_limit
+
+                 short_results = self.session.execute(sql_query, params).fetchall()
+
+                 # Convert rows to dictionaries safely
+                 for row in short_results:
+                     try:
+                         if hasattr(row, "_mapping"):
+                             row_dict = dict(row._mapping)
+                         else:
+                             # Create dict from row values and keys
+                             row_dict = {
+                                 "memory_id": row[0],
+                                 "processed_data": row[1],
+                                 "importance_score": row[2],
+                                 "created_at": row[3],
+                                 "summary": row[4],
+                                 "category_primary": row[5],
+                                 "search_score": float(row[6]) if row[6] else 0.0,
+                                 "memory_type": row[7],
+                                 "search_strategy": row[8],
+                             }
+                         results.append(row_dict)
+                     except Exception as e:
+                         logger.warning(
+                             f"Failed to convert short-term memory row to dict: {e}"
+                         )
+                         continue
+
+             except Exception as e:
+                 logger.warning(f"Short-term memory FULLTEXT search failed: {e}")
+                 # Continue to try long-term search

          # Search long-term memory if requested
          if search_long_term:
-             long_query = self.session.query(LongTermMemory).filter(
-                 LongTermMemory.namespace == namespace
-             )
-
-             # Add FULLTEXT search
-             fulltext_condition = text(
-                 "MATCH(searchable_content, summary) AGAINST(:query IN NATURAL LANGUAGE MODE)"
-             ).params(query=query)
-             long_query = long_query.filter(fulltext_condition)
-
-             # Add category filter
-             if category_filter:
-                 long_query = long_query.filter(
-                     LongTermMemory.category_primary.in_(category_filter)
+             try:
+                 # Build category filter clause
+                 category_clause = ""
+                 params = {"query": query}
+                 if category_filter:
+                     category_placeholders = ",".join(
+                         [f":cat_{i}" for i in range(len(category_filter))]
+                     )
+                     category_clause = (
+                         f"AND category_primary IN ({category_placeholders})"
+                     )
+                     for i, cat in enumerate(category_filter):
+                         params[f"cat_{i}"] = cat
+
+                 # Use direct SQL query for more reliable results
+                 sql_query = text(
+                     f"""
+                     SELECT
+                         memory_id,
+                         processed_data,
+                         importance_score,
+                         created_at,
+                         summary,
+                         category_primary,
+                         MATCH(searchable_content, summary) AGAINST(:query IN NATURAL LANGUAGE MODE) as search_score,
+                         'long_term' as memory_type,
+                         'mysql_fulltext' as search_strategy
+                     FROM long_term_memory
+                     WHERE namespace = :namespace
+                     AND MATCH(searchable_content, summary) AGAINST(:query IN NATURAL LANGUAGE MODE)
+                     {category_clause}
+                     ORDER BY search_score DESC
+                     LIMIT :long_limit
+                     """
                  )

-             # Add relevance score and limit
-             long_results = self.session.execute(
-                 long_query.statement.add_columns(
-                     text(
-                         "MATCH(searchable_content, summary) AGAINST(:query IN NATURAL LANGUAGE MODE) as search_score"
-                     ).params(query=query),
-                     text("'long_term' as memory_type"),
-                     text("'mysql_fulltext' as search_strategy"),
-                 ).limit(long_limit)
-             ).fetchall()
-
-             results.extend([dict(row) for row in long_results])
+                 params["namespace"] = namespace
+                 params["long_limit"] = long_limit
+
+                 long_results = self.session.execute(sql_query, params).fetchall()
+
+                 # Convert rows to dictionaries safely
+                 for row in long_results:
+                     try:
+                         if hasattr(row, "_mapping"):
+                             row_dict = dict(row._mapping)
+                         else:
+                             # Create dict from row values and keys
+                             row_dict = {
+                                 "memory_id": row[0],
+                                 "processed_data": row[1],
+                                 "importance_score": row[2],
+                                 "created_at": row[3],
+                                 "summary": row[4],
+                                 "category_primary": row[5],
+                                 "search_score": float(row[6]) if row[6] else 0.0,
+                                 "memory_type": row[7],
+                                 "search_strategy": row[8],
+                             }
+                         results.append(row_dict)
+                     except Exception as e:
+                         logger.warning(
+                             f"Failed to convert long-term memory row to dict: {e}"
+                         )
+                         continue
+
+             except Exception as e:
+                 logger.warning(f"Long-term memory FULLTEXT search failed: {e}")
+                 # Continue with whatever results we have

          return results

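The hasattr(row, "_mapping") guard above exists because SQLAlchemy 1.4+ Row objects are tuple-like and cannot be passed to dict() directly; Row._mapping is the supported mapping view. A standalone illustration (in-memory SQLite, invented column values):

from sqlalchemy import create_engine, text

engine = create_engine("sqlite://")  # throwaway in-memory database
with engine.connect() as conn:
    row = conn.execute(text("SELECT 'abc123' AS memory_id, 0.5 AS search_score")).fetchone()
    print(dict(row._mapping))  # {'memory_id': 'abc123', 'search_score': 0.5}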
@@ -379,69 +492,100 @@ class SearchService:

          # Search short-term memory if requested
          if search_short_term:
-             short_query = self.session.query(ShortTermMemory).filter(
-                 ShortTermMemory.namespace == namespace
-             )
-
-             # Add tsvector search
-             ts_query = text(
-                 "search_vector @@ to_tsquery('english', :query)"
-             ).params(query=tsquery_text)
-             short_query = short_query.filter(ts_query)

-             # Add category filter
+             # Build category filter clause safely
+             category_clause = ""
              if category_filter:
-                 short_query = short_query.filter(
-                     ShortTermMemory.category_primary.in_(category_filter)
-                 )
+                 category_clause = "AND category_primary = ANY(:category_list)"
+
+             # Use direct SQL to avoid SQLAlchemy Row conversion issues
+             short_sql = text(
+                 f"""
+                 SELECT memory_id, processed_data, importance_score, created_at, summary, category_primary,
+                        ts_rank(search_vector, to_tsquery('english', :query)) as search_score,
+                        'short_term' as memory_type, 'postgresql_fts' as search_strategy
+                 FROM short_term_memory
+                 WHERE namespace = :namespace
+                 AND search_vector @@ to_tsquery('english', :query)
+                 {category_clause}
+                 ORDER BY search_score DESC
+                 LIMIT :limit
+                 """
+             )

-             # Add relevance score and limit
-             short_results = self.session.execute(
-                 short_query.statement.add_columns(
-                     text(
-                         "ts_rank(search_vector, to_tsquery('english', :query)) as search_score"
-                     ).params(query=tsquery_text),
-                     text("'short_term' as memory_type"),
-                     text("'postgresql_fts' as search_strategy"),
+             params = {
+                 "namespace": namespace,
+                 "query": tsquery_text,
+                 "limit": short_limit,
+             }
+             if category_filter:
+                 params["category_list"] = category_filter
+
+             short_results = self.session.execute(short_sql, params).fetchall()
+
+             # Convert to dictionaries manually with proper column mapping
+             for row in short_results:
+                 results.append(
+                     {
+                         "memory_id": row[0],
+                         "processed_data": row[1],
+                         "importance_score": row[2],
+                         "created_at": row[3],
+                         "summary": row[4],
+                         "category_primary": row[5],
+                         "search_score": row[6],
+                         "memory_type": row[7],
+                         "search_strategy": row[8],
+                     }
                  )
-                 .order_by(text("search_score DESC"))
-                 .limit(short_limit)
-             ).fetchall()
-
-             results.extend([dict(row) for row in short_results])

          # Search long-term memory if requested
          if search_long_term:
-             long_query = self.session.query(LongTermMemory).filter(
-                 LongTermMemory.namespace == namespace
+             # Build category filter clause safely
+             category_clause = ""
+             if category_filter:
+                 category_clause = "AND category_primary = ANY(:category_list)"
+
+             # Use direct SQL to avoid SQLAlchemy Row conversion issues
+             long_sql = text(
+                 f"""
+                 SELECT memory_id, processed_data, importance_score, created_at, summary, category_primary,
+                        ts_rank(search_vector, to_tsquery('english', :query)) as search_score,
+                        'long_term' as memory_type, 'postgresql_fts' as search_strategy
+                 FROM long_term_memory
+                 WHERE namespace = :namespace
+                 AND search_vector @@ to_tsquery('english', :query)
+                 {category_clause}
+                 ORDER BY search_score DESC
+                 LIMIT :limit
+                 """
              )

-             # Add tsvector search
-             ts_query = text(
-                 "search_vector @@ to_tsquery('english', :query)"
-             ).params(query=tsquery_text)
-             long_query = long_query.filter(ts_query)
-
-             # Add category filter
+             params = {
+                 "namespace": namespace,
+                 "query": tsquery_text,
+                 "limit": long_limit,
+             }
              if category_filter:
-                 long_query = long_query.filter(
-                     LongTermMemory.category_primary.in_(category_filter)
-                 )
-
-             # Add relevance score and limit
-             long_results = self.session.execute(
-                 long_query.statement.add_columns(
-                     text(
-                         "ts_rank(search_vector, to_tsquery('english', :query)) as search_score"
-                     ).params(query=tsquery_text),
-                     text("'long_term' as memory_type"),
-                     text("'postgresql_fts' as search_strategy"),
+                 params["category_list"] = category_filter
+
+             long_results = self.session.execute(long_sql, params).fetchall()
+
+             # Convert to dictionaries manually with proper column mapping
+             for row in long_results:
+                 results.append(
+                     {
+                         "memory_id": row[0],
+                         "processed_data": row[1],
+                         "importance_score": row[2],
+                         "created_at": row[3],
+                         "summary": row[4],
+                         "category_primary": row[5],
+                         "search_score": row[6],
+                         "memory_type": row[7],
+                         "search_strategy": row[8],
+                     }
                  )
-                 .order_by(text("search_score DESC"))
-                 .limit(long_limit)
-             ).fetchall()
-
-             results.extend([dict(row) for row in long_results])

          return results

memori/database/sqlalchemy_manager.py CHANGED
@@ -348,24 +348,72 @@ class SQLAlchemyDatabaseManager:
      def _setup_mysql_fulltext(self, conn):
          """Setup MySQL FULLTEXT indexes"""
          try:
-             # Create FULLTEXT indexes
-             conn.execute(
-                 text(
-                     "ALTER TABLE short_term_memory ADD FULLTEXT INDEX ft_short_term_search (searchable_content, summary)"
-                 )
+             # Check if indexes exist before creating them
+             index_check_query = text(
+                 """
+                 SELECT COUNT(*) as index_count
+                 FROM information_schema.statistics
+                 WHERE table_schema = DATABASE()
+                 AND index_name IN ('ft_short_term_search', 'ft_long_term_search')
+                 """
              )
-             conn.execute(
-                 text(
-                     "ALTER TABLE long_term_memory ADD FULLTEXT INDEX ft_long_term_search (searchable_content, summary)"
+
+             result = conn.execute(index_check_query)
+             existing_indexes = result.fetchone()[0]
+
+             if existing_indexes < 2:
+                 logger.info(
+                     f"Creating missing MySQL FULLTEXT indexes ({existing_indexes}/2 exist)..."
                  )
-             )

-             logger.info("MySQL FULLTEXT indexes setup completed")
+                 # Check and create short_term_memory index if missing
+                 short_term_check = conn.execute(
+                     text(
+                         """
+                         SELECT COUNT(*) FROM information_schema.statistics
+                         WHERE table_schema = DATABASE()
+                         AND table_name = 'short_term_memory'
+                         AND index_name = 'ft_short_term_search'
+                         """
+                     )
+                 ).fetchone()[0]
+
+                 if short_term_check == 0:
+                     conn.execute(
+                         text(
+                             "ALTER TABLE short_term_memory ADD FULLTEXT INDEX ft_short_term_search (searchable_content, summary)"
+                         )
+                     )
+                     logger.info("Created ft_short_term_search index")
+
+                 # Check and create long_term_memory index if missing
+                 long_term_check = conn.execute(
+                     text(
+                         """
+                         SELECT COUNT(*) FROM information_schema.statistics
+                         WHERE table_schema = DATABASE()
+                         AND table_name = 'long_term_memory'
+                         AND index_name = 'ft_long_term_search'
+                         """
+                     )
+                 ).fetchone()[0]
+
+                 if long_term_check == 0:
+                     conn.execute(
+                         text(
+                             "ALTER TABLE long_term_memory ADD FULLTEXT INDEX ft_long_term_search (searchable_content, summary)"
+                         )
+                     )
+                     logger.info("Created ft_long_term_search index")
+
+                 logger.info("MySQL FULLTEXT indexes setup completed")
+             else:
+                 logger.debug(
+                     "MySQL FULLTEXT indexes already exist (2/2), skipping creation"
+                 )

          except Exception as e:
-             logger.warning(
-                 f"MySQL FULLTEXT setup failed (indexes may already exist): {e}"
-             )
+             logger.warning(f"MySQL FULLTEXT setup failed: {e}")

      def _setup_postgresql_fts(self, conn):
          """Setup PostgreSQL full-text search"""
memorisdk-2.3.0.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: memorisdk
- Version: 2.1.1
+ Version: 2.3.0
  Summary: The Open-Source Memory Layer for AI Agents & Multi-Agent Systems
  Author-email: GibsonAI Team <noc@gibsonai.com>
  License: Apache-2.0
@@ -482,6 +482,7 @@ Memori works seamlessly with popular AI frameworks:
  | [Agno](./examples/integrations/agno_example.py) | Memory-enhanced agent framework integration with persistent conversations | Simple chat agent with memory search |
  | [AWS Strands](./examples/integrations/aws_strands_example.py) | Professional development coach with Strands SDK and persistent memory | Career coaching agent with goal tracking |
  | [Azure AI Foundry](./examples/integrations/azure_ai_foundry_example.py) | Azure AI Foundry agents with persistent memory across conversations | Enterprise AI agents with Azure integration |
+ | [AutoGen](./examples/integrations/autogen_example.py) | Multi-agent group chat memory recording | Agent chats with memory integration |
  | [CamelAI](./examples/integrations/camelai_example.py) | Multi-agent communication framework with automatic memory recording and retrieval | Memory-enhanced chat agents with conversation continuity |
  | [CrewAI](./examples/integrations/crewai_example.py) | Multi-agent system with shared memory across agent interactions | Collaborative agents with memory |
  | [Digital Ocean AI](./examples/integrations/digital_ocean_example.py) | Memory-enhanced customer support using Digital Ocean's AI platform | Customer support assistant with conversation history |
memorisdk-2.3.0.dist-info/RECORD CHANGED
@@ -1,25 +1,25 @@
- memori/__init__.py,sha256=u5Y2fGofYR3hZwRLN0lwu24iVztgmDLk0JGZfwhSjW8,3670
+ memori/__init__.py,sha256=BXUPF0Td839gaRLZv18OfmWz6ergsJ0lti1w_LM3H2s,3670
  memori/agents/__init__.py,sha256=9M3IG5R10FfVgT8tUzBZ2pZ0SypSpYkFfhtyvMyeTpE,261
- memori/agents/conscious_agent.py,sha256=x3MFps2BSIt9CubjwFUZ_2g4EXESO66aT2lyHx_LiDQ,22225
- memori/agents/memory_agent.py,sha256=khCbbBaMfHm7uYxZvIw4JO4kXzM848R_Cual0uSVZ2A,23957
- memori/agents/retrieval_agent.py,sha256=_8J50i68AvVxylcEYWDTMfgXnoKO0hx97X8Lo3byg0U,40867
+ memori/agents/conscious_agent.py,sha256=JhbwnmMNysOeisJI1MH6o8PLS-WonGAfgCx6sdWb4IM,22324
+ memori/agents/memory_agent.py,sha256=ZMFiTpXVf0Bympwmwr101bSo2oeCNVSN4MpF9q78C7o,24068
+ memori/agents/retrieval_agent.py,sha256=vDHS9NzlWOV_wK8cFVpLGuXM2kcZB_VQxfqeSPwOpxk,42040
  memori/config/__init__.py,sha256=tQAxopgOsea02u9iId-ocOY86nWWNGC3rvt3AOFcLn8,295
  memori/config/manager.py,sha256=PnIfp-j8BzvSsomzGZtMOdtARuhaoVwfxj1pJs5hLow,10360
  memori/config/memory_manager.py,sha256=jEchdcMxNiM825Z2ypsE5vY-uS5mCbd_AKsnQ6o_YW8,10938
  memori/config/settings.py,sha256=t-Pmz3x-IjMDcdIFJ9VNK37-OeNOhSfFciWtseUVdRc,9795
  memori/core/__init__.py,sha256=jvhHn-KL3bzRHs11-4B0BCKH6gkAf6Gf_G59If8fD0M,157
- memori/core/conversation.py,sha256=kQV59BWy_ZS0RwARXnQGs5KXB1fRh1ftH8rHuNs6a_E,15813
+ memori/core/conversation.py,sha256=MSfbbr2nAIKFGzRD5YMAW_ZIPqeBmaopcOonnnRrM3I,15907
  memori/core/database.py,sha256=aycWOP2TJD5GBZXnAFU2yPDeGKRUjUeep9DoK6hLGas,40075
- memori/core/memory.py,sha256=257RDLlLUdKxN4rd2HUhBqjha5Ahx1zpS-TfF3wxhvw,117079
+ memori/core/memory.py,sha256=27ZYyq8ivx7qjeaBiU3JizOBP7XoJigGjH0JdzHCXPk,120172
  memori/core/providers.py,sha256=IH-ep_VYY_-itY31dyqT-ftDlHKE_IzsFED_30tAJaI,6944
  memori/database/__init__.py,sha256=yungdfis0lyDE2eZFs86miYCAMG4klhS-TLhKq-1K3w,426
- memori/database/auto_creator.py,sha256=oMUKuJlLqwG4OxbhkMMjq0e5DDan3KcvAfEM2a56jOQ,12662
+ memori/database/auto_creator.py,sha256=M18-PEOesFAfdbmK2hP7wnS4LCcBYGMMRZ-1Onfo_9U,15729
  memori/database/connection_utils.py,sha256=QQ50IQlrNUd0CWiFQeH76yIi1evbgMv8dDV29QkSAsc,6796
  memori/database/models.py,sha256=jPsD_ALGHMgv6_nxgizSQ2BTbNTupCMPOuS5EBaUiFU,14826
  memori/database/mongodb_manager.py,sha256=aB4vEnL2Jrb0ny4wbhsQGouTwkKfcVGNwsc-D8k7WYA,59270
  memori/database/query_translator.py,sha256=ruwzfIVxhcO5ouNamRVlxIUMkesU7xE8N5LzgaPW9Qc,6988
- memori/database/search_service.py,sha256=QH0CQaDONR3aSd7Su2C6Ysz4pikbYk3NDFRkP4EcFws,26856
- memori/database/sqlalchemy_manager.py,sha256=qSVSUp76CFiAy7CUvwdq3xGeXPGimhZxac0OtnV6XKQ,34926
+ memori/database/search_service.py,sha256=uTu6-GarBN5P8DYFefC5CZxJytScz-R5iHW7fFV1fUo,33752
+ memori/database/sqlalchemy_manager.py,sha256=pZjxH4pU1JRaMlvWNcDkTYAAWORqHLTiiYrWEVDQbrk,36968
  memori/database/adapters/__init__.py,sha256=QMAo4Ts5ycxSqi0nmcssHRXASwr2KKn0mMu-kYbmFzo,626
  memori/database/adapters/mongodb_adapter.py,sha256=iZXaQwB0_My1eVz0mqeTZNILIXGnCF_UeOwZ3TAhZao,26537
  memori/database/adapters/mysql_adapter.py,sha256=HK04a9GAZiKwWZJkVhArNqOCUnHUnJRA-c_MAOPJVUI,12915
@@ -64,8 +64,8 @@ memori/utils/security_audit.py,sha256=SZyqdHyIUzrrnMxO__dAcnB-3x0vSuHW7eW0apOGT2
  memori/utils/security_integration.py,sha256=xiyYQ1sEo0yk_0NhWeXzzjTJ60pNbI0SEyAz766iilA,13050
  memori/utils/transaction_manager.py,sha256=kyxI_gRJUY8b1lq0ZUDN65rNmUC5qIGOyL8obFIysBQ,18735
  memori/utils/validators.py,sha256=u5emqDrSkN9JlJxdo5yxcCqs510UDOiLf16F6p-Oyps,11267
- memorisdk-2.1.1.dist-info/licenses/LICENSE,sha256=gyrDaYsSODngoYE1l68l_UfjppS-oYDrf1MvY1JGhgE,10430
- memorisdk-2.1.1.dist-info/METADATA,sha256=6KWbuv3JjXWmNrCjwqZ-MisJQVK5CqgS9H-0W-AyY0c,19907
- memorisdk-2.1.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- memorisdk-2.1.1.dist-info/top_level.txt,sha256=Nm3ad0isbJYBzTEce-O_gmkAEiTbAbyilgAhRt8IoGA,7
- memorisdk-2.1.1.dist-info/RECORD,,
+ memorisdk-2.3.0.dist-info/licenses/LICENSE,sha256=gyrDaYsSODngoYE1l68l_UfjppS-oYDrf1MvY1JGhgE,10430
+ memorisdk-2.3.0.dist-info/METADATA,sha256=aey8RziaUGuNW_zqGh8AkbeTCJYoFszOwo-6Zxtzbho,20046
+ memorisdk-2.3.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ memorisdk-2.3.0.dist-info/top_level.txt,sha256=Nm3ad0isbJYBzTEce-O_gmkAEiTbAbyilgAhRt8IoGA,7
+ memorisdk-2.3.0.dist-info/RECORD,,