memorisdk 2.1.0__py3-none-any.whl → 2.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of memorisdk might be problematic; consult the registry's advisory page for more details.

@@ -42,7 +42,7 @@ class SearchService:
42
42
  List of memory dictionaries with search metadata
43
43
  """
44
44
  logger.debug(
45
- f"SearchService.search_memories called - query: '{query}', namespace: '{namespace}', database: {self.database_type}, limit: {limit}"
45
+ f"[SEARCH] Query initiated - '{query[:50]}{'...' if len(query) > 50 else ''}' | namespace: '{namespace}' | db: {self.database_type} | limit: {limit}"
46
46
  )
47
47
 
48
48
  if not query or not query.strip():
@@ -58,13 +58,13 @@ class SearchService:
58
58
  search_long_term = not memory_types or "long_term" in memory_types
59
59
 
60
60
  logger.debug(
61
- f"Memory types to search - short_term: {search_short_term}, long_term: {search_long_term}, categories: {category_filter}"
61
+ f"[SEARCH] Target scope - short_term: {search_short_term} | long_term: {search_long_term} | categories: {category_filter or 'all'}"
62
62
  )
63
63
 
64
64
  try:
65
65
  # Try database-specific full-text search first
66
66
  if self.database_type == "sqlite":
67
- logger.debug("Using SQLite FTS5 search strategy")
67
+ logger.debug("[SEARCH] Strategy: SQLite FTS5")
68
68
  results = self._search_sqlite_fts(
69
69
  query,
70
70
  namespace,
@@ -74,7 +74,7 @@ class SearchService:
74
74
  search_long_term,
75
75
  )
76
76
  elif self.database_type == "mysql":
77
- logger.debug("Using MySQL FULLTEXT search strategy")
77
+ logger.debug("[SEARCH] Strategy: MySQL FULLTEXT")
78
78
  results = self._search_mysql_fulltext(
79
79
  query,
80
80
  namespace,
@@ -84,7 +84,7 @@ class SearchService:
84
84
  search_long_term,
85
85
  )
86
86
  elif self.database_type == "postgresql":
87
- logger.debug("Using PostgreSQL FTS search strategy")
87
+ logger.debug("[SEARCH] Strategy: PostgreSQL FTS")
88
88
  results = self._search_postgresql_fts(
89
89
  query,
90
90
  namespace,
@@ -94,12 +94,12 @@ class SearchService:
94
94
  search_long_term,
95
95
  )
96
96
 
97
- logger.debug(f"Primary search strategy returned {len(results)} results")
97
+ logger.debug(f"[SEARCH] Primary strategy results: {len(results)} matches")
98
98
 
99
99
  # If no results or full-text search failed, fall back to LIKE search
100
100
  if not results:
101
101
  logger.debug(
102
- "Primary search returned no results, falling back to LIKE search"
102
+ "[SEARCH] Primary strategy empty, falling back to LIKE search"
103
103
  )
104
104
  results = self._search_like_fallback(
105
105
  query,
@@ -112,13 +112,10 @@ class SearchService:
112
112
 
113
113
  except Exception as e:
114
114
  logger.error(
115
- f"Full-text search failed for query '{query}' in namespace '{namespace}': {e}"
115
+ f"[SEARCH] Full-text search failed for '{query[:30]}...' in '{namespace}' - {type(e).__name__}: {e}"
116
116
  )
117
- logger.debug(
118
- f"Full-text search error details: {type(e).__name__}: {str(e)}",
119
- exc_info=True,
120
- )
121
- logger.warning(f"Falling back to LIKE search for query '{query}'")
117
+ logger.debug("[SEARCH] Full-text error details", exc_info=True)
118
+ logger.warning("[SEARCH] Attempting LIKE fallback search")
122
119
  try:
123
120
  results = self._search_like_fallback(
124
121
  query,
@@ -128,21 +125,25 @@ class SearchService:
128
125
  search_short_term,
129
126
  search_long_term,
130
127
  )
131
- logger.debug(f"LIKE fallback search returned {len(results)} results")
128
+ logger.debug(f"[SEARCH] LIKE fallback results: {len(results)} matches")
132
129
  except Exception as fallback_e:
133
130
  logger.error(
134
- f"LIKE fallback search also failed for query '{query}': {fallback_e}"
131
+ f"[SEARCH] LIKE fallback also failed - {type(fallback_e).__name__}: {fallback_e}"
135
132
  )
136
133
  results = []
137
134
 
138
135
  final_results = self._rank_and_limit_results(results, limit)
139
136
  logger.debug(
140
- f"SearchService completed - returning {len(final_results)} final results after ranking and limiting"
137
+ f"[SEARCH] Completed - {len(final_results)} results after ranking and limiting"
141
138
  )
142
139
 
143
140
  if final_results:
141
+ top_result = final_results[0]
142
+ memory_id = str(top_result.get("memory_id", "unknown"))[:8]
143
+ score = top_result.get("composite_score", 0)
144
+ strategy = top_result.get("search_strategy", "unknown")
144
145
  logger.debug(
145
- f"Top result: memory_id={final_results[0].get('memory_id')}, score={final_results[0].get('composite_score', 0):.3f}, strategy={final_results[0].get('search_strategy')}"
146
+ f"[SEARCH] Top result: {memory_id}... | score: {score:.3f} | strategy: {strategy}"
146
147
  )
147
148
 
148
149
  return final_results
@@ -268,6 +269,36 @@ class SearchService:
268
269
  results = []
269
270
 
270
271
  try:
272
+ # First check if there are any records in the database
273
+ if search_short_term:
274
+ short_count = (
275
+ self.session.query(ShortTermMemory)
276
+ .filter(ShortTermMemory.namespace == namespace)
277
+ .count()
278
+ )
279
+ if short_count == 0:
280
+ logger.debug(
281
+ "No short-term memories found in database, skipping FULLTEXT search"
282
+ )
283
+ search_short_term = False
284
+
285
+ if search_long_term:
286
+ long_count = (
287
+ self.session.query(LongTermMemory)
288
+ .filter(LongTermMemory.namespace == namespace)
289
+ .count()
290
+ )
291
+ if long_count == 0:
292
+ logger.debug(
293
+ "No long-term memories found in database, skipping FULLTEXT search"
294
+ )
295
+ search_long_term = False
296
+
297
+ # If no records exist, return empty results
298
+ if not search_short_term and not search_long_term:
299
+ logger.debug("No memories found in database for FULLTEXT search")
300
+ return []
301
+
271
302
  # Apply limit proportionally between memory types
272
303
  short_limit = (
273
304
  limit // 2 if search_short_term and search_long_term else limit
@@ -278,65 +309,147 @@ class SearchService:
278
309
 
279
310
  # Search short-term memory if requested
280
311
  if search_short_term:
281
- short_query = self.session.query(ShortTermMemory).filter(
282
- ShortTermMemory.namespace == namespace
283
- )
284
-
285
- # Add FULLTEXT search
286
- fulltext_condition = text(
287
- "MATCH(searchable_content, summary) AGAINST(:query IN NATURAL LANGUAGE MODE)"
288
- ).params(query=query)
289
- short_query = short_query.filter(fulltext_condition)
290
-
291
- # Add category filter
292
- if category_filter:
293
- short_query = short_query.filter(
294
- ShortTermMemory.category_primary.in_(category_filter)
312
+ try:
313
+ # Build category filter clause
314
+ category_clause = ""
315
+ params = {"query": query}
316
+ if category_filter:
317
+ category_placeholders = ",".join(
318
+ [f":cat_{i}" for i in range(len(category_filter))]
319
+ )
320
+ category_clause = (
321
+ f"AND category_primary IN ({category_placeholders})"
322
+ )
323
+ for i, cat in enumerate(category_filter):
324
+ params[f"cat_{i}"] = cat
325
+
326
+ # Use direct SQL query for more reliable results
327
+ sql_query = text(
328
+ f"""
329
+ SELECT
330
+ memory_id,
331
+ processed_data,
332
+ importance_score,
333
+ created_at,
334
+ summary,
335
+ category_primary,
336
+ MATCH(searchable_content, summary) AGAINST(:query IN NATURAL LANGUAGE MODE) as search_score,
337
+ 'short_term' as memory_type,
338
+ 'mysql_fulltext' as search_strategy
339
+ FROM short_term_memory
340
+ WHERE namespace = :namespace
341
+ AND MATCH(searchable_content, summary) AGAINST(:query IN NATURAL LANGUAGE MODE)
342
+ {category_clause}
343
+ ORDER BY search_score DESC
344
+ LIMIT :short_limit
345
+ """
295
346
  )
296
347
 
297
- # Add relevance score and limit
298
- short_results = self.session.execute(
299
- short_query.statement.add_columns(
300
- text(
301
- "MATCH(searchable_content, summary) AGAINST(:query IN NATURAL LANGUAGE MODE) as search_score"
302
- ).params(query=query),
303
- text("'short_term' as memory_type"),
304
- text("'mysql_fulltext' as search_strategy"),
305
- ).limit(short_limit)
306
- ).fetchall()
307
-
308
- results.extend([dict(row) for row in short_results])
348
+ params["namespace"] = namespace
349
+ params["short_limit"] = short_limit
350
+
351
+ short_results = self.session.execute(sql_query, params).fetchall()
352
+
353
+ # Convert rows to dictionaries safely
354
+ for row in short_results:
355
+ try:
356
+ if hasattr(row, "_mapping"):
357
+ row_dict = dict(row._mapping)
358
+ else:
359
+ # Create dict from row values and keys
360
+ row_dict = {
361
+ "memory_id": row[0],
362
+ "processed_data": row[1],
363
+ "importance_score": row[2],
364
+ "created_at": row[3],
365
+ "summary": row[4],
366
+ "category_primary": row[5],
367
+ "search_score": float(row[6]) if row[6] else 0.0,
368
+ "memory_type": row[7],
369
+ "search_strategy": row[8],
370
+ }
371
+ results.append(row_dict)
372
+ except Exception as e:
373
+ logger.warning(
374
+ f"Failed to convert short-term memory row to dict: {e}"
375
+ )
376
+ continue
377
+
378
+ except Exception as e:
379
+ logger.warning(f"Short-term memory FULLTEXT search failed: {e}")
380
+ # Continue to try long-term search
309
381
 
310
382
  # Search long-term memory if requested
311
383
  if search_long_term:
312
- long_query = self.session.query(LongTermMemory).filter(
313
- LongTermMemory.namespace == namespace
314
- )
315
-
316
- # Add FULLTEXT search
317
- fulltext_condition = text(
318
- "MATCH(searchable_content, summary) AGAINST(:query IN NATURAL LANGUAGE MODE)"
319
- ).params(query=query)
320
- long_query = long_query.filter(fulltext_condition)
321
-
322
- # Add category filter
323
- if category_filter:
324
- long_query = long_query.filter(
325
- LongTermMemory.category_primary.in_(category_filter)
384
+ try:
385
+ # Build category filter clause
386
+ category_clause = ""
387
+ params = {"query": query}
388
+ if category_filter:
389
+ category_placeholders = ",".join(
390
+ [f":cat_{i}" for i in range(len(category_filter))]
391
+ )
392
+ category_clause = (
393
+ f"AND category_primary IN ({category_placeholders})"
394
+ )
395
+ for i, cat in enumerate(category_filter):
396
+ params[f"cat_{i}"] = cat
397
+
398
+ # Use direct SQL query for more reliable results
399
+ sql_query = text(
400
+ f"""
401
+ SELECT
402
+ memory_id,
403
+ processed_data,
404
+ importance_score,
405
+ created_at,
406
+ summary,
407
+ category_primary,
408
+ MATCH(searchable_content, summary) AGAINST(:query IN NATURAL LANGUAGE MODE) as search_score,
409
+ 'long_term' as memory_type,
410
+ 'mysql_fulltext' as search_strategy
411
+ FROM long_term_memory
412
+ WHERE namespace = :namespace
413
+ AND MATCH(searchable_content, summary) AGAINST(:query IN NATURAL LANGUAGE MODE)
414
+ {category_clause}
415
+ ORDER BY search_score DESC
416
+ LIMIT :long_limit
417
+ """
326
418
  )
327
419
 
328
- # Add relevance score and limit
329
- long_results = self.session.execute(
330
- long_query.statement.add_columns(
331
- text(
332
- "MATCH(searchable_content, summary) AGAINST(:query IN NATURAL LANGUAGE MODE) as search_score"
333
- ).params(query=query),
334
- text("'long_term' as memory_type"),
335
- text("'mysql_fulltext' as search_strategy"),
336
- ).limit(long_limit)
337
- ).fetchall()
338
-
339
- results.extend([dict(row) for row in long_results])
420
+ params["namespace"] = namespace
421
+ params["long_limit"] = long_limit
422
+
423
+ long_results = self.session.execute(sql_query, params).fetchall()
424
+
425
+ # Convert rows to dictionaries safely
426
+ for row in long_results:
427
+ try:
428
+ if hasattr(row, "_mapping"):
429
+ row_dict = dict(row._mapping)
430
+ else:
431
+ # Create dict from row values and keys
432
+ row_dict = {
433
+ "memory_id": row[0],
434
+ "processed_data": row[1],
435
+ "importance_score": row[2],
436
+ "created_at": row[3],
437
+ "summary": row[4],
438
+ "category_primary": row[5],
439
+ "search_score": float(row[6]) if row[6] else 0.0,
440
+ "memory_type": row[7],
441
+ "search_strategy": row[8],
442
+ }
443
+ results.append(row_dict)
444
+ except Exception as e:
445
+ logger.warning(
446
+ f"Failed to convert long-term memory row to dict: {e}"
447
+ )
448
+ continue
449
+
450
+ except Exception as e:
451
+ logger.warning(f"Long-term memory FULLTEXT search failed: {e}")
452
+ # Continue with whatever results we have
340
453
 
341
454
  return results
342
455
 
@@ -379,69 +492,100 @@ class SearchService:
379
492
 
380
493
  # Search short-term memory if requested
381
494
  if search_short_term:
382
- short_query = self.session.query(ShortTermMemory).filter(
383
- ShortTermMemory.namespace == namespace
384
- )
385
-
386
- # Add tsvector search
387
- ts_query = text(
388
- "search_vector @@ to_tsquery('english', :query)"
389
- ).params(query=tsquery_text)
390
- short_query = short_query.filter(ts_query)
391
495
 
392
- # Add category filter
496
+ # Build category filter clause safely
497
+ category_clause = ""
393
498
  if category_filter:
394
- short_query = short_query.filter(
395
- ShortTermMemory.category_primary.in_(category_filter)
396
- )
499
+ category_clause = "AND category_primary = ANY(:category_list)"
500
+
501
+ # Use direct SQL to avoid SQLAlchemy Row conversion issues
502
+ short_sql = text(
503
+ f"""
504
+ SELECT memory_id, processed_data, importance_score, created_at, summary, category_primary,
505
+ ts_rank(search_vector, to_tsquery('english', :query)) as search_score,
506
+ 'short_term' as memory_type, 'postgresql_fts' as search_strategy
507
+ FROM short_term_memory
508
+ WHERE namespace = :namespace
509
+ AND search_vector @@ to_tsquery('english', :query)
510
+ {category_clause}
511
+ ORDER BY search_score DESC
512
+ LIMIT :limit
513
+ """
514
+ )
397
515
 
398
- # Add relevance score and limit
399
- short_results = self.session.execute(
400
- short_query.statement.add_columns(
401
- text(
402
- "ts_rank(search_vector, to_tsquery('english', :query)) as search_score"
403
- ).params(query=tsquery_text),
404
- text("'short_term' as memory_type"),
405
- text("'postgresql_fts' as search_strategy"),
516
+ params = {
517
+ "namespace": namespace,
518
+ "query": tsquery_text,
519
+ "limit": short_limit,
520
+ }
521
+ if category_filter:
522
+ params["category_list"] = category_filter
523
+
524
+ short_results = self.session.execute(short_sql, params).fetchall()
525
+
526
+ # Convert to dictionaries manually with proper column mapping
527
+ for row in short_results:
528
+ results.append(
529
+ {
530
+ "memory_id": row[0],
531
+ "processed_data": row[1],
532
+ "importance_score": row[2],
533
+ "created_at": row[3],
534
+ "summary": row[4],
535
+ "category_primary": row[5],
536
+ "search_score": row[6],
537
+ "memory_type": row[7],
538
+ "search_strategy": row[8],
539
+ }
406
540
  )
407
- .order_by(text("search_score DESC"))
408
- .limit(short_limit)
409
- ).fetchall()
410
-
411
- results.extend([dict(row) for row in short_results])
412
541
 
413
542
  # Search long-term memory if requested
414
543
  if search_long_term:
415
- long_query = self.session.query(LongTermMemory).filter(
416
- LongTermMemory.namespace == namespace
544
+ # Build category filter clause safely
545
+ category_clause = ""
546
+ if category_filter:
547
+ category_clause = "AND category_primary = ANY(:category_list)"
548
+
549
+ # Use direct SQL to avoid SQLAlchemy Row conversion issues
550
+ long_sql = text(
551
+ f"""
552
+ SELECT memory_id, processed_data, importance_score, created_at, summary, category_primary,
553
+ ts_rank(search_vector, to_tsquery('english', :query)) as search_score,
554
+ 'long_term' as memory_type, 'postgresql_fts' as search_strategy
555
+ FROM long_term_memory
556
+ WHERE namespace = :namespace
557
+ AND search_vector @@ to_tsquery('english', :query)
558
+ {category_clause}
559
+ ORDER BY search_score DESC
560
+ LIMIT :limit
561
+ """
417
562
  )
418
563
 
419
- # Add tsvector search
420
- ts_query = text(
421
- "search_vector @@ to_tsquery('english', :query)"
422
- ).params(query=tsquery_text)
423
- long_query = long_query.filter(ts_query)
424
-
425
- # Add category filter
564
+ params = {
565
+ "namespace": namespace,
566
+ "query": tsquery_text,
567
+ "limit": long_limit,
568
+ }
426
569
  if category_filter:
427
- long_query = long_query.filter(
428
- LongTermMemory.category_primary.in_(category_filter)
429
- )
430
-
431
- # Add relevance score and limit
432
- long_results = self.session.execute(
433
- long_query.statement.add_columns(
434
- text(
435
- "ts_rank(search_vector, to_tsquery('english', :query)) as search_score"
436
- ).params(query=tsquery_text),
437
- text("'long_term' as memory_type"),
438
- text("'postgresql_fts' as search_strategy"),
570
+ params["category_list"] = category_filter
571
+
572
+ long_results = self.session.execute(long_sql, params).fetchall()
573
+
574
+ # Convert to dictionaries manually with proper column mapping
575
+ for row in long_results:
576
+ results.append(
577
+ {
578
+ "memory_id": row[0],
579
+ "processed_data": row[1],
580
+ "importance_score": row[2],
581
+ "created_at": row[3],
582
+ "summary": row[4],
583
+ "category_primary": row[5],
584
+ "search_score": row[6],
585
+ "memory_type": row[7],
586
+ "search_strategy": row[8],
587
+ }
439
588
  )
440
- .order_by(text("search_score DESC"))
441
- .limit(long_limit)
442
- ).fetchall()
443
-
444
- results.extend([dict(row) for row in long_results])
445
589
 
446
590
  return results
447
591
 
@@ -348,24 +348,72 @@ class SQLAlchemyDatabaseManager:
348
348
  def _setup_mysql_fulltext(self, conn):
349
349
  """Setup MySQL FULLTEXT indexes"""
350
350
  try:
351
- # Create FULLTEXT indexes
352
- conn.execute(
353
- text(
354
- "ALTER TABLE short_term_memory ADD FULLTEXT INDEX ft_short_term_search (searchable_content, summary)"
355
- )
351
+ # Check if indexes exist before creating them
352
+ index_check_query = text(
353
+ """
354
+ SELECT COUNT(*) as index_count
355
+ FROM information_schema.statistics
356
+ WHERE table_schema = DATABASE()
357
+ AND index_name IN ('ft_short_term_search', 'ft_long_term_search')
358
+ """
356
359
  )
357
- conn.execute(
358
- text(
359
- "ALTER TABLE long_term_memory ADD FULLTEXT INDEX ft_long_term_search (searchable_content, summary)"
360
+
361
+ result = conn.execute(index_check_query)
362
+ existing_indexes = result.fetchone()[0]
363
+
364
+ if existing_indexes < 2:
365
+ logger.info(
366
+ f"Creating missing MySQL FULLTEXT indexes ({existing_indexes}/2 exist)..."
360
367
  )
361
- )
362
368
 
363
- logger.info("MySQL FULLTEXT indexes setup completed")
369
+ # Check and create short_term_memory index if missing
370
+ short_term_check = conn.execute(
371
+ text(
372
+ """
373
+ SELECT COUNT(*) FROM information_schema.statistics
374
+ WHERE table_schema = DATABASE()
375
+ AND table_name = 'short_term_memory'
376
+ AND index_name = 'ft_short_term_search'
377
+ """
378
+ )
379
+ ).fetchone()[0]
380
+
381
+ if short_term_check == 0:
382
+ conn.execute(
383
+ text(
384
+ "ALTER TABLE short_term_memory ADD FULLTEXT INDEX ft_short_term_search (searchable_content, summary)"
385
+ )
386
+ )
387
+ logger.info("Created ft_short_term_search index")
388
+
389
+ # Check and create long_term_memory index if missing
390
+ long_term_check = conn.execute(
391
+ text(
392
+ """
393
+ SELECT COUNT(*) FROM information_schema.statistics
394
+ WHERE table_schema = DATABASE()
395
+ AND table_name = 'long_term_memory'
396
+ AND index_name = 'ft_long_term_search'
397
+ """
398
+ )
399
+ ).fetchone()[0]
400
+
401
+ if long_term_check == 0:
402
+ conn.execute(
403
+ text(
404
+ "ALTER TABLE long_term_memory ADD FULLTEXT INDEX ft_long_term_search (searchable_content, summary)"
405
+ )
406
+ )
407
+ logger.info("Created ft_long_term_search index")
408
+
409
+ logger.info("MySQL FULLTEXT indexes setup completed")
410
+ else:
411
+ logger.debug(
412
+ "MySQL FULLTEXT indexes already exist (2/2), skipping creation"
413
+ )
364
414
 
365
415
  except Exception as e:
366
- logger.warning(
367
- f"MySQL FULLTEXT setup failed (indexes may already exist): {e}"
368
- )
416
+ logger.warning(f"MySQL FULLTEXT setup failed: {e}")
369
417
 
370
418
  def _setup_postgresql_fts(self, conn):
371
419
  """Setup PostgreSQL full-text search"""
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: memorisdk
3
- Version: 2.1.0
3
+ Version: 2.3.0
4
4
  Summary: The Open-Source Memory Layer for AI Agents & Multi-Agent Systems
5
5
  Author-email: GibsonAI Team <noc@gibsonai.com>
6
6
  License: Apache-2.0
@@ -53,11 +53,11 @@ Requires-Dist: psycopg2-binary>=2.9.0; extra == "postgres"
53
53
  Provides-Extra: mysql
54
54
  Requires-Dist: PyMySQL>=1.0.0; extra == "mysql"
55
55
  Provides-Extra: mongodb
56
- Requires-Dist: pymongo>=4.0.0; extra == "mongodb"
56
+ Requires-Dist: pymongo[srv]>=4.0.0; extra == "mongodb"
57
57
  Provides-Extra: databases
58
58
  Requires-Dist: psycopg2-binary>=2.9.0; extra == "databases"
59
59
  Requires-Dist: PyMySQL>=1.0.0; extra == "databases"
60
- Requires-Dist: pymongo>=4.0.0; extra == "databases"
60
+ Requires-Dist: pymongo[srv]>=4.0.0; extra == "databases"
61
61
  Provides-Extra: anthropic
62
62
  Requires-Dist: anthropic>=0.3.0; extra == "anthropic"
63
63
  Provides-Extra: litellm
@@ -87,6 +87,7 @@ Requires-Dist: mkdocs-minify-plugin>=0.7.0; extra == "all"
87
87
  Requires-Dist: mkdocs-redirects>=1.2.0; extra == "all"
88
88
  Requires-Dist: psycopg2-binary>=2.9.0; extra == "all"
89
89
  Requires-Dist: PyMySQL>=1.0.0; extra == "all"
90
+ Requires-Dist: pymongo[srv]>=4.0.0; extra == "all"
90
91
  Requires-Dist: litellm>=1.0.0; extra == "all"
91
92
  Requires-Dist: anthropic>=0.3.0; extra == "all"
92
93
  Requires-Dist: streamlit>=1.28.0; extra == "all"
@@ -481,6 +482,7 @@ Memori works seamlessly with popular AI frameworks:
481
482
  | [Agno](./examples/integrations/agno_example.py) | Memory-enhanced agent framework integration with persistent conversations | Simple chat agent with memory search |
482
483
  | [AWS Strands](./examples/integrations/aws_strands_example.py) | Professional development coach with Strands SDK and persistent memory | Career coaching agent with goal tracking |
483
484
  | [Azure AI Foundry](./examples/integrations/azure_ai_foundry_example.py) | Azure AI Foundry agents with persistent memory across conversations | Enterprise AI agents with Azure integration |
485
+ | [AutoGen](./examples/integrations/autogen_example.py) | Multi-agent group chat memory recording | Agent chats with memory integration |
484
486
  | [CamelAI](./examples/integrations/camelai_example.py) | Multi-agent communication framework with automatic memory recording and retrieval | Memory-enhanced chat agents with conversation continuity |
485
487
  | [CrewAI](./examples/integrations/crewai_example.py) | Multi-agent system with shared memory across agent interactions | Collaborative agents with memory |
486
488
  | [Digital Ocean AI](./examples/integrations/digital_ocean_example.py) | Memory-enhanced customer support using Digital Ocean's AI platform | Customer support assistant with conversation history |