memorisdk 2.1.1__py3-none-any.whl → 2.3.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of memorisdk has been flagged as potentially problematic; consult the advisory linked from the registry listing for details.

@@ -28,6 +28,21 @@ class DatabaseAutoCreator:
28
28
  self.schema_init = schema_init
29
29
  self.utils = DatabaseConnectionUtils()
30
30
 
31
+ def _is_gibsonai_temp_connection(self, components: dict[str, str] | None) -> bool:
32
+ """Detect GibsonAI temporary database credentials to avoid noisy warnings."""
33
+ if not components:
34
+ return False
35
+
36
+ host = (components.get("host") or "").lower()
37
+ if "gibsonai.com" not in host:
38
+ return False
39
+
40
+ user = components.get("user") or components.get("username") or ""
41
+ database = components.get("database") or ""
42
+
43
+ # GibsonAI temporary credentials follow predictable us_/db_ prefixes
44
+ return user.startswith("us_") or database.startswith("db_")
45
+
31
46
  def ensure_database_exists(self, connection_string: str) -> str:
32
47
  """
33
48
  Ensure target database exists, creating it if necessary.
@@ -45,6 +60,7 @@ class DatabaseAutoCreator:
45
60
  logger.debug("Auto-creation disabled, using original connection string")
46
61
  return connection_string
47
62
 
63
+ components = None
48
64
  try:
49
65
  # Parse connection string
50
66
  components = self.utils.parse_connection_string(connection_string)
@@ -56,6 +72,13 @@ class DatabaseAutoCreator:
56
72
  )
57
73
  return connection_string
58
74
 
75
+ # Skip noisy warnings for managed GibsonAI temporary databases
76
+ if self._is_gibsonai_temp_connection(components):
77
+ logger.debug(
78
+ "[DB_SETUP] GibsonAI managed database detected - skipping auto-creation checks"
79
+ )
80
+ return connection_string
81
+
59
82
  # Validate database name
60
83
  if not self.utils.validate_database_name(components["database"]):
61
84
  raise ValueError(f"Invalid database name: {components['database']}")
@@ -70,10 +93,39 @@ class DatabaseAutoCreator:
70
93
  logger.info(f"Successfully created database '{components['database']}'")
71
94
  return connection_string
72
95
 
96
+ except PermissionError as e:
97
+ if components and self._is_gibsonai_temp_connection(components):
98
+ logger.debug(
99
+ "[DB_SETUP] GibsonAI managed database does not allow auto-creation (permission denied)"
100
+ )
101
+ return connection_string
102
+
103
+ logger.error(f"[DB_SETUP] Permission denied - {e}")
104
+ if components:
105
+ logger.warning(
106
+ f"[DB_SETUP] Database '{components['database']}' may need manual creation with proper permissions"
107
+ )
108
+ else:
109
+ logger.warning(
110
+ "[DB_SETUP] Database may need manual creation with proper permissions"
111
+ )
112
+ return connection_string
113
+ except RuntimeError as e:
114
+ logger.error(f"[DB_SETUP] Database creation error - {e}")
115
+ logger.info(
116
+ "[DB_SETUP] Proceeding with original connection string, database may need manual setup"
117
+ )
118
+ return connection_string
73
119
  except Exception as e:
74
- logger.error(f"Database auto-creation failed: {e}")
75
- # Don't raise exception - let the original connection attempt proceed
76
- # This allows graceful degradation if user has manual setup
120
+ logger.error(
121
+ f"[DB_SETUP] Unexpected database auto-creation failure - {type(e).__name__}: {e}"
122
+ )
123
+ if components:
124
+ logger.debug(
125
+ f"[DB_SETUP] Connection string: {components['engine']}://{components['host']}:{components['port']}/{components['database']}"
126
+ )
127
+ else:
128
+ logger.debug(f"[DB_SETUP] Connection string: {connection_string}")
77
129
  return connection_string
78
130
 
79
131
  def _database_exists(self, components: dict[str, str]) -> bool:
@@ -90,7 +142,12 @@ class DatabaseAutoCreator:
90
142
  return False
91
143
 
92
144
  except Exception as e:
93
- logger.error(f"Failed to check database existence: {e}")
145
+ if self._is_gibsonai_temp_connection(components):
146
+ logger.debug(
147
+ "[DB_CONNECTION] Skipping GibsonAI database existence check due to restricted permissions"
148
+ )
149
+ else:
150
+ logger.error(f"Failed to check database existence: {e}")
94
151
  return False
95
152
 
96
153
  def _postgresql_database_exists(self, components: dict[str, str]) -> bool:
@@ -176,7 +233,17 @@ class DatabaseAutoCreator:
176
233
  logger.error(error_msg)
177
234
  return False
178
235
  except Exception as e:
179
- logger.error(f"MySQL database existence check failed: {e}")
236
+ if self._is_gibsonai_temp_connection(components):
237
+ logger.debug(
238
+ f"[DB_CONNECTION] GibsonAI existence check bypassed for '{components['database']}' ({e})"
239
+ )
240
+ else:
241
+ logger.error(
242
+ f"[DB_CONNECTION] MySQL database existence check failed for '{components['database']}': {e}"
243
+ )
244
+ logger.debug(
245
+ f"[DB_CONNECTION] Connection details - host: {components.get('host')}, port: {components.get('port')}, user: {components.get('user') or components.get('username')}"
246
+ )
180
247
  return False
181
248
 
182
249
  def _create_database(self, components: dict[str, str]) -> None:
@@ -928,6 +928,28 @@ class MongoDBDatabaseManager:
928
928
  except Exception as e:
929
929
  logger.error(f"Failed to mark conscious memories processed: {e}")
930
930
 
931
+ def _check_milestone(self, memory_count: int):
932
+ """
933
+ Check and celebrate memory storage milestones to encourage user engagement.
934
+ Displays celebration messages at key milestones: 10, 50, 100, 500, 1000 memories.
935
+
936
+ Args:
937
+ memory_count: Current count of long-term memories
938
+ """
939
+ milestones = [10, 50, 100, 500, 1000]
940
+
941
+ if memory_count in milestones:
942
+ celebration_msg = (
943
+ f"\n{'=' * 60}\n"
944
+ f"🎉 Milestone Achieved: {memory_count} memories stored!\n"
945
+ f"{'=' * 60}\n"
946
+ f"⭐️ Loving Memori? Give us a star on GitHub!\n"
947
+ f"👉 https://github.com/GibsonAI/memori\n"
948
+ f"Your support helps us build better open AI memory tools ❤️\n"
949
+ f"{'=' * 60}\n"
950
+ )
951
+ logger.info(celebration_msg)
952
+
931
953
  def store_long_term_memory_enhanced(
932
954
  self, memory: ProcessedLongTermMemory, chat_id: str, namespace: str = "default"
933
955
  ) -> str:
@@ -1000,6 +1022,13 @@ class MongoDBDatabaseManager:
1000
1022
  collection.insert_one(document)
1001
1023
 
1002
1024
  logger.debug(f"Stored enhanced long-term memory {memory_id}")
1025
+
1026
+ # Get current memory count and check for milestones
1027
+ total_memories = collection.count_documents({"namespace": namespace})
1028
+
1029
+ # Celebrate milestone if reached
1030
+ self._check_milestone(total_memories)
1031
+
1003
1032
  return memory_id
1004
1033
 
1005
1034
  except Exception as e:
@@ -42,7 +42,7 @@ class SearchService:
42
42
  List of memory dictionaries with search metadata
43
43
  """
44
44
  logger.debug(
45
- f"SearchService.search_memories called - query: '{query}', namespace: '{namespace}', database: {self.database_type}, limit: {limit}"
45
+ f"[SEARCH] Query initiated - '{query[:50]}{'...' if len(query) > 50 else ''}' | namespace: '{namespace}' | db: {self.database_type} | limit: {limit}"
46
46
  )
47
47
 
48
48
  if not query or not query.strip():
@@ -58,13 +58,13 @@ class SearchService:
58
58
  search_long_term = not memory_types or "long_term" in memory_types
59
59
 
60
60
  logger.debug(
61
- f"Memory types to search - short_term: {search_short_term}, long_term: {search_long_term}, categories: {category_filter}"
61
+ f"[SEARCH] Target scope - short_term: {search_short_term} | long_term: {search_long_term} | categories: {category_filter or 'all'}"
62
62
  )
63
63
 
64
64
  try:
65
65
  # Try database-specific full-text search first
66
66
  if self.database_type == "sqlite":
67
- logger.debug("Using SQLite FTS5 search strategy")
67
+ logger.debug("[SEARCH] Strategy: SQLite FTS5")
68
68
  results = self._search_sqlite_fts(
69
69
  query,
70
70
  namespace,
@@ -74,7 +74,7 @@ class SearchService:
74
74
  search_long_term,
75
75
  )
76
76
  elif self.database_type == "mysql":
77
- logger.debug("Using MySQL FULLTEXT search strategy")
77
+ logger.debug("[SEARCH] Strategy: MySQL FULLTEXT")
78
78
  results = self._search_mysql_fulltext(
79
79
  query,
80
80
  namespace,
@@ -84,7 +84,7 @@ class SearchService:
84
84
  search_long_term,
85
85
  )
86
86
  elif self.database_type == "postgresql":
87
- logger.debug("Using PostgreSQL FTS search strategy")
87
+ logger.debug("[SEARCH] Strategy: PostgreSQL FTS")
88
88
  results = self._search_postgresql_fts(
89
89
  query,
90
90
  namespace,
@@ -94,12 +94,12 @@ class SearchService:
94
94
  search_long_term,
95
95
  )
96
96
 
97
- logger.debug(f"Primary search strategy returned {len(results)} results")
97
+ logger.debug(f"[SEARCH] Primary strategy results: {len(results)} matches")
98
98
 
99
99
  # If no results or full-text search failed, fall back to LIKE search
100
100
  if not results:
101
101
  logger.debug(
102
- "Primary search returned no results, falling back to LIKE search"
102
+ "[SEARCH] Primary strategy empty, falling back to LIKE search"
103
103
  )
104
104
  results = self._search_like_fallback(
105
105
  query,
@@ -112,13 +112,10 @@ class SearchService:
112
112
 
113
113
  except Exception as e:
114
114
  logger.error(
115
- f"Full-text search failed for query '{query}' in namespace '{namespace}': {e}"
115
+ f"[SEARCH] Full-text search failed for '{query[:30]}...' in '{namespace}' - {type(e).__name__}: {e}"
116
116
  )
117
- logger.debug(
118
- f"Full-text search error details: {type(e).__name__}: {str(e)}",
119
- exc_info=True,
120
- )
121
- logger.warning(f"Falling back to LIKE search for query '{query}'")
117
+ logger.debug("[SEARCH] Full-text error details", exc_info=True)
118
+ logger.warning("[SEARCH] Attempting LIKE fallback search")
122
119
  try:
123
120
  results = self._search_like_fallback(
124
121
  query,
@@ -128,21 +125,25 @@ class SearchService:
128
125
  search_short_term,
129
126
  search_long_term,
130
127
  )
131
- logger.debug(f"LIKE fallback search returned {len(results)} results")
128
+ logger.debug(f"[SEARCH] LIKE fallback results: {len(results)} matches")
132
129
  except Exception as fallback_e:
133
130
  logger.error(
134
- f"LIKE fallback search also failed for query '{query}': {fallback_e}"
131
+ f"[SEARCH] LIKE fallback also failed - {type(fallback_e).__name__}: {fallback_e}"
135
132
  )
136
133
  results = []
137
134
 
138
135
  final_results = self._rank_and_limit_results(results, limit)
139
136
  logger.debug(
140
- f"SearchService completed - returning {len(final_results)} final results after ranking and limiting"
137
+ f"[SEARCH] Completed - {len(final_results)} results after ranking and limiting"
141
138
  )
142
139
 
143
140
  if final_results:
141
+ top_result = final_results[0]
142
+ memory_id = str(top_result.get("memory_id", "unknown"))[:8]
143
+ score = top_result.get("composite_score", 0)
144
+ strategy = top_result.get("search_strategy", "unknown")
144
145
  logger.debug(
145
- f"Top result: memory_id={final_results[0].get('memory_id')}, score={final_results[0].get('composite_score', 0):.3f}, strategy={final_results[0].get('search_strategy')}"
146
+ f"[SEARCH] Top result: {memory_id}... | score: {score:.3f} | strategy: {strategy}"
146
147
  )
147
148
 
148
149
  return final_results
@@ -268,6 +269,36 @@ class SearchService:
268
269
  results = []
269
270
 
270
271
  try:
272
+ # First check if there are any records in the database
273
+ if search_short_term:
274
+ short_count = (
275
+ self.session.query(ShortTermMemory)
276
+ .filter(ShortTermMemory.namespace == namespace)
277
+ .count()
278
+ )
279
+ if short_count == 0:
280
+ logger.debug(
281
+ "No short-term memories found in database, skipping FULLTEXT search"
282
+ )
283
+ search_short_term = False
284
+
285
+ if search_long_term:
286
+ long_count = (
287
+ self.session.query(LongTermMemory)
288
+ .filter(LongTermMemory.namespace == namespace)
289
+ .count()
290
+ )
291
+ if long_count == 0:
292
+ logger.debug(
293
+ "No long-term memories found in database, skipping FULLTEXT search"
294
+ )
295
+ search_long_term = False
296
+
297
+ # If no records exist, return empty results
298
+ if not search_short_term and not search_long_term:
299
+ logger.debug("No memories found in database for FULLTEXT search")
300
+ return []
301
+
271
302
  # Apply limit proportionally between memory types
272
303
  short_limit = (
273
304
  limit // 2 if search_short_term and search_long_term else limit
@@ -278,65 +309,147 @@ class SearchService:
278
309
 
279
310
  # Search short-term memory if requested
280
311
  if search_short_term:
281
- short_query = self.session.query(ShortTermMemory).filter(
282
- ShortTermMemory.namespace == namespace
283
- )
284
-
285
- # Add FULLTEXT search
286
- fulltext_condition = text(
287
- "MATCH(searchable_content, summary) AGAINST(:query IN NATURAL LANGUAGE MODE)"
288
- ).params(query=query)
289
- short_query = short_query.filter(fulltext_condition)
290
-
291
- # Add category filter
292
- if category_filter:
293
- short_query = short_query.filter(
294
- ShortTermMemory.category_primary.in_(category_filter)
312
+ try:
313
+ # Build category filter clause
314
+ category_clause = ""
315
+ params = {"query": query}
316
+ if category_filter:
317
+ category_placeholders = ",".join(
318
+ [f":cat_{i}" for i in range(len(category_filter))]
319
+ )
320
+ category_clause = (
321
+ f"AND category_primary IN ({category_placeholders})"
322
+ )
323
+ for i, cat in enumerate(category_filter):
324
+ params[f"cat_{i}"] = cat
325
+
326
+ # Use direct SQL query for more reliable results
327
+ sql_query = text(
328
+ f"""
329
+ SELECT
330
+ memory_id,
331
+ processed_data,
332
+ importance_score,
333
+ created_at,
334
+ summary,
335
+ category_primary,
336
+ MATCH(searchable_content, summary) AGAINST(:query IN NATURAL LANGUAGE MODE) as search_score,
337
+ 'short_term' as memory_type,
338
+ 'mysql_fulltext' as search_strategy
339
+ FROM short_term_memory
340
+ WHERE namespace = :namespace
341
+ AND MATCH(searchable_content, summary) AGAINST(:query IN NATURAL LANGUAGE MODE)
342
+ {category_clause}
343
+ ORDER BY search_score DESC
344
+ LIMIT :short_limit
345
+ """
295
346
  )
296
347
 
297
- # Add relevance score and limit
298
- short_results = self.session.execute(
299
- short_query.statement.add_columns(
300
- text(
301
- "MATCH(searchable_content, summary) AGAINST(:query IN NATURAL LANGUAGE MODE) as search_score"
302
- ).params(query=query),
303
- text("'short_term' as memory_type"),
304
- text("'mysql_fulltext' as search_strategy"),
305
- ).limit(short_limit)
306
- ).fetchall()
307
-
308
- results.extend([dict(row) for row in short_results])
348
+ params["namespace"] = namespace
349
+ params["short_limit"] = short_limit
350
+
351
+ short_results = self.session.execute(sql_query, params).fetchall()
352
+
353
+ # Convert rows to dictionaries safely
354
+ for row in short_results:
355
+ try:
356
+ if hasattr(row, "_mapping"):
357
+ row_dict = dict(row._mapping)
358
+ else:
359
+ # Create dict from row values and keys
360
+ row_dict = {
361
+ "memory_id": row[0],
362
+ "processed_data": row[1],
363
+ "importance_score": row[2],
364
+ "created_at": row[3],
365
+ "summary": row[4],
366
+ "category_primary": row[5],
367
+ "search_score": float(row[6]) if row[6] else 0.0,
368
+ "memory_type": row[7],
369
+ "search_strategy": row[8],
370
+ }
371
+ results.append(row_dict)
372
+ except Exception as e:
373
+ logger.warning(
374
+ f"Failed to convert short-term memory row to dict: {e}"
375
+ )
376
+ continue
377
+
378
+ except Exception as e:
379
+ logger.warning(f"Short-term memory FULLTEXT search failed: {e}")
380
+ # Continue to try long-term search
309
381
 
310
382
  # Search long-term memory if requested
311
383
  if search_long_term:
312
- long_query = self.session.query(LongTermMemory).filter(
313
- LongTermMemory.namespace == namespace
314
- )
315
-
316
- # Add FULLTEXT search
317
- fulltext_condition = text(
318
- "MATCH(searchable_content, summary) AGAINST(:query IN NATURAL LANGUAGE MODE)"
319
- ).params(query=query)
320
- long_query = long_query.filter(fulltext_condition)
321
-
322
- # Add category filter
323
- if category_filter:
324
- long_query = long_query.filter(
325
- LongTermMemory.category_primary.in_(category_filter)
384
+ try:
385
+ # Build category filter clause
386
+ category_clause = ""
387
+ params = {"query": query}
388
+ if category_filter:
389
+ category_placeholders = ",".join(
390
+ [f":cat_{i}" for i in range(len(category_filter))]
391
+ )
392
+ category_clause = (
393
+ f"AND category_primary IN ({category_placeholders})"
394
+ )
395
+ for i, cat in enumerate(category_filter):
396
+ params[f"cat_{i}"] = cat
397
+
398
+ # Use direct SQL query for more reliable results
399
+ sql_query = text(
400
+ f"""
401
+ SELECT
402
+ memory_id,
403
+ processed_data,
404
+ importance_score,
405
+ created_at,
406
+ summary,
407
+ category_primary,
408
+ MATCH(searchable_content, summary) AGAINST(:query IN NATURAL LANGUAGE MODE) as search_score,
409
+ 'long_term' as memory_type,
410
+ 'mysql_fulltext' as search_strategy
411
+ FROM long_term_memory
412
+ WHERE namespace = :namespace
413
+ AND MATCH(searchable_content, summary) AGAINST(:query IN NATURAL LANGUAGE MODE)
414
+ {category_clause}
415
+ ORDER BY search_score DESC
416
+ LIMIT :long_limit
417
+ """
326
418
  )
327
419
 
328
- # Add relevance score and limit
329
- long_results = self.session.execute(
330
- long_query.statement.add_columns(
331
- text(
332
- "MATCH(searchable_content, summary) AGAINST(:query IN NATURAL LANGUAGE MODE) as search_score"
333
- ).params(query=query),
334
- text("'long_term' as memory_type"),
335
- text("'mysql_fulltext' as search_strategy"),
336
- ).limit(long_limit)
337
- ).fetchall()
338
-
339
- results.extend([dict(row) for row in long_results])
420
+ params["namespace"] = namespace
421
+ params["long_limit"] = long_limit
422
+
423
+ long_results = self.session.execute(sql_query, params).fetchall()
424
+
425
+ # Convert rows to dictionaries safely
426
+ for row in long_results:
427
+ try:
428
+ if hasattr(row, "_mapping"):
429
+ row_dict = dict(row._mapping)
430
+ else:
431
+ # Create dict from row values and keys
432
+ row_dict = {
433
+ "memory_id": row[0],
434
+ "processed_data": row[1],
435
+ "importance_score": row[2],
436
+ "created_at": row[3],
437
+ "summary": row[4],
438
+ "category_primary": row[5],
439
+ "search_score": float(row[6]) if row[6] else 0.0,
440
+ "memory_type": row[7],
441
+ "search_strategy": row[8],
442
+ }
443
+ results.append(row_dict)
444
+ except Exception as e:
445
+ logger.warning(
446
+ f"Failed to convert long-term memory row to dict: {e}"
447
+ )
448
+ continue
449
+
450
+ except Exception as e:
451
+ logger.warning(f"Long-term memory FULLTEXT search failed: {e}")
452
+ # Continue with whatever results we have
340
453
 
341
454
  return results
342
455
 
@@ -379,69 +492,100 @@ class SearchService:
379
492
 
380
493
  # Search short-term memory if requested
381
494
  if search_short_term:
382
- short_query = self.session.query(ShortTermMemory).filter(
383
- ShortTermMemory.namespace == namespace
384
- )
385
-
386
- # Add tsvector search
387
- ts_query = text(
388
- "search_vector @@ to_tsquery('english', :query)"
389
- ).params(query=tsquery_text)
390
- short_query = short_query.filter(ts_query)
391
495
 
392
- # Add category filter
496
+ # Build category filter clause safely
497
+ category_clause = ""
393
498
  if category_filter:
394
- short_query = short_query.filter(
395
- ShortTermMemory.category_primary.in_(category_filter)
396
- )
499
+ category_clause = "AND category_primary = ANY(:category_list)"
500
+
501
+ # Use direct SQL to avoid SQLAlchemy Row conversion issues
502
+ short_sql = text(
503
+ f"""
504
+ SELECT memory_id, processed_data, importance_score, created_at, summary, category_primary,
505
+ ts_rank(search_vector, to_tsquery('english', :query)) as search_score,
506
+ 'short_term' as memory_type, 'postgresql_fts' as search_strategy
507
+ FROM short_term_memory
508
+ WHERE namespace = :namespace
509
+ AND search_vector @@ to_tsquery('english', :query)
510
+ {category_clause}
511
+ ORDER BY search_score DESC
512
+ LIMIT :limit
513
+ """
514
+ )
397
515
 
398
- # Add relevance score and limit
399
- short_results = self.session.execute(
400
- short_query.statement.add_columns(
401
- text(
402
- "ts_rank(search_vector, to_tsquery('english', :query)) as search_score"
403
- ).params(query=tsquery_text),
404
- text("'short_term' as memory_type"),
405
- text("'postgresql_fts' as search_strategy"),
516
+ params = {
517
+ "namespace": namespace,
518
+ "query": tsquery_text,
519
+ "limit": short_limit,
520
+ }
521
+ if category_filter:
522
+ params["category_list"] = category_filter
523
+
524
+ short_results = self.session.execute(short_sql, params).fetchall()
525
+
526
+ # Convert to dictionaries manually with proper column mapping
527
+ for row in short_results:
528
+ results.append(
529
+ {
530
+ "memory_id": row[0],
531
+ "processed_data": row[1],
532
+ "importance_score": row[2],
533
+ "created_at": row[3],
534
+ "summary": row[4],
535
+ "category_primary": row[5],
536
+ "search_score": row[6],
537
+ "memory_type": row[7],
538
+ "search_strategy": row[8],
539
+ }
406
540
  )
407
- .order_by(text("search_score DESC"))
408
- .limit(short_limit)
409
- ).fetchall()
410
-
411
- results.extend([dict(row) for row in short_results])
412
541
 
413
542
  # Search long-term memory if requested
414
543
  if search_long_term:
415
- long_query = self.session.query(LongTermMemory).filter(
416
- LongTermMemory.namespace == namespace
544
+ # Build category filter clause safely
545
+ category_clause = ""
546
+ if category_filter:
547
+ category_clause = "AND category_primary = ANY(:category_list)"
548
+
549
+ # Use direct SQL to avoid SQLAlchemy Row conversion issues
550
+ long_sql = text(
551
+ f"""
552
+ SELECT memory_id, processed_data, importance_score, created_at, summary, category_primary,
553
+ ts_rank(search_vector, to_tsquery('english', :query)) as search_score,
554
+ 'long_term' as memory_type, 'postgresql_fts' as search_strategy
555
+ FROM long_term_memory
556
+ WHERE namespace = :namespace
557
+ AND search_vector @@ to_tsquery('english', :query)
558
+ {category_clause}
559
+ ORDER BY search_score DESC
560
+ LIMIT :limit
561
+ """
417
562
  )
418
563
 
419
- # Add tsvector search
420
- ts_query = text(
421
- "search_vector @@ to_tsquery('english', :query)"
422
- ).params(query=tsquery_text)
423
- long_query = long_query.filter(ts_query)
424
-
425
- # Add category filter
564
+ params = {
565
+ "namespace": namespace,
566
+ "query": tsquery_text,
567
+ "limit": long_limit,
568
+ }
426
569
  if category_filter:
427
- long_query = long_query.filter(
428
- LongTermMemory.category_primary.in_(category_filter)
429
- )
430
-
431
- # Add relevance score and limit
432
- long_results = self.session.execute(
433
- long_query.statement.add_columns(
434
- text(
435
- "ts_rank(search_vector, to_tsquery('english', :query)) as search_score"
436
- ).params(query=tsquery_text),
437
- text("'long_term' as memory_type"),
438
- text("'postgresql_fts' as search_strategy"),
570
+ params["category_list"] = category_filter
571
+
572
+ long_results = self.session.execute(long_sql, params).fetchall()
573
+
574
+ # Convert to dictionaries manually with proper column mapping
575
+ for row in long_results:
576
+ results.append(
577
+ {
578
+ "memory_id": row[0],
579
+ "processed_data": row[1],
580
+ "importance_score": row[2],
581
+ "created_at": row[3],
582
+ "summary": row[4],
583
+ "category_primary": row[5],
584
+ "search_score": row[6],
585
+ "memory_type": row[7],
586
+ "search_strategy": row[8],
587
+ }
439
588
  )
440
- .order_by(text("search_score DESC"))
441
- .limit(long_limit)
442
- ).fetchall()
443
-
444
- results.extend([dict(row) for row in long_results])
445
589
 
446
590
  return results
447
591