omni-cortex 1.17.2-py3-none-any.whl → 1.17.4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (93)
  1. omni_cortex/_bundled/dashboard/backend/.env.example +12 -0
  2. omni_cortex/_bundled/dashboard/backend/backfill_summaries.py +280 -0
  3. omni_cortex/_bundled/dashboard/backend/chat_service.py +631 -0
  4. omni_cortex/_bundled/dashboard/backend/database.py +1773 -0
  5. omni_cortex/_bundled/dashboard/backend/image_service.py +552 -0
  6. omni_cortex/_bundled/dashboard/backend/logging_config.py +122 -0
  7. omni_cortex/_bundled/dashboard/backend/main.py +1888 -0
  8. omni_cortex/_bundled/dashboard/backend/models.py +472 -0
  9. omni_cortex/_bundled/dashboard/backend/project_config.py +170 -0
  10. omni_cortex/_bundled/dashboard/backend/project_scanner.py +164 -0
  11. omni_cortex/_bundled/dashboard/backend/prompt_security.py +111 -0
  12. omni_cortex/_bundled/dashboard/backend/pyproject.toml +23 -0
  13. omni_cortex/_bundled/dashboard/backend/security.py +104 -0
  14. omni_cortex/_bundled/dashboard/backend/test_database.py +301 -0
  15. omni_cortex/_bundled/dashboard/backend/tmpclaude-2dfa-cwd +1 -0
  16. omni_cortex/_bundled/dashboard/backend/tmpclaude-c460-cwd +1 -0
  17. omni_cortex/_bundled/dashboard/backend/uv.lock +1110 -0
  18. omni_cortex/_bundled/dashboard/backend/websocket_manager.py +104 -0
  19. omni_cortex/_bundled/dashboard/frontend/dist/assets/index-CQlQK3nE.js +551 -0
  20. omni_cortex/_bundled/dashboard/frontend/dist/assets/index-CmUNNfe4.css +1 -0
  21. omni_cortex/_bundled/dashboard/frontend/dist/index.html +14 -0
  22. omni_cortex/_bundled/hooks/post_tool_use.py +497 -0
  23. omni_cortex/_bundled/hooks/pre_tool_use.py +277 -0
  24. omni_cortex/_bundled/hooks/session_utils.py +186 -0
  25. omni_cortex/_bundled/hooks/stop.py +219 -0
  26. omni_cortex/_bundled/hooks/subagent_stop.py +120 -0
  27. omni_cortex/_bundled/hooks/user_prompt.py +331 -0
  28. {omni_cortex-1.17.2.data → omni_cortex-1.17.4.data}/data/share/omni-cortex/dashboard/backend/main.py +2 -2
  29. {omni_cortex-1.17.2.data → omni_cortex-1.17.4.data}/data/share/omni-cortex/hooks/user_prompt.py +113 -2
  30. {omni_cortex-1.17.2.dist-info → omni_cortex-1.17.4.dist-info}/METADATA +6 -1
  31. omni_cortex-1.17.4.dist-info/RECORD +53 -0
  32. omni_cortex/__init__.py +0 -3
  33. omni_cortex/categorization/__init__.py +0 -9
  34. omni_cortex/categorization/auto_tags.py +0 -166
  35. omni_cortex/categorization/auto_type.py +0 -165
  36. omni_cortex/config.py +0 -141
  37. omni_cortex/dashboard.py +0 -232
  38. omni_cortex/database/__init__.py +0 -24
  39. omni_cortex/database/connection.py +0 -137
  40. omni_cortex/database/migrations.py +0 -210
  41. omni_cortex/database/schema.py +0 -212
  42. omni_cortex/database/sync.py +0 -421
  43. omni_cortex/decay/__init__.py +0 -7
  44. omni_cortex/decay/importance.py +0 -147
  45. omni_cortex/embeddings/__init__.py +0 -35
  46. omni_cortex/embeddings/local.py +0 -442
  47. omni_cortex/models/__init__.py +0 -20
  48. omni_cortex/models/activity.py +0 -265
  49. omni_cortex/models/agent.py +0 -144
  50. omni_cortex/models/memory.py +0 -395
  51. omni_cortex/models/relationship.py +0 -206
  52. omni_cortex/models/session.py +0 -290
  53. omni_cortex/resources/__init__.py +0 -1
  54. omni_cortex/search/__init__.py +0 -22
  55. omni_cortex/search/hybrid.py +0 -197
  56. omni_cortex/search/keyword.py +0 -204
  57. omni_cortex/search/ranking.py +0 -127
  58. omni_cortex/search/semantic.py +0 -232
  59. omni_cortex/server.py +0 -360
  60. omni_cortex/setup.py +0 -278
  61. omni_cortex/tools/__init__.py +0 -13
  62. omni_cortex/tools/activities.py +0 -453
  63. omni_cortex/tools/memories.py +0 -536
  64. omni_cortex/tools/sessions.py +0 -311
  65. omni_cortex/tools/utilities.py +0 -477
  66. omni_cortex/utils/__init__.py +0 -13
  67. omni_cortex/utils/formatting.py +0 -282
  68. omni_cortex/utils/ids.py +0 -72
  69. omni_cortex/utils/timestamps.py +0 -129
  70. omni_cortex/utils/truncation.py +0 -111
  71. omni_cortex-1.17.2.dist-info/RECORD +0 -65
  72. {omni_cortex-1.17.2.data → omni_cortex-1.17.4.data}/data/share/omni-cortex/dashboard/backend/.env.example +0 -0
  73. {omni_cortex-1.17.2.data → omni_cortex-1.17.4.data}/data/share/omni-cortex/dashboard/backend/backfill_summaries.py +0 -0
  74. {omni_cortex-1.17.2.data → omni_cortex-1.17.4.data}/data/share/omni-cortex/dashboard/backend/chat_service.py +0 -0
  75. {omni_cortex-1.17.2.data → omni_cortex-1.17.4.data}/data/share/omni-cortex/dashboard/backend/database.py +0 -0
  76. {omni_cortex-1.17.2.data → omni_cortex-1.17.4.data}/data/share/omni-cortex/dashboard/backend/image_service.py +0 -0
  77. {omni_cortex-1.17.2.data → omni_cortex-1.17.4.data}/data/share/omni-cortex/dashboard/backend/logging_config.py +0 -0
  78. {omni_cortex-1.17.2.data → omni_cortex-1.17.4.data}/data/share/omni-cortex/dashboard/backend/models.py +0 -0
  79. {omni_cortex-1.17.2.data → omni_cortex-1.17.4.data}/data/share/omni-cortex/dashboard/backend/project_config.py +0 -0
  80. {omni_cortex-1.17.2.data → omni_cortex-1.17.4.data}/data/share/omni-cortex/dashboard/backend/project_scanner.py +0 -0
  81. {omni_cortex-1.17.2.data → omni_cortex-1.17.4.data}/data/share/omni-cortex/dashboard/backend/prompt_security.py +0 -0
  82. {omni_cortex-1.17.2.data → omni_cortex-1.17.4.data}/data/share/omni-cortex/dashboard/backend/pyproject.toml +0 -0
  83. {omni_cortex-1.17.2.data → omni_cortex-1.17.4.data}/data/share/omni-cortex/dashboard/backend/security.py +0 -0
  84. {omni_cortex-1.17.2.data → omni_cortex-1.17.4.data}/data/share/omni-cortex/dashboard/backend/uv.lock +0 -0
  85. {omni_cortex-1.17.2.data → omni_cortex-1.17.4.data}/data/share/omni-cortex/dashboard/backend/websocket_manager.py +0 -0
  86. {omni_cortex-1.17.2.data → omni_cortex-1.17.4.data}/data/share/omni-cortex/hooks/post_tool_use.py +0 -0
  87. {omni_cortex-1.17.2.data → omni_cortex-1.17.4.data}/data/share/omni-cortex/hooks/pre_tool_use.py +0 -0
  88. {omni_cortex-1.17.2.data → omni_cortex-1.17.4.data}/data/share/omni-cortex/hooks/session_utils.py +0 -0
  89. {omni_cortex-1.17.2.data → omni_cortex-1.17.4.data}/data/share/omni-cortex/hooks/stop.py +0 -0
  90. {omni_cortex-1.17.2.data → omni_cortex-1.17.4.data}/data/share/omni-cortex/hooks/subagent_stop.py +0 -0
  91. {omni_cortex-1.17.2.dist-info → omni_cortex-1.17.4.dist-info}/WHEEL +0 -0
  92. {omni_cortex-1.17.2.dist-info → omni_cortex-1.17.4.dist-info}/entry_points.txt +0 -0
  93. {omni_cortex-1.17.2.dist-info → omni_cortex-1.17.4.dist-info}/licenses/LICENSE +0 -0
omni_cortex/models/session.py
@@ -1,290 +0,0 @@
- """Session model and CRUD operations."""
-
- import json
- import sqlite3
- from typing import Optional, Any
- from pydantic import BaseModel, Field, ConfigDict
-
- from ..utils.ids import generate_session_id, generate_summary_id
- from ..utils.timestamps import now_iso
-
-
- class SessionCreate(BaseModel):
-     """Input model for creating a session."""
-
-     model_config = ConfigDict(
-         str_strip_whitespace=True,
-         validate_assignment=True,
-     )
-
-     session_id: Optional[str] = Field(None, description="Custom session ID")
-     project_path: str = Field(..., description="Project directory path")
-     provide_context: bool = Field(True, description="Whether to provide previous context")
-     context_depth: int = Field(3, description="Number of past sessions to summarize", ge=1, le=10)
-
-
- class Session(BaseModel):
-     """Full session model from database."""
-
-     id: str
-     project_path: str
-     started_at: str
-     ended_at: Optional[str] = None
-     summary: Optional[str] = None
-     tags: Optional[list[str]] = None
-     metadata: Optional[dict[str, Any]] = None
-
-
- class SessionSummary(BaseModel):
-     """Session summary model."""
-
-     id: str
-     session_id: str
-     key_learnings: Optional[list[str]] = None
-     key_decisions: Optional[list[str]] = None
-     key_errors: Optional[list[str]] = None
-     files_modified: Optional[list[str]] = None
-     tools_used: Optional[dict[str, int]] = None
-     total_activities: int = 0
-     total_memories_created: int = 0
-     created_at: str
-
-
- def create_session(
-     conn: sqlite3.Connection,
-     data: SessionCreate,
- ) -> Session:
-     """Create a new session.
-
-     Args:
-         conn: Database connection
-         data: Session creation data
-
-     Returns:
-         Created session object
-     """
-     session_id = data.session_id or generate_session_id()
-     now = now_iso()
-
-     cursor = conn.cursor()
-     cursor.execute(
-         """
-         INSERT INTO sessions (id, project_path, started_at)
-         VALUES (?, ?, ?)
-         """,
-         (session_id, data.project_path, now),
-     )
-     conn.commit()
-
-     return Session(
-         id=session_id,
-         project_path=data.project_path,
-         started_at=now,
-     )
-
-
- def get_session(conn: sqlite3.Connection, session_id: str) -> Optional[Session]:
-     """Get a session by ID."""
-     cursor = conn.cursor()
-     cursor.execute("SELECT * FROM sessions WHERE id = ?", (session_id,))
-     row = cursor.fetchone()
-     return _row_to_session(row) if row else None
-
-
- def end_session(
-     conn: sqlite3.Connection,
-     session_id: str,
-     summary: Optional[str] = None,
-     key_learnings: Optional[list[str]] = None,
- ) -> Optional[Session]:
-     """End a session and create summary.
-
-     Args:
-         conn: Database connection
-         session_id: Session ID to end
-         summary: Optional summary text
-         key_learnings: Optional list of key learnings
-
-     Returns:
-         Updated session or None if not found
-     """
-     session = get_session(conn, session_id)
-     if not session:
-         return None
-
-     now = now_iso()
-     cursor = conn.cursor()
-
-     # Update session
-     cursor.execute(
-         """
-         UPDATE sessions
-         SET ended_at = ?, summary = ?
-         WHERE id = ?
-         """,
-         (now, summary, session_id),
-     )
-
-     # Gather session statistics
-     cursor.execute(
-         "SELECT COUNT(*) FROM activities WHERE session_id = ?",
-         (session_id,),
-     )
-     total_activities = cursor.fetchone()[0]
-
-     cursor.execute(
-         "SELECT COUNT(*) FROM memories WHERE source_session_id = ?",
-         (session_id,),
-     )
-     total_memories = cursor.fetchone()[0]
-
-     # Get tools used
-     cursor.execute(
-         """
-         SELECT tool_name, COUNT(*) as cnt
-         FROM activities
-         WHERE session_id = ? AND tool_name IS NOT NULL
-         GROUP BY tool_name
-         """,
-         (session_id,),
-     )
-     tools_used = {row["tool_name"]: row["cnt"] for row in cursor.fetchall()}
-
-     # Get files modified
-     cursor.execute(
-         """
-         SELECT DISTINCT file_path
-         FROM activities
-         WHERE session_id = ? AND file_path IS NOT NULL
-         """,
-         (session_id,),
-     )
-     files_modified = [row["file_path"] for row in cursor.fetchall()]
-
-     # Get errors
-     cursor.execute(
-         """
-         SELECT error_message
-         FROM activities
-         WHERE session_id = ? AND success = 0 AND error_message IS NOT NULL
-         LIMIT 10
-         """,
-         (session_id,),
-     )
-     key_errors = [row["error_message"] for row in cursor.fetchall()]
-
-     # Create summary record
-     summary_id = generate_summary_id()
-     cursor.execute(
-         """
-         INSERT INTO session_summaries (
-             id, session_id, key_learnings, key_decisions, key_errors,
-             files_modified, tools_used, total_activities,
-             total_memories_created, created_at
-         ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
-         """,
-         (
-             summary_id,
-             session_id,
-             json.dumps(key_learnings) if key_learnings else None,
-             None,  # key_decisions - could be extracted from memories
-             json.dumps(key_errors) if key_errors else None,
-             json.dumps(files_modified) if files_modified else None,
-             json.dumps(tools_used) if tools_used else None,
-             total_activities,
-             total_memories,
-             now,
-         ),
-     )
-
-     conn.commit()
-     return get_session(conn, session_id)
-
-
- def get_recent_sessions(
-     conn: sqlite3.Connection,
-     project_path: Optional[str] = None,
-     limit: int = 5,
- ) -> list[Session]:
-     """Get recent sessions.
-
-     Args:
-         conn: Database connection
-         project_path: Filter by project path
-         limit: Maximum number of sessions
-
-     Returns:
-         List of recent sessions
-     """
-     cursor = conn.cursor()
-
-     if project_path:
-         cursor.execute(
-             """
-             SELECT * FROM sessions
-             WHERE project_path = ?
-             ORDER BY started_at DESC
-             LIMIT ?
-             """,
-             (project_path, limit),
-         )
-     else:
-         cursor.execute(
-             """
-             SELECT * FROM sessions
-             ORDER BY started_at DESC
-             LIMIT ?
-             """,
-             (limit,),
-         )
-
-     return [_row_to_session(row) for row in cursor.fetchall()]
-
-
- def get_session_summary(
-     conn: sqlite3.Connection,
-     session_id: str,
- ) -> Optional[SessionSummary]:
-     """Get session summary."""
-     cursor = conn.cursor()
-     cursor.execute(
-         "SELECT * FROM session_summaries WHERE session_id = ?",
-         (session_id,),
-     )
-     row = cursor.fetchone()
-     if not row:
-         return None
-
-     return SessionSummary(
-         id=row["id"],
-         session_id=row["session_id"],
-         key_learnings=json.loads(row["key_learnings"]) if row["key_learnings"] else None,
-         key_decisions=json.loads(row["key_decisions"]) if row["key_decisions"] else None,
-         key_errors=json.loads(row["key_errors"]) if row["key_errors"] else None,
-         files_modified=json.loads(row["files_modified"]) if row["files_modified"] else None,
-         tools_used=json.loads(row["tools_used"]) if row["tools_used"] else None,
-         total_activities=row["total_activities"],
-         total_memories_created=row["total_memories_created"],
-         created_at=row["created_at"],
-     )
-
-
- def _row_to_session(row: sqlite3.Row) -> Session:
-     """Convert database row to Session object."""
-     tags = row["tags"]
-     if tags and isinstance(tags, str):
-         tags = json.loads(tags)
-
-     metadata = row["metadata"]
-     if metadata and isinstance(metadata, str):
-         metadata = json.loads(metadata)
-
-     return Session(
-         id=row["id"],
-         project_path=row["project_path"],
-         started_at=row["started_at"],
-         ended_at=row["ended_at"],
-         summary=row["summary"],
-         tags=tags,
-         metadata=metadata,
-     )
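Note on the removed model code above: `_row_to_session` and `get_session_summary` persist list/dict fields as JSON text columns and parse them back on read, which requires the connection to use `sqlite3.Row`. A minimal, self-contained sketch of that round-trip (the table shape and values here are illustrative stand-ins, not the package's actual schema):

import json
import sqlite3

# Illustrative only: a trimmed stand-in for the sessions table; the real
# schema lived in the removed omni_cortex/database/schema.py.
conn = sqlite3.connect(":memory:")
conn.row_factory = sqlite3.Row  # lets row["tags"] work, as _row_to_session expects

conn.execute("CREATE TABLE sessions (id TEXT PRIMARY KEY, tags TEXT, metadata TEXT)")
conn.execute(
    "INSERT INTO sessions VALUES (?, ?, ?)",
    ("sess_demo", json.dumps(["refactor", "db"]), json.dumps({"branch": "main"})),
)

row = conn.execute("SELECT * FROM sessions WHERE id = ?", ("sess_demo",)).fetchone()
# Same decode pattern as the removed _row_to_session helper:
tags = json.loads(row["tags"]) if row["tags"] else None
metadata = json.loads(row["metadata"]) if row["metadata"] else None
print(tags, metadata)  # ['refactor', 'db'] {'branch': 'main'}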
omni_cortex/resources/__init__.py
@@ -1 +0,0 @@
- """MCP resources for Omni Cortex."""
omni_cortex/search/__init__.py
@@ -1,22 +0,0 @@
- """Search functionality for Omni Cortex."""
-
- from .keyword import keyword_search
- from .ranking import calculate_relevance_score, rank_memories, normalize_scores
- from .semantic import semantic_search, find_similar_memories, get_embedding_coverage
- from .hybrid import hybrid_search, search
-
- __all__ = [
-     # Keyword search
-     "keyword_search",
-     # Semantic search
-     "semantic_search",
-     "find_similar_memories",
-     "get_embedding_coverage",
-     # Hybrid search
-     "hybrid_search",
-     "search",
-     # Ranking
-     "calculate_relevance_score",
-     "rank_memories",
-     "normalize_scores",
- ]
omni_cortex/search/hybrid.py
@@ -1,197 +0,0 @@
- """Hybrid search combining keyword and semantic search."""
-
- import logging
- import sqlite3
- from typing import Optional
-
- from ..models.memory import Memory
- from .keyword import keyword_search
- from .semantic import semantic_search
- from .ranking import normalize_scores
-
- logger = logging.getLogger(__name__)
-
-
- def hybrid_search(
-     conn: sqlite3.Connection,
-     query: str,
-     type_filter: Optional[str] = None,
-     tags_filter: Optional[list[str]] = None,
-     status_filter: Optional[str] = None,
-     min_importance: Optional[int] = None,
-     include_archived: bool = False,
-     limit: int = 10,
-     keyword_weight: float = 0.4,
-     semantic_weight: float = 0.6,
- ) -> list[tuple[Memory, float, float]]:
-     """Search memories using combined keyword and semantic search.
-
-     The hybrid approach uses both methods and combines their scores:
-     - Keyword search finds exact/fuzzy matches
-     - Semantic search finds conceptually similar content
-     - Results are merged and re-ranked using weighted scores
-
-     Args:
-         conn: Database connection
-         query: Search query string
-         type_filter: Filter by memory type
-         tags_filter: Filter by tags
-         status_filter: Filter by status
-         min_importance: Minimum importance score
-         include_archived: Include archived memories
-         limit: Maximum results
-         keyword_weight: Weight for keyword scores (0-1)
-         semantic_weight: Weight for semantic scores (0-1)
-
-     Returns:
-         List of (Memory, keyword_score, semantic_score) tuples
-     """
-     # Normalize weights
-     total_weight = keyword_weight + semantic_weight
-     if total_weight > 0:
-         keyword_weight = keyword_weight / total_weight
-         semantic_weight = semantic_weight / total_weight
-
-     # Perform keyword search (get more than limit for merging)
-     search_limit = min(limit * 3, 100)
-
-     keyword_results = keyword_search(
-         conn,
-         query=query,
-         type_filter=type_filter,
-         tags_filter=tags_filter,
-         status_filter=status_filter,
-         min_importance=min_importance,
-         include_archived=include_archived,
-         limit=search_limit,
-     )
-
-     # Perform semantic search
-     semantic_results = []
-     try:
-         semantic_results = semantic_search(
-             conn,
-             query=query,
-             type_filter=type_filter,
-             tags_filter=tags_filter,
-             status_filter=status_filter,
-             min_importance=min_importance,
-             include_archived=include_archived,
-             limit=search_limit,
-             similarity_threshold=0.2,  # Lower threshold for hybrid
-         )
-     except Exception as e:
-         logger.warning(f"Semantic search failed, using keyword only: {e}")
-
-     # Build score dictionaries
-     keyword_scores: dict[str, tuple[Memory, float]] = {}
-     for memory, score in keyword_results:
-         keyword_scores[memory.id] = (memory, score)
-
-     semantic_scores: dict[str, tuple[Memory, float]] = {}
-     for memory, score in semantic_results:
-         semantic_scores[memory.id] = (memory, score)
-
-     # Get all unique memory IDs
-     all_ids = set(keyword_scores.keys()) | set(semantic_scores.keys())
-
-     if not all_ids:
-         return []
-
-     # Normalize keyword scores to 0-1 range
-     if keyword_scores:
-         kw_score_values = [s for _, s in keyword_scores.values()]
-         kw_normalized = normalize_scores(kw_score_values)
-         kw_norm_map = dict(zip(keyword_scores.keys(), kw_normalized))
-     else:
-         kw_norm_map = {}
-
-     # Semantic scores are already 0-1 (cosine similarity)
-     sem_norm_map = {mid: score for mid, (_, score) in semantic_scores.items()}
-
-     # Combine scores
-     combined_results: list[tuple[Memory, float, float]] = []
-
-     for memory_id in all_ids:
-         # Get memory object (prefer from keyword results for consistency)
-         if memory_id in keyword_scores:
-             memory = keyword_scores[memory_id][0]
-         else:
-             memory = semantic_scores[memory_id][0]
-
-         # Get normalized scores (0 if not in that result set)
-         kw_score = kw_norm_map.get(memory_id, 0.0)
-         sem_score = sem_norm_map.get(memory_id, 0.0)
-
-         combined_results.append((memory, kw_score, sem_score))
-
-     # Sort by weighted combined score
-     def combined_score(item: tuple[Memory, float, float]) -> float:
-         _, kw, sem = item
-         return (kw * keyword_weight) + (sem * semantic_weight)
-
-     combined_results.sort(key=combined_score, reverse=True)
-
-     return combined_results[:limit]
-
-
- def search(
-     conn: sqlite3.Connection,
-     query: str,
-     mode: str = "keyword",
-     type_filter: Optional[str] = None,
-     tags_filter: Optional[list[str]] = None,
-     status_filter: Optional[str] = None,
-     min_importance: Optional[int] = None,
-     include_archived: bool = False,
-     limit: int = 10,
- ) -> list[tuple[Memory, float, float]]:
-     """Unified search function supporting all modes.
-
-     Args:
-         conn: Database connection
-         query: Search query string
-         mode: Search mode - "keyword", "semantic", or "hybrid"
-         type_filter: Filter by memory type
-         tags_filter: Filter by tags
-         status_filter: Filter by status
-         min_importance: Minimum importance score
-         include_archived: Include archived memories
-         limit: Maximum results
-
-     Returns:
-         List of (Memory, keyword_score, semantic_score) tuples
-     """
-     common_args = {
-         "conn": conn,
-         "query": query,
-         "type_filter": type_filter,
-         "tags_filter": tags_filter,
-         "status_filter": status_filter,
-         "min_importance": min_importance,
-         "include_archived": include_archived,
-         "limit": limit,
-     }
-
-     if mode == "keyword":
-         results = keyword_search(**common_args)
-         # Convert to unified format (keyword_score, semantic_score=0)
-         return [(memory, score, 0.0) for memory, score in results]
-
-     elif mode == "semantic":
-         try:
-             results = semantic_search(**common_args, similarity_threshold=0.3)
-             # Convert to unified format (keyword_score=0, semantic_score)
-             return [(memory, 0.0, score) for memory, score in results]
-         except Exception as e:
-             logger.warning(f"Semantic search failed, falling back to keyword: {e}")
-             results = keyword_search(**common_args)
-             return [(memory, score, 0.0) for memory, score in results]
-
-     elif mode == "hybrid":
-         return hybrid_search(**common_args)
-
-     else:
-         logger.warning(f"Unknown search mode '{mode}', using keyword")
-         results = keyword_search(**common_args)
-         return [(memory, score, 0.0) for memory, score in results]
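The core of the removed `hybrid_search` is the weighted merge: the two weights are normalized to sum to 1, then every candidate is ranked by `kw * keyword_weight + sem * semantic_weight`, with a score of 0 for whichever result set missed the memory. A standalone sketch of just that ranking step, using made-up IDs and scores (the Memory objects and the actual search backends are omitted):

# Illustrative re-ranking with the same weighting scheme as hybrid_search:
# normalize the weights, then sort by kw*w_kw + sem*w_sem.
keyword_weight, semantic_weight = 0.4, 0.6
total = keyword_weight + semantic_weight
w_kw, w_sem = keyword_weight / total, semantic_weight / total

# (memory_id, normalized_keyword_score, semantic_cosine_score)
candidates = [
    ("mem_a", 1.0, 0.35),  # strong exact match, weaker semantic match
    ("mem_b", 0.2, 0.90),  # weak keyword match, strong semantic match
    ("mem_c", 0.0, 0.55),  # found only by semantic search
]

ranked = sorted(candidates, key=lambda c: c[1] * w_kw + c[2] * w_sem, reverse=True)
for mem_id, kw, sem in ranked:
    print(f"{mem_id}: combined={kw * w_kw + sem * w_sem:.2f}")
# mem_b scores 0.62, mem_a 0.61, mem_c 0.33 -- the default 0.6 semantic
# weight tips the order toward the conceptually similar result.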