omni-cortex 1.17.0__py3-none-any.whl → 1.17.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66)
  1. omni_cortex/__init__.py +3 -0
  2. omni_cortex/categorization/__init__.py +9 -0
  3. omni_cortex/categorization/auto_tags.py +166 -0
  4. omni_cortex/categorization/auto_type.py +165 -0
  5. omni_cortex/config.py +141 -0
  6. omni_cortex/dashboard.py +232 -0
  7. omni_cortex/database/__init__.py +24 -0
  8. omni_cortex/database/connection.py +137 -0
  9. omni_cortex/database/migrations.py +210 -0
  10. omni_cortex/database/schema.py +212 -0
  11. omni_cortex/database/sync.py +421 -0
  12. omni_cortex/decay/__init__.py +7 -0
  13. omni_cortex/decay/importance.py +147 -0
  14. omni_cortex/embeddings/__init__.py +35 -0
  15. omni_cortex/embeddings/local.py +442 -0
  16. omni_cortex/models/__init__.py +20 -0
  17. omni_cortex/models/activity.py +265 -0
  18. omni_cortex/models/agent.py +144 -0
  19. omni_cortex/models/memory.py +395 -0
  20. omni_cortex/models/relationship.py +206 -0
  21. omni_cortex/models/session.py +290 -0
  22. omni_cortex/resources/__init__.py +1 -0
  23. omni_cortex/search/__init__.py +22 -0
  24. omni_cortex/search/hybrid.py +197 -0
  25. omni_cortex/search/keyword.py +204 -0
  26. omni_cortex/search/ranking.py +127 -0
  27. omni_cortex/search/semantic.py +232 -0
  28. omni_cortex/server.py +360 -0
  29. omni_cortex/setup.py +278 -0
  30. omni_cortex/tools/__init__.py +13 -0
  31. omni_cortex/tools/activities.py +453 -0
  32. omni_cortex/tools/memories.py +536 -0
  33. omni_cortex/tools/sessions.py +311 -0
  34. omni_cortex/tools/utilities.py +477 -0
  35. omni_cortex/utils/__init__.py +13 -0
  36. omni_cortex/utils/formatting.py +282 -0
  37. omni_cortex/utils/ids.py +72 -0
  38. omni_cortex/utils/timestamps.py +129 -0
  39. omni_cortex/utils/truncation.py +111 -0
  40. {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/main.py +43 -13
  41. {omni_cortex-1.17.0.dist-info → omni_cortex-1.17.2.dist-info}/METADATA +1 -1
  42. omni_cortex-1.17.2.dist-info/RECORD +65 -0
  43. omni_cortex-1.17.0.dist-info/RECORD +0 -26
  44. {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/.env.example +0 -0
  45. {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/backfill_summaries.py +0 -0
  46. {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/chat_service.py +0 -0
  47. {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/database.py +0 -0
  48. {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/image_service.py +0 -0
  49. {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/logging_config.py +0 -0
  50. {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/models.py +0 -0
  51. {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/project_config.py +0 -0
  52. {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/project_scanner.py +0 -0
  53. {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/prompt_security.py +0 -0
  54. {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/pyproject.toml +0 -0
  55. {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/security.py +0 -0
  56. {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/uv.lock +0 -0
  57. {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/dashboard/backend/websocket_manager.py +0 -0
  58. {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/hooks/post_tool_use.py +0 -0
  59. {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/hooks/pre_tool_use.py +0 -0
  60. {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/hooks/session_utils.py +0 -0
  61. {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/hooks/stop.py +0 -0
  62. {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/hooks/subagent_stop.py +0 -0
  63. {omni_cortex-1.17.0.data → omni_cortex-1.17.2.data}/data/share/omni-cortex/hooks/user_prompt.py +0 -0
  64. {omni_cortex-1.17.0.dist-info → omni_cortex-1.17.2.dist-info}/WHEEL +0 -0
  65. {omni_cortex-1.17.0.dist-info → omni_cortex-1.17.2.dist-info}/entry_points.txt +0 -0
  66. {omni_cortex-1.17.0.dist-info → omni_cortex-1.17.2.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,477 @@
1
+ """Utility tools for Omni Cortex MCP."""
2
+
3
+ import json
4
+ from typing import Optional
5
+ from pydantic import BaseModel, Field, ConfigDict
6
+
7
+ from mcp.server.fastmcp import FastMCP
8
+
9
+ from ..database.connection import init_database
10
+ from ..database.sync import search_global_memories, get_global_stats, sync_all_project_memories
11
+ from ..models.memory import list_memories, update_memory, MemoryUpdate
12
+ from ..utils.timestamps import now_iso
13
+
14
+
15
+ # === Input Models ===
16
+
17
class ListTagsInput(BaseModel):
    """Input for listing tags."""

    model_config = ConfigDict(str_strip_whitespace=True, validate_assignment=True)

    # Only tags used on at least this many memories are returned.
    min_count: int = Field(1, description="Minimum usage count", ge=1)
    # Cap on the number of tags returned (most-used first).
    limit: int = Field(50, description="Maximum tags to return", ge=1, le=200)
24
+
25
+
26
class ReviewMemoriesInput(BaseModel):
    """Input for reviewing memories."""

    model_config = ConfigDict(str_strip_whitespace=True, validate_assignment=True)

    # Checked against VALID_ACTIONS inside the tool, not by model validation.
    action: str = Field(
        ..., description="Action: list, mark_fresh, mark_outdated, mark_archived"
    )
    # NOTE(review): declared but not read by cortex_review_memories — confirm intended use.
    days_threshold: int = Field(30, description="Review memories older than this", ge=1)
    # Required for the mark_* actions; ignored by the list action.
    memory_ids: Optional[list[str]] = Field(None, description="Memory IDs to update (for mark actions)")
    limit: int = Field(20, description="Maximum memories to list", ge=1, le=100)
37
+
38
+
39
class ExportInput(BaseModel):
    """Input for exporting data."""

    model_config = ConfigDict(str_strip_whitespace=True, validate_assignment=True)

    # "sqlite" performs a file copy of the database and requires output_path;
    # "markdown" and "json" serialize memories/activities as text.
    format: str = Field("markdown", description="Export format: markdown, json, sqlite")
    include_activities: bool = Field(True, description="Include activities")
    include_memories: bool = Field(True, description="Include memories")
    # Applied to activities only (passed to get_activities as `since`).
    since: Optional[str] = Field(None, description="Export data since this date (ISO 8601)")
    # When omitted, text exports are returned inline (and truncated at 10k chars).
    output_path: Optional[str] = Field(None, description="File path to save export")
49
+
50
+
51
class GlobalSearchInput(BaseModel):
    """Input for global cross-project search."""

    model_config = ConfigDict(str_strip_whitespace=True, validate_assignment=True)

    query: str = Field(..., description="Search query", min_length=1)
    type_filter: Optional[str] = Field(None, description="Filter by memory type")
    tags_filter: Optional[list[str]] = Field(None, description="Filter by tags")
    project_filter: Optional[str] = Field(None, description="Filter by project path (substring match)")
    limit: int = Field(20, description="Maximum results", ge=1, le=100)
61
+
62
+
63
class GlobalSyncInput(BaseModel):
    """Input for syncing to global index."""

    model_config = ConfigDict(str_strip_whitespace=True, validate_assignment=True)

    # Safety switch: the sync tool refuses to run unless this is explicitly True.
    full_sync: bool = Field(False, description="Sync all project memories to global index")
69
+
70
+
71
# Accepted values for ReviewMemoriesInput.action; the mark_* entries map to
# memory statuses inside cortex_review_memories.
VALID_ACTIONS = ["list", "mark_fresh", "mark_outdated", "mark_archived"]
72
+
73
+
74
def register_utility_tools(mcp: FastMCP) -> None:
    """Register all utility tools with the MCP server.

    Defines and attaches six tools as closures: tag listing, memory review,
    data export, global cross-project search, global index statistics, and a
    manual global-index sync. All tools return human-readable strings and
    convert any exception into an "Error ..." string rather than raising.
    """

    @mcp.tool(
        name="cortex_list_tags",
        annotations={
            "title": "List Tags",
            "readOnlyHint": True,
            "destructiveHint": False,
            "idempotentHint": True,
            "openWorldHint": False,
        },
    )
    async def cortex_list_tags(params: ListTagsInput) -> str:
        """List all tags used in memories with usage counts.

        Args:
            params: ListTagsInput with filters

        Returns:
            List of tags and their usage counts
        """
        try:
            conn = init_database()
            cursor = conn.cursor()

            # Query to extract and count tags
            cursor.execute("""
                SELECT tags FROM memories
                WHERE tags IS NOT NULL AND tags != '[]'
            """)

            # Tags are stored as a JSON array per row; aggregate counts in Python.
            tag_counts: dict[str, int] = {}
            for row in cursor.fetchall():
                tags = row["tags"]
                if tags:
                    # Column may come back as a JSON string or an already-decoded list.
                    if isinstance(tags, str):
                        tags = json.loads(tags)
                    for tag in tags:
                        tag_counts[tag] = tag_counts.get(tag, 0) + 1

            # Filter by min_count and sort
            filtered = [
                (tag, count)
                for tag, count in tag_counts.items()
                if count >= params.min_count
            ]
            filtered.sort(key=lambda x: x[1], reverse=True)
            filtered = filtered[:params.limit]

            if not filtered:
                return "No tags found."

            lines = ["# Tags", ""]
            for tag, count in filtered:
                lines.append(f"- **{tag}**: {count} memories")

            return "\n".join(lines)

        except Exception as e:
            return f"Error listing tags: {e}"

    @mcp.tool(
        name="cortex_review_memories",
        annotations={
            "title": "Review Memories",
            "readOnlyHint": False,
            "destructiveHint": False,
            "idempotentHint": True,
            "openWorldHint": False,
        },
    )
    async def cortex_review_memories(params: ReviewMemoriesInput) -> str:
        """Review and update memory freshness status.

        Actions:
        - list: Show memories that need review
        - mark_fresh: Mark memories as verified/fresh
        - mark_outdated: Mark memories as outdated
        - mark_archived: Archive memories

        Args:
            params: ReviewMemoriesInput with action and options

        Returns:
            Results of the review action
        """
        try:
            if params.action not in VALID_ACTIONS:
                return f"Invalid action: {params.action}. Valid: {', '.join(VALID_ACTIONS)}"

            conn = init_database()

            if params.action == "list":
                # Find memories that need review
                memories, total = list_memories(
                    conn,
                    status_filter="needs_review",
                    limit=params.limit,
                )

                if not memories:
                    # Also check for memories not accessed recently
                    # (least-recently-accessed first as a fallback candidate set).
                    memories, total = list_memories(
                        conn,
                        sort_by="last_accessed",
                        sort_order="asc",
                        limit=params.limit,
                    )

                if not memories:
                    return "No memories need review."

                lines = [f"# Memories for Review ({len(memories)})", ""]
                for mem in memories:
                    lines.append(f"## {mem.id}")
                    lines.append(f"Type: {mem.type} | Status: {mem.status}")
                    lines.append(f"Last accessed: {mem.last_accessed}")
                    lines.append(f"Content: {mem.content[:100]}...")
                    lines.append("")

                return "\n".join(lines)

            else:
                # Update memories
                if not params.memory_ids:
                    return "No memory IDs provided for update."

                # Map the mark_* action names onto stored status values.
                status_map = {
                    "mark_fresh": "fresh",
                    "mark_outdated": "outdated",
                    "mark_archived": "archived",
                }
                new_status = status_map[params.action]

                updated = 0
                for memory_id in params.memory_ids:
                    result = update_memory(
                        conn,
                        memory_id,
                        MemoryUpdate(
                            status=new_status,
                        ),
                    )
                    # update_memory's return is treated as truthy-on-success here.
                    if result:
                        updated += 1

                return f"Updated {updated} memories to status: {new_status}"

        except Exception as e:
            return f"Error reviewing memories: {e}"

    @mcp.tool(
        name="cortex_export",
        annotations={
            "title": "Export Data",
            "readOnlyHint": True,
            "destructiveHint": False,
            "idempotentHint": True,
            "openWorldHint": False,
        },
    )
    async def cortex_export(params: ExportInput) -> str:
        """Export memories and activities to various formats.

        Args:
            params: ExportInput with format and filters

        Returns:
            Exported data or confirmation of file save
        """
        try:
            conn = init_database()

            # SQLite dump format
            if params.format == "sqlite":
                if not params.output_path:
                    return "SQLite export requires output_path parameter."

                # Local imports keep these dependencies off the common path.
                from ..config import get_project_db_path
                import shutil

                source_path = get_project_db_path()
                if not source_path.exists():
                    return f"Database not found: {source_path}"

                # Ensure all data is flushed to disk (checkpoint WAL)
                try:
                    conn.execute("PRAGMA wal_checkpoint(TRUNCATE)")
                except Exception:
                    pass  # May fail if not in WAL mode

                # Copy the database file
                shutil.copy2(source_path, params.output_path)

                # Also copy WAL and SHM files if they exist
                for suffix in ["-wal", "-shm"]:
                    wal_path = source_path.parent / (source_path.name + suffix)
                    if wal_path.exists():
                        shutil.copy2(wal_path, params.output_path + suffix)

                return f"SQLite database exported to: {params.output_path}"

            data = {
                "exported_at": now_iso(),
                "format": params.format,
            }

            # NOTE(review): both exports are capped at 1000 rows — confirm that is
            # an acceptable bound for large projects.
            if params.include_memories:
                memories, _ = list_memories(conn, limit=1000)
                data["memories"] = [m.model_dump() for m in memories]

            if params.include_activities:
                from ..models.activity import get_activities
                activities, _ = get_activities(conn, since=params.since, limit=1000)
                data["activities"] = [a.model_dump() for a in activities]

            if params.format == "json":
                # default=str stringifies anything json can't encode (e.g. datetimes).
                output = json.dumps(data, indent=2, default=str)
            else:
                # Markdown format
                lines = [
                    "# Omni Cortex Export",
                    f"Exported: {data['exported_at']}",
                    "",
                ]

                if params.include_memories and data.get("memories"):
                    lines.append("## Memories")
                    lines.append("")
                    for mem in data["memories"]:
                        lines.append(f"### [{mem['type']}] {mem['id']}")
                        lines.append(f"**Content:** {mem['content']}")
                        if mem.get("context"):
                            lines.append(f"**Context:** {mem['context']}")
                        if mem.get("tags"):
                            lines.append(f"**Tags:** {', '.join(mem['tags'])}")
                        lines.append(f"**Created:** {mem['created_at']}")
                        lines.append("")

                if params.include_activities and data.get("activities"):
                    lines.append("## Activities")
                    lines.append("")
                    for act in data["activities"][:50]:  # Limit for readability
                        lines.append(f"- **{act['event_type']}** ({act['timestamp']})")
                        if act.get("tool_name"):
                            lines.append(f"  Tool: {act['tool_name']}")
                        lines.append("")

                output = "\n".join(lines)

            if params.output_path:
                with open(params.output_path, "w", encoding="utf-8") as f:
                    f.write(output)
                return f"Exported to: {params.output_path}"

            # Truncate if too long
            if len(output) > 10000:
                output = output[:10000] + "\n\n... [truncated]"

            return output

        except Exception as e:
            return f"Error exporting data: {e}"

    @mcp.tool(
        name="cortex_global_search",
        annotations={
            "title": "Search Global Index",
            "readOnlyHint": True,
            "destructiveHint": False,
            "idempotentHint": True,
            "openWorldHint": False,
        },
    )
    async def cortex_global_search(params: GlobalSearchInput) -> str:
        """Search memories across all projects via global index.

        This tool searches the global index at ~/.omni-cortex/global.db
        which contains memories from all projects that have global_sync_enabled.

        Args:
            params: GlobalSearchInput with query and filters

        Returns:
            Matching memories from all projects
        """
        try:
            results = search_global_memories(
                query=params.query,
                type_filter=params.type_filter,
                tags_filter=params.tags_filter,
                project_filter=params.project_filter,
                limit=params.limit,
            )

            if not results:
                return f"No memories found in global index for: {params.query}"

            lines = [f"# Global Search Results ({len(results)})", ""]

            # Group by project
            by_project: dict[str, list] = {}
            for mem in results:
                project = mem.get("project_path", "unknown")
                if project not in by_project:
                    by_project[project] = []
                by_project[project].append(mem)

            for project, memories in by_project.items():
                lines.append(f"## Project: {project}")
                lines.append("")

                for mem in memories:
                    lines.append(f"### [{mem['type']}] {mem['id']}")
                    lines.append(f"{mem['content'][:200]}...")
                    if mem.get("tags"):
                        lines.append(f"**Tags:** {', '.join(mem['tags'])}")
                    lines.append(f"**Score:** {mem.get('score', 0):.2f}")
                    lines.append("")

            return "\n".join(lines)

        except Exception as e:
            return f"Error searching global index: {e}"

    @mcp.tool(
        name="cortex_global_stats",
        annotations={
            "title": "Global Index Stats",
            "readOnlyHint": True,
            "destructiveHint": False,
            "idempotentHint": True,
            "openWorldHint": False,
        },
    )
    async def cortex_global_stats() -> str:
        """Get statistics from the global memory index.

        Shows total memories, breakdown by project and type.

        Returns:
            Statistics about the global index
        """
        try:
            stats = get_global_stats()

            # get_global_stats signals failure via an "error" key rather than raising.
            if "error" in stats:
                return f"Error: {stats['error']}"

            lines = [
                "# Global Index Statistics",
                "",
                f"**Total Memories:** {stats.get('total_memories', 0)}",
                "",
            ]

            if stats.get("by_project"):
                lines.append("## By Project")
                for project, count in stats["by_project"].items():
                    lines.append(f"- {project}: {count}")
                lines.append("")

            if stats.get("by_type"):
                lines.append("## By Type")
                for mem_type, count in stats["by_type"].items():
                    lines.append(f"- {mem_type}: {count}")

            return "\n".join(lines)

        except Exception as e:
            return f"Error getting global stats: {e}"

    @mcp.tool(
        name="cortex_sync_to_global",
        annotations={
            "title": "Sync to Global Index",
            "readOnlyHint": False,
            "destructiveHint": False,
            "idempotentHint": True,
            "openWorldHint": False,
        },
    )
    async def cortex_sync_to_global(params: GlobalSyncInput) -> str:
        """Sync project memories to the global index.

        This manually triggers a sync of all project memories to the
        global index. Normally this happens automatically on create/update.

        Args:
            params: GlobalSyncInput with sync options

        Returns:
            Number of memories synced
        """
        try:
            # Require an explicit opt-in before touching the global index.
            if params.full_sync:
                count = sync_all_project_memories()
                return f"Synced {count} memories to global index."
            else:
                return "Set full_sync=true to sync all project memories to global index."

        except Exception as e:
            return f"Error syncing to global: {e}"
@@ -0,0 +1,13 @@
1
"""Utility functions for Omni Cortex."""

from .ids import generate_id
from .timestamps import now_iso, parse_iso, format_relative_time
from .truncation import truncate_output

# Public API re-exported at package level.
__all__ = [
    "generate_id",
    "now_iso",
    "parse_iso",
    "format_relative_time",
    "truncate_output",
]