omni_cortex-1.0.4-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- omni_cortex-1.0.4.data/data/share/omni-cortex/dashboard/backend/chat_service.py +140 -0
- omni_cortex-1.0.4.data/data/share/omni-cortex/dashboard/backend/database.py +729 -0
- omni_cortex-1.0.4.data/data/share/omni-cortex/dashboard/backend/main.py +661 -0
- omni_cortex-1.0.4.data/data/share/omni-cortex/dashboard/backend/models.py +140 -0
- omni_cortex-1.0.4.data/data/share/omni-cortex/dashboard/backend/project_scanner.py +141 -0
- omni_cortex-1.0.4.data/data/share/omni-cortex/dashboard/backend/pyproject.toml +23 -0
- omni_cortex-1.0.4.data/data/share/omni-cortex/dashboard/backend/uv.lock +697 -0
- omni_cortex-1.0.4.data/data/share/omni-cortex/dashboard/backend/websocket_manager.py +82 -0
- omni_cortex-1.0.4.data/data/share/omni-cortex/hooks/post_tool_use.py +160 -0
- omni_cortex-1.0.4.data/data/share/omni-cortex/hooks/pre_tool_use.py +159 -0
- omni_cortex-1.0.4.data/data/share/omni-cortex/hooks/stop.py +184 -0
- omni_cortex-1.0.4.data/data/share/omni-cortex/hooks/subagent_stop.py +120 -0
- omni_cortex-1.0.4.dist-info/METADATA +295 -0
- omni_cortex-1.0.4.dist-info/RECORD +17 -0
- omni_cortex-1.0.4.dist-info/WHEEL +4 -0
- omni_cortex-1.0.4.dist-info/entry_points.txt +4 -0
- omni_cortex-1.0.4.dist-info/licenses/LICENSE +21 -0
omni_cortex-1.0.4.data/data/share/omni-cortex/dashboard/backend/models.py
@@ -0,0 +1,140 @@
+"""Pydantic models for the dashboard API."""
+
+from datetime import datetime
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class ProjectInfo(BaseModel):
+    """Information about a project with omni-cortex database."""
+
+    name: str
+    path: str
+    db_path: str
+    last_modified: Optional[datetime] = None
+    memory_count: int = 0
+    is_global: bool = False
+
+
+class Memory(BaseModel):
+    """Memory record from the database."""
+
+    id: str
+    content: str
+    context: Optional[str] = None
+    memory_type: str = Field(default="other", validation_alias="type")
+    status: str = "fresh"
+    importance_score: int = 50
+    access_count: int = 0
+    created_at: datetime
+    last_accessed: Optional[datetime] = None
+    tags: list[str] = []
+
+    model_config = {"populate_by_name": True}
+
+
+class MemoryStats(BaseModel):
+    """Statistics about memories in a database."""
+
+    total_count: int
+    by_type: dict[str, int]
+    by_status: dict[str, int]
+    avg_importance: float
+    total_access_count: int
+    tags: list[dict[str, int | str]]
+
+
+class Activity(BaseModel):
+    """Activity log record."""
+
+    id: str
+    session_id: Optional[str] = None
+    event_type: str
+    tool_name: Optional[str] = None
+    tool_input: Optional[str] = None
+    tool_output: Optional[str] = None
+    success: bool = True
+    error_message: Optional[str] = None
+    duration_ms: Optional[int] = None
+    file_path: Optional[str] = None
+    timestamp: datetime
+
+
+class Session(BaseModel):
+    """Session record."""
+
+    id: str
+    project_path: str
+    started_at: datetime
+    ended_at: Optional[datetime] = None
+    summary: Optional[str] = None
+    activity_count: int = 0
+
+
+class TimelineEntry(BaseModel):
+    """Entry in the timeline view."""
+
+    timestamp: datetime
+    entry_type: str  # "memory" or "activity"
+    data: dict
+
+
+class FilterParams(BaseModel):
+    """Query filter parameters."""
+
+    memory_type: Optional[str] = None
+    status: Optional[str] = None
+    tags: Optional[list[str]] = None
+    search: Optional[str] = None
+    min_importance: Optional[int] = None
+    max_importance: Optional[int] = None
+    sort_by: str = "last_accessed"
+    sort_order: str = "desc"
+    limit: int = 50
+    offset: int = 0
+
+
+class MemoryUpdate(BaseModel):
+    """Update request for a memory."""
+
+    content: Optional[str] = None
+    context: Optional[str] = None
+    memory_type: Optional[str] = Field(None, validation_alias="type")
+    status: Optional[str] = None
+    importance_score: Optional[int] = Field(None, ge=1, le=100)
+    tags: Optional[list[str]] = None
+
+    model_config = {"populate_by_name": True}
+
+
+class WSEvent(BaseModel):
+    """WebSocket event message."""
+
+    event_type: str
+    data: dict
+    timestamp: datetime = Field(default_factory=datetime.now)
+
+
+class ChatRequest(BaseModel):
+    """Request for the chat endpoint."""
+
+    question: str = Field(..., min_length=1, max_length=2000)
+    max_memories: int = Field(default=10, ge=1, le=50)
+
+
+class ChatSource(BaseModel):
+    """Source memory reference in chat response."""
+
+    id: str
+    type: str
+    content_preview: str
+    tags: list[str]
+
+
+class ChatResponse(BaseModel):
+    """Response from the chat endpoint."""
+
+    answer: str
+    sources: list[ChatSource]
+    error: Optional[str] = None
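For orientation (not part of the package), here is a minimal sketch with made-up values showing how the "type" alias behaves: validation_alias="type" lets Memory accept rows under the stored column name, and populate_by_name keeps the Python field name memory_type usable as well. It assumes the snippet runs next to models.py, like the other backend modules do.

# Illustrative only: all values below are hypothetical.
from datetime import datetime

from models import Memory, MemoryUpdate

row = {
    "id": "mem-001",                       # hypothetical identifier and content
    "content": "Prefer uv over pip in this repo",
    "type": "decision",                    # stored column name, mapped to memory_type
    "created_at": datetime.now(),
    "tags": ["tooling"],
}
memory = Memory.model_validate(row)
print(memory.memory_type)                  # -> "decision"

# populate_by_name=True also accepts the field name directly.
update = MemoryUpdate(memory_type="insight", importance_score=80)
print(update.model_dump(exclude_none=True))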
omni_cortex-1.0.4.data/data/share/omni-cortex/dashboard/backend/project_scanner.py
@@ -0,0 +1,141 @@
+"""Scanner to discover all omni-cortex databases on the system."""
+
+import os
+import sqlite3
+from datetime import datetime
+from pathlib import Path
+
+from models import ProjectInfo
+
+
+def get_global_db_path() -> Path:
+    """Get path to the global index database."""
+    return Path.home() / ".omni-cortex" / "global.db"
+
+
+def get_memory_count(db_path: Path) -> int:
+    """Get the number of memories in a database."""
+    try:
+        conn = sqlite3.connect(str(db_path))
+        cursor = conn.execute("SELECT COUNT(*) FROM memories")
+        count = cursor.fetchone()[0]
+        conn.close()
+        return count
+    except Exception:
+        return 0
+
+
+def get_projects_from_global_db() -> list[str]:
+    """Get unique project paths from the global index."""
+    global_path = get_global_db_path()
+    if not global_path.exists():
+        return []
+
+    try:
+        conn = sqlite3.connect(str(global_path))
+        cursor = conn.execute("SELECT DISTINCT source_project FROM global_memories")
+        paths = [row[0] for row in cursor.fetchall() if row[0]]
+        conn.close()
+        return paths
+    except Exception:
+        return []
+
+
+def scan_directory_for_cortex(base_dir: Path) -> list[Path]:
+    """Scan a directory for .omni-cortex/cortex.db files."""
+    found = []
+    try:
+        for item in base_dir.iterdir():
+            if item.is_dir():
+                cortex_dir = item / ".omni-cortex"
+                cortex_db = cortex_dir / "cortex.db"
+                if cortex_db.exists():
+                    found.append(cortex_db)
+    except PermissionError:
+        pass
+    return found
+
+
+def scan_projects() -> list[ProjectInfo]:
+    """
+    Scan for all omni-cortex databases.
+
+    Returns list of ProjectInfo with name, path, db_path, last_modified, memory_count.
+    """
+    projects: list[ProjectInfo] = []
+    seen_paths: set[str] = set()
+
+    # 1. Add global index if exists
+    global_path = get_global_db_path()
+    if global_path.exists():
+        stat = global_path.stat()
+        projects.append(
+            ProjectInfo(
+                name="Global Index",
+                path=str(global_path.parent),
+                db_path=str(global_path),
+                last_modified=datetime.fromtimestamp(stat.st_mtime),
+                memory_count=get_memory_count(global_path),
+                is_global=True,
+            )
+        )
+        seen_paths.add(str(global_path))
+
+    # 2. Scan common project directories
+    scan_dirs = [
+        Path("D:/Projects"),
+        Path.home() / "projects",
+        Path.home() / "Projects",
+        Path.home() / "code",
+        Path.home() / "Code",
+        Path.home() / "dev",
+        Path.home() / "Dev",
+        Path.home() / "src",
+        Path.home() / "workspace",
+    ]
+
+    for scan_dir in scan_dirs:
+        if scan_dir.exists():
+            for db_path in scan_directory_for_cortex(scan_dir):
+                if str(db_path) not in seen_paths:
+                    project_dir = db_path.parent.parent
+                    stat = db_path.stat()
+                    projects.append(
+                        ProjectInfo(
+                            name=project_dir.name,
+                            path=str(project_dir),
+                            db_path=str(db_path),
+                            last_modified=datetime.fromtimestamp(stat.st_mtime),
+                            memory_count=get_memory_count(db_path),
+                            is_global=False,
+                        )
+                    )
+                    seen_paths.add(str(db_path))
+
+    # 3. Add paths from global db that we haven't seen
+    for project_path in get_projects_from_global_db():
+        db_path = Path(project_path) / ".omni-cortex" / "cortex.db"
+        if db_path.exists() and str(db_path) not in seen_paths:
+            stat = db_path.stat()
+            projects.append(
+                ProjectInfo(
+                    name=Path(project_path).name,
+                    path=project_path,
+                    db_path=str(db_path),
+                    last_modified=datetime.fromtimestamp(stat.st_mtime),
+                    memory_count=get_memory_count(db_path),
+                    is_global=False,
+                )
+            )
+            seen_paths.add(str(db_path))
+
+    # Sort by last_modified (most recent first), with global always first
+    projects.sort(key=lambda p: (not p.is_global, -(p.last_modified.timestamp() if p.last_modified else 0)))
+
+    return projects
+
+
+if __name__ == "__main__":
+    # Test the scanner
+    for project in scan_projects():
+        print(f"{project.name}: {project.db_path} ({project.memory_count} memories)")
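To make the sort key at the end of scan_projects concrete, a small sketch with hypothetical projects: the first tuple element, not p.is_global, pins the global index to the front, and the negated timestamp orders everything else most-recently-modified first. Paths and dates below are invented for illustration.

# Hypothetical data illustrating the (not is_global, -mtime) sort key from scan_projects().
from datetime import datetime

from models import ProjectInfo

projects = [
    ProjectInfo(name="older", path="/tmp/older",
                db_path="/tmp/older/.omni-cortex/cortex.db",
                last_modified=datetime(2024, 1, 1)),
    ProjectInfo(name="newer", path="/tmp/newer",
                db_path="/tmp/newer/.omni-cortex/cortex.db",
                last_modified=datetime(2024, 6, 1)),
    ProjectInfo(name="Global Index", path="/tmp/.omni-cortex",
                db_path="/tmp/.omni-cortex/global.db", is_global=True),
]
projects.sort(key=lambda p: (not p.is_global, -(p.last_modified.timestamp() if p.last_modified else 0)))
print([p.name for p in projects])  # -> ['Global Index', 'newer', 'older']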
omni_cortex-1.0.4.data/data/share/omni-cortex/dashboard/backend/pyproject.toml
@@ -0,0 +1,23 @@
+[project]
+name = "omni-cortex-dashboard"
+version = "0.1.0"
+description = "Web dashboard for Omni-Cortex memory system"
+requires-python = ">=3.11"
+dependencies = [
+    "fastapi>=0.115.0",
+    "uvicorn[standard]>=0.30.0",
+    "websockets>=12.0",
+    "watchdog>=4.0.0",
+    "google-generativeai>=0.8.0",
+    "python-dotenv>=1.0.0",
+]
+
+[project.optional-dependencies]
+dev = ["pytest", "ruff", "httpx"]
+
+[tool.ruff]
+line-length = 100
+target-version = "py311"
+
+[tool.ruff.lint]
+select = ["E", "F", "I", "W"]