omni-cortex 1.6.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/.env.example +22 -0
- omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/backfill_summaries.py +280 -0
- omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/chat_service.py +315 -0
- omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/database.py +1093 -0
- omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/image_service.py +549 -0
- omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/logging_config.py +122 -0
- omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/main.py +1124 -0
- omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/models.py +241 -0
- omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/project_config.py +170 -0
- omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/project_scanner.py +164 -0
- omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/prompt_security.py +111 -0
- omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/pyproject.toml +23 -0
- omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/security.py +104 -0
- omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/uv.lock +1110 -0
- omni_cortex-1.6.0.data/data/share/omni-cortex/dashboard/backend/websocket_manager.py +104 -0
- omni_cortex-1.6.0.data/data/share/omni-cortex/hooks/post_tool_use.py +335 -0
- omni_cortex-1.6.0.data/data/share/omni-cortex/hooks/pre_tool_use.py +333 -0
- omni_cortex-1.6.0.data/data/share/omni-cortex/hooks/stop.py +184 -0
- omni_cortex-1.6.0.data/data/share/omni-cortex/hooks/subagent_stop.py +120 -0
- omni_cortex-1.6.0.dist-info/METADATA +319 -0
- omni_cortex-1.6.0.dist-info/RECORD +24 -0
- omni_cortex-1.6.0.dist-info/WHEEL +4 -0
- omni_cortex-1.6.0.dist-info/entry_points.txt +4 -0
- omni_cortex-1.6.0.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,241 @@
|
|
|
1
|
+
"""Pydantic models for the dashboard API."""
|
|
2
|
+
|
|
3
|
+
from datetime import datetime
|
|
4
|
+
from typing import Optional
|
|
5
|
+
|
|
6
|
+
from pydantic import BaseModel, Field
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class ProjectInfo(BaseModel):
    """Information about a project with omni-cortex database."""

    name: str  # project directory name (or "Global Index" for the global entry)
    path: str  # project root path
    db_path: str  # path to the project's cortex.db file
    last_modified: Optional[datetime] = None  # mtime of the database file
    memory_count: int = 0  # number of rows in the memories table
    is_global: bool = False  # True only for the global index entry
    is_favorite: bool = False  # user marked this project as a favorite
    is_registered: bool = False  # manually registered rather than discovered by scan
    display_name: Optional[str] = None  # user-chosen label, if any


class ScanDirectory(BaseModel):
    """A directory being scanned for projects."""

    path: str
    project_count: int = 0  # projects discovered under this directory


class ProjectRegistration(BaseModel):
    """Request to register a project."""

    path: str  # project root; must contain .omni-cortex/cortex.db
    display_name: Optional[str] = None


class ProjectConfigResponse(BaseModel):
    """Response with project configuration."""

    scan_directories: list[str]
    registered_count: int
    favorites_count: int
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
class Memory(BaseModel):
    """Memory record from the database."""

    id: str
    content: str  # the memory text itself
    context: Optional[str] = None
    # Incoming payloads use the key "type"; it is exposed here as memory_type.
    memory_type: str = Field(default="other", validation_alias="type")
    status: str = "fresh"
    importance_score: int = 50
    access_count: int = 0
    created_at: datetime
    last_accessed: Optional[datetime] = None
    tags: list[str] = []

    # Allow population by field name in addition to the "type" alias.
    model_config = {"populate_by_name": True}


class MemoryStats(BaseModel):
    """Statistics about memories in a database."""

    total_count: int
    by_type: dict[str, int]  # memory type -> count
    by_status: dict[str, int]  # status -> count
    avg_importance: float
    total_access_count: int
    tags: list[dict[str, int | str]]  # per-tag stat entries (mixed value types)
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
class Activity(BaseModel):
    """Activity log record."""

    id: str
    session_id: Optional[str] = None
    event_type: str
    tool_name: Optional[str] = None
    tool_input: Optional[str] = None
    tool_output: Optional[str] = None
    success: bool = True
    error_message: Optional[str] = None
    duration_ms: Optional[int] = None
    file_path: Optional[str] = None
    timestamp: datetime
    # Command analytics fields
    command_name: Optional[str] = None
    command_scope: Optional[str] = None
    mcp_server: Optional[str] = None
    skill_name: Optional[str] = None
    # Natural language summary fields
    summary: Optional[str] = None
    summary_detail: Optional[str] = None


class Session(BaseModel):
    """Session record."""

    id: str
    project_path: str
    started_at: datetime
    ended_at: Optional[datetime] = None  # None while the session is still open
    summary: Optional[str] = None
    activity_count: int = 0


class TimelineEntry(BaseModel):
    """Entry in the timeline view."""

    timestamp: datetime
    entry_type: str  # "memory" or "activity"
    data: dict  # the underlying record; shape depends on entry_type
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
class FilterParams(BaseModel):
    """Query filter parameters."""

    memory_type: Optional[str] = None
    status: Optional[str] = None
    tags: Optional[list[str]] = None
    search: Optional[str] = None  # free-text search term
    min_importance: Optional[int] = None
    max_importance: Optional[int] = None
    sort_by: str = "last_accessed"
    sort_order: str = "desc"
    limit: int = 50  # page size
    offset: int = 0  # pagination offset


class MemoryUpdate(BaseModel):
    """Update request for a memory; all fields are optional (partial update)."""

    content: Optional[str] = None
    context: Optional[str] = None
    # Accepts the wire key "type" as an alias for memory_type.
    memory_type: Optional[str] = Field(None, validation_alias="type")
    status: Optional[str] = None
    importance_score: Optional[int] = Field(None, ge=1, le=100)
    tags: Optional[list[str]] = None

    # Allow population by field name in addition to the "type" alias.
    model_config = {"populate_by_name": True}


class WSEvent(BaseModel):
    """WebSocket event message."""

    event_type: str
    data: dict
    # NOTE(review): datetime.now() is naive local time — confirm clients
    # don't expect UTC.
    timestamp: datetime = Field(default_factory=datetime.now)
|
|
150
|
+
|
|
151
|
+
|
|
152
|
+
class ChatRequest(BaseModel):
    """Request for the chat endpoint."""

    question: str = Field(..., min_length=1, max_length=2000)
    max_memories: int = Field(default=10, ge=1, le=50)  # cap on memories retrieved


class ChatSource(BaseModel):
    """Source memory reference in chat response."""

    id: str
    type: str
    content_preview: str  # preview of the memory content
    tags: list[str]


class ChatResponse(BaseModel):
    """Response from the chat endpoint."""

    answer: str
    sources: list[ChatSource]  # source memories referenced by the answer
    error: Optional[str] = None


class ConversationMessage(BaseModel):
    """A message in a conversation."""

    role: str  # 'user' or 'assistant'
    content: str
    timestamp: str  # timestamp as a string; format not enforced here


class ConversationSaveRequest(BaseModel):
    """Request to save a conversation as memory."""

    messages: list[ConversationMessage]
    referenced_memory_ids: Optional[list[str]] = None
    importance: Optional[int] = Field(default=60, ge=1, le=100)


class ConversationSaveResponse(BaseModel):
    """Response after saving a conversation."""

    memory_id: str  # id of the newly created memory
    summary: str
|
|
197
|
+
|
|
198
|
+
|
|
199
|
+
# --- Image Generation Models ---
|
|
200
|
+
|
|
201
|
+
|
|
202
|
+
class SingleImageRequestModel(BaseModel):
    """Request for a single image in a batch."""

    preset: str = "custom"  # maps to ImagePreset enum
    custom_prompt: str = ""
    aspect_ratio: str = "16:9"
    image_size: str = "2K"


class BatchImageGenerationRequest(BaseModel):
    """Request for generating multiple images."""

    images: list[SingleImageRequestModel]  # 1, 2, or 4 images
    memory_ids: list[str] = []  # memories providing prompt context
    chat_messages: list[dict] = []  # recent chat for context
    use_search_grounding: bool = False


class ImageRefineRequest(BaseModel):
    """Request for refining an existing image."""

    image_id: str  # id of the image to refine
    refinement_prompt: str
    aspect_ratio: Optional[str] = None  # optional override
    image_size: Optional[str] = None  # optional override


class SingleImageResponseModel(BaseModel):
    """Response for a single generated image."""

    success: bool
    image_data: Optional[str] = None  # Base64 encoded
    text_response: Optional[str] = None
    thought_signature: Optional[str] = None
    image_id: Optional[str] = None
    error: Optional[str] = None  # error message, if any
    index: int = 0  # position within the requested batch


class BatchImageGenerationResponse(BaseModel):
    """Response for batch image generation."""

    success: bool
    images: list[SingleImageResponseModel] = []
    errors: list[str] = []
|
|
@@ -0,0 +1,170 @@
|
|
|
1
|
+
"""Project configuration manager for user preferences."""
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import platform
|
|
5
|
+
from datetime import datetime
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
from typing import Optional
|
|
8
|
+
|
|
9
|
+
from pydantic import BaseModel
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class RegisteredProject(BaseModel):
    """A manually registered project."""

    path: str  # project root path
    display_name: Optional[str] = None  # user-chosen label, if any
    added_at: datetime  # when the project was registered


class RecentProject(BaseModel):
    """A recently accessed project."""

    path: str
    last_accessed: datetime


class ProjectConfig(BaseModel):
    """User project configuration, persisted as JSON at CONFIG_PATH."""

    version: int = 1  # config schema version
    scan_directories: list[str] = []  # directories scanned for projects
    registered_projects: list[RegisteredProject] = []  # manual additions
    favorites: list[str] = []  # favorited project paths
    recent: list[RecentProject] = []  # most-recently-used projects (capped at 10)


# Location of the persisted configuration file.
CONFIG_PATH = Path.home() / ".omni-cortex" / "projects.json"
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def get_default_scan_dirs() -> list[str]:
    """Return default scan directories that actually exist on this machine.

    Candidates are common project folders under the user's home directory;
    on Windows, ``D:/Projects`` is also considered and takes priority.
    """
    home = Path.home()

    candidates: list[str] = []
    if platform.system() == "Windows":
        candidates.append("D:/Projects")
    for folder in ("projects", "Projects", "code", "Code", "dev", "workspace"):
        candidates.append(str(home / folder))

    # Only keep directories that are actually present.
    return [c for c in candidates if Path(c).exists()]
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
def load_config() -> ProjectConfig:
    """Load the persisted config, creating defaults when missing or corrupt.

    A fresh default configuration is written back to disk whenever the
    existing file cannot be read or parsed.
    """
    try:
        raw = CONFIG_PATH.read_text(encoding="utf-8")
    except OSError:
        raw = None  # file missing or unreadable

    if raw is not None:
        try:
            return ProjectConfig(**json.loads(raw))
        except Exception:
            pass  # corrupt config: fall through to defaults

    # Build and persist a default config.
    config = ProjectConfig(scan_directories=get_default_scan_dirs())
    save_config(config)
    return config
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
def save_config(config: ProjectConfig) -> None:
    """Persist *config* as pretty-printed JSON, creating parent dirs as needed."""
    CONFIG_PATH.parent.mkdir(parents=True, exist_ok=True)
    payload = config.model_dump_json(indent=2)
    CONFIG_PATH.write_text(payload, encoding="utf-8")
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
def add_registered_project(path: str, display_name: Optional[str] = None) -> bool:
    """Register a project by path.

    Returns False when the path has no cortex database or is already
    registered; True when the project was added and the config saved.
    """
    config = load_config()

    # The project must already contain an omni-cortex database.
    if not (Path(path) / ".omni-cortex" / "cortex.db").exists():
        return False

    # Reject duplicates.
    for existing in config.registered_projects:
        if existing.path == path:
            return False

    entry = RegisteredProject(path=path, display_name=display_name, added_at=datetime.now())
    config.registered_projects.append(entry)
    save_config(config)
    return True
|
|
98
|
+
|
|
99
|
+
|
|
100
|
+
def remove_registered_project(path: str) -> bool:
    """Remove a registered project. Returns True if anything was removed."""
    config = load_config()
    kept = [project for project in config.registered_projects if project.path != path]
    if len(kept) == len(config.registered_projects):
        return False  # nothing matched; don't touch the config file
    config.registered_projects = kept
    save_config(config)
    return True
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
def toggle_favorite(path: str) -> bool:
    """Toggle favorite status for a project. Returns the new favorite status."""
    config = load_config()

    is_favorite = path not in config.favorites
    if is_favorite:
        config.favorites.append(path)
    else:
        config.favorites.remove(path)

    save_config(config)
    return is_favorite
|
|
127
|
+
|
|
128
|
+
|
|
129
|
+
def update_recent(path: str) -> None:
    """Move *path* to the front of the recent-projects list (capped at 10)."""
    config = load_config()

    newest = RecentProject(path=path, last_accessed=datetime.now())
    others = [entry for entry in config.recent if entry.path != path]
    config.recent = ([newest] + others)[:10]

    save_config(config)
|
|
143
|
+
|
|
144
|
+
|
|
145
|
+
def add_scan_directory(directory: str) -> bool:
    """Add a directory (after ``~`` expansion) to the scan list.

    Returns False when the directory does not exist or is already listed.
    """
    config = load_config()

    expanded = str(Path(directory).expanduser())
    if not Path(expanded).is_dir():
        return False
    if expanded in config.scan_directories:
        return False

    config.scan_directories.append(expanded)
    save_config(config)
    return True
|
|
160
|
+
|
|
161
|
+
|
|
162
|
+
def remove_scan_directory(directory: str) -> bool:
    """Remove a directory from the scan list. Returns True if it was present."""
    config = load_config()
    try:
        config.scan_directories.remove(directory)
    except ValueError:
        return False  # not in the list; nothing to save
    save_config(config)
    return True
|
|
@@ -0,0 +1,164 @@
|
|
|
1
|
+
"""Scanner to discover all omni-cortex databases on the system."""
|
|
2
|
+
|
|
3
|
+
import sqlite3
|
|
4
|
+
from datetime import datetime
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
|
|
7
|
+
from models import ProjectInfo
|
|
8
|
+
from project_config import load_config
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def get_global_db_path() -> Path:
    """Return the location of the global cross-project index database."""
    return Path.home().joinpath(".omni-cortex", "global.db")
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def get_memory_count(db_path: Path) -> int:
    """Return the number of rows in *db_path*'s ``memories`` table.

    Returns 0 for a missing or unreadable database.  The up-front existence
    check matters: ``sqlite3.connect`` would otherwise create an empty
    database file as a side effect when handed a path that does not exist.
    The connection is closed in ``finally`` so it cannot leak when the
    query raises (e.g. the table is missing).
    """
    if not db_path.exists():
        return 0
    conn = None
    try:
        conn = sqlite3.connect(str(db_path))
        row = conn.execute("SELECT COUNT(*) FROM memories").fetchone()
        return int(row[0])
    except Exception:
        # Corrupt database or missing table: treat as empty.
        return 0
    finally:
        if conn is not None:
            conn.close()
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def get_projects_from_global_db() -> list[str]:
    """Return unique project paths recorded in the global index.

    Returns an empty list when the index database does not exist or cannot
    be read.  The connection is closed in ``finally`` so it cannot leak
    when the query raises (e.g. the ``global_memories`` table is missing).
    """
    global_path = get_global_db_path()
    if not global_path.exists():
        return []

    conn = None
    try:
        conn = sqlite3.connect(str(global_path))
        cursor = conn.execute("SELECT DISTINCT source_project FROM global_memories")
        # Drop NULL/empty source projects.
        return [row[0] for row in cursor.fetchall() if row[0]]
    except Exception:
        # Unreadable database or missing table: treat as no known projects.
        return []
    finally:
        if conn is not None:
            conn.close()
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
def scan_directory_for_cortex(base_dir: Path) -> list[Path]:
    """Return ``.omni-cortex/cortex.db`` paths found in *base_dir*'s children.

    Only immediate subdirectories are inspected; a PermissionError while
    listing aborts the scan silently, returning whatever was found so far.
    """
    databases: list[Path] = []
    try:
        for child in base_dir.iterdir():
            if not child.is_dir():
                continue
            candidate = child / ".omni-cortex" / "cortex.db"
            if candidate.exists():
                databases.append(candidate)
    except PermissionError:
        pass
    return databases
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
def _project_info(
    *,
    name: str,
    project_path: str,
    db_path: Path,
    favorites: list[str],
    is_global: bool = False,
    is_registered: bool = False,
    display_name: str | None = None,
) -> ProjectInfo:
    """Build a ProjectInfo for *db_path*, reading its mtime and memory count."""
    stat = db_path.stat()
    return ProjectInfo(
        name=name,
        path=project_path,
        db_path=str(db_path),
        last_modified=datetime.fromtimestamp(stat.st_mtime),
        memory_count=get_memory_count(db_path),
        is_global=is_global,
        is_favorite=project_path in favorites,
        is_registered=is_registered,
        display_name=display_name,
    )


def scan_projects() -> list[ProjectInfo]:
    """
    Scan for all omni-cortex databases.

    Sources, in order: the global index, configured scan directories,
    manually registered projects, and project paths recorded in the global
    index.  Duplicates (by database path) are skipped.  Results are sorted
    with the global index first, then favorites, then most recently
    modified.

    Returns list of ProjectInfo with name, path, db_path, last_modified,
    memory_count.
    """
    projects: list[ProjectInfo] = []
    seen_paths: set[str] = set()

    # Load user config (scan dirs, registrations, favorites).
    config = load_config()

    # 1. Add global index if it exists.
    global_path = get_global_db_path()
    if global_path.exists():
        projects.append(
            _project_info(
                name="Global Index",
                project_path=str(global_path.parent),
                db_path=global_path,
                favorites=config.favorites,
                is_global=True,
            )
        )
        seen_paths.add(str(global_path))

    # 2. Use CONFIGURABLE scan directories.
    for scan_dir in config.scan_directories:
        scan_path = Path(scan_dir).expanduser()
        if not scan_path.exists():
            continue
        for db_path in scan_directory_for_cortex(scan_path):
            if str(db_path) in seen_paths:
                continue
            project_dir = db_path.parent.parent  # strip .omni-cortex/cortex.db
            projects.append(
                _project_info(
                    name=project_dir.name,
                    project_path=str(project_dir),
                    db_path=db_path,
                    favorites=config.favorites,
                )
            )
            seen_paths.add(str(db_path))

    # 3. Add REGISTERED projects (manual additions).
    for reg in config.registered_projects:
        db_path = Path(reg.path) / ".omni-cortex" / "cortex.db"
        if db_path.exists() and str(db_path) not in seen_paths:
            projects.append(
                _project_info(
                    name=Path(reg.path).name,
                    project_path=reg.path,
                    db_path=db_path,
                    favorites=config.favorites,
                    is_registered=True,
                    display_name=reg.display_name,
                )
            )
            seen_paths.add(str(db_path))

    # 4. Add paths from the global db that we haven't seen.
    for project_path in get_projects_from_global_db():
        db_path = Path(project_path) / ".omni-cortex" / "cortex.db"
        if db_path.exists() and str(db_path) not in seen_paths:
            projects.append(
                _project_info(
                    name=Path(project_path).name,
                    project_path=project_path,
                    db_path=db_path,
                    favorites=config.favorites,
                )
            )
            seen_paths.add(str(db_path))

    # Sort: global always first, then favorites, then by last_modified
    # (most recent first).
    projects.sort(
        key=lambda p: (
            not p.is_global,
            not p.is_favorite,
            -(p.last_modified.timestamp() if p.last_modified else 0),
        )
    )

    return projects
|
|
159
|
+
|
|
160
|
+
|
|
161
|
+
if __name__ == "__main__":
    # Smoke test: print every discovered project with its memory count.
    for project in scan_projects():
        print(f"{project.name}: {project.db_path} ({project.memory_count} memories)")
|
|
@@ -0,0 +1,111 @@
|
|
|
1
|
+
"""Prompt injection protection for Omni-Cortex."""
|
|
2
|
+
|
|
3
|
+
import re
|
|
4
|
+
import logging
|
|
5
|
+
from html import escape as html_escape
|
|
6
|
+
from typing import Optional
|
|
7
|
+
|
|
8
|
+
logger = logging.getLogger(__name__)
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def xml_escape(text: str) -> str:
    """Escape ``<``, ``>``, ``&`` and quote characters in *text*.

    Used to neutralise XML/HTML-like delimiters before untrusted content
    is embedded in a structured prompt, so injected text cannot open or
    close prompt tags.
    """
    escaped = html_escape(text, quote=True)
    return escaped
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def build_safe_prompt(
    system_instruction: str,
    user_data: dict[str, str],
    user_question: str
) -> str:
    """Assemble a prompt that separates trusted instructions from user data.

    Untrusted values (*user_data* sections and *user_question*) are
    XML-escaped and wrapped in named tags, so injected content cannot
    masquerade as instructions.  *system_instruction* is trusted and left
    untouched.

    Args:
        system_instruction: Trusted system prompt (not escaped)
        user_data: Dict of data sections to include (escaped); empty
            values are skipped
        user_question: User's question (escaped)

    Returns:
        Safely structured prompt string
    """
    lines = [system_instruction, ""]

    # Wrap each non-empty data section in its own escaped tag pair.
    for tag, value in user_data.items():
        if not value:
            continue
        lines += [f"<{tag}>", xml_escape(value), f"</{tag}>", ""]

    # The question goes last, also escaped.
    lines += ["<user_question>", xml_escape(user_question), "</user_question>"]

    return "\n".join(lines)
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
# Known prompt injection patterns as (regex, description) pairs.  These are
# heuristics used for detection/logging, not hard blocking.
INJECTION_PATTERNS = [
    (r'(?i)(ignore|disregard|forget)\s+(all\s+)?(previous|prior|above)\s+instructions?',
     'instruction override attempt'),
    (r'(?i)(new\s+)?system\s+(prompt|instruction|message)',
     'system prompt manipulation'),
    (r'(?i)you\s+(must|should|will|are\s+required\s+to)\s+now',
     'imperative command injection'),
    (r'(?i)(hidden|secret|special)\s+instruction',
     'hidden instruction claim'),
    (r'(?i)\[/?system\]|\[/?inst\]|<\/?system>|<\/?instruction>',
     'fake delimiter injection'),
    # Leading word boundaries prevent false positives on ordinary words that
    # merely contain these tokens ("dance" matching DAN, "grandan" matching
    # DAN, etc.).  "bypass"/"jailbreak" keep only a left boundary so suffixed
    # forms ("bypassing", "jailbreaks") are still caught.
    (r'(?i)\bbypass|\bjailbreak|\bDAN\b|\bGODMODE\b',
     'known jailbreak signature'),
]
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
def detect_injection_patterns(content: str) -> list[str]:
    """Return descriptions of known injection patterns found in *content*.

    An empty list means no known pattern matched.
    """
    return [
        description
        for pattern, description in INJECTION_PATTERNS
        if re.search(pattern, content)
    ]
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
def sanitize_memory_content(content: str, warn_on_detection: bool = True) -> tuple[str, list[str]]:
    """Scan memory content for injection attempts.

    The content itself is returned unchanged — escaping happens later, at
    prompt-assembly time — but any detected patterns are reported (and
    optionally logged) so callers can react.

    Args:
        content: Raw memory content
        warn_on_detection: If True, log warnings for detected patterns

    Returns:
        Tuple of (sanitized_content, list_of_detected_patterns)
    """
    detected = detect_injection_patterns(content)
    if warn_on_detection and detected:
        logger.warning(f"Potential injection patterns detected: {detected}")
    return content, detected
|
|
103
|
+
|
|
104
|
+
|
|
105
|
+
def sanitize_context_data(data: str) -> str:
    """Return *data* with XML/HTML special characters escaped.

    This is the primary defense: all user-supplied data should pass through
    here (or an equivalent escape) before inclusion in prompts to prevent
    injection.
    """
    # Same policy as xml_escape(): html.escape with quoting enabled.
    return html_escape(data, quote=True)
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
[project]
|
|
2
|
+
name = "omni-cortex-dashboard"
|
|
3
|
+
version = "0.1.0"
|
|
4
|
+
description = "Web dashboard for Omni-Cortex memory system"
|
|
5
|
+
requires-python = ">=3.11"
|
|
6
|
+
dependencies = [
|
|
7
|
+
"fastapi>=0.115.0",
|
|
8
|
+
"uvicorn[standard]>=0.30.0",
|
|
9
|
+
"websockets>=12.0",
|
|
10
|
+
"watchdog>=4.0.0",
|
|
11
|
+
"google-generativeai>=0.8.0",
|
|
12
|
+
"python-dotenv>=1.0.0",
|
|
13
|
+
]
|
|
14
|
+
|
|
15
|
+
[project.optional-dependencies]
|
|
16
|
+
dev = ["pytest", "ruff", "httpx"]
|
|
17
|
+
|
|
18
|
+
[tool.ruff]
|
|
19
|
+
line-length = 100
|
|
20
|
+
target-version = "py311"
|
|
21
|
+
|
|
22
|
+
[tool.ruff.lint]
|
|
23
|
+
select = ["E", "F", "I", "W"]
|