basic-memory 0.12.3-py3-none-any.whl → 0.13.0b2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- basic_memory/__init__.py +7 -1
- basic_memory/alembic/env.py +1 -1
- basic_memory/alembic/versions/5fe1ab1ccebe_add_projects_table.py +108 -0
- basic_memory/alembic/versions/cc7172b46608_update_search_index_schema.py +0 -5
- basic_memory/api/app.py +43 -13
- basic_memory/api/routers/__init__.py +4 -2
- basic_memory/api/routers/directory_router.py +63 -0
- basic_memory/api/routers/importer_router.py +152 -0
- basic_memory/api/routers/knowledge_router.py +127 -38
- basic_memory/api/routers/management_router.py +78 -0
- basic_memory/api/routers/memory_router.py +4 -59
- basic_memory/api/routers/project_router.py +230 -0
- basic_memory/api/routers/prompt_router.py +260 -0
- basic_memory/api/routers/search_router.py +3 -21
- basic_memory/api/routers/utils.py +130 -0
- basic_memory/api/template_loader.py +292 -0
- basic_memory/cli/app.py +20 -21
- basic_memory/cli/commands/__init__.py +2 -1
- basic_memory/cli/commands/auth.py +136 -0
- basic_memory/cli/commands/db.py +3 -3
- basic_memory/cli/commands/import_chatgpt.py +31 -207
- basic_memory/cli/commands/import_claude_conversations.py +16 -142
- basic_memory/cli/commands/import_claude_projects.py +33 -143
- basic_memory/cli/commands/import_memory_json.py +26 -83
- basic_memory/cli/commands/mcp.py +71 -18
- basic_memory/cli/commands/project.py +99 -67
- basic_memory/cli/commands/status.py +19 -9
- basic_memory/cli/commands/sync.py +44 -58
- basic_memory/cli/main.py +1 -5
- basic_memory/config.py +144 -88
- basic_memory/db.py +6 -4
- basic_memory/deps.py +227 -30
- basic_memory/importers/__init__.py +27 -0
- basic_memory/importers/base.py +79 -0
- basic_memory/importers/chatgpt_importer.py +222 -0
- basic_memory/importers/claude_conversations_importer.py +172 -0
- basic_memory/importers/claude_projects_importer.py +148 -0
- basic_memory/importers/memory_json_importer.py +93 -0
- basic_memory/importers/utils.py +58 -0
- basic_memory/markdown/entity_parser.py +5 -2
- basic_memory/mcp/auth_provider.py +270 -0
- basic_memory/mcp/external_auth_provider.py +321 -0
- basic_memory/mcp/project_session.py +103 -0
- basic_memory/mcp/prompts/continue_conversation.py +18 -68
- basic_memory/mcp/prompts/recent_activity.py +19 -3
- basic_memory/mcp/prompts/search.py +14 -140
- basic_memory/mcp/prompts/utils.py +3 -3
- basic_memory/mcp/{tools → resources}/project_info.py +6 -2
- basic_memory/mcp/server.py +82 -8
- basic_memory/mcp/supabase_auth_provider.py +463 -0
- basic_memory/mcp/tools/__init__.py +20 -0
- basic_memory/mcp/tools/build_context.py +11 -1
- basic_memory/mcp/tools/canvas.py +15 -2
- basic_memory/mcp/tools/delete_note.py +12 -4
- basic_memory/mcp/tools/edit_note.py +297 -0
- basic_memory/mcp/tools/list_directory.py +154 -0
- basic_memory/mcp/tools/move_note.py +87 -0
- basic_memory/mcp/tools/project_management.py +300 -0
- basic_memory/mcp/tools/read_content.py +15 -6
- basic_memory/mcp/tools/read_note.py +17 -5
- basic_memory/mcp/tools/recent_activity.py +11 -2
- basic_memory/mcp/tools/search.py +10 -1
- basic_memory/mcp/tools/utils.py +137 -12
- basic_memory/mcp/tools/write_note.py +11 -15
- basic_memory/models/__init__.py +3 -2
- basic_memory/models/knowledge.py +16 -4
- basic_memory/models/project.py +80 -0
- basic_memory/models/search.py +8 -5
- basic_memory/repository/__init__.py +2 -0
- basic_memory/repository/entity_repository.py +8 -3
- basic_memory/repository/observation_repository.py +35 -3
- basic_memory/repository/project_info_repository.py +3 -2
- basic_memory/repository/project_repository.py +85 -0
- basic_memory/repository/relation_repository.py +8 -2
- basic_memory/repository/repository.py +107 -15
- basic_memory/repository/search_repository.py +87 -27
- basic_memory/schemas/__init__.py +6 -0
- basic_memory/schemas/directory.py +30 -0
- basic_memory/schemas/importer.py +34 -0
- basic_memory/schemas/memory.py +26 -12
- basic_memory/schemas/project_info.py +112 -2
- basic_memory/schemas/prompt.py +90 -0
- basic_memory/schemas/request.py +56 -2
- basic_memory/schemas/search.py +1 -1
- basic_memory/services/__init__.py +2 -1
- basic_memory/services/context_service.py +208 -95
- basic_memory/services/directory_service.py +167 -0
- basic_memory/services/entity_service.py +385 -5
- basic_memory/services/exceptions.py +6 -0
- basic_memory/services/file_service.py +14 -15
- basic_memory/services/initialization.py +144 -67
- basic_memory/services/link_resolver.py +16 -8
- basic_memory/services/project_service.py +548 -0
- basic_memory/services/search_service.py +77 -2
- basic_memory/sync/background_sync.py +25 -0
- basic_memory/sync/sync_service.py +10 -9
- basic_memory/sync/watch_service.py +63 -39
- basic_memory/templates/prompts/continue_conversation.hbs +110 -0
- basic_memory/templates/prompts/search.hbs +101 -0
- {basic_memory-0.12.3.dist-info → basic_memory-0.13.0b2.dist-info}/METADATA +23 -1
- basic_memory-0.13.0b2.dist-info/RECORD +132 -0
- basic_memory/api/routers/project_info_router.py +0 -274
- basic_memory/mcp/main.py +0 -24
- basic_memory-0.12.3.dist-info/RECORD +0 -100
- {basic_memory-0.12.3.dist-info → basic_memory-0.13.0b2.dist-info}/WHEEL +0 -0
- {basic_memory-0.12.3.dist-info → basic_memory-0.13.0b2.dist-info}/entry_points.txt +0 -0
- {basic_memory-0.12.3.dist-info → basic_memory-0.13.0b2.dist-info}/licenses/LICENSE +0 -0
basic_memory/services/project_service.py (new file)
@@ -0,0 +1,548 @@
+"""Project management service for Basic Memory."""
+
+import json
+import os
+from datetime import datetime
+from pathlib import Path
+from typing import Dict, Optional, Sequence
+
+from loguru import logger
+from sqlalchemy import text
+
+from basic_memory.config import config, app_config
+from basic_memory.models import Project
+from basic_memory.repository.project_repository import ProjectRepository
+from basic_memory.schemas import (
+    ActivityMetrics,
+    ProjectInfoResponse,
+    ProjectStatistics,
+    SystemStatus,
+)
+from basic_memory.config import WATCH_STATUS_JSON
+from basic_memory.utils import generate_permalink
+from basic_memory.config import config_manager
+
+class ProjectService:
+    """Service for managing Basic Memory projects."""
+
+    repository: ProjectRepository
+
+    def __init__(self, repository: ProjectRepository):
+        """Initialize the project service."""
+        super().__init__()
+        self.repository = repository
+
+    @property
+    def projects(self) -> Dict[str, str]:
+        """Get all configured projects.
+
+        Returns:
+            Dict mapping project names to their file paths
+        """
+        return config_manager.projects
+
+    @property
+    def default_project(self) -> str:
+        """Get the name of the default project.
+
+        Returns:
+            The name of the default project
+        """
+        return config_manager.default_project
+
+    @property
+    def current_project(self) -> str:
+        """Get the name of the currently active project.
+
+        Returns:
+            The name of the current project
+        """
+        return os.environ.get("BASIC_MEMORY_PROJECT", config_manager.default_project)
+
+    async def list_projects(self) -> Sequence[Project]:
+        return await self.repository.find_all()
+
+    async def get_project(self, name: str) -> Optional[Project]:
+        """Get the file path for a project by name."""
+        return await self.repository.get_by_name(name)
+
+    async def add_project(self, name: str, path: str) -> None:
+        """Add a new project to the configuration and database.
+
+        Args:
+            name: The name of the project
+            path: The file path to the project directory
+
+        Raises:
+            ValueError: If the project already exists
+        """
+        if not self.repository:  # pragma: no cover
+            raise ValueError("Repository is required for add_project")
+
+        # Resolve to absolute path
+        resolved_path = os.path.abspath(os.path.expanduser(path))
+
+        # First add to config file (this will validate the project doesn't exist)
+        project_config = config_manager.add_project(name, resolved_path)
+
+        # Then add to database
+        project_data = {
+            "name": name,
+            "path": resolved_path,
+            "permalink": generate_permalink(project_config.name),
+            "is_active": True,
+            "is_default": False,
+        }
+        await self.repository.create(project_data)
+
+        logger.info(f"Project '{name}' added at {resolved_path}")
+
+    async def remove_project(self, name: str) -> None:
+        """Remove a project from configuration and database.
+
+        Args:
+            name: The name of the project to remove
+
+        Raises:
+            ValueError: If the project doesn't exist or is the default project
+        """
+        if not self.repository:  # pragma: no cover
+            raise ValueError("Repository is required for remove_project")
+
+        # First remove from config (this will validate the project exists and is not default)
+        config_manager.remove_project(name)
+
+        # Then remove from database
+        project = await self.repository.get_by_name(name)
+        if project:
+            await self.repository.delete(project.id)
+
+        logger.info(f"Project '{name}' removed from configuration and database")
+
+    async def set_default_project(self, name: str) -> None:
+        """Set the default project in configuration and database.
+
+        Args:
+            name: The name of the project to set as default
+
+        Raises:
+            ValueError: If the project doesn't exist
+        """
+        if not self.repository:  # pragma: no cover
+            raise ValueError("Repository is required for set_default_project")
+
+        # First update config file (this will validate the project exists)
+        config_manager.set_default_project(name)
+
+        # Then update database
+        project = await self.repository.get_by_name(name)
+        if project:
+            await self.repository.set_as_default(project.id)
+        else:
+            logger.error(f"Project '{name}' exists in config but not in database")
+
+        logger.info(f"Project '{name}' set as default in configuration and database")
+
+    async def synchronize_projects(self) -> None:  # pragma: no cover
+        """Synchronize projects between database and configuration.
+
+        Ensures that all projects in the configuration file exist in the database
+        and vice versa. This should be called during initialization to reconcile
+        any differences between the two sources.
+        """
+        if not self.repository:
+            raise ValueError("Repository is required for synchronize_projects")
+
+        logger.info("Synchronizing projects between database and configuration")
+
+        # Get all projects from database
+        db_projects = await self.repository.get_active_projects()
+        db_projects_by_name = {p.name: p for p in db_projects}
+
+        # Get all projects from configuration
+        config_projects = config_manager.projects
+
+        # Add projects that exist in config but not in DB
+        for name, path in config_projects.items():
+            if name not in db_projects_by_name:
+                logger.info(f"Adding project '{name}' to database")
+                project_data = {
+                    "name": name,
+                    "path": path,
+                    "permalink": name.lower().replace(" ", "-"),
+                    "is_active": True,
+                    "is_default": (name == config_manager.default_project),
+                }
+                await self.repository.create(project_data)
+
+        # Add projects that exist in DB but not in config to config
+        for name, project in db_projects_by_name.items():
+            if name not in config_projects:
+                logger.info(f"Adding project '{name}' to configuration")
+                config_manager.add_project(name, project.path)
+
+        # Make sure default project is synchronized
+        db_default = next((p for p in db_projects if p.is_default), None)
+        config_default = config_manager.default_project
+
+        if db_default and db_default.name != config_default:
+            # Update config to match DB default
+            logger.info(f"Updating default project in config to '{db_default.name}'")
+            config_manager.set_default_project(db_default.name)
+        elif not db_default and config_default in db_projects_by_name:
+            # Update DB to match config default
+            logger.info(f"Updating default project in database to '{config_default}'")
+            project = db_projects_by_name[config_default]
+            await self.repository.set_as_default(project.id)
+
+        logger.info("Project synchronization complete")
+
+    async def update_project(  # pragma: no cover
+        self, name: str, updated_path: Optional[str] = None, is_active: Optional[bool] = None
+    ) -> None:
+        """Update project information in both config and database.
+
+        Args:
+            name: The name of the project to update
+            updated_path: Optional new path for the project
+            is_active: Optional flag to set project active status
+
+        Raises:
+            ValueError: If project doesn't exist or repository isn't initialized
+        """
+        if not self.repository:
+            raise ValueError("Repository is required for update_project")
+
+        # Validate project exists in config
+        if name not in config_manager.projects:
+            raise ValueError(f"Project '{name}' not found in configuration")
+
+        # Get project from database
+        project = await self.repository.get_by_name(name)
+        if not project:
+            logger.error(f"Project '{name}' exists in config but not in database")
+            return
+
+        # Update path if provided
+        if updated_path:
+            resolved_path = os.path.abspath(os.path.expanduser(updated_path))
+
+            # Update in config
+            projects = config_manager.config.projects.copy()
+            projects[name] = resolved_path
+            config_manager.config.projects = projects
+            config_manager.save_config(config_manager.config)
+
+            # Update in database
+            project.path = resolved_path
+            await self.repository.update(project.id, project)
+
+            logger.info(f"Updated path for project '{name}' to {resolved_path}")
+
+        # Update active status if provided
+        if is_active is not None:
+            project.is_active = is_active
+            await self.repository.update(project.id, project)
+            logger.info(f"Set active status for project '{name}' to {is_active}")
+
+        # If project was made inactive and it was the default, we need to pick a new default
+        if is_active is False and project.is_default:
+            # Find another active project
+            active_projects = await self.repository.get_active_projects()
+            if active_projects:
+                new_default = active_projects[0]
+                await self.repository.set_as_default(new_default.id)
+                config_manager.set_default_project(new_default.name)
+                logger.info(
+                    f"Changed default project to '{new_default.name}' as '{name}' was deactivated"
+                )
+
+    async def get_project_info(self) -> ProjectInfoResponse:
+        """Get comprehensive information about the current Basic Memory project.
+
+        Returns:
+            Comprehensive project information and statistics
+        """
+        if not self.repository:  # pragma: no cover
+            raise ValueError("Repository is required for get_project_info")
+
+        # Get statistics
+        statistics = await self.get_statistics()
+
+        # Get activity metrics
+        activity = await self.get_activity_metrics()
+
+        # Get system status
+        system = self.get_system_status()
+
+        # Get current project information from config
+        project_name = config.project
+        project_path = str(config.home)
+
+        # Get enhanced project information from database
+        db_projects = await self.repository.get_active_projects()
+        db_projects_by_name = {p.name: p for p in db_projects}
+
+        # Get default project info
+        default_project = config_manager.default_project
+
+        # Convert config projects to include database info
+        enhanced_projects = {}
+        for name, path in config_manager.projects.items():
+            db_project = db_projects_by_name.get(name)
+            enhanced_projects[name] = {
+                "path": path,
+                "active": db_project.is_active if db_project else True,
+                "id": db_project.id if db_project else None,
+                "is_default": (name == default_project),
+                "permalink": db_project.permalink if db_project else name.lower().replace(" ", "-"),
+            }
+
+        # Construct the response
+        return ProjectInfoResponse(
+            project_name=project_name,
+            project_path=project_path,
+            available_projects=enhanced_projects,
+            default_project=default_project,
+            statistics=statistics,
+            activity=activity,
+            system=system,
+        )
+
+    async def get_statistics(self) -> ProjectStatistics:
+        """Get statistics about the current project."""
+        if not self.repository:  # pragma: no cover
+            raise ValueError("Repository is required for get_statistics")
+
+        # Get basic counts
+        entity_count_result = await self.repository.execute_query(
+            text("SELECT COUNT(*) FROM entity")
+        )
+        total_entities = entity_count_result.scalar() or 0
+
+        observation_count_result = await self.repository.execute_query(
+            text("SELECT COUNT(*) FROM observation")
+        )
+        total_observations = observation_count_result.scalar() or 0
+
+        relation_count_result = await self.repository.execute_query(
+            text("SELECT COUNT(*) FROM relation")
+        )
+        total_relations = relation_count_result.scalar() or 0
+
+        unresolved_count_result = await self.repository.execute_query(
+            text("SELECT COUNT(*) FROM relation WHERE to_id IS NULL")
+        )
+        total_unresolved = unresolved_count_result.scalar() or 0
+
+        # Get entity counts by type
+        entity_types_result = await self.repository.execute_query(
+            text("SELECT entity_type, COUNT(*) FROM entity GROUP BY entity_type")
+        )
+        entity_types = {row[0]: row[1] for row in entity_types_result.fetchall()}
+
+        # Get observation counts by category
+        category_result = await self.repository.execute_query(
+            text("SELECT category, COUNT(*) FROM observation GROUP BY category")
+        )
+        observation_categories = {row[0]: row[1] for row in category_result.fetchall()}
+
+        # Get relation counts by type
+        relation_types_result = await self.repository.execute_query(
+            text("SELECT relation_type, COUNT(*) FROM relation GROUP BY relation_type")
+        )
+        relation_types = {row[0]: row[1] for row in relation_types_result.fetchall()}
+
+        # Find most connected entities (most outgoing relations)
+        connected_result = await self.repository.execute_query(
+            text("""
+            SELECT e.id, e.title, e.permalink, COUNT(r.id) AS relation_count, file_path
+            FROM entity e
+            JOIN relation r ON e.id = r.from_id
+            GROUP BY e.id
+            ORDER BY relation_count DESC
+            LIMIT 10
+            """)
+        )
+        most_connected = [
+            {
+                "id": row[0],
+                "title": row[1],
+                "permalink": row[2],
+                "relation_count": row[3],
+                "file_path": row[4],
+            }
+            for row in connected_result.fetchall()
+        ]
+
+        # Count isolated entities (no relations)
+        isolated_result = await self.repository.execute_query(
+            text("""
+            SELECT COUNT(e.id)
+            FROM entity e
+            LEFT JOIN relation r1 ON e.id = r1.from_id
+            LEFT JOIN relation r2 ON e.id = r2.to_id
+            WHERE r1.id IS NULL AND r2.id IS NULL
+            """)
+        )
+        isolated_count = isolated_result.scalar() or 0
+
+        return ProjectStatistics(
+            total_entities=total_entities,
+            total_observations=total_observations,
+            total_relations=total_relations,
+            total_unresolved_relations=total_unresolved,
+            entity_types=entity_types,
+            observation_categories=observation_categories,
+            relation_types=relation_types,
+            most_connected_entities=most_connected,
+            isolated_entities=isolated_count,
+        )
+
+    async def get_activity_metrics(self) -> ActivityMetrics:
+        """Get activity metrics for the current project."""
+        if not self.repository:  # pragma: no cover
+            raise ValueError("Repository is required for get_activity_metrics")
+
+        # Get recently created entities
+        created_result = await self.repository.execute_query(
+            text("""
+            SELECT id, title, permalink, entity_type, created_at, file_path
+            FROM entity
+            ORDER BY created_at DESC
+            LIMIT 10
+            """)
+        )
+        recently_created = [
+            {
+                "id": row[0],
+                "title": row[1],
+                "permalink": row[2],
+                "entity_type": row[3],
+                "created_at": row[4],
+                "file_path": row[5],
+            }
+            for row in created_result.fetchall()
+        ]
+
+        # Get recently updated entities
+        updated_result = await self.repository.execute_query(
+            text("""
+            SELECT id, title, permalink, entity_type, updated_at, file_path
+            FROM entity
+            ORDER BY updated_at DESC
+            LIMIT 10
+            """)
+        )
+        recently_updated = [
+            {
+                "id": row[0],
+                "title": row[1],
+                "permalink": row[2],
+                "entity_type": row[3],
+                "updated_at": row[4],
+                "file_path": row[5],
+            }
+            for row in updated_result.fetchall()
+        ]
+
+        # Get monthly growth over the last 6 months
+        # Calculate the start of 6 months ago
+        now = datetime.now()
+        six_months_ago = datetime(
+            now.year - (1 if now.month <= 6 else 0), ((now.month - 6) % 12) or 12, 1
+        )
+
+        # Query for monthly entity creation
+        entity_growth_result = await self.repository.execute_query(
+            text(f"""
+            SELECT
+                strftime('%Y-%m', created_at) AS month,
+                COUNT(*) AS count
+            FROM entity
+            WHERE created_at >= '{six_months_ago.isoformat()}'
+            GROUP BY month
+            ORDER BY month
+            """)
+        )
+        entity_growth = {row[0]: row[1] for row in entity_growth_result.fetchall()}
+
+        # Query for monthly observation creation
+        observation_growth_result = await self.repository.execute_query(
+            text(f"""
+            SELECT
+                strftime('%Y-%m', created_at) AS month,
+                COUNT(*) AS count
+            FROM observation
+            INNER JOIN entity ON observation.entity_id = entity.id
+            WHERE entity.created_at >= '{six_months_ago.isoformat()}'
+            GROUP BY month
+            ORDER BY month
+            """)
+        )
+        observation_growth = {row[0]: row[1] for row in observation_growth_result.fetchall()}
+
+        # Query for monthly relation creation
+        relation_growth_result = await self.repository.execute_query(
+            text(f"""
+            SELECT
+                strftime('%Y-%m', created_at) AS month,
+                COUNT(*) AS count
+            FROM relation
+            INNER JOIN entity ON relation.from_id = entity.id
+            WHERE entity.created_at >= '{six_months_ago.isoformat()}'
+            GROUP BY month
+            ORDER BY month
+            """)
+        )
+        relation_growth = {row[0]: row[1] for row in relation_growth_result.fetchall()}
+
+        # Combine all monthly growth data
+        monthly_growth = {}
+        for month in set(
+            list(entity_growth.keys())
+            + list(observation_growth.keys())
+            + list(relation_growth.keys())
+        ):
+            monthly_growth[month] = {
+                "entities": entity_growth.get(month, 0),
+                "observations": observation_growth.get(month, 0),
+                "relations": relation_growth.get(month, 0),
+                "total": (
+                    entity_growth.get(month, 0)
+                    + observation_growth.get(month, 0)
+                    + relation_growth.get(month, 0)
+                ),
+            }
+
+        return ActivityMetrics(
+            recently_created=recently_created,
+            recently_updated=recently_updated,
+            monthly_growth=monthly_growth,
+        )
+
+    def get_system_status(self) -> SystemStatus:
+        """Get system status information."""
+        import basic_memory
+
+        # Get database information
+        db_path = app_config.database_path
+        db_size = db_path.stat().st_size if db_path.exists() else 0
+        db_size_readable = f"{db_size / (1024 * 1024):.2f} MB"
+
+        # Get watch service status if available
+        watch_status = None
+        watch_status_path = Path.home() / ".basic-memory" / WATCH_STATUS_JSON
+        if watch_status_path.exists():
+            try:
+                watch_status = json.loads(watch_status_path.read_text(encoding="utf-8"))
+            except Exception:  # pragma: no cover
+                pass
+
+        return SystemStatus(
+            version=basic_memory.__version__,
+            database_path=str(db_path),
+            database_size=db_size_readable,
+            watch_status=watch_status,
+            timestamp=datetime.now(),
+        )
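The new ProjectService keeps the config file and the projects table in step, writing config first and then the database. For orientation, a hedged usage sketch follows; the make_repository() wiring and the sample project name and path are placeholders rather than part of the release, and only the methods visible in the diff above are taken from the source.

# Hypothetical usage sketch -- not shipped in the wheel.
import asyncio

from basic_memory.repository.project_repository import ProjectRepository
from basic_memory.services.project_service import ProjectService


async def show_projects(repository: ProjectRepository) -> None:
    service = ProjectService(repository)

    # Config-backed views (no database round trip)
    print("configured projects:", service.projects)
    print("default project:", service.default_project)

    # Dual write: config file first, then a row in the projects table
    await service.add_project("notes", "~/basic-memory/notes")

    # Database-backed listing
    for project in await service.list_projects():
        print(project.name, "->", project.path)

    # Aggregated statistics, activity, and system status
    info = await service.get_project_info()
    print("entities indexed:", info.statistics.total_entities)

# asyncio.run(show_projects(make_repository()))  # repository/session wiring left to the caller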
basic_memory/services/search_service.py
@@ -1,5 +1,6 @@
 """Service for search operations."""
 
+import ast
 from datetime import datetime
 from typing import List, Optional, Set
 
@@ -66,7 +67,7 @@ class SearchService:
             logger.debug("no criteria passed to query")
             return []
 
-        logger.
+        logger.trace(f"Searching with query: {query}")
 
         after_date = (
             (
@@ -85,7 +86,7 @@ class SearchService:
            permalink_match=query.permalink_match,
            title=query.title,
            types=query.types,
-
+           search_item_types=query.entity_types,
            after_date=after_date,
            limit=limit,
            offset=offset,
@@ -117,6 +118,38 @@ class SearchService:
 
        return variants
 
+    def _extract_entity_tags(self, entity: Entity) -> List[str]:
+        """Extract tags from entity metadata for search indexing.
+
+        Handles multiple tag formats:
+        - List format: ["tag1", "tag2"]
+        - String format: "['tag1', 'tag2']" or "[tag1, tag2]"
+        - Empty: [] or "[]"
+
+        Returns a list of tag strings for search indexing.
+        """
+        if not entity.entity_metadata or "tags" not in entity.entity_metadata:
+            return []
+
+        tags = entity.entity_metadata["tags"]
+
+        # Handle list format (preferred)
+        if isinstance(tags, list):
+            return [str(tag) for tag in tags if tag]
+
+        # Handle string format (legacy)
+        if isinstance(tags, str):
+            try:
+                # Parse string representation of list
+                parsed_tags = ast.literal_eval(tags)
+                if isinstance(parsed_tags, list):
+                    return [str(tag) for tag in parsed_tags if tag]
+            except (ValueError, SyntaxError):
+                # If parsing fails, treat as single tag
+                return [tags] if tags.strip() else []
+
+        return []  # pragma: no cover
+
     async def index_entity(
         self,
         entity: Entity,
@@ -156,6 +189,7 @@ class SearchService:
                },
                created_at=entity.created_at,
                updated_at=entity.updated_at,
+               project_id=entity.project_id,
            )
        )
 
@@ -169,16 +203,20 @@ class SearchService:
        1. Entities
        - permalink: direct from entity (e.g., "specs/search")
        - file_path: physical file location
+       - project_id: project context for isolation
 
        2. Observations
        - permalink: entity permalink + /observations/id (e.g., "specs/search/observations/123")
        - file_path: parent entity's file (where observation is defined)
+       - project_id: inherited from parent entity
 
        3. Relations (only index outgoing relations defined in this file)
        - permalink: from_entity/relation_type/to_entity (e.g., "specs/search/implements/features/search-ui")
        - file_path: source entity's file (where relation is defined)
+       - project_id: inherited from source entity
 
        Each type gets its own row in the search index with appropriate metadata.
+       The project_id is automatically added by the repository when indexing.
        """
 
        content_stems = []
@@ -196,6 +234,11 @@ class SearchService:
 
        content_stems.extend(self._generate_variants(entity.file_path))
 
+       # Add entity tags from frontmatter to search content
+       entity_tags = self._extract_entity_tags(entity)
+       if entity_tags:
+           content_stems.extend(entity_tags)
+
        entity_content_stems = "\n".join(p for p in content_stems if p and p.strip())
 
        # Index entity
@@ -214,6 +257,7 @@ class SearchService:
                },
                created_at=entity.created_at,
                updated_at=entity.updated_at,
+               project_id=entity.project_id,
            )
        )
 
@@ -239,6 +283,7 @@ class SearchService:
                },
                created_at=entity.created_at,
                updated_at=entity.updated_at,
+               project_id=entity.project_id,
            )
        )
 
@@ -268,6 +313,7 @@ class SearchService:
                relation_type=rel.relation_type,
                created_at=entity.created_at,
                updated_at=entity.updated_at,
+               project_id=entity.project_id,
            )
        )
 
@@ -278,3 +324,32 @@ class SearchService:
    async def delete_by_entity_id(self, entity_id: int):
        """Delete an item from the search index."""
        await self.repository.delete_by_entity_id(entity_id)
+
+    async def handle_delete(self, entity: Entity):
+        """Handle complete entity deletion from search index including observations and relations.
+
+        This replicates the logic from sync_service.handle_delete() to properly clean up
+        all search index entries for an entity and its related data.
+        """
+        logger.debug(
+            f"Cleaning up search index for entity_id={entity.id}, file_path={entity.file_path}, "
+            f"observations={len(entity.observations)}, relations={len(entity.outgoing_relations)}"
+        )
+
+        # Clean up search index - same logic as sync_service.handle_delete()
+        permalinks = (
+            [entity.permalink]
+            + [o.permalink for o in entity.observations]
+            + [r.permalink for r in entity.outgoing_relations]
+        )
+
+        logger.debug(
+            f"Deleting search index entries for entity_id={entity.id}, "
+            f"index_entries={len(permalinks)}"
+        )
+
+        for permalink in permalinks:
+            if permalink:
+                await self.delete_by_permalink(permalink)
+            else:
+                await self.delete_by_entity_id(entity.id)
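The other notable addition above is _extract_entity_tags, which normalizes frontmatter tags before they are folded into the search index. A minimal standalone sketch of that parsing rule follows; the normalize_tags name and the sample values are illustrative only and are not part of the package.

# Standalone sketch of the tag-normalization rule used by the new
# _extract_entity_tags method: lists pass through, legacy string-encoded
# lists go through ast.literal_eval, and unparseable strings fall back
# to a single tag. Sample values below are made up for illustration.
import ast
from typing import List, Optional, Union


def normalize_tags(tags: Union[List[str], str, None]) -> List[str]:
    if not tags:
        return []
    if isinstance(tags, list):
        return [str(tag) for tag in tags if tag]
    if isinstance(tags, str):
        try:
            parsed = ast.literal_eval(tags)
            if isinstance(parsed, list):
                return [str(tag) for tag in parsed if tag]
        except (ValueError, SyntaxError):
            # Unparseable strings are kept as one tag
            return [tags] if tags.strip() else []
    return []


assert normalize_tags(["search", "index"]) == ["search", "index"]
assert normalize_tags("['search', 'index']") == ["search", "index"]
assert normalize_tags("just-one-tag") == ["just-one-tag"]
assert normalize_tags("[]") == []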