basic-memory 0.8.0__py3-none-any.whl → 0.9.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of basic-memory might be problematic; see the registry's advisory page for more details.

Files changed (75)
  1. basic_memory/__init__.py +1 -1
  2. basic_memory/alembic/migrations.py +4 -9
  3. basic_memory/alembic/versions/cc7172b46608_update_search_index_schema.py +106 -0
  4. basic_memory/api/app.py +9 -6
  5. basic_memory/api/routers/__init__.py +2 -1
  6. basic_memory/api/routers/knowledge_router.py +30 -4
  7. basic_memory/api/routers/memory_router.py +3 -2
  8. basic_memory/api/routers/project_info_router.py +275 -0
  9. basic_memory/api/routers/search_router.py +22 -4
  10. basic_memory/cli/app.py +54 -3
  11. basic_memory/cli/commands/__init__.py +15 -2
  12. basic_memory/cli/commands/db.py +9 -13
  13. basic_memory/cli/commands/import_chatgpt.py +26 -30
  14. basic_memory/cli/commands/import_claude_conversations.py +27 -29
  15. basic_memory/cli/commands/import_claude_projects.py +29 -31
  16. basic_memory/cli/commands/import_memory_json.py +26 -28
  17. basic_memory/cli/commands/mcp.py +7 -1
  18. basic_memory/cli/commands/project.py +119 -0
  19. basic_memory/cli/commands/project_info.py +167 -0
  20. basic_memory/cli/commands/status.py +7 -9
  21. basic_memory/cli/commands/sync.py +54 -9
  22. basic_memory/cli/commands/{tools.py → tool.py} +92 -19
  23. basic_memory/cli/main.py +40 -1
  24. basic_memory/config.py +155 -7
  25. basic_memory/db.py +19 -4
  26. basic_memory/deps.py +10 -3
  27. basic_memory/file_utils.py +32 -16
  28. basic_memory/markdown/utils.py +5 -0
  29. basic_memory/mcp/main.py +1 -2
  30. basic_memory/mcp/prompts/__init__.py +6 -2
  31. basic_memory/mcp/prompts/ai_assistant_guide.py +6 -8
  32. basic_memory/mcp/prompts/continue_conversation.py +65 -126
  33. basic_memory/mcp/prompts/recent_activity.py +55 -13
  34. basic_memory/mcp/prompts/search.py +72 -17
  35. basic_memory/mcp/prompts/utils.py +139 -82
  36. basic_memory/mcp/server.py +1 -1
  37. basic_memory/mcp/tools/__init__.py +11 -22
  38. basic_memory/mcp/tools/build_context.py +85 -0
  39. basic_memory/mcp/tools/canvas.py +17 -19
  40. basic_memory/mcp/tools/delete_note.py +28 -0
  41. basic_memory/mcp/tools/project_info.py +51 -0
  42. basic_memory/mcp/tools/{resource.py → read_content.py} +42 -5
  43. basic_memory/mcp/tools/read_note.py +190 -0
  44. basic_memory/mcp/tools/recent_activity.py +100 -0
  45. basic_memory/mcp/tools/search.py +56 -17
  46. basic_memory/mcp/tools/utils.py +245 -17
  47. basic_memory/mcp/tools/write_note.py +124 -0
  48. basic_memory/models/search.py +2 -1
  49. basic_memory/repository/entity_repository.py +3 -2
  50. basic_memory/repository/project_info_repository.py +9 -0
  51. basic_memory/repository/repository.py +23 -6
  52. basic_memory/repository/search_repository.py +33 -10
  53. basic_memory/schemas/__init__.py +12 -0
  54. basic_memory/schemas/memory.py +3 -2
  55. basic_memory/schemas/project_info.py +96 -0
  56. basic_memory/schemas/search.py +27 -32
  57. basic_memory/services/context_service.py +3 -3
  58. basic_memory/services/entity_service.py +8 -2
  59. basic_memory/services/file_service.py +105 -53
  60. basic_memory/services/link_resolver.py +5 -45
  61. basic_memory/services/search_service.py +45 -16
  62. basic_memory/sync/sync_service.py +274 -39
  63. basic_memory/sync/watch_service.py +160 -30
  64. basic_memory/utils.py +40 -40
  65. basic_memory-0.9.0.dist-info/METADATA +736 -0
  66. basic_memory-0.9.0.dist-info/RECORD +99 -0
  67. basic_memory/mcp/prompts/json_canvas_spec.py +0 -25
  68. basic_memory/mcp/tools/knowledge.py +0 -68
  69. basic_memory/mcp/tools/memory.py +0 -177
  70. basic_memory/mcp/tools/notes.py +0 -201
  71. basic_memory-0.8.0.dist-info/METADATA +0 -379
  72. basic_memory-0.8.0.dist-info/RECORD +0 -91
  73. {basic_memory-0.8.0.dist-info → basic_memory-0.9.0.dist-info}/WHEEL +0 -0
  74. {basic_memory-0.8.0.dist-info → basic_memory-0.9.0.dist-info}/entry_points.txt +0 -0
  75. {basic_memory-0.8.0.dist-info → basic_memory-0.9.0.dist-info}/licenses/LICENSE +0 -0
basic_memory/__init__.py CHANGED
@@ -1,3 +1,3 @@
1
1
  """basic-memory - Local-first knowledge management combining Zettelkasten with knowledge graphs"""
2
2
 
3
- __version__ = "0.8.0"
3
+ __version__ = "0.9.0"
@@ -1,6 +1,5 @@
1
1
  """Functions for managing database migrations."""
2
2
 
3
- import asyncio
4
3
  from pathlib import Path
5
4
  from loguru import logger
6
5
  from alembic.config import Config
@@ -10,20 +9,16 @@ from alembic import command
10
9
def get_alembic_config() -> Config:  # pragma: no cover
    """Build an alembic Config pointing at the bundled migration scripts.

    Both alembic.ini and the migration scripts live alongside this module,
    so all paths are derived from ``__file__`` rather than the current
    working directory.
    """
    here = Path(__file__).parent
    cfg = Config(here / "alembic.ini")
    cfg.set_main_option("script_location", str(here))
    return cfg
18
17
 
19
18
 
20
def reset_database():  # pragma: no cover
    """Drop and recreate all tables.

    Downgrades the schema to base (dropping everything managed by alembic)
    and then upgrades back to head, leaving a freshly-created empty schema.
    """
    logger.info("Resetting database...")
    cfg = get_alembic_config()
    command.downgrade(cfg, "base")
    command.upgrade(cfg, "head")
@@ -0,0 +1,106 @@
1
+ """Update search index schema
2
+
3
+ Revision ID: cc7172b46608
4
+ Revises: 502b60eaa905
5
+ Create Date: 2025-02-28 18:48:23.244941
6
+
7
+ """
8
+
9
+ from typing import Sequence, Union
10
+
11
+ from alembic import op
12
+
13
+
14
+ # revision identifiers, used by Alembic.
15
+ revision: str = "cc7172b46608"
16
+ down_revision: Union[str, None] = "502b60eaa905"
17
+ branch_labels: Union[str, Sequence[str], None] = None
18
+ depends_on: Union[str, Sequence[str], None] = None
19
+
20
+
21
def upgrade() -> None:
    """Upgrade database schema to use new search index with content_stems and content_snippet."""

    # FTS5 virtual tables cannot be altered in place, so drop and recreate.
    op.execute("DROP TABLE IF EXISTS search_index")

    # New schema: content is split into searchable stems plus a display snippet.
    op.execute("""
    CREATE VIRTUAL TABLE IF NOT EXISTS search_index USING fts5(
        -- Core entity fields
        id UNINDEXED,             -- Row ID
        title,                    -- Title for searching
        content_stems,            -- Main searchable content split into stems
        content_snippet,          -- File content snippet for display
        permalink,                -- Stable identifier (now indexed for path search)
        file_path UNINDEXED,      -- Physical location
        type UNINDEXED,           -- entity/relation/observation

        -- Relation fields
        from_id UNINDEXED,        -- Source entity
        to_id UNINDEXED,          -- Target entity
        relation_type UNINDEXED,  -- Type of relation

        -- Observation fields
        entity_id UNINDEXED,      -- Parent entity
        category UNINDEXED,       -- Observation category

        -- Common fields
        metadata UNINDEXED,       -- JSON metadata
        created_at UNINDEXED,     -- Creation timestamp
        updated_at UNINDEXED,     -- Last update

        -- Configuration
        tokenize='unicode61 tokenchars 0x2F',  -- Hex code for /
        prefix='1,2,3,4'          -- Support longer prefixes for paths
    );
    """)

    # Recreating the table leaves the index empty; the user must resync to repopulate it.
    print("\n------------------------------------------------------------------")
    print("IMPORTANT: After migration completes, manually run the reindex command:")
    print("basic-memory sync")
    print("------------------------------------------------------------------\n")
64
+
65
+
66
def downgrade() -> None:
    """Downgrade database schema to use old search index."""
    # FTS5 virtual tables cannot be altered in place, so drop and recreate.
    op.execute("DROP TABLE IF EXISTS search_index")

    # Original schema: a single indexed `content` column instead of stems + snippet.
    op.execute("""
    CREATE VIRTUAL TABLE IF NOT EXISTS search_index USING fts5(
        -- Core entity fields
        id UNINDEXED,             -- Row ID
        title,                    -- Title for searching
        content,                  -- Main searchable content
        permalink,                -- Stable identifier (now indexed for path search)
        file_path UNINDEXED,      -- Physical location
        type UNINDEXED,           -- entity/relation/observation

        -- Relation fields
        from_id UNINDEXED,        -- Source entity
        to_id UNINDEXED,          -- Target entity
        relation_type UNINDEXED,  -- Type of relation

        -- Observation fields
        entity_id UNINDEXED,      -- Parent entity
        category UNINDEXED,       -- Observation category

        -- Common fields
        metadata UNINDEXED,       -- JSON metadata
        created_at UNINDEXED,     -- Creation timestamp
        updated_at UNINDEXED,     -- Last update

        -- Configuration
        tokenize='unicode61 tokenchars 0x2F',  -- Hex code for /
        prefix='1,2,3,4'          -- Support longer prefixes for paths
    );
    """)

    # Recreating the table leaves the index empty; the user must resync to repopulate it.
    print("\n------------------------------------------------------------------")
    print("IMPORTANT: After downgrade completes, manually run the reindex command:")
    print("basic-memory sync")
    print("------------------------------------------------------------------\n")
basic_memory/api/app.py CHANGED
@@ -2,14 +2,13 @@
2
2
 
3
3
  from contextlib import asynccontextmanager
4
4
 
5
- import logfire
6
5
  from fastapi import FastAPI, HTTPException
7
6
  from fastapi.exception_handlers import http_exception_handler
8
7
  from loguru import logger
9
8
 
10
9
  from basic_memory import db
11
10
  from basic_memory.config import config as app_config
12
- from basic_memory.api.routers import knowledge, search, memory, resource
11
+ from basic_memory.api.routers import knowledge, search, memory, resource, project_info
13
12
 
14
13
 
15
14
  @asynccontextmanager
@@ -29,20 +28,24 @@ app = FastAPI(
29
28
  lifespan=lifespan,
30
29
  )
31
30
 
32
- if app_config != "test":
33
- logfire.instrument_fastapi(app)
34
-
35
31
 
36
32
  # Include routers
37
33
  app.include_router(knowledge.router)
38
34
  app.include_router(search.router)
39
35
  app.include_router(memory.router)
40
36
  app.include_router(resource.router)
37
+ app.include_router(project_info.router)
41
38
 
42
39
 
43
40
  @app.exception_handler(Exception)
44
41
  async def exception_handler(request, exc): # pragma: no cover
45
42
  logger.exception(
46
- f"An unhandled exception occurred for request '{request.url}', exception: {exc}"
43
+ "API unhandled exception",
44
+ url=str(request.url),
45
+ method=request.method,
46
+ client=request.client.host if request.client else None,
47
+ path=request.url.path,
48
+ error_type=type(exc).__name__,
49
+ error=str(exc),
47
50
  )
48
51
  return await http_exception_handler(request, HTTPException(status_code=500, detail=str(exc)))
@@ -4,5 +4,6 @@ from . import knowledge_router as knowledge
4
4
  from . import memory_router as memory
5
5
  from . import resource_router as resource
6
6
  from . import search_router as search
7
+ from . import project_info_router as project_info
7
8
 
8
- __all__ = ["knowledge", "memory", "resource", "search"]
9
+ __all__ = ["knowledge", "memory", "resource", "search", "project_info"]
@@ -33,7 +33,9 @@ async def create_entity(
33
33
  search_service: SearchServiceDep,
34
34
  ) -> EntityResponse:
35
35
  """Create an entity."""
36
- logger.info(f"request: create_entity with data={data}")
36
+ logger.info(
37
+ "API request", endpoint="create_entity", entity_type=data.entity_type, title=data.title
38
+ )
37
39
 
38
40
  entity = await entity_service.create_entity(data)
39
41
 
@@ -41,7 +43,13 @@ async def create_entity(
41
43
  await search_service.index_entity(entity, background_tasks=background_tasks)
42
44
  result = EntityResponse.model_validate(entity)
43
45
 
44
- logger.info(f"response: create_entity with result={result}")
46
+ logger.info(
47
+ "API response",
48
+ endpoint="create_entity",
49
+ title=result.title,
50
+ permalink=result.permalink,
51
+ status_code=201,
52
+ )
45
53
  return result
46
54
 
47
55
 
@@ -55,10 +63,23 @@ async def create_or_update_entity(
55
63
  search_service: SearchServiceDep,
56
64
  ) -> EntityResponse:
57
65
  """Create or update an entity. If entity exists, it will be updated, otherwise created."""
58
- logger.info(f"request: create_or_update_entity with permalink={permalink}, data={data}")
66
+ logger.info(
67
+ "API request",
68
+ endpoint="create_or_update_entity",
69
+ permalink=permalink,
70
+ entity_type=data.entity_type,
71
+ title=data.title,
72
+ )
59
73
 
60
74
  # Validate permalink matches
61
75
  if data.permalink != permalink:
76
+ logger.warning(
77
+ "API validation error",
78
+ endpoint="create_or_update_entity",
79
+ permalink=permalink,
80
+ data_permalink=data.permalink,
81
+ error="Permalink mismatch",
82
+ )
62
83
  raise HTTPException(status_code=400, detail="Entity permalink must match URL path")
63
84
 
64
85
  # Try create_or_update operation
@@ -70,7 +91,12 @@ async def create_or_update_entity(
70
91
  result = EntityResponse.model_validate(entity)
71
92
 
72
93
  logger.info(
73
- f"response: create_or_update_entity with result={result}, status_code={response.status_code}"
94
+ "API response",
95
+ endpoint="create_or_update_entity",
96
+ title=result.title,
97
+ permalink=result.permalink,
98
+ created=created,
99
+ status_code=response.status_code,
74
100
  )
75
101
  return result
76
102
 
@@ -32,6 +32,7 @@ async def to_graph_context(context, entity_repository: EntityRepository, page: i
32
32
  return EntitySummary(
33
33
  title=item.title, # pyright: ignore
34
34
  permalink=item.permalink,
35
+ content=item.content,
35
36
  file_path=item.file_path,
36
37
  created_at=item.created_at,
37
38
  )
@@ -52,8 +53,8 @@ async def to_graph_context(context, entity_repository: EntityRepository, page: i
52
53
  file_path=item.file_path,
53
54
  permalink=item.permalink, # pyright: ignore
54
55
  relation_type=item.type,
55
- from_id=from_entity.permalink, # pyright: ignore
56
- to_id=to_entity.permalink if to_entity else None,
56
+ from_entity=from_entity.permalink, # pyright: ignore
57
+ to_entity=to_entity.permalink if to_entity else None,
57
58
  created_at=item.created_at,
58
59
  )
59
60
  case _: # pragma: no cover
@@ -0,0 +1,275 @@
1
+ """Router for statistics and system information."""
2
+
3
+ import json
4
+ from datetime import datetime
5
+
6
+ from fastapi import APIRouter
7
+ from sqlalchemy import text
8
+
9
+ from basic_memory.config import config, config_manager
10
+ from basic_memory.deps import (
11
+ ProjectInfoRepositoryDep,
12
+ )
13
+ from basic_memory.repository.project_info_repository import ProjectInfoRepository
14
+ from basic_memory.schemas import (
15
+ ProjectInfoResponse,
16
+ ProjectStatistics,
17
+ ActivityMetrics,
18
+ SystemStatus,
19
+ )
20
+ from basic_memory.sync.watch_service import WATCH_STATUS_JSON
21
+
22
+ router = APIRouter(prefix="/stats", tags=["statistics"])
23
+
24
+
25
+ @router.get("/project-info", response_model=ProjectInfoResponse)
26
+ async def get_project_info(
27
+ repository: ProjectInfoRepositoryDep,
28
+ ) -> ProjectInfoResponse:
29
+ """Get comprehensive information about the current Basic Memory project."""
30
+ # Get statistics
31
+ statistics = await get_statistics(repository)
32
+
33
+ # Get activity metrics
34
+ activity = await get_activity_metrics(repository)
35
+
36
+ # Get system status
37
+ system = await get_system_status()
38
+
39
+ # Get project configuration information
40
+ project_name = config.project
41
+ project_path = str(config.home)
42
+ available_projects = config_manager.projects
43
+ default_project = config_manager.default_project
44
+
45
+ # Construct the response
46
+ return ProjectInfoResponse(
47
+ project_name=project_name,
48
+ project_path=project_path,
49
+ available_projects=available_projects,
50
+ default_project=default_project,
51
+ statistics=statistics,
52
+ activity=activity,
53
+ system=system,
54
+ )
55
+
56
+
57
async def get_statistics(repository: ProjectInfoRepository) -> ProjectStatistics:
    """Get statistics about the current project."""

    async def _scalar(sql: str) -> int:
        # Run a single-value query; treat a NULL/empty result as zero.
        result = await repository.execute_query(text(sql))
        return result.scalar() or 0

    async def _counts(sql: str) -> dict:
        # Run a (key, count) GROUP BY query and return it as a dict.
        result = await repository.execute_query(text(sql))
        return {row[0]: row[1] for row in result.fetchall()}

    # Basic totals
    total_entities = await _scalar("SELECT COUNT(*) FROM entity")
    total_observations = await _scalar("SELECT COUNT(*) FROM observation")
    total_relations = await _scalar("SELECT COUNT(*) FROM relation")
    # A relation with NULL to_id is a forward reference that never resolved.
    total_unresolved = await _scalar("SELECT COUNT(*) FROM relation WHERE to_id IS NULL")

    # Breakdowns by type/category
    entity_types = await _counts("SELECT entity_type, COUNT(*) FROM entity GROUP BY entity_type")
    observation_categories = await _counts(
        "SELECT category, COUNT(*) FROM observation GROUP BY category"
    )
    relation_types = await _counts(
        "SELECT relation_type, COUNT(*) FROM relation GROUP BY relation_type"
    )

    # Most connected entities: ranked by count of outgoing relations.
    connected_result = await repository.execute_query(
        text("""
            SELECT e.id, e.title, e.permalink, COUNT(r.id) AS relation_count
            FROM entity e
            JOIN relation r ON e.id = r.from_id
            GROUP BY e.id
            ORDER BY relation_count DESC
            LIMIT 10
        """)
    )
    most_connected = [
        {"id": row[0], "title": row[1], "permalink": row[2], "relation_count": row[3]}
        for row in connected_result.fetchall()
    ]

    # Isolated entities: no incoming AND no outgoing relations.
    isolated_result = await repository.execute_query(
        text("""
            SELECT COUNT(e.id)
            FROM entity e
            LEFT JOIN relation r1 ON e.id = r1.from_id
            LEFT JOIN relation r2 ON e.id = r2.to_id
            WHERE r1.id IS NULL AND r2.id IS NULL
        """)
    )
    isolated_count = isolated_result.scalar() or 0

    return ProjectStatistics(
        total_entities=total_entities,
        total_observations=total_observations,
        total_relations=total_relations,
        total_unresolved_relations=total_unresolved,
        entity_types=entity_types,
        observation_categories=observation_categories,
        relation_types=relation_types,
        most_connected_entities=most_connected,
        isolated_entities=isolated_count,
    )
133
+
134
+
135
async def get_activity_metrics(repository: ProjectInfoRepository) -> ActivityMetrics:
    """Get activity metrics for the current project.

    Returns the 10 most recently created and updated entities, plus monthly
    growth counts (entities/observations/relations) over roughly the last
    six months.
    """
    # Get recently created entities
    created_result = await repository.execute_query(
        text("""
            SELECT id, title, permalink, entity_type, created_at
            FROM entity
            ORDER BY created_at DESC
            LIMIT 10
        """)
    )
    recently_created = [
        {
            "id": row[0],
            "title": row[1],
            "permalink": row[2],
            "entity_type": row[3],
            "created_at": row[4],
        }
        for row in created_result.fetchall()
    ]

    # Get recently updated entities
    updated_result = await repository.execute_query(
        text("""
            SELECT id, title, permalink, entity_type, updated_at
            FROM entity
            ORDER BY updated_at DESC
            LIMIT 10
        """)
    )
    recently_updated = [
        {
            "id": row[0],
            "title": row[1],
            "permalink": row[2],
            "entity_type": row[3],
            "updated_at": row[4],
        }
        for row in updated_result.fetchall()
    ]

    # Compute the first day of the month six months back (year rolls over
    # when the current month is June or earlier; `or 12` maps month 0 -> 12).
    now = datetime.now()
    six_months_ago = datetime(
        now.year - (1 if now.month <= 6 else 0), ((now.month - 6) % 12) or 12, 1
    )
    since = six_months_ago.isoformat()

    # Use bound parameters instead of f-string interpolation: same results,
    # but avoids the injection-prone string-built-SQL pattern and lets the
    # driver cache the statement.
    entity_growth_result = await repository.execute_query(
        text("""
            SELECT
                strftime('%Y-%m', created_at) AS month,
                COUNT(*) AS count
            FROM entity
            WHERE created_at >= :since
            GROUP BY month
            ORDER BY month
        """).bindparams(since=since)
    )
    entity_growth = {row[0]: row[1] for row in entity_growth_result.fetchall()}

    # Monthly observation creation (bucketed by the parent entity's created_at filter)
    observation_growth_result = await repository.execute_query(
        text("""
            SELECT
                strftime('%Y-%m', created_at) AS month,
                COUNT(*) AS count
            FROM observation
            INNER JOIN entity ON observation.entity_id = entity.id
            WHERE entity.created_at >= :since
            GROUP BY month
            ORDER BY month
        """).bindparams(since=since)
    )
    observation_growth = {row[0]: row[1] for row in observation_growth_result.fetchall()}

    # Monthly relation creation (filtered by the source entity's created_at)
    relation_growth_result = await repository.execute_query(
        text("""
            SELECT
                strftime('%Y-%m', created_at) AS month,
                COUNT(*) AS count
            FROM relation
            INNER JOIN entity ON relation.from_id = entity.id
            WHERE entity.created_at >= :since
            GROUP BY month
            ORDER BY month
        """).bindparams(since=since)
    )
    relation_growth = {row[0]: row[1] for row in relation_growth_result.fetchall()}

    # Merge the three per-month series; months missing from a series count as 0.
    monthly_growth = {}
    for month in set(
        list(entity_growth.keys()) + list(observation_growth.keys()) + list(relation_growth.keys())
    ):
        monthly_growth[month] = {
            "entities": entity_growth.get(month, 0),
            "observations": observation_growth.get(month, 0),
            "relations": relation_growth.get(month, 0),
            "total": (
                entity_growth.get(month, 0)
                + observation_growth.get(month, 0)
                + relation_growth.get(month, 0)
            ),
        }

    return ActivityMetrics(
        recently_created=recently_created,
        recently_updated=recently_updated,
        monthly_growth=monthly_growth,
    )
249
+
250
+
251
async def get_system_status() -> SystemStatus:
    """Get system status information."""
    import basic_memory

    # Database size, reported in megabytes (0 when the file does not exist yet).
    db_path = config.database_path
    size_bytes = db_path.stat().st_size if db_path.exists() else 0

    # Watch service status is best-effort: missing or unparsable file -> None.
    watch_status = None
    status_file = config.home / ".basic-memory" / WATCH_STATUS_JSON
    if status_file.exists():
        try:
            watch_status = json.loads(status_file.read_text())
        except Exception:  # pragma: no cover
            pass

    return SystemStatus(
        version=basic_memory.__version__,
        database_path=str(db_path),
        database_size=f"{size_bytes / (1024 * 1024):.2f} MB",
        watch_status=watch_status,
        timestamp=datetime.now(),
    )
@@ -1,11 +1,9 @@
1
1
  """Router for search operations."""
2
2
 
3
- from dataclasses import asdict
4
-
5
3
  from fastapi import APIRouter, BackgroundTasks
6
4
 
7
5
  from basic_memory.schemas.search import SearchQuery, SearchResult, SearchResponse
8
- from basic_memory.deps import SearchServiceDep
6
+ from basic_memory.deps import SearchServiceDep, EntityServiceDep
9
7
 
10
8
  router = APIRouter(prefix="/search", tags=["search"])
11
9
 
@@ -14,6 +12,7 @@ router = APIRouter(prefix="/search", tags=["search"])
14
12
  async def search(
15
13
  query: SearchQuery,
16
14
  search_service: SearchServiceDep,
15
+ entity_service: EntityServiceDep,
17
16
  page: int = 1,
18
17
  page_size: int = 10,
19
18
  ):
@@ -21,7 +20,26 @@ async def search(
21
20
  limit = page_size
22
21
  offset = (page - 1) * page_size
23
22
  results = await search_service.search(query, limit=limit, offset=offset)
24
- search_results = [SearchResult.model_validate(asdict(r)) for r in results]
23
+
24
+ search_results = []
25
+ for r in results:
26
+ entities = await entity_service.get_entities_by_id([r.entity_id, r.from_id, r.to_id]) # pyright: ignore
27
+ search_results.append(
28
+ SearchResult(
29
+ title=r.title, # pyright: ignore
30
+ type=r.type, # pyright: ignore
31
+ permalink=r.permalink,
32
+ score=r.score, # pyright: ignore
33
+ entity=entities[0].permalink if entities else None,
34
+ content=r.content,
35
+ file_path=r.file_path,
36
+ metadata=r.metadata,
37
+ category=r.category,
38
+ from_entity=entities[0].permalink if entities else None,
39
+ to_entity=entities[1].permalink if len(entities) > 1 else None,
40
+ relation_type=r.relation_type,
41
+ )
42
+ )
25
43
  return SearchResponse(
26
44
  results=search_results,
27
45
  current_page=page,
basic_memory/cli/app.py CHANGED
@@ -1,4 +1,5 @@
1
1
  import asyncio
2
+ from typing import Optional
2
3
 
3
4
  import typer
4
5
 
@@ -6,13 +7,63 @@ from basic_memory import db
6
7
  from basic_memory.config import config
7
8
 
8
9
 
9
- asyncio.run(db.run_migrations(config))
10
+ def version_callback(value: bool) -> None:
11
+ """Show version and exit."""
12
+ if value: # pragma: no cover
13
+ import basic_memory
14
+
15
+ typer.echo(f"Basic Memory version: {basic_memory.__version__}")
16
+ raise typer.Exit()
17
+
10
18
 
11
19
  app = typer.Typer(name="basic-memory")
12
20
 
13
- import_app = typer.Typer()
14
- app.add_typer(import_app, name="import")
15
21
 
22
+ @app.callback()
23
+ def app_callback(
24
+ project: Optional[str] = typer.Option(
25
+ None,
26
+ "--project",
27
+ "-p",
28
+ help="Specify which project to use",
29
+ envvar="BASIC_MEMORY_PROJECT",
30
+ ),
31
+ version: Optional[bool] = typer.Option(
32
+ None,
33
+ "--version",
34
+ "-v",
35
+ help="Show version and exit.",
36
+ callback=version_callback,
37
+ is_eager=True,
38
+ ),
39
+ ) -> None:
40
+ """Basic Memory - Local-first personal knowledge management."""
41
+ # We use the project option to set the BASIC_MEMORY_PROJECT environment variable
42
+ # The config module will pick this up when loading
43
+ if project: # pragma: no cover
44
+ import os
45
+ import importlib
46
+ from basic_memory import config as config_module
47
+
48
+ # Set the environment variable
49
+ os.environ["BASIC_MEMORY_PROJECT"] = project
50
+
51
+ # Reload the config module to pick up the new project
52
+ importlib.reload(config_module)
53
+
54
+ # Update the local reference
55
+ global config
56
+ from basic_memory.config import config as new_config
57
+
58
+ config = new_config
59
+
60
+
61
+ # Run database migrations
62
+ asyncio.run(db.run_migrations(config))
63
+
64
+ # Register sub-command groups
65
+ import_app = typer.Typer(help="Import data from various sources")
66
+ app.add_typer(import_app, name="import")
16
67
 
17
68
  claude_app = typer.Typer()
18
69
  import_app.add_typer(claude_app, name="claude")