basic-memory 0.8.0__py3-none-any.whl → 0.10.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of basic-memory might be problematic. Click here for more details.

Files changed (76)
  1. basic_memory/__init__.py +1 -1
  2. basic_memory/alembic/migrations.py +4 -9
  3. basic_memory/alembic/versions/cc7172b46608_update_search_index_schema.py +106 -0
  4. basic_memory/api/app.py +9 -6
  5. basic_memory/api/routers/__init__.py +2 -1
  6. basic_memory/api/routers/knowledge_router.py +30 -4
  7. basic_memory/api/routers/memory_router.py +3 -2
  8. basic_memory/api/routers/project_info_router.py +274 -0
  9. basic_memory/api/routers/search_router.py +22 -4
  10. basic_memory/cli/app.py +54 -3
  11. basic_memory/cli/commands/__init__.py +15 -2
  12. basic_memory/cli/commands/db.py +9 -13
  13. basic_memory/cli/commands/import_chatgpt.py +31 -36
  14. basic_memory/cli/commands/import_claude_conversations.py +32 -35
  15. basic_memory/cli/commands/import_claude_projects.py +34 -37
  16. basic_memory/cli/commands/import_memory_json.py +26 -28
  17. basic_memory/cli/commands/mcp.py +7 -1
  18. basic_memory/cli/commands/project.py +119 -0
  19. basic_memory/cli/commands/project_info.py +167 -0
  20. basic_memory/cli/commands/status.py +7 -9
  21. basic_memory/cli/commands/sync.py +54 -9
  22. basic_memory/cli/commands/{tools.py → tool.py} +92 -19
  23. basic_memory/cli/main.py +40 -1
  24. basic_memory/config.py +157 -10
  25. basic_memory/db.py +19 -4
  26. basic_memory/deps.py +10 -3
  27. basic_memory/file_utils.py +34 -18
  28. basic_memory/markdown/markdown_processor.py +1 -1
  29. basic_memory/markdown/utils.py +5 -0
  30. basic_memory/mcp/main.py +1 -2
  31. basic_memory/mcp/prompts/__init__.py +6 -2
  32. basic_memory/mcp/prompts/ai_assistant_guide.py +9 -10
  33. basic_memory/mcp/prompts/continue_conversation.py +65 -126
  34. basic_memory/mcp/prompts/recent_activity.py +55 -13
  35. basic_memory/mcp/prompts/search.py +72 -17
  36. basic_memory/mcp/prompts/utils.py +139 -82
  37. basic_memory/mcp/server.py +1 -1
  38. basic_memory/mcp/tools/__init__.py +11 -22
  39. basic_memory/mcp/tools/build_context.py +85 -0
  40. basic_memory/mcp/tools/canvas.py +17 -19
  41. basic_memory/mcp/tools/delete_note.py +28 -0
  42. basic_memory/mcp/tools/project_info.py +51 -0
  43. basic_memory/mcp/tools/{resource.py → read_content.py} +42 -5
  44. basic_memory/mcp/tools/read_note.py +190 -0
  45. basic_memory/mcp/tools/recent_activity.py +100 -0
  46. basic_memory/mcp/tools/search.py +56 -17
  47. basic_memory/mcp/tools/utils.py +245 -17
  48. basic_memory/mcp/tools/write_note.py +124 -0
  49. basic_memory/models/search.py +2 -1
  50. basic_memory/repository/entity_repository.py +3 -2
  51. basic_memory/repository/project_info_repository.py +9 -0
  52. basic_memory/repository/repository.py +23 -6
  53. basic_memory/repository/search_repository.py +33 -10
  54. basic_memory/schemas/__init__.py +12 -0
  55. basic_memory/schemas/memory.py +3 -2
  56. basic_memory/schemas/project_info.py +96 -0
  57. basic_memory/schemas/search.py +27 -32
  58. basic_memory/services/context_service.py +3 -3
  59. basic_memory/services/entity_service.py +8 -2
  60. basic_memory/services/file_service.py +107 -57
  61. basic_memory/services/link_resolver.py +5 -45
  62. basic_memory/services/search_service.py +45 -16
  63. basic_memory/sync/sync_service.py +274 -39
  64. basic_memory/sync/watch_service.py +174 -34
  65. basic_memory/utils.py +40 -40
  66. basic_memory-0.10.0.dist-info/METADATA +386 -0
  67. basic_memory-0.10.0.dist-info/RECORD +99 -0
  68. basic_memory/mcp/prompts/json_canvas_spec.py +0 -25
  69. basic_memory/mcp/tools/knowledge.py +0 -68
  70. basic_memory/mcp/tools/memory.py +0 -177
  71. basic_memory/mcp/tools/notes.py +0 -201
  72. basic_memory-0.8.0.dist-info/METADATA +0 -379
  73. basic_memory-0.8.0.dist-info/RECORD +0 -91
  74. {basic_memory-0.8.0.dist-info → basic_memory-0.10.0.dist-info}/WHEEL +0 -0
  75. {basic_memory-0.8.0.dist-info → basic_memory-0.10.0.dist-info}/entry_points.txt +0 -0
  76. {basic_memory-0.8.0.dist-info → basic_memory-0.10.0.dist-info}/licenses/LICENSE +0 -0
basic_memory/__init__.py CHANGED
@@ -1,3 +1,3 @@
1
1
  """basic-memory - Local-first knowledge management combining Zettelkasten with knowledge graphs"""
2
2
 
3
- __version__ = "0.8.0"
3
+ __version__ = "0.10.0"
@@ -1,6 +1,5 @@
1
1
  """Functions for managing database migrations."""
2
2
 
3
- import asyncio
4
3
  from pathlib import Path
5
4
  from loguru import logger
6
5
  from alembic.config import Config
@@ -10,20 +9,16 @@ from alembic import command
10
9
  def get_alembic_config() -> Config: # pragma: no cover
11
10
  """Get alembic config with correct paths."""
12
11
  migrations_path = Path(__file__).parent
13
- alembic_ini = migrations_path.parent.parent.parent / "alembic.ini"
12
+ alembic_ini = migrations_path / "alembic.ini"
14
13
 
15
14
  config = Config(alembic_ini)
16
15
  config.set_main_option("script_location", str(migrations_path))
17
16
  return config
18
17
 
19
18
 
20
- async def reset_database(): # pragma: no cover
19
+ def reset_database(): # pragma: no cover
21
20
  """Drop and recreate all tables."""
22
21
  logger.info("Resetting database...")
23
22
  config = get_alembic_config()
24
-
25
- def _reset(cfg):
26
- command.downgrade(cfg, "base")
27
- command.upgrade(cfg, "head")
28
-
29
- await asyncio.get_event_loop().run_in_executor(None, _reset, config)
23
+ command.downgrade(config, "base")
24
+ command.upgrade(config, "head")
@@ -0,0 +1,106 @@
1
+ """Update search index schema
2
+
3
+ Revision ID: cc7172b46608
4
+ Revises: 502b60eaa905
5
+ Create Date: 2025-02-28 18:48:23.244941
6
+
7
+ """
8
+
9
+ from typing import Sequence, Union
10
+
11
+ from alembic import op
12
+
13
+
14
+ # revision identifiers, used by Alembic.
15
+ revision: str = "cc7172b46608"
16
+ down_revision: Union[str, None] = "502b60eaa905"
17
+ branch_labels: Union[str, Sequence[str], None] = None
18
+ depends_on: Union[str, Sequence[str], None] = None
19
+
20
+
21
+ def upgrade() -> None:
22
+ """Upgrade database schema to use new search index with content_stems and content_snippet."""
23
+
24
+ # First, drop the existing search_index table
25
+ op.execute("DROP TABLE IF EXISTS search_index")
26
+
27
+ # Create new search_index with updated schema
28
+ op.execute("""
29
+ CREATE VIRTUAL TABLE IF NOT EXISTS search_index USING fts5(
30
+ -- Core entity fields
31
+ id UNINDEXED, -- Row ID
32
+ title, -- Title for searching
33
+ content_stems, -- Main searchable content split into stems
34
+ content_snippet, -- File content snippet for display
35
+ permalink, -- Stable identifier (now indexed for path search)
36
+ file_path UNINDEXED, -- Physical location
37
+ type UNINDEXED, -- entity/relation/observation
38
+
39
+ -- Relation fields
40
+ from_id UNINDEXED, -- Source entity
41
+ to_id UNINDEXED, -- Target entity
42
+ relation_type UNINDEXED, -- Type of relation
43
+
44
+ -- Observation fields
45
+ entity_id UNINDEXED, -- Parent entity
46
+ category UNINDEXED, -- Observation category
47
+
48
+ -- Common fields
49
+ metadata UNINDEXED, -- JSON metadata
50
+ created_at UNINDEXED, -- Creation timestamp
51
+ updated_at UNINDEXED, -- Last update
52
+
53
+ -- Configuration
54
+ tokenize='unicode61 tokenchars 0x2F', -- Hex code for /
55
+ prefix='1,2,3,4' -- Support longer prefixes for paths
56
+ );
57
+ """)
58
+
59
+ # Print instruction to manually reindex after migration
60
+ print("\n------------------------------------------------------------------")
61
+ print("IMPORTANT: After migration completes, manually run the reindex command:")
62
+ print("basic-memory sync")
63
+ print("------------------------------------------------------------------\n")
64
+
65
+
66
+ def downgrade() -> None:
67
+ """Downgrade database schema to use old search index."""
68
+ # Drop the updated search_index table
69
+ op.execute("DROP TABLE IF EXISTS search_index")
70
+
71
+ # Recreate the original search_index schema
72
+ op.execute("""
73
+ CREATE VIRTUAL TABLE IF NOT EXISTS search_index USING fts5(
74
+ -- Core entity fields
75
+ id UNINDEXED, -- Row ID
76
+ title, -- Title for searching
77
+ content, -- Main searchable content
78
+ permalink, -- Stable identifier (now indexed for path search)
79
+ file_path UNINDEXED, -- Physical location
80
+ type UNINDEXED, -- entity/relation/observation
81
+
82
+ -- Relation fields
83
+ from_id UNINDEXED, -- Source entity
84
+ to_id UNINDEXED, -- Target entity
85
+ relation_type UNINDEXED, -- Type of relation
86
+
87
+ -- Observation fields
88
+ entity_id UNINDEXED, -- Parent entity
89
+ category UNINDEXED, -- Observation category
90
+
91
+ -- Common fields
92
+ metadata UNINDEXED, -- JSON metadata
93
+ created_at UNINDEXED, -- Creation timestamp
94
+ updated_at UNINDEXED, -- Last update
95
+
96
+ -- Configuration
97
+ tokenize='unicode61 tokenchars 0x2F', -- Hex code for /
98
+ prefix='1,2,3,4' -- Support longer prefixes for paths
99
+ );
100
+ """)
101
+
102
+ # Print instruction to manually reindex after migration
103
+ print("\n------------------------------------------------------------------")
104
+ print("IMPORTANT: After downgrade completes, manually run the reindex command:")
105
+ print("basic-memory sync")
106
+ print("------------------------------------------------------------------\n")
basic_memory/api/app.py CHANGED
@@ -2,14 +2,13 @@
2
2
 
3
3
  from contextlib import asynccontextmanager
4
4
 
5
- import logfire
6
5
  from fastapi import FastAPI, HTTPException
7
6
  from fastapi.exception_handlers import http_exception_handler
8
7
  from loguru import logger
9
8
 
10
9
  from basic_memory import db
11
10
  from basic_memory.config import config as app_config
12
- from basic_memory.api.routers import knowledge, search, memory, resource
11
+ from basic_memory.api.routers import knowledge, search, memory, resource, project_info
13
12
 
14
13
 
15
14
  @asynccontextmanager
@@ -29,20 +28,24 @@ app = FastAPI(
29
28
  lifespan=lifespan,
30
29
  )
31
30
 
32
- if app_config != "test":
33
- logfire.instrument_fastapi(app)
34
-
35
31
 
36
32
  # Include routers
37
33
  app.include_router(knowledge.router)
38
34
  app.include_router(search.router)
39
35
  app.include_router(memory.router)
40
36
  app.include_router(resource.router)
37
+ app.include_router(project_info.router)
41
38
 
42
39
 
43
40
  @app.exception_handler(Exception)
44
41
  async def exception_handler(request, exc): # pragma: no cover
45
42
  logger.exception(
46
- f"An unhandled exception occurred for request '{request.url}', exception: {exc}"
43
+ "API unhandled exception",
44
+ url=str(request.url),
45
+ method=request.method,
46
+ client=request.client.host if request.client else None,
47
+ path=request.url.path,
48
+ error_type=type(exc).__name__,
49
+ error=str(exc),
47
50
  )
48
51
  return await http_exception_handler(request, HTTPException(status_code=500, detail=str(exc)))
@@ -4,5 +4,6 @@ from . import knowledge_router as knowledge
4
4
  from . import memory_router as memory
5
5
  from . import resource_router as resource
6
6
  from . import search_router as search
7
+ from . import project_info_router as project_info
7
8
 
8
- __all__ = ["knowledge", "memory", "resource", "search"]
9
+ __all__ = ["knowledge", "memory", "resource", "search", "project_info"]
@@ -33,7 +33,9 @@ async def create_entity(
33
33
  search_service: SearchServiceDep,
34
34
  ) -> EntityResponse:
35
35
  """Create an entity."""
36
- logger.info(f"request: create_entity with data={data}")
36
+ logger.info(
37
+ "API request", endpoint="create_entity", entity_type=data.entity_type, title=data.title
38
+ )
37
39
 
38
40
  entity = await entity_service.create_entity(data)
39
41
 
@@ -41,7 +43,13 @@ async def create_entity(
41
43
  await search_service.index_entity(entity, background_tasks=background_tasks)
42
44
  result = EntityResponse.model_validate(entity)
43
45
 
44
- logger.info(f"response: create_entity with result={result}")
46
+ logger.info(
47
+ "API response",
48
+ endpoint="create_entity",
49
+ title=result.title,
50
+ permalink=result.permalink,
51
+ status_code=201,
52
+ )
45
53
  return result
46
54
 
47
55
 
@@ -55,10 +63,23 @@ async def create_or_update_entity(
55
63
  search_service: SearchServiceDep,
56
64
  ) -> EntityResponse:
57
65
  """Create or update an entity. If entity exists, it will be updated, otherwise created."""
58
- logger.info(f"request: create_or_update_entity with permalink={permalink}, data={data}")
66
+ logger.info(
67
+ "API request",
68
+ endpoint="create_or_update_entity",
69
+ permalink=permalink,
70
+ entity_type=data.entity_type,
71
+ title=data.title,
72
+ )
59
73
 
60
74
  # Validate permalink matches
61
75
  if data.permalink != permalink:
76
+ logger.warning(
77
+ "API validation error",
78
+ endpoint="create_or_update_entity",
79
+ permalink=permalink,
80
+ data_permalink=data.permalink,
81
+ error="Permalink mismatch",
82
+ )
62
83
  raise HTTPException(status_code=400, detail="Entity permalink must match URL path")
63
84
 
64
85
  # Try create_or_update operation
@@ -70,7 +91,12 @@ async def create_or_update_entity(
70
91
  result = EntityResponse.model_validate(entity)
71
92
 
72
93
  logger.info(
73
- f"response: create_or_update_entity with result={result}, status_code={response.status_code}"
94
+ "API response",
95
+ endpoint="create_or_update_entity",
96
+ title=result.title,
97
+ permalink=result.permalink,
98
+ created=created,
99
+ status_code=response.status_code,
74
100
  )
75
101
  return result
76
102
 
@@ -32,6 +32,7 @@ async def to_graph_context(context, entity_repository: EntityRepository, page: i
32
32
  return EntitySummary(
33
33
  title=item.title, # pyright: ignore
34
34
  permalink=item.permalink,
35
+ content=item.content,
35
36
  file_path=item.file_path,
36
37
  created_at=item.created_at,
37
38
  )
@@ -52,8 +53,8 @@ async def to_graph_context(context, entity_repository: EntityRepository, page: i
52
53
  file_path=item.file_path,
53
54
  permalink=item.permalink, # pyright: ignore
54
55
  relation_type=item.type,
55
- from_id=from_entity.permalink, # pyright: ignore
56
- to_id=to_entity.permalink if to_entity else None,
56
+ from_entity=from_entity.permalink, # pyright: ignore
57
+ to_entity=to_entity.permalink if to_entity else None,
57
58
  created_at=item.created_at,
58
59
  )
59
60
  case _: # pragma: no cover
@@ -0,0 +1,274 @@
1
+ """Router for statistics and system information."""
2
+
3
+ import json
4
+ from datetime import datetime
5
+
6
+ from basic_memory.config import config, config_manager
7
+ from basic_memory.deps import (
8
+ ProjectInfoRepositoryDep,
9
+ )
10
+ from basic_memory.repository.project_info_repository import ProjectInfoRepository
11
+ from basic_memory.schemas import (
12
+ ProjectInfoResponse,
13
+ ProjectStatistics,
14
+ ActivityMetrics,
15
+ SystemStatus,
16
+ )
17
+ from basic_memory.sync.watch_service import WATCH_STATUS_JSON
18
+ from fastapi import APIRouter
19
+ from sqlalchemy import text
20
+
21
+ router = APIRouter(prefix="/stats", tags=["statistics"])
22
+
23
+
24
+ @router.get("/project-info", response_model=ProjectInfoResponse)
25
+ async def get_project_info(
26
+ repository: ProjectInfoRepositoryDep,
27
+ ) -> ProjectInfoResponse:
28
+ """Get comprehensive information about the current Basic Memory project."""
29
+ # Get statistics
30
+ statistics = await get_statistics(repository)
31
+
32
+ # Get activity metrics
33
+ activity = await get_activity_metrics(repository)
34
+
35
+ # Get system status
36
+ system = await get_system_status()
37
+
38
+ # Get project configuration information
39
+ project_name = config.project
40
+ project_path = str(config.home)
41
+ available_projects = config_manager.projects
42
+ default_project = config_manager.default_project
43
+
44
+ # Construct the response
45
+ return ProjectInfoResponse(
46
+ project_name=project_name,
47
+ project_path=project_path,
48
+ available_projects=available_projects,
49
+ default_project=default_project,
50
+ statistics=statistics,
51
+ activity=activity,
52
+ system=system,
53
+ )
54
+
55
+
56
+ async def get_statistics(repository: ProjectInfoRepository) -> ProjectStatistics:
57
+ """Get statistics about the current project."""
58
+ # Get basic counts
59
+ entity_count_result = await repository.execute_query(text("SELECT COUNT(*) FROM entity"))
60
+ total_entities = entity_count_result.scalar() or 0
61
+
62
+ observation_count_result = await repository.execute_query(
63
+ text("SELECT COUNT(*) FROM observation")
64
+ )
65
+ total_observations = observation_count_result.scalar() or 0
66
+
67
+ relation_count_result = await repository.execute_query(text("SELECT COUNT(*) FROM relation"))
68
+ total_relations = relation_count_result.scalar() or 0
69
+
70
+ unresolved_count_result = await repository.execute_query(
71
+ text("SELECT COUNT(*) FROM relation WHERE to_id IS NULL")
72
+ )
73
+ total_unresolved = unresolved_count_result.scalar() or 0
74
+
75
+ # Get entity counts by type
76
+ entity_types_result = await repository.execute_query(
77
+ text("SELECT entity_type, COUNT(*) FROM entity GROUP BY entity_type")
78
+ )
79
+ entity_types = {row[0]: row[1] for row in entity_types_result.fetchall()}
80
+
81
+ # Get observation counts by category
82
+ category_result = await repository.execute_query(
83
+ text("SELECT category, COUNT(*) FROM observation GROUP BY category")
84
+ )
85
+ observation_categories = {row[0]: row[1] for row in category_result.fetchall()}
86
+
87
+ # Get relation counts by type
88
+ relation_types_result = await repository.execute_query(
89
+ text("SELECT relation_type, COUNT(*) FROM relation GROUP BY relation_type")
90
+ )
91
+ relation_types = {row[0]: row[1] for row in relation_types_result.fetchall()}
92
+
93
+ # Find most connected entities (most outgoing relations)
94
+ connected_result = await repository.execute_query(
95
+ text("""
96
+ SELECT e.id, e.title, e.permalink, COUNT(r.id) AS relation_count
97
+ FROM entity e
98
+ JOIN relation r ON e.id = r.from_id
99
+ GROUP BY e.id
100
+ ORDER BY relation_count DESC
101
+ LIMIT 10
102
+ """)
103
+ )
104
+ most_connected = [
105
+ {"id": row[0], "title": row[1], "permalink": row[2], "relation_count": row[3]}
106
+ for row in connected_result.fetchall()
107
+ ]
108
+
109
+ # Count isolated entities (no relations)
110
+ isolated_result = await repository.execute_query(
111
+ text("""
112
+ SELECT COUNT(e.id)
113
+ FROM entity e
114
+ LEFT JOIN relation r1 ON e.id = r1.from_id
115
+ LEFT JOIN relation r2 ON e.id = r2.to_id
116
+ WHERE r1.id IS NULL AND r2.id IS NULL
117
+ """)
118
+ )
119
+ isolated_count = isolated_result.scalar() or 0
120
+
121
+ return ProjectStatistics(
122
+ total_entities=total_entities,
123
+ total_observations=total_observations,
124
+ total_relations=total_relations,
125
+ total_unresolved_relations=total_unresolved,
126
+ entity_types=entity_types,
127
+ observation_categories=observation_categories,
128
+ relation_types=relation_types,
129
+ most_connected_entities=most_connected,
130
+ isolated_entities=isolated_count,
131
+ )
132
+
133
+
134
+ async def get_activity_metrics(repository: ProjectInfoRepository) -> ActivityMetrics:
135
+ """Get activity metrics for the current project."""
136
+ # Get recently created entities
137
+ created_result = await repository.execute_query(
138
+ text("""
139
+ SELECT id, title, permalink, entity_type, created_at
140
+ FROM entity
141
+ ORDER BY created_at DESC
142
+ LIMIT 10
143
+ """)
144
+ )
145
+ recently_created = [
146
+ {
147
+ "id": row[0],
148
+ "title": row[1],
149
+ "permalink": row[2],
150
+ "entity_type": row[3],
151
+ "created_at": row[4],
152
+ }
153
+ for row in created_result.fetchall()
154
+ ]
155
+
156
+ # Get recently updated entities
157
+ updated_result = await repository.execute_query(
158
+ text("""
159
+ SELECT id, title, permalink, entity_type, updated_at
160
+ FROM entity
161
+ ORDER BY updated_at DESC
162
+ LIMIT 10
163
+ """)
164
+ )
165
+ recently_updated = [
166
+ {
167
+ "id": row[0],
168
+ "title": row[1],
169
+ "permalink": row[2],
170
+ "entity_type": row[3],
171
+ "updated_at": row[4],
172
+ }
173
+ for row in updated_result.fetchall()
174
+ ]
175
+
176
+ # Get monthly growth over the last 6 months
177
+ # Calculate the start of 6 months ago
178
+ now = datetime.now()
179
+ six_months_ago = datetime(
180
+ now.year - (1 if now.month <= 6 else 0), ((now.month - 6) % 12) or 12, 1
181
+ )
182
+
183
+ # Query for monthly entity creation
184
+ entity_growth_result = await repository.execute_query(
185
+ text(f"""
186
+ SELECT
187
+ strftime('%Y-%m', created_at) AS month,
188
+ COUNT(*) AS count
189
+ FROM entity
190
+ WHERE created_at >= '{six_months_ago.isoformat()}'
191
+ GROUP BY month
192
+ ORDER BY month
193
+ """)
194
+ )
195
+ entity_growth = {row[0]: row[1] for row in entity_growth_result.fetchall()}
196
+
197
+ # Query for monthly observation creation
198
+ observation_growth_result = await repository.execute_query(
199
+ text(f"""
200
+ SELECT
201
+ strftime('%Y-%m', created_at) AS month,
202
+ COUNT(*) AS count
203
+ FROM observation
204
+ INNER JOIN entity ON observation.entity_id = entity.id
205
+ WHERE entity.created_at >= '{six_months_ago.isoformat()}'
206
+ GROUP BY month
207
+ ORDER BY month
208
+ """)
209
+ )
210
+ observation_growth = {row[0]: row[1] for row in observation_growth_result.fetchall()}
211
+
212
+ # Query for monthly relation creation
213
+ relation_growth_result = await repository.execute_query(
214
+ text(f"""
215
+ SELECT
216
+ strftime('%Y-%m', created_at) AS month,
217
+ COUNT(*) AS count
218
+ FROM relation
219
+ INNER JOIN entity ON relation.from_id = entity.id
220
+ WHERE entity.created_at >= '{six_months_ago.isoformat()}'
221
+ GROUP BY month
222
+ ORDER BY month
223
+ """)
224
+ )
225
+ relation_growth = {row[0]: row[1] for row in relation_growth_result.fetchall()}
226
+
227
+ # Combine all monthly growth data
228
+ monthly_growth = {}
229
+ for month in set(
230
+ list(entity_growth.keys()) + list(observation_growth.keys()) + list(relation_growth.keys())
231
+ ):
232
+ monthly_growth[month] = {
233
+ "entities": entity_growth.get(month, 0),
234
+ "observations": observation_growth.get(month, 0),
235
+ "relations": relation_growth.get(month, 0),
236
+ "total": (
237
+ entity_growth.get(month, 0)
238
+ + observation_growth.get(month, 0)
239
+ + relation_growth.get(month, 0)
240
+ ),
241
+ }
242
+
243
+ return ActivityMetrics(
244
+ recently_created=recently_created,
245
+ recently_updated=recently_updated,
246
+ monthly_growth=monthly_growth,
247
+ )
248
+
249
+
250
+ async def get_system_status() -> SystemStatus:
251
+ """Get system status information."""
252
+ import basic_memory
253
+
254
+ # Get database information
255
+ db_path = config.database_path
256
+ db_size = db_path.stat().st_size if db_path.exists() else 0
257
+ db_size_readable = f"{db_size / (1024 * 1024):.2f} MB"
258
+
259
+ # Get watch service status if available
260
+ watch_status = None
261
+ watch_status_path = config.home / ".basic-memory" / WATCH_STATUS_JSON
262
+ if watch_status_path.exists():
263
+ try:
264
+ watch_status = json.loads(watch_status_path.read_text(encoding="utf-8"))
265
+ except Exception: # pragma: no cover
266
+ pass
267
+
268
+ return SystemStatus(
269
+ version=basic_memory.__version__,
270
+ database_path=str(db_path),
271
+ database_size=db_size_readable,
272
+ watch_status=watch_status,
273
+ timestamp=datetime.now(),
274
+ )
@@ -1,11 +1,9 @@
1
1
  """Router for search operations."""
2
2
 
3
- from dataclasses import asdict
4
-
5
3
  from fastapi import APIRouter, BackgroundTasks
6
4
 
7
5
  from basic_memory.schemas.search import SearchQuery, SearchResult, SearchResponse
8
- from basic_memory.deps import SearchServiceDep
6
+ from basic_memory.deps import SearchServiceDep, EntityServiceDep
9
7
 
10
8
  router = APIRouter(prefix="/search", tags=["search"])
11
9
 
@@ -14,6 +12,7 @@ router = APIRouter(prefix="/search", tags=["search"])
14
12
  async def search(
15
13
  query: SearchQuery,
16
14
  search_service: SearchServiceDep,
15
+ entity_service: EntityServiceDep,
17
16
  page: int = 1,
18
17
  page_size: int = 10,
19
18
  ):
@@ -21,7 +20,26 @@ async def search(
21
20
  limit = page_size
22
21
  offset = (page - 1) * page_size
23
22
  results = await search_service.search(query, limit=limit, offset=offset)
24
- search_results = [SearchResult.model_validate(asdict(r)) for r in results]
23
+
24
+ search_results = []
25
+ for r in results:
26
+ entities = await entity_service.get_entities_by_id([r.entity_id, r.from_id, r.to_id]) # pyright: ignore
27
+ search_results.append(
28
+ SearchResult(
29
+ title=r.title, # pyright: ignore
30
+ type=r.type, # pyright: ignore
31
+ permalink=r.permalink,
32
+ score=r.score, # pyright: ignore
33
+ entity=entities[0].permalink if entities else None,
34
+ content=r.content,
35
+ file_path=r.file_path,
36
+ metadata=r.metadata,
37
+ category=r.category,
38
+ from_entity=entities[0].permalink if entities else None,
39
+ to_entity=entities[1].permalink if len(entities) > 1 else None,
40
+ relation_type=r.relation_type,
41
+ )
42
+ )
25
43
  return SearchResponse(
26
44
  results=search_results,
27
45
  current_page=page,
basic_memory/cli/app.py CHANGED
@@ -1,4 +1,5 @@
1
1
  import asyncio
2
+ from typing import Optional
2
3
 
3
4
  import typer
4
5
 
@@ -6,13 +7,63 @@ from basic_memory import db
6
7
  from basic_memory.config import config
7
8
 
8
9
 
9
- asyncio.run(db.run_migrations(config))
10
+ def version_callback(value: bool) -> None:
11
+ """Show version and exit."""
12
+ if value: # pragma: no cover
13
+ import basic_memory
14
+
15
+ typer.echo(f"Basic Memory version: {basic_memory.__version__}")
16
+ raise typer.Exit()
17
+
10
18
 
11
19
  app = typer.Typer(name="basic-memory")
12
20
 
13
- import_app = typer.Typer()
14
- app.add_typer(import_app, name="import")
15
21
 
22
+ @app.callback()
23
+ def app_callback(
24
+ project: Optional[str] = typer.Option(
25
+ None,
26
+ "--project",
27
+ "-p",
28
+ help="Specify which project to use",
29
+ envvar="BASIC_MEMORY_PROJECT",
30
+ ),
31
+ version: Optional[bool] = typer.Option(
32
+ None,
33
+ "--version",
34
+ "-v",
35
+ help="Show version and exit.",
36
+ callback=version_callback,
37
+ is_eager=True,
38
+ ),
39
+ ) -> None:
40
+ """Basic Memory - Local-first personal knowledge management."""
41
+ # We use the project option to set the BASIC_MEMORY_PROJECT environment variable
42
+ # The config module will pick this up when loading
43
+ if project: # pragma: no cover
44
+ import os
45
+ import importlib
46
+ from basic_memory import config as config_module
47
+
48
+ # Set the environment variable
49
+ os.environ["BASIC_MEMORY_PROJECT"] = project
50
+
51
+ # Reload the config module to pick up the new project
52
+ importlib.reload(config_module)
53
+
54
+ # Update the local reference
55
+ global config
56
+ from basic_memory.config import config as new_config
57
+
58
+ config = new_config
59
+
60
+
61
+ # Run database migrations
62
+ asyncio.run(db.run_migrations(config))
63
+
64
+ # Register sub-command groups
65
+ import_app = typer.Typer(help="Import data from various sources")
66
+ app.add_typer(import_app, name="import")
16
67
 
17
68
  claude_app = typer.Typer()
18
69
  import_app.add_typer(claude_app, name="claude")