basic-memory 0.7.0__py3-none-any.whl → 0.9.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of basic-memory might be problematic. See the registry's advisory page for more details.

Files changed (89)
  1. basic_memory/__init__.py +1 -1
  2. basic_memory/alembic/alembic.ini +119 -0
  3. basic_memory/alembic/env.py +23 -1
  4. basic_memory/alembic/migrations.py +4 -9
  5. basic_memory/alembic/versions/502b60eaa905_remove_required_from_entity_permalink.py +51 -0
  6. basic_memory/alembic/versions/b3c3938bacdb_relation_to_name_unique_index.py +44 -0
  7. basic_memory/alembic/versions/cc7172b46608_update_search_index_schema.py +106 -0
  8. basic_memory/api/app.py +9 -10
  9. basic_memory/api/routers/__init__.py +2 -1
  10. basic_memory/api/routers/knowledge_router.py +31 -5
  11. basic_memory/api/routers/memory_router.py +18 -17
  12. basic_memory/api/routers/project_info_router.py +275 -0
  13. basic_memory/api/routers/resource_router.py +105 -4
  14. basic_memory/api/routers/search_router.py +22 -4
  15. basic_memory/cli/app.py +54 -5
  16. basic_memory/cli/commands/__init__.py +15 -2
  17. basic_memory/cli/commands/db.py +9 -13
  18. basic_memory/cli/commands/import_chatgpt.py +26 -30
  19. basic_memory/cli/commands/import_claude_conversations.py +27 -29
  20. basic_memory/cli/commands/import_claude_projects.py +29 -31
  21. basic_memory/cli/commands/import_memory_json.py +26 -28
  22. basic_memory/cli/commands/mcp.py +7 -1
  23. basic_memory/cli/commands/project.py +119 -0
  24. basic_memory/cli/commands/project_info.py +167 -0
  25. basic_memory/cli/commands/status.py +14 -28
  26. basic_memory/cli/commands/sync.py +63 -22
  27. basic_memory/cli/commands/tool.py +253 -0
  28. basic_memory/cli/main.py +39 -1
  29. basic_memory/config.py +166 -4
  30. basic_memory/db.py +19 -4
  31. basic_memory/deps.py +10 -3
  32. basic_memory/file_utils.py +37 -19
  33. basic_memory/markdown/entity_parser.py +3 -3
  34. basic_memory/markdown/utils.py +5 -0
  35. basic_memory/mcp/async_client.py +1 -1
  36. basic_memory/mcp/main.py +24 -0
  37. basic_memory/mcp/prompts/__init__.py +19 -0
  38. basic_memory/mcp/prompts/ai_assistant_guide.py +26 -0
  39. basic_memory/mcp/prompts/continue_conversation.py +111 -0
  40. basic_memory/mcp/prompts/recent_activity.py +88 -0
  41. basic_memory/mcp/prompts/search.py +182 -0
  42. basic_memory/mcp/prompts/utils.py +155 -0
  43. basic_memory/mcp/server.py +2 -6
  44. basic_memory/mcp/tools/__init__.py +12 -21
  45. basic_memory/mcp/tools/build_context.py +85 -0
  46. basic_memory/mcp/tools/canvas.py +97 -0
  47. basic_memory/mcp/tools/delete_note.py +28 -0
  48. basic_memory/mcp/tools/project_info.py +51 -0
  49. basic_memory/mcp/tools/read_content.py +229 -0
  50. basic_memory/mcp/tools/read_note.py +190 -0
  51. basic_memory/mcp/tools/recent_activity.py +100 -0
  52. basic_memory/mcp/tools/search.py +56 -17
  53. basic_memory/mcp/tools/utils.py +245 -16
  54. basic_memory/mcp/tools/write_note.py +124 -0
  55. basic_memory/models/knowledge.py +27 -11
  56. basic_memory/models/search.py +2 -1
  57. basic_memory/repository/entity_repository.py +3 -2
  58. basic_memory/repository/project_info_repository.py +9 -0
  59. basic_memory/repository/repository.py +24 -7
  60. basic_memory/repository/search_repository.py +47 -14
  61. basic_memory/schemas/__init__.py +10 -9
  62. basic_memory/schemas/base.py +4 -1
  63. basic_memory/schemas/memory.py +14 -4
  64. basic_memory/schemas/project_info.py +96 -0
  65. basic_memory/schemas/search.py +29 -33
  66. basic_memory/services/context_service.py +3 -3
  67. basic_memory/services/entity_service.py +26 -13
  68. basic_memory/services/file_service.py +145 -26
  69. basic_memory/services/link_resolver.py +9 -46
  70. basic_memory/services/search_service.py +95 -22
  71. basic_memory/sync/__init__.py +3 -2
  72. basic_memory/sync/sync_service.py +523 -117
  73. basic_memory/sync/watch_service.py +258 -132
  74. basic_memory/utils.py +51 -36
  75. basic_memory-0.9.0.dist-info/METADATA +736 -0
  76. basic_memory-0.9.0.dist-info/RECORD +99 -0
  77. basic_memory/alembic/README +0 -1
  78. basic_memory/cli/commands/tools.py +0 -157
  79. basic_memory/mcp/tools/knowledge.py +0 -68
  80. basic_memory/mcp/tools/memory.py +0 -170
  81. basic_memory/mcp/tools/notes.py +0 -202
  82. basic_memory/schemas/discovery.py +0 -28
  83. basic_memory/sync/file_change_scanner.py +0 -158
  84. basic_memory/sync/utils.py +0 -31
  85. basic_memory-0.7.0.dist-info/METADATA +0 -378
  86. basic_memory-0.7.0.dist-info/RECORD +0 -82
  87. {basic_memory-0.7.0.dist-info → basic_memory-0.9.0.dist-info}/WHEEL +0 -0
  88. {basic_memory-0.7.0.dist-info → basic_memory-0.9.0.dist-info}/entry_points.txt +0 -0
  89. {basic_memory-0.7.0.dist-info → basic_memory-0.9.0.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,275 @@
1
+ """Router for statistics and system information."""
2
+
3
+ import json
4
+ from datetime import datetime
5
+
6
+ from fastapi import APIRouter
7
+ from sqlalchemy import text
8
+
9
+ from basic_memory.config import config, config_manager
10
+ from basic_memory.deps import (
11
+ ProjectInfoRepositoryDep,
12
+ )
13
+ from basic_memory.repository.project_info_repository import ProjectInfoRepository
14
+ from basic_memory.schemas import (
15
+ ProjectInfoResponse,
16
+ ProjectStatistics,
17
+ ActivityMetrics,
18
+ SystemStatus,
19
+ )
20
+ from basic_memory.sync.watch_service import WATCH_STATUS_JSON
21
+
22
+ router = APIRouter(prefix="/stats", tags=["statistics"])
23
+
24
+
25
@router.get("/project-info", response_model=ProjectInfoResponse)
async def get_project_info(
    repository: ProjectInfoRepositoryDep,
) -> ProjectInfoResponse:
    """Get comprehensive information about the current Basic Memory project.

    Aggregates database statistics, activity metrics, system status, and the
    loaded project configuration into a single ProjectInfoResponse.
    """
    # Each section of the report is produced by its dedicated helper.
    statistics = await get_statistics(repository)
    activity = await get_activity_metrics(repository)
    system = await get_system_status()

    # Project/configuration details come straight from the loaded config objects.
    return ProjectInfoResponse(
        project_name=config.project,
        project_path=str(config.home),
        available_projects=config_manager.projects,
        default_project=config_manager.default_project,
        statistics=statistics,
        activity=activity,
        system=system,
    )
55
+
56
+
57
async def get_statistics(repository: ProjectInfoRepository) -> ProjectStatistics:
    """Get statistics about the current project."""

    async def _scalar(sql: str) -> int:
        """Run a single-value query, coercing an empty result to 0."""
        result = await repository.execute_query(text(sql))
        return result.scalar() or 0

    async def _counts(sql: str) -> dict:
        """Run a (key, count) query and return the rows as a dict."""
        result = await repository.execute_query(text(sql))
        return {row[0]: row[1] for row in result.fetchall()}

    # Overall table counts.
    total_entities = await _scalar("SELECT COUNT(*) FROM entity")
    total_observations = await _scalar("SELECT COUNT(*) FROM observation")
    total_relations = await _scalar("SELECT COUNT(*) FROM relation")
    # Relations whose target has not been resolved to an entity yet.
    total_unresolved = await _scalar("SELECT COUNT(*) FROM relation WHERE to_id IS NULL")

    # Breakdowns by type/category.
    entity_types = await _counts("SELECT entity_type, COUNT(*) FROM entity GROUP BY entity_type")
    observation_categories = await _counts(
        "SELECT category, COUNT(*) FROM observation GROUP BY category"
    )
    relation_types = await _counts(
        "SELECT relation_type, COUNT(*) FROM relation GROUP BY relation_type"
    )

    # Entities with the most outgoing relations.
    connected_result = await repository.execute_query(
        text("""
            SELECT e.id, e.title, e.permalink, COUNT(r.id) AS relation_count
            FROM entity e
            JOIN relation r ON e.id = r.from_id
            GROUP BY e.id
            ORDER BY relation_count DESC
            LIMIT 10
        """)
    )
    most_connected = [
        {"id": row[0], "title": row[1], "permalink": row[2], "relation_count": row[3]}
        for row in connected_result.fetchall()
    ]

    # Entities that participate in no relations at all (neither end).
    isolated_count = await _scalar("""
        SELECT COUNT(e.id)
        FROM entity e
        LEFT JOIN relation r1 ON e.id = r1.from_id
        LEFT JOIN relation r2 ON e.id = r2.to_id
        WHERE r1.id IS NULL AND r2.id IS NULL
    """)

    return ProjectStatistics(
        total_entities=total_entities,
        total_observations=total_observations,
        total_relations=total_relations,
        total_unresolved_relations=total_unresolved,
        entity_types=entity_types,
        observation_categories=observation_categories,
        relation_types=relation_types,
        most_connected_entities=most_connected,
        isolated_entities=isolated_count,
    )
133
+
134
+
135
async def get_activity_metrics(repository: ProjectInfoRepository) -> ActivityMetrics:
    """Get activity metrics for the current project.

    Returns the ten most recently created and most recently updated entities,
    plus per-month creation counts for entities, observations and relations
    over roughly the last six months.

    Args:
        repository: Repository used to run the raw SQL queries.

    Returns:
        ActivityMetrics with recently_created, recently_updated and
        monthly_growth populated.
    """
    # Get recently created entities.
    created_result = await repository.execute_query(
        text("""
            SELECT id, title, permalink, entity_type, created_at
            FROM entity
            ORDER BY created_at DESC
            LIMIT 10
        """)
    )
    recently_created = [
        {
            "id": row[0],
            "title": row[1],
            "permalink": row[2],
            "entity_type": row[3],
            "created_at": row[4],
        }
        for row in created_result.fetchall()
    ]

    # Get recently updated entities.
    updated_result = await repository.execute_query(
        text("""
            SELECT id, title, permalink, entity_type, updated_at
            FROM entity
            ORDER BY updated_at DESC
            LIMIT 10
        """)
    )
    recently_updated = [
        {
            "id": row[0],
            "title": row[1],
            "permalink": row[2],
            "entity_type": row[3],
            "updated_at": row[4],
        }
        for row in updated_result.fetchall()
    ]

    # First day of the month six months back; the year rolls over when the
    # current month is June or earlier ((month - 6) % 12 maps June -> 12).
    now = datetime.now()
    six_months_ago = datetime(
        now.year - (1 if now.month <= 6 else 0), ((now.month - 6) % 12) or 12, 1
    )
    since = six_months_ago.isoformat()

    # Bind the cutoff date instead of interpolating it into the SQL text —
    # avoids string-built SQL and keeps the statement cacheable.
    entity_growth_result = await repository.execute_query(
        text("""
            SELECT
                strftime('%Y-%m', created_at) AS month,
                COUNT(*) AS count
            FROM entity
            WHERE created_at >= :since
            GROUP BY month
            ORDER BY month
        """).bindparams(since=since)
    )
    entity_growth = {row[0]: row[1] for row in entity_growth_result.fetchall()}

    # Monthly observation creation. `created_at` is qualified with `entity.`
    # so the query cannot hit an ambiguous-column error if the observation
    # table ever carries its own timestamp; the unqualified original resolved
    # to entity.created_at (matching the WHERE clause) — TODO confirm against
    # the observation schema.
    observation_growth_result = await repository.execute_query(
        text("""
            SELECT
                strftime('%Y-%m', entity.created_at) AS month,
                COUNT(*) AS count
            FROM observation
            INNER JOIN entity ON observation.entity_id = entity.id
            WHERE entity.created_at >= :since
            GROUP BY month
            ORDER BY month
        """).bindparams(since=since)
    )
    observation_growth = {row[0]: row[1] for row in observation_growth_result.fetchall()}

    # Monthly relation creation, bucketed by the source entity's created_at
    # (same qualification rationale as above).
    relation_growth_result = await repository.execute_query(
        text("""
            SELECT
                strftime('%Y-%m', entity.created_at) AS month,
                COUNT(*) AS count
            FROM relation
            INNER JOIN entity ON relation.from_id = entity.id
            WHERE entity.created_at >= :since
            GROUP BY month
            ORDER BY month
        """).bindparams(since=since)
    )
    relation_growth = {row[0]: row[1] for row in relation_growth_result.fetchall()}

    # Merge the three per-month series; months missing from a series count 0.
    monthly_growth = {}
    for month in set(entity_growth) | set(observation_growth) | set(relation_growth):
        entities = entity_growth.get(month, 0)
        observations = observation_growth.get(month, 0)
        relations = relation_growth.get(month, 0)
        monthly_growth[month] = {
            "entities": entities,
            "observations": observations,
            "relations": relations,
            "total": entities + observations + relations,
        }

    return ActivityMetrics(
        recently_created=recently_created,
        recently_updated=recently_updated,
        monthly_growth=monthly_growth,
    )
249
+
250
+
251
async def get_system_status() -> SystemStatus:
    """Get system status information."""
    import basic_memory

    # Database file size, rendered human-readable in megabytes.
    db_path = config.database_path
    size_bytes = db_path.stat().st_size if db_path.exists() else 0

    # Load the watch-service status file when present; an unreadable or
    # corrupt file simply leaves the status as None.
    watch_status = None
    status_file = config.home / ".basic-memory" / WATCH_STATUS_JSON
    if status_file.exists():
        try:
            watch_status = json.loads(status_file.read_text())
        except Exception:  # pragma: no cover
            pass

    return SystemStatus(
        version=basic_memory.__version__,
        database_path=str(db_path),
        database_size=f"{size_bytes / (1024 * 1024):.2f} MB",
        watch_status=watch_status,
        timestamp=datetime.now(),
    )
@@ -2,9 +2,10 @@
2
2
 
3
3
  import tempfile
4
4
  from pathlib import Path
5
+ from typing import Annotated
5
6
 
6
- from fastapi import APIRouter, HTTPException, BackgroundTasks
7
- from fastapi.responses import FileResponse
7
+ from fastapi import APIRouter, HTTPException, BackgroundTasks, Body
8
+ from fastapi.responses import FileResponse, JSONResponse
8
9
  from loguru import logger
9
10
 
10
11
  from basic_memory.deps import (
@@ -13,10 +14,13 @@ from basic_memory.deps import (
13
14
  SearchServiceDep,
14
15
  EntityServiceDep,
15
16
  FileServiceDep,
17
+ EntityRepositoryDep,
16
18
  )
17
19
  from basic_memory.repository.search_repository import SearchIndexRow
18
20
  from basic_memory.schemas.memory import normalize_memory_url
19
21
  from basic_memory.schemas.search import SearchQuery, SearchItemType
22
+ from basic_memory.models.knowledge import Entity as EntityModel
23
+ from datetime import datetime
20
24
 
21
25
  router = APIRouter(prefix="/resource", tags=["resources"])
22
26
 
@@ -94,8 +98,7 @@ async def get_resource_content(
94
98
  content = await file_service.read_entity_content(result)
95
99
  memory_url = normalize_memory_url(result.permalink)
96
100
  modified_date = result.updated_at.isoformat()
97
- assert result.checksum
98
- checksum = result.checksum[:8]
101
+ checksum = result.checksum[:8] if result.checksum else ""
99
102
 
100
103
  # Prepare the delimited content
101
104
  response_content = f"--- {memory_url} {modified_date} {checksum}\n"
@@ -122,3 +125,101 @@ def cleanup_temp_file(file_path: str):
122
125
  logger.debug(f"Temporary file deleted: {file_path}")
123
126
  except Exception as e: # pragma: no cover
124
127
  logger.error(f"Error deleting temporary file {file_path}: {e}")
128
+
129
+
130
@router.put("/{file_path:path}")
async def write_resource(
    config: ProjectConfigDep,
    file_service: FileServiceDep,
    entity_repository: EntityRepositoryDep,
    search_service: SearchServiceDep,
    file_path: str,
    content: Annotated[str, Body()],
) -> JSONResponse:
    """Write content to a file in the project.

    Writes the request body to `file_path` (relative to the project root),
    then creates or updates the matching entity record and indexes the file
    for search.

    Args:
        file_path: Path to write to, relative to the project root.
        content: File content from the request body.

    Returns:
        JSONResponse with file path, checksum, size and timestamps;
        status 201 when a new entity was created, 200 when one was updated.

    Raises:
        HTTPException: 400 if the path escapes the project root,
            500 on any write/index failure.
    """
    # file_path comes straight from the URL: treat it as untrusted and
    # reject anything (e.g. "../..") that resolves outside the project root.
    home = Path(config.home)
    full_path = home / file_path
    resolved_home = home.resolve()
    resolved_target = full_path.resolve()
    if resolved_target != resolved_home and resolved_home not in resolved_target.parents:
        raise HTTPException(status_code=400, detail=f"Invalid file path: {file_path}")

    try:
        # Body() normally yields str, but guard against raw bytes.
        if isinstance(content, bytes):  # pragma: no cover
            content_str = content.decode("utf-8")
        else:
            content_str = str(content)

        # Ensure the parent directory exists, then write the content.
        full_path.parent.mkdir(parents=True, exist_ok=True)
        checksum = await file_service.write_file(full_path, content_str)

        # Gather file metadata for the entity record and the response.
        file_stats = file_service.file_stats(full_path)
        file_name = Path(file_path).name
        content_type = file_service.content_type(full_path)
        entity_type = "canvas" if file_path.endswith(".canvas") else "file"

        # Upsert the entity record for this file.
        existing_entity = await entity_repository.get_by_file_path(file_path)
        if existing_entity:
            entity = await entity_repository.update(
                existing_entity.id,
                {
                    "title": file_name,
                    "entity_type": entity_type,
                    "content_type": content_type,
                    "file_path": file_path,
                    "checksum": checksum,
                    "updated_at": datetime.fromtimestamp(file_stats.st_mtime),
                },
            )
            status_code = 200
        else:
            entity = EntityModel(
                title=file_name,
                entity_type=entity_type,
                content_type=content_type,
                file_path=file_path,
                checksum=checksum,
                created_at=datetime.fromtimestamp(file_stats.st_ctime),
                updated_at=datetime.fromtimestamp(file_stats.st_mtime),
            )
            entity = await entity_repository.add(entity)
            status_code = 201

        # Make the new content searchable.
        await search_service.index_entity(entity)  # pyright: ignore

        return JSONResponse(
            status_code=status_code,
            content={
                "file_path": file_path,
                "checksum": checksum,
                "size": file_stats.st_size,
                "created_at": file_stats.st_ctime,
                "modified_at": file_stats.st_mtime,
            },
        )
    except Exception as e:  # pragma: no cover
        logger.error(f"Error writing resource {file_path}: {e}")
        raise HTTPException(status_code=500, detail=f"Failed to write resource: {str(e)}")
@@ -1,11 +1,9 @@
1
1
  """Router for search operations."""
2
2
 
3
- from dataclasses import asdict
4
-
5
3
  from fastapi import APIRouter, BackgroundTasks
6
4
 
7
5
  from basic_memory.schemas.search import SearchQuery, SearchResult, SearchResponse
8
- from basic_memory.deps import SearchServiceDep
6
+ from basic_memory.deps import SearchServiceDep, EntityServiceDep
9
7
 
10
8
  router = APIRouter(prefix="/search", tags=["search"])
11
9
 
@@ -14,6 +12,7 @@ router = APIRouter(prefix="/search", tags=["search"])
14
12
  async def search(
15
13
  query: SearchQuery,
16
14
  search_service: SearchServiceDep,
15
+ entity_service: EntityServiceDep,
17
16
  page: int = 1,
18
17
  page_size: int = 10,
19
18
  ):
@@ -21,7 +20,26 @@ async def search(
21
20
  limit = page_size
22
21
  offset = (page - 1) * page_size
23
22
  results = await search_service.search(query, limit=limit, offset=offset)
24
- search_results = [SearchResult.model_validate(asdict(r)) for r in results]
23
+
24
+ search_results = []
25
+ for r in results:
26
+ entities = await entity_service.get_entities_by_id([r.entity_id, r.from_id, r.to_id]) # pyright: ignore
27
+ search_results.append(
28
+ SearchResult(
29
+ title=r.title, # pyright: ignore
30
+ type=r.type, # pyright: ignore
31
+ permalink=r.permalink,
32
+ score=r.score, # pyright: ignore
33
+ entity=entities[0].permalink if entities else None,
34
+ content=r.content,
35
+ file_path=r.file_path,
36
+ metadata=r.metadata,
37
+ category=r.category,
38
+ from_entity=entities[0].permalink if entities else None,
39
+ to_entity=entities[1].permalink if len(entities) > 1 else None,
40
+ relation_type=r.relation_type,
41
+ )
42
+ )
25
43
  return SearchResponse(
26
44
  results=search_results,
27
45
  current_page=page,
basic_memory/cli/app.py CHANGED
@@ -1,20 +1,69 @@
1
1
  import asyncio
2
+ from typing import Optional
2
3
 
3
4
  import typer
4
5
 
5
6
  from basic_memory import db
6
7
  from basic_memory.config import config
7
- from basic_memory.utils import setup_logging
8
8
 
9
- setup_logging(log_file=".basic-memory/basic-memory-cli.log", console=False) # pragma: no cover
10
9
 
11
- asyncio.run(db.run_migrations(config))
10
def version_callback(value: bool) -> None:
    """Print the installed Basic Memory version and terminate the CLI."""
    if value:  # pragma: no cover
        # Imported lazily so the version lookup only happens when requested.
        import basic_memory

        typer.echo(f"Basic Memory version: {basic_memory.__version__}")
        raise typer.Exit()
17
+
12
18
 
13
19
  app = typer.Typer(name="basic-memory")
14
20
 
15
- import_app = typer.Typer()
16
- app.add_typer(import_app, name="import")
17
21
 
22
@app.callback()
def app_callback(
    project: Optional[str] = typer.Option(
        None,
        "--project",
        "-p",
        help="Specify which project to use",
        envvar="BASIC_MEMORY_PROJECT",
    ),
    version: Optional[bool] = typer.Option(
        None,
        "--version",
        "-v",
        help="Show version and exit.",
        callback=version_callback,
        is_eager=True,
    ),
) -> None:
    """Basic Memory - Local-first personal knowledge management."""
    # --project works by exporting BASIC_MEMORY_PROJECT and re-importing the
    # config module, which reads that variable at load time.
    if project:  # pragma: no cover
        import importlib
        import os

        from basic_memory import config as config_module

        os.environ["BASIC_MEMORY_PROJECT"] = project

        # Re-evaluate the config module so it picks up the new project name.
        importlib.reload(config_module)

        # Rebind this module's `config` reference to the reloaded object.
        global config
        from basic_memory.config import config as reloaded_config

        config = reloaded_config
59
+
60
+
61
+ # Run database migrations
62
+ asyncio.run(db.run_migrations(config))
63
+
64
+ # Register sub-command groups
65
+ import_app = typer.Typer(help="Import data from various sources")
66
+ app.add_typer(import_app, name="import")
18
67
 
19
68
  claude_app = typer.Typer()
20
69
  import_app.add_typer(claude_app, name="claude")
@@ -1,5 +1,18 @@
1
1
  """CLI commands for basic-memory."""
2
2
 
3
- from . import status, sync, db, import_memory_json, mcp
3
+ from . import status, sync, db, import_memory_json, mcp, import_claude_conversations
4
+ from . import import_claude_projects, import_chatgpt, tool, project, project_info
4
5
 
5
- __all__ = ["status", "sync", "db", "import_memory_json", "mcp"]
6
+ __all__ = [
7
+ "status",
8
+ "sync",
9
+ "db",
10
+ "import_memory_json",
11
+ "mcp",
12
+ "import_claude_conversations",
13
+ "import_claude_projects",
14
+ "import_chatgpt",
15
+ "tool",
16
+ "project",
17
+ "project_info",
18
+ ]
@@ -1,8 +1,5 @@
1
1
  """Database management commands."""
2
2
 
3
- import asyncio
4
-
5
- import logfire
6
3
  import typer
7
4
  from loguru import logger
8
5
 
@@ -12,17 +9,16 @@ from basic_memory.cli.app import app
12
9
 
13
10
@app.command()
def reset(
    reindex: bool = typer.Option(False, "--reindex", help="Rebuild db index from filesystem"),
):  # pragma: no cover
    """Reset database (drop all tables and recreate)."""
    # Destructive operation — require explicit confirmation first.
    if typer.confirm("This will delete all data in your db. Are you sure?"):
        logger.info("Resetting database...")
        migrations.reset_database()

        if reindex:
            # Import and run sync
            from basic_memory.cli.commands.sync import sync

            logger.info("Rebuilding search index from filesystem...")
            sync(watch=False)  # pyright: ignore
@@ -6,7 +6,6 @@ from datetime import datetime
6
6
  from pathlib import Path
7
7
  from typing import Dict, Any, List, Annotated, Set, Optional
8
8
 
9
- import logfire
10
9
  import typer
11
10
  from loguru import logger
12
11
  from rich.console import Console
@@ -226,38 +225,35 @@ def import_chatgpt(
226
225
  After importing, run 'basic-memory sync' to index the new files.
227
226
  """
228
227
 
229
- with logfire.span("import chatgpt"): # pyright: ignore [reportGeneralTypeIssues]
230
- try:
231
- if conversations_json:
232
- if not conversations_json.exists():
233
- typer.echo(f"Error: File not found: {conversations_json}", err=True)
234
- raise typer.Exit(1)
228
+ try:
229
+ if conversations_json:
230
+ if not conversations_json.exists():
231
+ typer.echo(f"Error: File not found: {conversations_json}", err=True)
232
+ raise typer.Exit(1)
235
233
 
236
- # Get markdown processor
237
- markdown_processor = asyncio.run(get_markdown_processor())
234
+ # Get markdown processor
235
+ markdown_processor = asyncio.run(get_markdown_processor())
238
236
 
239
- # Process the file
240
- base_path = config.home / folder
241
- console.print(
242
- f"\nImporting chats from {conversations_json}...writing to {base_path}"
243
- )
244
- results = asyncio.run(
245
- process_chatgpt_json(conversations_json, folder, markdown_processor)
246
- )
237
+ # Process the file
238
+ base_path = config.home / folder
239
+ console.print(f"\nImporting chats from {conversations_json}...writing to {base_path}")
240
+ results = asyncio.run(
241
+ process_chatgpt_json(conversations_json, folder, markdown_processor)
242
+ )
247
243
 
248
- # Show results
249
- console.print(
250
- Panel(
251
- f"[green]Import complete![/green]\n\n"
252
- f"Imported {results['conversations']} conversations\n"
253
- f"Containing {results['messages']} messages",
254
- expand=False,
255
- )
244
+ # Show results
245
+ console.print(
246
+ Panel(
247
+ f"[green]Import complete![/green]\n\n"
248
+ f"Imported {results['conversations']} conversations\n"
249
+ f"Containing {results['messages']} messages",
250
+ expand=False,
256
251
  )
252
+ )
257
253
 
258
- console.print("\nRun 'basic-memory sync' to index the new files.")
254
+ console.print("\nRun 'basic-memory sync' to index the new files.")
259
255
 
260
- except Exception as e:
261
- logger.error("Import failed")
262
- typer.echo(f"Error during import: {e}", err=True)
263
- raise typer.Exit(1)
256
+ except Exception as e:
257
+ logger.error("Import failed")
258
+ typer.echo(f"Error during import: {e}", err=True)
259
+ raise typer.Exit(1)