basic-memory 0.6.0-py3-none-any.whl → 0.8.0-py3-none-any.whl
This diff represents the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that public registry.
Potentially problematic release: this version of basic-memory might be problematic.
- basic_memory/__init__.py +1 -1
- basic_memory/alembic/alembic.ini +119 -0
- basic_memory/alembic/env.py +23 -1
- basic_memory/alembic/versions/502b60eaa905_remove_required_from_entity_permalink.py +51 -0
- basic_memory/alembic/versions/b3c3938bacdb_relation_to_name_unique_index.py +44 -0
- basic_memory/api/app.py +0 -4
- basic_memory/api/routers/knowledge_router.py +1 -9
- basic_memory/api/routers/memory_router.py +41 -25
- basic_memory/api/routers/resource_router.py +119 -12
- basic_memory/api/routers/search_router.py +17 -9
- basic_memory/cli/app.py +0 -2
- basic_memory/cli/commands/db.py +11 -8
- basic_memory/cli/commands/import_chatgpt.py +31 -27
- basic_memory/cli/commands/import_claude_conversations.py +29 -27
- basic_memory/cli/commands/import_claude_projects.py +30 -29
- basic_memory/cli/commands/import_memory_json.py +28 -26
- basic_memory/cli/commands/status.py +16 -26
- basic_memory/cli/commands/sync.py +11 -12
- basic_memory/cli/commands/tools.py +180 -0
- basic_memory/cli/main.py +1 -1
- basic_memory/config.py +16 -2
- basic_memory/db.py +1 -0
- basic_memory/deps.py +5 -1
- basic_memory/file_utils.py +6 -4
- basic_memory/markdown/entity_parser.py +3 -3
- basic_memory/mcp/async_client.py +1 -1
- basic_memory/mcp/main.py +25 -0
- basic_memory/mcp/prompts/__init__.py +15 -0
- basic_memory/mcp/prompts/ai_assistant_guide.py +28 -0
- basic_memory/mcp/prompts/continue_conversation.py +172 -0
- basic_memory/mcp/prompts/json_canvas_spec.py +25 -0
- basic_memory/mcp/prompts/recent_activity.py +46 -0
- basic_memory/mcp/prompts/search.py +127 -0
- basic_memory/mcp/prompts/utils.py +98 -0
- basic_memory/mcp/server.py +3 -7
- basic_memory/mcp/tools/__init__.py +6 -4
- basic_memory/mcp/tools/canvas.py +99 -0
- basic_memory/mcp/tools/knowledge.py +26 -14
- basic_memory/mcp/tools/memory.py +57 -31
- basic_memory/mcp/tools/notes.py +65 -72
- basic_memory/mcp/tools/resource.py +192 -0
- basic_memory/mcp/tools/search.py +13 -4
- basic_memory/mcp/tools/utils.py +2 -1
- basic_memory/models/knowledge.py +27 -11
- basic_memory/repository/repository.py +1 -1
- basic_memory/repository/search_repository.py +17 -4
- basic_memory/schemas/__init__.py +0 -11
- basic_memory/schemas/base.py +4 -1
- basic_memory/schemas/memory.py +14 -2
- basic_memory/schemas/request.py +1 -1
- basic_memory/schemas/search.py +4 -1
- basic_memory/services/context_service.py +14 -6
- basic_memory/services/entity_service.py +19 -12
- basic_memory/services/file_service.py +69 -2
- basic_memory/services/link_resolver.py +12 -9
- basic_memory/services/search_service.py +59 -13
- basic_memory/sync/__init__.py +3 -2
- basic_memory/sync/sync_service.py +287 -107
- basic_memory/sync/watch_service.py +125 -129
- basic_memory/utils.py +27 -15
- {basic_memory-0.6.0.dist-info → basic_memory-0.8.0.dist-info}/METADATA +3 -2
- basic_memory-0.8.0.dist-info/RECORD +91 -0
- basic_memory/alembic/README +0 -1
- basic_memory/schemas/discovery.py +0 -28
- basic_memory/sync/file_change_scanner.py +0 -158
- basic_memory/sync/utils.py +0 -31
- basic_memory-0.6.0.dist-info/RECORD +0 -81
- {basic_memory-0.6.0.dist-info → basic_memory-0.8.0.dist-info}/WHEEL +0 -0
- {basic_memory-0.6.0.dist-info → basic_memory-0.8.0.dist-info}/entry_points.txt +0 -0
- {basic_memory-0.6.0.dist-info → basic_memory-0.8.0.dist-info}/licenses/LICENSE +0 -0
basic_memory/api/routers/search_router.py
CHANGED

@@ -2,27 +2,35 @@
 
 from dataclasses import asdict
 
-from fastapi import APIRouter,
+from fastapi import APIRouter, BackgroundTasks
 
-from basic_memory.services.search_service import SearchService
 from basic_memory.schemas.search import SearchQuery, SearchResult, SearchResponse
-from basic_memory.deps import
+from basic_memory.deps import SearchServiceDep
 
 router = APIRouter(prefix="/search", tags=["search"])
 
 
 @router.post("/", response_model=SearchResponse)
-async def search(
+async def search(
+    query: SearchQuery,
+    search_service: SearchServiceDep,
+    page: int = 1,
+    page_size: int = 10,
+):
     """Search across all knowledge and documents."""
-
+    limit = page_size
+    offset = (page - 1) * page_size
+    results = await search_service.search(query, limit=limit, offset=offset)
     search_results = [SearchResult.model_validate(asdict(r)) for r in results]
-    return SearchResponse(
+    return SearchResponse(
+        results=search_results,
+        current_page=page,
+        page_size=page_size,
+    )
 
 
 @router.post("/reindex")
-async def reindex(
-    background_tasks: BackgroundTasks, search_service: SearchService = Depends(get_search_service)
-):
+async def reindex(background_tasks: BackgroundTasks, search_service: SearchServiceDep):
     """Recreate and populate the search index."""
     await search_service.reindex_all(background_tasks=background_tasks)
     return {"status": "ok", "message": "Reindex initiated"}
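The rewritten search endpoint swaps the Depends-injected SearchService for the SearchServiceDep annotation and adds offset-based pagination: page and page_size are translated into limit/offset before calling the search service, and the response now reports current_page and page_size. A minimal client sketch against a locally running API follows; the host, port, and the exact SearchQuery payload shape are assumptions, not taken from this diff.

# Hypothetical client call for the paginated /search/ endpoint.
# Host/port and the "text" field of the request body are assumptions.
import httpx

resp = httpx.post(
    "http://localhost:8000/search/",
    params={"page": 2, "page_size": 10},  # translated server-side to limit/offset
    json={"text": "coffee brewing"},
)
resp.raise_for_status()
payload = resp.json()
print(payload["current_page"], payload["page_size"], len(payload["results"]))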
basic_memory/cli/app.py
CHANGED
basic_memory/cli/commands/db.py
CHANGED
@@ -1,6 +1,8 @@
 """Database management commands."""
 
 import asyncio
+
+import logfire
 import typer
 from loguru import logger
 
@@ -13,13 +15,14 @@ def reset(
     reindex: bool = typer.Option(False, "--reindex", help="Rebuild indices from filesystem"),
 ): # pragma: no cover
     """Reset database (drop all tables and recreate)."""
-
-
-
+    with logfire.span("reset"):  # pyright: ignore [reportGeneralTypeIssues]
+        if typer.confirm("This will delete all data in your db. Are you sure?"):
+            logger.info("Resetting database...")
+            asyncio.run(migrations.reset_database())
 
-
-
-
+            if reindex:
+                # Import and run sync
+                from basic_memory.cli.commands.sync import sync
 
-
-
+                logger.info("Rebuilding search index from filesystem...")
+                sync(watch=False)  # pyright: ignore
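The same shape repeats in the reset command above and in the import commands that follow: the command body is wrapped in logfire.span(...) so the whole invocation is traced, and the async work is driven to completion with asyncio.run(...). A stripped-down sketch of that pattern, assuming only typer and logfire are installed; the command name and the do_reset coroutine are placeholders, not basic-memory code.

# Sketch of the span-plus-asyncio pattern adopted by the 0.8.0 CLI commands.
# "example_reset" and do_reset() are placeholders, not basic-memory APIs.
import asyncio

import logfire
import typer

app = typer.Typer()


async def do_reset() -> None:
    # stand-in for the real async work (e.g. dropping and recreating tables)
    await asyncio.sleep(0)


@app.command()
def example_reset():
    with logfire.span("example_reset"):  # one span per CLI invocation
        if typer.confirm("This will delete all data in your db. Are you sure?"):
            asyncio.run(do_reset())
            typer.echo("Database reset complete")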
basic_memory/cli/commands/import_chatgpt.py
CHANGED

@@ -6,6 +6,7 @@ from datetime import datetime
 from pathlib import Path
 from typing import Dict, Any, List, Annotated, Set, Optional
 
+import logfire
 import typer
 from loguru import logger
 from rich.console import Console
@@ -209,7 +210,7 @@ async def get_markdown_processor() -> MarkdownProcessor:
 @import_app.command(name="chatgpt", help="Import conversations from ChatGPT JSON export.")
 def import_chatgpt(
     conversations_json: Annotated[
-        Path, typer.
+        Path, typer.Argument(help="Path to ChatGPT conversations.json file")
     ] = Path("conversations.json"),
     folder: Annotated[
         str, typer.Option(help="The folder to place the files in.")
@@ -225,35 +226,38 @@ def import_chatgpt(
     After importing, run 'basic-memory sync' to index the new files.
     """
 
-
-
-    if
-
-
+    with logfire.span("import chatgpt"):  # pyright: ignore [reportGeneralTypeIssues]
+        try:
+            if conversations_json:
+                if not conversations_json.exists():
+                    typer.echo(f"Error: File not found: {conversations_json}", err=True)
+                    raise typer.Exit(1)
 
-
-
+            # Get markdown processor
+            markdown_processor = asyncio.run(get_markdown_processor())
 
-
-
-
-
-
-
+            # Process the file
+            base_path = config.home / folder
+            console.print(
+                f"\nImporting chats from {conversations_json}...writing to {base_path}"
+            )
+            results = asyncio.run(
+                process_chatgpt_json(conversations_json, folder, markdown_processor)
+            )
 
-
-
-
-
-
-
-
+            # Show results
+            console.print(
+                Panel(
+                    f"[green]Import complete![/green]\n\n"
+                    f"Imported {results['conversations']} conversations\n"
+                    f"Containing {results['messages']} messages",
+                    expand=False,
+                )
             )
-    )
 
-
+            console.print("\nRun 'basic-memory sync' to index the new files.")
 
-
-
-
-
+        except Exception as e:
+            logger.error("Import failed")
+            typer.echo(f"Error during import: {e}", err=True)
+            raise typer.Exit(1)
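A small but user-visible change in the importer above is that the positional JSON path now carries a typer.Argument help string alongside its default value. A minimal sketch of that Annotated argument style in isolation; the command name, the folder default, and the body are placeholders rather than basic-memory code.

# Sketch of the Annotated Path argument style used by the 0.8.0 importers.
# "example_import" and the "chatgpt" folder default are placeholders.
from pathlib import Path
from typing import Annotated

import typer

app = typer.Typer()


@app.command()
def example_import(
    conversations_json: Annotated[
        Path, typer.Argument(help="Path to ChatGPT conversations.json file")
    ] = Path("conversations.json"),
    folder: Annotated[str, typer.Option(help="The folder to place the files in.")] = "chatgpt",
):
    # validate the path the same way the importers do
    if not conversations_json.exists():
        typer.echo(f"Error: File not found: {conversations_json}", err=True)
        raise typer.Exit(1)
    typer.echo(f"Would import {conversations_json} into {folder}/")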
basic_memory/cli/commands/import_claude_conversations.py
CHANGED

@@ -6,6 +6,7 @@ from datetime import datetime
 from pathlib import Path
 from typing import Dict, Any, List, Annotated
 
+import logfire
 import typer
 from loguru import logger
 from rich.console import Console
@@ -178,34 +179,35 @@ def import_claude(
     After importing, run 'basic-memory sync' to index the new files.
     """
 
-
-
-
-
+    with logfire.span("import claude conversations"):  # pyright: ignore [reportGeneralTypeIssues]
+        try:
+            if not conversations_json.exists():
+                typer.echo(f"Error: File not found: {conversations_json}", err=True)
+                raise typer.Exit(1)
+
+            # Get markdown processor
+            markdown_processor = asyncio.run(get_markdown_processor())
 
-
-
-
-
-
-    console.print(f"\nImporting chats from {conversations_json}...writing to {base_path}")
-    results = asyncio.run(
-        process_conversations_json(conversations_json, base_path, markdown_processor)
-    )
-
-    # Show results
-    console.print(
-        Panel(
-            f"[green]Import complete![/green]\n\n"
-            f"Imported {results['conversations']} conversations\n"
-            f"Containing {results['messages']} messages",
-            expand=False,
+            # Process the file
+            base_path = config.home / folder
+            console.print(f"\nImporting chats from {conversations_json}...writing to {base_path}")
+            results = asyncio.run(
+                process_conversations_json(conversations_json, base_path, markdown_processor)
             )
-    )
 
-
+            # Show results
+            console.print(
+                Panel(
+                    f"[green]Import complete![/green]\n\n"
+                    f"Imported {results['conversations']} conversations\n"
+                    f"Containing {results['messages']} messages",
+                    expand=False,
+                )
+            )
 
-
-
-
-
+            console.print("\nRun 'basic-memory sync' to index the new files.")
+
+        except Exception as e:
+            logger.error("Import failed")
+            typer.echo(f"Error during import: {e}", err=True)
+            raise typer.Exit(1)
basic_memory/cli/commands/import_claude_projects.py
CHANGED

@@ -5,6 +5,7 @@ import json
 from pathlib import Path
 from typing import Dict, Any, Annotated, Optional
 
+import logfire
 import typer
 from loguru import logger
 from rich.console import Console
@@ -160,36 +161,36 @@ def import_projects(
 
     After importing, run 'basic-memory sync' to index the new files.
     """
+    with logfire.span("import claude projects"):  # pyright: ignore [reportGeneralTypeIssues]
+        try:
+            if projects_json:
+                if not projects_json.exists():
+                    typer.echo(f"Error: File not found: {projects_json}", err=True)
+                    raise typer.Exit(1)
+
+            # Get markdown processor
+            markdown_processor = asyncio.run(get_markdown_processor())
+
+            # Process the file
+            base_path = config.home / base_folder if base_folder else config.home
+            console.print(f"\nImporting projects from {projects_json}...writing to {base_path}")
+            results = asyncio.run(
+                process_projects_json(projects_json, base_path, markdown_processor)
+            )
 
-
-
-
-
-
-
-
-
-
-    # Process the file
-    base_path = config.home / base_folder if base_folder else config.home
-    console.print(f"\nImporting projects from {projects_json}...writing to {base_path}")
-    results = asyncio.run(
-        process_projects_json(projects_json, base_path, markdown_processor)
-    )
-
-    # Show results
-    console.print(
-        Panel(
-            f"[green]Import complete![/green]\n\n"
-            f"Imported {results['documents']} project documents\n"
-            f"Imported {results['prompts']} prompt templates",
-            expand=False,
+            # Show results
+            console.print(
+                Panel(
+                    f"[green]Import complete![/green]\n\n"
+                    f"Imported {results['documents']} project documents\n"
+                    f"Imported {results['prompts']} prompt templates",
+                    expand=False,
+                )
             )
-    )
 
-
+            console.print("\nRun 'basic-memory sync' to index the new files.")
 
-
-
-
-
+        except Exception as e:
+            logger.error("Import failed")
+            typer.echo(f"Error during import: {e}", err=True)
+            raise typer.Exit(1)
basic_memory/cli/commands/import_memory_json.py
CHANGED

@@ -5,6 +5,7 @@ import json
 from pathlib import Path
 from typing import Dict, Any, List, Annotated
 
+import logfire
 import typer
 from loguru import logger
 from rich.console import Console
@@ -113,32 +114,33 @@ def memory_json(
     After importing, run 'basic-memory sync' to index the new files.
     """
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    with logfire.span("import memory_json"):  # pyright: ignore [reportGeneralTypeIssues]
+        if not json_path.exists():
+            typer.echo(f"Error: File not found: {json_path}", err=True)
+            raise typer.Exit(1)
+
+        try:
+            # Get markdown processor
+            markdown_processor = asyncio.run(get_markdown_processor())
+
+            # Process the file
+            base_path = config.home
+            console.print(f"\nImporting from {json_path}...writing to {base_path}")
+            results = asyncio.run(process_memory_json(json_path, base_path, markdown_processor))
+
+            # Show results
+            console.print(
+                Panel(
+                    f"[green]Import complete![/green]\n\n"
+                    f"Created {results['entities']} entities\n"
+                    f"Added {results['relations']} relations",
+                    expand=False,
+                )
             )
-    )
 
-
+            console.print("\nRun 'basic-memory sync' to index the new files.")
 
-
-
-
-
+        except Exception as e:
+            logger.error("Import failed")
+            typer.echo(f"Error during import: {e}", err=True)
+            raise typer.Exit(1)
basic_memory/cli/commands/status.py
CHANGED

@@ -3,35 +3,23 @@
 import asyncio
 from typing import Set, Dict
 
+import logfire
 import typer
 from loguru import logger
 from rich.console import Console
 from rich.panel import Panel
 from rich.tree import Tree
 
-from basic_memory import db
 from basic_memory.cli.app import app
+from basic_memory.cli.commands.sync import get_sync_service
 from basic_memory.config import config
-from basic_memory.
-from basic_memory.
-from basic_memory.sync import FileChangeScanner
-from basic_memory.sync.utils import SyncReport
+from basic_memory.sync import SyncService
+from basic_memory.sync.sync_service import SyncReport
 
 # Create rich console
 console = Console()
 
 
-async def get_file_change_scanner(
-    db_type=DatabaseType.FILESYSTEM,
-) -> FileChangeScanner:  # pragma: no cover
-    """Get sync service instance."""
-    _, session_maker = await db.get_or_create_db(db_path=config.database_path, db_type=db_type)
-
-    entity_repository = EntityRepository(session_maker)
-    file_change_scanner = FileChangeScanner(entity_repository)
-    return file_change_scanner
-
-
 def add_files_to_tree(
     tree: Tree, paths: Set[str], style: str, checksums: Dict[str, str] | None = None
 ):
@@ -103,7 +91,7 @@ def display_changes(title: str, changes: SyncReport, verbose: bool = False):
     """Display changes using Rich for better visualization."""
     tree = Tree(title)
 
-    if changes.
+    if changes.total == 0:
         tree.add("No changes")
         console.print(Panel(tree, expand=False))
         return
@@ -134,11 +122,11 @@ def display_changes(title: str, changes: SyncReport, verbose: bool = False):
     console.print(Panel(tree, expand=False))
 
 
-async def run_status(sync_service:
+async def run_status(sync_service: SyncService, verbose: bool = False):
     """Check sync status of files vs database."""
     # Check knowledge/ directory
-    knowledge_changes = await sync_service.
-    display_changes("
+    knowledge_changes = await sync_service.scan(config.home)
+    display_changes("Status", knowledge_changes, verbose)
 
 
 @app.command()
@@ -146,9 +134,11 @@ def status(
     verbose: bool = typer.Option(False, "--verbose", "-v", help="Show detailed file information"),
 ):
     """Show sync status between files and database."""
-
-
-
-
-
-
+    with logfire.span("status"):  # pyright: ignore [reportGeneralTypeIssues]
+        try:
+            sync_service = asyncio.run(get_sync_service())
+            asyncio.run(run_status(sync_service, verbose))  # pragma: no cover
+        except Exception as e:
+            logger.exception(f"Error checking status: {e}")
+            typer.echo(f"Error checking status: {e}", err=True)
+            raise typer.Exit(code=1)  # pragma: no cover
basic_memory/cli/commands/sync.py
CHANGED

@@ -25,8 +25,8 @@ from basic_memory.repository.search_repository import SearchRepository
 from basic_memory.services import EntityService, FileService
 from basic_memory.services.link_resolver import LinkResolver
 from basic_memory.services.search_service import SearchService
-from basic_memory.sync import SyncService
-from basic_memory.sync.
+from basic_memory.sync import SyncService
+from basic_memory.sync.sync_service import SyncReport
 from basic_memory.sync.watch_service import WatchService
 
 console = Console()
@@ -58,9 +58,6 @@ async def get_sync_service(): # pragma: no cover
     search_service = SearchService(search_repository, entity_repository, file_service)
     link_resolver = LinkResolver(entity_repository, search_service)
 
-    # Initialize scanner
-    file_change_scanner = FileChangeScanner(entity_repository)
-
     # Initialize services
     entity_service = EntityService(
         entity_parser,
@@ -73,12 +70,12 @@ async def get_sync_service(): # pragma: no cover
 
     # Create sync service
     sync_service = SyncService(
-        scanner=file_change_scanner,
         entity_service=entity_service,
         entity_parser=entity_parser,
         entity_repository=entity_repository,
         relation_repository=relation_repository,
         search_service=search_service,
+        file_service=file_service,
     )
 
     return sync_service
@@ -95,7 +92,7 @@ def group_issues_by_directory(issues: List[ValidationIssue]) -> Dict[str, List[V
 
 def display_sync_summary(knowledge: SyncReport):
     """Display a one-line summary of sync changes."""
-    total_changes = knowledge.
+    total_changes = knowledge.total
     if total_changes == 0:
         console.print("[green]Everything up to date[/green]")
         return
@@ -121,13 +118,13 @@ def display_sync_summary(knowledge: SyncReport):
 
 def display_detailed_sync_results(knowledge: SyncReport):
     """Display detailed sync results with trees."""
-    if knowledge.
+    if knowledge.total == 0:
         console.print("\n[green]Everything up to date[/green]")
         return
 
     console.print("\n[bold]Sync Results[/bold]")
 
-    if knowledge.
+    if knowledge.total > 0:
         knowledge_tree = Tree("[bold]Knowledge Files[/bold]")
         if knowledge.new:
             created = knowledge_tree.add("[green]Created[/green]")
@@ -151,7 +148,7 @@ def display_detailed_sync_results(knowledge: SyncReport):
     console.print(knowledge_tree)
 
 
-async def run_sync(verbose: bool = False, watch: bool = False):
+async def run_sync(verbose: bool = False, watch: bool = False, console_status: bool = False):
     """Run sync operation."""
 
     sync_service = await get_sync_service()
@@ -163,7 +160,9 @@ async def run_sync(verbose: bool = False, watch: bool = False):
             file_service=sync_service.entity_service.file_service,
             config=config,
         )
-
+        # full sync
+        await sync_service.sync(config.home)
+        # watch changes
         await watch_service.run()  # pragma: no cover
     else:
         # one time sync
@@ -197,7 +196,7 @@ def sync(
 
     except Exception as e:  # pragma: no cover
         if not isinstance(e, typer.Exit):
-            logger.exception("Sync failed")
+            logger.exception("Sync failed", e)
            typer.echo(f"Error during sync: {e}", err=True)
            raise typer.Exit(1)
        raise