basic-memory 0.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of basic-memory might be problematic. Click here for more details.

Files changed (73)
  1. basic_memory/__init__.py +3 -0
  2. basic_memory/api/__init__.py +4 -0
  3. basic_memory/api/app.py +42 -0
  4. basic_memory/api/routers/__init__.py +8 -0
  5. basic_memory/api/routers/knowledge_router.py +168 -0
  6. basic_memory/api/routers/memory_router.py +123 -0
  7. basic_memory/api/routers/resource_router.py +34 -0
  8. basic_memory/api/routers/search_router.py +34 -0
  9. basic_memory/cli/__init__.py +1 -0
  10. basic_memory/cli/app.py +4 -0
  11. basic_memory/cli/commands/__init__.py +9 -0
  12. basic_memory/cli/commands/init.py +38 -0
  13. basic_memory/cli/commands/status.py +152 -0
  14. basic_memory/cli/commands/sync.py +254 -0
  15. basic_memory/cli/main.py +48 -0
  16. basic_memory/config.py +53 -0
  17. basic_memory/db.py +135 -0
  18. basic_memory/deps.py +182 -0
  19. basic_memory/file_utils.py +248 -0
  20. basic_memory/markdown/__init__.py +19 -0
  21. basic_memory/markdown/entity_parser.py +137 -0
  22. basic_memory/markdown/markdown_processor.py +153 -0
  23. basic_memory/markdown/plugins.py +236 -0
  24. basic_memory/markdown/schemas.py +73 -0
  25. basic_memory/markdown/utils.py +144 -0
  26. basic_memory/mcp/__init__.py +1 -0
  27. basic_memory/mcp/async_client.py +10 -0
  28. basic_memory/mcp/main.py +21 -0
  29. basic_memory/mcp/server.py +39 -0
  30. basic_memory/mcp/tools/__init__.py +34 -0
  31. basic_memory/mcp/tools/ai_edit.py +84 -0
  32. basic_memory/mcp/tools/knowledge.py +56 -0
  33. basic_memory/mcp/tools/memory.py +142 -0
  34. basic_memory/mcp/tools/notes.py +122 -0
  35. basic_memory/mcp/tools/search.py +28 -0
  36. basic_memory/mcp/tools/utils.py +154 -0
  37. basic_memory/models/__init__.py +12 -0
  38. basic_memory/models/base.py +9 -0
  39. basic_memory/models/knowledge.py +204 -0
  40. basic_memory/models/search.py +34 -0
  41. basic_memory/repository/__init__.py +7 -0
  42. basic_memory/repository/entity_repository.py +156 -0
  43. basic_memory/repository/observation_repository.py +40 -0
  44. basic_memory/repository/relation_repository.py +78 -0
  45. basic_memory/repository/repository.py +303 -0
  46. basic_memory/repository/search_repository.py +259 -0
  47. basic_memory/schemas/__init__.py +73 -0
  48. basic_memory/schemas/base.py +216 -0
  49. basic_memory/schemas/delete.py +38 -0
  50. basic_memory/schemas/discovery.py +25 -0
  51. basic_memory/schemas/memory.py +111 -0
  52. basic_memory/schemas/request.py +77 -0
  53. basic_memory/schemas/response.py +220 -0
  54. basic_memory/schemas/search.py +117 -0
  55. basic_memory/services/__init__.py +11 -0
  56. basic_memory/services/context_service.py +274 -0
  57. basic_memory/services/entity_service.py +281 -0
  58. basic_memory/services/exceptions.py +15 -0
  59. basic_memory/services/file_service.py +213 -0
  60. basic_memory/services/link_resolver.py +126 -0
  61. basic_memory/services/search_service.py +218 -0
  62. basic_memory/services/service.py +36 -0
  63. basic_memory/sync/__init__.py +5 -0
  64. basic_memory/sync/file_change_scanner.py +162 -0
  65. basic_memory/sync/sync_service.py +140 -0
  66. basic_memory/sync/utils.py +66 -0
  67. basic_memory/sync/watch_service.py +197 -0
  68. basic_memory/utils.py +78 -0
  69. basic_memory-0.0.0.dist-info/METADATA +71 -0
  70. basic_memory-0.0.0.dist-info/RECORD +73 -0
  71. basic_memory-0.0.0.dist-info/WHEEL +4 -0
  72. basic_memory-0.0.0.dist-info/entry_points.txt +2 -0
  73. basic_memory-0.0.0.dist-info/licenses/LICENSE +661 -0
@@ -0,0 +1,3 @@
1
"""basic-memory - Local-first knowledge management combining Zettelkasten with knowledge graphs"""

# NOTE(review): the wheel is published as 0.0.0 but __version__ says "0.0.1" —
# confirm which of the two is authoritative before release.
__version__ = "0.0.1"
@@ -0,0 +1,4 @@
1
"""Basic Memory API module."""

from .app import app

# The FastAPI application is the package's only public export.
__all__ = ["app"]
@@ -0,0 +1,42 @@
1
"""FastAPI application for basic-memory knowledge graph API."""

from contextlib import asynccontextmanager

from fastapi import FastAPI, HTTPException
from fastapi.exception_handlers import http_exception_handler
from loguru import logger

from basic_memory import db
from .routers import knowledge, search, memory, resource


@asynccontextmanager
async def lifespan(app: FastAPI):
    """Lifecycle manager for the FastAPI app."""
    logger.info("Starting Basic Memory API")
    yield
    # Runs once the server stops accepting requests.
    logger.info("Shutting down Basic Memory API")
    await db.shutdown_db()


# Initialize FastAPI app
app = FastAPI(
    title="Basic Memory API",
    description="Knowledge graph API for basic-memory",
    version="0.1.0",
    lifespan=lifespan,
)

# Register every feature router on the application.
for feature_router in (knowledge.router, search.router, memory.router, resource.router):
    app.include_router(feature_router)


@app.exception_handler(Exception)
async def exception_handler(request, exc):
    """Log any unhandled error and convert it to an HTTP 500 response."""
    logger.exception(
        f"An unhandled exception occurred for request '{request.url}', exception: {exc}"
    )
    return await http_exception_handler(request, HTTPException(status_code=500, detail=str(exc)))
@@ -0,0 +1,8 @@
1
+ """API routers."""
2
+
3
+ from . import knowledge_router as knowledge
4
+ from . import memory_router as memory
5
+ from . import resource_router as resource
6
+ from . import search_router as search
7
+
8
+ __all__ = ["knowledge", "memory", "resource", "search"]
@@ -0,0 +1,168 @@
1
+ """Router for knowledge graph operations."""
2
+
3
+ from typing import Annotated
4
+
5
+ from fastapi import APIRouter, HTTPException, BackgroundTasks, Depends, Query, Response
6
+ from loguru import logger
7
+
8
+ from basic_memory.deps import (
9
+ EntityServiceDep,
10
+ get_search_service,
11
+ SearchServiceDep,
12
+ LinkResolverDep,
13
+ )
14
+ from basic_memory.schemas import (
15
+ EntityListResponse,
16
+ EntityResponse,
17
+ DeleteEntitiesResponse,
18
+ DeleteEntitiesRequest,
19
+ )
20
+ from basic_memory.schemas.base import PathId, Entity
21
+ from basic_memory.services.exceptions import EntityNotFoundError
22
+
23
+ router = APIRouter(prefix="/knowledge", tags=["knowledge"])
24
+
25
+ ## Create endpoints
26
+
27
+
28
@router.post("/entities", response_model=EntityResponse)
async def create_entity(
    data: Entity,
    background_tasks: BackgroundTasks,
    entity_service: EntityServiceDep,
    search_service: SearchServiceDep,
) -> EntityResponse:
    """Create an entity and index it for search."""
    logger.info(f"request: create_entity with data={data}")

    created_entity = await entity_service.create_entity(data)

    # Keep the search index in sync with the new entity.
    await search_service.index_entity(created_entity, background_tasks=background_tasks)

    result = EntityResponse.model_validate(created_entity)
    logger.info(f"response: create_entity with result={result}")
    return result
46
+
47
+
48
@router.put("/entities/{permalink:path}", response_model=EntityResponse)
async def create_or_update_entity(
    permalink: PathId,
    data: Entity,
    response: Response,
    background_tasks: BackgroundTasks,
    entity_service: EntityServiceDep,
    search_service: SearchServiceDep,
) -> EntityResponse:
    """Create or update an entity. If entity exists, it will be updated, otherwise created."""
    logger.info(f"request: create_or_update_entity with permalink={permalink}, data={data}")

    # The permalink in the body must agree with the one in the URL path.
    if data.permalink != permalink:
        raise HTTPException(status_code=400, detail="Entity permalink must match URL path")

    entity, created = await entity_service.create_or_update_entity(data)
    # 201 signals a newly created entity, 200 an update to an existing one.
    response.status_code = 201 if created else 200

    # Refresh the search index for the upserted entity.
    await search_service.index_entity(entity, background_tasks=background_tasks)

    result = EntityResponse.model_validate(entity)
    logger.info(f"response: create_or_update_entity with result={result}, status_code={response.status_code}")
    return result
74
+
75
+
76
+ ## Read endpoints
77
+
78
+
79
@router.get("/entities/{permalink:path}", response_model=EntityResponse)
async def get_entity(
    entity_service: EntityServiceDep,
    permalink: str,
) -> EntityResponse:
    """Get a specific entity by permalink.

    Args:
        entity_service: EntityService dependency.
        permalink: Entity path ID.

    Raises:
        HTTPException: 404 when no entity matches the permalink.
    """
    logger.info(f"request: get_entity with permalink={permalink}")
    # Keep the try body minimal: only the lookup can raise EntityNotFoundError.
    try:
        entity = await entity_service.get_by_permalink(permalink)
    except EntityNotFoundError:
        logger.error(f"Error: Entity with {permalink} not found")
        raise HTTPException(status_code=404, detail=f"Entity with {permalink} not found")

    result = EntityResponse.model_validate(entity)
    logger.info(f"response: get_entity with result={result}")
    return result
101
+
102
+
103
@router.get("/entities", response_model=EntityListResponse)
async def get_entities(
    entity_service: EntityServiceDep,
    permalink: Annotated[list[str] | None, Query()] = None,
) -> EntityListResponse:
    """Open specific entities"""
    logger.info(f"request: get_entities with permalinks={permalink}")

    found = await entity_service.get_entities_by_permalinks(permalink)
    result = EntityListResponse(
        entities=[EntityResponse.model_validate(item) for item in found]
    )

    logger.info(f"response: get_entities with result={result}")
    return result
118
+
119
+
120
+ ## Delete endpoints
121
+
122
+
123
@router.delete("/entities/{identifier:path}", response_model=DeleteEntitiesResponse)
async def delete_entity(
    identifier: str,
    background_tasks: BackgroundTasks,
    entity_service: EntityServiceDep,
    link_resolver: LinkResolverDep,
    search_service: SearchServiceDep,
) -> DeleteEntitiesResponse:
    """Delete a single entity and remove from search index.

    Args:
        identifier: Entity name or permalink to resolve and delete.

    Returns:
        DeleteEntitiesResponse with deleted=False when nothing matched.
    """
    logger.info(f"request: delete_entity with identifier={identifier}")

    # Resolve the identifier (name or permalink) to a concrete entity.
    entity = await link_resolver.resolve_link(identifier)
    if entity is None:
        logger.info("response: delete_entity with result=DeleteEntitiesResponse(deleted=False)")
        return DeleteEntitiesResponse(deleted=False)

    # Delete the entity
    deleted = await entity_service.delete_entity(entity.permalink)

    # Remove from search index after the response is sent.
    background_tasks.add_task(search_service.delete_by_permalink, entity.permalink)

    result = DeleteEntitiesResponse(deleted=deleted)
    logger.info(f"response: delete_entity with result={result}")
    return result
148
+
149
+
150
@router.post("/entities/delete", response_model=DeleteEntitiesResponse)
async def delete_entities(
    data: DeleteEntitiesRequest,
    background_tasks: BackgroundTasks,
    entity_service: EntityServiceDep,
    search_service: SearchServiceDep,
) -> DeleteEntitiesResponse:
    """Delete entities and remove from search index.

    Returns:
        DeleteEntitiesResponse with deleted=True when at least one entity
        was deleted (previously only the last permalink's outcome was
        reported, hiding earlier successful deletions).
    """
    logger.info(f"request: delete_entities with data={data}")
    deleted = False

    # Delete each entity and schedule its removal from the search index.
    for permalink in data.permalinks:
        was_deleted = await entity_service.delete_entity(permalink)
        deleted = deleted or was_deleted
        background_tasks.add_task(search_service.delete_by_permalink, permalink)

    result = DeleteEntitiesResponse(deleted=deleted)
    logger.info(f"response: delete_entities with result={result}")
    return result
@@ -0,0 +1,123 @@
1
+ """Routes for memory:// URI operations."""
2
+
3
+ from datetime import datetime, timedelta
4
+ from typing import Optional, List, Annotated
5
+
6
+ from dateparser import parse
7
+ from fastapi import APIRouter, Query
8
+ from loguru import logger
9
+
10
+ from basic_memory.config import config
11
+ from basic_memory.deps import ContextServiceDep, EntityRepositoryDep
12
+ from basic_memory.repository import EntityRepository
13
+ from basic_memory.repository.search_repository import SearchIndexRow
14
+ from basic_memory.schemas.base import TimeFrame
15
+ from basic_memory.schemas.memory import (
16
+ GraphContext,
17
+ RelationSummary,
18
+ EntitySummary,
19
+ ObservationSummary,
20
+ MemoryMetadata, normalize_memory_url,
21
+ )
22
+ from basic_memory.schemas.search import SearchItemType
23
+ from basic_memory.services.context_service import ContextResultRow
24
+
25
+ router = APIRouter(prefix="/memory", tags=["memory"])
26
+
27
+
28
+
29
async def to_graph_context(context, entity_repository: EntityRepository):
    """Convert raw context rows into a GraphContext response."""

    async def summarize(item: SearchIndexRow | ContextResultRow):
        # Map each indexed row onto the matching summary schema.
        if item.type == SearchItemType.ENTITY:
            return EntitySummary(
                title=item.title,
                permalink=item.permalink,
                file_path=item.file_path,
                created_at=item.created_at,
            )
        if item.type == SearchItemType.OBSERVATION:
            return ObservationSummary(
                category=item.category, content=item.content, permalink=item.permalink
            )
        if item.type == SearchItemType.RELATION:
            # Relations reference entities by id; resolve both ends.
            from_entity = await entity_repository.find_by_id(item.from_id)
            to_entity = await entity_repository.find_by_id(item.to_id)
            return RelationSummary(
                permalink=item.permalink,
                type=item.type,
                from_id=from_entity.permalink,
                to_id=to_entity.permalink if to_entity else None,
            )
        # NOTE(review): unknown item types silently become None here, matching
        # the original match statement's fall-through — confirm this is intended.

    primary_results = [await summarize(row) for row in context["primary_results"]]
    related_results = [await summarize(row) for row in context["related_results"]]
    metadata = MemoryMetadata.model_validate(context["metadata"])

    return GraphContext(
        primary_results=primary_results, related_results=related_results, metadata=metadata
    )
62
+
63
+
64
+
65
@router.get("/recent", response_model=GraphContext)
async def recent(
    context_service: ContextServiceDep,
    entity_repository: EntityRepositoryDep,
    type: Annotated[list[SearchItemType] | None, Query()] = None,
    depth: int = 1,
    timeframe: TimeFrame = "7d",
    max_results: int = 10,
) -> GraphContext:
    """Return recently changed items as graph context."""
    # Fall back to every item type when the caller does not filter.
    if type:
        types = type
    else:
        types = [SearchItemType.ENTITY, SearchItemType.RELATION, SearchItemType.OBSERVATION]

    logger.debug(
        f"Getting recent context: `{types}` depth: `{depth}` timeframe: `{timeframe}` max_results: `{max_results}`"
    )

    # Parse the human-readable timeframe (e.g. "7d") into a datetime.
    since = parse(timeframe)

    context = await context_service.build_context(
        types=types, depth=depth, since=since, max_results=max_results
    )
    return await to_graph_context(context, entity_repository=entity_repository)
92
+
93
+
94
+ # get_memory_context needs to be declared last so other paths can match
95
+
96
@router.get("/{uri:path}", response_model=GraphContext)
async def get_memory_context(
    context_service: ContextServiceDep,
    entity_repository: EntityRepositoryDep,
    uri: str,
    depth: int = 1,
    timeframe: TimeFrame = "7d",
    max_results: int = 10,
) -> GraphContext:
    """Get rich context from memory:// URI."""
    logger.debug(
        f"Getting context for URI: `{uri}` depth: `{depth}` timeframe: `{timeframe}` max_results: `{max_results}`"
    )

    # Canonicalize the raw path into a memory:// URL before lookup.
    memory_url = normalize_memory_url(uri)

    # Parse the human-readable timeframe (e.g. "7d") into a datetime.
    since = parse(timeframe)

    context = await context_service.build_context(
        memory_url, depth=depth, since=since, max_results=max_results
    )
    return await to_graph_context(context, entity_repository=entity_repository)
121
+
122
+
123
+
@@ -0,0 +1,34 @@
1
+ """Routes for getting entity content."""
2
+
3
+ from pathlib import Path
4
+
5
+ from fastapi import APIRouter, HTTPException
6
+ from fastapi.responses import FileResponse
7
+ from loguru import logger
8
+
9
+ from basic_memory.deps import ProjectConfigDep, LinkResolverDep
10
+
11
+ router = APIRouter(prefix="/resource", tags=["resources"])
12
+
13
+
14
@router.get("/{identifier:path}")
async def get_resource_content(
    config: ProjectConfigDep,
    link_resolver: LinkResolverDep,
    identifier: str,
) -> FileResponse:
    """Get resource content by identifier: name or permalink.

    Raises:
        HTTPException: 404 when the entity or its backing file is missing.
    """
    logger.debug(f"Getting content for permalink: {identifier}")

    # Find entity by permalink
    entity = await link_resolver.resolve_link(identifier)
    if not entity:
        raise HTTPException(status_code=404, detail=f"Entity not found: {identifier}")

    # Join with pathlib rather than string formatting so separators are
    # handled portably.
    file_path = Path(config.home) / entity.file_path
    if not file_path.exists():
        raise HTTPException(
            status_code=404,
            detail=f"File not found: {file_path}",
        )
    return FileResponse(path=file_path)
@@ -0,0 +1,34 @@
1
+ """Router for search operations."""
2
+ from dataclasses import asdict
3
+
4
+ from fastapi import APIRouter, Depends, BackgroundTasks
5
+ from typing import List
6
+
7
+ from loguru import logger
8
+ from basic_memory.services.search_service import SearchService
9
+ from basic_memory.schemas.search import SearchQuery, SearchResult, SearchResponse
10
+ from basic_memory.deps import get_search_service
11
+
12
+ router = APIRouter(prefix="/search", tags=["search"])
13
+
14
@router.post("/", response_model=SearchResponse)
async def search(
    query: SearchQuery,
    search_service: SearchService = Depends(get_search_service)
):
    """Search across all knowledge and documents."""
    rows = await search_service.search(query)
    # Search rows are dataclasses; convert them to the response schema.
    return SearchResponse(
        results=[SearchResult.model_validate(asdict(row)) for row in rows]
    )
23
+
24
@router.post("/reindex")
async def reindex(
    background_tasks: BackgroundTasks,
    search_service: SearchService = Depends(get_search_service)
):
    """Recreate and populate the search index."""
    # Heavy lifting runs via background tasks; respond immediately.
    await search_service.reindex_all(background_tasks=background_tasks)
    return {"status": "ok", "message": "Reindex initiated"}
@@ -0,0 +1 @@
1
+ """CLI tools for basic-memory"""
@@ -0,0 +1,4 @@
1
import typer

# Shared Typer application; CLI command modules register onto this instance.
app = typer.Typer()
4
+
@@ -0,0 +1,9 @@
1
+ """CLI commands package."""
2
+
3
+ from . import init, status
4
+
5
+
6
+ __all__ = [
7
+ "init",
8
+ "status",
9
+ ]
@@ -0,0 +1,38 @@
1
+ """Initialize command for basic-memory CLI."""
2
+
3
+ import asyncio
4
+ from pathlib import Path
5
+
6
+ import typer
7
+ from loguru import logger
8
+
9
+ from basic_memory.cli.app import app
10
+ from basic_memory.db import engine_session_factory, DatabaseType
11
+ from basic_memory.config import config
12
+
13
+
14
async def _init(force: bool = False):
    """Initialize the database.

    Args:
        force: When True, reinitialize even if the database file exists.
    """
    db_path = config.database_path

    # Refuse to clobber an existing database unless explicitly forced.
    if db_path.exists() and not force:
        typer.echo(f"Database already exists at {db_path}. Use --force to reinitialize.")
        raise typer.Exit(1)

    # Create the data directory if needed.
    db_path.parent.mkdir(parents=True, exist_ok=True)

    try:
        async with engine_session_factory(db_path, db_type=DatabaseType.FILESYSTEM, init=True):
            typer.echo(f"Initialized database at {db_path}")
    except Exception as e:
        # Surface the failure both in the log and on the console, then exit
        # with a non-zero status.
        logger.error(f"Error initializing database: {e}")
        typer.echo(f"Error initializing database: {e}")
        raise typer.Exit(1)
32
+
33
@app.command()
def init(
    force: bool = typer.Option(False, "--force", "-f", help="Force reinitialization if database exists")
):
    """Initialize a new basic-memory database."""
    # Delegate to the async initializer on a fresh event loop.
    asyncio.run(_init(force))
@@ -0,0 +1,152 @@
1
+ """Status command for basic-memory CLI."""
2
+
3
+ import asyncio
4
+ from typing import Set, Dict
5
+
6
+ import typer
7
+ from loguru import logger
8
+ from rich.console import Console
9
+ from rich.panel import Panel
10
+ from rich.tree import Tree
11
+
12
+ from basic_memory import db
13
+ from basic_memory.cli.app import app
14
+ from basic_memory.config import config
15
+ from basic_memory.db import DatabaseType
16
+ from basic_memory.repository import EntityRepository
17
+ from basic_memory.sync import FileChangeScanner
18
+ from basic_memory.sync.utils import SyncReport
19
+
20
+ # Create rich console
21
+ console = Console()
22
+
23
+
24
async def get_file_change_scanner(db_type=DatabaseType.FILESYSTEM) -> FileChangeScanner:
    """Build a FileChangeScanner wired to the configured database."""
    # NOTE(review): the engine/session context manager exits before the scanner
    # is returned — confirm the scanner does not need the engine kept alive.
    async with db.engine_session_factory(db_path=config.database_path, db_type=db_type) as (
        engine,
        session_maker,
    ):
        return FileChangeScanner(EntityRepository(session_maker))
33
+
34
+
35
def add_files_to_tree(
    tree: Tree, paths: Set[str], style: str, checksums: Dict[str, str] | None = None
):
    """Add files to tree, grouped by directory.

    Args:
        tree: Rich tree node that file entries are attached to.
        paths: File paths (with "/" separators) to display.
        style: Rich style name applied to each file entry.
        checksums: Optional path -> checksum map; when a path is present the
            first 8 checksum characters are shown next to the file name.
    """
    # Group by top-level directory; paths without a "/" go under the root.
    by_dir: Dict[str, list] = {}
    for path in sorted(paths):
        parts = path.split("/", 1)
        dir_name = parts[0] if len(parts) > 1 else ""
        file_name = parts[1] if len(parts) > 1 else parts[0]
        by_dir.setdefault(dir_name, []).append((file_name, path))

    # Add to tree
    for dir_name, files in sorted(by_dir.items()):
        branch = tree.add(f"[bold]{dir_name}/[/bold]") if dir_name else tree
        for file_name, full_path in sorted(files):
            if checksums and full_path in checksums:
                checksum_short = checksums[full_path][:8]
                branch.add(f"[{style}]{file_name}[/{style}] ({checksum_short})")
            else:
                branch.add(f"[{style}]{file_name}[/{style}]")
58
+
59
+
60
def group_changes_by_directory(changes: SyncReport) -> Dict[str, Dict[str, int]]:
    """Group changes by directory for summary view."""
    by_dir: Dict[str, Dict[str, int]] = {}

    def bucket(dir_name: str) -> Dict[str, int]:
        # Every directory gets the full set of counters, initialized to zero.
        return by_dir.setdefault(dir_name, {"new": 0, "modified": 0, "deleted": 0, "moved": 0})

    for change_type, paths in (
        ("new", changes.new),
        ("modified", changes.modified),
        ("deleted", changes.deleted),
    ):
        for path in paths:
            bucket(path.split("/", 1)[0])[change_type] += 1

    # A move counts in both its source and destination directories (once when
    # they are the same directory).
    for old_path, new_path in changes.moves.items():
        old_dir = old_path.split("/", 1)[0]
        new_dir = new_path.split("/", 1)[0]
        bucket(old_dir)["moved"] += 1
        if new_dir != old_dir:
            bucket(new_dir)["moved"] += 1

    return by_dir
84
+
85
+
86
def build_directory_summary(counts: Dict[str, int]) -> str:
    """Build summary string for directory changes."""
    # One colored fragment per non-zero counter, in a fixed display order.
    fragments = (
        (counts["new"], f"[green]+{counts['new']} new[/green]"),
        (counts["modified"], f"[yellow]~{counts['modified']} modified[/yellow]"),
        (counts["moved"], f"[blue]↔{counts['moved']} moved[/blue]"),
        (counts["deleted"], f"[red]-{counts['deleted']} deleted[/red]"),
    )
    return " ".join(text for count, text in fragments if count)
98
+
99
+
100
def display_changes(title: str, changes: SyncReport, verbose: bool = False):
    """Display changes using Rich for better visualization."""
    tree = Tree(title)

    # Nothing to report — print a placeholder node and stop.
    if changes.total_changes == 0:
        tree.add("No changes")
        console.print(Panel(tree, expand=False))
        return

    if not verbose:
        # Compact mode: one line per directory with aggregate counts.
        for dir_name, counts in sorted(group_changes_by_directory(changes).items()):
            tree.add(f"[bold]{dir_name}/[/bold] {build_directory_summary(counts)}")
        console.print(Panel(tree, expand=False))
        return

    # Verbose mode: full file listing with checksums.
    if changes.new:
        add_files_to_tree(tree.add("[green]New Files[/green]"), changes.new, "green", changes.checksums)
    if changes.modified:
        add_files_to_tree(tree.add("[yellow]Modified[/yellow]"), changes.modified, "yellow", changes.checksums)
    if changes.moves:
        move_branch = tree.add("[blue]Moved[/blue]")
        for old_path, new_path in sorted(changes.moves.items()):
            move_branch.add(f"[blue]{old_path}[/blue] → [blue]{new_path}[/blue]")
    if changes.deleted:
        add_files_to_tree(tree.add("[red]Deleted[/red]"), changes.deleted, "red")

    console.print(Panel(tree, expand=False))
132
+
133
+
134
async def run_status(sync_service: FileChangeScanner, verbose: bool = False):
    """Check sync status of files vs database."""
    # Compare the knowledge directory on disk against the database state.
    report = await sync_service.find_knowledge_changes(config.home)
    display_changes("Knowledge Files", report, verbose)
139
+
140
+
141
@app.command()
def status(
    verbose: bool = typer.Option(False, "--verbose", "-v", help="Show detailed file information"),
):
    """Show sync status between files and database."""

    async def _status() -> None:
        # Build the scanner and run the check inside a single event loop —
        # previously two separate asyncio.run() calls created the database
        # session on one loop and used it on another.
        scanner = await get_file_change_scanner()
        await run_status(scanner, verbose)

    try:
        asyncio.run(_status())
    except Exception as e:
        logger.exception(f"Error checking status: {e}")
        typer.echo(f"Error checking status: {e}", err=True)
        raise typer.Exit(1)