basic-memory 0.7.0-py3-none-any.whl → 0.9.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of basic-memory might be problematic.

Files changed (89)
  1. basic_memory/__init__.py +1 -1
  2. basic_memory/alembic/alembic.ini +119 -0
  3. basic_memory/alembic/env.py +23 -1
  4. basic_memory/alembic/migrations.py +4 -9
  5. basic_memory/alembic/versions/502b60eaa905_remove_required_from_entity_permalink.py +51 -0
  6. basic_memory/alembic/versions/b3c3938bacdb_relation_to_name_unique_index.py +44 -0
  7. basic_memory/alembic/versions/cc7172b46608_update_search_index_schema.py +106 -0
  8. basic_memory/api/app.py +9 -10
  9. basic_memory/api/routers/__init__.py +2 -1
  10. basic_memory/api/routers/knowledge_router.py +31 -5
  11. basic_memory/api/routers/memory_router.py +18 -17
  12. basic_memory/api/routers/project_info_router.py +275 -0
  13. basic_memory/api/routers/resource_router.py +105 -4
  14. basic_memory/api/routers/search_router.py +22 -4
  15. basic_memory/cli/app.py +54 -5
  16. basic_memory/cli/commands/__init__.py +15 -2
  17. basic_memory/cli/commands/db.py +9 -13
  18. basic_memory/cli/commands/import_chatgpt.py +26 -30
  19. basic_memory/cli/commands/import_claude_conversations.py +27 -29
  20. basic_memory/cli/commands/import_claude_projects.py +29 -31
  21. basic_memory/cli/commands/import_memory_json.py +26 -28
  22. basic_memory/cli/commands/mcp.py +7 -1
  23. basic_memory/cli/commands/project.py +119 -0
  24. basic_memory/cli/commands/project_info.py +167 -0
  25. basic_memory/cli/commands/status.py +14 -28
  26. basic_memory/cli/commands/sync.py +63 -22
  27. basic_memory/cli/commands/tool.py +253 -0
  28. basic_memory/cli/main.py +39 -1
  29. basic_memory/config.py +166 -4
  30. basic_memory/db.py +19 -4
  31. basic_memory/deps.py +10 -3
  32. basic_memory/file_utils.py +37 -19
  33. basic_memory/markdown/entity_parser.py +3 -3
  34. basic_memory/markdown/utils.py +5 -0
  35. basic_memory/mcp/async_client.py +1 -1
  36. basic_memory/mcp/main.py +24 -0
  37. basic_memory/mcp/prompts/__init__.py +19 -0
  38. basic_memory/mcp/prompts/ai_assistant_guide.py +26 -0
  39. basic_memory/mcp/prompts/continue_conversation.py +111 -0
  40. basic_memory/mcp/prompts/recent_activity.py +88 -0
  41. basic_memory/mcp/prompts/search.py +182 -0
  42. basic_memory/mcp/prompts/utils.py +155 -0
  43. basic_memory/mcp/server.py +2 -6
  44. basic_memory/mcp/tools/__init__.py +12 -21
  45. basic_memory/mcp/tools/build_context.py +85 -0
  46. basic_memory/mcp/tools/canvas.py +97 -0
  47. basic_memory/mcp/tools/delete_note.py +28 -0
  48. basic_memory/mcp/tools/project_info.py +51 -0
  49. basic_memory/mcp/tools/read_content.py +229 -0
  50. basic_memory/mcp/tools/read_note.py +190 -0
  51. basic_memory/mcp/tools/recent_activity.py +100 -0
  52. basic_memory/mcp/tools/search.py +56 -17
  53. basic_memory/mcp/tools/utils.py +245 -16
  54. basic_memory/mcp/tools/write_note.py +124 -0
  55. basic_memory/models/knowledge.py +27 -11
  56. basic_memory/models/search.py +2 -1
  57. basic_memory/repository/entity_repository.py +3 -2
  58. basic_memory/repository/project_info_repository.py +9 -0
  59. basic_memory/repository/repository.py +24 -7
  60. basic_memory/repository/search_repository.py +47 -14
  61. basic_memory/schemas/__init__.py +10 -9
  62. basic_memory/schemas/base.py +4 -1
  63. basic_memory/schemas/memory.py +14 -4
  64. basic_memory/schemas/project_info.py +96 -0
  65. basic_memory/schemas/search.py +29 -33
  66. basic_memory/services/context_service.py +3 -3
  67. basic_memory/services/entity_service.py +26 -13
  68. basic_memory/services/file_service.py +145 -26
  69. basic_memory/services/link_resolver.py +9 -46
  70. basic_memory/services/search_service.py +95 -22
  71. basic_memory/sync/__init__.py +3 -2
  72. basic_memory/sync/sync_service.py +523 -117
  73. basic_memory/sync/watch_service.py +258 -132
  74. basic_memory/utils.py +51 -36
  75. basic_memory-0.9.0.dist-info/METADATA +736 -0
  76. basic_memory-0.9.0.dist-info/RECORD +99 -0
  77. basic_memory/alembic/README +0 -1
  78. basic_memory/cli/commands/tools.py +0 -157
  79. basic_memory/mcp/tools/knowledge.py +0 -68
  80. basic_memory/mcp/tools/memory.py +0 -170
  81. basic_memory/mcp/tools/notes.py +0 -202
  82. basic_memory/schemas/discovery.py +0 -28
  83. basic_memory/sync/file_change_scanner.py +0 -158
  84. basic_memory/sync/utils.py +0 -31
  85. basic_memory-0.7.0.dist-info/METADATA +0 -378
  86. basic_memory-0.7.0.dist-info/RECORD +0 -82
  87. {basic_memory-0.7.0.dist-info → basic_memory-0.9.0.dist-info}/WHEEL +0 -0
  88. {basic_memory-0.7.0.dist-info → basic_memory-0.9.0.dist-info}/entry_points.txt +0 -0
  89. {basic_memory-0.7.0.dist-info → basic_memory-0.9.0.dist-info}/licenses/LICENSE +0 -0

basic_memory/mcp/tools/notes.py
@@ -1,202 +0,0 @@
-"""Note management tools for Basic Memory MCP server.
-
-These tools provide a natural interface for working with markdown notes
-while leveraging the underlying knowledge graph structure.
-"""
-
-from typing import Optional, List
-
-from loguru import logger
-import logfire
-
-from basic_memory.mcp.server import mcp
-from basic_memory.mcp.async_client import client
-from basic_memory.schemas import EntityResponse, DeleteEntitiesResponse
-from basic_memory.schemas.base import Entity
-from basic_memory.mcp.tools.utils import call_get, call_put, call_delete
-from basic_memory.schemas.memory import memory_url_path
-
-
-@mcp.tool(
-    description="Create or update a markdown note. Returns a markdown formatted summary of the semantic content.",
-)
-async def write_note(
-    title: str,
-    content: str,
-    folder: str,
-    tags: Optional[List[str]] = None,
-) -> str:
-    """Write a markdown note to the knowledge base.
-
-    The content can include semantic observations and relations using markdown syntax.
-    Relations can be specified either explicitly or through inline wiki-style links:
-
-    Observations format:
-    `- [category] Observation text #tag1 #tag2 (optional context)`
-
-    Examples:
-    `- [design] Files are the source of truth #architecture (All state comes from files)`
-    `- [tech] Using SQLite for storage #implementation`
-    `- [note] Need to add error handling #todo`
-
-    Relations format:
-    - Explicit: `- relation_type [[Entity]] (optional context)`
-    - Inline: Any `[[Entity]]` reference creates a relation
-
-    Examples:
-    `- depends_on [[Content Parser]] (Need for semantic extraction)`
-    `- implements [[Search Spec]] (Initial implementation)`
-    `- This feature extends [[Base Design]] and uses [[Core Utils]]`
-
-    Args:
-        title: The title of the note
-        content: Markdown content for the note, can include observations and relations
-        folder: the folder where the file should be saved
-        tags: Optional list of tags to categorize the note
-
-    Returns:
-        A markdown formatted summary of the semantic content, including:
-        - Creation/update status
-        - File path and checksum
-        - Observation counts by category
-        - Relation counts (resolved/unresolved)
-        - Tags if present
-    """
-    with logfire.span("Writing note", title=title, folder=folder):  # pyright: ignore [reportGeneralTypeIssues]
-        logger.info(f"Writing note folder:'{folder}' title: '{title}'")
-
-        # Create the entity request
-        metadata = {"tags": [f"#{tag}" for tag in tags]} if tags else None
-        entity = Entity(
-            title=title,
-            folder=folder,
-            entity_type="note",
-            content_type="text/markdown",
-            content=content,
-            entity_metadata=metadata,
-        )
-
-        # Create or update via knowledge API
-        logger.info(f"Creating {entity.permalink}")
-        url = f"/knowledge/entities/{entity.permalink}"
-        response = await call_put(client, url, json=entity.model_dump())
-        result = EntityResponse.model_validate(response.json())
-
-        # Format semantic summary based on status code
-        action = "Created" if response.status_code == 201 else "Updated"
-        assert result.checksum is not None
-        summary = [
-            f"# {action} {result.file_path} ({result.checksum[:8]})",
-            f"permalink: {result.permalink}",
-        ]
-
-        if result.observations:
-            categories = {}
-            for obs in result.observations:
-                categories[obs.category] = categories.get(obs.category, 0) + 1
-
-            summary.append("\n## Observations")
-            for category, count in sorted(categories.items()):
-                summary.append(f"- {category}: {count}")
-
-        if result.relations:
-            unresolved = sum(1 for r in result.relations if not r.to_id)
-            resolved = len(result.relations) - unresolved
-
-            summary.append("\n## Relations")
-            summary.append(f"- Resolved: {resolved}")
-            if unresolved:
-                summary.append(f"- Unresolved: {unresolved}")
-                summary.append("\nUnresolved relations will be retried on next sync.")
-
-        if tags:
-            summary.append(f"\n## Tags\n- {', '.join(tags)}")
-
-        return "\n".join(summary)
-
-
-@mcp.tool(description="Read note content by title, permalink, relation, or pattern")
-async def read_note(identifier: str, page: int = 1, page_size: int = 10) -> str:
-    """Get note content in unified diff format.
-
-    The content is returned in a unified diff inspired format:
-    ```
-    --- memory://docs/example 2025-01-31T19:32:49 7d9f1c8b
-    <document content>
-    ```
-
-    Multiple documents (from relations or pattern matches) are separated by
-    additional headers.
-
-    Args:
-        identifier: Can be one of:
-            - Note title ("Project Planning")
-            - Note permalink ("docs/example")
-            - Relation path ("docs/example/depends-on/other-doc")
-            - Pattern match ("docs/*-architecture")
-        page: the page number of results to return (default 1)
-        page_size: the number of results to return per page (default 10)
-
-    Returns:
-        Document content in unified diff format. For single documents, returns
-        just that document's content. For relations or pattern matches, returns
-        multiple documents separated by unified diff headers.
-
-    Examples:
-        # Single document
-        content = await read_note("Project Planning")
-
-        # Read by permalink
-        content = await read_note("docs/architecture/file-first")
-
-        # Follow relation
-        content = await read_note("docs/architecture/depends-on/docs/content-parser")
-
-        # Pattern matching
-        content = await read_note("docs/*-architecture")  # All architecture docs
-        content = await read_note("docs/*/implements/*")  # Find implementations
-
-    Output format:
-        ```
-        --- memory://docs/example 2025-01-31T19:32:49 7d9f1c8b
-        <first document content>
-
-        --- memory://docs/other 2025-01-30T15:45:22 a1b2c3d4
-        <second document content>
-        ```
-
-    The headers include:
-    Full memory:// URI for the document
-    Last modified timestamp
-    Content checksum
-    """
-    with logfire.span("Reading note", identifier=identifier):  # pyright: ignore [reportGeneralTypeIssues]
-        logger.info(f"Reading note {identifier}")
-        url = memory_url_path(identifier)
-        response = await call_get(
-            client, f"/resource/{url}", params={"page": page, "page_size": page_size}
-        )
-        return response.text
-
-
-@mcp.tool(description="Delete a note by title or permalink")
-async def delete_note(identifier: str) -> bool:
-    """Delete a note from the knowledge base.
-
-    Args:
-        identifier: Note title or permalink
-
-    Returns:
-        True if note was deleted, False otherwise
-
-    Examples:
-        # Delete by title
-        delete_note("Meeting Notes: Project Planning")
-
-        # Delete by permalink
-        delete_note("notes/project-planning")
-    """
-    with logfire.span("Deleting note", identifier=identifier):  # pyright: ignore [reportGeneralTypeIssues]
-        response = await call_delete(client, f"/knowledge/entities/{identifier}")
-        result = DeleteEntitiesResponse.model_validate(response.json())
-        return result.deleted
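
The write_note docstring above spells out the observation and relation syntax the tool expected. Below is a rough sketch of a call exercising that syntax; the title, folder, tags, and note body are invented for illustration, and in 0.9.0 a tool of the same name appears to move to basic_memory/mcp/tools/write_note.py (see the files-changed list above).

```python
# Hypothetical call to the removed write_note tool; all values are invented.
# Observations use "- [category] text #tag (context)"; [[WikiLinks]] create relations.
note_body = """\
- [design] Files are the source of truth #architecture (All state comes from files)
- [tech] Using SQLite for storage #implementation
- depends_on [[Content Parser]] (Need for semantic extraction)
- This note also references [[Search Spec]] inline.
"""

# Inside an async context (the MCP server runs the tool as a coroutine):
summary = await write_note(
    title="Search Design",            # invented title
    content=note_body,
    folder="docs",                    # invented folder
    tags=["architecture", "search"],  # stored as "#architecture", "#search" in metadata
)
print(summary)  # markdown summary: file path, checksum, observation and relation counts
```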

basic_memory/schemas/discovery.py
@@ -1,28 +0,0 @@
-"""Schemas for knowledge discovery and analytics endpoints."""
-
-from typing import List, Optional
-from pydantic import BaseModel, Field
-
-from basic_memory.schemas.response import EntityResponse
-
-
-class EntityTypeList(BaseModel):
-    """List of unique entity types in the system."""
-
-    types: List[str]
-
-
-class ObservationCategoryList(BaseModel):
-    """List of unique observation categories in the system."""
-
-    categories: List[str]
-
-
-class TypedEntityList(BaseModel):
-    """List of entities of a specific type."""
-
-    entity_type: str = Field(..., description="Type of entities in the list")
-    entities: List[EntityResponse]
-    total: int = Field(..., description="Total number of entities")
-    sort_by: Optional[str] = Field(None, description="Field used for sorting")
-    include_related: bool = Field(False, description="Whether related entities are included")
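
For reference, a tiny usage sketch of the removed discovery schemas; the values are invented, and pydantic v2 methods are assumed since the rest of the package uses model_validate and model_dump.

```python
# Invented example data for the removed discovery schemas.
types = EntityTypeList(types=["note", "person", "project"])
categories = ObservationCategoryList(categories=["design", "tech", "todo"])

print(types.model_dump())       # {'types': ['note', 'person', 'project']}
print(categories.model_dump())  # {'categories': ['design', 'tech', 'todo']}
```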

basic_memory/sync/file_change_scanner.py
@@ -1,158 +0,0 @@
-"""Service for detecting changes between filesystem and database."""
-
-from dataclasses import dataclass, field
-from pathlib import Path
-from typing import Dict, Sequence
-
-from loguru import logger
-
-from basic_memory.file_utils import compute_checksum
-from basic_memory.models import Entity
-from basic_memory.repository.entity_repository import EntityRepository
-from basic_memory.sync.utils import SyncReport
-
-
-@dataclass
-class FileState:
-    """State of a file including file path, permalink and checksum info."""
-
-    file_path: str
-    permalink: str
-    checksum: str
-
-
-@dataclass
-class ScanResult:
-    """Result of scanning a directory."""
-
-    # file_path -> checksum
-    files: Dict[str, str] = field(default_factory=dict)
-    # file_path -> error message
-    errors: Dict[str, str] = field(default_factory=dict)
-
-
-class FileChangeScanner:
-    """
-    Service for detecting changes between filesystem and database.
-    The filesystem is treated as the source of truth.
-    """
-
-    def __init__(self, entity_repository: EntityRepository):
-        self.entity_repository = entity_repository
-
-    async def scan_directory(self, directory: Path) -> ScanResult:
-        """
-        Scan directory for markdown files and their checksums.
-        Only processes .md files, logs and skips others.
-
-        Args:
-            directory: Directory to scan
-
-        Returns:
-            ScanResult containing found files and any errors
-        """
-        logger.debug(f"Scanning directory: {directory}")
-        result = ScanResult()
-
-        if not directory.exists():
-            logger.debug(f"Directory does not exist: {directory}")
-            return result
-
-        for path in directory.rglob("*"):
-            if not path.is_file() or not path.name.endswith(".md"):
-                if path.is_file():
-                    logger.debug(f"Skipping non-markdown file: {path}")
-                continue
-
-            try:
-                # Get relative path first - used in error reporting if needed
-                rel_path = str(path.relative_to(directory))
-                content = path.read_text()
-                checksum = await compute_checksum(content)
-                result.files[rel_path] = checksum
-
-            except Exception as e:
-                rel_path = str(path.relative_to(directory))
-                result.errors[rel_path] = str(e)
-                logger.error(f"Failed to read {rel_path}: {e}")
-
-        logger.debug(f"Found {len(result.files)} markdown files")
-        if result.errors:
-            logger.warning(f"Encountered {len(result.errors)} errors while scanning")
-
-        return result
-
-    async def find_changes(
-        self, directory: Path, db_file_state: Dict[str, FileState]
-    ) -> SyncReport:
-        """Find changes between filesystem and database."""
-        # Get current files and checksums
-        scan_result = await self.scan_directory(directory)
-        current_files = scan_result.files
-
-        # Build report
-        report = SyncReport(total=len(current_files))
-
-        # Track potentially moved files by checksum
-        files_by_checksum = {}  # checksum -> file_path
-
-        # First find potential new files and record checksums
-        for file_path, checksum in current_files.items():
-            logger.debug(f"{file_path} ({checksum[:8]})")
-
-            if file_path not in db_file_state:
-                # Could be new or could be the destination of a move
-                report.new.add(file_path)
-                files_by_checksum[checksum] = file_path
-            elif checksum != db_file_state[file_path].checksum:
-                report.modified.add(file_path)
-
-            report.checksums[file_path] = checksum
-
-        # Now detect moves and deletions
-        for db_file_path, db_state in db_file_state.items():
-            if db_file_path not in current_files:
-                if db_state.checksum in files_by_checksum:
-                    # Found a move - file exists at new path with same checksum
-                    new_path = files_by_checksum[db_state.checksum]
-                    report.moves[db_file_path] = new_path
-                    # Remove from new files since it's a move
-                    report.new.remove(new_path)
-                else:
-                    # Actually deleted
-                    report.deleted.add(db_file_path)
-
-        # Log summary
-        logger.debug(f"Total files: {report.total}")
-        logger.debug(f"Changes found: {report.total_changes}")
-        logger.debug(f" New: {len(report.new)}")
-        logger.debug(f" Modified: {len(report.modified)}")
-        logger.debug(f" Moved: {len(report.moves)}")
-        logger.debug(f" Deleted: {len(report.deleted)}")
-
-        if scan_result.errors:  # pragma: no cover
-            logger.warning("Files skipped due to errors:")
-            for file_path, error in scan_result.errors.items():
-                logger.warning(f" {file_path}: {error}")
-
-        return report
-
-    async def get_db_file_state(self, db_records: Sequence[Entity]) -> Dict[str, FileState]:
-        """Get file_path and checksums from database.
-        Args:
-            db_records: database records
-        Returns:
-            Dict mapping file paths to FileState
-        :param db_records: the data from the db
-        """
-        return {
-            r.file_path: FileState(
-                file_path=r.file_path, permalink=r.permalink, checksum=r.checksum or ""
-            )
-            for r in db_records
-        }
-
-    async def find_knowledge_changes(self, directory: Path) -> SyncReport:
-        """Find changes in knowledge directory."""
-        db_file_state = await self.get_db_file_state(await self.entity_repository.find_all())
-        return await self.find_changes(directory=directory, db_file_state=db_file_state)
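
The core of find_changes above is checksum-based move detection: a path that is gone from the database state but whose checksum reappears at a new path is recorded as a move rather than a delete plus a create. A condensed, dependency-free sketch of that idea follows; the function name and return shape are ours, not the package's.

```python
from typing import Dict, Set


def classify_changes(current: Dict[str, str], previous: Dict[str, str]) -> dict:
    """Split filesystem vs. database differences into new/modified/moved/deleted.

    Both arguments map file_path -> checksum: `current` is the on-disk state,
    `previous` is the state recorded in the database.
    """
    new: Set[str] = {p for p in current if p not in previous}
    modified = {p for p in current if p in previous and current[p] != previous[p]}
    # Checksums of apparently new paths; a match from a vanished path means a move.
    new_by_checksum = {current[p]: p for p in new}

    moves: Dict[str, str] = {}
    deleted: Set[str] = set()
    for old_path, checksum in previous.items():
        if old_path in current:
            continue
        if checksum in new_by_checksum:
            new_path = new_by_checksum[checksum]
            moves[old_path] = new_path
            new.discard(new_path)  # reclassify: this path was a move target, not a new file
        else:
            deleted.add(old_path)
    return {"new": new, "modified": modified, "moves": moves, "deleted": deleted}


# Example: a.md was renamed to b.md without changing its content.
changes = classify_changes(current={"b.md": "abc123"}, previous={"a.md": "abc123"})
assert changes["moves"] == {"a.md": "b.md"} and not changes["new"] and not changes["deleted"]
```

As in the removed implementation, the heuristic keys on checksum equality alone, so identical-content files can shadow one another.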

basic_memory/sync/utils.py
@@ -1,31 +0,0 @@
-"""Types and utilities for file sync."""
-
-from dataclasses import dataclass, field
-from typing import Set, Dict
-
-
-@dataclass
-class SyncReport:
-    """Report of file changes found compared to database state.
-
-    Attributes:
-        total: Total number of files in directory being synced
-        new: Files that exist on disk but not in database
-        modified: Files that exist in both but have different checksums
-        deleted: Files that exist in database but not on disk
-        moves: Files that have been moved from one location to another
-        checksums: Current checksums for files on disk
-    """
-
-    total: int = 0
-    # We keep paths as strings in sets/dicts for easier serialization
-    new: Set[str] = field(default_factory=set)
-    modified: Set[str] = field(default_factory=set)
-    deleted: Set[str] = field(default_factory=set)
-    moves: Dict[str, str] = field(default_factory=dict)  # old_path -> new_path
-    checksums: Dict[str, str] = field(default_factory=dict)  # path -> checksum
-
-    @property
-    def total_changes(self) -> int:
-        """Total number of changes."""
-        return len(self.new) + len(self.modified) + len(self.deleted) + len(self.moves)
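
To make the removed report type concrete, a tiny usage sketch with invented paths, assuming SyncReport exactly as defined above:

```python
# Hypothetical sync outcome; the paths are invented.
report = SyncReport(total=3)
report.new.add("docs/new-note.md")
report.modified.add("docs/existing-note.md")
report.moves["notes/old-name.md"] = "notes/new-name.md"

assert report.total_changes == 3  # 1 new + 1 modified + 0 deleted + 1 moved
```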