basic-memory 0.6.0__py3-none-any.whl → 0.8.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of basic-memory might be problematic. Click here for more details.

Files changed (70):
  1. basic_memory/__init__.py +1 -1
  2. basic_memory/alembic/alembic.ini +119 -0
  3. basic_memory/alembic/env.py +23 -1
  4. basic_memory/alembic/versions/502b60eaa905_remove_required_from_entity_permalink.py +51 -0
  5. basic_memory/alembic/versions/b3c3938bacdb_relation_to_name_unique_index.py +44 -0
  6. basic_memory/api/app.py +0 -4
  7. basic_memory/api/routers/knowledge_router.py +1 -9
  8. basic_memory/api/routers/memory_router.py +41 -25
  9. basic_memory/api/routers/resource_router.py +119 -12
  10. basic_memory/api/routers/search_router.py +17 -9
  11. basic_memory/cli/app.py +0 -2
  12. basic_memory/cli/commands/db.py +11 -8
  13. basic_memory/cli/commands/import_chatgpt.py +31 -27
  14. basic_memory/cli/commands/import_claude_conversations.py +29 -27
  15. basic_memory/cli/commands/import_claude_projects.py +30 -29
  16. basic_memory/cli/commands/import_memory_json.py +28 -26
  17. basic_memory/cli/commands/status.py +16 -26
  18. basic_memory/cli/commands/sync.py +11 -12
  19. basic_memory/cli/commands/tools.py +180 -0
  20. basic_memory/cli/main.py +1 -1
  21. basic_memory/config.py +16 -2
  22. basic_memory/db.py +1 -0
  23. basic_memory/deps.py +5 -1
  24. basic_memory/file_utils.py +6 -4
  25. basic_memory/markdown/entity_parser.py +3 -3
  26. basic_memory/mcp/async_client.py +1 -1
  27. basic_memory/mcp/main.py +25 -0
  28. basic_memory/mcp/prompts/__init__.py +15 -0
  29. basic_memory/mcp/prompts/ai_assistant_guide.py +28 -0
  30. basic_memory/mcp/prompts/continue_conversation.py +172 -0
  31. basic_memory/mcp/prompts/json_canvas_spec.py +25 -0
  32. basic_memory/mcp/prompts/recent_activity.py +46 -0
  33. basic_memory/mcp/prompts/search.py +127 -0
  34. basic_memory/mcp/prompts/utils.py +98 -0
  35. basic_memory/mcp/server.py +3 -7
  36. basic_memory/mcp/tools/__init__.py +6 -4
  37. basic_memory/mcp/tools/canvas.py +99 -0
  38. basic_memory/mcp/tools/knowledge.py +26 -14
  39. basic_memory/mcp/tools/memory.py +57 -31
  40. basic_memory/mcp/tools/notes.py +65 -72
  41. basic_memory/mcp/tools/resource.py +192 -0
  42. basic_memory/mcp/tools/search.py +13 -4
  43. basic_memory/mcp/tools/utils.py +2 -1
  44. basic_memory/models/knowledge.py +27 -11
  45. basic_memory/repository/repository.py +1 -1
  46. basic_memory/repository/search_repository.py +17 -4
  47. basic_memory/schemas/__init__.py +0 -11
  48. basic_memory/schemas/base.py +4 -1
  49. basic_memory/schemas/memory.py +14 -2
  50. basic_memory/schemas/request.py +1 -1
  51. basic_memory/schemas/search.py +4 -1
  52. basic_memory/services/context_service.py +14 -6
  53. basic_memory/services/entity_service.py +19 -12
  54. basic_memory/services/file_service.py +69 -2
  55. basic_memory/services/link_resolver.py +12 -9
  56. basic_memory/services/search_service.py +59 -13
  57. basic_memory/sync/__init__.py +3 -2
  58. basic_memory/sync/sync_service.py +287 -107
  59. basic_memory/sync/watch_service.py +125 -129
  60. basic_memory/utils.py +27 -15
  61. {basic_memory-0.6.0.dist-info → basic_memory-0.8.0.dist-info}/METADATA +3 -2
  62. basic_memory-0.8.0.dist-info/RECORD +91 -0
  63. basic_memory/alembic/README +0 -1
  64. basic_memory/schemas/discovery.py +0 -28
  65. basic_memory/sync/file_change_scanner.py +0 -158
  66. basic_memory/sync/utils.py +0 -31
  67. basic_memory-0.6.0.dist-info/RECORD +0 -81
  68. {basic_memory-0.6.0.dist-info → basic_memory-0.8.0.dist-info}/WHEEL +0 -0
  69. {basic_memory-0.6.0.dist-info → basic_memory-0.8.0.dist-info}/entry_points.txt +0 -0
  70. {basic_memory-0.6.0.dist-info → basic_memory-0.8.0.dist-info}/licenses/LICENSE +0 -0
@@ -51,7 +51,7 @@ class GetEntitiesRequest(BaseModel):
51
51
  discovered through search.
52
52
  """
53
53
 
54
- permalinks: Annotated[List[Permalink], MinLen(1)]
54
+ permalinks: Annotated[List[Permalink], MinLen(1), MaxLen(10)]
55
55
 
56
56
 
57
57
  class CreateRelationsRequest(BaseModel):
@@ -68,9 +68,10 @@ class SearchResult(BaseModel):
68
68
  """Search result with score and metadata."""
69
69
 
70
70
  id: int
71
+ title: str
71
72
  type: SearchItemType
72
73
  score: float
73
- permalink: str
74
+ permalink: Optional[str]
74
75
  file_path: str
75
76
 
76
77
  metadata: Optional[dict] = None
@@ -102,6 +103,8 @@ class SearchResponse(BaseModel):
102
103
  """Wrapper for search results."""
103
104
 
104
105
  results: List[SearchResult]
106
+ current_page: int
107
+ page_size: int
105
108
 
106
109
 
107
110
  # Schema for future advanced search endpoint
@@ -54,11 +54,13 @@ class ContextService:
54
54
  types: Optional[List[SearchItemType]] = None,
55
55
  depth: int = 1,
56
56
  since: Optional[datetime] = None,
57
- max_results: int = 10,
57
+ limit=10,
58
+ offset=0,
59
+ max_related: int = 10,
58
60
  ):
59
61
  """Build rich context from a memory:// URI."""
60
62
  logger.debug(
61
- f"Building context for URI: '{memory_url}' depth: '{depth}' since: '{since}' max_results: '{max_results}'"
63
+ f"Building context for URI: '{memory_url}' depth: '{depth}' since: '{since}' limit: '{limit}' offset: '{offset}' max_related: '{max_related}'"
62
64
  )
63
65
 
64
66
  if memory_url:
@@ -66,15 +68,21 @@ class ContextService:
66
68
  # Pattern matching - use search
67
69
  if "*" in path:
68
70
  logger.debug(f"Pattern search for '{path}'")
69
- primary = await self.search_repository.search(permalink_match=path)
71
+ primary = await self.search_repository.search(
72
+ permalink_match=path, limit=limit, offset=offset
73
+ )
70
74
 
71
75
  # Direct lookup for exact path
72
76
  else:
73
77
  logger.debug(f"Direct lookup for '{path}'")
74
- primary = await self.search_repository.search(permalink=path)
78
+ primary = await self.search_repository.search(
79
+ permalink=path, limit=limit, offset=offset
80
+ )
75
81
  else:
76
82
  logger.debug(f"Build context for '{types}'")
77
- primary = await self.search_repository.search(types=types, after_date=since)
83
+ primary = await self.search_repository.search(
84
+ types=types, after_date=since, limit=limit, offset=offset
85
+ )
78
86
 
79
87
  # Get type_id pairs for traversal
80
88
 
@@ -83,7 +91,7 @@ class ContextService:
83
91
 
84
92
  # Find related content
85
93
  related = await self.find_related(
86
- type_id_pairs, max_depth=depth, since=since, max_results=max_results
94
+ type_id_pairs, max_depth=depth, since=since, max_results=max_related
87
95
  )
88
96
  logger.debug(f"Found {len(related)} related results")
89
97
  for r in related:
@@ -124,17 +124,19 @@ class EntityService(BaseService[EntityModel]):
124
124
  entity_markdown = await self.entity_parser.parse_file(file_path)
125
125
 
126
126
  # create entity
127
- await self.create_entity_from_markdown(file_path, entity_markdown)
127
+ created = await self.create_entity_from_markdown(file_path, entity_markdown)
128
128
 
129
129
  # add relations
130
- entity = await self.update_entity_relations(file_path, entity_markdown)
130
+ entity = await self.update_entity_relations(created.file_path, entity_markdown)
131
131
 
132
132
  # Set final checksum to mark complete
133
133
  return await self.repository.update(entity.id, {"checksum": checksum})
134
134
 
135
135
  async def update_entity(self, entity: EntityModel, schema: EntitySchema) -> EntityModel:
136
136
  """Update an entity's content and metadata."""
137
- logger.debug(f"Updating entity with permalink: {entity.permalink}")
137
+ logger.debug(
138
+ f"Updating entity with permalink: {entity.permalink} content-type: {schema.content_type}"
139
+ )
138
140
 
139
141
  # Convert file path string to Path
140
142
  file_path = Path(entity.file_path)
@@ -152,20 +154,25 @@ class EntityService(BaseService[EntityModel]):
152
154
  entity = await self.update_entity_and_observations(file_path, entity_markdown)
153
155
 
154
156
  # add relations
155
- await self.update_entity_relations(file_path, entity_markdown)
157
+ await self.update_entity_relations(str(file_path), entity_markdown)
156
158
 
157
159
  # Set final checksum to match file
158
160
  entity = await self.repository.update(entity.id, {"checksum": checksum})
159
161
 
160
162
  return entity
161
163
 
162
- async def delete_entity(self, permalink: str) -> bool:
164
+ async def delete_entity(self, permalink_or_id: str | int) -> bool:
163
165
  """Delete entity and its file."""
164
- logger.debug(f"Deleting entity: {permalink}")
166
+ logger.debug(f"Deleting entity: {permalink_or_id}")
165
167
 
166
168
  try:
167
169
  # Get entity first for file deletion
168
- entity = await self.get_by_permalink(permalink)
170
+ if isinstance(permalink_or_id, str):
171
+ entity = await self.get_by_permalink(permalink_or_id)
172
+ else:
173
+ entities = await self.get_entities_by_id([permalink_or_id])
174
+ assert len(entities) == 1, f"Expected 1 entity, got {len(entities)}"
175
+ entity = entities[0]
169
176
 
170
177
  # Delete file first
171
178
  await self.file_service.delete_entity_file(entity)
@@ -174,7 +181,7 @@ class EntityService(BaseService[EntityModel]):
174
181
  return await self.repository.delete(entity.id)
175
182
 
176
183
  except EntityNotFoundError:
177
- logger.info(f"Entity not found: {permalink}")
184
+ logger.info(f"Entity not found: {permalink_or_id}")
178
185
  return True # Already deleted
179
186
 
180
187
  async def get_by_permalink(self, permalink: str) -> EntityModel:
@@ -256,13 +263,13 @@ class EntityService(BaseService[EntityModel]):
256
263
 
257
264
  async def update_entity_relations(
258
265
  self,
259
- file_path: Path,
266
+ path: str,
260
267
  markdown: EntityMarkdown,
261
268
  ) -> EntityModel:
262
269
  """Update relations for entity"""
263
- logger.debug(f"Updating relations for entity: {file_path}")
270
+ logger.debug(f"Updating relations for entity: {path}")
264
271
 
265
- db_entity = await self.repository.get_by_file_path(str(file_path))
272
+ db_entity = await self.repository.get_by_file_path(path)
266
273
 
267
274
  # Clear existing relations first
268
275
  await self.relation_repository.delete_outgoing_relations_from_entity(db_entity.id)
@@ -296,4 +303,4 @@ class EntityService(BaseService[EntityModel]):
296
303
  )
297
304
  continue
298
305
 
299
- return await self.repository.get_by_file_path(str(file_path))
306
+ return await self.repository.get_by_file_path(path)
@@ -1,11 +1,14 @@
1
1
  """Service for file operations with checksum tracking."""
2
2
 
3
+ import mimetypes
4
+ from os import stat_result
3
5
  from pathlib import Path
4
- from typing import Tuple, Union
6
+ from typing import Tuple, Union, Dict, Any
5
7
 
6
8
  from loguru import logger
7
9
 
8
10
  from basic_memory import file_utils
11
+ from basic_memory.file_utils import FileError
9
12
  from basic_memory.markdown.markdown_processor import MarkdownProcessor
10
13
  from basic_memory.models import Entity as EntityModel
11
14
  from basic_memory.schemas import Entity as EntitySchema
@@ -134,6 +137,7 @@ class FileService:
134
137
  logger.error(f"Failed to write file {full_path}: {e}")
135
138
  raise FileOperationError(f"Failed to write file: {e}")
136
139
 
140
+ # TODO remove read_file
137
141
  async def read_file(self, path: Union[Path, str]) -> Tuple[str, str]:
138
142
  """Read file and compute checksum.
139
143
 
@@ -153,7 +157,7 @@ class FileService:
153
157
  full_path = path if path.is_absolute() else self.base_path / path
154
158
 
155
159
  try:
156
- content = path.read_text()
160
+ content = full_path.read_text()
157
161
  checksum = await file_utils.compute_checksum(content)
158
162
  logger.debug(f"read file: {full_path}, checksum: {checksum}")
159
163
  return content, checksum
@@ -174,3 +178,66 @@ class FileService:
174
178
  path = Path(path)
175
179
  full_path = path if path.is_absolute() else self.base_path / path
176
180
  full_path.unlink(missing_ok=True)
181
+
182
+ async def update_frontmatter(self, path: Union[Path, str], updates: Dict[str, Any]) -> str:
183
+ """
184
+ Update frontmatter fields in a file while preserving all content.
185
+ """
186
+
187
+ path = Path(path)
188
+ full_path = path if path.is_absolute() else self.base_path / path
189
+ return await file_utils.update_frontmatter(full_path, updates)
190
+
191
+ async def compute_checksum(self, path: Union[str, Path]) -> str:
192
+ """Compute checksum for a file."""
193
+ path = Path(path)
194
+ full_path = path if path.is_absolute() else self.base_path / path
195
+ try:
196
+ if self.is_markdown(path):
197
+ # read str
198
+ content = full_path.read_text()
199
+ else:
200
+ # read bytes
201
+ content = full_path.read_bytes()
202
+ return await file_utils.compute_checksum(content)
203
+
204
+ except Exception as e: # pragma: no cover
205
+ logger.error(f"Failed to compute checksum for {path}: {e}")
206
+ raise FileError(f"Failed to compute checksum for {path}: {e}")
207
+
208
+ def file_stats(self, path: Union[Path, str]) -> stat_result:
209
+ """
210
+ Return file stats for a given path.
211
+ :param path:
212
+ :return:
213
+ """
214
+ path = Path(path)
215
+ full_path = path if path.is_absolute() else self.base_path / path
216
+ # get file timestamps
217
+ return full_path.stat()
218
+
219
+ def content_type(self, path: Union[Path, str]) -> str:
220
+ """
221
+ Return content_type for a given path.
222
+ :param path:
223
+ :return:
224
+ """
225
+ path = Path(path)
226
+ full_path = path if path.is_absolute() else self.base_path / path
227
+ # get file timestamps
228
+ mime_type, _ = mimetypes.guess_type(full_path.name)
229
+
230
+ # .canvas files are json
231
+ if full_path.suffix == ".canvas":
232
+ mime_type = "application/json"
233
+
234
+ content_type = mime_type or "text/plain"
235
+ return content_type
236
+
237
+ def is_markdown(self, path: Union[Path, str]) -> bool:
238
+ """
239
+ Return content_type for a given path.
240
+ :param path:
241
+ :return:
242
+ """
243
+ return self.content_type(path) == "text/markdown"
@@ -4,11 +4,11 @@ from typing import Optional, Tuple, List
4
4
 
5
5
  from loguru import logger
6
6
 
7
+ from basic_memory.models import Entity
7
8
  from basic_memory.repository.entity_repository import EntityRepository
8
9
  from basic_memory.repository.search_repository import SearchIndexRow
9
- from basic_memory.services.search_service import SearchService
10
- from basic_memory.models import Entity
11
10
  from basic_memory.schemas.search import SearchQuery, SearchItemType
11
+ from basic_memory.services.search_service import SearchService
12
12
 
13
13
 
14
14
  class LinkResolver:
@@ -58,7 +58,8 @@ class LinkResolver:
58
58
  logger.debug(
59
59
  f"Selected best match from {len(results)} results: {best_match.permalink}"
60
60
  )
61
- return await self.entity_repository.get_by_permalink(best_match.permalink)
61
+ if best_match.permalink:
62
+ return await self.entity_repository.get_by_permalink(best_match.permalink)
62
63
 
63
64
  # if we couldn't find anything then return None
64
65
  return None
@@ -103,12 +104,14 @@ class LinkResolver:
103
104
  scored_results = []
104
105
  for result in results:
105
106
  # Start with base score (lower is better)
106
- score = result.score
107
- assert score is not None
108
-
109
- # Parse path components
110
- path_parts = result.permalink.lower().split("/")
111
- last_part = path_parts[-1] if path_parts else ""
107
+ score = result.score or 0
108
+
109
+ if result.permalink:
110
+ # Parse path components
111
+ path_parts = result.permalink.lower().split("/")
112
+ last_part = path_parts[-1] if path_parts else ""
113
+ else:
114
+ last_part = "" # pragma: no cover
112
115
 
113
116
  # Title word match boosts
114
117
  term_matches = [term for term in terms if term in last_part]
@@ -3,6 +3,7 @@
3
3
  from datetime import datetime
4
4
  from typing import List, Optional, Set
5
5
 
6
+ from dateparser import parse
6
7
  from fastapi import BackgroundTasks
7
8
  from loguru import logger
8
9
 
@@ -51,7 +52,7 @@ class SearchService:
51
52
 
52
53
  logger.info("Reindex complete")
53
54
 
54
- async def search(self, query: SearchQuery) -> List[SearchIndexRow]:
55
+ async def search(self, query: SearchQuery, limit=10, offset=0) -> List[SearchIndexRow]:
55
56
  """Search across all indexed content.
56
57
 
57
58
  Supports three modes:
@@ -69,7 +70,7 @@ class SearchService:
69
70
  (
70
71
  query.after_date
71
72
  if isinstance(query.after_date, datetime)
72
- else datetime.fromisoformat(query.after_date)
73
+ else parse(query.after_date)
73
74
  )
74
75
  if query.after_date
75
76
  else None
@@ -84,6 +85,8 @@ class SearchService:
84
85
  types=query.types,
85
86
  entity_types=query.entity_types,
86
87
  after_date=after_date,
88
+ limit=limit,
89
+ offset=offset,
87
90
  )
88
91
 
89
92
  return results
@@ -116,6 +119,46 @@ class SearchService:
116
119
  self,
117
120
  entity: Entity,
118
121
  background_tasks: Optional[BackgroundTasks] = None,
122
+ ) -> None:
123
+ if background_tasks:
124
+ background_tasks.add_task(self.index_entity_data, entity)
125
+ else:
126
+ await self.index_entity_data(entity)
127
+
128
+ async def index_entity_data(
129
+ self,
130
+ entity: Entity,
131
+ ) -> None:
132
+ # delete all search index data associated with entity
133
+ await self.repository.delete_by_entity_id(entity_id=entity.id)
134
+
135
+ # reindex
136
+ await self.index_entity_markdown(
137
+ entity
138
+ ) if entity.is_markdown else await self.index_entity_file(entity)
139
+
140
+ async def index_entity_file(
141
+ self,
142
+ entity: Entity,
143
+ ) -> None:
144
+ # Index entity file with no content
145
+ await self.repository.index_item(
146
+ SearchIndexRow(
147
+ id=entity.id,
148
+ type=SearchItemType.ENTITY.value,
149
+ title=entity.title,
150
+ file_path=entity.file_path,
151
+ metadata={
152
+ "entity_type": entity.entity_type,
153
+ },
154
+ created_at=entity.created_at,
155
+ updated_at=entity.updated_at,
156
+ )
157
+ )
158
+
159
+ async def index_entity_markdown(
160
+ self,
161
+ entity: Entity,
119
162
  ) -> None:
120
163
  """Index an entity and all its observations and relations.
121
164
 
@@ -134,16 +177,10 @@ class SearchService:
134
177
 
135
178
  Each type gets its own row in the search index with appropriate metadata.
136
179
  """
137
- if background_tasks:
138
- background_tasks.add_task(self.index_entity_data, entity)
139
- else:
140
- await self.index_entity_data(entity)
141
180
 
142
- async def index_entity_data(
143
- self,
144
- entity: Entity,
145
- ) -> None:
146
- """Actually perform the indexing."""
181
+ assert entity.permalink is not None, (
182
+ "entity.permalink should not be None for markdown entities"
183
+ )
147
184
 
148
185
  content_parts = []
149
186
  title_variants = self._generate_variants(entity.title)
@@ -158,6 +195,9 @@ class SearchService:
158
195
 
159
196
  entity_content = "\n".join(p for p in content_parts if p and p.strip())
160
197
 
198
+ assert entity.permalink is not None, (
199
+ "entity.permalink should not be None for markdown entities"
200
+ )
161
201
  # Index entity
162
202
  await self.repository.index_item(
163
203
  SearchIndexRow(
@@ -167,6 +207,7 @@ class SearchService:
167
207
  content=entity_content,
168
208
  permalink=entity.permalink,
169
209
  file_path=entity.file_path,
210
+ entity_id=entity.id,
170
211
  metadata={
171
212
  "entity_type": entity.entity_type,
172
213
  },
@@ -212,6 +253,7 @@ class SearchService:
212
253
  permalink=rel.permalink,
213
254
  file_path=entity.file_path,
214
255
  type=SearchItemType.RELATION.value,
256
+ entity_id=entity.id,
215
257
  from_id=rel.from_id,
216
258
  to_id=rel.to_id,
217
259
  relation_type=rel.relation_type,
@@ -220,6 +262,10 @@ class SearchService:
220
262
  )
221
263
  )
222
264
 
223
- async def delete_by_permalink(self, path_id: str):
265
+ async def delete_by_permalink(self, permalink: str):
266
+ """Delete an item from the search index."""
267
+ await self.repository.delete_by_permalink(permalink)
268
+
269
+ async def delete_by_entity_id(self, entity_id: int):
224
270
  """Delete an item from the search index."""
225
- await self.repository.delete_by_permalink(path_id)
271
+ await self.repository.delete_by_entity_id(entity_id)
@@ -1,5 +1,6 @@
1
- from .file_change_scanner import FileChangeScanner
1
+ """Basic Memory sync services."""
2
+
2
3
  from .sync_service import SyncService
3
4
  from .watch_service import WatchService
4
5
 
5
- __all__ = ["SyncService", "FileChangeScanner", "WatchService"]
6
+ __all__ = ["SyncService", "WatchService"]