basic-memory 0.7.0__py3-none-any.whl → 0.9.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of basic-memory might be problematic. Click here for more details.

Files changed (89) hide show
  1. basic_memory/__init__.py +1 -1
  2. basic_memory/alembic/alembic.ini +119 -0
  3. basic_memory/alembic/env.py +23 -1
  4. basic_memory/alembic/migrations.py +4 -9
  5. basic_memory/alembic/versions/502b60eaa905_remove_required_from_entity_permalink.py +51 -0
  6. basic_memory/alembic/versions/b3c3938bacdb_relation_to_name_unique_index.py +44 -0
  7. basic_memory/alembic/versions/cc7172b46608_update_search_index_schema.py +106 -0
  8. basic_memory/api/app.py +9 -10
  9. basic_memory/api/routers/__init__.py +2 -1
  10. basic_memory/api/routers/knowledge_router.py +31 -5
  11. basic_memory/api/routers/memory_router.py +18 -17
  12. basic_memory/api/routers/project_info_router.py +275 -0
  13. basic_memory/api/routers/resource_router.py +105 -4
  14. basic_memory/api/routers/search_router.py +22 -4
  15. basic_memory/cli/app.py +54 -5
  16. basic_memory/cli/commands/__init__.py +15 -2
  17. basic_memory/cli/commands/db.py +9 -13
  18. basic_memory/cli/commands/import_chatgpt.py +26 -30
  19. basic_memory/cli/commands/import_claude_conversations.py +27 -29
  20. basic_memory/cli/commands/import_claude_projects.py +29 -31
  21. basic_memory/cli/commands/import_memory_json.py +26 -28
  22. basic_memory/cli/commands/mcp.py +7 -1
  23. basic_memory/cli/commands/project.py +119 -0
  24. basic_memory/cli/commands/project_info.py +167 -0
  25. basic_memory/cli/commands/status.py +14 -28
  26. basic_memory/cli/commands/sync.py +63 -22
  27. basic_memory/cli/commands/tool.py +253 -0
  28. basic_memory/cli/main.py +39 -1
  29. basic_memory/config.py +166 -4
  30. basic_memory/db.py +19 -4
  31. basic_memory/deps.py +10 -3
  32. basic_memory/file_utils.py +37 -19
  33. basic_memory/markdown/entity_parser.py +3 -3
  34. basic_memory/markdown/utils.py +5 -0
  35. basic_memory/mcp/async_client.py +1 -1
  36. basic_memory/mcp/main.py +24 -0
  37. basic_memory/mcp/prompts/__init__.py +19 -0
  38. basic_memory/mcp/prompts/ai_assistant_guide.py +26 -0
  39. basic_memory/mcp/prompts/continue_conversation.py +111 -0
  40. basic_memory/mcp/prompts/recent_activity.py +88 -0
  41. basic_memory/mcp/prompts/search.py +182 -0
  42. basic_memory/mcp/prompts/utils.py +155 -0
  43. basic_memory/mcp/server.py +2 -6
  44. basic_memory/mcp/tools/__init__.py +12 -21
  45. basic_memory/mcp/tools/build_context.py +85 -0
  46. basic_memory/mcp/tools/canvas.py +97 -0
  47. basic_memory/mcp/tools/delete_note.py +28 -0
  48. basic_memory/mcp/tools/project_info.py +51 -0
  49. basic_memory/mcp/tools/read_content.py +229 -0
  50. basic_memory/mcp/tools/read_note.py +190 -0
  51. basic_memory/mcp/tools/recent_activity.py +100 -0
  52. basic_memory/mcp/tools/search.py +56 -17
  53. basic_memory/mcp/tools/utils.py +245 -16
  54. basic_memory/mcp/tools/write_note.py +124 -0
  55. basic_memory/models/knowledge.py +27 -11
  56. basic_memory/models/search.py +2 -1
  57. basic_memory/repository/entity_repository.py +3 -2
  58. basic_memory/repository/project_info_repository.py +9 -0
  59. basic_memory/repository/repository.py +24 -7
  60. basic_memory/repository/search_repository.py +47 -14
  61. basic_memory/schemas/__init__.py +10 -9
  62. basic_memory/schemas/base.py +4 -1
  63. basic_memory/schemas/memory.py +14 -4
  64. basic_memory/schemas/project_info.py +96 -0
  65. basic_memory/schemas/search.py +29 -33
  66. basic_memory/services/context_service.py +3 -3
  67. basic_memory/services/entity_service.py +26 -13
  68. basic_memory/services/file_service.py +145 -26
  69. basic_memory/services/link_resolver.py +9 -46
  70. basic_memory/services/search_service.py +95 -22
  71. basic_memory/sync/__init__.py +3 -2
  72. basic_memory/sync/sync_service.py +523 -117
  73. basic_memory/sync/watch_service.py +258 -132
  74. basic_memory/utils.py +51 -36
  75. basic_memory-0.9.0.dist-info/METADATA +736 -0
  76. basic_memory-0.9.0.dist-info/RECORD +99 -0
  77. basic_memory/alembic/README +0 -1
  78. basic_memory/cli/commands/tools.py +0 -157
  79. basic_memory/mcp/tools/knowledge.py +0 -68
  80. basic_memory/mcp/tools/memory.py +0 -170
  81. basic_memory/mcp/tools/notes.py +0 -202
  82. basic_memory/schemas/discovery.py +0 -28
  83. basic_memory/sync/file_change_scanner.py +0 -158
  84. basic_memory/sync/utils.py +0 -31
  85. basic_memory-0.7.0.dist-info/METADATA +0 -378
  86. basic_memory-0.7.0.dist-info/RECORD +0 -82
  87. {basic_memory-0.7.0.dist-info → basic_memory-0.9.0.dist-info}/WHEEL +0 -0
  88. {basic_memory-0.7.0.dist-info → basic_memory-0.9.0.dist-info}/entry_points.txt +0 -0
  89. {basic_memory-0.7.0.dist-info → basic_memory-0.9.0.dist-info}/licenses/LICENSE +0 -0
@@ -1,3 +1,9 @@
1
+ """Utility functions for making HTTP requests in Basic Memory MCP tools.
2
+
3
+ These functions provide a consistent interface for making HTTP requests
4
+ to the Basic Memory API, with improved error handling and logging.
5
+ """
6
+
1
7
  import typing
2
8
 
3
9
  from httpx import Response, URL, AsyncClient, HTTPStatusError
@@ -17,6 +23,54 @@ from loguru import logger
17
23
  from mcp.server.fastmcp.exceptions import ToolError
18
24
 
19
25
 
26
def get_error_message(status_code: int, url: URL | str, method: str) -> str:
    """Get a friendly error message based on the HTTP status code.

    Args:
        status_code: The HTTP status code
        url: The URL that was requested
        method: The HTTP method used

    Returns:
        A user-friendly error message
    """
    # Extract the last path segment from the URL for cleaner error messages.
    # Fix: the original only fell back to "resource" for non-str URLs, and an
    # empty string or trailing slash produced an empty path label; normalize
    # both input kinds the same way.
    path = str(url).split("/")[-1] if url else ""
    path = path or "resource"

    # Client errors (400-499)
    if status_code == 400:
        return f"Invalid request: The request to '{path}' was malformed or invalid"
    elif status_code == 401:  # pragma: no cover
        return f"Authentication required: You need to authenticate to access '{path}'"
    elif status_code == 403:  # pragma: no cover
        return f"Access denied: You don't have permission to access '{path}'"
    elif status_code == 404:
        return f"Resource not found: '{path}' doesn't exist or has been moved"
    elif status_code == 409:  # pragma: no cover
        return f"Conflict: The request for '{path}' conflicts with the current state"
    elif status_code == 429:  # pragma: no cover
        return "Too many requests: Please slow down and try again later"
    elif 400 <= status_code < 500:  # pragma: no cover
        return f"Client error ({status_code}): The request for '{path}' could not be completed"

    # Server errors (500-599)
    elif status_code == 500:
        return f"Internal server error: Something went wrong processing '{path}'"
    elif status_code == 503:  # pragma: no cover
        return (
            f"Service unavailable: The server is currently unable to handle requests for '{path}'"
        )
    elif 500 <= status_code < 600:  # pragma: no cover
        return f"Server error ({status_code}): The server encountered an error handling '{path}'"

    # Fallback for any other status code
    else:  # pragma: no cover
        return f"HTTP error {status_code}: {method} request to '{path}' failed"
20
74
  async def call_get(
21
75
  client: AsyncClient,
22
76
  url: URL | str,
@@ -29,6 +83,25 @@ async def call_get(
29
83
  timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
30
84
  extensions: RequestExtensions | None = None,
31
85
  ) -> Response:
86
+ """Make a GET request and handle errors appropriately.
87
+
88
+ Args:
89
+ client: The HTTPX AsyncClient to use
90
+ url: The URL to request
91
+ params: Query parameters
92
+ headers: HTTP headers
93
+ cookies: HTTP cookies
94
+ auth: Authentication
95
+ follow_redirects: Whether to follow redirects
96
+ timeout: Request timeout
97
+ extensions: HTTPX extensions
98
+
99
+ Returns:
100
+ The HTTP response
101
+
102
+ Raises:
103
+ ToolError: If the request fails with an appropriate error message
104
+ """
32
105
  logger.debug(f"Calling GET '{url}' params: '{params}'")
33
106
  try:
34
107
  response = await client.get(
@@ -41,11 +114,33 @@ async def call_get(
41
114
  timeout=timeout,
42
115
  extensions=extensions,
43
116
  )
44
- response.raise_for_status()
45
- return response
117
+
118
+ if response.is_success:
119
+ return response
120
+
121
+ # Handle different status codes differently
122
+ status_code = response.status_code
123
+ error_message = get_error_message(status_code, url, "GET")
124
+
125
+ # Log at appropriate level based on status code
126
+ if 400 <= status_code < 500:
127
+ # Client errors: log as info except for 429 (Too Many Requests)
128
+ if status_code == 429: # pragma: no cover
129
+ logger.warning(f"Rate limit exceeded: GET {url}: {error_message}")
130
+ else:
131
+ logger.info(f"Client error: GET {url}: {error_message}")
132
+ else: # pragma: no cover
133
+ # Server errors: log as error
134
+ logger.error(f"Server error: GET {url}: {error_message}")
135
+
136
+ # Raise a tool error with the friendly message
137
+ response.raise_for_status() # Will always raise since we're in the error case
138
+ return response # This line will never execute, but it satisfies the type checker # pragma: no cover
139
+
46
140
  except HTTPStatusError as e:
47
- logger.error(f"Error calling GET {url}: {e}")
48
- raise ToolError(f"Error calling tool: {e}.") from e
141
+ status_code = e.response.status_code
142
+ error_message = get_error_message(status_code, url, "GET")
143
+ raise ToolError(error_message) from e
49
144
 
50
145
 
51
146
  async def call_put(
@@ -64,6 +159,30 @@ async def call_put(
64
159
  timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
65
160
  extensions: RequestExtensions | None = None,
66
161
  ) -> Response:
162
+ """Make a PUT request and handle errors appropriately.
163
+
164
+ Args:
165
+ client: The HTTPX AsyncClient to use
166
+ url: The URL to request
167
+ content: Request content
168
+ data: Form data
169
+ files: Files to upload
170
+ json: JSON data
171
+ params: Query parameters
172
+ headers: HTTP headers
173
+ cookies: HTTP cookies
174
+ auth: Authentication
175
+ follow_redirects: Whether to follow redirects
176
+ timeout: Request timeout
177
+ extensions: HTTPX extensions
178
+
179
+ Returns:
180
+ The HTTP response
181
+
182
+ Raises:
183
+ ToolError: If the request fails with an appropriate error message
184
+ """
185
+ logger.debug(f"Calling PUT '{url}'")
67
186
  try:
68
187
  response = await client.put(
69
188
  url,
@@ -79,11 +198,33 @@ async def call_put(
79
198
  timeout=timeout,
80
199
  extensions=extensions,
81
200
  )
82
- response.raise_for_status()
83
- return response
201
+
202
+ if response.is_success:
203
+ return response
204
+
205
+ # Handle different status codes differently
206
+ status_code = response.status_code
207
+ error_message = get_error_message(status_code, url, "PUT")
208
+
209
+ # Log at appropriate level based on status code
210
+ if 400 <= status_code < 500:
211
+ # Client errors: log as info except for 429 (Too Many Requests)
212
+ if status_code == 429: # pragma: no cover
213
+ logger.warning(f"Rate limit exceeded: PUT {url}: {error_message}")
214
+ else:
215
+ logger.info(f"Client error: PUT {url}: {error_message}")
216
+ else: # pragma: no cover
217
+ # Server errors: log as error
218
+ logger.error(f"Server error: PUT {url}: {error_message}")
219
+
220
+ # Raise a tool error with the friendly message
221
+ response.raise_for_status() # Will always raise since we're in the error case
222
+ return response # This line will never execute, but it satisfies the type checker # pragma: no cover
223
+
84
224
  except HTTPStatusError as e:
85
- logger.error(f"Error calling PUT {url}: {e}")
86
- raise ToolError(f"Error calling tool: {e}") from e
225
+ status_code = e.response.status_code
226
+ error_message = get_error_message(status_code, url, "PUT")
227
+ raise ToolError(error_message) from e
87
228
 
88
229
 
89
230
  async def call_post(
@@ -102,6 +243,30 @@ async def call_post(
102
243
  timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
103
244
  extensions: RequestExtensions | None = None,
104
245
  ) -> Response:
246
+ """Make a POST request and handle errors appropriately.
247
+
248
+ Args:
249
+ client: The HTTPX AsyncClient to use
250
+ url: The URL to request
251
+ content: Request content
252
+ data: Form data
253
+ files: Files to upload
254
+ json: JSON data
255
+ params: Query parameters
256
+ headers: HTTP headers
257
+ cookies: HTTP cookies
258
+ auth: Authentication
259
+ follow_redirects: Whether to follow redirects
260
+ timeout: Request timeout
261
+ extensions: HTTPX extensions
262
+
263
+ Returns:
264
+ The HTTP response
265
+
266
+ Raises:
267
+ ToolError: If the request fails with an appropriate error message
268
+ """
269
+ logger.debug(f"Calling POST '{url}'")
105
270
  try:
106
271
  response = await client.post(
107
272
  url=url,
@@ -117,11 +282,33 @@ async def call_post(
117
282
  timeout=timeout,
118
283
  extensions=extensions,
119
284
  )
120
- response.raise_for_status()
121
- return response
285
+
286
+ if response.is_success:
287
+ return response
288
+
289
+ # Handle different status codes differently
290
+ status_code = response.status_code
291
+ error_message = get_error_message(status_code, url, "POST")
292
+
293
+ # Log at appropriate level based on status code
294
+ if 400 <= status_code < 500:
295
+ # Client errors: log as info except for 429 (Too Many Requests)
296
+ if status_code == 429: # pragma: no cover
297
+ logger.warning(f"Rate limit exceeded: POST {url}: {error_message}")
298
+ else: # pragma: no cover
299
+ logger.info(f"Client error: POST {url}: {error_message}")
300
+ else:
301
+ # Server errors: log as error
302
+ logger.error(f"Server error: POST {url}: {error_message}")
303
+
304
+ # Raise a tool error with the friendly message
305
+ response.raise_for_status() # Will always raise since we're in the error case
306
+ return response # This line will never execute, but it satisfies the type checker # pragma: no cover
307
+
122
308
  except HTTPStatusError as e:
123
- logger.error(f"Error calling POST {url}: {e}")
124
- raise ToolError(f"Error calling tool: {e}") from e
309
+ status_code = e.response.status_code
310
+ error_message = get_error_message(status_code, url, "POST")
311
+ raise ToolError(error_message) from e
125
312
 
126
313
 
127
314
  async def call_delete(
@@ -136,6 +323,26 @@ async def call_delete(
136
323
  timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
137
324
  extensions: RequestExtensions | None = None,
138
325
  ) -> Response:
326
+ """Make a DELETE request and handle errors appropriately.
327
+
328
+ Args:
329
+ client: The HTTPX AsyncClient to use
330
+ url: The URL to request
331
+ params: Query parameters
332
+ headers: HTTP headers
333
+ cookies: HTTP cookies
334
+ auth: Authentication
335
+ follow_redirects: Whether to follow redirects
336
+ timeout: Request timeout
337
+ extensions: HTTPX extensions
338
+
339
+ Returns:
340
+ The HTTP response
341
+
342
+ Raises:
343
+ ToolError: If the request fails with an appropriate error message
344
+ """
345
+ logger.debug(f"Calling DELETE '{url}'")
139
346
  try:
140
347
  response = await client.delete(
141
348
  url=url,
@@ -147,8 +354,30 @@ async def call_delete(
147
354
  timeout=timeout,
148
355
  extensions=extensions,
149
356
  )
150
- response.raise_for_status()
151
- return response
357
+
358
+ if response.is_success:
359
+ return response
360
+
361
+ # Handle different status codes differently
362
+ status_code = response.status_code
363
+ error_message = get_error_message(status_code, url, "DELETE")
364
+
365
+ # Log at appropriate level based on status code
366
+ if 400 <= status_code < 500:
367
+ # Client errors: log as info except for 429 (Too Many Requests)
368
+ if status_code == 429: # pragma: no cover
369
+ logger.warning(f"Rate limit exceeded: DELETE {url}: {error_message}")
370
+ else:
371
+ logger.info(f"Client error: DELETE {url}: {error_message}")
372
+ else: # pragma: no cover
373
+ # Server errors: log as error
374
+ logger.error(f"Server error: DELETE {url}: {error_message}")
375
+
376
+ # Raise a tool error with the friendly message
377
+ response.raise_for_status() # Will always raise since we're in the error case
378
+ return response # This line will never execute, but it satisfies the type checker # pragma: no cover
379
+
152
380
  except HTTPStatusError as e:
153
- logger.error(f"Error calling DELETE {url}: {e}")
154
- raise ToolError(f"Error calling tool: {e}") from e
381
+ status_code = e.response.status_code
382
+ error_message = get_error_message(status_code, url, "DELETE")
383
+ raise ToolError(error_message) from e
@@ -0,0 +1,124 @@
1
+ """Write note tool for Basic Memory MCP server."""
2
+
3
+ from typing import Optional, List
4
+
5
+ from loguru import logger
6
+
7
+ from basic_memory.mcp.async_client import client
8
+ from basic_memory.mcp.server import mcp
9
+ from basic_memory.mcp.tools.utils import call_put
10
+ from basic_memory.schemas import EntityResponse
11
+ from basic_memory.schemas.base import Entity
12
+
13
+
14
+ @mcp.tool(
15
+ description="Create or update a markdown note. Returns a markdown formatted summary of the semantic content.",
16
+ )
17
+ async def write_note(
18
+ title: str,
19
+ content: str,
20
+ folder: str,
21
+ tags: Optional[List[str]] = None,
22
+ ) -> str:
23
+ """Write a markdown note to the knowledge base.
24
+
25
+ The content can include semantic observations and relations using markdown syntax.
26
+ Relations can be specified either explicitly or through inline wiki-style links:
27
+
28
+ Observations format:
29
+ `- [category] Observation text #tag1 #tag2 (optional context)`
30
+
31
+ Examples:
32
+ `- [design] Files are the source of truth #architecture (All state comes from files)`
33
+ `- [tech] Using SQLite for storage #implementation`
34
+ `- [note] Need to add error handling #todo`
35
+
36
+ Relations format:
37
+ - Explicit: `- relation_type [[Entity]] (optional context)`
38
+ - Inline: Any `[[Entity]]` reference creates a relation
39
+
40
+ Examples:
41
+ `- depends_on [[Content Parser]] (Need for semantic extraction)`
42
+ `- implements [[Search Spec]] (Initial implementation)`
43
+ `- This feature extends [[Base Design]] and uses [[Core Utils]]`
44
+
45
+ Args:
46
+ title: The title of the note
47
+ content: Markdown content for the note, can include observations and relations
48
+ folder: the folder where the file should be saved
49
+ tags: Optional list of tags to categorize the note
50
+
51
+ Returns:
52
+ A markdown formatted summary of the semantic content, including:
53
+ - Creation/update status
54
+ - File path and checksum
55
+ - Observation counts by category
56
+ - Relation counts (resolved/unresolved)
57
+ - Tags if present
58
+ """
59
+ logger.info("MCP tool call", tool="write_note", folder=folder, title=title, tags=tags)
60
+
61
+ # Create the entity request
62
+ metadata = {"tags": [f"#{tag}" for tag in tags]} if tags else None
63
+ entity = Entity(
64
+ title=title,
65
+ folder=folder,
66
+ entity_type="note",
67
+ content_type="text/markdown",
68
+ content=content,
69
+ entity_metadata=metadata,
70
+ )
71
+
72
+ # Create or update via knowledge API
73
+ logger.debug("Creating entity via API", permalink=entity.permalink)
74
+ url = f"/knowledge/entities/{entity.permalink}"
75
+ response = await call_put(client, url, json=entity.model_dump())
76
+ result = EntityResponse.model_validate(response.json())
77
+
78
+ # Format semantic summary based on status code
79
+ action = "Created" if response.status_code == 201 else "Updated"
80
+ summary = [
81
+ f"# {action} {result.file_path} ({result.checksum[:8] if result.checksum else 'unknown'})",
82
+ f"permalink: {result.permalink}",
83
+ ]
84
+
85
+ # Count observations by category
86
+ categories = {}
87
+ if result.observations:
88
+ for obs in result.observations:
89
+ categories[obs.category] = categories.get(obs.category, 0) + 1
90
+
91
+ summary.append("\n## Observations")
92
+ for category, count in sorted(categories.items()):
93
+ summary.append(f"- {category}: {count}")
94
+
95
+ # Count resolved/unresolved relations
96
+ unresolved = 0
97
+ resolved = 0
98
+ if result.relations:
99
+ unresolved = sum(1 for r in result.relations if not r.to_id)
100
+ resolved = len(result.relations) - unresolved
101
+
102
+ summary.append("\n## Relations")
103
+ summary.append(f"- Resolved: {resolved}")
104
+ if unresolved:
105
+ summary.append(f"- Unresolved: {unresolved}")
106
+ summary.append("\nUnresolved relations will be retried on next sync.")
107
+
108
+ if tags:
109
+ summary.append(f"\n## Tags\n- {', '.join(tags)}")
110
+
111
+ # Log the response with structured data
112
+ logger.info(
113
+ "MCP tool response",
114
+ tool="write_note",
115
+ action=action,
116
+ permalink=result.permalink,
117
+ observations_count=len(result.observations),
118
+ relations_count=len(result.relations),
119
+ resolved_relations=resolved,
120
+ unresolved_relations=unresolved,
121
+ status_code=response.status_code,
122
+ )
123
+
124
+ return "\n".join(summary)
@@ -12,6 +12,7 @@ from sqlalchemy import (
12
12
  DateTime,
13
13
  Index,
14
14
  JSON,
15
+ text,
15
16
  )
16
17
  from sqlalchemy.orm import Mapped, mapped_column, relationship
17
18
 
@@ -32,11 +33,18 @@ class Entity(Base):
32
33
 
33
34
  __tablename__ = "entity"
34
35
  __table_args__ = (
35
- UniqueConstraint("permalink", name="uix_entity_permalink"), # Make permalink unique
36
+ # Regular indexes
36
37
  Index("ix_entity_type", "entity_type"),
37
38
  Index("ix_entity_title", "title"),
38
39
  Index("ix_entity_created_at", "created_at"), # For timeline queries
39
40
  Index("ix_entity_updated_at", "updated_at"), # For timeline queries
41
+ # Unique index only for markdown files with non-null permalinks
42
+ Index(
43
+ "uix_entity_permalink",
44
+ "permalink",
45
+ unique=True,
46
+ sqlite_where=text("content_type = 'text/markdown' AND permalink IS NOT NULL"),
47
+ ),
40
48
  )
41
49
 
42
50
  # Core identity
@@ -46,8 +54,8 @@ class Entity(Base):
46
54
  entity_metadata: Mapped[Optional[dict]] = mapped_column(JSON, nullable=True)
47
55
  content_type: Mapped[str] = mapped_column(String)
48
56
 
49
- # Normalized path for URIs
50
- permalink: Mapped[str] = mapped_column(String, unique=True, index=True)
57
+ # Normalized path for URIs - required for markdown files only
58
+ permalink: Mapped[Optional[str]] = mapped_column(String, nullable=True, index=True)
51
59
  # Actual filesystem relative path
52
60
  file_path: Mapped[str] = mapped_column(String, unique=True, index=True)
53
61
  # checksum of file
@@ -79,6 +87,11 @@ class Entity(Base):
79
87
  """Get all relations (incoming and outgoing) for this entity."""
80
88
  return self.incoming_relations + self.outgoing_relations
81
89
 
90
+ @property
91
+ def is_markdown(self):
92
+ """Check if the entity is a markdown file."""
93
+ return self.content_type == "text/markdown"
94
+
82
95
  def __repr__(self) -> str:
83
96
  return f"Entity(id={self.id}, name='{self.title}', type='{self.entity_type}'"
84
97
 
@@ -127,7 +140,10 @@ class Relation(Base):
127
140
 
128
141
  __tablename__ = "relation"
129
142
  __table_args__ = (
130
- UniqueConstraint("from_id", "to_id", "relation_type", name="uix_relation"),
143
+ UniqueConstraint("from_id", "to_id", "relation_type", name="uix_relation_from_id_to_id"),
144
+ UniqueConstraint(
145
+ "from_id", "to_name", "relation_type", name="uix_relation_from_id_to_name"
146
+ ),
131
147
  Index("ix_relation_type", "relation_type"),
132
148
  Index("ix_relation_from_id", "from_id"), # Add FK indexes
133
149
  Index("ix_relation_to_id", "to_id"),
@@ -155,13 +171,13 @@ class Relation(Base):
155
171
  Format: source/relation_type/target
156
172
  Example: "specs/search/implements/features/search-ui"
157
173
  """
174
+ # Only create permalinks when both source and target have permalinks
175
+ from_permalink = self.from_entity.permalink or self.from_entity.file_path
176
+
158
177
  if self.to_entity:
159
- return generate_permalink(
160
- f"{self.from_entity.permalink}/{self.relation_type}/{self.to_entity.permalink}"
161
- )
162
- return generate_permalink(
163
- f"{self.from_entity.permalink}/{self.relation_type}/{self.to_name}"
164
- )
178
+ to_permalink = self.to_entity.permalink or self.to_entity.file_path
179
+ return generate_permalink(f"{from_permalink}/{self.relation_type}/{to_permalink}")
180
+ return generate_permalink(f"{from_permalink}/{self.relation_type}/{self.to_name}")
165
181
 
166
182
  def __repr__(self) -> str:
167
- return f"Relation(id={self.id}, from_id={self.from_id}, to_id={self.to_id}, to_name={self.to_name}, type='{self.relation_type}')"
183
+ return f"Relation(id={self.id}, from_id={self.from_id}, to_id={self.to_id}, to_name={self.to_name}, type='{self.relation_type}')" # pragma: no cover
@@ -8,7 +8,8 @@ CREATE VIRTUAL TABLE IF NOT EXISTS search_index USING fts5(
8
8
  -- Core entity fields
9
9
  id UNINDEXED, -- Row ID
10
10
  title, -- Title for searching
11
- content, -- Main searchable content
11
+ content_stems, -- Main searchable content split into stems
12
+ content_snippet, -- File content snippet for display
12
13
  permalink, -- Stable identifier (now indexed for path search)
13
14
  file_path UNINDEXED, -- Physical location
14
15
  type UNINDEXED, -- entity/relation/observation
@@ -31,14 +31,15 @@ class EntityRepository(Repository[Entity]):
31
31
  query = self.select().where(Entity.permalink == permalink).options(*self.get_load_options())
32
32
  return await self.find_one(query)
33
33
 
34
- async def get_by_title(self, title: str) -> Optional[Entity]:
34
+ async def get_by_title(self, title: str) -> Sequence[Entity]:
35
35
  """Get entity by title.
36
36
 
37
37
  Args:
38
38
  title: Title of the entity to find
39
39
  """
40
40
  query = self.select().where(Entity.title == title).options(*self.get_load_options())
41
- return await self.find_one(query)
41
+ result = await self.execute_query(query)
42
+ return list(result.scalars().all())
42
43
 
43
44
  async def get_by_file_path(self, file_path: Union[Path, str]) -> Optional[Entity]:
44
45
  """Get entity by file_path.
@@ -0,0 +1,9 @@
1
from basic_memory.repository.repository import Repository


class ProjectInfoRepository(Repository):
    """Repository for statistics queries.

    Only raw query execution is needed here, so no model class is bound.
    """

    def __init__(self, session_maker):
        # Pass None as the model: this repository only uses execute_query,
        # never the mapper-backed CRUD helpers.
        super().__init__(session_maker, None)  # type: ignore
@@ -29,10 +29,11 @@ class Repository[T: Base]:
29
29
 
30
30
  def __init__(self, session_maker: async_sessionmaker[AsyncSession], Model: Type[T]):
31
31
  self.session_maker = session_maker
32
- self.Model = Model
33
- self.mapper = inspect(self.Model).mapper
34
- self.primary_key: Column[Any] = self.mapper.primary_key[0]
35
- self.valid_columns = [column.key for column in self.mapper.columns]
32
+ if Model:
33
+ self.Model = Model
34
+ self.mapper = inspect(self.Model).mapper
35
+ self.primary_key: Column[Any] = self.mapper.primary_key[0]
36
+ self.valid_columns = [column.key for column in self.mapper.columns]
36
37
 
37
38
  def get_model_data(self, entity_data):
38
39
  model_data = {
@@ -70,7 +71,15 @@ class Repository[T: Base]:
70
71
 
71
72
  # Query within same session
72
73
  found = await self.select_by_id(session, model.id) # pyright: ignore [reportAttributeAccessIssue]
73
- assert found is not None, "can't find model after session.add"
74
+ if found is None: # pragma: no cover
75
+ logger.error(
76
+ "Failed to retrieve model after add",
77
+ model_type=self.Model.__name__,
78
+ model_id=model.id, # pyright: ignore
79
+ )
80
+ raise ValueError(
81
+ f"Can't find {self.Model.__name__} with ID {model.id} after session.add" # pyright: ignore
82
+ )
74
83
  return found
75
84
 
76
85
  async def add_all(self, models: List[T]) -> Sequence[T]:
@@ -97,7 +106,7 @@ class Repository[T: Base]:
97
106
  entities = (self.Model,)
98
107
  return select(*entities)
99
108
 
100
- async def find_all(self, skip: int = 0, limit: Optional[int] = 0) -> Sequence[T]:
109
+ async def find_all(self, skip: int = 0, limit: Optional[int] = None) -> Sequence[T]:
101
110
  """Fetch records from the database with pagination."""
102
111
  logger.debug(f"Finding all {self.Model.__name__} (skip={skip}, limit={limit})")
103
112
 
@@ -152,7 +161,15 @@ class Repository[T: Base]:
152
161
  await session.flush()
153
162
 
154
163
  return_instance = await self.select_by_id(session, model.id) # pyright: ignore [reportAttributeAccessIssue]
155
- assert return_instance is not None, "can't find model after session.add"
164
+ if return_instance is None: # pragma: no cover
165
+ logger.error(
166
+ "Failed to retrieve model after create",
167
+ model_type=self.Model.__name__,
168
+ model_id=model.id, # pyright: ignore
169
+ )
170
+ raise ValueError(
171
+ f"Can't find {self.Model.__name__} with ID {model.id} after session.add" # pyright: ignore
172
+ )
156
173
  return return_instance
157
174
 
158
175
  async def create_all(self, data_list: List[dict]) -> Sequence[T]: