basic-memory 0.8.0__py3-none-any.whl → 0.10.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of basic-memory might be problematic.

Files changed (76)
  1. basic_memory/__init__.py +1 -1
  2. basic_memory/alembic/migrations.py +4 -9
  3. basic_memory/alembic/versions/cc7172b46608_update_search_index_schema.py +106 -0
  4. basic_memory/api/app.py +9 -6
  5. basic_memory/api/routers/__init__.py +2 -1
  6. basic_memory/api/routers/knowledge_router.py +30 -4
  7. basic_memory/api/routers/memory_router.py +3 -2
  8. basic_memory/api/routers/project_info_router.py +274 -0
  9. basic_memory/api/routers/search_router.py +22 -4
  10. basic_memory/cli/app.py +54 -3
  11. basic_memory/cli/commands/__init__.py +15 -2
  12. basic_memory/cli/commands/db.py +9 -13
  13. basic_memory/cli/commands/import_chatgpt.py +31 -36
  14. basic_memory/cli/commands/import_claude_conversations.py +32 -35
  15. basic_memory/cli/commands/import_claude_projects.py +34 -37
  16. basic_memory/cli/commands/import_memory_json.py +26 -28
  17. basic_memory/cli/commands/mcp.py +7 -1
  18. basic_memory/cli/commands/project.py +119 -0
  19. basic_memory/cli/commands/project_info.py +167 -0
  20. basic_memory/cli/commands/status.py +7 -9
  21. basic_memory/cli/commands/sync.py +54 -9
  22. basic_memory/cli/commands/{tools.py → tool.py} +92 -19
  23. basic_memory/cli/main.py +40 -1
  24. basic_memory/config.py +157 -10
  25. basic_memory/db.py +19 -4
  26. basic_memory/deps.py +10 -3
  27. basic_memory/file_utils.py +34 -18
  28. basic_memory/markdown/markdown_processor.py +1 -1
  29. basic_memory/markdown/utils.py +5 -0
  30. basic_memory/mcp/main.py +1 -2
  31. basic_memory/mcp/prompts/__init__.py +6 -2
  32. basic_memory/mcp/prompts/ai_assistant_guide.py +9 -10
  33. basic_memory/mcp/prompts/continue_conversation.py +65 -126
  34. basic_memory/mcp/prompts/recent_activity.py +55 -13
  35. basic_memory/mcp/prompts/search.py +72 -17
  36. basic_memory/mcp/prompts/utils.py +139 -82
  37. basic_memory/mcp/server.py +1 -1
  38. basic_memory/mcp/tools/__init__.py +11 -22
  39. basic_memory/mcp/tools/build_context.py +85 -0
  40. basic_memory/mcp/tools/canvas.py +17 -19
  41. basic_memory/mcp/tools/delete_note.py +28 -0
  42. basic_memory/mcp/tools/project_info.py +51 -0
  43. basic_memory/mcp/tools/{resource.py → read_content.py} +42 -5
  44. basic_memory/mcp/tools/read_note.py +190 -0
  45. basic_memory/mcp/tools/recent_activity.py +100 -0
  46. basic_memory/mcp/tools/search.py +56 -17
  47. basic_memory/mcp/tools/utils.py +245 -17
  48. basic_memory/mcp/tools/write_note.py +124 -0
  49. basic_memory/models/search.py +2 -1
  50. basic_memory/repository/entity_repository.py +3 -2
  51. basic_memory/repository/project_info_repository.py +9 -0
  52. basic_memory/repository/repository.py +23 -6
  53. basic_memory/repository/search_repository.py +33 -10
  54. basic_memory/schemas/__init__.py +12 -0
  55. basic_memory/schemas/memory.py +3 -2
  56. basic_memory/schemas/project_info.py +96 -0
  57. basic_memory/schemas/search.py +27 -32
  58. basic_memory/services/context_service.py +3 -3
  59. basic_memory/services/entity_service.py +8 -2
  60. basic_memory/services/file_service.py +107 -57
  61. basic_memory/services/link_resolver.py +5 -45
  62. basic_memory/services/search_service.py +45 -16
  63. basic_memory/sync/sync_service.py +274 -39
  64. basic_memory/sync/watch_service.py +174 -34
  65. basic_memory/utils.py +40 -40
  66. basic_memory-0.10.0.dist-info/METADATA +386 -0
  67. basic_memory-0.10.0.dist-info/RECORD +99 -0
  68. basic_memory/mcp/prompts/json_canvas_spec.py +0 -25
  69. basic_memory/mcp/tools/knowledge.py +0 -68
  70. basic_memory/mcp/tools/memory.py +0 -177
  71. basic_memory/mcp/tools/notes.py +0 -201
  72. basic_memory-0.8.0.dist-info/METADATA +0 -379
  73. basic_memory-0.8.0.dist-info/RECORD +0 -91
  74. {basic_memory-0.8.0.dist-info → basic_memory-0.10.0.dist-info}/WHEEL +0 -0
  75. {basic_memory-0.8.0.dist-info → basic_memory-0.10.0.dist-info}/entry_points.txt +0 -0
  76. {basic_memory-0.8.0.dist-info → basic_memory-0.10.0.dist-info}/licenses/LICENSE +0 -0
@@ -1,3 +1,9 @@
+"""Utility functions for making HTTP requests in Basic Memory MCP tools.
+
+These functions provide a consistent interface for making HTTP requests
+to the Basic Memory API, with improved error handling and logging.
+"""
+
 import typing
 
 from httpx import Response, URL, AsyncClient, HTTPStatusError
@@ -17,6 +23,54 @@ from loguru import logger
 from mcp.server.fastmcp.exceptions import ToolError
 
 
+def get_error_message(status_code: int, url: URL | str, method: str) -> str:
+    """Get a friendly error message based on the HTTP status code.
+
+    Args:
+        status_code: The HTTP status code
+        url: The URL that was requested
+        method: The HTTP method used
+
+    Returns:
+        A user-friendly error message
+    """
+    # Extract path from URL for cleaner error messages
+    if isinstance(url, str):
+        path = url.split("/")[-1]
+    else:
+        path = str(url).split("/")[-1] if url else "resource"
+
+    # Client errors (400-499)
+    if status_code == 400:
+        return f"Invalid request: The request to '{path}' was malformed or invalid"
+    elif status_code == 401:  # pragma: no cover
+        return f"Authentication required: You need to authenticate to access '{path}'"
+    elif status_code == 403:  # pragma: no cover
+        return f"Access denied: You don't have permission to access '{path}'"
+    elif status_code == 404:
+        return f"Resource not found: '{path}' doesn't exist or has been moved"
+    elif status_code == 409:  # pragma: no cover
+        return f"Conflict: The request for '{path}' conflicts with the current state"
+    elif status_code == 429:  # pragma: no cover
+        return "Too many requests: Please slow down and try again later"
+    elif 400 <= status_code < 500:  # pragma: no cover
+        return f"Client error ({status_code}): The request for '{path}' could not be completed"
+
+    # Server errors (500-599)
+    elif status_code == 500:
+        return f"Internal server error: Something went wrong processing '{path}'"
+    elif status_code == 503:  # pragma: no cover
+        return (
+            f"Service unavailable: The server is currently unable to handle requests for '{path}'"
+        )
+    elif 500 <= status_code < 600:  # pragma: no cover
+        return f"Server error ({status_code}): The server encountered an error handling '{path}'"
+
+    # Fallback for any other status code
+    else:  # pragma: no cover
+        return f"HTTP error {status_code}: {method} request to '{path}' failed"
+
+
 async def call_get(
     client: AsyncClient,
     url: URL | str,
@@ -29,6 +83,25 @@ async def call_get(
     timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
     extensions: RequestExtensions | None = None,
 ) -> Response:
+    """Make a GET request and handle errors appropriately.
+
+    Args:
+        client: The HTTPX AsyncClient to use
+        url: The URL to request
+        params: Query parameters
+        headers: HTTP headers
+        cookies: HTTP cookies
+        auth: Authentication
+        follow_redirects: Whether to follow redirects
+        timeout: Request timeout
+        extensions: HTTPX extensions
+
+    Returns:
+        The HTTP response
+
+    Raises:
+        ToolError: If the request fails with an appropriate error message
+    """
     logger.debug(f"Calling GET '{url}' params: '{params}'")
     try:
         response = await client.get(
@@ -41,11 +114,33 @@
             timeout=timeout,
             extensions=extensions,
         )
-        response.raise_for_status()
-        return response
+
+        if response.is_success:
+            return response
+
+        # Handle different status codes differently
+        status_code = response.status_code
+        error_message = get_error_message(status_code, url, "GET")
+
+        # Log at appropriate level based on status code
+        if 400 <= status_code < 500:
+            # Client errors: log as info except for 429 (Too Many Requests)
+            if status_code == 429:  # pragma: no cover
+                logger.warning(f"Rate limit exceeded: GET {url}: {error_message}")
+            else:
+                logger.info(f"Client error: GET {url}: {error_message}")
+        else:  # pragma: no cover
+            # Server errors: log as error
+            logger.error(f"Server error: GET {url}: {error_message}")
+
+        # Raise a tool error with the friendly message
+        response.raise_for_status()  # Will always raise since we're in the error case
+        return response  # This line will never execute, but it satisfies the type checker  # pragma: no cover
+
     except HTTPStatusError as e:
-        logger.exception(f"Error calling GET {url}: {e}")
-        raise ToolError(f"Error calling tool: {e}.") from e
+        status_code = e.response.status_code
+        error_message = get_error_message(status_code, url, "GET")
+        raise ToolError(error_message) from e
 
 
 async def call_put(
@@ -64,6 +159,30 @@
     timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
     extensions: RequestExtensions | None = None,
 ) -> Response:
+    """Make a PUT request and handle errors appropriately.
+
+    Args:
+        client: The HTTPX AsyncClient to use
+        url: The URL to request
+        content: Request content
+        data: Form data
+        files: Files to upload
+        json: JSON data
+        params: Query parameters
+        headers: HTTP headers
+        cookies: HTTP cookies
+        auth: Authentication
+        follow_redirects: Whether to follow redirects
+        timeout: Request timeout
+        extensions: HTTPX extensions
+
+    Returns:
+        The HTTP response
+
+    Raises:
+        ToolError: If the request fails with an appropriate error message
+    """
+    logger.debug(f"Calling PUT '{url}'")
     try:
         response = await client.put(
             url,
@@ -79,12 +198,33 @@
             timeout=timeout,
             extensions=extensions,
         )
-        logger.debug(response)
-        response.raise_for_status()
-        return response
+
+        if response.is_success:
+            return response
+
+        # Handle different status codes differently
+        status_code = response.status_code
+        error_message = get_error_message(status_code, url, "PUT")
+
+        # Log at appropriate level based on status code
+        if 400 <= status_code < 500:
+            # Client errors: log as info except for 429 (Too Many Requests)
+            if status_code == 429:  # pragma: no cover
+                logger.warning(f"Rate limit exceeded: PUT {url}: {error_message}")
+            else:
+                logger.info(f"Client error: PUT {url}: {error_message}")
+        else:  # pragma: no cover
+            # Server errors: log as error
+            logger.error(f"Server error: PUT {url}: {error_message}")
+
+        # Raise a tool error with the friendly message
+        response.raise_for_status()  # Will always raise since we're in the error case
+        return response  # This line will never execute, but it satisfies the type checker  # pragma: no cover
+
     except HTTPStatusError as e:
-        logger.error(f"Error calling PUT {url}: {e}")
-        raise ToolError(f"Error calling tool: {e}") from e
+        status_code = e.response.status_code
+        error_message = get_error_message(status_code, url, "PUT")
+        raise ToolError(error_message) from e
 
 
 async def call_post(
@@ -103,6 +243,30 @@
     timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
     extensions: RequestExtensions | None = None,
 ) -> Response:
+    """Make a POST request and handle errors appropriately.
+
+    Args:
+        client: The HTTPX AsyncClient to use
+        url: The URL to request
+        content: Request content
+        data: Form data
+        files: Files to upload
+        json: JSON data
+        params: Query parameters
+        headers: HTTP headers
+        cookies: HTTP cookies
+        auth: Authentication
+        follow_redirects: Whether to follow redirects
+        timeout: Request timeout
+        extensions: HTTPX extensions
+
+    Returns:
+        The HTTP response
+
+    Raises:
+        ToolError: If the request fails with an appropriate error message
+    """
+    logger.debug(f"Calling POST '{url}'")
     try:
         response = await client.post(
             url=url,
@@ -118,11 +282,33 @@
             timeout=timeout,
             extensions=extensions,
         )
-        response.raise_for_status()
-        return response
+
+        if response.is_success:
+            return response
+
+        # Handle different status codes differently
+        status_code = response.status_code
+        error_message = get_error_message(status_code, url, "POST")
+
+        # Log at appropriate level based on status code
+        if 400 <= status_code < 500:
+            # Client errors: log as info except for 429 (Too Many Requests)
+            if status_code == 429:  # pragma: no cover
+                logger.warning(f"Rate limit exceeded: POST {url}: {error_message}")
+            else:  # pragma: no cover
+                logger.info(f"Client error: POST {url}: {error_message}")
+        else:
+            # Server errors: log as error
+            logger.error(f"Server error: POST {url}: {error_message}")
+
+        # Raise a tool error with the friendly message
+        response.raise_for_status()  # Will always raise since we're in the error case
+        return response  # This line will never execute, but it satisfies the type checker  # pragma: no cover
+
     except HTTPStatusError as e:
-        logger.error(f"Error calling POST {url}: {e}")
-        raise ToolError(f"Error calling tool: {e}") from e
+        status_code = e.response.status_code
+        error_message = get_error_message(status_code, url, "POST")
+        raise ToolError(error_message) from e
 
 
 async def call_delete(
@@ -137,6 +323,26 @@
     timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
     extensions: RequestExtensions | None = None,
 ) -> Response:
+    """Make a DELETE request and handle errors appropriately.
+
+    Args:
+        client: The HTTPX AsyncClient to use
+        url: The URL to request
+        params: Query parameters
+        headers: HTTP headers
+        cookies: HTTP cookies
+        auth: Authentication
+        follow_redirects: Whether to follow redirects
+        timeout: Request timeout
+        extensions: HTTPX extensions
+
+    Returns:
+        The HTTP response
+
+    Raises:
+        ToolError: If the request fails with an appropriate error message
+    """
+    logger.debug(f"Calling DELETE '{url}'")
     try:
         response = await client.delete(
             url=url,
@@ -148,8 +354,30 @@
             timeout=timeout,
             extensions=extensions,
         )
-        response.raise_for_status()
-        return response
+
+        if response.is_success:
+            return response
+
+        # Handle different status codes differently
+        status_code = response.status_code
+        error_message = get_error_message(status_code, url, "DELETE")
+
+        # Log at appropriate level based on status code
+        if 400 <= status_code < 500:
+            # Client errors: log as info except for 429 (Too Many Requests)
+            if status_code == 429:  # pragma: no cover
+                logger.warning(f"Rate limit exceeded: DELETE {url}: {error_message}")
+            else:
+                logger.info(f"Client error: DELETE {url}: {error_message}")
+        else:  # pragma: no cover
+            # Server errors: log as error
+            logger.error(f"Server error: DELETE {url}: {error_message}")
+
+        # Raise a tool error with the friendly message
+        response.raise_for_status()  # Will always raise since we're in the error case
+        return response  # This line will never execute, but it satisfies the type checker  # pragma: no cover
+
     except HTTPStatusError as e:
-        logger.error(f"Error calling DELETE {url}: {e}")
-        raise ToolError(f"Error calling tool: {e}") from e
+        status_code = e.response.status_code
+        error_message = get_error_message(status_code, url, "DELETE")
+        raise ToolError(error_message) from e
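The rewritten helpers above turn HTTP failures into ToolError messages built by get_error_message instead of raw httpx tracebacks. A minimal sketch of how that surfaces to a caller, using an httpx MockTransport so no Basic Memory API is needed (the endpoint paths are placeholders, not taken from this diff):

import asyncio

import httpx
from mcp.server.fastmcp.exceptions import ToolError

from basic_memory.mcp.tools.utils import call_get, get_error_message


async def demo() -> None:
    # Transport that always answers 404, to exercise the error path offline.
    transport = httpx.MockTransport(lambda request: httpx.Response(404))

    async with httpx.AsyncClient(transport=transport, base_url="http://test") as client:
        try:
            await call_get(client, "/knowledge/entities/missing-note")
        except ToolError as e:
            # -> Resource not found: 'missing-note' doesn't exist or has been moved
            print(e)

    # get_error_message can also be called directly to preview the wording:
    print(get_error_message(500, "/stats", "GET"))


if __name__ == "__main__":
    asyncio.run(demo())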
@@ -0,0 +1,124 @@
+"""Write note tool for Basic Memory MCP server."""
+
+from typing import Optional, List
+
+from loguru import logger
+
+from basic_memory.mcp.async_client import client
+from basic_memory.mcp.server import mcp
+from basic_memory.mcp.tools.utils import call_put
+from basic_memory.schemas import EntityResponse
+from basic_memory.schemas.base import Entity
+
+
+@mcp.tool(
+    description="Create or update a markdown note. Returns a markdown formatted summary of the semantic content.",
+)
+async def write_note(
+    title: str,
+    content: str,
+    folder: str,
+    tags: Optional[List[str]] = None,
+) -> str:
+    """Write a markdown note to the knowledge base.
+
+    The content can include semantic observations and relations using markdown syntax.
+    Relations can be specified either explicitly or through inline wiki-style links:
+
+    Observations format:
+    `- [category] Observation text #tag1 #tag2 (optional context)`
+
+    Examples:
+    `- [design] Files are the source of truth #architecture (All state comes from files)`
+    `- [tech] Using SQLite for storage #implementation`
+    `- [note] Need to add error handling #todo`
+
+    Relations format:
+    - Explicit: `- relation_type [[Entity]] (optional context)`
+    - Inline: Any `[[Entity]]` reference creates a relation
+
+    Examples:
+    `- depends_on [[Content Parser]] (Need for semantic extraction)`
+    `- implements [[Search Spec]] (Initial implementation)`
+    `- This feature extends [[Base Design]] and uses [[Core Utils]]`
+
+    Args:
+        title: The title of the note
+        content: Markdown content for the note, can include observations and relations
+        folder: the folder where the file should be saved
+        tags: Optional list of tags to categorize the note
+
+    Returns:
+        A markdown formatted summary of the semantic content, including:
+        - Creation/update status
+        - File path and checksum
+        - Observation counts by category
+        - Relation counts (resolved/unresolved)
+        - Tags if present
+    """
+    logger.info("MCP tool call", tool="write_note", folder=folder, title=title, tags=tags)
+
+    # Create the entity request
+    metadata = {"tags": [f"#{tag}" for tag in tags]} if tags else None
+    entity = Entity(
+        title=title,
+        folder=folder,
+        entity_type="note",
+        content_type="text/markdown",
+        content=content,
+        entity_metadata=metadata,
+    )
+
+    # Create or update via knowledge API
+    logger.debug("Creating entity via API", permalink=entity.permalink)
+    url = f"/knowledge/entities/{entity.permalink}"
+    response = await call_put(client, url, json=entity.model_dump())
+    result = EntityResponse.model_validate(response.json())
+
+    # Format semantic summary based on status code
+    action = "Created" if response.status_code == 201 else "Updated"
+    summary = [
+        f"# {action} {result.file_path} ({result.checksum[:8] if result.checksum else 'unknown'})",
+        f"permalink: {result.permalink}",
+    ]
+
+    # Count observations by category
+    categories = {}
+    if result.observations:
+        for obs in result.observations:
+            categories[obs.category] = categories.get(obs.category, 0) + 1
+
+        summary.append("\n## Observations")
+        for category, count in sorted(categories.items()):
+            summary.append(f"- {category}: {count}")
+
+    # Count resolved/unresolved relations
+    unresolved = 0
+    resolved = 0
+    if result.relations:
+        unresolved = sum(1 for r in result.relations if not r.to_id)
+        resolved = len(result.relations) - unresolved
+
+        summary.append("\n## Relations")
+        summary.append(f"- Resolved: {resolved}")
+        if unresolved:
+            summary.append(f"- Unresolved: {unresolved}")
+            summary.append("\nUnresolved relations will be retried on next sync.")
+
+    if tags:
+        summary.append(f"\n## Tags\n- {', '.join(tags)}")
+
+    # Log the response with structured data
+    logger.info(
+        "MCP tool response",
+        tool="write_note",
+        action=action,
+        permalink=result.permalink,
+        observations_count=len(result.observations),
+        relations_count=len(result.relations),
+        resolved_relations=resolved,
+        unresolved_relations=unresolved,
+        status_code=response.status_code,
+    )
+
+    return "\n".join(summary)
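For orientation, here is what invoking the tool might look like, built from the observation and relation formats documented in the docstring above. The title, folder, and tags are made up for the example, and a running Basic Memory API is assumed behind the shared MCP client:

import asyncio

from basic_memory.mcp.tools.write_note import write_note


async def demo() -> None:
    content = """# Search Design

- [design] Files are the source of truth #architecture (All state comes from files)
- [tech] Using SQLite for storage #implementation
- implements [[Search Spec]] (Initial implementation)
- This feature extends [[Base Design]] and uses [[Core Utils]]
"""
    summary = await write_note(
        title="Search Design",
        content=content,
        folder="specs",
        tags=["search", "design"],
    )
    print(summary)  # markdown summary: path, checksum, observation and relation counts


if __name__ == "__main__":
    asyncio.run(demo())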
@@ -8,7 +8,8 @@ CREATE VIRTUAL TABLE IF NOT EXISTS search_index USING fts5(
     -- Core entity fields
     id UNINDEXED,         -- Row ID
     title,                -- Title for searching
-    content,              -- Main searchable content
+    content_stems,        -- Main searchable content split into stems
+    content_snippet,      -- File content snippet for display
     permalink,            -- Stable identifier (now indexed for path search)
     file_path UNINDEXED,  -- Physical location
     type UNINDEXED,       -- entity/relation/observation
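The old single content column is split into content_stems (what FTS5 matches against) and content_snippet (what gets shown in results). The idea can be reproduced in isolation with Python's bundled SQLite, assuming it was built with FTS5 enabled; this is a standalone sketch, not the project's actual schema or migration:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute(
    """
    CREATE VIRTUAL TABLE search_index USING fts5(
        id UNINDEXED,
        title,
        content_stems,      -- indexed text used for matching
        content_snippet,    -- short text kept for display
        permalink,
        file_path UNINDEXED,
        type UNINDEXED
    )
    """
)
conn.execute(
    "INSERT INTO search_index VALUES (?, ?, ?, ?, ?, ?, ?)",
    (
        1,
        "Search Design",
        "search design sqlite fts5 stems",
        "Using SQLite FTS5 for search",
        "specs/search-design",
        "specs/search-design.md",
        "entity",
    ),
)

# Match against the stems column, display the snippet column.
for row in conn.execute(
    "SELECT title, content_snippet FROM search_index WHERE content_stems MATCH ?",
    ("sqlite",),
):
    print(row)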
@@ -31,14 +31,15 @@ class EntityRepository(Repository[Entity]):
         query = self.select().where(Entity.permalink == permalink).options(*self.get_load_options())
         return await self.find_one(query)
 
-    async def get_by_title(self, title: str) -> Optional[Entity]:
+    async def get_by_title(self, title: str) -> Sequence[Entity]:
         """Get entity by title.
 
         Args:
            title: Title of the entity to find
        """
         query = self.select().where(Entity.title == title).options(*self.get_load_options())
-        return await self.find_one(query)
+        result = await self.execute_query(query)
+        return list(result.scalars().all())
 
     async def get_by_file_path(self, file_path: Union[Path, str]) -> Optional[Entity]:
         """Get entity by file_path.
@@ -0,0 +1,9 @@
+from basic_memory.repository.repository import Repository
+
+
+class ProjectInfoRepository(Repository):
+    """Repository for statistics queries."""
+
+    def __init__(self, session_maker):
+        # Initialize with a dummy model since we're just using the execute_query method
+        super().__init__(session_maker, None)  # type: ignore
@@ -29,10 +29,11 @@ class Repository[T: Base]:
 
     def __init__(self, session_maker: async_sessionmaker[AsyncSession], Model: Type[T]):
         self.session_maker = session_maker
-        self.Model = Model
-        self.mapper = inspect(self.Model).mapper
-        self.primary_key: Column[Any] = self.mapper.primary_key[0]
-        self.valid_columns = [column.key for column in self.mapper.columns]
+        if Model:
+            self.Model = Model
+            self.mapper = inspect(self.Model).mapper
+            self.primary_key: Column[Any] = self.mapper.primary_key[0]
+            self.valid_columns = [column.key for column in self.mapper.columns]
 
     def get_model_data(self, entity_data):
         model_data = {
@@ -70,7 +71,15 @@
 
             # Query within same session
             found = await self.select_by_id(session, model.id)  # pyright: ignore [reportAttributeAccessIssue]
-            assert found is not None, "can't find model after session.add"
+            if found is None:  # pragma: no cover
+                logger.error(
+                    "Failed to retrieve model after add",
+                    model_type=self.Model.__name__,
+                    model_id=model.id,  # pyright: ignore
+                )
+                raise ValueError(
+                    f"Can't find {self.Model.__name__} with ID {model.id} after session.add"  # pyright: ignore
+                )
             return found
 
     async def add_all(self, models: List[T]) -> Sequence[T]:
@@ -152,7 +161,15 @@
             await session.flush()
 
             return_instance = await self.select_by_id(session, model.id)  # pyright: ignore [reportAttributeAccessIssue]
-            assert return_instance is not None, "can't find model after session.add"
+            if return_instance is None:  # pragma: no cover
+                logger.error(
+                    "Failed to retrieve model after create",
+                    model_type=self.Model.__name__,
+                    model_id=model.id,  # pyright: ignore
+                )
+                raise ValueError(
+                    f"Can't find {self.Model.__name__} with ID {model.id} after session.add"  # pyright: ignore
+                )
             return return_instance
 
     async def create_all(self, data_list: List[dict]) -> Sequence[T]:
@@ -35,18 +35,24 @@ class SearchIndexRow:
 
     # Type-specific fields
     title: Optional[str] = None  # entity
-    content: Optional[str] = None  # entity, observation
+    content_stems: Optional[str] = None  # entity, observation
+    content_snippet: Optional[str] = None  # entity, observation
     entity_id: Optional[int] = None  # observations
     category: Optional[str] = None  # observations
    from_id: Optional[int] = None  # relations
    to_id: Optional[int] = None  # relations
    relation_type: Optional[str] = None  # relations
 
+    @property
+    def content(self):
+        return self.content_snippet
+
     def to_insert(self):
         return {
             "id": self.id,
             "title": self.title,
-            "content": self.content,
+            "content_stems": self.content_stems,
+            "content_snippet": self.content_snippet,
             "permalink": self.permalink,
             "file_path": self.file_path,
             "type": self.type,
@@ -88,10 +94,16 @@ class SearchRepository:
         For FTS5:
         - Special characters and phrases need to be quoted
         - Terms with spaces or special chars need quotes
+        - Boolean operators (AND, OR, NOT) and parentheses are preserved
         """
         if "*" in term:
             return term
 
+        # Check for boolean operators - if present, return the term as is
+        boolean_operators = [" AND ", " OR ", " NOT ", "(", ")"]
+        if any(op in f" {term} " for op in boolean_operators):
+            return term
+
        # List of special characters that need quoting (excluding *)
        special_chars = ["/", "-", ".", " ", "(", ")", "[", "]", '"', "'"]
 
@@ -124,9 +136,20 @@
 
         # Handle text search for title and content
         if search_text:
-            search_text = self._prepare_search_term(search_text.strip())
-            params["text"] = search_text
-            conditions.append("(title MATCH :text OR content MATCH :text)")
+            has_boolean = any(
+                op in f" {search_text} " for op in [" AND ", " OR ", " NOT ", "(", ")"]
+            )
+
+            if has_boolean:
+                # If boolean operators are present, use the raw query
+                # No need to prepare it, FTS5 will understand the operators
+                params["text"] = search_text
+                conditions.append("(title MATCH :text OR content_stems MATCH :text)")
+            else:
+                # Standard search with term preparation
+                processed_text = self._prepare_search_term(search_text.strip())
+                params["text"] = processed_text
+                conditions.append("(title MATCH :text OR content_stems MATCH :text)")
 
         # Handle title match search
         if title:
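Taken together with the _prepare_search_term change above: wildcard and boolean queries are passed straight to FTS5, and everything else is quoted before matching against title and content_stems. A small standalone mirror of that decision logic, for illustration only (it re-implements the rules shown in the diff rather than importing SearchRepository):

def prepare_search_term(term: str) -> str:
    """Illustrative mirror of the quoting rules shown in the diff."""
    if "*" in term:
        return term  # wildcard searches go through unchanged

    # Boolean queries are handed to FTS5 as-is.
    if any(op in f" {term} " for op in [" AND ", " OR ", " NOT ", "(", ")"]):
        return term

    # Terms with special characters are quoted so FTS5 treats them as phrases.
    special_chars = ["/", "-", ".", " ", "(", ")", "[", "]", '"', "'"]
    if any(c in term for c in special_chars):
        return f'"{term}"'
    return term


print(prepare_search_term("search*"))              # search*
print(prepare_search_term("sqlite AND fts5"))      # sqlite AND fts5
print(prepare_search_term("specs/search-design"))  # "specs/search-design"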
@@ -188,7 +211,7 @@
                 to_id,
                 relation_type,
                 entity_id,
-                content,
+                content_snippet,
                 category,
                 created_at,
                 updated_at,
@@ -200,7 +223,7 @@
             OFFSET :offset
         """
 
-        logger.debug(f"Search {sql} params: {params}")
+        logger.trace(f"Search {sql} params: {params}")
         async with db.scoped_session(self.session_maker) as session:
             result = await session.execute(text(sql), params)
             rows = result.fetchall()
@@ -218,7 +241,7 @@
                 to_id=row.to_id,
                 relation_type=row.relation_type,
                 entity_id=row.entity_id,
-                content=row.content,
+                content_snippet=row.content_snippet,
                 category=row.category,
                 created_at=row.created_at,
                 updated_at=row.updated_at,
@@ -250,12 +273,12 @@
             await session.execute(
                 text("""
                     INSERT INTO search_index (
-                        id, title, content, permalink, file_path, type, metadata,
+                        id, title, content_stems, content_snippet, permalink, file_path, type, metadata,
                         from_id, to_id, relation_type,
                         entity_id, category,
                         created_at, updated_at
                     ) VALUES (
-                        :id, :title, :content, :permalink, :file_path, :type, :metadata,
+                        :id, :title, :content_stems, :content_snippet, :permalink, :file_path, :type, :metadata,
                         :from_id, :to_id, :relation_type,
                         :entity_id, :category,
                         :created_at, :updated_at
@@ -37,6 +37,13 @@ from basic_memory.schemas.response import (
     DeleteEntitiesResponse,
 )
 
+from basic_memory.schemas.project_info import (
+    ProjectStatistics,
+    ActivityMetrics,
+    SystemStatus,
+    ProjectInfoResponse,
+)
+
 # For convenient imports, export all models
 __all__ = [
     # Base
@@ -59,4 +66,9 @@ __all__ = [
     "DeleteEntitiesResponse",
     # Delete Operations
     "DeleteEntitiesRequest",
+    # Project Info
+    "ProjectStatistics",
+    "ActivityMetrics",
+    "SystemStatus",
+    "ProjectInfoResponse",
 ]
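With these exports in place, the project-info models can be imported directly from the package, for example:

from basic_memory.schemas import (
    ActivityMetrics,
    ProjectInfoResponse,
    ProjectStatistics,
    SystemStatus,
)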