basic-memory 0.6.0__py3-none-any.whl → 0.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of basic-memory might be problematic. Click here for more details.

Files changed (35) hide show
  1. basic_memory/__init__.py +1 -1
  2. basic_memory/api/routers/knowledge_router.py +0 -8
  3. basic_memory/api/routers/memory_router.py +26 -10
  4. basic_memory/api/routers/resource_router.py +14 -8
  5. basic_memory/api/routers/search_router.py +17 -9
  6. basic_memory/cli/app.py +1 -1
  7. basic_memory/cli/commands/db.py +11 -8
  8. basic_memory/cli/commands/import_chatgpt.py +31 -27
  9. basic_memory/cli/commands/import_claude_conversations.py +29 -27
  10. basic_memory/cli/commands/import_claude_projects.py +30 -29
  11. basic_memory/cli/commands/import_memory_json.py +28 -26
  12. basic_memory/cli/commands/status.py +8 -6
  13. basic_memory/cli/commands/sync.py +6 -3
  14. basic_memory/cli/commands/tools.py +157 -0
  15. basic_memory/cli/main.py +1 -0
  16. basic_memory/config.py +1 -1
  17. basic_memory/db.py +1 -0
  18. basic_memory/deps.py +5 -1
  19. basic_memory/mcp/tools/knowledge.py +26 -14
  20. basic_memory/mcp/tools/memory.py +48 -29
  21. basic_memory/mcp/tools/notes.py +66 -72
  22. basic_memory/mcp/tools/search.py +13 -4
  23. basic_memory/repository/search_repository.py +3 -0
  24. basic_memory/schemas/memory.py +3 -0
  25. basic_memory/schemas/request.py +1 -1
  26. basic_memory/schemas/search.py +2 -0
  27. basic_memory/services/context_service.py +14 -6
  28. basic_memory/services/search_service.py +3 -1
  29. basic_memory/sync/sync_service.py +98 -89
  30. basic_memory/utils.py +4 -7
  31. {basic_memory-0.6.0.dist-info → basic_memory-0.7.0.dist-info}/METADATA +2 -2
  32. {basic_memory-0.6.0.dist-info → basic_memory-0.7.0.dist-info}/RECORD +35 -34
  33. {basic_memory-0.6.0.dist-info → basic_memory-0.7.0.dist-info}/WHEEL +0 -0
  34. {basic_memory-0.6.0.dist-info → basic_memory-0.7.0.dist-info}/entry_points.txt +0 -0
  35. {basic_memory-0.6.0.dist-info → basic_memory-0.7.0.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,157 @@
1
+ """Database management commands."""
2
+
3
+ import asyncio
4
+ from typing import Optional, List, Annotated
5
+
6
+ import typer
7
+ from rich import print as rprint
8
+
9
+ from basic_memory.cli.app import app
10
+ from basic_memory.mcp.tools import build_context as mcp_build_context
11
+ from basic_memory.mcp.tools import get_entity as mcp_get_entity
12
+ from basic_memory.mcp.tools import read_note as mcp_read_note
13
+ from basic_memory.mcp.tools import recent_activity as mcp_recent_activity
14
+ from basic_memory.mcp.tools import search as mcp_search
15
+ from basic_memory.mcp.tools import write_note as mcp_write_note
16
+ from basic_memory.schemas.base import TimeFrame
17
+ from basic_memory.schemas.memory import MemoryUrl
18
+ from basic_memory.schemas.search import SearchQuery
19
+
20
# Sub-application exposing CLI equivalents of the MCP tools; mounted on the
# main app under the `tools` command group.
tool_app = typer.Typer()
# Fix: grammatical help text (was "cli versions mcp tools").
app.add_typer(tool_app, name="tools", help="CLI versions of MCP tools")
22
+
23
+
24
@tool_app.command()
def write_note(
    title: Annotated[str, typer.Option(help="The title of the note")],
    content: Annotated[str, typer.Option(help="The content of the note")],
    folder: Annotated[str, typer.Option(help="The folder to create the note in")],
    tags: Annotated[
        Optional[List[str]], typer.Option(help="A list of tags to apply to the note")
    ] = None,
):
    """Create or update a note via the MCP write_note tool and print the summary."""
    try:
        result = asyncio.run(mcp_write_note(title, content, folder, tags))
        rprint(result)
    except typer.Exit:  # pragma: no cover
        # Deliberate exits pass through untouched.
        raise
    except Exception as exc:  # pragma: no cover
        typer.echo(f"Error during write_note: {exc}", err=True)
        raise typer.Exit(1)
41
+
42
+
43
@tool_app.command()
def read_note(identifier: str, page: int = 1, page_size: int = 10):
    """Fetch note content via the MCP read_note tool and print it.

    `identifier` may be a title, permalink, relation path, or pattern;
    `page`/`page_size` control result pagination.
    """
    try:
        note_content = asyncio.run(mcp_read_note(identifier, page, page_size))
        rprint(note_content)
    except typer.Exit:  # pragma: no cover
        # Deliberate exits pass through untouched.
        raise
    except Exception as exc:  # pragma: no cover
        typer.echo(f"Error during read_note: {exc}", err=True)
        raise typer.Exit(1)
53
+
54
+
55
@tool_app.command()
def build_context(
    url: MemoryUrl,
    depth: Optional[int] = 1,
    timeframe: Optional[TimeFrame] = "7d",
    page: int = 1,
    page_size: int = 10,
    max_related: int = 10,
):
    """Build discussion context for a memory:// URL and print it as a dict."""
    try:
        graph = asyncio.run(
            mcp_build_context(
                url=url,
                depth=depth,
                timeframe=timeframe,
                page=page,
                page_size=page_size,
                max_related=max_related,
            )
        )
        rprint(graph.model_dump())
    except typer.Exit:  # pragma: no cover
        # Deliberate exits pass through untouched.
        raise
    except Exception as exc:  # pragma: no cover
        typer.echo(f"Error during build_context: {exc}", err=True)
        raise typer.Exit(1)
81
+
82
+
83
@tool_app.command()
def recent_activity(
    type: Annotated[Optional[List[str]], typer.Option()] = ["entity", "observation", "relation"],
    depth: Optional[int] = 1,
    timeframe: Optional[TimeFrame] = "7d",
    page: int = 1,
    page_size: int = 10,
    max_related: int = 10,
):
    """Show recent activity across the knowledge base and print it as a dict.

    Mirrors the MCP recent_activity tool. `type` filters which item kinds are
    included and must contain only 'entity', 'observation', or 'relation'.
    """
    # NOTE: the list default is never mutated here, and Typer uses it to
    # render the default in --help, so the mutable-default idiom is tolerated.
    assert type is not None, "type is required"
    if any(t not in ["entity", "observation", "relation"] for t in type):  # pragma: no cover
        print("type must be one of ['entity', 'observation', 'relation']")
        raise typer.Abort()

    try:
        context = asyncio.run(
            mcp_recent_activity(
                type=type,  # pyright: ignore [reportArgumentType]
                depth=depth,
                timeframe=timeframe,
                page=page,
                page_size=page_size,
                max_related=max_related,
            )
        )
        rprint(context.model_dump())
    except Exception as e:  # pragma: no cover
        if not isinstance(e, typer.Exit):
            # Bug fix: message previously said "build_context" (copy/paste error).
            typer.echo(f"Error during recent_activity: {e}", err=True)
            raise typer.Exit(1)
        raise
114
+
115
+
116
@tool_app.command()
def search(
    query: str,
    permalink: Annotated[bool, typer.Option("--permalink", help="Search permalink values")] = False,
    title: Annotated[bool, typer.Option("--title", help="Search title values")] = False,
    after_date: Annotated[
        Optional[str],
        typer.Option("--after_date", help="Search results after date, eg. '2d', '1 week'"),
    ] = None,
    page: int = 1,
    page_size: int = 10,
):
    """Search across the knowledge base and print the results as a dict.

    The --permalink and --title flags are mutually exclusive and direct the
    query text at those specific fields.
    """
    if permalink and title:  # pragma: no cover
        print("Cannot search both permalink and title")
        raise typer.Abort()

    try:
        search_query = SearchQuery(
            permalink_match=query if permalink else None,
            text=query if query else None,
            title=query if title else None,
            after_date=after_date,
        )
        found = asyncio.run(mcp_search(query=search_query, page=page, page_size=page_size))
        rprint(found.model_dump())
    except typer.Exit:  # pragma: no cover
        # Deliberate exits pass through untouched.
        raise
    except Exception as exc:  # pragma: no cover
        typer.echo(f"Error during search: {exc}", err=True)
        raise typer.Exit(1)
146
+
147
+
148
@tool_app.command()
def get_entity(identifier: str):
    """Look up a single entity by its identifier and print it as a dict."""
    try:
        found = asyncio.run(mcp_get_entity(identifier=identifier))
        rprint(found.model_dump())
    except typer.Exit:  # pragma: no cover
        # Deliberate exits pass through untouched.
        raise
    except Exception as exc:  # pragma: no cover
        typer.echo(f"Error during get_entity: {exc}", err=True)
        raise typer.Exit(1)
basic_memory/cli/main.py CHANGED
@@ -12,6 +12,7 @@ from basic_memory.cli.commands import ( # noqa: F401 # pragma: no cover
12
12
  import_claude_conversations,
13
13
  import_claude_projects,
14
14
  import_chatgpt,
15
+ tools,
15
16
  )
16
17
 
17
18
 
basic_memory/config.py CHANGED
@@ -9,7 +9,7 @@ from pydantic_settings import BaseSettings, SettingsConfigDict
9
9
  DATABASE_NAME = "memory.db"
10
10
  DATA_DIR_NAME = ".basic-memory"
11
11
 
12
- Environment = Literal["test", "dev", "prod"]
12
+ Environment = Literal["test", "dev", "user"]
13
13
 
14
14
 
15
15
  class ProjectConfig(BaseSettings):
basic_memory/db.py CHANGED
@@ -4,6 +4,7 @@ from enum import Enum, auto
4
4
  from pathlib import Path
5
5
  from typing import AsyncGenerator, Optional
6
6
 
7
+
7
8
  from basic_memory.config import ProjectConfig
8
9
  from alembic import command
9
10
  from alembic.config import Config
basic_memory/deps.py CHANGED
@@ -2,6 +2,7 @@
2
2
 
3
3
  from typing import Annotated
4
4
 
5
+ import logfire
5
6
  from fastapi import Depends
6
7
  from sqlalchemy.ext.asyncio import (
7
8
  AsyncSession,
@@ -43,7 +44,10 @@ async def get_engine_factory(
43
44
  project_config: ProjectConfigDep,
44
45
  ) -> tuple[AsyncEngine, async_sessionmaker[AsyncSession]]: # pragma: no cover
45
46
  """Get engine and session maker."""
46
- return await db.get_or_create_db(project_config.database_path)
47
+ engine, session_maker = await db.get_or_create_db(project_config.database_path)
48
+ if project_config.env != "test":
49
+ logfire.instrument_sqlalchemy(engine=engine)
50
+ return engine, session_maker
47
51
 
48
52
 
49
53
  EngineFactoryDep = Annotated[
@@ -1,8 +1,10 @@
1
1
  """Knowledge graph management tools for Basic Memory MCP server."""
2
2
 
3
+ import logfire
4
+
3
5
  from basic_memory.mcp.server import mcp
4
6
  from basic_memory.mcp.tools.utils import call_get, call_post
5
- from basic_memory.schemas.base import Permalink
7
+ from basic_memory.schemas.memory import memory_url_path
6
8
  from basic_memory.schemas.request import (
7
9
  GetEntitiesRequest,
8
10
  )
@@ -16,15 +18,17 @@ from basic_memory.mcp.async_client import client
16
18
  @mcp.tool(
17
19
  description="Get complete information about a specific entity including observations and relations",
18
20
  )
19
- async def get_entity(permalink: Permalink) -> EntityResponse:
21
+ async def get_entity(identifier: str) -> EntityResponse:
20
22
  """Get a specific entity info by its permalink.
21
23
 
22
24
  Args:
23
- permalink: Path identifier for the entity
25
+ identifier: Path identifier for the entity
24
26
  """
25
- url = f"/knowledge/entities/{permalink}"
26
- response = await call_get(client, url)
27
- return EntityResponse.model_validate(response.json())
27
+ with logfire.span("Getting entity", permalink=identifier): # pyright: ignore [reportGeneralTypeIssues]
28
+ permalink = memory_url_path(identifier)
29
+ url = f"/knowledge/entities/{permalink}"
30
+ response = await call_get(client, url)
31
+ return EntityResponse.model_validate(response.json())
28
32
 
29
33
 
30
34
  @mcp.tool(
@@ -39,11 +43,16 @@ async def get_entities(request: GetEntitiesRequest) -> EntityListResponse:
39
43
  Returns:
40
44
  EntityListResponse containing complete details for each requested entity
41
45
  """
42
- url = "/knowledge/entities"
43
- response = await call_get(
44
- client, url, params=[("permalink", permalink) for permalink in request.permalinks]
45
- )
46
- return EntityListResponse.model_validate(response.json())
46
+ with logfire.span("Getting multiple entities", permalink_count=len(request.permalinks)): # pyright: ignore [reportGeneralTypeIssues]
47
+ url = "/knowledge/entities"
48
+ response = await call_get(
49
+ client,
50
+ url,
51
+ params=[
52
+ ("permalink", memory_url_path(identifier)) for identifier in request.permalinks
53
+ ],
54
+ )
55
+ return EntityListResponse.model_validate(response.json())
47
56
 
48
57
 
49
58
  @mcp.tool(
@@ -51,6 +60,9 @@ async def get_entities(request: GetEntitiesRequest) -> EntityListResponse:
51
60
  )
52
61
  async def delete_entities(request: DeleteEntitiesRequest) -> DeleteEntitiesResponse:
53
62
  """Delete entities from the knowledge graph."""
54
- url = "/knowledge/entities/delete"
55
- response = await call_post(client, url, json=request.model_dump())
56
- return DeleteEntitiesResponse.model_validate(response.json())
63
+ with logfire.span("Deleting entities", permalink_count=len(request.permalinks)): # pyright: ignore [reportGeneralTypeIssues]
64
+ url = "/knowledge/entities/delete"
65
+
66
+ request.permalinks = [memory_url_path(permlink) for permlink in request.permalinks]
67
+ response = await call_post(client, url, json=request.model_dump())
68
+ return DeleteEntitiesResponse.model_validate(response.json())
@@ -3,6 +3,7 @@
3
3
  from typing import Optional, Literal, List
4
4
 
5
5
  from loguru import logger
6
+ import logfire
6
7
 
7
8
  from basic_memory.mcp.async_client import client
8
9
  from basic_memory.mcp.server import mcp
@@ -32,7 +33,9 @@ async def build_context(
32
33
  url: MemoryUrl,
33
34
  depth: Optional[int] = 1,
34
35
  timeframe: Optional[TimeFrame] = "7d",
35
- max_results: int = 10,
36
+ page: int = 1,
37
+ page_size: int = 10,
38
+ max_related: int = 10,
36
39
  ) -> GraphContext:
37
40
  """Get context needed to continue a discussion.
38
41
 
@@ -44,7 +47,9 @@ async def build_context(
44
47
  url: memory:// URI pointing to discussion content (e.g. memory://specs/search)
45
48
  depth: How many relation hops to traverse (1-3 recommended for performance)
46
49
  timeframe: How far back to look. Supports natural language like "2 days ago", "last week"
47
- max_results: Maximum number of results to return (default: 10)
50
+ page: Page number of results to return (default: 1)
51
+ page_size: Number of results to return per page (default: 10)
52
+ max_related: Maximum number of related results to return (default: 10)
48
53
 
49
54
  Returns:
50
55
  GraphContext containing:
@@ -65,14 +70,21 @@ async def build_context(
65
70
  # Research the history of a feature
66
71
  build_context("memory://features/knowledge-graph", timeframe="3 months ago")
67
72
  """
68
- logger.info(f"Building context from {url}")
69
- url = normalize_memory_url(url)
70
- response = await call_get(
71
- client,
72
- f"/memory/{memory_url_path(url)}",
73
- params={"depth": depth, "timeframe": timeframe, "max_results": max_results},
74
- )
75
- return GraphContext.model_validate(response.json())
73
+ with logfire.span("Building context", url=url, depth=depth, timeframe=timeframe): # pyright: ignore [reportGeneralTypeIssues]
74
+ logger.info(f"Building context from {url}")
75
+ url = normalize_memory_url(url)
76
+ response = await call_get(
77
+ client,
78
+ f"/memory/{memory_url_path(url)}",
79
+ params={
80
+ "depth": depth,
81
+ "timeframe": timeframe,
82
+ "page": page,
83
+ "page_size": page_size,
84
+ "max_related": max_related,
85
+ },
86
+ )
87
+ return GraphContext.model_validate(response.json())
76
88
 
77
89
 
78
90
  @mcp.tool(
@@ -91,7 +103,9 @@ async def recent_activity(
91
103
  type: List[Literal["entity", "observation", "relation"]] = [],
92
104
  depth: Optional[int] = 1,
93
105
  timeframe: Optional[TimeFrame] = "7d",
94
- max_results: int = 10,
106
+ page: int = 1,
107
+ page_size: int = 10,
108
+ max_related: int = 10,
95
109
  ) -> GraphContext:
96
110
  """Get recent activity across the knowledge base.
97
111
 
@@ -106,7 +120,9 @@ async def recent_activity(
106
120
  - Relative: "2 days ago", "last week", "yesterday"
107
121
  - Points in time: "2024-01-01", "January 1st"
108
122
  - Standard format: "7d", "24h"
109
- max_results: Maximum number of results to return (default: 10)
123
+ page: Page number of results to return (default: 1)
124
+ page_size: Number of results to return per page (default: 10)
125
+ max_related: Maximum number of related results to return (default: 10)
110
126
 
111
127
  Returns:
112
128
  GraphContext containing:
@@ -132,20 +148,23 @@ async def recent_activity(
132
148
  - For focused queries, consider using build_context with a specific URI
133
149
  - Max timeframe is 1 year in the past
134
150
  """
135
- logger.info(
136
- f"Getting recent activity from {type}, depth={depth}, timeframe={timeframe}, max_results={max_results}"
137
- )
138
- params = {
139
- "depth": depth,
140
- "timeframe": timeframe,
141
- "max_results": max_results,
142
- }
143
- if type:
144
- params["type"] = type
145
-
146
- response = await call_get(
147
- client,
148
- "/memory/recent",
149
- params=params,
150
- )
151
- return GraphContext.model_validate(response.json())
151
+ with logfire.span("Getting recent activity", type=type, depth=depth, timeframe=timeframe): # pyright: ignore [reportGeneralTypeIssues]
152
+ logger.info(
153
+ f"Getting recent activity from {type}, depth={depth}, timeframe={timeframe}, page={page}, page_size={page_size}, max_related={max_related}"
154
+ )
155
+ params = {
156
+ "depth": depth,
157
+ "timeframe": timeframe,
158
+ "page": page,
159
+ "page_size": page_size,
160
+ "max_related": max_related,
161
+ }
162
+ if type:
163
+ params["type"] = type
164
+
165
+ response = await call_get(
166
+ client,
167
+ "/memory/recent",
168
+ params=params,
169
+ )
170
+ return GraphContext.model_validate(response.json())
@@ -7,6 +7,7 @@ while leveraging the underlying knowledge graph structure.
7
7
  from typing import Optional, List
8
8
 
9
9
  from loguru import logger
10
+ import logfire
10
11
 
11
12
  from basic_memory.mcp.server import mcp
12
13
  from basic_memory.mcp.async_client import client
@@ -60,75 +61,62 @@ async def write_note(
60
61
  - Observation counts by category
61
62
  - Relation counts (resolved/unresolved)
62
63
  - Tags if present
63
-
64
- Examples:
65
- write_note(
66
- title="Search Implementation",
67
- content="# Search Component\\n\\n"
68
- "Implementation of the search feature, building on [[Core Search]].\\n\\n"
69
- "## Observations\\n"
70
- "- [tech] Using FTS5 for full-text search #implementation\\n"
71
- "- [design] Need pagination support #todo\\n\\n"
72
- "## Relations\\n"
73
- "- implements [[Search Spec]]\\n"
74
- "- depends_on [[Database Schema]]",
75
- folder="docs/components"
76
- )
77
64
  """
78
- logger.info(f"Writing note folder:'{folder}' title: '{title}'")
79
-
80
- # Create the entity request
81
- metadata = {"tags": [f"#{tag}" for tag in tags]} if tags else None
82
- entity = Entity(
83
- title=title,
84
- folder=folder,
85
- entity_type="note",
86
- content_type="text/markdown",
87
- content=content,
88
- entity_metadata=metadata,
89
- )
90
-
91
- # Create or update via knowledge API
92
- logger.info(f"Creating {entity.permalink}")
93
- url = f"/knowledge/entities/{entity.permalink}"
94
- response = await call_put(client, url, json=entity.model_dump())
95
- result = EntityResponse.model_validate(response.json())
96
-
97
- # Format semantic summary based on status code
98
- action = "Created" if response.status_code == 201 else "Updated"
99
- assert result.checksum is not None
100
- summary = [
101
- f"# {action} {result.file_path} ({result.checksum[:8]})",
102
- f"permalink: {result.permalink}",
103
- ]
104
-
105
- if result.observations:
106
- categories = {}
107
- for obs in result.observations:
108
- categories[obs.category] = categories.get(obs.category, 0) + 1
109
-
110
- summary.append("\n## Observations")
111
- for category, count in sorted(categories.items()):
112
- summary.append(f"- {category}: {count}")
113
-
114
- if result.relations:
115
- unresolved = sum(1 for r in result.relations if not r.to_id)
116
- resolved = len(result.relations) - unresolved
117
-
118
- summary.append("\n## Relations")
119
- summary.append(f"- Resolved: {resolved}")
120
- if unresolved:
121
- summary.append(f"- Unresolved: {unresolved}")
122
- summary.append("\nUnresolved relations will be retried on next sync.")
123
-
124
- if tags:
125
- summary.append(f"\n## Tags\n- {', '.join(tags)}")
126
-
127
- return "\n".join(summary)
65
+ with logfire.span("Writing note", title=title, folder=folder): # pyright: ignore [reportGeneralTypeIssues]
66
+ logger.info(f"Writing note folder:'{folder}' title: '{title}'")
67
+
68
+ # Create the entity request
69
+ metadata = {"tags": [f"#{tag}" for tag in tags]} if tags else None
70
+ entity = Entity(
71
+ title=title,
72
+ folder=folder,
73
+ entity_type="note",
74
+ content_type="text/markdown",
75
+ content=content,
76
+ entity_metadata=metadata,
77
+ )
78
+
79
+ # Create or update via knowledge API
80
+ logger.info(f"Creating {entity.permalink}")
81
+ url = f"/knowledge/entities/{entity.permalink}"
82
+ response = await call_put(client, url, json=entity.model_dump())
83
+ result = EntityResponse.model_validate(response.json())
84
+
85
+ # Format semantic summary based on status code
86
+ action = "Created" if response.status_code == 201 else "Updated"
87
+ assert result.checksum is not None
88
+ summary = [
89
+ f"# {action} {result.file_path} ({result.checksum[:8]})",
90
+ f"permalink: {result.permalink}",
91
+ ]
92
+
93
+ if result.observations:
94
+ categories = {}
95
+ for obs in result.observations:
96
+ categories[obs.category] = categories.get(obs.category, 0) + 1
97
+
98
+ summary.append("\n## Observations")
99
+ for category, count in sorted(categories.items()):
100
+ summary.append(f"- {category}: {count}")
101
+
102
+ if result.relations:
103
+ unresolved = sum(1 for r in result.relations if not r.to_id)
104
+ resolved = len(result.relations) - unresolved
105
+
106
+ summary.append("\n## Relations")
107
+ summary.append(f"- Resolved: {resolved}")
108
+ if unresolved:
109
+ summary.append(f"- Unresolved: {unresolved}")
110
+ summary.append("\nUnresolved relations will be retried on next sync.")
111
+
112
+ if tags:
113
+ summary.append(f"\n## Tags\n- {', '.join(tags)}")
114
+
115
+ return "\n".join(summary)
128
116
 
129
117
 
130
118
  @mcp.tool(description="Read note content by title, permalink, relation, or pattern")
131
- async def read_note(identifier: str) -> str:
119
+ async def read_note(identifier: str, page: int = 1, page_size: int = 10) -> str:
132
120
  """Get note content in unified diff format.
133
121
 
134
122
  The content is returned in a unified diff inspired format:
@@ -146,6 +134,8 @@ async def read_note(identifier: str) -> str:
146
134
  - Note permalink ("docs/example")
147
135
  - Relation path ("docs/example/depends-on/other-doc")
148
136
  - Pattern match ("docs/*-architecture")
137
+ page: the page number of results to return (default 1)
138
+ page_size: the number of results to return per page (default 10)
149
139
 
150
140
  Returns:
151
141
  Document content in unified diff format. For single documents, returns
@@ -180,10 +170,13 @@ async def read_note(identifier: str) -> str:
180
170
  - Last modified timestamp
181
171
  - Content checksum
182
172
  """
183
- logger.info(f"Reading note {identifier}")
184
- url = memory_url_path(identifier)
185
- response = await call_get(client, f"/resource/{url}")
186
- return response.text
173
+ with logfire.span("Reading note", identifier=identifier): # pyright: ignore [reportGeneralTypeIssues]
174
+ logger.info(f"Reading note {identifier}")
175
+ url = memory_url_path(identifier)
176
+ response = await call_get(
177
+ client, f"/resource/{url}", params={"page": page, "page_size": page_size}
178
+ )
179
+ return response.text
187
180
 
188
181
 
189
182
  @mcp.tool(description="Delete a note by title or permalink")
@@ -203,6 +196,7 @@ async def delete_note(identifier: str) -> bool:
203
196
  # Delete by permalink
204
197
  delete_note("notes/project-planning")
205
198
  """
206
- response = await call_delete(client, f"/knowledge/entities/{identifier}")
207
- result = DeleteEntitiesResponse.model_validate(response.json())
208
- return result.deleted
199
+ with logfire.span("Deleting note", identifier=identifier): # pyright: ignore [reportGeneralTypeIssues]
200
+ response = await call_delete(client, f"/knowledge/entities/{identifier}")
201
+ result = DeleteEntitiesResponse.model_validate(response.json())
202
+ return result.deleted
@@ -1,5 +1,6 @@
1
1
  """Search tools for Basic Memory MCP server."""
2
2
 
3
+ import logfire
3
4
  from loguru import logger
4
5
 
5
6
  from basic_memory.mcp.server import mcp
@@ -11,7 +12,7 @@ from basic_memory.mcp.async_client import client
11
12
  @mcp.tool(
12
13
  description="Search across all content in basic-memory, including documents and entities",
13
14
  )
14
- async def search(query: SearchQuery) -> SearchResponse:
15
+ async def search(query: SearchQuery, page: int = 1, page_size: int = 10) -> SearchResponse:
15
16
  """Search across all content in basic-memory.
16
17
 
17
18
  Args:
@@ -20,10 +21,18 @@ async def search(query: SearchQuery) -> SearchResponse:
20
21
  - types: Optional list of content types to search ("document" or "entity")
21
22
  - entity_types: Optional list of entity types to filter by
22
23
  - after_date: Optional date filter for recent content
24
+ page: the page number of results to return (default 1)
25
+ page_size: the number of results to return per page (default 10)
23
26
 
24
27
  Returns:
25
28
  SearchResponse with search results and metadata
26
29
  """
27
- logger.info(f"Searching for {query.text}")
28
- response = await call_post(client, "/search/", json=query.model_dump())
29
- return SearchResponse.model_validate(response.json())
30
+ with logfire.span("Searching for {query}", query=query): # pyright: ignore [reportGeneralTypeIssues]
31
+ logger.info(f"Searching for {query}")
32
+ response = await call_post(
33
+ client,
34
+ "/search/",
35
+ json=query.model_dump(),
36
+ params={"page": page, "page_size": page_size},
37
+ )
38
+ return SearchResponse.model_validate(response.json())
@@ -114,6 +114,7 @@ class SearchRepository:
114
114
  after_date: Optional[datetime] = None,
115
115
  entity_types: Optional[List[str]] = None,
116
116
  limit: int = 10,
117
+ offset: int = 0,
117
118
  ) -> List[SearchIndexRow]:
118
119
  """Search across all indexed content with fuzzy matching."""
119
120
  conditions = []
@@ -169,6 +170,7 @@ class SearchRepository:
169
170
 
170
171
  # set limit on search query
171
172
  params["limit"] = limit
173
+ params["offset"] = offset
172
174
 
173
175
  # Build WHERE clause
174
176
  where_clause = " AND ".join(conditions) if conditions else "1=1"
@@ -194,6 +196,7 @@ class SearchRepository:
194
196
  WHERE {where_clause}
195
197
  ORDER BY score ASC {order_by_clause}
196
198
  LIMIT :limit
199
+ OFFSET :offset
197
200
  """
198
201
 
199
202
  logger.debug(f"Search {sql} params: {params}")
@@ -111,3 +111,6 @@ class GraphContext(BaseModel):
111
111
 
112
112
  # Context metadata
113
113
  metadata: MemoryMetadata
114
+
115
+ page: int = 1
116
+ page_size: int = 1
@@ -51,7 +51,7 @@ class GetEntitiesRequest(BaseModel):
51
51
  discovered through search.
52
52
  """
53
53
 
54
- permalinks: Annotated[List[Permalink], MinLen(1)]
54
+ permalinks: Annotated[List[Permalink], MinLen(1), MaxLen(10)]
55
55
 
56
56
 
57
57
  class CreateRelationsRequest(BaseModel):
@@ -102,6 +102,8 @@ class SearchResponse(BaseModel):
102
102
  """Wrapper for search results."""
103
103
 
104
104
  results: List[SearchResult]
105
+ current_page: int
106
+ page_size: int
105
107
 
106
108
 
107
109
  # Schema for future advanced search endpoint