basic-memory 0.7.0__py3-none-any.whl → 0.9.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of basic-memory might be problematic; consult the package registry's advisory page for more details.

Files changed (89)
  1. basic_memory/__init__.py +1 -1
  2. basic_memory/alembic/alembic.ini +119 -0
  3. basic_memory/alembic/env.py +23 -1
  4. basic_memory/alembic/migrations.py +4 -9
  5. basic_memory/alembic/versions/502b60eaa905_remove_required_from_entity_permalink.py +51 -0
  6. basic_memory/alembic/versions/b3c3938bacdb_relation_to_name_unique_index.py +44 -0
  7. basic_memory/alembic/versions/cc7172b46608_update_search_index_schema.py +106 -0
  8. basic_memory/api/app.py +9 -10
  9. basic_memory/api/routers/__init__.py +2 -1
  10. basic_memory/api/routers/knowledge_router.py +31 -5
  11. basic_memory/api/routers/memory_router.py +18 -17
  12. basic_memory/api/routers/project_info_router.py +275 -0
  13. basic_memory/api/routers/resource_router.py +105 -4
  14. basic_memory/api/routers/search_router.py +22 -4
  15. basic_memory/cli/app.py +54 -5
  16. basic_memory/cli/commands/__init__.py +15 -2
  17. basic_memory/cli/commands/db.py +9 -13
  18. basic_memory/cli/commands/import_chatgpt.py +26 -30
  19. basic_memory/cli/commands/import_claude_conversations.py +27 -29
  20. basic_memory/cli/commands/import_claude_projects.py +29 -31
  21. basic_memory/cli/commands/import_memory_json.py +26 -28
  22. basic_memory/cli/commands/mcp.py +7 -1
  23. basic_memory/cli/commands/project.py +119 -0
  24. basic_memory/cli/commands/project_info.py +167 -0
  25. basic_memory/cli/commands/status.py +14 -28
  26. basic_memory/cli/commands/sync.py +63 -22
  27. basic_memory/cli/commands/tool.py +253 -0
  28. basic_memory/cli/main.py +39 -1
  29. basic_memory/config.py +166 -4
  30. basic_memory/db.py +19 -4
  31. basic_memory/deps.py +10 -3
  32. basic_memory/file_utils.py +37 -19
  33. basic_memory/markdown/entity_parser.py +3 -3
  34. basic_memory/markdown/utils.py +5 -0
  35. basic_memory/mcp/async_client.py +1 -1
  36. basic_memory/mcp/main.py +24 -0
  37. basic_memory/mcp/prompts/__init__.py +19 -0
  38. basic_memory/mcp/prompts/ai_assistant_guide.py +26 -0
  39. basic_memory/mcp/prompts/continue_conversation.py +111 -0
  40. basic_memory/mcp/prompts/recent_activity.py +88 -0
  41. basic_memory/mcp/prompts/search.py +182 -0
  42. basic_memory/mcp/prompts/utils.py +155 -0
  43. basic_memory/mcp/server.py +2 -6
  44. basic_memory/mcp/tools/__init__.py +12 -21
  45. basic_memory/mcp/tools/build_context.py +85 -0
  46. basic_memory/mcp/tools/canvas.py +97 -0
  47. basic_memory/mcp/tools/delete_note.py +28 -0
  48. basic_memory/mcp/tools/project_info.py +51 -0
  49. basic_memory/mcp/tools/read_content.py +229 -0
  50. basic_memory/mcp/tools/read_note.py +190 -0
  51. basic_memory/mcp/tools/recent_activity.py +100 -0
  52. basic_memory/mcp/tools/search.py +56 -17
  53. basic_memory/mcp/tools/utils.py +245 -16
  54. basic_memory/mcp/tools/write_note.py +124 -0
  55. basic_memory/models/knowledge.py +27 -11
  56. basic_memory/models/search.py +2 -1
  57. basic_memory/repository/entity_repository.py +3 -2
  58. basic_memory/repository/project_info_repository.py +9 -0
  59. basic_memory/repository/repository.py +24 -7
  60. basic_memory/repository/search_repository.py +47 -14
  61. basic_memory/schemas/__init__.py +10 -9
  62. basic_memory/schemas/base.py +4 -1
  63. basic_memory/schemas/memory.py +14 -4
  64. basic_memory/schemas/project_info.py +96 -0
  65. basic_memory/schemas/search.py +29 -33
  66. basic_memory/services/context_service.py +3 -3
  67. basic_memory/services/entity_service.py +26 -13
  68. basic_memory/services/file_service.py +145 -26
  69. basic_memory/services/link_resolver.py +9 -46
  70. basic_memory/services/search_service.py +95 -22
  71. basic_memory/sync/__init__.py +3 -2
  72. basic_memory/sync/sync_service.py +523 -117
  73. basic_memory/sync/watch_service.py +258 -132
  74. basic_memory/utils.py +51 -36
  75. basic_memory-0.9.0.dist-info/METADATA +736 -0
  76. basic_memory-0.9.0.dist-info/RECORD +99 -0
  77. basic_memory/alembic/README +0 -1
  78. basic_memory/cli/commands/tools.py +0 -157
  79. basic_memory/mcp/tools/knowledge.py +0 -68
  80. basic_memory/mcp/tools/memory.py +0 -170
  81. basic_memory/mcp/tools/notes.py +0 -202
  82. basic_memory/schemas/discovery.py +0 -28
  83. basic_memory/sync/file_change_scanner.py +0 -158
  84. basic_memory/sync/utils.py +0 -31
  85. basic_memory-0.7.0.dist-info/METADATA +0 -378
  86. basic_memory-0.7.0.dist-info/RECORD +0 -82
  87. {basic_memory-0.7.0.dist-info → basic_memory-0.9.0.dist-info}/WHEEL +0 -0
  88. {basic_memory-0.7.0.dist-info → basic_memory-0.9.0.dist-info}/entry_points.txt +0 -0
  89. {basic_memory-0.7.0.dist-info → basic_memory-0.9.0.dist-info}/licenses/LICENSE +0 -0
basic_memory/db.py CHANGED
@@ -86,8 +86,16 @@ async def get_or_create_db(
86
86
  _engine = create_async_engine(db_url, connect_args={"check_same_thread": False})
87
87
  _session_maker = async_sessionmaker(_engine, expire_on_commit=False)
88
88
 
89
- assert _engine is not None # for type checker
90
- assert _session_maker is not None # for type checker
89
+ # These checks should never fail since we just created the engine and session maker
90
+ # if they were None, but we'll check anyway for the type checker
91
+ if _engine is None:
92
+ logger.error("Failed to create database engine", db_path=str(db_path))
93
+ raise RuntimeError("Database engine initialization failed")
94
+
95
+ if _session_maker is None:
96
+ logger.error("Failed to create session maker", db_path=str(db_path))
97
+ raise RuntimeError("Session maker initialization failed")
98
+
91
99
  return _engine, _session_maker
92
100
 
93
101
 
@@ -121,8 +129,15 @@ async def engine_session_factory(
121
129
  try:
122
130
  _session_maker = async_sessionmaker(_engine, expire_on_commit=False)
123
131
 
124
- assert _engine is not None # for type checker
125
- assert _session_maker is not None # for type checker
132
+ # Verify that engine and session maker are initialized
133
+ if _engine is None: # pragma: no cover
134
+ logger.error("Database engine is None in engine_session_factory")
135
+ raise RuntimeError("Database engine initialization failed")
136
+
137
+ if _session_maker is None: # pragma: no cover
138
+ logger.error("Session maker is None in engine_session_factory")
139
+ raise RuntimeError("Session maker initialization failed")
140
+
126
141
  yield _engine, _session_maker
127
142
  finally:
128
143
  if _engine:
basic_memory/deps.py CHANGED
@@ -2,7 +2,6 @@
2
2
 
3
3
  from typing import Annotated
4
4
 
5
- import logfire
6
5
  from fastapi import Depends
7
6
  from sqlalchemy.ext.asyncio import (
8
7
  AsyncSession,
@@ -16,6 +15,7 @@ from basic_memory.markdown import EntityParser
16
15
  from basic_memory.markdown.markdown_processor import MarkdownProcessor
17
16
  from basic_memory.repository.entity_repository import EntityRepository
18
17
  from basic_memory.repository.observation_repository import ObservationRepository
18
+ from basic_memory.repository.project_info_repository import ProjectInfoRepository
19
19
  from basic_memory.repository.relation_repository import RelationRepository
20
20
  from basic_memory.repository.search_repository import SearchRepository
21
21
  from basic_memory.services import (
@@ -45,8 +45,6 @@ async def get_engine_factory(
45
45
  ) -> tuple[AsyncEngine, async_sessionmaker[AsyncSession]]: # pragma: no cover
46
46
  """Get engine and session maker."""
47
47
  engine, session_maker = await db.get_or_create_db(project_config.database_path)
48
- if project_config.env != "test":
49
- logfire.instrument_sqlalchemy(engine=engine)
50
48
  return engine, session_maker
51
49
 
52
50
 
@@ -107,6 +105,15 @@ async def get_search_repository(
107
105
  SearchRepositoryDep = Annotated[SearchRepository, Depends(get_search_repository)]
108
106
 
109
107
 
108
+ def get_project_info_repository(
109
+ session_maker: SessionMakerDep,
110
+ ):
111
+ """Dependency for StatsRepository."""
112
+ return ProjectInfoRepository(session_maker)
113
+
114
+
115
+ ProjectInfoRepositoryDep = Annotated[ProjectInfoRepository, Depends(get_project_info_repository)]
116
+
110
117
  ## services
111
118
 
112
119
 
@@ -2,11 +2,13 @@
2
2
 
3
3
  import hashlib
4
4
  from pathlib import Path
5
- from typing import Dict, Any
5
+ from typing import Any, Dict, Union
6
6
 
7
7
  import yaml
8
8
  from loguru import logger
9
9
 
10
+ from basic_memory.utils import FilePath
11
+
10
12
 
11
13
  class FileError(Exception):
12
14
  """Base exception for file operations."""
@@ -26,12 +28,12 @@ class ParseError(FileError):
26
28
  pass
27
29
 
28
30
 
29
- async def compute_checksum(content: str) -> str:
31
+ async def compute_checksum(content: Union[str, bytes]) -> str:
30
32
  """
31
33
  Compute SHA-256 checksum of content.
32
34
 
33
35
  Args:
34
- content: Text content to hash
36
+ content: Content to hash (either text string or bytes)
35
37
 
36
38
  Returns:
37
39
  SHA-256 hex digest
@@ -40,48 +42,55 @@ async def compute_checksum(content: str) -> str:
40
42
  FileError: If checksum computation fails
41
43
  """
42
44
  try:
43
- return hashlib.sha256(content.encode()).hexdigest()
45
+ if isinstance(content, str):
46
+ content = content.encode()
47
+ return hashlib.sha256(content).hexdigest()
44
48
  except Exception as e: # pragma: no cover
45
49
  logger.error(f"Failed to compute checksum: {e}")
46
50
  raise FileError(f"Failed to compute checksum: {e}")
47
51
 
48
52
 
49
- async def ensure_directory(path: Path) -> None:
53
+ async def ensure_directory(path: FilePath) -> None:
50
54
  """
51
55
  Ensure directory exists, creating if necessary.
52
56
 
53
57
  Args:
54
- path: Directory path to ensure
58
+ path: Directory path to ensure (Path or string)
55
59
 
56
60
  Raises:
57
61
  FileWriteError: If directory creation fails
58
62
  """
59
63
  try:
60
- path.mkdir(parents=True, exist_ok=True)
64
+ # Convert string to Path if needed
65
+ path_obj = Path(path) if isinstance(path, str) else path
66
+ path_obj.mkdir(parents=True, exist_ok=True)
61
67
  except Exception as e: # pragma: no cover
62
- logger.error(f"Failed to create directory: {path}: {e}")
68
+ logger.error("Failed to create directory", path=str(path), error=str(e))
63
69
  raise FileWriteError(f"Failed to create directory {path}: {e}")
64
70
 
65
71
 
66
- async def write_file_atomic(path: Path, content: str) -> None:
72
+ async def write_file_atomic(path: FilePath, content: str) -> None:
67
73
  """
68
74
  Write file with atomic operation using temporary file.
69
75
 
70
76
  Args:
71
- path: Target file path
77
+ path: Target file path (Path or string)
72
78
  content: Content to write
73
79
 
74
80
  Raises:
75
81
  FileWriteError: If write operation fails
76
82
  """
77
- temp_path = path.with_suffix(".tmp")
83
+ # Convert string to Path if needed
84
+ path_obj = Path(path) if isinstance(path, str) else path
85
+ temp_path = path_obj.with_suffix(".tmp")
86
+
78
87
  try:
79
88
  temp_path.write_text(content)
80
- temp_path.replace(path)
81
- logger.debug(f"wrote file: {path}")
89
+ temp_path.replace(path_obj)
90
+ logger.debug("Wrote file atomically", path=str(path_obj), content_length=len(content))
82
91
  except Exception as e: # pragma: no cover
83
92
  temp_path.unlink(missing_ok=True)
84
- logger.error(f"Failed to write file: {path}: {e}")
93
+ logger.error("Failed to write file", path=str(path_obj), error=str(e))
85
94
  raise FileWriteError(f"Failed to write file {path}: {e}")
86
95
 
87
96
 
@@ -171,7 +180,7 @@ def remove_frontmatter(content: str) -> str:
171
180
  return parts[2].strip()
172
181
 
173
182
 
174
- async def update_frontmatter(path: Path, updates: Dict[str, Any]) -> str:
183
+ async def update_frontmatter(path: FilePath, updates: Dict[str, Any]) -> str:
175
184
  """Update frontmatter fields in a file while preserving all content.
176
185
 
177
186
  Only modifies the frontmatter section, leaving all content untouched.
@@ -179,7 +188,7 @@ async def update_frontmatter(path: Path, updates: Dict[str, Any]) -> str:
179
188
  Returns checksum of updated file.
180
189
 
181
190
  Args:
182
- path: Path to markdown file
191
+ path: Path to markdown file (Path or string)
183
192
  updates: Dict of frontmatter fields to update
184
193
 
185
194
  Returns:
@@ -190,8 +199,11 @@ async def update_frontmatter(path: Path, updates: Dict[str, Any]) -> str:
190
199
  ParseError: If frontmatter parsing fails
191
200
  """
192
201
  try:
202
+ # Convert string to Path if needed
203
+ path_obj = Path(path) if isinstance(path, str) else path
204
+
193
205
  # Read current content
194
- content = path.read_text()
206
+ content = path_obj.read_text()
195
207
 
196
208
  # Parse current frontmatter
197
209
  current_fm = {}
@@ -206,9 +218,15 @@ async def update_frontmatter(path: Path, updates: Dict[str, Any]) -> str:
206
218
  yaml_fm = yaml.dump(new_fm, sort_keys=False)
207
219
  final_content = f"---\n{yaml_fm}---\n\n{content.strip()}"
208
220
 
209
- await write_file_atomic(path, final_content)
221
+ logger.debug("Updating frontmatter", path=str(path_obj), update_keys=list(updates.keys()))
222
+
223
+ await write_file_atomic(path_obj, final_content)
210
224
  return await compute_checksum(final_content)
211
225
 
212
226
  except Exception as e: # pragma: no cover
213
- logger.error(f"Failed to update frontmatter in {path}: {e}")
227
+ logger.error(
228
+ "Failed to update frontmatter",
229
+ path=str(path) if isinstance(path, (str, Path)) else "<unknown>",
230
+ error=str(e),
231
+ )
214
232
  raise FileError(f"Failed to update frontmatter: {e}")
@@ -88,10 +88,10 @@ class EntityParser:
88
88
  return parsed
89
89
  return None
90
90
 
91
- async def parse_file(self, file_path: Path) -> EntityMarkdown:
91
+ async def parse_file(self, path: Path | str) -> EntityMarkdown:
92
92
  """Parse markdown file into EntityMarkdown."""
93
93
 
94
- absolute_path = self.base_path / file_path
94
+ absolute_path = self.base_path / path
95
95
  # Parse frontmatter and content using python-frontmatter
96
96
  post = frontmatter.load(str(absolute_path))
97
97
 
@@ -99,7 +99,7 @@ class EntityParser:
99
99
  file_stats = absolute_path.stat()
100
100
 
101
101
  metadata = post.metadata
102
- metadata["title"] = post.metadata.get("title", file_path.name)
102
+ metadata["title"] = post.metadata.get("title", absolute_path.name)
103
103
  metadata["type"] = post.metadata.get("type", "note")
104
104
  metadata["tags"] = parse_tags(post.metadata.get("tags", []))
105
105
 
@@ -5,6 +5,7 @@ from typing import Optional, Any
5
5
 
6
6
  from frontmatter import Post
7
7
 
8
+ from basic_memory.file_utils import has_frontmatter, remove_frontmatter
8
9
  from basic_memory.markdown import EntityMarkdown
9
10
  from basic_memory.models import Entity, Observation as ObservationModel
10
11
  from basic_memory.utils import generate_permalink
@@ -78,6 +79,10 @@ async def schema_to_markdown(schema: Any) -> Post:
78
79
  content = schema.content or ""
79
80
  frontmatter_metadata = dict(schema.entity_metadata or {})
80
81
 
82
+ # if the content contains frontmatter, remove it and merge
83
+ if has_frontmatter(content):
84
+ content = remove_frontmatter(content)
85
+
81
86
  # Remove special fields for ordered frontmatter
82
87
  for field in ["type", "title", "permalink"]:
83
88
  frontmatter_metadata.pop(field, None)
@@ -2,7 +2,7 @@ from httpx import ASGITransport, AsyncClient
2
2
 
3
3
  from basic_memory.api.app import app as fastapi_app
4
4
 
5
- BASE_URL = "memory://"
5
+ BASE_URL = "http://test"
6
6
 
7
7
  # Create shared async client
8
8
  client = AsyncClient(transport=ASGITransport(app=fastapi_app), base_url=BASE_URL)
@@ -0,0 +1,24 @@
1
+ """Main MCP entrypoint for Basic Memory.
2
+
3
+ Creates and configures the shared MCP instance and handles server startup.
4
+ """
5
+
6
+ from loguru import logger # pragma: no cover
7
+
8
+ from basic_memory.config import config # pragma: no cover
9
+
10
+ # Import shared mcp instance
11
+ from basic_memory.mcp.server import mcp # pragma: no cover
12
+
13
+ # Import tools to register them
14
+ import basic_memory.mcp.tools # noqa: F401 # pragma: no cover
15
+
16
+ # Import prompts to register them
17
+ import basic_memory.mcp.prompts # noqa: F401 # pragma: no cover
18
+
19
+
20
+ if __name__ == "__main__": # pragma: no cover
21
+ home_dir = config.home
22
+ logger.info("Starting Basic Memory MCP server")
23
+ logger.info(f"Home directory: {home_dir}")
24
+ mcp.run()
@@ -0,0 +1,19 @@
1
+ """Basic Memory MCP prompts.
2
+
3
+ Prompts are a special type of tool that returns a string response
4
+ formatted for a user to read, typically invoking one or more tools
5
+ and transforming their results into user-friendly text.
6
+ """
7
+
8
+ # Import individual prompt modules to register them with the MCP server
9
+ from basic_memory.mcp.prompts import continue_conversation
10
+ from basic_memory.mcp.prompts import recent_activity
11
+ from basic_memory.mcp.prompts import search
12
+ from basic_memory.mcp.prompts import ai_assistant_guide
13
+
14
+ __all__ = [
15
+ "ai_assistant_guide",
16
+ "continue_conversation",
17
+ "recent_activity",
18
+ "search",
19
+ ]
@@ -0,0 +1,26 @@
1
+ from pathlib import Path
2
+
3
+ from loguru import logger
4
+
5
+ from basic_memory.mcp.server import mcp
6
+
7
+
8
+ @mcp.resource(
9
+ uri="memory://ai_assistant_guide",
10
+ name="ai assistant guide",
11
+ description="Give an AI assistant guidance on how to use Basic Memory tools effectively",
12
+ )
13
+ def ai_assistant_guide() -> str:
14
+ """Return a concise guide on Basic Memory tools and how to use them.
15
+
16
+ Args:
17
+ focus: Optional area to focus on ("writing", "context", "search", etc.)
18
+
19
+ Returns:
20
+ A focused guide on Basic Memory usage.
21
+ """
22
+ logger.info("Loading AI assistant guide resource")
23
+ guide_doc = Path(__file__).parent.parent.parent.parent.parent / "static" / "ai_assistant_guide.md"
24
+ content = guide_doc.read_text()
25
+ logger.info(f"Loaded AI assistant guide ({len(content)} chars)")
26
+ return content
@@ -0,0 +1,111 @@
1
+ """Session continuation prompts for Basic Memory MCP server.
2
+
3
+ These prompts help users continue conversations and work across sessions,
4
+ providing context from previous interactions to maintain continuity.
5
+ """
6
+
7
+ from textwrap import dedent
8
+ from typing import Optional, Annotated
9
+
10
+ from loguru import logger
11
+ from pydantic import Field
12
+
13
+ from basic_memory.mcp.prompts.utils import format_prompt_context, PromptContext, PromptContextItem
14
+ from basic_memory.mcp.server import mcp
15
+ from basic_memory.mcp.tools.build_context import build_context
16
+ from basic_memory.mcp.tools.recent_activity import recent_activity
17
+ from basic_memory.mcp.tools.search import search
18
+ from basic_memory.schemas.base import TimeFrame
19
+ from basic_memory.schemas.memory import GraphContext
20
+ from basic_memory.schemas.search import SearchQuery, SearchItemType
21
+
22
+
23
+ @mcp.prompt(
24
+ name="Continue Conversation",
25
+ description="Continue a previous conversation",
26
+ )
27
+ async def continue_conversation(
28
+ topic: Annotated[Optional[str], Field(description="Topic or keyword to search for")] = None,
29
+ timeframe: Annotated[
30
+ Optional[TimeFrame],
31
+ Field(description="How far back to look for activity (e.g. '1d', '1 week')"),
32
+ ] = None,
33
+ ) -> str:
34
+ """Continue a previous conversation or work session.
35
+
36
+ This prompt helps you pick up where you left off by finding recent context
37
+ about a specific topic or showing general recent activity.
38
+
39
+ Args:
40
+ topic: Topic or keyword to search for (optional)
41
+ timeframe: How far back to look for activity
42
+
43
+ Returns:
44
+ Context from previous sessions on this topic
45
+ """
46
+ logger.info(f"Continuing session, topic: {topic}, timeframe: {timeframe}")
47
+
48
+ # If topic provided, search for it
49
+ if topic:
50
+ search_results = await search(
51
+ SearchQuery(text=topic, after_date=timeframe, types=[SearchItemType.ENTITY])
52
+ )
53
+
54
+ # Build context from results
55
+ contexts = []
56
+ for result in search_results.results:
57
+ if hasattr(result, "permalink") and result.permalink:
58
+ context: GraphContext = await build_context(f"memory://{result.permalink}")
59
+ if context.primary_results:
60
+ contexts.append(
61
+ PromptContextItem(
62
+ primary_results=context.primary_results[:1], # pyright: ignore
63
+ related_results=context.related_results[:3], # pyright: ignore
64
+ )
65
+ )
66
+
67
+ # get context for the top 3 results
68
+ prompt_context = format_prompt_context(
69
+ PromptContext(topic=topic, timeframe=timeframe, results=contexts) # pyright: ignore
70
+ )
71
+
72
+ else:
73
+ # If no topic, get recent activity
74
+ timeframe = timeframe or "7d"
75
+ recent: GraphContext = await recent_activity(
76
+ timeframe=timeframe, type=[SearchItemType.ENTITY]
77
+ )
78
+ prompt_context = format_prompt_context(
79
+ PromptContext(
80
+ topic=f"Recent Activity from ({timeframe})",
81
+ timeframe=timeframe,
82
+ results=[
83
+ PromptContextItem(
84
+ primary_results=recent.primary_results[:5], # pyright: ignore
85
+ related_results=recent.related_results[:2], # pyright: ignore
86
+ )
87
+ ],
88
+ )
89
+ )
90
+
91
+ # Add next steps with strong encouragement to write
92
+ next_steps = dedent(f"""
93
+ ## Next Steps
94
+
95
+ You can:
96
+ - Explore more with: `search({{"text": "{topic}"}})`
97
+ - See what's changed: `recent_activity(timeframe="{timeframe or "7d"}")`
98
+ - **Record new learnings or decisions from this conversation:** `write_note(title="[Create a meaningful title]", content="[Content with observations and relations]")`
99
+
100
+ ## Knowledge Capture Recommendation
101
+
102
+ As you continue this conversation, **actively look for opportunities to:**
103
+ 1. Record key information, decisions, or insights that emerge
104
+ 2. Link new knowledge to existing topics
105
+ 3. Suggest capturing important context when appropriate
106
+ 4. Create forward references to topics that might be created later
107
+
108
+ Remember that capturing knowledge during conversations is one of the most valuable aspects of Basic Memory.
109
+ """)
110
+
111
+ return prompt_context + next_steps
@@ -0,0 +1,88 @@
1
+ """Recent activity prompts for Basic Memory MCP server.
2
+
3
+ These prompts help users see what has changed in their knowledge base recently.
4
+ """
5
+
6
+ from typing import Annotated
7
+
8
+ from loguru import logger
9
+ from pydantic import Field
10
+
11
+ from basic_memory.mcp.prompts.utils import format_prompt_context, PromptContext, PromptContextItem
12
+ from basic_memory.mcp.server import mcp
13
+ from basic_memory.mcp.tools.recent_activity import recent_activity
14
+ from basic_memory.schemas.base import TimeFrame
15
+ from basic_memory.schemas.search import SearchItemType
16
+
17
+
18
+ @mcp.prompt(
19
+ name="Share Recent Activity",
20
+ description="Get recent activity from across the knowledge base",
21
+ )
22
+ async def recent_activity_prompt(
23
+ timeframe: Annotated[
24
+ TimeFrame,
25
+ Field(description="How far back to look for activity (e.g. '1d', '1 week')"),
26
+ ] = "7d",
27
+ ) -> str:
28
+ """Get recent activity from across the knowledge base.
29
+
30
+ This prompt helps you see what's changed recently in the knowledge base,
31
+ showing new or updated documents and related information.
32
+
33
+ Args:
34
+ timeframe: How far back to look for activity (e.g. '1d', '1 week')
35
+
36
+ Returns:
37
+ Formatted summary of recent activity
38
+ """
39
+ logger.info(f"Getting recent activity, timeframe: {timeframe}")
40
+
41
+ recent = await recent_activity(timeframe=timeframe, type=[SearchItemType.ENTITY])
42
+
43
+ prompt_context = format_prompt_context(
44
+ PromptContext(
45
+ topic=f"Recent Activity from ({timeframe})",
46
+ timeframe=timeframe,
47
+ results=[
48
+ PromptContextItem(
49
+ primary_results=recent.primary_results[:5],
50
+ related_results=recent.related_results[:2],
51
+ )
52
+ ],
53
+ )
54
+ )
55
+
56
+ # Add suggestions for summarizing recent activity
57
+ capture_suggestions = f"""
58
+ ## Opportunity to Capture Activity Summary
59
+
60
+ Consider creating a summary note of recent activity:
61
+
62
+ ```python
63
+ await write_note(
64
+ title="Activity Summary {timeframe}",
65
+ content='''
66
+ # Activity Summary for {timeframe}
67
+
68
+ ## Overview
69
+ [Summary of key changes and developments over this period]
70
+
71
+ ## Key Updates
72
+ [List main updates and their significance]
73
+
74
+ ## Observations
75
+ - [trend] [Observation about patterns in recent activity]
76
+ - [insight] [Connection between different activities]
77
+
78
+ ## Relations
79
+ - summarizes [[{recent.primary_results[0].title if recent.primary_results else "Recent Topic"}]]
80
+ - relates_to [[Project Overview]]
81
+ '''
82
+ )
83
+ ```
84
+
85
+ Summarizing periodic activity helps create high-level insights and connections between topics.
86
+ """
87
+
88
+ return prompt_context + capture_suggestions