basic-memory 0.13.0b3__py3-none-any.whl → 0.13.0b5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of basic-memory might be problematic.

Files changed (41)
  1. basic_memory/__init__.py +1 -7
  2. basic_memory/alembic/versions/647e7a75e2cd_project_constraint_fix.py +104 -0
  3. basic_memory/api/routers/knowledge_router.py +13 -0
  4. basic_memory/api/routers/memory_router.py +3 -4
  5. basic_memory/api/routers/project_router.py +9 -9
  6. basic_memory/api/routers/prompt_router.py +2 -2
  7. basic_memory/cli/commands/project.py +2 -2
  8. basic_memory/cli/commands/status.py +1 -1
  9. basic_memory/cli/commands/sync.py +1 -1
  10. basic_memory/mcp/prompts/__init__.py +2 -0
  11. basic_memory/mcp/prompts/sync_status.py +116 -0
  12. basic_memory/mcp/server.py +6 -6
  13. basic_memory/mcp/tools/__init__.py +4 -0
  14. basic_memory/mcp/tools/build_context.py +32 -7
  15. basic_memory/mcp/tools/canvas.py +2 -1
  16. basic_memory/mcp/tools/delete_note.py +159 -4
  17. basic_memory/mcp/tools/edit_note.py +17 -11
  18. basic_memory/mcp/tools/move_note.py +252 -40
  19. basic_memory/mcp/tools/project_management.py +35 -3
  20. basic_memory/mcp/tools/read_note.py +9 -2
  21. basic_memory/mcp/tools/search.py +180 -8
  22. basic_memory/mcp/tools/sync_status.py +254 -0
  23. basic_memory/mcp/tools/utils.py +47 -0
  24. basic_memory/mcp/tools/view_note.py +66 -0
  25. basic_memory/mcp/tools/write_note.py +13 -2
  26. basic_memory/models/project.py +1 -3
  27. basic_memory/repository/search_repository.py +99 -26
  28. basic_memory/schemas/base.py +33 -5
  29. basic_memory/schemas/memory.py +58 -1
  30. basic_memory/services/entity_service.py +4 -4
  31. basic_memory/services/initialization.py +32 -5
  32. basic_memory/services/link_resolver.py +20 -5
  33. basic_memory/services/migration_service.py +168 -0
  34. basic_memory/services/project_service.py +157 -56
  35. basic_memory/services/sync_status_service.py +181 -0
  36. basic_memory/sync/sync_service.py +55 -2
  37. {basic_memory-0.13.0b3.dist-info → basic_memory-0.13.0b5.dist-info}/METADATA +2 -2
  38. {basic_memory-0.13.0b3.dist-info → basic_memory-0.13.0b5.dist-info}/RECORD +41 -35
  39. {basic_memory-0.13.0b3.dist-info → basic_memory-0.13.0b5.dist-info}/WHEEL +0 -0
  40. {basic_memory-0.13.0b3.dist-info → basic_memory-0.13.0b5.dist-info}/entry_points.txt +0 -0
  41. {basic_memory-0.13.0b3.dist-info → basic_memory-0.13.0b5.dist-info}/licenses/LICENSE +0 -0
basic_memory/__init__.py CHANGED
@@ -1,9 +1,3 @@
  """basic-memory - Local-first knowledge management combining Zettelkasten with knowledge graphs"""

- try:
-     from importlib.metadata import version
-
-     __version__ = version("basic-memory")
- except Exception:  # pragma: no cover
-     # Fallback if package not installed (e.g., during development)
-     __version__ = "0.0.0"  # pragma: no cover
+ __version__ = "0.13.0b5"
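Note: the version string is now hard-coded instead of being resolved through importlib.metadata, so importing the package is enough to read it. A minimal check, assuming the wheel is installed:

import basic_memory

# With this release the attribute is a literal rather than a metadata lookup.
print(basic_memory.__version__)  # -> "0.13.0b5"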
basic_memory/alembic/versions/647e7a75e2cd_project_constraint_fix.py ADDED
@@ -0,0 +1,104 @@
+ """project constraint fix
+
+ Revision ID: 647e7a75e2cd
+ Revises: 5fe1ab1ccebe
+ Create Date: 2025-06-03 12:48:30.162566
+
+ """
+
+ from typing import Sequence, Union
+
+ from alembic import op
+ import sqlalchemy as sa
+
+
+ # revision identifiers, used by Alembic.
+ revision: str = "647e7a75e2cd"
+ down_revision: Union[str, None] = "5fe1ab1ccebe"
+ branch_labels: Union[str, Sequence[str], None] = None
+ depends_on: Union[str, Sequence[str], None] = None
+
+
+ def upgrade() -> None:
+     """Remove the problematic UNIQUE constraint on is_default column.
+
+     The UNIQUE constraint prevents multiple projects from having is_default=FALSE,
+     which breaks project creation when the service sets is_default=False.
+
+     Since SQLite doesn't support dropping specific constraints easily, we'll
+     recreate the table without the problematic constraint.
+     """
+     # For SQLite, we need to recreate the table without the UNIQUE constraint
+     # Create a new table without the UNIQUE constraint on is_default
+     op.create_table(
+         "project_new",
+         sa.Column("id", sa.Integer(), nullable=False),
+         sa.Column("name", sa.String(), nullable=False),
+         sa.Column("description", sa.Text(), nullable=True),
+         sa.Column("permalink", sa.String(), nullable=False),
+         sa.Column("path", sa.String(), nullable=False),
+         sa.Column("is_active", sa.Boolean(), nullable=False),
+         sa.Column("is_default", sa.Boolean(), nullable=True),  # No UNIQUE constraint!
+         sa.Column("created_at", sa.DateTime(), nullable=False),
+         sa.Column("updated_at", sa.DateTime(), nullable=False),
+         sa.PrimaryKeyConstraint("id"),
+         sa.UniqueConstraint("name"),
+         sa.UniqueConstraint("permalink"),
+     )
+
+     # Copy data from old table to new table
+     op.execute("INSERT INTO project_new SELECT * FROM project")
+
+     # Drop the old table
+     op.drop_table("project")
+
+     # Rename the new table
+     op.rename_table("project_new", "project")
+
+     # Recreate the indexes
+     with op.batch_alter_table("project", schema=None) as batch_op:
+         batch_op.create_index("ix_project_created_at", ["created_at"], unique=False)
+         batch_op.create_index("ix_project_name", ["name"], unique=True)
+         batch_op.create_index("ix_project_path", ["path"], unique=False)
+         batch_op.create_index("ix_project_permalink", ["permalink"], unique=True)
+         batch_op.create_index("ix_project_updated_at", ["updated_at"], unique=False)
+
+
+ def downgrade() -> None:
+     """Add back the UNIQUE constraint on is_default column.
+
+     WARNING: This will break project creation again if multiple projects
+     have is_default=FALSE.
+     """
+     # Recreate the table with the UNIQUE constraint
+     op.create_table(
+         "project_old",
+         sa.Column("id", sa.Integer(), nullable=False),
+         sa.Column("name", sa.String(), nullable=False),
+         sa.Column("description", sa.Text(), nullable=True),
+         sa.Column("permalink", sa.String(), nullable=False),
+         sa.Column("path", sa.String(), nullable=False),
+         sa.Column("is_active", sa.Boolean(), nullable=False),
+         sa.Column("is_default", sa.Boolean(), nullable=True),
+         sa.Column("created_at", sa.DateTime(), nullable=False),
+         sa.Column("updated_at", sa.DateTime(), nullable=False),
+         sa.PrimaryKeyConstraint("id"),
+         sa.UniqueConstraint("is_default"),  # Add back the problematic constraint
+         sa.UniqueConstraint("name"),
+         sa.UniqueConstraint("permalink"),
+     )
+
+     # Copy data (this may fail if multiple FALSE values exist)
+     op.execute("INSERT INTO project_old SELECT * FROM project")
+
+     # Drop the current table and rename
+     op.drop_table("project")
+     op.rename_table("project_old", "project")
+
+     # Recreate indexes
+     with op.batch_alter_table("project", schema=None) as batch_op:
+         batch_op.create_index("ix_project_created_at", ["created_at"], unique=False)
+         batch_op.create_index("ix_project_name", ["name"], unique=True)
+         batch_op.create_index("ix_project_path", ["path"], unique=False)
+         batch_op.create_index("ix_project_permalink", ["permalink"], unique=True)
+         batch_op.create_index("ix_project_updated_at", ["updated_at"], unique=False)
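The failure mode this migration removes comes from how a UNIQUE boolean column behaves in SQLite: many NULLs are allowed, but only one FALSE and one TRUE row. A small stand-alone sqlite3 sketch (schema simplified, not the package's actual DDL) reproduces the error the upgrade docstring describes:

import sqlite3

con = sqlite3.connect(":memory:")
# Simplified stand-in for the old schema, with the problematic UNIQUE on is_default.
con.execute(
    "CREATE TABLE project (id INTEGER PRIMARY KEY, name TEXT UNIQUE, is_default BOOLEAN UNIQUE)"
)
con.execute("INSERT INTO project (name, is_default) VALUES ('main', 1)")
con.execute("INSERT INTO project (name, is_default) VALUES ('work', 0)")

try:
    # A second non-default project also stores is_default=0, which the UNIQUE
    # constraint rejects; this is exactly what broke project creation.
    con.execute("INSERT INTO project (name, is_default) VALUES ('personal', 0)")
except sqlite3.IntegrityError as exc:
    print(exc)  # UNIQUE constraint failed: project.is_default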
basic_memory/api/routers/knowledge_router.py CHANGED
@@ -14,6 +14,7 @@ from basic_memory.deps import (
      FileServiceDep,
      ProjectConfigDep,
      AppConfigDep,
+     SyncServiceDep,
  )
  from basic_memory.schemas import (
      EntityListResponse,
@@ -63,6 +64,7 @@ async def create_or_update_entity(
      entity_service: EntityServiceDep,
      search_service: SearchServiceDep,
      file_service: FileServiceDep,
+     sync_service: SyncServiceDep,
  ) -> EntityResponse:
      """Create or update an entity. If entity exists, it will be updated, otherwise created."""
      logger.info(
@@ -85,6 +87,17 @@ async def create_or_update_entity(

      # reindex
      await search_service.index_entity(entity, background_tasks=background_tasks)
+
+     # Attempt immediate relation resolution when creating new entities
+     # This helps resolve forward references when related entities are created in the same session
+     if created:
+         try:
+             await sync_service.resolve_relations()
+             logger.debug(f"Resolved relations after creating entity: {entity.permalink}")
+         except Exception as e:  # pragma: no cover
+             # Don't fail the entire request if relation resolution fails
+             logger.warning(f"Failed to resolve relations after entity creation: {e}")
+
      result = EntityResponse.model_validate(entity)

      logger.info(
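The new resolve_relations() call targets forward references: a note can link to an entity that does not exist yet, and that relation stays unresolved until the target is created. A hypothetical illustration of the note contents involved (the wikilink relation syntax here is illustrative, not copied from this diff):

# Hypothetical note bodies, written in this order during one session.
# The wikilink target of note_a does not exist yet, so its relation is unresolved.
note_a = """# Search Design
- relates_to [[Indexing Strategy]]
"""

# Creating the target later in the same session now triggers
# sync_service.resolve_relations(), which links note_a's forward reference.
note_b = """# Indexing Strategy
"""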
basic_memory/api/routers/memory_router.py CHANGED
@@ -2,12 +2,11 @@

  from typing import Annotated, Optional

- from dateparser import parse
  from fastapi import APIRouter, Query
  from loguru import logger

  from basic_memory.deps import ContextServiceDep, EntityRepositoryDep
- from basic_memory.schemas.base import TimeFrame
+ from basic_memory.schemas.base import TimeFrame, parse_timeframe
  from basic_memory.schemas.memory import (
      GraphContext,
      normalize_memory_url,
@@ -40,7 +39,7 @@ async def recent(
          f"Getting recent context: `{types}` depth: `{depth}` timeframe: `{timeframe}` page: `{page}` page_size: `{page_size}` max_related: `{max_related}`"
      )
      # Parse timeframe
-     since = parse(timeframe)
+     since = parse_timeframe(timeframe)
      limit = page_size
      offset = (page - 1) * page_size

@@ -78,7 +77,7 @@ async def get_memory_context(
      memory_url = normalize_memory_url(uri)

      # Parse timeframe
-     since = parse(timeframe) if timeframe else None
+     since = parse_timeframe(timeframe) if timeframe else None
      limit = page_size
      offset = (page - 1) * page_size

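Both handlers above now call parse_timeframe from basic_memory.schemas.base instead of using dateparser directly. Its real implementation is not shown in this diff; purely as a sketch of the behavior the callers appear to rely on (natural-language phrases plus "7d"/"24h" shorthands), a hypothetical helper could look like this:

import re
from datetime import datetime, timedelta
from typing import Optional

from dateparser import parse  # the library the old code called directly


def parse_timeframe(timeframe: str) -> Optional[datetime]:
    """Hypothetical sketch: accept '7d'/'24h' shorthands or natural language."""
    shorthand = re.fullmatch(r"(\d+)([dh])", timeframe.strip())
    if shorthand:
        value, unit = int(shorthand.group(1)), shorthand.group(2)
        delta = timedelta(days=value) if unit == "d" else timedelta(hours=value)
        return datetime.now() - delta
    # Fall back to phrases like "2 days ago", "last week", "today"
    return parse(timeframe)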
basic_memory/api/routers/project_router.py CHANGED
@@ -3,7 +3,7 @@
  from fastapi import APIRouter, HTTPException, Path, Body
  from typing import Optional

- from basic_memory.deps import ProjectServiceDep
+ from basic_memory.deps import ProjectServiceDep, ProjectPathDep
  from basic_memory.schemas import ProjectInfoResponse
  from basic_memory.schemas.project_info import (
      ProjectList,
@@ -22,9 +22,10 @@ project_resource_router = APIRouter(prefix="/projects", tags=["project_managemen
  @project_router.get("/info", response_model=ProjectInfoResponse)
  async def get_project_info(
      project_service: ProjectServiceDep,
+     project: ProjectPathDep,
  ) -> ProjectInfoResponse:
-     """Get comprehensive information about the current Basic Memory project."""
-     return await project_service.get_project_info()
+     """Get comprehensive information about the specified Basic Memory project."""
+     return await project_service.get_project_info(project)


  # Update a project
@@ -47,7 +48,7 @@ async def update_project(
      """
      try:  # pragma: no cover
          # Get original project info for the response
-         old_project = ProjectItem(
+         old_project_info = ProjectItem(
              name=project_name,
              path=project_service.projects.get(project_name, ""),
          )
@@ -61,7 +62,7 @@ async def update_project(
              message=f"Project '{project_name}' updated successfully",
              status="success",
              default=(project_name == project_service.default_project),
-             old_project=old_project,
+             old_project=old_project_info,
              new_project=ProjectItem(name=project_name, path=updated_path),
          )
      except ValueError as e:  # pragma: no cover
@@ -111,10 +112,9 @@ async def add_project(
          Response confirming the project was added
      """
      try:  # pragma: no cover
-         await project_service.add_project(project_data.name, project_data.path)
-
-         if project_data.set_default:  # pragma: no cover
-             await project_service.set_default_project(project_data.name)
+         await project_service.add_project(
+             project_data.name, project_data.path, set_default=project_data.set_default
+         )

          return ProjectStatusResponse(  # pyright: ignore [reportCallIssue]
              message=f"Project '{project_data.name}' added successfully",
basic_memory/api/routers/prompt_router.py CHANGED
@@ -5,12 +5,12 @@ It centralizes all prompt formatting logic that was previously in the MCP prompt
  """

  from datetime import datetime, timezone
- from dateparser import parse
  from fastapi import APIRouter, HTTPException, status
  from loguru import logger

  from basic_memory.api.routers.utils import to_graph_context, to_search_results
  from basic_memory.api.template_loader import template_loader
+ from basic_memory.schemas.base import parse_timeframe
  from basic_memory.deps import (
      ContextServiceDep,
      EntityRepositoryDep,
@@ -51,7 +51,7 @@ async def continue_conversation(
          f"Generating continue conversation prompt, topic: {request.topic}, timeframe: {request.timeframe}"
      )

-     since = parse(request.timeframe) if request.timeframe else None
+     since = parse_timeframe(request.timeframe) if request.timeframe else None

      # Initialize search results
      search_results = []
basic_memory/cli/commands/project.py CHANGED
@@ -221,7 +221,7 @@ def display_project_info(
      console.print(entity_types_table)

      # Most connected entities
-     if info.statistics.most_connected_entities:
+     if info.statistics.most_connected_entities:  # pragma: no cover
          connected_table = Table(title="🔗 Most Connected Entities")
          connected_table.add_column("Title", style="blue")
          connected_table.add_column("Permalink", style="cyan")
@@ -235,7 +235,7 @@ def display_project_info(
          console.print(connected_table)

      # Recent activity
-     if info.activity.recently_updated:
+     if info.activity.recently_updated:  # pragma: no cover
          recent_table = Table(title="🕒 Recent Activity")
          recent_table.add_column("Title", style="blue")
          recent_table.add_column("Type", style="cyan")
basic_memory/cli/commands/status.py CHANGED
@@ -122,7 +122,7 @@ def display_changes(project_name: str, title: str, changes: SyncReport, verbose:
      console.print(Panel(tree, expand=False))


- async def run_status(verbose: bool = False):
+ async def run_status(verbose: bool = False):  # pragma: no cover
      """Check sync status of files vs database."""
      # Check knowledge/ directory

basic_memory/cli/commands/sync.py CHANGED
@@ -180,7 +180,7 @@ async def run_sync(verbose: bool = False):
      sync_service = await get_sync_service(project)

      logger.info("Running one-time sync")
-     knowledge_changes = await sync_service.sync(config.home)
+     knowledge_changes = await sync_service.sync(config.home, project_name=project.name)

      # Log results
      duration_ms = int((time.time() - start_time) * 1000)
basic_memory/mcp/prompts/__init__.py CHANGED
@@ -10,10 +10,12 @@ from basic_memory.mcp.prompts import continue_conversation
  from basic_memory.mcp.prompts import recent_activity
  from basic_memory.mcp.prompts import search
  from basic_memory.mcp.prompts import ai_assistant_guide
+ from basic_memory.mcp.prompts import sync_status

  __all__ = [
      "ai_assistant_guide",
      "continue_conversation",
      "recent_activity",
      "search",
+     "sync_status",
  ]
basic_memory/mcp/prompts/sync_status.py ADDED
@@ -0,0 +1,116 @@
+ """Sync status prompt for Basic Memory MCP server."""
+
+ from basic_memory.mcp.server import mcp
+
+
+ @mcp.prompt(
+     description="""Get sync status with recommendations for AI assistants.
+
+     This prompt provides both current sync status and guidance on how
+     AI assistants should respond when sync operations are in progress or completed.
+     """,
+ )
+ async def sync_status_prompt() -> str:
+     """Get sync status with AI assistant guidance.
+
+     This prompt provides detailed sync status information along with
+     recommendations for how AI assistants should handle different sync states.
+
+     Returns:
+         Formatted sync status with AI assistant guidance
+     """
+     try:  # pragma: no cover
+         from basic_memory.services.migration_service import migration_manager
+
+         state = migration_manager.state
+
+         # Build status report
+         lines = [
+             "# Basic Memory Sync Status",
+             "",
+             f"**Current Status**: {state.status.value.replace('_', ' ').title()}",
+             f"**System Ready**: {'Yes' if migration_manager.is_ready else 'No'}",
+             "",
+         ]
+
+         if migration_manager.is_ready:
+             lines.extend(
+                 [
+                     "✅ **All sync operations completed** - System is fully operational",
+                     "",
+                     "All Basic Memory tools are available and functioning normally.",
+                     "File indexing is complete and knowledge graphs are up to date.",
+                     "You can proceed with any knowledge management tasks.",
+                 ]
+             )
+         else:
+             lines.append(f"**Status Message**: {state.message}")
+
+             if state.status.value == "in_progress":
+                 if state.projects_total > 0:
+                     progress = f" ({state.projects_migrated}/{state.projects_total})"
+                     lines.append(f"**Progress**: {progress}")
+
+                 lines.extend(
+                     [
+                         "",
+                         "🔄 **File synchronization in progress** - Processing files and building knowledge graphs",
+                         "",
+                         "**Impact**: Some tools may show status messages instead of normal responses",
+                         "until sync completes (usually 1-3 minutes).",
+                     ]
+                 )
+
+             elif state.status.value == "failed":
+                 lines.extend(
+                     [
+                         "",
+                         f"❌ **Sync failed**: {state.error or 'Unknown error'}",
+                         "",
+                         "**Impact**: System may have limited functionality until issue is resolved.",
+                     ]
+                 )
+
+         # Add AI assistant recommendations
+         if not migration_manager.is_ready:
+             lines.extend(
+                 [
+                     "",
+                     "---",
+                     "",
+                     "## AI Assistant Recommendations",
+                     "",
+                     "**When sync is in progress:**",
+                     "- Inform the user about the background file processing",
+                     "- Suggest using `sync_status()` tool to check progress",
+                     "- Explain that tools will work normally once sync completes",
+                     "- Avoid creating complex workflows until sync is done",
+                     "",
+                     "**What to tell users:**",
+                     "- 'Basic Memory is processing your files and building knowledge graphs'",
+                     "- 'This usually takes 1-3 minutes depending on your content size'",
+                     "- 'You can check progress anytime with the sync_status tool'",
+                     "- 'Full functionality will be available once processing completes'",
+                     "",
+                     "**User-friendly language:**",
+                     "- Say 'processing files' instead of 'migration' or 'sync'",
+                     "- Say 'building knowledge graphs' instead of 'indexing'",
+                     "- Say 'setting up your knowledge base' instead of 'running migrations'",
+                 ]
+             )
+
+         return "\n".join(lines)
+
+     except Exception as e:  # pragma: no cover
+         return f"""# Sync Status - Error
+
+ ❌ **Unable to check sync status**: {str(e)}
+
+ ## AI Assistant Recommendations
+
+ **When status is unavailable:**
+ - Assume the system is likely working normally
+ - Try proceeding with normal operations
+ - If users report issues, suggest checking logs or restarting
+ - Use user-friendly language about 'setting up the knowledge base'
+ """
basic_memory/mcp/server.py CHANGED
@@ -31,23 +31,23 @@ load_dotenv()
  @dataclass
  class AppContext:
      watch_task: Optional[asyncio.Task]
+     migration_manager: Optional[Any] = None


  @asynccontextmanager
  async def app_lifespan(server: FastMCP) -> AsyncIterator[AppContext]:  # pragma: no cover
      """Manage application lifecycle with type-safe context"""
-     # Initialize on startup
-     watch_task = await initialize_app(app_config)
+     # Initialize on startup (now returns migration_manager)
+     migration_manager = await initialize_app(app_config)

      # Initialize project session with default project
      session.initialize(app_config.default_project)

      try:
-         yield AppContext(watch_task=watch_task)
+         yield AppContext(watch_task=None, migration_manager=migration_manager)
      finally:
-         # Cleanup on shutdown
-         if watch_task:
-             watch_task.cancel()
+         # Cleanup on shutdown - migration tasks will be cancelled automatically
+         pass


  # OAuth configuration function
basic_memory/mcp/tools/__init__.py CHANGED
@@ -11,12 +11,14 @@ from basic_memory.mcp.tools.read_content import read_content
  from basic_memory.mcp.tools.build_context import build_context
  from basic_memory.mcp.tools.recent_activity import recent_activity
  from basic_memory.mcp.tools.read_note import read_note
+ from basic_memory.mcp.tools.view_note import view_note
  from basic_memory.mcp.tools.write_note import write_note
  from basic_memory.mcp.tools.search import search_notes
  from basic_memory.mcp.tools.canvas import canvas
  from basic_memory.mcp.tools.list_directory import list_directory
  from basic_memory.mcp.tools.edit_note import edit_note
  from basic_memory.mcp.tools.move_note import move_note
+ from basic_memory.mcp.tools.sync_status import sync_status
  from basic_memory.mcp.tools.project_management import (
      list_projects,
      switch_project,
@@ -43,5 +45,7 @@ __all__ = [
      "search_notes",
      "set_default_project",
      "switch_project",
+     "sync_status",
+     "view_note",
      "write_note",
  ]
basic_memory/mcp/tools/build_context.py CHANGED
@@ -13,7 +13,6 @@ from basic_memory.schemas.memory import (
      GraphContext,
      MemoryUrl,
      memory_url_path,
-     normalize_memory_url,
  )


@@ -21,12 +20,17 @@ from basic_memory.schemas.memory import (
      description="""Build context from a memory:// URI to continue conversations naturally.

      Use this to follow up on previous discussions or explore related topics.
+
+     Memory URL Format:
+     - Use paths like "folder/note" or "memory://folder/note"
+     - Pattern matching: "folder/*" matches all notes in folder
+     - Valid characters: letters, numbers, hyphens, underscores, forward slashes
+     - Avoid: double slashes (//), angle brackets (<>), quotes, pipes (|)
+     - Examples: "specs/search", "projects/basic-memory", "notes/*"
+
      Timeframes support natural language like:
-     - "2 days ago"
-     - "last week"
-     - "today"
-     - "3 months ago"
-     Or standard formats like "7d", "24h"
+     - "2 days ago", "last week", "today", "3 months ago"
+     - Or standard formats like "7d", "24h"
      """,
  )
  async def build_context(
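As a rough illustration of the path rules the expanded docstring lists (this regex is illustrative only, not the validator the MemoryUrl annotation actually applies):

import re

# Letters, digits, hyphens, underscores, single forward slashes,
# an optional "memory://" prefix and an optional trailing "/*" wildcard.
MEMORY_PATH = re.compile(r"^(memory://)?[\w-]+(/[\w-]+)*(/\*)?$")

for candidate in ["specs/search", "memory://projects/basic-memory", "notes/*",
                  "memory//bad", "notes/<draft>", "a|b"]:
    print(f"{candidate!r}: {'ok' if MEMORY_PATH.match(candidate) else 'rejected'}")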
@@ -76,7 +80,28 @@ async def build_context(
          build_context("memory://specs/search", project="work-project")
      """
      logger.info(f"Building context from {url}")
-     url = normalize_memory_url(url)
+     # URL is already validated and normalized by MemoryUrl type annotation
+
+     # Check migration status and wait briefly if needed
+     from basic_memory.mcp.tools.utils import wait_for_migration_or_return_status
+
+     migration_status = await wait_for_migration_or_return_status(timeout=5.0)
+     if migration_status:  # pragma: no cover
+         # Return a proper GraphContext with status message
+         from basic_memory.schemas.memory import MemoryMetadata
+         from datetime import datetime
+
+         return GraphContext(
+             results=[],
+             metadata=MemoryMetadata(
+                 depth=depth or 1,
+                 timeframe=timeframe,
+                 generated_at=datetime.now(),
+                 primary_count=0,
+                 related_count=0,
+                 uri=migration_status,  # Include status in metadata
+             ),
+         )

      active_project = get_active_project(project)
      project_url = active_project.project_url
basic_memory/mcp/tools/canvas.py CHANGED
@@ -35,7 +35,8 @@ async def canvas(
          nodes: List of node objects following JSON Canvas 1.0 spec
          edges: List of edge objects following JSON Canvas 1.0 spec
          title: The title of the canvas (will be saved as title.canvas)
-         folder: The folder where the file should be saved
+         folder: Folder path relative to project root where the canvas should be saved.
+             Use forward slashes (/) as separators. Examples: "diagrams", "projects/2025", "visual/maps"
          project: Optional project name to create canvas in. If not provided, uses current active project.

      Returns:
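For reference, the kind of payload the clarified folder parameter refers to is a JSON Canvas 1.0 document saved as "<folder>/<title>.canvas". A small hypothetical example (node and edge fields follow the JSON Canvas spec the docstring references, not this diff):

import json

# Hypothetical canvas content; with folder="diagrams/2025" and title="architecture"
# the tool would write it to "diagrams/2025/architecture.canvas" in the project.
canvas_data = {
    "nodes": [
        {"id": "n1", "type": "text", "text": "Basic Memory", "x": 0, "y": 0, "width": 220, "height": 80},
        {"id": "n2", "type": "text", "text": "Sync status", "x": 320, "y": 0, "width": 220, "height": 80},
    ],
    "edges": [{"id": "e1", "fromNode": "n1", "toNode": "n2"}],
}

print(json.dumps(canvas_data, indent=2))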