basic-memory 0.14.4__py3-none-any.whl → 0.15.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of basic-memory might be problematic. Click here for more details.

Files changed (82)
  1. basic_memory/__init__.py +1 -1
  2. basic_memory/alembic/versions/a1b2c3d4e5f6_fix_project_foreign_keys.py +5 -9
  3. basic_memory/api/app.py +10 -4
  4. basic_memory/api/routers/knowledge_router.py +25 -8
  5. basic_memory/api/routers/project_router.py +99 -4
  6. basic_memory/cli/app.py +9 -28
  7. basic_memory/cli/auth.py +277 -0
  8. basic_memory/cli/commands/cloud/__init__.py +5 -0
  9. basic_memory/cli/commands/cloud/api_client.py +112 -0
  10. basic_memory/cli/commands/cloud/bisync_commands.py +818 -0
  11. basic_memory/cli/commands/cloud/core_commands.py +288 -0
  12. basic_memory/cli/commands/cloud/mount_commands.py +295 -0
  13. basic_memory/cli/commands/cloud/rclone_config.py +288 -0
  14. basic_memory/cli/commands/cloud/rclone_installer.py +198 -0
  15. basic_memory/cli/commands/command_utils.py +60 -0
  16. basic_memory/cli/commands/import_memory_json.py +0 -4
  17. basic_memory/cli/commands/mcp.py +16 -4
  18. basic_memory/cli/commands/project.py +139 -142
  19. basic_memory/cli/commands/status.py +34 -22
  20. basic_memory/cli/commands/sync.py +45 -228
  21. basic_memory/cli/commands/tool.py +87 -16
  22. basic_memory/cli/main.py +1 -0
  23. basic_memory/config.py +76 -12
  24. basic_memory/db.py +104 -3
  25. basic_memory/deps.py +20 -3
  26. basic_memory/file_utils.py +37 -13
  27. basic_memory/ignore_utils.py +295 -0
  28. basic_memory/markdown/plugins.py +9 -7
  29. basic_memory/mcp/async_client.py +22 -10
  30. basic_memory/mcp/project_context.py +141 -0
  31. basic_memory/mcp/prompts/ai_assistant_guide.py +49 -4
  32. basic_memory/mcp/prompts/continue_conversation.py +1 -1
  33. basic_memory/mcp/prompts/recent_activity.py +116 -32
  34. basic_memory/mcp/prompts/search.py +1 -1
  35. basic_memory/mcp/prompts/utils.py +11 -4
  36. basic_memory/mcp/resources/ai_assistant_guide.md +179 -41
  37. basic_memory/mcp/resources/project_info.py +20 -6
  38. basic_memory/mcp/server.py +0 -37
  39. basic_memory/mcp/tools/__init__.py +5 -6
  40. basic_memory/mcp/tools/build_context.py +29 -19
  41. basic_memory/mcp/tools/canvas.py +19 -8
  42. basic_memory/mcp/tools/chatgpt_tools.py +178 -0
  43. basic_memory/mcp/tools/delete_note.py +67 -34
  44. basic_memory/mcp/tools/edit_note.py +55 -39
  45. basic_memory/mcp/tools/headers.py +44 -0
  46. basic_memory/mcp/tools/list_directory.py +18 -8
  47. basic_memory/mcp/tools/move_note.py +119 -41
  48. basic_memory/mcp/tools/project_management.py +61 -228
  49. basic_memory/mcp/tools/read_content.py +28 -12
  50. basic_memory/mcp/tools/read_note.py +83 -46
  51. basic_memory/mcp/tools/recent_activity.py +441 -42
  52. basic_memory/mcp/tools/search.py +82 -70
  53. basic_memory/mcp/tools/sync_status.py +5 -4
  54. basic_memory/mcp/tools/utils.py +19 -0
  55. basic_memory/mcp/tools/view_note.py +31 -6
  56. basic_memory/mcp/tools/write_note.py +65 -14
  57. basic_memory/models/knowledge.py +12 -6
  58. basic_memory/models/project.py +6 -2
  59. basic_memory/repository/entity_repository.py +29 -82
  60. basic_memory/repository/relation_repository.py +13 -0
  61. basic_memory/repository/repository.py +2 -2
  62. basic_memory/repository/search_repository.py +4 -2
  63. basic_memory/schemas/__init__.py +6 -0
  64. basic_memory/schemas/base.py +39 -11
  65. basic_memory/schemas/cloud.py +46 -0
  66. basic_memory/schemas/memory.py +90 -21
  67. basic_memory/schemas/project_info.py +9 -10
  68. basic_memory/schemas/sync_report.py +48 -0
  69. basic_memory/services/context_service.py +25 -11
  70. basic_memory/services/entity_service.py +75 -45
  71. basic_memory/services/initialization.py +30 -11
  72. basic_memory/services/project_service.py +13 -23
  73. basic_memory/sync/sync_service.py +145 -21
  74. basic_memory/sync/watch_service.py +101 -40
  75. basic_memory/utils.py +14 -4
  76. {basic_memory-0.14.4.dist-info → basic_memory-0.15.0.dist-info}/METADATA +7 -6
  77. basic_memory-0.15.0.dist-info/RECORD +147 -0
  78. basic_memory/mcp/project_session.py +0 -120
  79. basic_memory-0.14.4.dist-info/RECORD +0 -133
  80. {basic_memory-0.14.4.dist-info → basic_memory-0.15.0.dist-info}/WHEEL +0 -0
  81. {basic_memory-0.14.4.dist-info → basic_memory-0.15.0.dist-info}/entry_points.txt +0 -0
  82. {basic_memory-0.14.4.dist-info → basic_memory-0.15.0.dist-info}/licenses/LICENSE +0 -0
@@ -2,6 +2,7 @@
2
2
 
3
3
  import os
4
4
  from datetime import datetime
5
+ from pathlib import Path
5
6
  from typing import Dict, List, Optional, Any
6
7
 
7
8
  from pydantic import Field, BaseModel
@@ -78,16 +79,6 @@ class SystemStatus(BaseModel):
78
79
  timestamp: datetime = Field(description="Timestamp when the information was collected")
79
80
 
80
81
 
81
- class ProjectDetail(BaseModel):
82
- """Detailed information about a project."""
83
-
84
- path: str = Field(description="Path to the project directory")
85
- active: bool = Field(description="Whether the project is active")
86
- id: Optional[int] = Field(description="Database ID of the project if available")
87
- is_default: bool = Field(description="Whether this is the default project")
88
- permalink: str = Field(description="URL-friendly identifier for the project")
89
-
90
-
91
82
  class ProjectInfoResponse(BaseModel):
92
83
  """Response for the project_info tool."""
93
84
 
@@ -190,6 +181,14 @@ class ProjectItem(BaseModel):
190
181
  def permalink(self) -> str: # pragma: no cover
191
182
  return generate_permalink(self.name)
192
183
 
184
+ @property
185
+ def home(self) -> Path: # pragma: no cover
186
+ return Path(self.name)
187
+
188
+ @property
189
+ def project_url(self) -> str: # pragma: no cover
190
+ return f"/{generate_permalink(self.name)}"
191
+
193
192
 
194
193
  class ProjectList(BaseModel):
195
194
  """Response model for listing projects."""
@@ -0,0 +1,48 @@
1
+ """Pydantic schemas for sync report responses."""
2
+
3
+ from typing import TYPE_CHECKING, Dict, Set
4
+
5
+ from pydantic import BaseModel, Field
6
+
7
+ # avoid circular imports
8
+ if TYPE_CHECKING:
9
+ from basic_memory.sync.sync_service import SyncReport
10
+
11
+
12
+ class SyncReportResponse(BaseModel):
13
+ """Report of file changes found compared to database state.
14
+
15
+ Used for API responses when scanning or syncing files.
16
+ """
17
+
18
+ new: Set[str] = Field(default_factory=set, description="Files on disk but not in database")
19
+ modified: Set[str] = Field(default_factory=set, description="Files with different checksums")
20
+ deleted: Set[str] = Field(default_factory=set, description="Files in database but not on disk")
21
+ moves: Dict[str, str] = Field(
22
+ default_factory=dict, description="Files moved (old_path -> new_path)"
23
+ )
24
+ checksums: Dict[str, str] = Field(
25
+ default_factory=dict, description="Current file checksums (path -> checksum)"
26
+ )
27
+ total: int = Field(description="Total number of changes")
28
+
29
+ @classmethod
30
+ def from_sync_report(cls, report: "SyncReport") -> "SyncReportResponse":
31
+ """Convert SyncReport dataclass to Pydantic model.
32
+
33
+ Args:
34
+ report: SyncReport dataclass from sync service
35
+
36
+ Returns:
37
+ SyncReportResponse with same data
38
+ """
39
+ return cls(
40
+ new=report.new,
41
+ modified=report.modified,
42
+ deleted=report.deleted,
43
+ moves=report.moves,
44
+ checksums=report.checksums,
45
+ total=report.total,
46
+ )
47
+
48
+ model_config = {"from_attributes": True}
@@ -100,20 +100,30 @@ class ContextService:
100
100
  f"Building context for URI: '{memory_url}' depth: '{depth}' since: '{since}' limit: '{limit}' offset: '{offset}' max_related: '{max_related}'"
101
101
  )
102
102
 
103
+ normalized_path: Optional[str] = None
103
104
  if memory_url:
104
105
  path = memory_url_path(memory_url)
105
- # Pattern matching - use search
106
- if "*" in path:
107
- logger.debug(f"Pattern search for '{path}'")
106
+ # Check for wildcards before normalization
107
+ has_wildcard = "*" in path
108
+
109
+ if has_wildcard:
110
+ # For wildcard patterns, normalize each segment separately to preserve the *
111
+ parts = path.split("*")
112
+ normalized_parts = [
113
+ generate_permalink(part, split_extension=False) if part else ""
114
+ for part in parts
115
+ ]
116
+ normalized_path = "*".join(normalized_parts)
117
+ logger.debug(f"Pattern search for '{normalized_path}'")
108
118
  primary = await self.search_repository.search(
109
- permalink_match=path, limit=limit, offset=offset
119
+ permalink_match=normalized_path, limit=limit, offset=offset
110
120
  )
111
-
112
- # Direct lookup for exact path
113
121
  else:
114
- logger.debug(f"Direct lookup for '{path}'")
122
+ # For exact paths, normalize the whole thing
123
+ normalized_path = generate_permalink(path, split_extension=False)
124
+ logger.debug(f"Direct lookup for '{normalized_path}'")
115
125
  primary = await self.search_repository.search(
116
- permalink=path, limit=limit, offset=offset
126
+ permalink=normalized_path, limit=limit, offset=offset
117
127
  )
118
128
  else:
119
129
  logger.debug(f"Build context for '{types}'")
@@ -151,7 +161,7 @@ class ContextService:
151
161
 
152
162
  # Create metadata dataclass
153
163
  metadata = ContextMetadata(
154
- uri=memory_url_path(memory_url) if memory_url else None,
164
+ uri=normalized_path if memory_url else None,
155
165
  types=types,
156
166
  depth=depth,
157
167
  timeframe=since.isoformat() if since else None,
@@ -246,7 +256,11 @@ class ContextService:
246
256
  values = ", ".join([f"('{t}', {i})" for t, i in type_id_pairs])
247
257
 
248
258
  # Parameters for bindings - include project_id for security filtering
249
- params = {"max_depth": max_depth, "max_results": max_results, "project_id": self.search_repository.project_id}
259
+ params = {
260
+ "max_depth": max_depth,
261
+ "max_results": max_results,
262
+ "project_id": self.search_repository.project_id,
263
+ }
250
264
 
251
265
  # Build date and timeframe filters conditionally based on since parameter
252
266
  if since:
@@ -258,7 +272,7 @@ class ContextService:
258
272
  date_filter = ""
259
273
  relation_date_filter = ""
260
274
  timeframe_condition = ""
261
-
275
+
262
276
  # Add project filtering for security - ensure all entities and relations belong to the same project
263
277
  project_filter = "AND e.project_id = :project_id"
264
278
  relation_project_filter = "AND e_from.project_id = :project_id"
@@ -9,7 +9,12 @@ from loguru import logger
9
9
  from sqlalchemy.exc import IntegrityError
10
10
 
11
11
  from basic_memory.config import ProjectConfig, BasicMemoryConfig
12
- from basic_memory.file_utils import has_frontmatter, parse_frontmatter, remove_frontmatter, dump_frontmatter
12
+ from basic_memory.file_utils import (
13
+ has_frontmatter,
14
+ parse_frontmatter,
15
+ remove_frontmatter,
16
+ dump_frontmatter,
17
+ )
13
18
  from basic_memory.markdown import EntityMarkdown
14
19
  from basic_memory.markdown.entity_parser import EntityParser
15
20
  from basic_memory.markdown.utils import entity_model_from_markdown, schema_to_markdown
@@ -37,6 +42,7 @@ class EntityService(BaseService[EntityModel]):
37
42
  relation_repository: RelationRepository,
38
43
  file_service: FileService,
39
44
  link_resolver: LinkResolver,
45
+ app_config: Optional[BasicMemoryConfig] = None,
40
46
  ):
41
47
  super().__init__(entity_repository)
42
48
  self.observation_repository = observation_repository
@@ -44,6 +50,7 @@ class EntityService(BaseService[EntityModel]):
44
50
  self.entity_parser = entity_parser
45
51
  self.file_service = file_service
46
52
  self.link_resolver = link_resolver
53
+ self.app_config = app_config
47
54
 
48
55
  async def detect_file_path_conflicts(self, file_path: str) -> List[Entity]:
49
56
  """Detect potential file path conflicts for a given file path.
@@ -139,10 +146,11 @@ class EntityService(BaseService[EntityModel]):
139
146
  f"Creating or updating entity: {schema.file_path}, permalink: {schema.permalink}"
140
147
  )
141
148
 
142
- # Try to find existing entity using smart resolution
143
- existing = await self.link_resolver.resolve_link(
144
- schema.file_path
145
- ) or await self.link_resolver.resolve_link(schema.permalink)
149
+ # Try to find existing entity using strict resolution (no fuzzy search)
150
+ # This prevents incorrectly matching similar file paths like "Node A.md" and "Node C.md"
151
+ existing = await self.link_resolver.resolve_link(schema.file_path, strict=True)
152
+ if not existing and schema.permalink:
153
+ existing = await self.link_resolver.resolve_link(schema.permalink, strict=True)
146
154
 
147
155
  if existing:
148
156
  logger.debug(f"Found existing entity: {existing.file_path}")
@@ -189,9 +197,15 @@ class EntityService(BaseService[EntityModel]):
189
197
  relations=[],
190
198
  )
191
199
 
192
- # Get unique permalink (prioritizing content frontmatter)
193
- permalink = await self.resolve_permalink(file_path, content_markdown)
194
- schema._permalink = permalink
200
+ # Get unique permalink (prioritizing content frontmatter) unless disabled
201
+ if self.app_config and self.app_config.disable_permalinks:
202
+ # Use empty string as sentinel to indicate permalinks are disabled
203
+ # The permalink property will return None when it sees empty string
204
+ schema._permalink = ""
205
+ else:
206
+ # Generate and set permalink
207
+ permalink = await self.resolve_permalink(file_path, content_markdown)
208
+ schema._permalink = permalink
195
209
 
196
210
  post = await schema_to_markdown(schema)
197
211
 
@@ -249,15 +263,16 @@ class EntityService(BaseService[EntityModel]):
249
263
  relations=[],
250
264
  )
251
265
 
252
- # Check if we need to update the permalink based on content frontmatter
266
+ # Check if we need to update the permalink based on content frontmatter (unless disabled)
253
267
  new_permalink = entity.permalink # Default to existing
254
- if content_markdown and content_markdown.frontmatter.permalink:
255
- # Resolve permalink with the new content frontmatter
256
- resolved_permalink = await self.resolve_permalink(file_path, content_markdown)
257
- if resolved_permalink != entity.permalink:
258
- new_permalink = resolved_permalink
259
- # Update the schema to use the new permalink
260
- schema._permalink = new_permalink
268
+ if self.app_config and not self.app_config.disable_permalinks:
269
+ if content_markdown and content_markdown.frontmatter.permalink:
270
+ # Resolve permalink with the new content frontmatter
271
+ resolved_permalink = await self.resolve_permalink(file_path, content_markdown)
272
+ if resolved_permalink != entity.permalink:
273
+ new_permalink = resolved_permalink
274
+ # Update the schema to use the new permalink
275
+ schema._permalink = new_permalink
261
276
 
262
277
  # Create post with new content from schema
263
278
  post = await schema_to_markdown(schema)
@@ -417,34 +432,47 @@ class EntityService(BaseService[EntityModel]):
417
432
  # Clear existing relations first
418
433
  await self.relation_repository.delete_outgoing_relations_from_entity(db_entity.id)
419
434
 
420
- # Process each relation
421
- for rel in markdown.relations:
422
- # Resolve the target permalink
423
- target_entity = await self.link_resolver.resolve_link(
424
- rel.target,
425
- )
426
-
427
- # if the target is found, store the id
428
- target_id = target_entity.id if target_entity else None
429
- # if the target is found, store the title, otherwise add the target for a "forward link"
430
- target_name = target_entity.title if target_entity else rel.target
431
-
432
- # Create the relation
433
- relation = Relation(
434
- from_id=db_entity.id,
435
- to_id=target_id,
436
- to_name=target_name,
437
- relation_type=rel.type,
438
- context=rel.context,
439
- )
440
- try:
441
- await self.relation_repository.add(relation)
442
- except IntegrityError:
443
- # Unique constraint violation - relation already exists
444
- logger.debug(
445
- f"Skipping duplicate relation {rel.type} from {db_entity.permalink} target: {rel.target}"
435
+ # Batch resolve all relation targets in parallel
436
+ if markdown.relations:
437
+ import asyncio
438
+
439
+ # Create tasks for all relation lookups
440
+ lookup_tasks = [
441
+ self.link_resolver.resolve_link(rel.target) for rel in markdown.relations
442
+ ]
443
+
444
+ # Execute all lookups in parallel
445
+ resolved_entities = await asyncio.gather(*lookup_tasks, return_exceptions=True)
446
+
447
+ # Process results and create relation records
448
+ for rel, resolved in zip(markdown.relations, resolved_entities):
449
+ # Handle exceptions from gather and None results
450
+ target_entity: Optional[Entity] = None
451
+ if not isinstance(resolved, Exception):
452
+ # Type narrowing: resolved is Optional[Entity] here, not Exception
453
+ target_entity = resolved # type: ignore
454
+
455
+ # if the target is found, store the id
456
+ target_id = target_entity.id if target_entity else None
457
+ # if the target is found, store the title, otherwise add the target for a "forward link"
458
+ target_name = target_entity.title if target_entity else rel.target
459
+
460
+ # Create the relation
461
+ relation = Relation(
462
+ from_id=db_entity.id,
463
+ to_id=target_id,
464
+ to_name=target_name,
465
+ relation_type=rel.type,
466
+ context=rel.context,
446
467
  )
447
- continue
468
+ try:
469
+ await self.relation_repository.add(relation)
470
+ except IntegrityError:
471
+ # Unique constraint violation - relation already exists
472
+ logger.debug(
473
+ f"Skipping duplicate relation {rel.type} from {db_entity.permalink} target: {rel.target}"
474
+ )
475
+ continue
448
476
 
449
477
  return await self.repository.get_by_file_path(path)
450
478
 
@@ -728,8 +756,10 @@ class EntityService(BaseService[EntityModel]):
728
756
  # 6. Prepare database updates
729
757
  updates = {"file_path": destination_path}
730
758
 
731
- # 7. Update permalink if configured or if entity has null permalink
732
- if app_config.update_permalinks_on_move or old_permalink is None:
759
+ # 7. Update permalink if configured or if entity has null permalink (unless disabled)
760
+ if not app_config.disable_permalinks and (
761
+ app_config.update_permalinks_on_move or old_permalink is None
762
+ ):
733
763
  # Generate new permalink from destination path
734
764
  new_permalink = await self.resolve_permalink(destination_path)
735
765
 
@@ -11,7 +11,10 @@ from loguru import logger
11
11
 
12
12
  from basic_memory import db
13
13
  from basic_memory.config import BasicMemoryConfig
14
- from basic_memory.repository import ProjectRepository
14
+ from basic_memory.models import Project
15
+ from basic_memory.repository import (
16
+ ProjectRepository,
17
+ )
15
18
 
16
19
 
17
20
  async def initialize_database(app_config: BasicMemoryConfig) -> None:
@@ -101,18 +104,20 @@ async def initialize_file_sync(
101
104
  # Get active projects
102
105
  active_projects = await project_repository.get_active_projects()
103
106
 
104
- # First, sync all projects sequentially
105
- for project in active_projects:
107
+ # Start sync for all projects as background tasks (non-blocking)
108
+ async def sync_project_background(project: Project):
109
+ """Sync a single project in the background."""
106
110
  # avoid circular imports
107
- from basic_memory.cli.commands.sync import get_sync_service
108
-
109
- logger.info(f"Starting sync for project: {project.name}")
110
- sync_service = await get_sync_service(project)
111
- sync_dir = Path(project.path)
111
+ from basic_memory.sync.sync_service import get_sync_service
112
112
 
113
+ logger.info(f"Starting background sync for project: {project.name}")
113
114
  try:
115
+ # Create sync service
116
+ sync_service = await get_sync_service(project)
117
+
118
+ sync_dir = Path(project.path)
114
119
  await sync_service.sync(sync_dir, project_name=project.name)
115
- logger.info(f"Sync completed successfully for project: {project.name}")
120
+ logger.info(f"Background sync completed successfully for project: {project.name}")
116
121
 
117
122
  # Mark project as watching for changes after successful sync
118
123
  from basic_memory.services.sync_status_service import sync_status_tracker
@@ -120,12 +125,19 @@ async def initialize_file_sync(
120
125
  sync_status_tracker.start_project_watch(project.name)
121
126
  logger.info(f"Project {project.name} is now watching for changes")
122
127
  except Exception as e: # pragma: no cover
123
- logger.error(f"Error syncing project {project.name}: {e}")
128
+ logger.error(f"Error in background sync for project {project.name}: {e}")
124
129
  # Mark sync as failed for this project
125
130
  from basic_memory.services.sync_status_service import sync_status_tracker
126
131
 
127
132
  sync_status_tracker.fail_project_sync(project.name, str(e))
128
- # Continue with other projects even if one fails
133
+
134
+ # Create background tasks for all project syncs (non-blocking)
135
+ sync_tasks = [
136
+ asyncio.create_task(sync_project_background(project)) for project in active_projects
137
+ ]
138
+ logger.info(f"Created {len(sync_tasks)} background sync tasks")
139
+
140
+ # Don't await the tasks - let them run in background while we continue
129
141
 
130
142
  # Then start the watch service in the background
131
143
  logger.info("Starting watch service for all projects")
@@ -169,9 +181,16 @@ def ensure_initialization(app_config: BasicMemoryConfig) -> None:
169
181
  This is a wrapper for the async initialize_app function that can be
170
182
  called from synchronous code like CLI entry points.
171
183
 
184
+ No-op if app_config.cloud_mode == True. Cloud basic memory manages its own projects.
185
+
172
186
  Args:
173
187
  app_config: The Basic Memory project configuration
174
188
  """
189
+ # Skip initialization in cloud mode - cloud manages its own projects
190
+ if app_config.cloud_mode_enabled:
191
+ logger.debug("Skipping initialization in cloud mode - projects managed by cloud")
192
+ return
193
+
175
194
  try:
176
195
  result = asyncio.run(initialize_app(app_config))
177
196
  logger.info(f"Initialization completed successfully: result={result}")
@@ -21,6 +21,9 @@ from basic_memory.config import WATCH_STATUS_JSON, ConfigManager, get_project_co
21
21
  from basic_memory.utils import generate_permalink
22
22
 
23
23
 
24
+ config = ConfigManager().config
25
+
26
+
24
27
  class ProjectService:
25
28
  """Service for managing Basic Memory projects."""
26
29
 
@@ -96,11 +99,16 @@ class ProjectService:
96
99
  Raises:
97
100
  ValueError: If the project already exists
98
101
  """
99
- if not self.repository: # pragma: no cover
100
- raise ValueError("Repository is required for add_project")
101
-
102
- # Resolve to absolute path
103
- resolved_path = Path(os.path.abspath(os.path.expanduser(path))).as_posix()
102
+ # in cloud mode, don't allow arbitrary paths.
103
+ if config.cloud_mode:
104
+ basic_memory_home = os.getenv("BASIC_MEMORY_HOME")
105
+ assert basic_memory_home is not None
106
+ base_path = Path(basic_memory_home)
107
+
108
+ # Resolve to absolute path
109
+ resolved_path = Path(os.path.abspath(os.path.expanduser(base_path / path))).as_posix()
110
+ else:
111
+ resolved_path = Path(os.path.abspath(os.path.expanduser(path))).as_posix()
104
112
 
105
113
  # First add to config file (this will validate the project doesn't exist)
106
114
  project_config = self.config_manager.add_project(name, resolved_path)
@@ -170,15 +178,6 @@ class ProjectService:
170
178
 
171
179
  logger.info(f"Project '{name}' set as default in configuration and database")
172
180
 
173
- # Refresh MCP session to pick up the new default project
174
- try:
175
- from basic_memory.mcp.project_session import session
176
-
177
- session.refresh_from_config()
178
- except ImportError: # pragma: no cover
179
- # MCP components might not be available in all contexts (e.g., CLI-only usage)
180
- logger.debug("MCP session not available, skipping session refresh")
181
-
182
181
  async def _ensure_single_default_project(self) -> None:
183
182
  """Ensure only one project has is_default=True.
184
183
 
@@ -300,15 +299,6 @@ class ProjectService:
300
299
 
301
300
  logger.info("Project synchronization complete")
302
301
 
303
- # Refresh MCP session to ensure it's in sync with current config
304
- try:
305
- from basic_memory.mcp.project_session import session
306
-
307
- session.refresh_from_config()
308
- except ImportError:
309
- # MCP components might not be available in all contexts
310
- logger.debug("MCP session not available, skipping session refresh")
311
-
312
302
  async def move_project(self, name: str, new_path: str) -> None:
313
303
  """Move a project to a new location.
314
304