basic-memory 0.14.4__py3-none-any.whl → 0.15.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of basic-memory might be problematic. Click here for more details.

Files changed (84)
  1. basic_memory/__init__.py +1 -1
  2. basic_memory/alembic/versions/a1b2c3d4e5f6_fix_project_foreign_keys.py +5 -9
  3. basic_memory/api/app.py +10 -4
  4. basic_memory/api/routers/directory_router.py +23 -2
  5. basic_memory/api/routers/knowledge_router.py +25 -8
  6. basic_memory/api/routers/project_router.py +100 -4
  7. basic_memory/cli/app.py +9 -28
  8. basic_memory/cli/auth.py +277 -0
  9. basic_memory/cli/commands/cloud/__init__.py +5 -0
  10. basic_memory/cli/commands/cloud/api_client.py +112 -0
  11. basic_memory/cli/commands/cloud/bisync_commands.py +818 -0
  12. basic_memory/cli/commands/cloud/core_commands.py +288 -0
  13. basic_memory/cli/commands/cloud/mount_commands.py +295 -0
  14. basic_memory/cli/commands/cloud/rclone_config.py +288 -0
  15. basic_memory/cli/commands/cloud/rclone_installer.py +198 -0
  16. basic_memory/cli/commands/command_utils.py +43 -0
  17. basic_memory/cli/commands/import_memory_json.py +0 -4
  18. basic_memory/cli/commands/mcp.py +77 -60
  19. basic_memory/cli/commands/project.py +154 -152
  20. basic_memory/cli/commands/status.py +25 -22
  21. basic_memory/cli/commands/sync.py +45 -228
  22. basic_memory/cli/commands/tool.py +87 -16
  23. basic_memory/cli/main.py +1 -0
  24. basic_memory/config.py +131 -21
  25. basic_memory/db.py +104 -3
  26. basic_memory/deps.py +27 -8
  27. basic_memory/file_utils.py +37 -13
  28. basic_memory/ignore_utils.py +295 -0
  29. basic_memory/markdown/plugins.py +9 -7
  30. basic_memory/mcp/async_client.py +124 -14
  31. basic_memory/mcp/project_context.py +141 -0
  32. basic_memory/mcp/prompts/ai_assistant_guide.py +49 -4
  33. basic_memory/mcp/prompts/continue_conversation.py +17 -16
  34. basic_memory/mcp/prompts/recent_activity.py +116 -32
  35. basic_memory/mcp/prompts/search.py +13 -12
  36. basic_memory/mcp/prompts/utils.py +11 -4
  37. basic_memory/mcp/resources/ai_assistant_guide.md +211 -341
  38. basic_memory/mcp/resources/project_info.py +27 -11
  39. basic_memory/mcp/server.py +0 -37
  40. basic_memory/mcp/tools/__init__.py +5 -6
  41. basic_memory/mcp/tools/build_context.py +67 -56
  42. basic_memory/mcp/tools/canvas.py +38 -26
  43. basic_memory/mcp/tools/chatgpt_tools.py +187 -0
  44. basic_memory/mcp/tools/delete_note.py +81 -47
  45. basic_memory/mcp/tools/edit_note.py +155 -138
  46. basic_memory/mcp/tools/list_directory.py +112 -99
  47. basic_memory/mcp/tools/move_note.py +181 -101
  48. basic_memory/mcp/tools/project_management.py +113 -277
  49. basic_memory/mcp/tools/read_content.py +91 -74
  50. basic_memory/mcp/tools/read_note.py +152 -115
  51. basic_memory/mcp/tools/recent_activity.py +471 -68
  52. basic_memory/mcp/tools/search.py +105 -92
  53. basic_memory/mcp/tools/sync_status.py +136 -130
  54. basic_memory/mcp/tools/utils.py +4 -0
  55. basic_memory/mcp/tools/view_note.py +44 -33
  56. basic_memory/mcp/tools/write_note.py +151 -90
  57. basic_memory/models/knowledge.py +12 -6
  58. basic_memory/models/project.py +6 -2
  59. basic_memory/repository/entity_repository.py +89 -82
  60. basic_memory/repository/relation_repository.py +13 -0
  61. basic_memory/repository/repository.py +18 -5
  62. basic_memory/repository/search_repository.py +46 -2
  63. basic_memory/schemas/__init__.py +6 -0
  64. basic_memory/schemas/base.py +39 -11
  65. basic_memory/schemas/cloud.py +46 -0
  66. basic_memory/schemas/memory.py +90 -21
  67. basic_memory/schemas/project_info.py +9 -10
  68. basic_memory/schemas/sync_report.py +48 -0
  69. basic_memory/services/context_service.py +25 -11
  70. basic_memory/services/directory_service.py +124 -3
  71. basic_memory/services/entity_service.py +100 -48
  72. basic_memory/services/initialization.py +30 -11
  73. basic_memory/services/project_service.py +101 -24
  74. basic_memory/services/search_service.py +16 -8
  75. basic_memory/sync/sync_service.py +173 -34
  76. basic_memory/sync/watch_service.py +101 -40
  77. basic_memory/utils.py +14 -4
  78. {basic_memory-0.14.4.dist-info → basic_memory-0.15.1.dist-info}/METADATA +57 -9
  79. basic_memory-0.15.1.dist-info/RECORD +146 -0
  80. basic_memory/mcp/project_session.py +0 -120
  81. basic_memory-0.14.4.dist-info/RECORD +0 -133
  82. {basic_memory-0.14.4.dist-info → basic_memory-0.15.1.dist-info}/WHEEL +0 -0
  83. {basic_memory-0.14.4.dist-info → basic_memory-0.15.1.dist-info}/entry_points.txt +0 -0
  84. {basic_memory-0.14.4.dist-info → basic_memory-0.15.1.dist-info}/licenses/LICENSE +0 -0
@@ -3,8 +3,9 @@
3
3
  import fnmatch
4
4
  import logging
5
5
  import os
6
- from typing import Dict, List, Optional
6
+ from typing import Dict, List, Optional, Sequence
7
7
 
8
+ from basic_memory.models import Entity
8
9
  from basic_memory.repository import EntityRepository
9
10
  from basic_memory.schemas.directory import DirectoryNode
10
11
 
@@ -89,6 +90,49 @@ class DirectoryService:
89
90
  # Return the root node with its children
90
91
  return root_node
91
92
 
93
+ async def get_directory_structure(self) -> DirectoryNode:
94
+ """Build a hierarchical directory structure without file details.
95
+
96
+ Optimized method for folder navigation that only returns directory nodes,
97
+ no file metadata. Much faster than get_directory_tree() for large knowledge bases.
98
+
99
+ Returns:
100
+ DirectoryNode tree containing only folders (type="directory")
101
+ """
102
+ # Get unique directories without loading entities
103
+ directories = await self.entity_repository.get_distinct_directories()
104
+
105
+ # Create a root directory node
106
+ root_node = DirectoryNode(name="Root", directory_path="/", type="directory")
107
+
108
+ # Map to store directory nodes by path for easy lookup
109
+ dir_map: Dict[str, DirectoryNode] = {"/": root_node}
110
+
111
+ # Build tree with just folders
112
+ for dir_path in directories:
113
+ parts = [p for p in dir_path.split("/") if p]
114
+ current_path = "/"
115
+
116
+ for i, part in enumerate(parts):
117
+ parent_path = current_path
118
+ # Build the directory path
119
+ current_path = (
120
+ f"{current_path}{part}" if current_path == "/" else f"{current_path}/{part}"
121
+ )
122
+
123
+ # Create directory node if it doesn't exist
124
+ if current_path not in dir_map:
125
+ dir_node = DirectoryNode(
126
+ name=part, directory_path=current_path, type="directory"
127
+ )
128
+ dir_map[current_path] = dir_node
129
+
130
+ # Add to parent's children
131
+ if parent_path in dir_map:
132
+ dir_map[parent_path].children.append(dir_node)
133
+
134
+ return root_node
135
+
92
136
  async def list_directory(
93
137
  self,
94
138
  dir_name: str = "/",
@@ -118,8 +162,13 @@ class DirectoryService:
118
162
  if dir_name != "/" and dir_name.endswith("/"):
119
163
  dir_name = dir_name.rstrip("/")
120
164
 
121
- # Get the full directory tree
122
- root_tree = await self.get_directory_tree()
165
+ # Optimize: Query only entities in the target directory
166
+ # instead of loading the entire tree
167
+ dir_prefix = dir_name.lstrip("/")
168
+ entity_rows = await self.entity_repository.find_by_directory_prefix(dir_prefix)
169
+
170
+ # Build a partial tree from only the relevant entities
171
+ root_tree = self._build_directory_tree_from_entities(entity_rows, dir_name)
123
172
 
124
173
  # Find the target directory node
125
174
  target_node = self._find_directory_node(root_tree, dir_name)
@@ -132,6 +181,78 @@ class DirectoryService:
132
181
 
133
182
  return result
134
183
 
184
+ def _build_directory_tree_from_entities(
185
+ self, entity_rows: Sequence[Entity], root_path: str
186
+ ) -> DirectoryNode:
187
+ """Build a directory tree from a subset of entities.
188
+
189
+ Args:
190
+ entity_rows: Sequence of entity objects to build tree from
191
+ root_path: Root directory path for the tree
192
+
193
+ Returns:
194
+ DirectoryNode representing the tree root
195
+ """
196
+ # Create a root directory node
197
+ root_node = DirectoryNode(name="Root", directory_path=root_path, type="directory")
198
+
199
+ # Map to store directory nodes by path for easy lookup
200
+ dir_map: Dict[str, DirectoryNode] = {root_path: root_node}
201
+
202
+ # First pass: create all directory nodes
203
+ for file in entity_rows:
204
+ # Process directory path components
205
+ parts = [p for p in file.file_path.split("/") if p]
206
+
207
+ # Create directory structure
208
+ current_path = "/"
209
+ for i, part in enumerate(parts[:-1]): # Skip the filename
210
+ parent_path = current_path
211
+ # Build the directory path
212
+ current_path = (
213
+ f"{current_path}{part}" if current_path == "/" else f"{current_path}/{part}"
214
+ )
215
+
216
+ # Create directory node if it doesn't exist
217
+ if current_path not in dir_map:
218
+ dir_node = DirectoryNode(
219
+ name=part, directory_path=current_path, type="directory"
220
+ )
221
+ dir_map[current_path] = dir_node
222
+
223
+ # Add to parent's children
224
+ if parent_path in dir_map:
225
+ dir_map[parent_path].children.append(dir_node)
226
+
227
+ # Second pass: add file nodes to their parent directories
228
+ for file in entity_rows:
229
+ file_name = os.path.basename(file.file_path)
230
+ parent_dir = os.path.dirname(file.file_path)
231
+ directory_path = "/" if parent_dir == "" else f"/{parent_dir}"
232
+
233
+ # Create file node
234
+ file_node = DirectoryNode(
235
+ name=file_name,
236
+ file_path=file.file_path,
237
+ directory_path=f"/{file.file_path}",
238
+ type="file",
239
+ title=file.title,
240
+ permalink=file.permalink,
241
+ entity_id=file.id,
242
+ entity_type=file.entity_type,
243
+ content_type=file.content_type,
244
+ updated_at=file.updated_at,
245
+ )
246
+
247
+ # Add to parent directory's children
248
+ if directory_path in dir_map:
249
+ dir_map[directory_path].children.append(file_node)
250
+ elif root_path in dir_map:
251
+ # Fallback to root if parent not found
252
+ dir_map[root_path].children.append(file_node)
253
+
254
+ return root_node
255
+
135
256
  def _find_directory_node(
136
257
  self, root: DirectoryNode, target_path: str
137
258
  ) -> Optional[DirectoryNode]:
@@ -9,7 +9,12 @@ from loguru import logger
9
9
  from sqlalchemy.exc import IntegrityError
10
10
 
11
11
  from basic_memory.config import ProjectConfig, BasicMemoryConfig
12
- from basic_memory.file_utils import has_frontmatter, parse_frontmatter, remove_frontmatter, dump_frontmatter
12
+ from basic_memory.file_utils import (
13
+ has_frontmatter,
14
+ parse_frontmatter,
15
+ remove_frontmatter,
16
+ dump_frontmatter,
17
+ )
13
18
  from basic_memory.markdown import EntityMarkdown
14
19
  from basic_memory.markdown.entity_parser import EntityParser
15
20
  from basic_memory.markdown.utils import entity_model_from_markdown, schema_to_markdown
@@ -37,6 +42,7 @@ class EntityService(BaseService[EntityModel]):
37
42
  relation_repository: RelationRepository,
38
43
  file_service: FileService,
39
44
  link_resolver: LinkResolver,
45
+ app_config: Optional[BasicMemoryConfig] = None,
40
46
  ):
41
47
  super().__init__(entity_repository)
42
48
  self.observation_repository = observation_repository
@@ -44,8 +50,11 @@ class EntityService(BaseService[EntityModel]):
44
50
  self.entity_parser = entity_parser
45
51
  self.file_service = file_service
46
52
  self.link_resolver = link_resolver
53
+ self.app_config = app_config
47
54
 
48
- async def detect_file_path_conflicts(self, file_path: str) -> List[Entity]:
55
+ async def detect_file_path_conflicts(
56
+ self, file_path: str, skip_check: bool = False
57
+ ) -> List[Entity]:
49
58
  """Detect potential file path conflicts for a given file path.
50
59
 
51
60
  This checks for entities with similar file paths that might cause conflicts:
@@ -56,10 +65,14 @@ class EntityService(BaseService[EntityModel]):
56
65
 
57
66
  Args:
58
67
  file_path: The file path to check for conflicts
68
+ skip_check: If True, skip the check and return empty list (optimization for bulk operations)
59
69
 
60
70
  Returns:
61
71
  List of entities that might conflict with the given file path
62
72
  """
73
+ if skip_check:
74
+ return []
75
+
63
76
  from basic_memory.utils import detect_potential_file_conflicts
64
77
 
65
78
  conflicts = []
@@ -79,7 +92,10 @@ class EntityService(BaseService[EntityModel]):
79
92
  return conflicts
80
93
 
81
94
  async def resolve_permalink(
82
- self, file_path: Permalink | Path, markdown: Optional[EntityMarkdown] = None
95
+ self,
96
+ file_path: Permalink | Path,
97
+ markdown: Optional[EntityMarkdown] = None,
98
+ skip_conflict_check: bool = False,
83
99
  ) -> str:
84
100
  """Get or generate unique permalink for an entity.
85
101
 
@@ -94,7 +110,9 @@ class EntityService(BaseService[EntityModel]):
94
110
  file_path_str = Path(file_path).as_posix()
95
111
 
96
112
  # Check for potential file path conflicts before resolving permalink
97
- conflicts = await self.detect_file_path_conflicts(file_path_str)
113
+ conflicts = await self.detect_file_path_conflicts(
114
+ file_path_str, skip_check=skip_conflict_check
115
+ )
98
116
  if conflicts:
99
117
  logger.warning(
100
118
  f"Detected potential file path conflicts for '{file_path_str}': "
@@ -139,10 +157,11 @@ class EntityService(BaseService[EntityModel]):
139
157
  f"Creating or updating entity: {schema.file_path}, permalink: {schema.permalink}"
140
158
  )
141
159
 
142
- # Try to find existing entity using smart resolution
143
- existing = await self.link_resolver.resolve_link(
144
- schema.file_path
145
- ) or await self.link_resolver.resolve_link(schema.permalink)
160
+ # Try to find existing entity using strict resolution (no fuzzy search)
161
+ # This prevents incorrectly matching similar file paths like "Node A.md" and "Node C.md"
162
+ existing = await self.link_resolver.resolve_link(schema.file_path, strict=True)
163
+ if not existing and schema.permalink:
164
+ existing = await self.link_resolver.resolve_link(schema.permalink, strict=True)
146
165
 
147
166
  if existing:
148
167
  logger.debug(f"Found existing entity: {existing.file_path}")
@@ -189,9 +208,15 @@ class EntityService(BaseService[EntityModel]):
189
208
  relations=[],
190
209
  )
191
210
 
192
- # Get unique permalink (prioritizing content frontmatter)
193
- permalink = await self.resolve_permalink(file_path, content_markdown)
194
- schema._permalink = permalink
211
+ # Get unique permalink (prioritizing content frontmatter) unless disabled
212
+ if self.app_config and self.app_config.disable_permalinks:
213
+ # Use empty string as sentinel to indicate permalinks are disabled
214
+ # The permalink property will return None when it sees empty string
215
+ schema._permalink = ""
216
+ else:
217
+ # Generate and set permalink
218
+ permalink = await self.resolve_permalink(file_path, content_markdown)
219
+ schema._permalink = permalink
195
220
 
196
221
  post = await schema_to_markdown(schema)
197
222
 
@@ -249,15 +274,16 @@ class EntityService(BaseService[EntityModel]):
249
274
  relations=[],
250
275
  )
251
276
 
252
- # Check if we need to update the permalink based on content frontmatter
277
+ # Check if we need to update the permalink based on content frontmatter (unless disabled)
253
278
  new_permalink = entity.permalink # Default to existing
254
- if content_markdown and content_markdown.frontmatter.permalink:
255
- # Resolve permalink with the new content frontmatter
256
- resolved_permalink = await self.resolve_permalink(file_path, content_markdown)
257
- if resolved_permalink != entity.permalink:
258
- new_permalink = resolved_permalink
259
- # Update the schema to use the new permalink
260
- schema._permalink = new_permalink
279
+ if self.app_config and not self.app_config.disable_permalinks:
280
+ if content_markdown and content_markdown.frontmatter.permalink:
281
+ # Resolve permalink with the new content frontmatter
282
+ resolved_permalink = await self.resolve_permalink(file_path, content_markdown)
283
+ if resolved_permalink != entity.permalink:
284
+ new_permalink = resolved_permalink
285
+ # Update the schema to use the new permalink
286
+ schema._permalink = new_permalink
261
287
 
262
288
  # Create post with new content from schema
263
289
  post = await schema_to_markdown(schema)
@@ -417,34 +443,58 @@ class EntityService(BaseService[EntityModel]):
417
443
  # Clear existing relations first
418
444
  await self.relation_repository.delete_outgoing_relations_from_entity(db_entity.id)
419
445
 
420
- # Process each relation
421
- for rel in markdown.relations:
422
- # Resolve the target permalink
423
- target_entity = await self.link_resolver.resolve_link(
424
- rel.target,
425
- )
426
-
427
- # if the target is found, store the id
428
- target_id = target_entity.id if target_entity else None
429
- # if the target is found, store the title, otherwise add the target for a "forward link"
430
- target_name = target_entity.title if target_entity else rel.target
431
-
432
- # Create the relation
433
- relation = Relation(
434
- from_id=db_entity.id,
435
- to_id=target_id,
436
- to_name=target_name,
437
- relation_type=rel.type,
438
- context=rel.context,
439
- )
440
- try:
441
- await self.relation_repository.add(relation)
442
- except IntegrityError:
443
- # Unique constraint violation - relation already exists
444
- logger.debug(
445
- f"Skipping duplicate relation {rel.type} from {db_entity.permalink} target: {rel.target}"
446
+ # Batch resolve all relation targets in parallel
447
+ if markdown.relations:
448
+ import asyncio
449
+
450
+ # Create tasks for all relation lookups
451
+ lookup_tasks = [
452
+ self.link_resolver.resolve_link(rel.target) for rel in markdown.relations
453
+ ]
454
+
455
+ # Execute all lookups in parallel
456
+ resolved_entities = await asyncio.gather(*lookup_tasks, return_exceptions=True)
457
+
458
+ # Process results and create relation records
459
+ relations_to_add = []
460
+ for rel, resolved in zip(markdown.relations, resolved_entities):
461
+ # Handle exceptions from gather and None results
462
+ target_entity: Optional[Entity] = None
463
+ if not isinstance(resolved, Exception):
464
+ # Type narrowing: resolved is Optional[Entity] here, not Exception
465
+ target_entity = resolved # type: ignore
466
+
467
+ # if the target is found, store the id
468
+ target_id = target_entity.id if target_entity else None
469
+ # if the target is found, store the title, otherwise add the target for a "forward link"
470
+ target_name = target_entity.title if target_entity else rel.target
471
+
472
+ # Create the relation
473
+ relation = Relation(
474
+ from_id=db_entity.id,
475
+ to_id=target_id,
476
+ to_name=target_name,
477
+ relation_type=rel.type,
478
+ context=rel.context,
446
479
  )
447
- continue
480
+ relations_to_add.append(relation)
481
+
482
+ # Batch insert all relations
483
+ if relations_to_add:
484
+ try:
485
+ await self.relation_repository.add_all(relations_to_add)
486
+ except IntegrityError:
487
+ # Some relations might be duplicates - fall back to individual inserts
488
+ logger.debug("Batch relation insert failed, trying individual inserts")
489
+ for relation in relations_to_add:
490
+ try:
491
+ await self.relation_repository.add(relation)
492
+ except IntegrityError:
493
+ # Unique constraint violation - relation already exists
494
+ logger.debug(
495
+ f"Skipping duplicate relation {relation.relation_type} from {db_entity.permalink}"
496
+ )
497
+ continue
448
498
 
449
499
  return await self.repository.get_by_file_path(path)
450
500
 
@@ -728,8 +778,10 @@ class EntityService(BaseService[EntityModel]):
728
778
  # 6. Prepare database updates
729
779
  updates = {"file_path": destination_path}
730
780
 
731
- # 7. Update permalink if configured or if entity has null permalink
732
- if app_config.update_permalinks_on_move or old_permalink is None:
781
+ # 7. Update permalink if configured or if entity has null permalink (unless disabled)
782
+ if not app_config.disable_permalinks and (
783
+ app_config.update_permalinks_on_move or old_permalink is None
784
+ ):
733
785
  # Generate new permalink from destination path
734
786
  new_permalink = await self.resolve_permalink(destination_path)
735
787
 
@@ -11,7 +11,10 @@ from loguru import logger
11
11
 
12
12
  from basic_memory import db
13
13
  from basic_memory.config import BasicMemoryConfig
14
- from basic_memory.repository import ProjectRepository
14
+ from basic_memory.models import Project
15
+ from basic_memory.repository import (
16
+ ProjectRepository,
17
+ )
15
18
 
16
19
 
17
20
  async def initialize_database(app_config: BasicMemoryConfig) -> None:
@@ -101,18 +104,20 @@ async def initialize_file_sync(
101
104
  # Get active projects
102
105
  active_projects = await project_repository.get_active_projects()
103
106
 
104
- # First, sync all projects sequentially
105
- for project in active_projects:
107
+ # Start sync for all projects as background tasks (non-blocking)
108
+ async def sync_project_background(project: Project):
109
+ """Sync a single project in the background."""
106
110
  # avoid circular imports
107
- from basic_memory.cli.commands.sync import get_sync_service
108
-
109
- logger.info(f"Starting sync for project: {project.name}")
110
- sync_service = await get_sync_service(project)
111
- sync_dir = Path(project.path)
111
+ from basic_memory.sync.sync_service import get_sync_service
112
112
 
113
+ logger.info(f"Starting background sync for project: {project.name}")
113
114
  try:
115
+ # Create sync service
116
+ sync_service = await get_sync_service(project)
117
+
118
+ sync_dir = Path(project.path)
114
119
  await sync_service.sync(sync_dir, project_name=project.name)
115
- logger.info(f"Sync completed successfully for project: {project.name}")
120
+ logger.info(f"Background sync completed successfully for project: {project.name}")
116
121
 
117
122
  # Mark project as watching for changes after successful sync
118
123
  from basic_memory.services.sync_status_service import sync_status_tracker
@@ -120,12 +125,19 @@ async def initialize_file_sync(
120
125
  sync_status_tracker.start_project_watch(project.name)
121
126
  logger.info(f"Project {project.name} is now watching for changes")
122
127
  except Exception as e: # pragma: no cover
123
- logger.error(f"Error syncing project {project.name}: {e}")
128
+ logger.error(f"Error in background sync for project {project.name}: {e}")
124
129
  # Mark sync as failed for this project
125
130
  from basic_memory.services.sync_status_service import sync_status_tracker
126
131
 
127
132
  sync_status_tracker.fail_project_sync(project.name, str(e))
128
- # Continue with other projects even if one fails
133
+
134
+ # Create background tasks for all project syncs (non-blocking)
135
+ sync_tasks = [
136
+ asyncio.create_task(sync_project_background(project)) for project in active_projects
137
+ ]
138
+ logger.info(f"Created {len(sync_tasks)} background sync tasks")
139
+
140
+ # Don't await the tasks - let them run in background while we continue
129
141
 
130
142
  # Then start the watch service in the background
131
143
  logger.info("Starting watch service for all projects")
@@ -169,9 +181,16 @@ def ensure_initialization(app_config: BasicMemoryConfig) -> None:
169
181
  This is a wrapper for the async initialize_app function that can be
170
182
  called from synchronous code like CLI entry points.
171
183
 
184
+ No-op if app_config.cloud_mode == True. Cloud basic memory manages it's own projects
185
+
172
186
  Args:
173
187
  app_config: The Basic Memory project configuration
174
188
  """
189
+ # Skip initialization in cloud mode - cloud manages its own projects
190
+ if app_config.cloud_mode_enabled:
191
+ logger.debug("Skipping initialization in cloud mode - projects managed by cloud")
192
+ return
193
+
175
194
  try:
176
195
  result = asyncio.run(initialize_app(app_config))
177
196
  logger.info(f"Initialization completed successfully: result={result}")
@@ -21,6 +21,9 @@ from basic_memory.config import WATCH_STATUS_JSON, ConfigManager, get_project_co
21
21
  from basic_memory.utils import generate_permalink
22
22
 
23
23
 
24
+ config = ConfigManager().config
25
+
26
+
24
27
  class ProjectService:
25
28
  """Service for managing Basic Memory projects."""
26
29
 
@@ -77,7 +80,13 @@ class ProjectService:
77
80
  return os.environ.get("BASIC_MEMORY_PROJECT", self.config_manager.default_project)
78
81
 
79
82
  async def list_projects(self) -> Sequence[Project]:
80
- return await self.repository.find_all()
83
+ """List all projects without loading entity relationships.
84
+
85
+ Returns only basic project fields (name, path, etc.) without
86
+ eager loading the entities relationship which could load thousands
87
+ of entities for large knowledge bases.
88
+ """
89
+ return await self.repository.find_all(use_load_options=False)
81
90
 
82
91
  async def get_project(self, name: str) -> Optional[Project]:
83
92
  """Get the file path for a project by name or permalink."""
@@ -85,6 +94,40 @@ class ProjectService:
85
94
  name
86
95
  )
87
96
 
97
+ def _check_nested_paths(self, path1: str, path2: str) -> bool:
98
+ """Check if two paths are nested (one is a prefix of the other).
99
+
100
+ Args:
101
+ path1: First path to compare
102
+ path2: Second path to compare
103
+
104
+ Returns:
105
+ True if one path is nested within the other, False otherwise
106
+
107
+ Examples:
108
+ _check_nested_paths("/foo", "/foo/bar") # True (child under parent)
109
+ _check_nested_paths("/foo/bar", "/foo") # True (parent over child)
110
+ _check_nested_paths("/foo", "/bar") # False (siblings)
111
+ """
112
+ # Normalize paths to ensure proper comparison
113
+ p1 = Path(path1).resolve()
114
+ p2 = Path(path2).resolve()
115
+
116
+ # Check if either path is a parent of the other
117
+ try:
118
+ # Check if p2 is under p1
119
+ p2.relative_to(p1)
120
+ return True
121
+ except ValueError:
122
+ # Not nested in this direction, check the other
123
+ try:
124
+ # Check if p1 is under p2
125
+ p1.relative_to(p2)
126
+ return True
127
+ except ValueError:
128
+ # Not nested in either direction
129
+ return False
130
+
88
131
  async def add_project(self, name: str, path: str, set_default: bool = False) -> None:
89
132
  """Add a new project to the configuration and database.
90
133
 
@@ -94,13 +137,65 @@ class ProjectService:
94
137
  set_default: Whether to set this project as the default
95
138
 
96
139
  Raises:
97
- ValueError: If the project already exists
140
+ ValueError: If the project already exists or path collides with existing project
98
141
  """
99
- if not self.repository: # pragma: no cover
100
- raise ValueError("Repository is required for add_project")
142
+ # If project_root is set, constrain all projects to that directory
143
+ project_root = self.config_manager.config.project_root
144
+ if project_root:
145
+ base_path = Path(project_root)
146
+
147
+ # In cloud mode (when project_root is set), ignore user's path completely
148
+ # and use sanitized project name as the directory name
149
+ # This ensures flat structure: /app/data/test-bisync instead of /app/data/documents/test bisync
150
+ sanitized_name = generate_permalink(name)
151
+
152
+ # Construct path using sanitized project name only
153
+ resolved_path = (base_path / sanitized_name).resolve().as_posix()
154
+
155
+ # Verify the resolved path is actually under project_root
156
+ if not resolved_path.startswith(base_path.resolve().as_posix()):
157
+ raise ValueError(
158
+ f"BASIC_MEMORY_PROJECT_ROOT is set to {project_root}. "
159
+ f"All projects must be created under this directory. Invalid path: {path}"
160
+ )
101
161
 
102
- # Resolve to absolute path
103
- resolved_path = Path(os.path.abspath(os.path.expanduser(path))).as_posix()
162
+ # Check for case-insensitive path collisions with existing projects
163
+ existing_projects = await self.list_projects()
164
+ for existing in existing_projects:
165
+ if (
166
+ existing.path.lower() == resolved_path.lower()
167
+ and existing.path != resolved_path
168
+ ):
169
+ raise ValueError(
170
+ f"Path collision detected: '{resolved_path}' conflicts with existing project "
171
+ f"'{existing.name}' at '{existing.path}'. "
172
+ f"In cloud mode, paths are normalized to lowercase to prevent case-sensitivity issues."
173
+ )
174
+ else:
175
+ resolved_path = Path(os.path.abspath(os.path.expanduser(path))).as_posix()
176
+
177
+ # Check for nested paths with existing projects
178
+ existing_projects = await self.list_projects()
179
+ for existing in existing_projects:
180
+ if self._check_nested_paths(resolved_path, existing.path):
181
+ # Determine which path is nested within which for appropriate error message
182
+ p_new = Path(resolved_path).resolve()
183
+ p_existing = Path(existing.path).resolve()
184
+
185
+ # Check if new path is nested under existing project
186
+ if p_new.is_relative_to(p_existing):
187
+ raise ValueError(
188
+ f"Cannot create project at '{resolved_path}': "
189
+ f"path is nested within existing project '{existing.name}' at '{existing.path}'. "
190
+ f"Projects cannot share directory trees."
191
+ )
192
+ else:
193
+ # Existing project is nested under new path
194
+ raise ValueError(
195
+ f"Cannot create project at '{resolved_path}': "
196
+ f"existing project '{existing.name}' at '{existing.path}' is nested within this path. "
197
+ f"Projects cannot share directory trees."
198
+ )
104
199
 
105
200
  # First add to config file (this will validate the project doesn't exist)
106
201
  project_config = self.config_manager.add_project(name, resolved_path)
@@ -170,15 +265,6 @@ class ProjectService:
170
265
 
171
266
  logger.info(f"Project '{name}' set as default in configuration and database")
172
267
 
173
- # Refresh MCP session to pick up the new default project
174
- try:
175
- from basic_memory.mcp.project_session import session
176
-
177
- session.refresh_from_config()
178
- except ImportError: # pragma: no cover
179
- # MCP components might not be available in all contexts (e.g., CLI-only usage)
180
- logger.debug("MCP session not available, skipping session refresh")
181
-
182
268
  async def _ensure_single_default_project(self) -> None:
183
269
  """Ensure only one project has is_default=True.
184
270
 
@@ -300,15 +386,6 @@ class ProjectService:
300
386
 
301
387
  logger.info("Project synchronization complete")
302
388
 
303
- # Refresh MCP session to ensure it's in sync with current config
304
- try:
305
- from basic_memory.mcp.project_session import session
306
-
307
- session.refresh_from_config()
308
- except ImportError:
309
- # MCP components might not be available in all contexts
310
- logger.debug("MCP session not available, skipping session refresh")
311
-
312
389
  async def move_project(self, name: str, new_path: str) -> None:
313
390
  """Move a project to a new location.
314
391