basic-memory 0.14.3-py3-none-any.whl → 0.15.0-py3-none-any.whl

This diff compares the contents of publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.

Potentially problematic release.

Files changed (90)
  1. basic_memory/__init__.py +1 -1
  2. basic_memory/alembic/versions/a1b2c3d4e5f6_fix_project_foreign_keys.py +49 -0
  3. basic_memory/api/app.py +10 -4
  4. basic_memory/api/routers/knowledge_router.py +25 -8
  5. basic_memory/api/routers/project_router.py +99 -4
  6. basic_memory/api/routers/resource_router.py +3 -3
  7. basic_memory/cli/app.py +9 -28
  8. basic_memory/cli/auth.py +277 -0
  9. basic_memory/cli/commands/cloud/__init__.py +5 -0
  10. basic_memory/cli/commands/cloud/api_client.py +112 -0
  11. basic_memory/cli/commands/cloud/bisync_commands.py +818 -0
  12. basic_memory/cli/commands/cloud/core_commands.py +288 -0
  13. basic_memory/cli/commands/cloud/mount_commands.py +295 -0
  14. basic_memory/cli/commands/cloud/rclone_config.py +288 -0
  15. basic_memory/cli/commands/cloud/rclone_installer.py +198 -0
  16. basic_memory/cli/commands/command_utils.py +60 -0
  17. basic_memory/cli/commands/import_memory_json.py +0 -4
  18. basic_memory/cli/commands/mcp.py +16 -4
  19. basic_memory/cli/commands/project.py +141 -145
  20. basic_memory/cli/commands/status.py +34 -22
  21. basic_memory/cli/commands/sync.py +45 -228
  22. basic_memory/cli/commands/tool.py +87 -16
  23. basic_memory/cli/main.py +1 -0
  24. basic_memory/config.py +96 -20
  25. basic_memory/db.py +104 -3
  26. basic_memory/deps.py +20 -3
  27. basic_memory/file_utils.py +89 -0
  28. basic_memory/ignore_utils.py +295 -0
  29. basic_memory/importers/chatgpt_importer.py +1 -1
  30. basic_memory/importers/utils.py +2 -2
  31. basic_memory/markdown/entity_parser.py +2 -2
  32. basic_memory/markdown/markdown_processor.py +2 -2
  33. basic_memory/markdown/plugins.py +39 -21
  34. basic_memory/markdown/utils.py +1 -1
  35. basic_memory/mcp/async_client.py +22 -10
  36. basic_memory/mcp/project_context.py +141 -0
  37. basic_memory/mcp/prompts/ai_assistant_guide.py +49 -4
  38. basic_memory/mcp/prompts/continue_conversation.py +1 -1
  39. basic_memory/mcp/prompts/recent_activity.py +116 -32
  40. basic_memory/mcp/prompts/search.py +1 -1
  41. basic_memory/mcp/prompts/utils.py +11 -4
  42. basic_memory/mcp/resources/ai_assistant_guide.md +179 -41
  43. basic_memory/mcp/resources/project_info.py +20 -6
  44. basic_memory/mcp/server.py +0 -37
  45. basic_memory/mcp/tools/__init__.py +5 -6
  46. basic_memory/mcp/tools/build_context.py +39 -19
  47. basic_memory/mcp/tools/canvas.py +19 -8
  48. basic_memory/mcp/tools/chatgpt_tools.py +178 -0
  49. basic_memory/mcp/tools/delete_note.py +67 -34
  50. basic_memory/mcp/tools/edit_note.py +55 -39
  51. basic_memory/mcp/tools/headers.py +44 -0
  52. basic_memory/mcp/tools/list_directory.py +18 -8
  53. basic_memory/mcp/tools/move_note.py +119 -41
  54. basic_memory/mcp/tools/project_management.py +77 -229
  55. basic_memory/mcp/tools/read_content.py +28 -12
  56. basic_memory/mcp/tools/read_note.py +97 -57
  57. basic_memory/mcp/tools/recent_activity.py +441 -42
  58. basic_memory/mcp/tools/search.py +82 -70
  59. basic_memory/mcp/tools/sync_status.py +5 -4
  60. basic_memory/mcp/tools/utils.py +19 -0
  61. basic_memory/mcp/tools/view_note.py +31 -6
  62. basic_memory/mcp/tools/write_note.py +65 -14
  63. basic_memory/models/knowledge.py +19 -2
  64. basic_memory/models/project.py +6 -2
  65. basic_memory/repository/entity_repository.py +31 -84
  66. basic_memory/repository/project_repository.py +1 -1
  67. basic_memory/repository/relation_repository.py +13 -0
  68. basic_memory/repository/repository.py +2 -2
  69. basic_memory/repository/search_repository.py +9 -3
  70. basic_memory/schemas/__init__.py +6 -0
  71. basic_memory/schemas/base.py +70 -12
  72. basic_memory/schemas/cloud.py +46 -0
  73. basic_memory/schemas/memory.py +99 -18
  74. basic_memory/schemas/project_info.py +9 -10
  75. basic_memory/schemas/sync_report.py +48 -0
  76. basic_memory/services/context_service.py +35 -11
  77. basic_memory/services/directory_service.py +7 -0
  78. basic_memory/services/entity_service.py +82 -52
  79. basic_memory/services/initialization.py +30 -11
  80. basic_memory/services/project_service.py +23 -33
  81. basic_memory/sync/sync_service.py +148 -24
  82. basic_memory/sync/watch_service.py +128 -44
  83. basic_memory/utils.py +181 -109
  84. {basic_memory-0.14.3.dist-info → basic_memory-0.15.0.dist-info}/METADATA +26 -96
  85. basic_memory-0.15.0.dist-info/RECORD +147 -0
  86. basic_memory/mcp/project_session.py +0 -120
  87. basic_memory-0.14.3.dist-info/RECORD +0 -132
  88. {basic_memory-0.14.3.dist-info → basic_memory-0.15.0.dist-info}/WHEEL +0 -0
  89. {basic_memory-0.14.3.dist-info → basic_memory-0.15.0.dist-info}/entry_points.txt +0 -0
  90. {basic_memory-0.14.3.dist-info → basic_memory-0.15.0.dist-info}/licenses/LICENSE +0 -0

basic_memory/schemas/memory.py

@@ -1,10 +1,10 @@
 """Schemas for memory context."""
 
 from datetime import datetime
-from typing import List, Optional, Annotated, Sequence
+from typing import List, Optional, Annotated, Sequence, Literal, Union, Dict
 
 from annotated_types import MinLen, MaxLen
-from pydantic import BaseModel, Field, BeforeValidator, TypeAdapter
+from pydantic import BaseModel, Field, BeforeValidator, TypeAdapter, field_serializer
 
 from basic_memory.schemas.search import SearchItemType
 
@@ -26,6 +26,7 @@ def validate_memory_url_path(path: str) -> bool:
     >>> validate_memory_url_path("invalid://test")  # Contains protocol
     False
     """
+    # Empty paths are not valid
     if not path or not path.strip():
         return False
 
@@ -68,7 +69,13 @@ def normalize_memory_url(url: str | None) -> str:
         ValueError: Invalid memory URL path: 'memory//test' contains double slashes
     """
     if not url:
-        return ""
+        raise ValueError("Memory URL cannot be empty")
+
+    # Strip whitespace for consistency
+    url = url.strip()
+
+    if not url:
+        raise ValueError("Memory URL cannot be empty or whitespace")
 
     clean_path = url.removeprefix("memory://")
 
@@ -79,8 +86,6 @@ def normalize_memory_url(url: str | None) -> str:
         raise ValueError(f"Invalid memory URL path: '{clean_path}' contains protocol scheme")
     elif "//" in clean_path:
         raise ValueError(f"Invalid memory URL path: '{clean_path}' contains double slashes")
-    elif not clean_path.strip():
-        raise ValueError("Memory URL path cannot be empty or whitespace")
    else:
         raise ValueError(f"Invalid memory URL path: '{clean_path}' contains invalid characters")
 
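
The 0.14.x behavior of returning `""` for an empty URL is gone: empty or whitespace-only input now raises, and other input is stripped before validation. A minimal sketch of the new contract (values illustrative; the normalized return format follows the function's docstring):

```python
from basic_memory.schemas.memory import normalize_memory_url

normalize_memory_url("  specs/search  ")  # whitespace stripped, then validated

for bad in (None, "", "   "):
    try:
        normalize_memory_url(bad)
    except ValueError as err:
        print(err)  # "Memory URL cannot be empty" / "... empty or whitespace"
```
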
@@ -118,37 +123,55 @@ def memory_url_path(url: memory_url) -> str:  # pyright: ignore
 class EntitySummary(BaseModel):
     """Simplified entity representation."""
 
-    type: str = "entity"
+    type: Literal["entity"] = "entity"
     permalink: Optional[str]
     title: str
     content: Optional[str] = None
     file_path: str
-    created_at: datetime
+    created_at: Annotated[
+        datetime, Field(json_schema_extra={"type": "string", "format": "date-time"})
+    ]
+
+    @field_serializer("created_at")
+    def serialize_created_at(self, dt: datetime) -> str:
+        return dt.isoformat()
 
 
 class RelationSummary(BaseModel):
     """Simplified relation representation."""
 
-    type: str = "relation"
+    type: Literal["relation"] = "relation"
     title: str
     file_path: str
     permalink: str
     relation_type: str
     from_entity: Optional[str] = None
     to_entity: Optional[str] = None
-    created_at: datetime
+    created_at: Annotated[
+        datetime, Field(json_schema_extra={"type": "string", "format": "date-time"})
+    ]
+
+    @field_serializer("created_at")
+    def serialize_created_at(self, dt: datetime) -> str:
+        return dt.isoformat()
 
 
 class ObservationSummary(BaseModel):
     """Simplified observation representation."""
 
-    type: str = "observation"
+    type: Literal["observation"] = "observation"
     title: str
     file_path: str
     permalink: str
     category: str
     content: str
-    created_at: datetime
+    created_at: Annotated[
+        datetime, Field(json_schema_extra={"type": "string", "format": "date-time"})
+    ]
+
+    @field_serializer("created_at")
+    def serialize_created_at(self, dt: datetime) -> str:
+        return dt.isoformat()
 
 
 class MemoryMetadata(BaseModel):
@@ -158,28 +181,37 @@ class MemoryMetadata(BaseModel):
     types: Optional[List[SearchItemType]] = None
     depth: int
     timeframe: Optional[str] = None
-    generated_at: datetime
+    generated_at: Annotated[
+        datetime, Field(json_schema_extra={"type": "string", "format": "date-time"})
+    ]
     primary_count: Optional[int] = None  # Changed field name
     related_count: Optional[int] = None  # Changed field name
     total_results: Optional[int] = None  # For backward compatibility
     total_relations: Optional[int] = None
     total_observations: Optional[int] = None
 
+    @field_serializer("generated_at")
+    def serialize_generated_at(self, dt: datetime) -> str:
+        return dt.isoformat()
+
 
 class ContextResult(BaseModel):
     """Context result containing a primary item with its observations and related items."""
 
-    primary_result: EntitySummary | RelationSummary | ObservationSummary = Field(
-        description="Primary item"
-    )
+    primary_result: Annotated[
+        Union[EntitySummary, RelationSummary, ObservationSummary],
+        Field(discriminator="type", description="Primary item"),
+    ]
 
     observations: Sequence[ObservationSummary] = Field(
         description="Observations belonging to this entity", default_factory=list
     )
 
-    related_results: Sequence[EntitySummary | RelationSummary | ObservationSummary] = Field(
-        description="Related items", default_factory=list
-    )
+    related_results: Sequence[
+        Annotated[
+            Union[EntitySummary, RelationSummary, ObservationSummary], Field(discriminator="type")
+        ]
+    ] = Field(description="Related items", default_factory=list)
 
 
 class GraphContext(BaseModel):
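
Because `type` is now a `Literal` used as a Pydantic discriminator, a serialized `ContextResult` round-trips each union member to the right summary class instead of the first one that happens to validate. A small sketch (field values illustrative):

```python
from basic_memory.schemas.memory import ContextResult

payload = {
    "primary_result": {
        "type": "observation",  # discriminator selects ObservationSummary
        "title": "Design note",
        "file_path": "notes/design.md",
        "permalink": "notes/design",
        "category": "design",
        "content": "Prefer strict resolution",
        "created_at": "2025-01-01T00:00:00",
    }
}
result = ContextResult.model_validate(payload)
print(type(result.primary_result).__name__)  # ObservationSummary
```
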
@@ -195,3 +227,52 @@ class GraphContext(BaseModel):
 
     page: Optional[int] = None
     page_size: Optional[int] = None
+
+
+class ActivityStats(BaseModel):
+    """Statistics about activity across all projects."""
+
+    total_projects: int
+    active_projects: int = Field(description="Projects with activity in timeframe")
+    most_active_project: Optional[str] = None
+    total_items: int = Field(description="Total items across all projects")
+    total_entities: int = 0
+    total_relations: int = 0
+    total_observations: int = 0
+
+
+class ProjectActivity(BaseModel):
+    """Activity summary for a single project."""
+
+    project_name: str
+    project_path: str
+    activity: GraphContext = Field(description="The actual activity data for this project")
+    item_count: int = Field(description="Total items in this project's activity")
+    last_activity: Optional[
+        Annotated[datetime, Field(json_schema_extra={"type": "string", "format": "date-time"})]
+    ] = Field(default=None, description="Most recent activity timestamp")
+    active_folders: List[str] = Field(default_factory=list, description="Most active folders")
+
+    @field_serializer("last_activity")
+    def serialize_last_activity(self, dt: Optional[datetime]) -> Optional[str]:
+        return dt.isoformat() if dt else None
+
+
+class ProjectActivitySummary(BaseModel):
+    """Summary of activity across all projects."""
+
+    projects: Dict[str, ProjectActivity] = Field(
+        description="Activity per project, keyed by project name"
+    )
+    summary: ActivityStats
+    timeframe: str = Field(description="The timeframe used for the query")
+    generated_at: Annotated[
+        datetime, Field(json_schema_extra={"type": "string", "format": "date-time"})
+    ]
+    guidance: Optional[str] = Field(
+        default=None, description="Assistant guidance for project selection and session management"
+    )
+
+    @field_serializer("generated_at")
+    def serialize_generated_at(self, dt: datetime) -> str:
+        return dt.isoformat()
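
The `field_serializer` hooks exist so `model_dump()` emits ISO-8601 strings rather than `datetime` objects, matching the advertised JSON schema. For instance (values illustrative):

```python
from datetime import datetime
from basic_memory.schemas.memory import ActivityStats, ProjectActivitySummary

summary = ProjectActivitySummary(
    projects={},
    summary=ActivityStats(total_projects=1, active_projects=1, total_items=0),
    timeframe="7d",
    generated_at=datetime(2025, 1, 1, 12, 0),
)
print(summary.model_dump()["generated_at"])  # 2025-01-01T12:00:00
```
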

basic_memory/schemas/project_info.py

@@ -2,6 +2,7 @@
 
 import os
 from datetime import datetime
+from pathlib import Path
 from typing import Dict, List, Optional, Any
 
 from pydantic import Field, BaseModel
@@ -78,16 +79,6 @@ class SystemStatus(BaseModel):
     timestamp: datetime = Field(description="Timestamp when the information was collected")
 
 
-class ProjectDetail(BaseModel):
-    """Detailed information about a project."""
-
-    path: str = Field(description="Path to the project directory")
-    active: bool = Field(description="Whether the project is active")
-    id: Optional[int] = Field(description="Database ID of the project if available")
-    is_default: bool = Field(description="Whether this is the default project")
-    permalink: str = Field(description="URL-friendly identifier for the project")
-
-
 class ProjectInfoResponse(BaseModel):
     """Response for the project_info tool."""
 
@@ -190,6 +181,14 @@ class ProjectItem(BaseModel):
     def permalink(self) -> str:  # pragma: no cover
         return generate_permalink(self.name)
 
+    @property
+    def home(self) -> Path:  # pragma: no cover
+        return Path(self.name)
+
+    @property
+    def project_url(self) -> str:  # pragma: no cover
+        return f"/{generate_permalink(self.name)}"
+
 
 class ProjectList(BaseModel):
     """Response model for listing projects."""

basic_memory/schemas/sync_report.py (new file)

@@ -0,0 +1,48 @@
+"""Pydantic schemas for sync report responses."""
+
+from typing import TYPE_CHECKING, Dict, Set
+
+from pydantic import BaseModel, Field
+
+# avoid circular imports
+if TYPE_CHECKING:
+    from basic_memory.sync.sync_service import SyncReport
+
+
+class SyncReportResponse(BaseModel):
+    """Report of file changes found compared to database state.
+
+    Used for API responses when scanning or syncing files.
+    """
+
+    new: Set[str] = Field(default_factory=set, description="Files on disk but not in database")
+    modified: Set[str] = Field(default_factory=set, description="Files with different checksums")
+    deleted: Set[str] = Field(default_factory=set, description="Files in database but not on disk")
+    moves: Dict[str, str] = Field(
+        default_factory=dict, description="Files moved (old_path -> new_path)"
+    )
+    checksums: Dict[str, str] = Field(
+        default_factory=dict, description="Current file checksums (path -> checksum)"
+    )
+    total: int = Field(description="Total number of changes")
+
+    @classmethod
+    def from_sync_report(cls, report: "SyncReport") -> "SyncReportResponse":
+        """Convert SyncReport dataclass to Pydantic model.
+
+        Args:
+            report: SyncReport dataclass from sync service
+
+        Returns:
+            SyncReportResponse with same data
+        """
+        return cls(
+            new=report.new,
+            modified=report.modified,
+            deleted=report.deleted,
+            moves=report.moves,
+            checksums=report.checksums,
+            total=report.total,
+        )
+
+    model_config = {"from_attributes": True}
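
`from_sync_report` is the seam between the sync service's dataclass and the API layer; in JSON mode the sets serialize as lists. A sketch with illustrative values:

```python
from basic_memory.schemas.sync_report import SyncReportResponse

response = SyncReportResponse(
    new={"notes/new-idea.md"},
    modified={"specs/search.md"},
    moves={"old/path.md": "new/path.md"},
    checksums={"specs/search.md": "abc123"},
    total=3,
)
print(response.model_dump(mode="json")["new"])  # ["notes/new-idea.md"]
```
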

basic_memory/services/context_service.py

@@ -100,20 +100,30 @@ class ContextService:
             f"Building context for URI: '{memory_url}' depth: '{depth}' since: '{since}' limit: '{limit}' offset: '{offset}' max_related: '{max_related}'"
         )
 
+        normalized_path: Optional[str] = None
         if memory_url:
             path = memory_url_path(memory_url)
-            # Pattern matching - use search
-            if "*" in path:
-                logger.debug(f"Pattern search for '{path}'")
+            # Check for wildcards before normalization
+            has_wildcard = "*" in path
+
+            if has_wildcard:
+                # For wildcard patterns, normalize each segment separately to preserve the *
+                parts = path.split("*")
+                normalized_parts = [
+                    generate_permalink(part, split_extension=False) if part else ""
+                    for part in parts
+                ]
+                normalized_path = "*".join(normalized_parts)
+                logger.debug(f"Pattern search for '{normalized_path}'")
                 primary = await self.search_repository.search(
-                    permalink_match=path, limit=limit, offset=offset
+                    permalink_match=normalized_path, limit=limit, offset=offset
                 )
-
-            # Direct lookup for exact path
             else:
-                logger.debug(f"Direct lookup for '{path}'")
+                # For exact paths, normalize the whole thing
+                normalized_path = generate_permalink(path, split_extension=False)
+                logger.debug(f"Direct lookup for '{normalized_path}'")
                 primary = await self.search_repository.search(
-                    permalink=path, limit=limit, offset=offset
+                    permalink=normalized_path, limit=limit, offset=offset
                 )
         else:
             logger.debug(f"Build context for '{types}'")
@@ -151,7 +161,7 @@ class ContextService:
 
         # Create metadata dataclass
         metadata = ContextMetadata(
-            uri=memory_url_path(memory_url) if memory_url else None,
+            uri=normalized_path if memory_url else None,
             types=types,
             depth=depth,
             timeframe=since.isoformat() if since else None,
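
Splitting on `*` and normalizing each segment keeps the wildcard itself out of `generate_permalink`'s hands. The transformation in isolation (a sketch; exact output depends on `generate_permalink`'s rules):

```python
def normalize_pattern(path: str, permalink_fn) -> str:
    """Normalize every non-wildcard segment, preserving each '*'."""
    parts = path.split("*")
    return "*".join(permalink_fn(part) if part else "" for part in parts)

# e.g. normalize_pattern("My Specs/*", generate_permalink)
# would yield something like "my-specs/*" rather than mangling the "*"
```
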
@@ -245,8 +255,12 @@ class ContextService:
         # For compatibility with the old query, we still need this for filtering
         values = ", ".join([f"('{t}', {i})" for t, i in type_id_pairs])
 
-        # Parameters for bindings
-        params = {"max_depth": max_depth, "max_results": max_results}
+        # Parameters for bindings - include project_id for security filtering
+        params = {
+            "max_depth": max_depth,
+            "max_results": max_results,
+            "project_id": self.search_repository.project_id,
+        }
 
         # Build date and timeframe filters conditionally based on since parameter
         if since:
@@ -259,6 +273,10 @@ class ContextService:
             relation_date_filter = ""
             timeframe_condition = ""
 
+        # Add project filtering for security - ensure all entities and relations belong to the same project
+        project_filter = "AND e.project_id = :project_id"
+        relation_project_filter = "AND e_from.project_id = :project_id"
+
         # Use a CTE that operates directly on entity and relation tables
         # This avoids the overhead of the search_index virtual table
         query = text(f"""
@@ -284,6 +302,7 @@ class ContextService:
             FROM entity e
             WHERE e.id IN ({entity_id_values})
             {date_filter}
+            {project_filter}
 
             UNION ALL
 
@@ -314,8 +333,12 @@ class ContextService:
             JOIN entity e_from ON (
                 r.from_id = e_from.id
                 {relation_date_filter}
+                {relation_project_filter}
             )
+            LEFT JOIN entity e_to ON (r.to_id = e_to.id)
             WHERE eg.depth < :max_depth
+            -- Ensure to_entity (if exists) also belongs to same project
+            AND (r.to_id IS NULL OR e_to.project_id = :project_id)
 
             UNION ALL
 
@@ -348,6 +371,7 @@ class ContextService:
                     ELSE eg.from_id
                 END
                 {date_filter}
+                {project_filter}
             )
             WHERE eg.depth < :max_depth
             -- Only include entities connected by relations within timeframe if specified
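
The `LEFT JOIN` on `e_to` paired with `(r.to_id IS NULL OR e_to.project_id = :project_id)` keeps unresolved forward links (relations whose target has no entity row yet) visible while fencing resolved targets to the requesting project. The predicate, restated in Python:

```python
def relation_visible(to_id, to_project_id, project_id) -> bool:
    # Forward links with no target row survive; resolved targets
    # must belong to the same project as the traversal.
    return to_id is None or to_project_id == project_id
```
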

basic_memory/services/directory_service.py

@@ -106,8 +106,15 @@ class DirectoryService:
             List of DirectoryNode objects matching the criteria
         """
         # Normalize directory path
+        # Strip ./ prefix if present (handles relative path notation)
+        if dir_name.startswith("./"):
+            dir_name = dir_name[2:]  # Remove "./" prefix
+
+        # Ensure path starts with "/"
         if not dir_name.startswith("/"):
             dir_name = f"/{dir_name}"
+
+        # Remove trailing slashes except for root
         if dir_name != "/" and dir_name.endswith("/"):
             dir_name = dir_name.rstrip("/")
 

basic_memory/services/entity_service.py

@@ -9,7 +9,12 @@ from loguru import logger
 from sqlalchemy.exc import IntegrityError
 
 from basic_memory.config import ProjectConfig, BasicMemoryConfig
-from basic_memory.file_utils import has_frontmatter, parse_frontmatter, remove_frontmatter
+from basic_memory.file_utils import (
+    has_frontmatter,
+    parse_frontmatter,
+    remove_frontmatter,
+    dump_frontmatter,
+)
 from basic_memory.markdown import EntityMarkdown
 from basic_memory.markdown.entity_parser import EntityParser
 from basic_memory.markdown.utils import entity_model_from_markdown, schema_to_markdown
@@ -37,6 +42,7 @@ class EntityService(BaseService[EntityModel]):
         relation_repository: RelationRepository,
         file_service: FileService,
         link_resolver: LinkResolver,
+        app_config: Optional[BasicMemoryConfig] = None,
     ):
         super().__init__(entity_repository)
         self.observation_repository = observation_repository
@@ -44,6 +50,7 @@ class EntityService(BaseService[EntityModel]):
         self.entity_parser = entity_parser
         self.file_service = file_service
         self.link_resolver = link_resolver
+        self.app_config = app_config
 
     async def detect_file_path_conflicts(self, file_path: str) -> List[Entity]:
         """Detect potential file path conflicts for a given file path.
@@ -91,7 +98,7 @@ class EntityService(BaseService[EntityModel]):
 
         Enhanced to detect and handle character-related conflicts.
         """
-        file_path_str = str(file_path)
+        file_path_str = Path(file_path).as_posix()
 
         # Check for potential file path conflicts before resolving permalink
         conflicts = await self.detect_file_path_conflicts(file_path_str)
@@ -119,7 +126,7 @@ class EntityService(BaseService[EntityModel]):
         if markdown and markdown.frontmatter.permalink:
             desired_permalink = markdown.frontmatter.permalink
         else:
-            desired_permalink = generate_permalink(file_path)
+            desired_permalink = generate_permalink(file_path_str)
 
         # Make unique if needed - enhanced to handle character conflicts
         permalink = desired_permalink
@@ -139,10 +146,11 @@ class EntityService(BaseService[EntityModel]):
             f"Creating or updating entity: {schema.file_path}, permalink: {schema.permalink}"
         )
 
-        # Try to find existing entity using smart resolution
-        existing = await self.link_resolver.resolve_link(
-            schema.file_path
-        ) or await self.link_resolver.resolve_link(schema.permalink)
+        # Try to find existing entity using strict resolution (no fuzzy search)
+        # This prevents incorrectly matching similar file paths like "Node A.md" and "Node C.md"
+        existing = await self.link_resolver.resolve_link(schema.file_path, strict=True)
+        if not existing and schema.permalink:
+            existing = await self.link_resolver.resolve_link(schema.permalink, strict=True)
 
         if existing:
             logger.debug(f"Found existing entity: {existing.file_path}")
@@ -189,14 +197,20 @@ class EntityService(BaseService[EntityModel]):
             relations=[],
         )
 
-        # Get unique permalink (prioritizing content frontmatter)
-        permalink = await self.resolve_permalink(file_path, content_markdown)
-        schema._permalink = permalink
+        # Get unique permalink (prioritizing content frontmatter) unless disabled
+        if self.app_config and self.app_config.disable_permalinks:
+            # Use empty string as sentinel to indicate permalinks are disabled
+            # The permalink property will return None when it sees empty string
+            schema._permalink = ""
+        else:
+            # Generate and set permalink
+            permalink = await self.resolve_permalink(file_path, content_markdown)
+            schema._permalink = permalink
 
         post = await schema_to_markdown(schema)
 
         # write file
-        final_content = frontmatter.dumps(post, sort_keys=False)
+        final_content = dump_frontmatter(post)
         checksum = await self.file_service.write_file(file_path, final_content)
 
         # parse entity from file
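
The comment implies the schema's `permalink` property translates the `""` sentinel back to `None`. That property isn't shown in this diff; a hypothetical sketch of the expected shape:

```python
@property
def permalink(self) -> Optional[str]:
    # "" is the disable_permalinks sentinel stored in _permalink;
    # surface it as None so callers see "no permalink".
    return self._permalink if self._permalink else None
```
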
@@ -249,15 +263,16 @@ class EntityService(BaseService[EntityModel]):
             relations=[],
         )
 
-        # Check if we need to update the permalink based on content frontmatter
+        # Check if we need to update the permalink based on content frontmatter (unless disabled)
         new_permalink = entity.permalink  # Default to existing
-        if content_markdown and content_markdown.frontmatter.permalink:
-            # Resolve permalink with the new content frontmatter
-            resolved_permalink = await self.resolve_permalink(file_path, content_markdown)
-            if resolved_permalink != entity.permalink:
-                new_permalink = resolved_permalink
-                # Update the schema to use the new permalink
-                schema._permalink = new_permalink
+        if self.app_config and not self.app_config.disable_permalinks:
+            if content_markdown and content_markdown.frontmatter.permalink:
+                # Resolve permalink with the new content frontmatter
+                resolved_permalink = await self.resolve_permalink(file_path, content_markdown)
+                if resolved_permalink != entity.permalink:
+                    new_permalink = resolved_permalink
+                    # Update the schema to use the new permalink
+                    schema._permalink = new_permalink
 
         # Create post with new content from schema
         post = await schema_to_markdown(schema)
@@ -273,7 +288,7 @@ class EntityService(BaseService[EntityModel]):
         merged_post = frontmatter.Post(post.content, **existing_markdown.frontmatter.metadata)
 
         # write file
-        final_content = frontmatter.dumps(merged_post, sort_keys=False)
+        final_content = dump_frontmatter(merged_post)
         checksum = await self.file_service.write_file(file_path, final_content)
 
         # parse entity from file
@@ -283,7 +298,7 @@ class EntityService(BaseService[EntityModel]):
         entity = await self.update_entity_and_observations(file_path, entity_markdown)
 
         # add relations
-        await self.update_entity_relations(str(file_path), entity_markdown)
+        await self.update_entity_relations(file_path.as_posix(), entity_markdown)
 
         # Set final checksum to match file
         entity = await self.repository.update(entity.id, {"checksum": checksum})
@@ -374,7 +389,7 @@ class EntityService(BaseService[EntityModel]):
         """
         logger.debug(f"Updating entity and observations: {file_path}")
 
-        db_entity = await self.repository.get_by_file_path(str(file_path))
+        db_entity = await self.repository.get_by_file_path(file_path.as_posix())
 
         # Clear observations for entity
         await self.observation_repository.delete_by_fields(entity_id=db_entity.id)
@@ -417,34 +432,47 @@ class EntityService(BaseService[EntityModel]):
         # Clear existing relations first
         await self.relation_repository.delete_outgoing_relations_from_entity(db_entity.id)
 
-        # Process each relation
-        for rel in markdown.relations:
-            # Resolve the target permalink
-            target_entity = await self.link_resolver.resolve_link(
-                rel.target,
-            )
-
-            # if the target is found, store the id
-            target_id = target_entity.id if target_entity else None
-            # if the target is found, store the title, otherwise add the target for a "forward link"
-            target_name = target_entity.title if target_entity else rel.target
-
-            # Create the relation
-            relation = Relation(
-                from_id=db_entity.id,
-                to_id=target_id,
-                to_name=target_name,
-                relation_type=rel.type,
-                context=rel.context,
-            )
-            try:
-                await self.relation_repository.add(relation)
-            except IntegrityError:
-                # Unique constraint violation - relation already exists
-                logger.debug(
-                    f"Skipping duplicate relation {rel.type} from {db_entity.permalink} target: {rel.target}"
+        # Batch resolve all relation targets in parallel
+        if markdown.relations:
+            import asyncio
+
+            # Create tasks for all relation lookups
+            lookup_tasks = [
+                self.link_resolver.resolve_link(rel.target) for rel in markdown.relations
+            ]
+
+            # Execute all lookups in parallel
+            resolved_entities = await asyncio.gather(*lookup_tasks, return_exceptions=True)
+
+            # Process results and create relation records
+            for rel, resolved in zip(markdown.relations, resolved_entities):
+                # Handle exceptions from gather and None results
+                target_entity: Optional[Entity] = None
+                if not isinstance(resolved, Exception):
+                    # Type narrowing: resolved is Optional[Entity] here, not Exception
+                    target_entity = resolved  # type: ignore
+
+                # if the target is found, store the id
+                target_id = target_entity.id if target_entity else None
+                # if the target is found, store the title, otherwise add the target for a "forward link"
+                target_name = target_entity.title if target_entity else rel.target
+
+                # Create the relation
+                relation = Relation(
+                    from_id=db_entity.id,
+                    to_id=target_id,
+                    to_name=target_name,
+                    relation_type=rel.type,
+                    context=rel.context,
                 )
-                continue
+                try:
+                    await self.relation_repository.add(relation)
+                except IntegrityError:
+                    # Unique constraint violation - relation already exists
+                    logger.debug(
+                        f"Skipping duplicate relation {rel.type} from {db_entity.permalink} target: {rel.target}"
+                    )
+                    continue
 
         return await self.repository.get_by_file_path(path)
 
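
`asyncio.gather(..., return_exceptions=True)` returns results positionally, exceptions included, so the `zip` keeps each outcome paired with its relation and one failed lookup cannot abort the batch. A standalone sketch:

```python
import asyncio

async def resolve(target: str) -> str | None:
    # Stand-in for link_resolver.resolve_link: may return None or raise.
    if target == "boom":
        raise RuntimeError("lookup failed")
    return None if target == "missing" else target.upper()

async def main() -> None:
    targets = ["node-a", "missing", "boom"]
    results = await asyncio.gather(*(resolve(t) for t in targets), return_exceptions=True)
    for target, resolved in zip(targets, results):
        entity = None if isinstance(resolved, BaseException) else resolved
        print(target, "->", entity)  # node-a -> NODE-A; missing/boom -> None

asyncio.run(main())
```
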
@@ -498,7 +526,7 @@ class EntityService(BaseService[EntityModel]):
 
         # Update entity and its relationships
         entity = await self.update_entity_and_observations(file_path, entity_markdown)
-        await self.update_entity_relations(str(file_path), entity_markdown)
+        await self.update_entity_relations(file_path.as_posix(), entity_markdown)
 
         # Set final checksum to match file
         entity = await self.repository.update(entity.id, {"checksum": checksum})
@@ -728,8 +756,10 @@ class EntityService(BaseService[EntityModel]):
         # 6. Prepare database updates
         updates = {"file_path": destination_path}
 
-        # 7. Update permalink if configured or if entity has null permalink
-        if app_config.update_permalinks_on_move or old_permalink is None:
+        # 7. Update permalink if configured or if entity has null permalink (unless disabled)
+        if not app_config.disable_permalinks and (
+            app_config.update_permalinks_on_move or old_permalink is None
+        ):
             # Generate new permalink from destination path
             new_permalink = await self.resolve_permalink(destination_path)
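
Across these hunks, `disable_permalinks` acts as a kill switch over every permalink write path (create, frontmatter-driven update, and move). The move-time decision, restated (assuming `BasicMemoryConfig` exposes the two booleans shown above):

```python
def should_update_permalink_on_move(app_config, old_permalink) -> bool:
    # The kill switch wins over both triggers.
    if app_config.disable_permalinks:
        return False
    return app_config.update_permalinks_on_move or old_permalink is None
```
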