basic-memory 0.7.0__py3-none-any.whl → 0.17.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- basic_memory/__init__.py +5 -1
- basic_memory/alembic/alembic.ini +119 -0
- basic_memory/alembic/env.py +130 -20
- basic_memory/alembic/migrations.py +4 -9
- basic_memory/alembic/versions/314f1ea54dc4_add_postgres_full_text_search_support_.py +131 -0
- basic_memory/alembic/versions/502b60eaa905_remove_required_from_entity_permalink.py +51 -0
- basic_memory/alembic/versions/5fe1ab1ccebe_add_projects_table.py +120 -0
- basic_memory/alembic/versions/647e7a75e2cd_project_constraint_fix.py +112 -0
- basic_memory/alembic/versions/6830751f5fb6_merge_multiple_heads.py +24 -0
- basic_memory/alembic/versions/9d9c1cb7d8f5_add_mtime_and_size_columns_to_entity_.py +49 -0
- basic_memory/alembic/versions/a1b2c3d4e5f6_fix_project_foreign_keys.py +49 -0
- basic_memory/alembic/versions/a2b3c4d5e6f7_add_search_index_entity_cascade.py +56 -0
- basic_memory/alembic/versions/b3c3938bacdb_relation_to_name_unique_index.py +44 -0
- basic_memory/alembic/versions/cc7172b46608_update_search_index_schema.py +113 -0
- basic_memory/alembic/versions/e7e1f4367280_add_scan_watermark_tracking_to_project.py +37 -0
- basic_memory/alembic/versions/f8a9b2c3d4e5_add_pg_trgm_for_fuzzy_link_resolution.py +239 -0
- basic_memory/alembic/versions/g9a0b3c4d5e6_add_external_id_to_project_and_entity.py +173 -0
- basic_memory/api/app.py +87 -20
- basic_memory/api/container.py +133 -0
- basic_memory/api/routers/__init__.py +4 -1
- basic_memory/api/routers/directory_router.py +84 -0
- basic_memory/api/routers/importer_router.py +152 -0
- basic_memory/api/routers/knowledge_router.py +180 -23
- basic_memory/api/routers/management_router.py +80 -0
- basic_memory/api/routers/memory_router.py +9 -64
- basic_memory/api/routers/project_router.py +460 -0
- basic_memory/api/routers/prompt_router.py +260 -0
- basic_memory/api/routers/resource_router.py +136 -11
- basic_memory/api/routers/search_router.py +5 -5
- basic_memory/api/routers/utils.py +169 -0
- basic_memory/api/template_loader.py +292 -0
- basic_memory/api/v2/__init__.py +35 -0
- basic_memory/api/v2/routers/__init__.py +21 -0
- basic_memory/api/v2/routers/directory_router.py +93 -0
- basic_memory/api/v2/routers/importer_router.py +181 -0
- basic_memory/api/v2/routers/knowledge_router.py +427 -0
- basic_memory/api/v2/routers/memory_router.py +130 -0
- basic_memory/api/v2/routers/project_router.py +359 -0
- basic_memory/api/v2/routers/prompt_router.py +269 -0
- basic_memory/api/v2/routers/resource_router.py +286 -0
- basic_memory/api/v2/routers/search_router.py +73 -0
- basic_memory/cli/app.py +80 -10
- basic_memory/cli/auth.py +300 -0
- basic_memory/cli/commands/__init__.py +15 -2
- basic_memory/cli/commands/cloud/__init__.py +6 -0
- basic_memory/cli/commands/cloud/api_client.py +127 -0
- basic_memory/cli/commands/cloud/bisync_commands.py +110 -0
- basic_memory/cli/commands/cloud/cloud_utils.py +108 -0
- basic_memory/cli/commands/cloud/core_commands.py +195 -0
- basic_memory/cli/commands/cloud/rclone_commands.py +397 -0
- basic_memory/cli/commands/cloud/rclone_config.py +110 -0
- basic_memory/cli/commands/cloud/rclone_installer.py +263 -0
- basic_memory/cli/commands/cloud/upload.py +240 -0
- basic_memory/cli/commands/cloud/upload_command.py +124 -0
- basic_memory/cli/commands/command_utils.py +99 -0
- basic_memory/cli/commands/db.py +87 -12
- basic_memory/cli/commands/format.py +198 -0
- basic_memory/cli/commands/import_chatgpt.py +47 -223
- basic_memory/cli/commands/import_claude_conversations.py +48 -171
- basic_memory/cli/commands/import_claude_projects.py +53 -160
- basic_memory/cli/commands/import_memory_json.py +55 -111
- basic_memory/cli/commands/mcp.py +67 -11
- basic_memory/cli/commands/project.py +889 -0
- basic_memory/cli/commands/status.py +52 -34
- basic_memory/cli/commands/telemetry.py +81 -0
- basic_memory/cli/commands/tool.py +341 -0
- basic_memory/cli/container.py +84 -0
- basic_memory/cli/main.py +14 -6
- basic_memory/config.py +580 -26
- basic_memory/db.py +285 -28
- basic_memory/deps/__init__.py +293 -0
- basic_memory/deps/config.py +26 -0
- basic_memory/deps/db.py +56 -0
- basic_memory/deps/importers.py +200 -0
- basic_memory/deps/projects.py +238 -0
- basic_memory/deps/repositories.py +179 -0
- basic_memory/deps/services.py +480 -0
- basic_memory/deps.py +16 -185
- basic_memory/file_utils.py +318 -54
- basic_memory/ignore_utils.py +297 -0
- basic_memory/importers/__init__.py +27 -0
- basic_memory/importers/base.py +100 -0
- basic_memory/importers/chatgpt_importer.py +245 -0
- basic_memory/importers/claude_conversations_importer.py +192 -0
- basic_memory/importers/claude_projects_importer.py +184 -0
- basic_memory/importers/memory_json_importer.py +128 -0
- basic_memory/importers/utils.py +61 -0
- basic_memory/markdown/entity_parser.py +182 -23
- basic_memory/markdown/markdown_processor.py +70 -7
- basic_memory/markdown/plugins.py +43 -23
- basic_memory/markdown/schemas.py +1 -1
- basic_memory/markdown/utils.py +38 -14
- basic_memory/mcp/async_client.py +135 -4
- basic_memory/mcp/clients/__init__.py +28 -0
- basic_memory/mcp/clients/directory.py +70 -0
- basic_memory/mcp/clients/knowledge.py +176 -0
- basic_memory/mcp/clients/memory.py +120 -0
- basic_memory/mcp/clients/project.py +89 -0
- basic_memory/mcp/clients/resource.py +71 -0
- basic_memory/mcp/clients/search.py +65 -0
- basic_memory/mcp/container.py +110 -0
- basic_memory/mcp/project_context.py +155 -0
- basic_memory/mcp/prompts/__init__.py +19 -0
- basic_memory/mcp/prompts/ai_assistant_guide.py +70 -0
- basic_memory/mcp/prompts/continue_conversation.py +62 -0
- basic_memory/mcp/prompts/recent_activity.py +188 -0
- basic_memory/mcp/prompts/search.py +57 -0
- basic_memory/mcp/prompts/utils.py +162 -0
- basic_memory/mcp/resources/ai_assistant_guide.md +283 -0
- basic_memory/mcp/resources/project_info.py +71 -0
- basic_memory/mcp/server.py +61 -9
- basic_memory/mcp/tools/__init__.py +33 -21
- basic_memory/mcp/tools/build_context.py +120 -0
- basic_memory/mcp/tools/canvas.py +152 -0
- basic_memory/mcp/tools/chatgpt_tools.py +190 -0
- basic_memory/mcp/tools/delete_note.py +249 -0
- basic_memory/mcp/tools/edit_note.py +325 -0
- basic_memory/mcp/tools/list_directory.py +157 -0
- basic_memory/mcp/tools/move_note.py +549 -0
- basic_memory/mcp/tools/project_management.py +204 -0
- basic_memory/mcp/tools/read_content.py +281 -0
- basic_memory/mcp/tools/read_note.py +265 -0
- basic_memory/mcp/tools/recent_activity.py +528 -0
- basic_memory/mcp/tools/search.py +377 -24
- basic_memory/mcp/tools/utils.py +402 -16
- basic_memory/mcp/tools/view_note.py +78 -0
- basic_memory/mcp/tools/write_note.py +230 -0
- basic_memory/models/__init__.py +3 -2
- basic_memory/models/knowledge.py +82 -17
- basic_memory/models/project.py +93 -0
- basic_memory/models/search.py +68 -8
- basic_memory/project_resolver.py +222 -0
- basic_memory/repository/__init__.py +2 -0
- basic_memory/repository/entity_repository.py +437 -8
- basic_memory/repository/observation_repository.py +36 -3
- basic_memory/repository/postgres_search_repository.py +451 -0
- basic_memory/repository/project_info_repository.py +10 -0
- basic_memory/repository/project_repository.py +140 -0
- basic_memory/repository/relation_repository.py +79 -4
- basic_memory/repository/repository.py +148 -29
- basic_memory/repository/search_index_row.py +95 -0
- basic_memory/repository/search_repository.py +79 -268
- basic_memory/repository/search_repository_base.py +241 -0
- basic_memory/repository/sqlite_search_repository.py +437 -0
- basic_memory/runtime.py +61 -0
- basic_memory/schemas/__init__.py +22 -9
- basic_memory/schemas/base.py +131 -12
- basic_memory/schemas/cloud.py +50 -0
- basic_memory/schemas/directory.py +31 -0
- basic_memory/schemas/importer.py +35 -0
- basic_memory/schemas/memory.py +194 -25
- basic_memory/schemas/project_info.py +213 -0
- basic_memory/schemas/prompt.py +90 -0
- basic_memory/schemas/request.py +56 -2
- basic_memory/schemas/response.py +85 -28
- basic_memory/schemas/search.py +36 -35
- basic_memory/schemas/sync_report.py +72 -0
- basic_memory/schemas/v2/__init__.py +27 -0
- basic_memory/schemas/v2/entity.py +133 -0
- basic_memory/schemas/v2/resource.py +47 -0
- basic_memory/services/__init__.py +2 -1
- basic_memory/services/context_service.py +451 -138
- basic_memory/services/directory_service.py +310 -0
- basic_memory/services/entity_service.py +636 -71
- basic_memory/services/exceptions.py +21 -0
- basic_memory/services/file_service.py +402 -33
- basic_memory/services/initialization.py +216 -0
- basic_memory/services/link_resolver.py +50 -56
- basic_memory/services/project_service.py +888 -0
- basic_memory/services/search_service.py +232 -37
- basic_memory/sync/__init__.py +4 -2
- basic_memory/sync/background_sync.py +26 -0
- basic_memory/sync/coordinator.py +160 -0
- basic_memory/sync/sync_service.py +1200 -109
- basic_memory/sync/watch_service.py +432 -135
- basic_memory/telemetry.py +249 -0
- basic_memory/templates/prompts/continue_conversation.hbs +110 -0
- basic_memory/templates/prompts/search.hbs +101 -0
- basic_memory/utils.py +407 -54
- basic_memory-0.17.4.dist-info/METADATA +617 -0
- basic_memory-0.17.4.dist-info/RECORD +193 -0
- {basic_memory-0.7.0.dist-info → basic_memory-0.17.4.dist-info}/WHEEL +1 -1
- {basic_memory-0.7.0.dist-info → basic_memory-0.17.4.dist-info}/entry_points.txt +1 -0
- basic_memory/alembic/README +0 -1
- basic_memory/cli/commands/sync.py +0 -206
- basic_memory/cli/commands/tools.py +0 -157
- basic_memory/mcp/tools/knowledge.py +0 -68
- basic_memory/mcp/tools/memory.py +0 -170
- basic_memory/mcp/tools/notes.py +0 -202
- basic_memory/schemas/discovery.py +0 -28
- basic_memory/sync/file_change_scanner.py +0 -158
- basic_memory/sync/utils.py +0 -31
- basic_memory-0.7.0.dist-info/METADATA +0 -378
- basic_memory-0.7.0.dist-info/RECORD +0 -82
- {basic_memory-0.7.0.dist-info → basic_memory-0.17.4.dist-info}/licenses/LICENSE +0 -0
basic_memory/services/entity_service.py
@@ -1,24 +1,35 @@
 """Service for managing entities in the database."""
 
 from pathlib import Path
-from typing import
+from typing import List, Optional, Sequence, Tuple, Union
 
 import frontmatter
+import yaml
 from loguru import logger
 from sqlalchemy.exc import IntegrityError
 
+
+from basic_memory.config import ProjectConfig, BasicMemoryConfig
+from basic_memory.file_utils import (
+    has_frontmatter,
+    parse_frontmatter,
+    remove_frontmatter,
+    dump_frontmatter,
+)
 from basic_memory.markdown import EntityMarkdown
+from basic_memory.markdown.entity_parser import EntityParser
 from basic_memory.markdown.utils import entity_model_from_markdown, schema_to_markdown
-from basic_memory.models import Entity as EntityModel
+from basic_memory.models import Entity as EntityModel
+from basic_memory.models import Observation, Relation
+from basic_memory.models.knowledge import Entity
 from basic_memory.repository import ObservationRepository, RelationRepository
 from basic_memory.repository.entity_repository import EntityRepository
 from basic_memory.schemas import Entity as EntitySchema
 from basic_memory.schemas.base import Permalink
-from basic_memory.services
-from basic_memory.services import
-from basic_memory.services import BaseService
+from basic_memory.services import BaseService, FileService
+from basic_memory.services.exceptions import EntityCreationError, EntityNotFoundError
 from basic_memory.services.link_resolver import LinkResolver
-from basic_memory.
+from basic_memory.services.search_service import SearchService
 from basic_memory.utils import generate_permalink
 
 
@@ -33,6 +44,8 @@ class EntityService(BaseService[EntityModel]):
         relation_repository: RelationRepository,
         file_service: FileService,
         link_resolver: LinkResolver,
+        search_service: Optional[SearchService] = None,
+        app_config: Optional[BasicMemoryConfig] = None,
     ):
         super().__init__(entity_repository)
         self.observation_repository = observation_repository
@@ -40,9 +53,53 @@ class EntityService(BaseService[EntityModel]):
         self.entity_parser = entity_parser
         self.file_service = file_service
         self.link_resolver = link_resolver
+        self.search_service = search_service
+        self.app_config = app_config
+
+    async def detect_file_path_conflicts(
+        self, file_path: str, skip_check: bool = False
+    ) -> List[Entity]:
+        """Detect potential file path conflicts for a given file path.
+
+        This checks for entities with similar file paths that might cause conflicts:
+        - Case sensitivity differences (Finance/file.md vs finance/file.md)
+        - Character encoding differences
+        - Hyphen vs space differences
+        - Unicode normalization differences
+
+        Args:
+            file_path: The file path to check for conflicts
+            skip_check: If True, skip the check and return empty list (optimization for bulk operations)
+
+        Returns:
+            List of entities that might conflict with the given file path
+        """
+        if skip_check:
+            return []
+
+        from basic_memory.utils import detect_potential_file_conflicts
+
+        conflicts = []
+
+        # Get all existing file paths
+        all_entities = await self.repository.find_all()
+        existing_paths = [entity.file_path for entity in all_entities]
+
+        # Use the enhanced conflict detection utility
+        conflicting_paths = detect_potential_file_conflicts(file_path, existing_paths)
+
+        # Find the entities corresponding to conflicting paths
+        for entity in all_entities:
+            if entity.file_path in conflicting_paths:
+                conflicts.append(entity)
+
+        return conflicts
 
     async def resolve_permalink(
-        self,
+        self,
+        file_path: Permalink | Path,
+        markdown: Optional[EntityMarkdown] = None,
+        skip_conflict_check: bool = False,
     ) -> str:
         """Get or generate unique permalink for an entity.
 
@@ -51,31 +108,53 @@ class EntityService(BaseService[EntityModel]):
         2. If markdown has permalink but it's used by another file -> make unique
         3. For existing files, keep current permalink from db
         4. Generate new unique permalink from file path
+
+        Enhanced to detect and handle character-related conflicts.
+
+        Note: Uses lightweight repository methods that skip eager loading of
+        observations and relations for better performance during bulk operations.
         """
+        file_path_str = Path(file_path).as_posix()
+
+        # Check for potential file path conflicts before resolving permalink
+        conflicts = await self.detect_file_path_conflicts(
+            file_path_str, skip_check=skip_conflict_check
+        )
+        if conflicts:
+            logger.warning(
+                f"Detected potential file path conflicts for '{file_path_str}': "
+                f"{[entity.file_path for entity in conflicts]}"
+            )
+
         # If markdown has explicit permalink, try to validate it
         if markdown and markdown.frontmatter.permalink:
             desired_permalink = markdown.frontmatter.permalink
-
+            # Use lightweight method - we only need to check file_path
+            existing_file_path = await self.repository.get_file_path_for_permalink(
+                desired_permalink
+            )
 
             # If no conflict or it's our own file, use as is
-            if not
+            if not existing_file_path or existing_file_path == file_path_str:
                 return desired_permalink
 
         # For existing files, try to find current permalink
-
-
-
+        # Use lightweight method - we only need the permalink
+        existing_permalink = await self.repository.get_permalink_for_file_path(file_path_str)
+        if existing_permalink:
+            return existing_permalink
 
         # New file - generate permalink
         if markdown and markdown.frontmatter.permalink:
             desired_permalink = markdown.frontmatter.permalink
         else:
-            desired_permalink = generate_permalink(
+            desired_permalink = generate_permalink(file_path_str)
 
-        # Make unique if needed
+        # Make unique if needed - enhanced to handle character conflicts
+        # Use lightweight existence check instead of loading full entity
         permalink = desired_permalink
         suffix = 1
-        while await self.repository.
+        while await self.repository.permalink_exists(permalink):
             permalink = f"{desired_permalink}-{suffix}"
             suffix += 1
             logger.debug(f"creating unique permalink: {permalink}")
@@ -86,13 +165,18 @@ class EntityService(BaseService[EntityModel]):
         """Create new entity or update existing one.
         Returns: (entity, is_new) where is_new is True if a new entity was created
         """
-        logger.debug(
+        logger.debug(
+            f"Creating or updating entity: {schema.file_path}, permalink: {schema.permalink}"
+        )
 
-        # Try to find existing entity using
-
+        # Try to find existing entity using strict resolution (no fuzzy search)
+        # This prevents incorrectly matching similar file paths like "Node A.md" and "Node C.md"
+        existing = await self.link_resolver.resolve_link(schema.file_path, strict=True)
+        if not existing and schema.permalink:
+            existing = await self.link_resolver.resolve_link(schema.permalink, strict=True)
 
         if existing:
-            logger.debug(f"Found existing entity: {existing.
+            logger.debug(f"Found existing entity: {existing.file_path}")
             return await self.update_entity(existing, schema), False
         else:
             # Create new entity
@@ -100,7 +184,7 @@ class EntityService(BaseService[EntityModel]):
 
     async def create_entity(self, schema: EntitySchema) -> EntityModel:
         """Create a new entity and write to filesystem."""
-        logger.debug(f"Creating entity: {schema.
+        logger.debug(f"Creating entity: {schema.title}")
 
         # Get file path and ensure it's a Path object
         file_path = Path(schema.file_path)
@@ -110,71 +194,181 @@ class EntityService(BaseService[EntityModel]):
                 f"file for entity {schema.folder}/{schema.title} already exists: {file_path}"
             )
 
-        #
-
-        schema.
+        # Parse content frontmatter to check for user-specified permalink and entity_type
+        content_markdown = None
+        if schema.content and has_frontmatter(schema.content):
+            content_frontmatter = parse_frontmatter(schema.content)
+
+            # If content has entity_type/type, use it to override the schema entity_type
+            if "type" in content_frontmatter:
+                schema.entity_type = content_frontmatter["type"]
+
+            if "permalink" in content_frontmatter:
+                # Create a minimal EntityMarkdown object for permalink resolution
+                from basic_memory.markdown.schemas import EntityFrontmatter
+
+                frontmatter_metadata = {
+                    "title": schema.title,
+                    "type": schema.entity_type,
+                    "permalink": content_frontmatter["permalink"],
+                }
+                frontmatter_obj = EntityFrontmatter(metadata=frontmatter_metadata)
+                content_markdown = EntityMarkdown(
+                    frontmatter=frontmatter_obj,
+                    content="",  # content not needed for permalink resolution
+                    observations=[],
+                    relations=[],
+                )
+
+        # Get unique permalink (prioritizing content frontmatter) unless disabled
+        if self.app_config and self.app_config.disable_permalinks:
+            # Use empty string as sentinel to indicate permalinks are disabled
+            # The permalink property will return None when it sees empty string
+            schema._permalink = ""
+        else:
+            # Generate and set permalink
+            permalink = await self.resolve_permalink(file_path, content_markdown)
+            schema._permalink = permalink
 
         post = await schema_to_markdown(schema)
 
         # write file
-        final_content =
+        final_content = dump_frontmatter(post)
         checksum = await self.file_service.write_file(file_path, final_content)
 
-        # parse entity from file
-        entity_markdown = await self.entity_parser.
+        # parse entity from content we just wrote (avoids re-reading file for cloud compatibility)
+        entity_markdown = await self.entity_parser.parse_markdown_content(
+            file_path=file_path,
+            content=final_content,
+        )
 
         # create entity
-        await self.create_entity_from_markdown(file_path, entity_markdown)
+        created = await self.create_entity_from_markdown(file_path, entity_markdown)
 
         # add relations
-        entity = await self.update_entity_relations(file_path, entity_markdown)
+        entity = await self.update_entity_relations(created.file_path, entity_markdown)
 
         # Set final checksum to mark complete
         return await self.repository.update(entity.id, {"checksum": checksum})
 
     async def update_entity(self, entity: EntityModel, schema: EntitySchema) -> EntityModel:
         """Update an entity's content and metadata."""
-        logger.debug(
+        logger.debug(
+            f"Updating entity with permalink: {entity.permalink} content-type: {schema.content_type}"
+        )
 
         # Convert file path string to Path
         file_path = Path(entity.file_path)
 
+        # Read existing content via file_service (for cloud compatibility)
+        existing_content = await self.file_service.read_file_content(file_path)
+        existing_markdown = await self.entity_parser.parse_markdown_content(
+            file_path=file_path,
+            content=existing_content,
+        )
+
+        # Parse content frontmatter to check for user-specified permalink and entity_type
+        content_markdown = None
+        if schema.content and has_frontmatter(schema.content):
+            content_frontmatter = parse_frontmatter(schema.content)
+
+            # If content has entity_type/type, use it to override the schema entity_type
+            if "type" in content_frontmatter:
+                schema.entity_type = content_frontmatter["type"]
+
+            if "permalink" in content_frontmatter:
+                # Create a minimal EntityMarkdown object for permalink resolution
+                from basic_memory.markdown.schemas import EntityFrontmatter
+
+                frontmatter_metadata = {
+                    "title": schema.title,
+                    "type": schema.entity_type,
+                    "permalink": content_frontmatter["permalink"],
+                }
+                frontmatter_obj = EntityFrontmatter(metadata=frontmatter_metadata)
+                content_markdown = EntityMarkdown(
+                    frontmatter=frontmatter_obj,
+                    content="",  # content not needed for permalink resolution
+                    observations=[],
+                    relations=[],
+                )
+
+        # Check if we need to update the permalink based on content frontmatter (unless disabled)
+        new_permalink = entity.permalink  # Default to existing
+        if self.app_config and not self.app_config.disable_permalinks:
+            if content_markdown and content_markdown.frontmatter.permalink:
+                # Resolve permalink with the new content frontmatter
+                resolved_permalink = await self.resolve_permalink(file_path, content_markdown)
+                if resolved_permalink != entity.permalink:
+                    new_permalink = resolved_permalink
+                    # Update the schema to use the new permalink
+                    schema._permalink = new_permalink
+
+        # Create post with new content from schema
         post = await schema_to_markdown(schema)
 
+        # Merge new metadata with existing metadata
+        existing_markdown.frontmatter.metadata.update(post.metadata)
+
+        # Ensure the permalink in the metadata is the resolved one
+        if new_permalink != entity.permalink:
+            existing_markdown.frontmatter.metadata["permalink"] = new_permalink
+
+        # Create a new post with merged metadata
+        merged_post = frontmatter.Post(post.content, **existing_markdown.frontmatter.metadata)
+
         # write file
-        final_content =
+        final_content = dump_frontmatter(merged_post)
         checksum = await self.file_service.write_file(file_path, final_content)
 
-        # parse entity from file
-        entity_markdown = await self.entity_parser.
+        # parse entity from content we just wrote (avoids re-reading file for cloud compatibility)
+        entity_markdown = await self.entity_parser.parse_markdown_content(
+            file_path=file_path,
+            content=final_content,
+        )
 
         # update entity in db
         entity = await self.update_entity_and_observations(file_path, entity_markdown)
 
         # add relations
-        await self.update_entity_relations(file_path, entity_markdown)
+        await self.update_entity_relations(file_path.as_posix(), entity_markdown)
 
         # Set final checksum to match file
         entity = await self.repository.update(entity.id, {"checksum": checksum})
 
         return entity
 
-    async def delete_entity(self,
+    async def delete_entity(self, permalink_or_id: str | int) -> bool:
         """Delete entity and its file."""
-        logger.debug(f"Deleting entity: {
+        logger.debug(f"Deleting entity: {permalink_or_id}")
 
         try:
             # Get entity first for file deletion
-
-
-
+            if isinstance(permalink_or_id, str):
+                entity = await self.get_by_permalink(permalink_or_id)
+            else:
+                entities = await self.get_entities_by_id([permalink_or_id])
+                if len(entities) != 1:  # pragma: no cover
+                    logger.error(
+                        "Entity lookup error", entity_id=permalink_or_id, found_count=len(entities)
+                    )
+                    raise ValueError(
+                        f"Expected 1 entity with ID {permalink_or_id}, got {len(entities)}"
+                    )
+                entity = entities[0]
+
+            # Delete from search index first (if search_service is available)
+            if self.search_service:
+                await self.search_service.handle_delete(entity)
+
+            # Delete file
             await self.file_service.delete_entity_file(entity)
 
             # Delete from DB (this will cascade to observations/relations)
             return await self.repository.delete(entity.id)
 
         except EntityNotFoundError:
-            logger.info(f"Entity not found: {
+            logger.info(f"Entity not found: {permalink_or_id}")
             return True  # Already deleted
 
     async def get_by_permalink(self, permalink: str) -> EntityModel:
@@ -206,13 +400,23 @@ class EntityService(BaseService[EntityModel]):
 
         Creates the entity with null checksum to indicate sync not complete.
         Relations will be added in second pass.
+
+        Uses UPSERT approach to handle permalink/file_path conflicts cleanly.
         """
-        logger.debug(f"Creating entity: {markdown.frontmatter.title}")
-        model = entity_model_from_markdown(
+        logger.debug(f"Creating entity: {markdown.frontmatter.title} file_path: {file_path}")
+        model = entity_model_from_markdown(
+            file_path, markdown, project_id=self.repository.project_id
+        )
 
         # Mark as incomplete because we still need to add relations
         model.checksum = None
-
+
+        # Use UPSERT to handle conflicts cleanly
+        try:
+            return await self.repository.upsert_entity(model)
+        except Exception as e:
+            logger.error(f"Failed to upsert entity for {file_path}: {e}")
+            raise EntityCreationError(f"Failed to create entity: {str(e)}") from e
 
     async def update_entity_and_observations(
         self, file_path: Path, markdown: EntityMarkdown
@@ -224,7 +428,7 @@ class EntityService(BaseService[EntityModel]):
         """
         logger.debug(f"Updating entity and observations: {file_path}")
 
-        db_entity = await self.repository.get_by_file_path(
+        db_entity = await self.repository.get_by_file_path(file_path.as_posix())
 
         # Clear observations for entity
         await self.observation_repository.delete_by_fields(entity_id=db_entity.id)
@@ -232,6 +436,7 @@ class EntityService(BaseService[EntityModel]):
         # add new observations
         observations = [
             Observation(
+                project_id=self.observation_repository.project_id,
                 entity_id=db_entity.id,
                 content=obs.content,
                 category=obs.category,
@@ -256,44 +461,404 @@ class EntityService(BaseService[EntityModel]):
 
     async def update_entity_relations(
         self,
-
+        path: str,
         markdown: EntityMarkdown,
     ) -> EntityModel:
         """Update relations for entity"""
-        logger.debug(f"Updating relations for entity: {
+        logger.debug(f"Updating relations for entity: {path}")
 
-        db_entity = await self.repository.get_by_file_path(
+        db_entity = await self.repository.get_by_file_path(path)
 
         # Clear existing relations first
         await self.relation_repository.delete_outgoing_relations_from_entity(db_entity.id)
 
-        #
-
-
-
-
-
+        # Batch resolve all relation targets in parallel
+        if markdown.relations:
+            import asyncio
+
+            # Create tasks for all relation lookups
+            # Use strict=True to disable fuzzy search - only exact matches should create resolved relations
+            # This ensures forward references (links to non-existent entities) remain unresolved (to_id=NULL)
+            lookup_tasks = [
+                self.link_resolver.resolve_link(rel.target, strict=True)
+                for rel in markdown.relations
+            ]
+
+            # Execute all lookups in parallel
+            resolved_entities = await asyncio.gather(*lookup_tasks, return_exceptions=True)
+
+            # Process results and create relation records
+            relations_to_add = []
+            for rel, resolved in zip(markdown.relations, resolved_entities):
+                # Handle exceptions from gather and None results
+                target_entity: Optional[Entity] = None
+                if not isinstance(resolved, Exception):
+                    # Type narrowing: resolved is Optional[Entity] here, not Exception
+                    target_entity = resolved  # type: ignore
+
+                # if the target is found, store the id
+                target_id = target_entity.id if target_entity else None
+                # if the target is found, store the title, otherwise add the target for a "forward link"
+                target_name = target_entity.title if target_entity else rel.target
+
+                # Create the relation
+                relation = Relation(
+                    project_id=self.relation_repository.project_id,
+                    from_id=db_entity.id,
+                    to_id=target_id,
+                    to_name=target_name,
+                    relation_type=rel.type,
+                    context=rel.context,
+                )
+                relations_to_add.append(relation)
+
+            # Batch insert all relations
+            if relations_to_add:
+                try:
+                    await self.relation_repository.add_all(relations_to_add)
+                except IntegrityError:
+                    # Some relations might be duplicates - fall back to individual inserts
+                    logger.debug("Batch relation insert failed, trying individual inserts")
+                    for relation in relations_to_add:
+                        try:
+                            await self.relation_repository.add(relation)
+                        except IntegrityError:
+                            # Unique constraint violation - relation already exists
+                            logger.debug(
+                                f"Skipping duplicate relation {relation.relation_type} from {db_entity.permalink}"
+                            )
+                            continue
+
+        return await self.repository.get_by_file_path(path)
+
+    async def edit_entity(
+        self,
+        identifier: str,
+        operation: str,
+        content: str,
+        section: Optional[str] = None,
+        find_text: Optional[str] = None,
+        expected_replacements: int = 1,
+    ) -> EntityModel:
+        """Edit an existing entity's content using various operations.
+
+        Args:
+            identifier: Entity identifier (permalink, title, etc.)
+            operation: The editing operation (append, prepend, find_replace, replace_section)
+            content: The content to add or use for replacement
+            section: For replace_section operation - the markdown header
+            find_text: For find_replace operation - the text to find and replace
+            expected_replacements: For find_replace operation - expected number of replacements (default: 1)
+
+        Returns:
+            The updated entity model
+
+        Raises:
+            EntityNotFoundError: If the entity cannot be found
+            ValueError: If required parameters are missing for the operation or replacement count doesn't match expected
+        """
+        logger.debug(f"Editing entity: {identifier}, operation: {operation}")
+
+        # Find the entity using the link resolver with strict mode for destructive operations
+        entity = await self.link_resolver.resolve_link(identifier, strict=True)
+        if not entity:
+            raise EntityNotFoundError(f"Entity not found: {identifier}")
+
+        # Read the current file content
+        file_path = Path(entity.file_path)
+        current_content, _ = await self.file_service.read_file(file_path)
+
+        # Apply the edit operation
+        new_content = self.apply_edit_operation(
+            current_content, operation, content, section, find_text, expected_replacements
+        )
 
-
-
-
-
-
-
-
-
-
-
-
-
+        # Write the updated content back to the file
+        checksum = await self.file_service.write_file(file_path, new_content)
+
+        # Parse the content we just wrote (avoids re-reading file for cloud compatibility)
+        entity_markdown = await self.entity_parser.parse_markdown_content(
+            file_path=file_path,
+            content=new_content,
+        )
+
+        # Update entity and its relationships
+        entity = await self.update_entity_and_observations(file_path, entity_markdown)
+        await self.update_entity_relations(file_path.as_posix(), entity_markdown)
+
+        # Set final checksum to match file
+        entity = await self.repository.update(entity.id, {"checksum": checksum})
+
+        return entity
+
+    def apply_edit_operation(
+        self,
+        current_content: str,
+        operation: str,
+        content: str,
+        section: Optional[str] = None,
+        find_text: Optional[str] = None,
+        expected_replacements: int = 1,
+    ) -> str:
+        """Apply the specified edit operation to the current content."""
+
+        if operation == "append":
+            # Ensure proper spacing
+            if current_content and not current_content.endswith("\n"):
+                return current_content + "\n" + content
+            return current_content + content  # pragma: no cover
+
+        elif operation == "prepend":
+            # Handle frontmatter-aware prepending
+            return self._prepend_after_frontmatter(current_content, content)
+
+        elif operation == "find_replace":
+            if not find_text:
+                raise ValueError("find_text is required for find_replace operation")
+            if not find_text.strip():
+                raise ValueError("find_text cannot be empty or whitespace only")
+
+            # Count actual occurrences
+            actual_count = current_content.count(find_text)
+
+            # Validate count matches expected
+            if actual_count != expected_replacements:
+                if actual_count == 0:
+                    raise ValueError(f"Text to replace not found: '{find_text}'")
+                else:
+                    raise ValueError(
+                        f"Expected {expected_replacements} occurrences of '{find_text}', "
+                        f"but found {actual_count}"
+                    )
+
+            return current_content.replace(find_text, content)
+
+        elif operation == "replace_section":
+            if not section:
+                raise ValueError("section is required for replace_section operation")
+            if not section.strip():
+                raise ValueError("section cannot be empty or whitespace only")
+            return self.replace_section_content(current_content, section, content)
+
+        else:
+            raise ValueError(f"Unsupported operation: {operation}")
+
+    def replace_section_content(
+        self, current_content: str, section_header: str, new_content: str
+    ) -> str:
+        """Replace content under a specific markdown section header.
+
+        This method uses a simple, safe approach: when replacing a section, it only
+        replaces the immediate content under that header until it encounters the next
+        header of ANY level. This means:
+
+        - Replacing "# Header" replaces content until "## Subsection" (preserves subsections)
+        - Replacing "## Section" replaces content until "### Subsection" (preserves subsections)
+        - More predictable and safer than trying to consume entire hierarchies
+
+        Args:
+            current_content: The current markdown content
+            section_header: The section header to find and replace (e.g., "## Section Name")
+            new_content: The new content to replace the section with (should not include the header itself)
+
+        Returns:
+            The updated content with the section replaced
+
+        Raises:
+            ValueError: If multiple sections with the same header are found
+        """
+        # Normalize the section header (ensure it starts with #)
+        if not section_header.startswith("#"):
+            section_header = "## " + section_header
+
+        # Strip duplicate header from new_content if present (fix for issue #390)
+        # LLMs sometimes include the section header in their content, which would create duplicates
+        new_content_lines = new_content.lstrip().split("\n")
+        if new_content_lines and new_content_lines[0].strip() == section_header.strip():
+            # Remove the duplicate header line
+            new_content = "\n".join(new_content_lines[1:]).lstrip()
+
+        # First pass: count matching sections to check for duplicates
+        lines = current_content.split("\n")
+        matching_sections = []
+
+        for i, line in enumerate(lines):
+            if line.strip() == section_header.strip():
+                matching_sections.append(i)
+
+        # Handle multiple sections error
+        if len(matching_sections) > 1:
+            raise ValueError(
+                f"Multiple sections found with header '{section_header}'. "
+                f"Section replacement requires unique headers."
             )
+
+        # If no section found, append it
+        if len(matching_sections) == 0:
+            logger.info(f"Section '{section_header}' not found, appending to end of document")
+            separator = "\n\n" if current_content and not current_content.endswith("\n\n") else ""
+            return current_content + separator + section_header + "\n" + new_content
+
+        # Replace the single matching section
+        result_lines = []
+        section_line_idx = matching_sections[0]
+
+        i = 0
+        while i < len(lines):
+            line = lines[i]
+
+            # Check if this is our target section header
+            if i == section_line_idx:
+                # Add the section header and new content
+                result_lines.append(line)
+                result_lines.append(new_content)
+                i += 1
+
+                # Skip the original section content until next header or end
+                while i < len(lines):
+                    next_line = lines[i]
+                    # Stop consuming when we hit any header (preserve subsections)
+                    if next_line.startswith("#"):
+                        # We found another header - continue processing from here
+                        break
+                    i += 1
+                # Continue processing from the next header (don't increment i again)
+                continue
+
+            # Add all other lines (including subsequent sections)
+            result_lines.append(line)
+            i += 1
+
+        return "\n".join(result_lines)
+
+    def _prepend_after_frontmatter(self, current_content: str, content: str) -> str:
+        """Prepend content after frontmatter, preserving frontmatter structure."""
+
+        # Check if file has frontmatter
+        if has_frontmatter(current_content):
             try:
-
-
-
-
-
+                # Parse and separate frontmatter from body
+                frontmatter_data = parse_frontmatter(current_content)
+                body_content = remove_frontmatter(current_content)
+
+                # Prepend content to the body
+                if content and not content.endswith("\n"):
+                    new_body = content + "\n" + body_content
+                else:
+                    new_body = content + body_content
+
+                # Reconstruct file with frontmatter + prepended body
+                yaml_fm = yaml.dump(frontmatter_data, sort_keys=False, allow_unicode=True)
+                return f"---\n{yaml_fm}---\n\n{new_body.strip()}"
+
+            except Exception as e:  # pragma: no cover
+                logger.warning(
+                    f"Failed to parse frontmatter during prepend: {e}"
+                )  # pragma: no cover
+                # Fall back to simple prepend if frontmatter parsing fails  # pragma: no cover
+
+        # No frontmatter or parsing failed - do simple prepend  # pragma: no cover
+        if content and not content.endswith("\n"):  # pragma: no cover
+            return content + "\n" + current_content  # pragma: no cover
+        return content + current_content  # pragma: no cover
+
+    async def move_entity(
+        self,
+        identifier: str,
+        destination_path: str,
+        project_config: ProjectConfig,
+        app_config: BasicMemoryConfig,
+    ) -> EntityModel:
+        """Move entity to new location with database consistency.
+
+        Args:
+            identifier: Entity identifier (title, permalink, or memory:// URL)
+            destination_path: New path relative to project root
+            project_config: Project configuration for file operations
+            app_config: App configuration for permalink update settings
+
+        Returns:
+            Success message with move details
+
+        Raises:
+            EntityNotFoundError: If the entity cannot be found
+            ValueError: If move operation fails due to validation or filesystem errors
+        """
+        logger.debug(f"Moving entity: {identifier} to {destination_path}")
+
+        # 1. Resolve identifier to entity with strict mode for destructive operations
+        entity = await self.link_resolver.resolve_link(identifier, strict=True)
+        if not entity:
+            raise EntityNotFoundError(f"Entity not found: {identifier}")
+
+        current_path = entity.file_path
+        old_permalink = entity.permalink
+
+        # 2. Validate destination path format first
+        if not destination_path or destination_path.startswith("/") or not destination_path.strip():
+            raise ValueError(f"Invalid destination path: {destination_path}")
+
+        # 3. Validate paths
+        # NOTE: In tenantless/cloud mode, we cannot rely on local filesystem paths.
+        # Use FileService for existence checks and moving.
+        if not await self.file_service.exists(current_path):
+            raise ValueError(f"Source file not found: {current_path}")
+
+        if await self.file_service.exists(destination_path):
+            raise ValueError(f"Destination already exists: {destination_path}")
+
+        try:
+            # 4. Ensure destination directory if needed (no-op for S3)
+            await self.file_service.ensure_directory(Path(destination_path).parent)
+
+            # 5. Move physical file via FileService (filesystem rename or cloud move)
+            await self.file_service.move_file(current_path, destination_path)
+            logger.info(f"Moved file: {current_path} -> {destination_path}")
+
+            # 6. Prepare database updates
+            updates = {"file_path": destination_path}
+
+            # 7. Update permalink if configured or if entity has null permalink (unless disabled)
+            if not app_config.disable_permalinks and (
+                app_config.update_permalinks_on_move or old_permalink is None
+            ):
+                # Generate new permalink from destination path
+                new_permalink = await self.resolve_permalink(destination_path)
+
+                # Update frontmatter with new permalink
+                await self.file_service.update_frontmatter(
+                    destination_path, {"permalink": new_permalink}
                 )
-                continue
 
-
+                updates["permalink"] = new_permalink
+                if old_permalink is None:
+                    logger.info(
+                        f"Generated permalink for entity with null permalink: {new_permalink}"
+                    )
+                else:
+                    logger.info(f"Updated permalink: {old_permalink} -> {new_permalink}")
+
+            # 8. Recalculate checksum
+            new_checksum = await self.file_service.compute_checksum(destination_path)
+            updates["checksum"] = new_checksum
+
+            # 9. Update database
+            updated_entity = await self.repository.update(entity.id, updates)
+            if not updated_entity:
+                raise ValueError(f"Failed to update entity in database: {entity.id}")
+
+            return updated_entity
+
+        except Exception as e:
+            # Rollback: try to restore original file location if move succeeded
+            try:
+                if await self.file_service.exists(
+                    destination_path
+                ) and not await self.file_service.exists(current_path):
+                    await self.file_service.move_file(destination_path, current_path)
+                    logger.info(f"Rolled back file move: {destination_path} -> {current_path}")
+            except Exception as rollback_error:  # pragma: no cover
+                logger.error(f"Failed to rollback file move: {rollback_error}")
+
+            # Re-raise the original error with context
+            raise ValueError(f"Move failed: {str(e)}") from e
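The resolve_permalink hunk above documents a four-rule algorithm (validate the frontmatter permalink, de-duplicate it if another file owns it, keep the stored permalink for existing files, otherwise derive one from the file path) that ends in a numeric-suffix loop. A minimal standalone sketch of that loop, with a plain set standing in for the repository.permalink_exists() check; the names here are illustrative, not part of the basic-memory API:

# Sketch of the uniqueness loop in resolve_permalink; `existing` stands in
# for repository.permalink_exists(), which the packaged code awaits.
def make_unique_permalink(desired: str, existing: set[str]) -> str:
    permalink = desired
    suffix = 1
    while permalink in existing:
        permalink = f"{desired}-{suffix}"
        suffix += 1
    return permalink

print(make_unique_permalink("notes/todo", {"notes/todo", "notes/todo-1"}))  # notes/todo-2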
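apply_edit_operation validates a find_replace edit against expected_replacements before anything is written, so a mismatched count fails fast instead of corrupting the note. A free-function sketch of just that validation, mirroring the branch shown above (assumed semantics, not the packaged code):

def find_replace(text: str, find_text: str, content: str, expected_replacements: int = 1) -> str:
    # Mirrors the find_replace branch of apply_edit_operation above (sketch).
    if not find_text or not find_text.strip():
        raise ValueError("find_text cannot be empty or whitespace only")
    actual_count = text.count(find_text)
    if actual_count != expected_replacements:
        if actual_count == 0:
            raise ValueError(f"Text to replace not found: '{find_text}'")
        raise ValueError(
            f"Expected {expected_replacements} occurrences of '{find_text}', but found {actual_count}"
        )
    return text.replace(find_text, content)

assert find_replace("a TODO and a TODO", "TODO", "DONE", expected_replacements=2) == "a DONE and a DONE"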
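move_entity is laid out as a nine-step transaction: strict identifier resolution, path validation through FileService (so it also works in cloud mode), the physical move, then permalink, checksum, and database updates, with a best-effort rollback of the file move if any later step throws. A hedged usage sketch; the service and config objects are assumed to come from the project's dependency wiring, and the note paths are made up for illustration:

# Usage sketch only; entity_service, project_config, and app_config are
# assumed to be wired elsewhere, and "notes/todo" is a hypothetical note.
async def archive_note(entity_service, project_config, app_config):
    moved = await entity_service.move_entity(
        identifier="notes/todo",               # title, permalink, or memory:// URL
        destination_path="archive/todo.md",    # relative path; a leading "/" is rejected
        project_config=project_config,
        app_config=app_config,                 # update_permalinks_on_move drives step 7
    )
    return moved.file_path                     # "archive/todo.md" after the DB update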