basic-memory 0.12.2__py3-none-any.whl → 0.13.0__py3-none-any.whl
This diff compares the published contents of the two package versions as they appear in their public registries and is provided for informational purposes only.
- basic_memory/__init__.py +2 -1
- basic_memory/alembic/env.py +1 -1
- basic_memory/alembic/versions/5fe1ab1ccebe_add_projects_table.py +108 -0
- basic_memory/alembic/versions/647e7a75e2cd_project_constraint_fix.py +104 -0
- basic_memory/alembic/versions/cc7172b46608_update_search_index_schema.py +0 -6
- basic_memory/api/app.py +43 -13
- basic_memory/api/routers/__init__.py +4 -2
- basic_memory/api/routers/directory_router.py +63 -0
- basic_memory/api/routers/importer_router.py +152 -0
- basic_memory/api/routers/knowledge_router.py +139 -37
- basic_memory/api/routers/management_router.py +78 -0
- basic_memory/api/routers/memory_router.py +6 -62
- basic_memory/api/routers/project_router.py +234 -0
- basic_memory/api/routers/prompt_router.py +260 -0
- basic_memory/api/routers/search_router.py +3 -21
- basic_memory/api/routers/utils.py +130 -0
- basic_memory/api/template_loader.py +292 -0
- basic_memory/cli/app.py +20 -21
- basic_memory/cli/commands/__init__.py +2 -1
- basic_memory/cli/commands/auth.py +136 -0
- basic_memory/cli/commands/db.py +3 -3
- basic_memory/cli/commands/import_chatgpt.py +31 -207
- basic_memory/cli/commands/import_claude_conversations.py +16 -142
- basic_memory/cli/commands/import_claude_projects.py +33 -143
- basic_memory/cli/commands/import_memory_json.py +26 -83
- basic_memory/cli/commands/mcp.py +71 -18
- basic_memory/cli/commands/project.py +102 -70
- basic_memory/cli/commands/status.py +19 -9
- basic_memory/cli/commands/sync.py +44 -58
- basic_memory/cli/commands/tool.py +6 -6
- basic_memory/cli/main.py +1 -5
- basic_memory/config.py +143 -87
- basic_memory/db.py +6 -4
- basic_memory/deps.py +227 -30
- basic_memory/importers/__init__.py +27 -0
- basic_memory/importers/base.py +79 -0
- basic_memory/importers/chatgpt_importer.py +222 -0
- basic_memory/importers/claude_conversations_importer.py +172 -0
- basic_memory/importers/claude_projects_importer.py +148 -0
- basic_memory/importers/memory_json_importer.py +93 -0
- basic_memory/importers/utils.py +58 -0
- basic_memory/markdown/entity_parser.py +5 -2
- basic_memory/mcp/auth_provider.py +270 -0
- basic_memory/mcp/external_auth_provider.py +321 -0
- basic_memory/mcp/project_session.py +103 -0
- basic_memory/mcp/prompts/__init__.py +2 -0
- basic_memory/mcp/prompts/continue_conversation.py +18 -68
- basic_memory/mcp/prompts/recent_activity.py +20 -4
- basic_memory/mcp/prompts/search.py +14 -140
- basic_memory/mcp/prompts/sync_status.py +116 -0
- basic_memory/mcp/prompts/utils.py +3 -3
- basic_memory/mcp/{tools → resources}/project_info.py +6 -2
- basic_memory/mcp/server.py +86 -13
- basic_memory/mcp/supabase_auth_provider.py +463 -0
- basic_memory/mcp/tools/__init__.py +24 -0
- basic_memory/mcp/tools/build_context.py +43 -8
- basic_memory/mcp/tools/canvas.py +17 -3
- basic_memory/mcp/tools/delete_note.py +168 -5
- basic_memory/mcp/tools/edit_note.py +303 -0
- basic_memory/mcp/tools/list_directory.py +154 -0
- basic_memory/mcp/tools/move_note.py +299 -0
- basic_memory/mcp/tools/project_management.py +332 -0
- basic_memory/mcp/tools/read_content.py +15 -6
- basic_memory/mcp/tools/read_note.py +28 -9
- basic_memory/mcp/tools/recent_activity.py +47 -16
- basic_memory/mcp/tools/search.py +189 -8
- basic_memory/mcp/tools/sync_status.py +254 -0
- basic_memory/mcp/tools/utils.py +184 -12
- basic_memory/mcp/tools/view_note.py +66 -0
- basic_memory/mcp/tools/write_note.py +24 -17
- basic_memory/models/__init__.py +3 -2
- basic_memory/models/knowledge.py +16 -4
- basic_memory/models/project.py +78 -0
- basic_memory/models/search.py +8 -5
- basic_memory/repository/__init__.py +2 -0
- basic_memory/repository/entity_repository.py +8 -3
- basic_memory/repository/observation_repository.py +35 -3
- basic_memory/repository/project_info_repository.py +3 -2
- basic_memory/repository/project_repository.py +85 -0
- basic_memory/repository/relation_repository.py +8 -2
- basic_memory/repository/repository.py +107 -15
- basic_memory/repository/search_repository.py +192 -54
- basic_memory/schemas/__init__.py +6 -0
- basic_memory/schemas/base.py +33 -5
- basic_memory/schemas/directory.py +30 -0
- basic_memory/schemas/importer.py +34 -0
- basic_memory/schemas/memory.py +84 -13
- basic_memory/schemas/project_info.py +112 -2
- basic_memory/schemas/prompt.py +90 -0
- basic_memory/schemas/request.py +56 -2
- basic_memory/schemas/search.py +1 -1
- basic_memory/services/__init__.py +2 -1
- basic_memory/services/context_service.py +208 -95
- basic_memory/services/directory_service.py +167 -0
- basic_memory/services/entity_service.py +399 -6
- basic_memory/services/exceptions.py +6 -0
- basic_memory/services/file_service.py +14 -15
- basic_memory/services/initialization.py +170 -66
- basic_memory/services/link_resolver.py +35 -12
- basic_memory/services/migration_service.py +168 -0
- basic_memory/services/project_service.py +671 -0
- basic_memory/services/search_service.py +77 -2
- basic_memory/services/sync_status_service.py +181 -0
- basic_memory/sync/background_sync.py +25 -0
- basic_memory/sync/sync_service.py +102 -21
- basic_memory/sync/watch_service.py +63 -39
- basic_memory/templates/prompts/continue_conversation.hbs +110 -0
- basic_memory/templates/prompts/search.hbs +101 -0
- basic_memory/utils.py +67 -17
- {basic_memory-0.12.2.dist-info → basic_memory-0.13.0.dist-info}/METADATA +26 -4
- basic_memory-0.13.0.dist-info/RECORD +138 -0
- basic_memory/api/routers/project_info_router.py +0 -274
- basic_memory/mcp/main.py +0 -24
- basic_memory-0.12.2.dist-info/RECORD +0 -100
- {basic_memory-0.12.2.dist-info → basic_memory-0.13.0.dist-info}/WHEEL +0 -0
- {basic_memory-0.12.2.dist-info → basic_memory-0.13.0.dist-info}/entry_points.txt +0 -0
- {basic_memory-0.12.2.dist-info → basic_memory-0.13.0.dist-info}/licenses/LICENSE +0 -0
basic_memory/services/entity_service.py

```diff
@@ -4,9 +4,12 @@ from pathlib import Path
 from typing import List, Optional, Sequence, Tuple, Union
 
 import frontmatter
+import yaml
 from loguru import logger
 from sqlalchemy.exc import IntegrityError
 
+from basic_memory.config import ProjectConfig, BasicMemoryConfig
+from basic_memory.file_utils import has_frontmatter, parse_frontmatter, remove_frontmatter
 from basic_memory.markdown import EntityMarkdown
 from basic_memory.markdown.entity_parser import EntityParser
 from basic_memory.markdown.utils import entity_model_from_markdown, schema_to_markdown
```
```diff
@@ -86,13 +89,17 @@ class EntityService(BaseService[EntityModel]):
         """Create new entity or update existing one.
         Returns: (entity, is_new) where is_new is True if a new entity was created
         """
-        logger.debug(
+        logger.debug(
+            f"Creating or updating entity: {schema.file_path}, permalink: {schema.permalink}"
+        )
 
         # Try to find existing entity using smart resolution
-        existing = await self.link_resolver.resolve_link(
+        existing = await self.link_resolver.resolve_link(
+            schema.file_path
+        ) or await self.link_resolver.resolve_link(schema.permalink)
 
         if existing:
-            logger.debug(f"Found existing entity: {existing.
+            logger.debug(f"Found existing entity: {existing.file_path}")
             return await self.update_entity(existing, schema), False
         else:
             # Create new entity
```
```diff
@@ -110,8 +117,29 @@ class EntityService(BaseService[EntityModel]):
                 f"file for entity {schema.folder}/{schema.title} already exists: {file_path}"
             )
 
-        #
-
+        # Parse content frontmatter to check for user-specified permalink
+        content_markdown = None
+        if schema.content and has_frontmatter(schema.content):
+            content_frontmatter = parse_frontmatter(schema.content)
+            if "permalink" in content_frontmatter:
+                # Create a minimal EntityMarkdown object for permalink resolution
+                from basic_memory.markdown.schemas import EntityFrontmatter
+
+                frontmatter_metadata = {
+                    "title": schema.title,
+                    "type": schema.entity_type,
+                    "permalink": content_frontmatter["permalink"],
+                }
+                frontmatter_obj = EntityFrontmatter(metadata=frontmatter_metadata)
+                content_markdown = EntityMarkdown(
+                    frontmatter=frontmatter_obj,
+                    content="",  # content not needed for permalink resolution
+                    observations=[],
+                    relations=[],
+                )
+
+        # Get unique permalink (prioritizing content frontmatter)
+        permalink = await self.resolve_permalink(file_path, content_markdown)
         schema._permalink = permalink
 
         post = await schema_to_markdown(schema)
```
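Both the create path above and the update path below now honor a permalink that the caller embeds in the note body's own frontmatter. A minimal sketch of the rule, assuming a `schema.content` like the note below; the helpers are the ones imported from `basic_memory.file_utils` in this diff, and the note text itself is invented for illustration:

```python
from basic_memory.file_utils import has_frontmatter, parse_frontmatter

# Hypothetical note content; the permalink key here takes priority over the
# permalink that would otherwise be derived from the file path.
note = """---
title: Coffee Brewing
permalink: notes/coffee-brewing-v2
---
# Coffee Brewing
"""

if has_frontmatter(note):
    fm = parse_frontmatter(note)
    print(fm.get("permalink"))  # notes/coffee-brewing-v2, fed into resolve_permalink()
```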
```diff
@@ -144,12 +172,47 @@ class EntityService(BaseService[EntityModel]):
         # Read existing frontmatter from the file if it exists
         existing_markdown = await self.entity_parser.parse_file(file_path)
 
+        # Parse content frontmatter to check for user-specified permalink
+        content_markdown = None
+        if schema.content and has_frontmatter(schema.content):
+            content_frontmatter = parse_frontmatter(schema.content)
+            if "permalink" in content_frontmatter:
+                # Create a minimal EntityMarkdown object for permalink resolution
+                from basic_memory.markdown.schemas import EntityFrontmatter
+
+                frontmatter_metadata = {
+                    "title": schema.title,
+                    "type": schema.entity_type,
+                    "permalink": content_frontmatter["permalink"],
+                }
+                frontmatter_obj = EntityFrontmatter(metadata=frontmatter_metadata)
+                content_markdown = EntityMarkdown(
+                    frontmatter=frontmatter_obj,
+                    content="",  # content not needed for permalink resolution
+                    observations=[],
+                    relations=[],
+                )
+
+        # Check if we need to update the permalink based on content frontmatter
+        new_permalink = entity.permalink  # Default to existing
+        if content_markdown and content_markdown.frontmatter.permalink:
+            # Resolve permalink with the new content frontmatter
+            resolved_permalink = await self.resolve_permalink(file_path, content_markdown)
+            if resolved_permalink != entity.permalink:
+                new_permalink = resolved_permalink
+                # Update the schema to use the new permalink
+                schema._permalink = new_permalink
+
         # Create post with new content from schema
         post = await schema_to_markdown(schema)
 
         # Merge new metadata with existing metadata
         existing_markdown.frontmatter.metadata.update(post.metadata)
 
+        # Ensure the permalink in the metadata is the resolved one
+        if new_permalink != entity.permalink:
+            existing_markdown.frontmatter.metadata["permalink"] = new_permalink
+
         # Create a new post with merged metadata
         merged_post = frontmatter.Post(post.content, **existing_markdown.frontmatter.metadata)
 
```
```diff
@@ -235,7 +298,21 @@ class EntityService(BaseService[EntityModel]):
 
         # Mark as incomplete because we still need to add relations
         model.checksum = None
-
+        # Repository will set project_id automatically
+        try:
+            return await self.repository.add(model)
+        except IntegrityError as e:
+            # Handle race condition where entity was created by another process
+            if "UNIQUE constraint failed: entity.file_path" in str(
+                e
+            ) or "UNIQUE constraint failed: entity.permalink" in str(e):
+                logger.info(
+                    f"Entity already exists for file_path={file_path} (file_path or permalink conflict), updating instead of creating"
+                )
+                return await self.update_entity_and_observations(file_path, markdown)
+            else:
+                # Re-raise if it's a different integrity error
+                raise
 
     async def update_entity_and_observations(
         self, file_path: Path, markdown: EntityMarkdown
```
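The string match in the new `except IntegrityError` branch is SQLite's wording for unique-index violations. A standalone sketch reproducing the message being matched, with a table and column mirroring the ones named above:

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE entity (file_path TEXT UNIQUE)")
conn.execute("INSERT INTO entity VALUES ('notes/a.md')")
try:
    # The second writer loses the race and hits the unique index.
    conn.execute("INSERT INTO entity VALUES ('notes/a.md')")
except sqlite3.IntegrityError as e:
    print(e)  # UNIQUE constraint failed: entity.file_path
```

Note the check is backend-specific: other databases word their constraint errors differently, so this fallback assumes the SQLite backend that basic-memory ships with.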
```diff
@@ -320,3 +397,319 @@ class EntityService(BaseService[EntityModel]):
                 continue
 
         return await self.repository.get_by_file_path(path)
+
+    async def edit_entity(
+        self,
+        identifier: str,
+        operation: str,
+        content: str,
+        section: Optional[str] = None,
+        find_text: Optional[str] = None,
+        expected_replacements: int = 1,
+    ) -> EntityModel:
+        """Edit an existing entity's content using various operations.
+
+        Args:
+            identifier: Entity identifier (permalink, title, etc.)
+            operation: The editing operation (append, prepend, find_replace, replace_section)
+            content: The content to add or use for replacement
+            section: For replace_section operation - the markdown header
+            find_text: For find_replace operation - the text to find and replace
+            expected_replacements: For find_replace operation - expected number of replacements (default: 1)
+
+        Returns:
+            The updated entity model
+
+        Raises:
+            EntityNotFoundError: If the entity cannot be found
+            ValueError: If required parameters are missing for the operation or replacement count doesn't match expected
+        """
+        logger.debug(f"Editing entity: {identifier}, operation: {operation}")
+
+        # Find the entity using the link resolver with strict mode for destructive operations
+        entity = await self.link_resolver.resolve_link(identifier, strict=True)
+        if not entity:
+            raise EntityNotFoundError(f"Entity not found: {identifier}")
+
+        # Read the current file content
+        file_path = Path(entity.file_path)
+        current_content, _ = await self.file_service.read_file(file_path)
+
+        # Apply the edit operation
+        new_content = self.apply_edit_operation(
+            current_content, operation, content, section, find_text, expected_replacements
+        )
+
+        # Write the updated content back to the file
+        checksum = await self.file_service.write_file(file_path, new_content)
+
+        # Parse the updated file to get new observations/relations
+        entity_markdown = await self.entity_parser.parse_file(file_path)
+
+        # Update entity and its relationships
+        entity = await self.update_entity_and_observations(file_path, entity_markdown)
+        await self.update_entity_relations(str(file_path), entity_markdown)
+
+        # Set final checksum to match file
+        entity = await self.repository.update(entity.id, {"checksum": checksum})
+
+        return entity
+
+    def apply_edit_operation(
+        self,
+        current_content: str,
+        operation: str,
+        content: str,
+        section: Optional[str] = None,
+        find_text: Optional[str] = None,
+        expected_replacements: int = 1,
+    ) -> str:
+        """Apply the specified edit operation to the current content."""
+
+        if operation == "append":
+            # Ensure proper spacing
+            if current_content and not current_content.endswith("\n"):
+                return current_content + "\n" + content
+            return current_content + content  # pragma: no cover
+
+        elif operation == "prepend":
+            # Handle frontmatter-aware prepending
+            return self._prepend_after_frontmatter(current_content, content)
+
+        elif operation == "find_replace":
+            if not find_text:
+                raise ValueError("find_text is required for find_replace operation")
+            if not find_text.strip():
+                raise ValueError("find_text cannot be empty or whitespace only")
+
+            # Count actual occurrences
+            actual_count = current_content.count(find_text)
+
+            # Validate count matches expected
+            if actual_count != expected_replacements:
+                if actual_count == 0:
+                    raise ValueError(f"Text to replace not found: '{find_text}'")
+                else:
+                    raise ValueError(
+                        f"Expected {expected_replacements} occurrences of '{find_text}', "
+                        f"but found {actual_count}"
+                    )
+
+            return current_content.replace(find_text, content)
+
+        elif operation == "replace_section":
+            if not section:
+                raise ValueError("section is required for replace_section operation")
+            if not section.strip():
+                raise ValueError("section cannot be empty or whitespace only")
+            return self.replace_section_content(current_content, section, content)
+
+        else:
+            raise ValueError(f"Unsupported operation: {operation}")
+
+    def replace_section_content(
+        self, current_content: str, section_header: str, new_content: str
+    ) -> str:
+        """Replace content under a specific markdown section header.
+
+        This method uses a simple, safe approach: when replacing a section, it only
+        replaces the immediate content under that header until it encounters the next
+        header of ANY level. This means:
+
+        - Replacing "# Header" replaces content until "## Subsection" (preserves subsections)
+        - Replacing "## Section" replaces content until "### Subsection" (preserves subsections)
+        - More predictable and safer than trying to consume entire hierarchies
+
+        Args:
+            current_content: The current markdown content
+            section_header: The section header to find and replace (e.g., "## Section Name")
+            new_content: The new content to replace the section with
+
+        Returns:
+            The updated content with the section replaced
+
+        Raises:
+            ValueError: If multiple sections with the same header are found
+        """
+        # Normalize the section header (ensure it starts with #)
+        if not section_header.startswith("#"):
+            section_header = "## " + section_header
+
+        # First pass: count matching sections to check for duplicates
+        lines = current_content.split("\n")
+        matching_sections = []
+
+        for i, line in enumerate(lines):
+            if line.strip() == section_header.strip():
+                matching_sections.append(i)
+
+        # Handle multiple sections error
+        if len(matching_sections) > 1:
+            raise ValueError(
+                f"Multiple sections found with header '{section_header}'. "
+                f"Section replacement requires unique headers."
+            )
+
+        # If no section found, append it
+        if len(matching_sections) == 0:
+            logger.info(f"Section '{section_header}' not found, appending to end of document")
+            separator = "\n\n" if current_content and not current_content.endswith("\n\n") else ""
+            return current_content + separator + section_header + "\n" + new_content
+
+        # Replace the single matching section
+        result_lines = []
+        section_line_idx = matching_sections[0]
+
+        i = 0
+        while i < len(lines):
+            line = lines[i]
+
+            # Check if this is our target section header
+            if i == section_line_idx:
+                # Add the section header and new content
+                result_lines.append(line)
+                result_lines.append(new_content)
+                i += 1
+
+                # Skip the original section content until next header or end
+                while i < len(lines):
+                    next_line = lines[i]
+                    # Stop consuming when we hit any header (preserve subsections)
+                    if next_line.startswith("#"):
+                        # We found another header - continue processing from here
+                        break
+                    i += 1
+                # Continue processing from the next header (don't increment i again)
+                continue
+
+            # Add all other lines (including subsequent sections)
+            result_lines.append(line)
+            i += 1
+
+        return "\n".join(result_lines)
+
+    def _prepend_after_frontmatter(self, current_content: str, content: str) -> str:
+        """Prepend content after frontmatter, preserving frontmatter structure."""
+
+        # Check if file has frontmatter
+        if has_frontmatter(current_content):
+            try:
+                # Parse and separate frontmatter from body
+                frontmatter_data = parse_frontmatter(current_content)
+                body_content = remove_frontmatter(current_content)
+
+                # Prepend content to the body
+                if content and not content.endswith("\n"):
+                    new_body = content + "\n" + body_content
+                else:
+                    new_body = content + body_content
+
+                # Reconstruct file with frontmatter + prepended body
+                yaml_fm = yaml.dump(frontmatter_data, sort_keys=False, allow_unicode=True)
+                return f"---\n{yaml_fm}---\n\n{new_body.strip()}"
+
+            except Exception as e:  # pragma: no cover
+                logger.warning(
+                    f"Failed to parse frontmatter during prepend: {e}"
+                )  # pragma: no cover
+                # Fall back to simple prepend if frontmatter parsing fails  # pragma: no cover
+
+        # No frontmatter or parsing failed - do simple prepend  # pragma: no cover
+        if content and not content.endswith("\n"):  # pragma: no cover
+            return content + "\n" + current_content  # pragma: no cover
+        return content + current_content  # pragma: no cover
+
+    async def move_entity(
+        self,
+        identifier: str,
+        destination_path: str,
+        project_config: ProjectConfig,
+        app_config: BasicMemoryConfig,
+    ) -> EntityModel:
+        """Move entity to new location with database consistency.
+
+        Args:
+            identifier: Entity identifier (title, permalink, or memory:// URL)
+            destination_path: New path relative to project root
+            project_config: Project configuration for file operations
+            app_config: App configuration for permalink update settings
+
+        Returns:
+            Success message with move details
+
+        Raises:
+            EntityNotFoundError: If the entity cannot be found
+            ValueError: If move operation fails due to validation or filesystem errors
+        """
+        logger.debug(f"Moving entity: {identifier} to {destination_path}")
+
+        # 1. Resolve identifier to entity with strict mode for destructive operations
+        entity = await self.link_resolver.resolve_link(identifier, strict=True)
+        if not entity:
+            raise EntityNotFoundError(f"Entity not found: {identifier}")
+
+        current_path = entity.file_path
+        old_permalink = entity.permalink
+
+        # 2. Validate destination path format first
+        if not destination_path or destination_path.startswith("/") or not destination_path.strip():
+            raise ValueError(f"Invalid destination path: {destination_path}")
+
+        # 3. Validate paths
+        source_file = project_config.home / current_path
+        destination_file = project_config.home / destination_path
+
+        # Validate source exists
+        if not source_file.exists():
+            raise ValueError(f"Source file not found: {current_path}")
+
+        # Check if destination already exists
+        if destination_file.exists():
+            raise ValueError(f"Destination already exists: {destination_path}")
+
+        try:
+            # 4. Create destination directory if needed
+            destination_file.parent.mkdir(parents=True, exist_ok=True)
+
+            # 5. Move physical file
+            source_file.rename(destination_file)
+            logger.info(f"Moved file: {current_path} -> {destination_path}")
+
+            # 6. Prepare database updates
+            updates = {"file_path": destination_path}
+
+            # 7. Update permalink if configured
+            if app_config.update_permalinks_on_move:
+                # Generate new permalink from destination path
+                new_permalink = await self.resolve_permalink(destination_path)
+
+                # Update frontmatter with new permalink
+                await self.file_service.update_frontmatter(
+                    destination_path, {"permalink": new_permalink}
+                )
+
+                updates["permalink"] = new_permalink
+                logger.info(f"Updated permalink: {old_permalink} -> {new_permalink}")
+
+            # 8. Recalculate checksum
+            new_checksum = await self.file_service.compute_checksum(destination_path)
+            updates["checksum"] = new_checksum
+
+            # 9. Update database
+            updated_entity = await self.repository.update(entity.id, updates)
+            if not updated_entity:
+                raise ValueError(f"Failed to update entity in database: {entity.id}")
+
+            return updated_entity
+
+        except Exception as e:
+            # Rollback: try to restore original file location if move succeeded
+            if destination_file.exists() and not source_file.exists():
+                try:
+                    destination_file.rename(source_file)
+                    logger.info(f"Rolled back file move: {destination_path} -> {current_path}")
+                except Exception as rollback_error:  # pragma: no cover
+                    logger.error(f"Failed to rollback file move: {rollback_error}")
+
+            # Re-raise the original error with context
+            raise ValueError(f"Move failed: {str(e)}") from e
```
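Taken together, `edit_entity` gives callers four operations over a note's raw markdown. A hedged usage sketch, assuming an initialized `EntityService` instance named `entity_service` and an existing note; the identifiers and content are invented, but the keyword arguments match the signature added above:

```python
async def demo(entity_service):
    # find_replace refuses to run unless the match count equals
    # expected_replacements, so a typo cannot silently rewrite the file.
    await entity_service.edit_entity(
        identifier="notes/coffee-brewing",
        operation="find_replace",
        content="pour-over",
        find_text="drip",
        expected_replacements=1,
    )

    # replace_section swaps only the text directly under the header and
    # stops at the next header of any level, so subsections are preserved.
    await entity_service.edit_entity(
        identifier="notes/coffee-brewing",
        operation="replace_section",
        section="## Method",
        content="Use a 1:16 coffee-to-water ratio.",
    )
```

`move_entity` follows the same strict resolution, renames the file before touching the database, and attempts to rename it back if any later step fails.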
basic_memory/services/file_service.py

```diff
@@ -94,8 +94,8 @@ class FileService:
         """
         try:
            # Convert string to Path if needed
-            path_obj =
-
+            path_obj = self.base_path / path if isinstance(path, str) else path
+            logger.debug(f"Checking file existence: path={path_obj}")
            if path_obj.is_absolute():
                return path_obj.exists()
            else:
```
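The repeated one-liner above leans on a pathlib property worth noting: joining with `/` discards the left operand when the right-hand side is absolute, so prefixing `base_path` is safe even when callers pass absolute string paths. A quick illustration on POSIX, with invented paths:

```python
from pathlib import Path

base = Path("/projects/main")
print(base / "notes/a.md")  # /projects/main/notes/a.md
print(base / "/tmp/x.md")   # /tmp/x.md, the absolute right-hand side wins
```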
```diff
@@ -121,7 +121,7 @@ class FileService:
             FileOperationError: If write fails
         """
         # Convert string to Path if needed
-        path_obj =
+        path_obj = self.base_path / path if isinstance(path, str) else path
         full_path = path_obj if path_obj.is_absolute() else self.base_path / path_obj
 
         try:
@@ -130,18 +130,17 @@ class FileService:
 
             # Write content atomically
             logger.info(
-                "Writing file"
-
-
-
-                is_markdown=full_path.suffix.lower() == ".md",
+                "Writing file: "
+                f"path={path_obj}, "
+                f"content_length={len(content)}, "
+                f"is_markdown={full_path.suffix.lower() == '.md'}"
             )
 
             await file_utils.write_file_atomic(full_path, content)
 
             # Compute and return checksum
             checksum = await file_utils.compute_checksum(content)
-            logger.debug("File write completed
+            logger.debug(f"File write completed path={full_path}, {checksum=}")
             return checksum
 
         except Exception as e:
@@ -165,7 +164,7 @@ class FileService:
             FileOperationError: If read fails
         """
         # Convert string to Path if needed
-        path_obj =
+        path_obj = self.base_path / path if isinstance(path, str) else path
         full_path = path_obj if path_obj.is_absolute() else self.base_path / path_obj
 
         try:
@@ -195,7 +194,7 @@ class FileService:
             path: Path to delete (Path or string)
         """
         # Convert string to Path if needed
-        path_obj =
+        path_obj = self.base_path / path if isinstance(path, str) else path
         full_path = path_obj if path_obj.is_absolute() else self.base_path / path_obj
         full_path.unlink(missing_ok=True)
 
@@ -211,7 +210,7 @@ class FileService:
             Checksum of updated file
         """
         # Convert string to Path if needed
-        path_obj =
+        path_obj = self.base_path / path if isinstance(path, str) else path
         full_path = path_obj if path_obj.is_absolute() else self.base_path / path_obj
         return await file_utils.update_frontmatter(full_path, updates)
 
@@ -228,7 +227,7 @@ class FileService:
             FileError: If checksum computation fails
         """
         # Convert string to Path if needed
-        path_obj =
+        path_obj = self.base_path / path if isinstance(path, str) else path
         full_path = path_obj if path_obj.is_absolute() else self.base_path / path_obj
 
         try:
@@ -254,7 +253,7 @@ class FileService:
             File statistics
         """
         # Convert string to Path if needed
-        path_obj =
+        path_obj = self.base_path / path if isinstance(path, str) else path
         full_path = path_obj if path_obj.is_absolute() else self.base_path / path_obj
         # get file timestamps
         return full_path.stat()
@@ -269,7 +268,7 @@ class FileService:
             MIME type of the file
         """
         # Convert string to Path if needed
-        path_obj =
+        path_obj = self.base_path / path if isinstance(path, str) else path
         full_path = path_obj if path_obj.is_absolute() else self.base_path / path_obj
         # get file timestamps
         mime_type, _ = mimetypes.guess_type(full_path.name)
```