basic-memory 0.8.0__py3-none-any.whl → 0.9.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Note: this version of basic-memory has been flagged as potentially problematic.

Files changed (75)
  1. basic_memory/__init__.py +1 -1
  2. basic_memory/alembic/migrations.py +4 -9
  3. basic_memory/alembic/versions/cc7172b46608_update_search_index_schema.py +106 -0
  4. basic_memory/api/app.py +9 -6
  5. basic_memory/api/routers/__init__.py +2 -1
  6. basic_memory/api/routers/knowledge_router.py +30 -4
  7. basic_memory/api/routers/memory_router.py +3 -2
  8. basic_memory/api/routers/project_info_router.py +275 -0
  9. basic_memory/api/routers/search_router.py +22 -4
  10. basic_memory/cli/app.py +54 -3
  11. basic_memory/cli/commands/__init__.py +15 -2
  12. basic_memory/cli/commands/db.py +9 -13
  13. basic_memory/cli/commands/import_chatgpt.py +26 -30
  14. basic_memory/cli/commands/import_claude_conversations.py +27 -29
  15. basic_memory/cli/commands/import_claude_projects.py +29 -31
  16. basic_memory/cli/commands/import_memory_json.py +26 -28
  17. basic_memory/cli/commands/mcp.py +7 -1
  18. basic_memory/cli/commands/project.py +119 -0
  19. basic_memory/cli/commands/project_info.py +167 -0
  20. basic_memory/cli/commands/status.py +7 -9
  21. basic_memory/cli/commands/sync.py +54 -9
  22. basic_memory/cli/commands/{tools.py → tool.py} +92 -19
  23. basic_memory/cli/main.py +40 -1
  24. basic_memory/config.py +155 -7
  25. basic_memory/db.py +19 -4
  26. basic_memory/deps.py +10 -3
  27. basic_memory/file_utils.py +32 -16
  28. basic_memory/markdown/utils.py +5 -0
  29. basic_memory/mcp/main.py +1 -2
  30. basic_memory/mcp/prompts/__init__.py +6 -2
  31. basic_memory/mcp/prompts/ai_assistant_guide.py +6 -8
  32. basic_memory/mcp/prompts/continue_conversation.py +65 -126
  33. basic_memory/mcp/prompts/recent_activity.py +55 -13
  34. basic_memory/mcp/prompts/search.py +72 -17
  35. basic_memory/mcp/prompts/utils.py +139 -82
  36. basic_memory/mcp/server.py +1 -1
  37. basic_memory/mcp/tools/__init__.py +11 -22
  38. basic_memory/mcp/tools/build_context.py +85 -0
  39. basic_memory/mcp/tools/canvas.py +17 -19
  40. basic_memory/mcp/tools/delete_note.py +28 -0
  41. basic_memory/mcp/tools/project_info.py +51 -0
  42. basic_memory/mcp/tools/{resource.py → read_content.py} +42 -5
  43. basic_memory/mcp/tools/read_note.py +190 -0
  44. basic_memory/mcp/tools/recent_activity.py +100 -0
  45. basic_memory/mcp/tools/search.py +56 -17
  46. basic_memory/mcp/tools/utils.py +245 -17
  47. basic_memory/mcp/tools/write_note.py +124 -0
  48. basic_memory/models/search.py +2 -1
  49. basic_memory/repository/entity_repository.py +3 -2
  50. basic_memory/repository/project_info_repository.py +9 -0
  51. basic_memory/repository/repository.py +23 -6
  52. basic_memory/repository/search_repository.py +33 -10
  53. basic_memory/schemas/__init__.py +12 -0
  54. basic_memory/schemas/memory.py +3 -2
  55. basic_memory/schemas/project_info.py +96 -0
  56. basic_memory/schemas/search.py +27 -32
  57. basic_memory/services/context_service.py +3 -3
  58. basic_memory/services/entity_service.py +8 -2
  59. basic_memory/services/file_service.py +105 -53
  60. basic_memory/services/link_resolver.py +5 -45
  61. basic_memory/services/search_service.py +45 -16
  62. basic_memory/sync/sync_service.py +274 -39
  63. basic_memory/sync/watch_service.py +160 -30
  64. basic_memory/utils.py +40 -40
  65. basic_memory-0.9.0.dist-info/METADATA +736 -0
  66. basic_memory-0.9.0.dist-info/RECORD +99 -0
  67. basic_memory/mcp/prompts/json_canvas_spec.py +0 -25
  68. basic_memory/mcp/tools/knowledge.py +0 -68
  69. basic_memory/mcp/tools/memory.py +0 -177
  70. basic_memory/mcp/tools/notes.py +0 -201
  71. basic_memory-0.8.0.dist-info/METADATA +0 -379
  72. basic_memory-0.8.0.dist-info/RECORD +0 -91
  73. {basic_memory-0.8.0.dist-info → basic_memory-0.9.0.dist-info}/WHEEL +0 -0
  74. {basic_memory-0.8.0.dist-info → basic_memory-0.9.0.dist-info}/entry_points.txt +0 -0
  75. {basic_memory-0.8.0.dist-info → basic_memory-0.9.0.dist-info}/licenses/LICENSE +0 -0
basic_memory/services/search_service.py
@@ -6,6 +6,7 @@ from typing import List, Optional, Set
 from dateparser import parse
 from fastapi import BackgroundTasks
 from loguru import logger
+from sqlalchemy import text
 
 from basic_memory.models import Entity
 from basic_memory.repository import EntityRepository
@@ -39,9 +40,10 @@ class SearchService:
 
     async def reindex_all(self, background_tasks: Optional[BackgroundTasks] = None) -> None:
         """Reindex all content from database."""
-        logger.info("Starting full reindex")
 
+        logger.info("Starting full reindex")
         # Clear and recreate search index
+        await self.repository.execute_query(text("DROP TABLE IF EXISTS search_index"), params={})
         await self.init_search_index()
 
         # Reindex all entities
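
Note on the hunk above: `reindex_all` now drops the FTS table outright before `init_search_index()` recreates it, so schema changes (such as the new `content_stems`/`content_snippet` columns introduced by migration `cc7172b46608`) are always picked up. A minimal sketch of the same drop-and-recreate pattern, assuming SQLAlchemy's async API with the aiosqlite driver; the FTS5 column list is illustrative, not the package's actual schema:

```python
# A sketch only: drop-and-recreate for an FTS5 search index, assuming the
# aiosqlite driver; the column list is illustrative, not basic-memory's schema.
import asyncio

from sqlalchemy import text
from sqlalchemy.ext.asyncio import create_async_engine


async def reindex() -> None:
    engine = create_async_engine("sqlite+aiosqlite:///:memory:")
    async with engine.begin() as conn:
        # Dropping first guarantees any schema change is applied on recreate.
        await conn.execute(text("DROP TABLE IF EXISTS search_index"))
        await conn.execute(
            text("CREATE VIRTUAL TABLE search_index USING fts5(title, content_stems, content_snippet)")
        )
    await engine.dispose()


asyncio.run(reindex())
```

Dropping and rebuilding is simpler than migrating an FTS virtual table in place, at the cost of a full reindex on upgrade.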
@@ -145,6 +147,7 @@
             await self.repository.index_item(
                 SearchIndexRow(
                     id=entity.id,
+                    entity_id=entity.id,
                     type=SearchItemType.ENTITY.value,
                     title=entity.title,
                     file_path=entity.file_path,
@@ -178,33 +181,51 @@
         Each type gets its own row in the search index with appropriate metadata.
         """
 
-        assert entity.permalink is not None, (
-            "entity.permalink should not be None for markdown entities"
-        )
+        if entity.permalink is None:  # pragma: no cover
+            logger.error(
+                "Missing permalink for markdown entity",
+                entity_id=entity.id,
+                title=entity.title,
+                file_path=entity.file_path,
+            )
+            raise ValueError(
+                f"Entity permalink should not be None for markdown entity: {entity.id} ({entity.title})"
+            )
 
-        content_parts = []
+        content_stems = []
+        content_snippet = ""
         title_variants = self._generate_variants(entity.title)
-        content_parts.extend(title_variants)
+        content_stems.extend(title_variants)
 
         content = await self.file_service.read_entity_content(entity)
         if content:
-            content_parts.append(content)
+            content_stems.append(content)
+            content_snippet = f"{content[:250]}"
 
-        content_parts.extend(self._generate_variants(entity.permalink))
-        content_parts.extend(self._generate_variants(entity.file_path))
+        content_stems.extend(self._generate_variants(entity.permalink))
+        content_stems.extend(self._generate_variants(entity.file_path))
 
-        entity_content = "\n".join(p for p in content_parts if p and p.strip())
+        entity_content_stems = "\n".join(p for p in content_stems if p and p.strip())
+
+        if entity.permalink is None:  # pragma: no cover
+            logger.error(
+                "Missing permalink for markdown entity",
+                entity_id=entity.id,
+                title=entity.title,
+                file_path=entity.file_path,
+            )
+            raise ValueError(
+                f"Entity permalink should not be None for markdown entity: {entity.id} ({entity.title})"
+            )
 
-        assert entity.permalink is not None, (
-            "entity.permalink should not be None for markdown entities"
-        )
         # Index entity
         await self.repository.index_item(
             SearchIndexRow(
                 id=entity.id,
                 type=SearchItemType.ENTITY.value,
                 title=entity.title,
-                content=entity_content,
+                content_stems=entity_content_stems,
+                content_snippet=content_snippet,
                 permalink=entity.permalink,
                 file_path=entity.file_path,
                 entity_id=entity.id,
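
The schema change running through this file splits the old single `content` column into `content_stems` (everything FTS should match on: title variants, full body, permalink and path variants) and `content_snippet` (the first 250 characters, kept for display). A rough standalone sketch of how the two fields are assembled; `generate_variants` is a hypothetical stand-in for the private `_generate_variants` helper:

```python
# Sketch of the content_stems / content_snippet split. generate_variants is a
# hypothetical stand-in for SearchService._generate_variants.
def generate_variants(value: str) -> list[str]:
    # Produce loose match forms, e.g. "docs/search-design.md" -> the original
    # string plus space-separated word forms.
    words = value.replace("-", " ").replace("_", " ").replace("/", " ").split()
    return [value, " ".join(words), *words]


def build_index_fields(title: str, content: str, permalink: str, file_path: str) -> tuple[str, str]:
    stems: list[str] = [*generate_variants(title)]
    snippet = ""
    if content:
        stems.append(content)
        snippet = content[:250]  # display-only preview, never used for matching
    stems.extend(generate_variants(permalink))
    stems.extend(generate_variants(file_path))
    # Matching text is newline-joined with blank entries filtered out.
    return "\n".join(p for p in stems if p and p.strip()), snippet


print(build_index_fields("Search Design", "Notes on FTS5...", "docs/search-design", "docs/search-design.md"))
```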
@@ -219,12 +240,16 @@
         # Index each observation with permalink
         for obs in entity.observations:
             # Index with parent entity's file path since that's where it's defined
+            obs_content_stems = "\n".join(
+                p for p in self._generate_variants(obs.content) if p and p.strip()
+            )
             await self.repository.index_item(
                 SearchIndexRow(
                     id=obs.id,
                     type=SearchItemType.OBSERVATION.value,
-                    title=f"{obs.category}: {obs.content[:50]}...",
-                    content=obs.content,
+                    title=f"{obs.category}: {obs.content[:100]}...",
+                    content_stems=obs_content_stems,
+                    content_snippet=obs.content,
                     permalink=obs.permalink,
                     file_path=entity.file_path,
                     category=obs.category,
@@ -246,11 +271,15 @@
                 else f"{rel.from_entity.title}"
             )
 
+            rel_content_stems = "\n".join(
+                p for p in self._generate_variants(relation_title) if p and p.strip()
+            )
             await self.repository.index_item(
                 SearchIndexRow(
                     id=rel.id,
                     title=relation_title,
                     permalink=rel.permalink,
+                    content_stems=rel_content_stems,
                     file_path=entity.file_path,
                     type=SearchItemType.RELATION.value,
                     entity_id=entity.id,
basic_memory/sync/sync_service.py
@@ -1,21 +1,23 @@
 """Service for syncing files between filesystem and database."""
 
 import os
+
 from dataclasses import dataclass
 from dataclasses import field
 from datetime import datetime
 from pathlib import Path
-from typing import Set, Dict
-from typing import Tuple
+from typing import Dict, Optional, Set, Tuple
 
-import logfire
 from loguru import logger
+from sqlalchemy.exc import IntegrityError
 
 from basic_memory.markdown import EntityParser
 from basic_memory.models import Entity
 from basic_memory.repository import EntityRepository, RelationRepository
 from basic_memory.services import EntityService, FileService
 from basic_memory.services.search_service import SearchService
+import time
+from rich.progress import Progress, TextColumn, BarColumn, TaskProgressColumn
 
 
 @dataclass
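
A side note on the logging style adopted throughout this file: f-string log messages are replaced by bare messages plus keyword arguments. With loguru (0.6 and later, to my understanding), extra `**kwargs` on a log call are attached to the record's `extra` dict, so a serializing sink emits them as structured fields. A small sketch of that convention:

```python
# Sketch of the structured-logging convention used in this diff. With loguru,
# extra **kwargs land in record["extra"], so serialize=True emits them as JSON.
import sys

from loguru import logger

logger.remove()                         # drop the default sink
logger.add(sys.stderr, serialize=True)  # one JSON object per record

logger.info("Sync operation started", directory="~/notes")
logger.info("Sync changes detected", new_files=3, modified_files=1, deleted_files=0)
```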
@@ -77,22 +79,123 @@ class SyncService:
         self.search_service = search_service
         self.file_service = file_service
 
-    async def sync(self, directory: Path) -> SyncReport:
+    async def sync(self, directory: Path, show_progress: bool = True) -> SyncReport:
         """Sync all files with database."""
 
-        with logfire.span(f"sync {directory}", directory=directory):  # pyright: ignore [reportGeneralTypeIssues]
-            # initial paths from db to sync
-            # path -> checksum
-            report = await self.scan(directory)
+        start_time = time.time()
+        console = None
+        progress = None  # Will be initialized if show_progress is True
+
+        logger.info("Sync operation started", directory=str(directory))
+
+        # initial paths from db to sync
+        # path -> checksum
+        if show_progress:
+            from rich.console import Console
+
+            console = Console()
+            console.print(f"Scanning directory: {directory}")
+
+        report = await self.scan(directory)
 
-            # order of sync matters to resolve relations effectively
+        # Initialize progress tracking if requested
+        if show_progress and report.total > 0:
+            progress = Progress(
+                TextColumn("[bold blue]{task.description}"),
+                BarColumn(),
+                TaskProgressColumn(),
+                console=console,
+                expand=True,
+            )
+
+        # order of sync matters to resolve relations effectively
+        logger.info(
+            "Sync changes detected",
+            new_files=len(report.new),
+            modified_files=len(report.modified),
+            deleted_files=len(report.deleted),
+            moved_files=len(report.moves),
+        )
+
+        if show_progress and report.total > 0:
+            with progress:  # pyright: ignore
+                # Track each category separately
+                move_task = None
+                if report.moves:  # pragma: no cover
+                    move_task = progress.add_task("[blue]Moving files...", total=len(report.moves))  # pyright: ignore
+
+                delete_task = None
+                if report.deleted:  # pragma: no cover
+                    delete_task = progress.add_task(  # pyright: ignore
+                        "[red]Deleting files...", total=len(report.deleted)
+                    )
+
+                new_task = None
+                if report.new:
+                    new_task = progress.add_task(  # pyright: ignore
+                        "[green]Adding new files...", total=len(report.new)
+                    )
+
+                modify_task = None
+                if report.modified:  # pragma: no cover
+                    modify_task = progress.add_task(  # pyright: ignore
+                        "[yellow]Updating modified files...", total=len(report.modified)
+                    )
 
+                # sync moves first
+                for i, (old_path, new_path) in enumerate(report.moves.items()):
+                    # in the case where a file has been deleted and replaced by another file
+                    # it will show up in the move and modified lists, so handle it in modified
+                    if new_path in report.modified:  # pragma: no cover
+                        report.modified.remove(new_path)
+                        logger.debug(
+                            "File marked as moved and modified",
+                            old_path=old_path,
+                            new_path=new_path,
+                            action="processing as modified",
+                        )
+                    else:  # pragma: no cover
+                        await self.handle_move(old_path, new_path)
+
+                    if move_task is not None:  # pragma: no cover
+                        progress.update(move_task, advance=1)  # pyright: ignore
+
+                # deleted next
+                for i, path in enumerate(report.deleted):  # pragma: no cover
+                    await self.handle_delete(path)
+                    if delete_task is not None:  # pragma: no cover
+                        progress.update(delete_task, advance=1)  # pyright: ignore
+
+                # then new and modified
+                for i, path in enumerate(report.new):
+                    await self.sync_file(path, new=True)
+                    if new_task is not None:
+                        progress.update(new_task, advance=1)  # pyright: ignore
+
+                for i, path in enumerate(report.modified):  # pragma: no cover
+                    await self.sync_file(path, new=False)
+                    if modify_task is not None:  # pragma: no cover
+                        progress.update(modify_task, advance=1)  # pyright: ignore
+
+                # Final step - resolving relations
+                if report.total > 0:
+                    relation_task = progress.add_task("[cyan]Resolving relations...", total=1)  # pyright: ignore
+                    await self.resolve_relations()
+                    progress.update(relation_task, advance=1)  # pyright: ignore
+        else:
+            # No progress display - proceed with normal sync
             # sync moves first
             for old_path, new_path in report.moves.items():
                 # in the case where a file has been deleted and replaced by another file
                 # it will show up in the move and modified lists, so handle it in modified
                 if new_path in report.modified:
                     report.modified.remove(new_path)
+                    logger.debug(
+                        "File marked as moved and modified",
+                        old_path=old_path,
+                        new_path=new_path,
+                        action="processing as modified",
+                    )
                 else:
                     await self.handle_move(old_path, new_path)
 
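
The rewritten `sync()` above is dominated by progress plumbing: one `rich` Progress instance with a task per change category, advanced as each file is handled, plus a no-progress branch that keeps the old behavior for non-interactive callers. A standalone sketch of the same pattern, with fabricated file lists standing in for the real SyncReport:

```python
# Standalone sketch of the per-category progress pattern used in sync().
# The file lists are fabricated; in the real code they come from SyncReport.
import time

from rich.progress import Progress, TextColumn, BarColumn, TaskProgressColumn

new_files = ["a.md", "b.md"]
modified_files = ["c.md"]

progress = Progress(
    TextColumn("[bold blue]{task.description}"),
    BarColumn(),
    TaskProgressColumn(),
    expand=True,
)

with progress:  # starts and stops the live display
    new_task = progress.add_task("[green]Adding new files...", total=len(new_files))
    modify_task = progress.add_task("[yellow]Updating modified files...", total=len(modified_files))
    for _ in new_files:
        time.sleep(0.1)  # stand-in for await self.sync_file(path, new=True)
        progress.update(new_task, advance=1)
    for _ in modified_files:
        time.sleep(0.1)  # stand-in for await self.sync_file(path, new=False)
        progress.update(modify_task, advance=1)
```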
@@ -108,7 +211,16 @@ class SyncService:
                 await self.sync_file(path, new=False)
 
             await self.resolve_relations()
-            return report
+
+        duration_ms = int((time.time() - start_time) * 1000)
+        logger.info(
+            "Sync operation completed",
+            directory=str(directory),
+            total_changes=report.total,
+            duration_ms=duration_ms,
+        )
+
+        return report
 
     async def scan(self, directory):
         """Scan directory for changes compared to database state."""
@@ -166,25 +278,55 @@
         db_records = await self.entity_repository.find_all()
         return {r.file_path: r.checksum or "" for r in db_records}
 
-    async def sync_file(self, path: str, new: bool = True) -> Tuple[Entity, str]:
-        """Sync a single file."""
+    async def sync_file(
+        self, path: str, new: bool = True
+    ) -> Tuple[Optional[Entity], Optional[str]]:
+        """Sync a single file.
 
+        Args:
+            path: Path to file to sync
+            new: Whether this is a new file
+
+        Returns:
+            Tuple of (entity, checksum) or (None, None) if sync fails
+        """
         try:
+            logger.debug(
+                "Syncing file",
+                path=path,
+                is_new=new,
+                is_markdown=self.file_service.is_markdown(path),
+            )
+
             if self.file_service.is_markdown(path):
                 entity, checksum = await self.sync_markdown_file(path, new)
             else:
                 entity, checksum = await self.sync_regular_file(path, new)
-            await self.search_service.index_entity(entity)
+
+            if entity is not None:
+                await self.search_service.index_entity(entity)
+
+                logger.debug(
+                    "File sync completed", path=path, entity_id=entity.id, checksum=checksum
+                )
             return entity, checksum
 
         except Exception as e:  # pragma: no cover
-            logger.error(f"Failed to sync {path}: {e}")
-            raise
+            logger.exception("Failed to sync file", path=path, error=str(e))
+            return None, None
 
-    async def sync_markdown_file(self, path: str, new: bool = True) -> Tuple[Entity, str]:
-        """Sync a markdown file with full processing."""
+    async def sync_markdown_file(self, path: str, new: bool = True) -> Tuple[Optional[Entity], str]:
+        """Sync a markdown file with full processing.
+
+        Args:
+            path: Path to markdown file
+            new: Whether this is a new file
 
+        Returns:
+            Tuple of (entity, checksum)
+        """
         # Parse markdown first to get any existing permalink
+        logger.debug("Parsing markdown file", path=path)
         entity_markdown = await self.entity_parser.parse_file(path)
 
         # Resolve permalink - this handles all the cases including conflicts
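
The behavioral change in `sync_file` above is the error contract: a failure is now logged with `logger.exception` and surfaces as `(None, None)` instead of re-raising, so one unreadable file no longer aborts an entire sync run. A caller-side sketch of what that contract implies; both `FakeSyncService` and `sync_all` are hypothetical:

```python
# Hypothetical caller showing the new (None, None) failure contract:
# failed files are collected and skipped rather than aborting the run.
import asyncio


class FakeSyncService:
    """Stand-in for SyncService; returns (None, None) for one broken path."""

    async def sync_file(self, path: str, new: bool = True):
        return (None, None) if path == "broken.md" else (object(), "abc123")


async def sync_all(service, paths):
    failed = []
    for path in paths:
        entity, checksum = await service.sync_file(path, new=True)
        if entity is None:  # sync_file already logged the failure
            failed.append(path)
    return failed


print(asyncio.run(sync_all(FakeSyncService(), ["a.md", "broken.md", "b.md"])))
# -> ['broken.md']
```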
@@ -192,7 +334,13 @@
 
         # If permalink changed, update the file
         if permalink != entity_markdown.frontmatter.permalink:
-            logger.info(f"Updating permalink in {path}: {permalink}")
+            logger.info(
+                "Updating permalink",
+                path=path,
+                old_permalink=entity_markdown.frontmatter.permalink,
+                new_permalink=permalink,
+            )
+
             entity_markdown.frontmatter.metadata["permalink"] = permalink
             checksum = await self.file_service.update_frontmatter(path, {"permalink": permalink})
         else:
@@ -201,12 +349,14 @@
         # if the file is new, create an entity
         if new:
             # Create entity with final permalink
-            logger.debug(f"Creating new entity from markdown: {path}")
+            logger.debug("Creating new entity from markdown", path=path, permalink=permalink)
+
             await self.entity_service.create_entity_from_markdown(Path(path), entity_markdown)
 
         # otherwise we need to update the entity and observations
         else:
-            logger.debug(f"Updating entity from markdown: {path}")
+            logger.debug("Updating entity from markdown", path=path, permalink=permalink)
+
             await self.entity_service.update_entity_and_observations(Path(path), entity_markdown)
 
         # Update relations and search index
@@ -214,11 +364,27 @@
 
         # set checksum
         await self.entity_repository.update(entity.id, {"checksum": checksum})
+
+        logger.debug(
+            "Markdown sync completed",
+            path=path,
+            entity_id=entity.id,
+            observation_count=len(entity.observations),
+            relation_count=len(entity.relations),
+        )
+
         return entity, checksum
 
-    async def sync_regular_file(self, path: str, new: bool = True) -> Tuple[Entity, str]:
-        """Sync a non-markdown file with basic tracking."""
+    async def sync_regular_file(self, path: str, new: bool = True) -> Tuple[Optional[Entity], str]:
+        """Sync a non-markdown file with basic tracking.
 
+        Args:
+            path: Path to file
+            new: Whether this is a new file
+
+        Returns:
+            Tuple of (entity, checksum)
+        """
         checksum = await self.file_service.compute_checksum(path)
         if new:
             # Generate permalink from path
@@ -247,11 +413,18 @@
             return entity, checksum
         else:
             entity = await self.entity_repository.get_by_file_path(path)
-            assert entity is not None, "entity should not be None for existing file"
+            if entity is None:  # pragma: no cover
+                logger.error("Entity not found for existing file", path=path)
+                raise ValueError(f"Entity not found for existing file: {path}")
+
             updated = await self.entity_repository.update(
                 entity.id, {"file_path": path, "checksum": checksum}
             )
-            assert updated is not None, "entity should be updated"
+
+            if updated is None:  # pragma: no cover
+                logger.error("Failed to update entity", entity_id=entity.id, path=path)
+                raise ValueError(f"Failed to update entity with ID {entity.id}")
+
             return updated, checksum
 
     async def handle_delete(self, file_path: str):
@@ -260,7 +433,12 @@
         # First get entity to get permalink before deletion
         entity = await self.entity_repository.get_by_file_path(file_path)
         if entity:
-            logger.debug(f"Deleting entity and cleaning up search index: {file_path}")
+            logger.info(
+                "Deleting entity",
+                file_path=file_path,
+                entity_id=entity.id,
+                permalink=entity.permalink,
+            )
 
             # Delete from db (this cascades to observations/relations)
             await self.entity_service.delete_entity_by_file_path(file_path)
@@ -271,7 +449,14 @@
                 + [o.permalink for o in entity.observations]
                 + [r.permalink for r in entity.relations]
             )
-            logger.debug(f"Deleting from search index: {permalinks}")
+
+            logger.debug(
+                "Cleaning up search index",
+                entity_id=entity.id,
+                file_path=file_path,
+                index_entries=len(permalinks),
+            )
+
             for permalink in permalinks:
                 if permalink:
                     await self.search_service.delete_by_permalink(permalink)
@@ -279,12 +464,30 @@
                 await self.search_service.delete_by_entity_id(entity.id)
 
     async def handle_move(self, old_path, new_path):
-        logger.debug(f"Moving entity: {old_path} -> {new_path}")
+        logger.info("Moving entity", old_path=old_path, new_path=new_path)
+
         entity = await self.entity_repository.get_by_file_path(old_path)
         if entity:
            # Update file_path but keep the same permalink for link stability
            updated = await self.entity_repository.update(entity.id, {"file_path": new_path})
-            assert updated is not None, "entity should be updated"
+
+            if updated is None:  # pragma: no cover
+                logger.error(
+                    "Failed to update entity path",
+                    entity_id=entity.id,
+                    old_path=old_path,
+                    new_path=new_path,
+                )
+                raise ValueError(f"Failed to update entity path for ID {entity.id}")
+
+            logger.debug(
+                "Entity path updated",
+                entity_id=entity.id,
+                permalink=entity.permalink,
+                old_path=old_path,
+                new_path=new_path,
+            )
+
             # update search index
             await self.search_service.index_entity(updated)
 
@@ -292,22 +495,44 @@
         """Try to resolve any unresolved relations"""
 
         unresolved_relations = await self.relation_repository.find_unresolved_relations()
-        logger.debug(f"Attempting to resolve {len(unresolved_relations)} forward references")
+
+        logger.info("Resolving forward references", count=len(unresolved_relations))
+
         for relation in unresolved_relations:
+            logger.debug(
+                "Attempting to resolve relation",
+                relation_id=relation.id,
+                from_id=relation.from_id,
+                to_name=relation.to_name,
+            )
+
             resolved_entity = await self.entity_service.link_resolver.resolve_link(relation.to_name)
 
             # ignore reference to self
             if resolved_entity and resolved_entity.id != relation.from_id:
                 logger.debug(
-                    f"Resolved forward reference: {relation.to_name} -> {resolved_entity.title}"
-                )
-                await self.relation_repository.update(
-                    relation.id,
-                    {
-                        "to_id": resolved_entity.id,
-                        "to_name": resolved_entity.title,
-                    },
+                    "Resolved forward reference",
+                    relation_id=relation.id,
+                    from_id=relation.from_id,
+                    to_name=relation.to_name,
+                    resolved_id=resolved_entity.id,
+                    resolved_title=resolved_entity.title,
                 )
+                try:
+                    await self.relation_repository.update(
+                        relation.id,
+                        {
+                            "to_id": resolved_entity.id,
+                            "to_name": resolved_entity.title,
+                        },
+                    )
+                except IntegrityError:  # pragma: no cover
+                    logger.debug(
+                        "Ignoring duplicate relation",
+                        relation_id=relation.id,
+                        from_id=relation.from_id,
+                        to_name=relation.to_name,
+                    )
 
                 # update search index
                 await self.search_service.index_entity(resolved_entity)
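
`resolve_relations` above now tolerates the case where resolving a forward reference produces a relation that already exists and trips a uniqueness constraint: the `IntegrityError` is swallowed as "already resolved" instead of crashing the sync. A minimal stdlib-sqlite sketch of that failure mode; the table layout is illustrative, not the package's actual schema:

```python
# Minimal sketch of the "ignore duplicate on resolve" pattern: updating a row
# so that it collides with an existing unique key raises IntegrityError,
# which is caught and treated as "already resolved". Schema is illustrative.
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE relation (from_id INT, to_id INT, UNIQUE(from_id, to_id))")
conn.execute("INSERT INTO relation VALUES (1, 2), (1, NULL)")  # resolved + unresolved

try:
    # Resolving the second row to to_id=2 would duplicate the (1, 2) key.
    conn.execute("UPDATE relation SET to_id = 2 WHERE to_id IS NULL")
except sqlite3.IntegrityError:
    print("duplicate relation ignored")
```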
@@ -322,8 +547,9 @@
         Returns:
             ScanResult containing found files and any errors
         """
+        start_time = time.time()
 
-        logger.debug(f"Scanning directory: {directory}")
+        logger.debug("Scanning directory", directory=str(directory))
         result = ScanResult()
 
         for root, dirnames, filenames in os.walk(str(directory)):
@@ -340,6 +566,15 @@
                 checksum = await self.file_service.compute_checksum(rel_path)
                 result.files[rel_path] = checksum
                 result.checksums[checksum] = rel_path
-                logger.debug(f"Found file: {rel_path} with checksum: {checksum}")
+
+                logger.debug("Found file", path=rel_path, checksum=checksum)
+
+        duration_ms = int((time.time() - start_time) * 1000)
+        logger.debug(
+            "Directory scan completed",
+            directory=str(directory),
+            files_found=len(result.files),
+            duration_ms=duration_ms,
+        )
 
         return result
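
Finally, the scan gains the same `duration_ms` instrumentation as `sync()`. A self-contained sketch of the scan loop's overall shape (walk, checksum, log timing); `compute_checksum` here is a guess at what `FileService.compute_checksum` does, not its actual implementation:

```python
# Sketch of the scan loop's shape: walk a directory, checksum each file, and
# log how long the scan took. compute_checksum is a hypothetical stand-in.
import hashlib
import os
import tempfile
import time
from pathlib import Path

from loguru import logger


def compute_checksum(path: Path) -> str:
    # A guess at FileService.compute_checksum; the real algorithm may differ.
    return hashlib.sha256(path.read_bytes()).hexdigest()


def scan(directory: Path) -> dict[str, str]:
    start_time = time.time()
    files: dict[str, str] = {}
    for root, _dirnames, filenames in os.walk(str(directory)):
        for filename in filenames:
            full_path = Path(root) / filename
            rel_path = str(full_path.relative_to(directory))
            files[rel_path] = compute_checksum(full_path)
    duration_ms = int((time.time() - start_time) * 1000)
    logger.debug(
        "Directory scan completed",
        directory=str(directory),
        files_found=len(files),
        duration_ms=duration_ms,
    )
    return files


with tempfile.TemporaryDirectory() as tmp:
    (Path(tmp) / "note.md").write_text("# hello")
    print(scan(Path(tmp)))
```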