basic-memory 0.2.12__py3-none-any.whl → 0.16.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of basic-memory might be problematic.

Files changed (149)
  1. basic_memory/__init__.py +5 -1
  2. basic_memory/alembic/alembic.ini +119 -0
  3. basic_memory/alembic/env.py +27 -3
  4. basic_memory/alembic/migrations.py +4 -9
  5. basic_memory/alembic/versions/502b60eaa905_remove_required_from_entity_permalink.py +51 -0
  6. basic_memory/alembic/versions/5fe1ab1ccebe_add_projects_table.py +108 -0
  7. basic_memory/alembic/versions/647e7a75e2cd_project_constraint_fix.py +104 -0
  8. basic_memory/alembic/versions/9d9c1cb7d8f5_add_mtime_and_size_columns_to_entity_.py +49 -0
  9. basic_memory/alembic/versions/a1b2c3d4e5f6_fix_project_foreign_keys.py +49 -0
  10. basic_memory/alembic/versions/b3c3938bacdb_relation_to_name_unique_index.py +44 -0
  11. basic_memory/alembic/versions/cc7172b46608_update_search_index_schema.py +100 -0
  12. basic_memory/alembic/versions/e7e1f4367280_add_scan_watermark_tracking_to_project.py +37 -0
  13. basic_memory/api/app.py +63 -31
  14. basic_memory/api/routers/__init__.py +4 -1
  15. basic_memory/api/routers/directory_router.py +84 -0
  16. basic_memory/api/routers/importer_router.py +152 -0
  17. basic_memory/api/routers/knowledge_router.py +165 -28
  18. basic_memory/api/routers/management_router.py +80 -0
  19. basic_memory/api/routers/memory_router.py +28 -67
  20. basic_memory/api/routers/project_router.py +406 -0
  21. basic_memory/api/routers/prompt_router.py +260 -0
  22. basic_memory/api/routers/resource_router.py +219 -14
  23. basic_memory/api/routers/search_router.py +21 -13
  24. basic_memory/api/routers/utils.py +130 -0
  25. basic_memory/api/template_loader.py +292 -0
  26. basic_memory/cli/app.py +52 -1
  27. basic_memory/cli/auth.py +277 -0
  28. basic_memory/cli/commands/__init__.py +13 -2
  29. basic_memory/cli/commands/cloud/__init__.py +6 -0
  30. basic_memory/cli/commands/cloud/api_client.py +112 -0
  31. basic_memory/cli/commands/cloud/bisync_commands.py +110 -0
  32. basic_memory/cli/commands/cloud/cloud_utils.py +101 -0
  33. basic_memory/cli/commands/cloud/core_commands.py +195 -0
  34. basic_memory/cli/commands/cloud/rclone_commands.py +301 -0
  35. basic_memory/cli/commands/cloud/rclone_config.py +110 -0
  36. basic_memory/cli/commands/cloud/rclone_installer.py +249 -0
  37. basic_memory/cli/commands/cloud/upload.py +233 -0
  38. basic_memory/cli/commands/cloud/upload_command.py +124 -0
  39. basic_memory/cli/commands/command_utils.py +51 -0
  40. basic_memory/cli/commands/db.py +26 -7
  41. basic_memory/cli/commands/import_chatgpt.py +83 -0
  42. basic_memory/cli/commands/import_claude_conversations.py +86 -0
  43. basic_memory/cli/commands/import_claude_projects.py +85 -0
  44. basic_memory/cli/commands/import_memory_json.py +35 -92
  45. basic_memory/cli/commands/mcp.py +84 -10
  46. basic_memory/cli/commands/project.py +876 -0
  47. basic_memory/cli/commands/status.py +47 -30
  48. basic_memory/cli/commands/tool.py +341 -0
  49. basic_memory/cli/main.py +13 -6
  50. basic_memory/config.py +481 -22
  51. basic_memory/db.py +192 -32
  52. basic_memory/deps.py +252 -22
  53. basic_memory/file_utils.py +113 -58
  54. basic_memory/ignore_utils.py +297 -0
  55. basic_memory/importers/__init__.py +27 -0
  56. basic_memory/importers/base.py +79 -0
  57. basic_memory/importers/chatgpt_importer.py +232 -0
  58. basic_memory/importers/claude_conversations_importer.py +177 -0
  59. basic_memory/importers/claude_projects_importer.py +148 -0
  60. basic_memory/importers/memory_json_importer.py +108 -0
  61. basic_memory/importers/utils.py +58 -0
  62. basic_memory/markdown/entity_parser.py +143 -23
  63. basic_memory/markdown/markdown_processor.py +3 -3
  64. basic_memory/markdown/plugins.py +39 -21
  65. basic_memory/markdown/schemas.py +1 -1
  66. basic_memory/markdown/utils.py +28 -13
  67. basic_memory/mcp/async_client.py +134 -4
  68. basic_memory/mcp/project_context.py +141 -0
  69. basic_memory/mcp/prompts/__init__.py +19 -0
  70. basic_memory/mcp/prompts/ai_assistant_guide.py +70 -0
  71. basic_memory/mcp/prompts/continue_conversation.py +62 -0
  72. basic_memory/mcp/prompts/recent_activity.py +188 -0
  73. basic_memory/mcp/prompts/search.py +57 -0
  74. basic_memory/mcp/prompts/utils.py +162 -0
  75. basic_memory/mcp/resources/ai_assistant_guide.md +283 -0
  76. basic_memory/mcp/resources/project_info.py +71 -0
  77. basic_memory/mcp/server.py +7 -13
  78. basic_memory/mcp/tools/__init__.py +33 -21
  79. basic_memory/mcp/tools/build_context.py +120 -0
  80. basic_memory/mcp/tools/canvas.py +130 -0
  81. basic_memory/mcp/tools/chatgpt_tools.py +187 -0
  82. basic_memory/mcp/tools/delete_note.py +225 -0
  83. basic_memory/mcp/tools/edit_note.py +320 -0
  84. basic_memory/mcp/tools/list_directory.py +167 -0
  85. basic_memory/mcp/tools/move_note.py +545 -0
  86. basic_memory/mcp/tools/project_management.py +200 -0
  87. basic_memory/mcp/tools/read_content.py +271 -0
  88. basic_memory/mcp/tools/read_note.py +255 -0
  89. basic_memory/mcp/tools/recent_activity.py +534 -0
  90. basic_memory/mcp/tools/search.py +369 -14
  91. basic_memory/mcp/tools/utils.py +374 -16
  92. basic_memory/mcp/tools/view_note.py +77 -0
  93. basic_memory/mcp/tools/write_note.py +207 -0
  94. basic_memory/models/__init__.py +3 -2
  95. basic_memory/models/knowledge.py +67 -15
  96. basic_memory/models/project.py +87 -0
  97. basic_memory/models/search.py +10 -6
  98. basic_memory/repository/__init__.py +2 -0
  99. basic_memory/repository/entity_repository.py +229 -7
  100. basic_memory/repository/observation_repository.py +35 -3
  101. basic_memory/repository/project_info_repository.py +10 -0
  102. basic_memory/repository/project_repository.py +103 -0
  103. basic_memory/repository/relation_repository.py +21 -2
  104. basic_memory/repository/repository.py +147 -29
  105. basic_memory/repository/search_repository.py +437 -59
  106. basic_memory/schemas/__init__.py +22 -9
  107. basic_memory/schemas/base.py +97 -8
  108. basic_memory/schemas/cloud.py +50 -0
  109. basic_memory/schemas/directory.py +30 -0
  110. basic_memory/schemas/importer.py +35 -0
  111. basic_memory/schemas/memory.py +188 -23
  112. basic_memory/schemas/project_info.py +211 -0
  113. basic_memory/schemas/prompt.py +90 -0
  114. basic_memory/schemas/request.py +57 -3
  115. basic_memory/schemas/response.py +9 -1
  116. basic_memory/schemas/search.py +33 -35
  117. basic_memory/schemas/sync_report.py +72 -0
  118. basic_memory/services/__init__.py +2 -1
  119. basic_memory/services/context_service.py +251 -106
  120. basic_memory/services/directory_service.py +295 -0
  121. basic_memory/services/entity_service.py +595 -60
  122. basic_memory/services/exceptions.py +21 -0
  123. basic_memory/services/file_service.py +284 -30
  124. basic_memory/services/initialization.py +191 -0
  125. basic_memory/services/link_resolver.py +50 -56
  126. basic_memory/services/project_service.py +863 -0
  127. basic_memory/services/search_service.py +172 -34
  128. basic_memory/sync/__init__.py +3 -2
  129. basic_memory/sync/background_sync.py +26 -0
  130. basic_memory/sync/sync_service.py +1176 -96
  131. basic_memory/sync/watch_service.py +412 -135
  132. basic_memory/templates/prompts/continue_conversation.hbs +110 -0
  133. basic_memory/templates/prompts/search.hbs +101 -0
  134. basic_memory/utils.py +388 -28
  135. basic_memory-0.16.1.dist-info/METADATA +493 -0
  136. basic_memory-0.16.1.dist-info/RECORD +148 -0
  137. {basic_memory-0.2.12.dist-info → basic_memory-0.16.1.dist-info}/entry_points.txt +1 -0
  138. basic_memory/alembic/README +0 -1
  139. basic_memory/cli/commands/sync.py +0 -203
  140. basic_memory/mcp/tools/knowledge.py +0 -56
  141. basic_memory/mcp/tools/memory.py +0 -151
  142. basic_memory/mcp/tools/notes.py +0 -122
  143. basic_memory/schemas/discovery.py +0 -28
  144. basic_memory/sync/file_change_scanner.py +0 -158
  145. basic_memory/sync/utils.py +0 -34
  146. basic_memory-0.2.12.dist-info/METADATA +0 -291
  147. basic_memory-0.2.12.dist-info/RECORD +0 -78
  148. {basic_memory-0.2.12.dist-info → basic_memory-0.16.1.dist-info}/WHEEL +0 -0
  149. {basic_memory-0.2.12.dist-info → basic_memory-0.16.1.dist-info}/licenses/LICENSE +0 -0
basic_memory/sync/watch_service.py
@@ -1,22 +1,22 @@
  """Watch service for Basic Memory."""

- import dataclasses
-
- from loguru import logger
- from pydantic import BaseModel
+ import asyncio
+ import os
+ from collections import defaultdict
  from datetime import datetime
  from pathlib import Path
- from typing import List, Optional
+ from typing import List, Optional, Set, Sequence

+ from basic_memory.config import BasicMemoryConfig, WATCH_STATUS_JSON
+ from basic_memory.ignore_utils import load_gitignore_patterns, should_ignore_path
+ from basic_memory.models import Project
+ from basic_memory.repository import ProjectRepository
+ from loguru import logger
+ from pydantic import BaseModel
  from rich.console import Console
- from rich.live import Live
- from rich.table import Table
- from watchfiles import awatch, Change
- import os
-
- from basic_memory.config import ProjectConfig
- from basic_memory.sync.sync_service import SyncService
- from basic_memory.services.file_service import FileService
+ from watchfiles import awatch
+ from watchfiles.main import FileChange, Change
+ import time


  class WatchEvent(BaseModel):
@@ -31,8 +31,8 @@ class WatchEvent(BaseModel):
  class WatchServiceState(BaseModel):
      # Service status
      running: bool = False
-     start_time: datetime = dataclasses.field(default_factory=datetime.now)
-     pid: int = dataclasses.field(default_factory=os.getpid)
+     start_time: datetime = datetime.now()  # Use directly with Pydantic model
+     pid: int = os.getpid()  # Use directly with Pydantic model

      # Stats
      error_count: int = 0
@@ -43,7 +43,7 @@ class WatchServiceState(BaseModel):
      synced_files: int = 0

      # Recent activity
-     recent_events: List[WatchEvent] = dataclasses.field(default_factory=list)
+     recent_events: List[WatchEvent] = []  # Use directly with Pydantic model

      def add_event(
          self,
@@ -72,147 +72,424 @@ class WatchServiceState(BaseModel):


  class WatchService:
-     def __init__(self, sync_service: SyncService, file_service: FileService, config: ProjectConfig):
-         self.sync_service = sync_service
-         self.file_service = file_service
-         self.config = config
+     def __init__(
+         self,
+         app_config: BasicMemoryConfig,
+         project_repository: ProjectRepository,
+         quiet: bool = False,
+     ):
+         self.app_config = app_config
+         self.project_repository = project_repository
          self.state = WatchServiceState()
-         self.status_path = config.home / ".basic-memory" / "watch-status.json"
+         self.status_path = Path.home() / ".basic-memory" / WATCH_STATUS_JSON
          self.status_path.parent.mkdir(parents=True, exist_ok=True)
-         self.console = Console()
-
-     def generate_table(self) -> Table:
-         """Generate status display table"""
-         table = Table()
-
-         # Add status row
-         table.add_column("Status", style="cyan")
-         table.add_column("Last Scan", style="cyan")
-         table.add_column("Files", style="cyan")
-         table.add_column("Errors", style="red")
-
-         # Add main status row
-         table.add_row(
-             "✓ Running" if self.state.running else "✗ Stopped",
-             self.state.last_scan.strftime("%H:%M:%S") if self.state.last_scan else "-",
-             str(self.state.synced_files),
-             f"{self.state.error_count} ({self.state.last_error.strftime('%H:%M:%S') if self.state.last_error else 'none'})",
-         )
-
-         if self.state.recent_events:
-             # Add recent events
-             table.add_section()
-             table.add_row("Recent Events", "", "", "")
-
-             for event in self.state.recent_events[:5]:  # Show last 5 events
-                 color = {
-                     "new": "green",
-                     "modified": "yellow",
-                     "moved": "blue",
-                     "deleted": "red",
-                     "error": "red",
-                 }.get(event.action, "white")
-
-                 icon = {
-                     "new": "✚",
-                     "modified": "✎",
-                     "moved": "→",
-                     "deleted": "✖",
-                     "error": "!",
-                 }.get(event.action, "*")
-
-                 table.add_row(
-                     f"[{color}]{icon} {event.action}[/{color}]",
-                     event.timestamp.strftime("%H:%M:%S"),
-                     f"[{color}]{event.path}[/{color}]",
-                     f"[dim]{event.checksum[:8] if event.checksum else ''}[/dim]",
-                 )
-
-         return table
-
-     async def run(self, console_status: bool = False):  # pragma: no cover
+         self._ignore_patterns_cache: dict[Path, Set[str]] = {}
+
+         # quiet mode for mcp so it doesn't mess up stdout
+         self.console = Console(quiet=quiet)
+
+     async def _schedule_restart(self, stop_event: asyncio.Event):
+         """Schedule a restart of the watch service after the configured interval."""
+         await asyncio.sleep(self.app_config.watch_project_reload_interval)
+         stop_event.set()
+
+     def _get_ignore_patterns(self, project_path: Path) -> Set[str]:
+         """Get or load ignore patterns for a project path."""
+         if project_path not in self._ignore_patterns_cache:
+             self._ignore_patterns_cache[project_path] = load_gitignore_patterns(project_path)
+         return self._ignore_patterns_cache[project_path]
+
+     async def _watch_projects_cycle(self, projects: Sequence[Project], stop_event: asyncio.Event):
+         """Run one cycle of watching the given projects until stop_event is set."""
+         project_paths = [project.path for project in projects]
+
+         async for changes in awatch(
+             *project_paths,
+             debounce=self.app_config.sync_delay,
+             watch_filter=self.filter_changes,
+             recursive=True,
+             stop_event=stop_event,
+         ):
+             # group changes by project and filter using ignore patterns
+             project_changes = defaultdict(list)
+             for change, path in changes:
+                 for project in projects:
+                     if self.is_project_path(project, path):
+                         # Check if the file should be ignored based on gitignore patterns
+                         project_path = Path(project.path)
+                         file_path = Path(path)
+                         ignore_patterns = self._get_ignore_patterns(project_path)
+
+                         if should_ignore_path(file_path, project_path, ignore_patterns):
+                             logger.trace(
+                                 f"Ignoring watched file change: {file_path.relative_to(project_path)}"
+                             )
+                             continue
+
+                         project_changes[project].append((change, path))
+                         break
+
+             # create coroutines to handle changes
+             change_handlers = [
+                 self.handle_changes(project, changes)  # pyright: ignore
+                 for project, changes in project_changes.items()
+             ]
+
+             # process changes
+             await asyncio.gather(*change_handlers)
+
+     async def run(self):  # pragma: no cover
          """Watch for file changes and sync them"""
-         logger.info("Watching for sync changes")
+
          self.state.running = True
          self.state.start_time = datetime.now()
          await self.write_status()

-         if console_status:
-             with Live(self.generate_table(), refresh_per_second=4, console=self.console) as live:
-                 try:
-                     async for changes in awatch(
-                         self.config.home,
-                         watch_filter=self.filter_changes,
-                         debounce=self.config.sync_delay,
-                         recursive=True,
-                     ):
-                         # Process changes
-                         await self.handle_changes(self.config.home)
-                         # Update display
-                         live.update(self.generate_table())
+         logger.info(
+             "Watch service started",
+             f"debounce_ms={self.app_config.sync_delay}",
+             f"pid={os.getpid()}",
+         )
+
+         try:
+             while self.state.running:
+                 # Clear ignore patterns cache to pick up any .gitignore changes
+                 self._ignore_patterns_cache.clear()

+                 # Reload projects to catch any new/removed projects
+                 projects = await self.project_repository.get_active_projects()
+
+                 project_paths = [project.path for project in projects]
+                 logger.debug(f"Starting watch cycle for directories: {project_paths}")
+
+                 # Create stop event for this watch cycle
+                 stop_event = asyncio.Event()
+
+                 # Schedule restart after configured interval to reload projects
+                 timer_task = asyncio.create_task(self._schedule_restart(stop_event))
+
+                 try:
+                     await self._watch_projects_cycle(projects, stop_event)
                  except Exception as e:
+                     logger.exception("Watch service error during cycle", error=str(e))
                      self.state.record_error(str(e))
                      await self.write_status()
-                     raise
+                     # Continue to next cycle instead of exiting
+                     await asyncio.sleep(5)  # Brief pause before retry
                  finally:
-                     self.state.running = False
-                     await self.write_status()
+                     # Cancel timer task if it's still running
+                     if not timer_task.done():
+                         timer_task.cancel()
+                         try:
+                             await timer_task
+                         except asyncio.CancelledError:
+                             pass
+
+         except Exception as e:
+             logger.exception("Watch service error", error=str(e))
+             self.state.record_error(str(e))
+             await self.write_status()
+             raise
+
+         finally:
+             logger.info(
+                 "Watch service stopped",
+                 f"runtime_seconds={int((datetime.now() - self.state.start_time).total_seconds())}",
+             )
+
+             self.state.running = False
+             await self.write_status()
+
+     def filter_changes(self, change: Change, path: str) -> bool:  # pragma: no cover
+         """Filter to only watch non-hidden files and directories.
+
+         Returns:
+             True if the file should be watched, False if it should be ignored
+         """
+
+         # Skip hidden directories and files
+         path_parts = Path(path).parts
+         for part in path_parts:
+             if part.startswith("."):
+                 return False
+
+         # Skip temp files used in atomic operations
+         if path.endswith(".tmp"):
+             return False

-         else:
-             try:
-                 async for changes in awatch(
-                     self.config.home,
-                     watch_filter=self.filter_changes,
-                     debounce=self.config.sync_delay,
-                     recursive=True,
-                 ):
-                     # Process changes
-                     await self.handle_changes(self.config.home)
-                     # Update display
-
-             except Exception as e:
-                 self.state.record_error(str(e))
-                 await self.write_status()
-                 raise
-             finally:
-                 self.state.running = False
-                 await self.write_status()
+         return True

      async def write_status(self):
          """Write current state to status file"""
          self.status_path.write_text(WatchServiceState.model_dump_json(self.state, indent=2))

-     def filter_changes(self, change: Change, path: str) -> bool:
-         """Filter to only watch markdown files"""
-         return path.endswith(".md") and not Path(path).name.startswith(".")
+     def is_project_path(self, project: Project, path):
+         """
+         Checks if path is a subdirectory or file within a project
+         """
+         project_path = Path(project.path).resolve()
+         sub_path = Path(path).resolve()
+         return project_path in sub_path.parents

-     async def handle_changes(self, directory: Path):
+     async def handle_changes(self, project: Project, changes: Set[FileChange]) -> None:
          """Process a batch of file changes"""
+         # avoid circular imports
+         from basic_memory.sync.sync_service import get_sync_service
+
+         # Check if project still exists in configuration before processing
+         # This prevents deleted projects from being recreated by background sync
+         from basic_memory.config import ConfigManager
+
+         config_manager = ConfigManager()
+         if (
+             project.name not in config_manager.projects
+             and project.permalink not in config_manager.projects
+         ):
+             logger.info(
+                 f"Skipping sync for deleted project: {project.name}, change_count={len(changes)}"
+             )
+             return
+
+         sync_service = await get_sync_service(project)
+         file_service = sync_service.file_service
+
+         start_time = time.time()
+         directory = Path(project.path).resolve()
+         logger.info(
+             f"Processing project: {project.name} changes, change_count={len(changes)}, directory={directory}"
+         )
+
+         # Group changes by type
+         adds: List[str] = []
+         deletes: List[str] = []
+         modifies: List[str] = []
+
+         for change, path in changes:
+             # convert to relative path
+             relative_path = Path(path).relative_to(directory).as_posix()

-         logger.debug(f"handling change in directory: {directory} ...")
-         # Process changes with timeout
-         report = await self.sync_service.sync(directory)
+             # Skip .tmp files - they're temporary and shouldn't be synced
+             if relative_path.endswith(".tmp"):
+                 continue
+
+             if change == Change.added:
+                 adds.append(relative_path)
+             elif change == Change.deleted:
+                 deletes.append(relative_path)
+             elif change == Change.modified:
+                 modifies.append(relative_path)
+
+         logger.debug(
+             f"Grouped file changes, added={len(adds)}, deleted={len(deletes)}, modified={len(modifies)}"
+         )
+
+         # because of our atomic writes on updates, an add may be an existing file
+         for added_path in adds:  # pragma: no cover TODO add test
+             entity = await sync_service.entity_repository.get_by_file_path(added_path)
+             if entity is not None:
+                 logger.debug(f"Existing file will be processed as modified, path={added_path}")
+                 adds.remove(added_path)
+                 modifies.append(added_path)
+
+         # Track processed files to avoid duplicates
+         processed: Set[str] = set()
+
+         # First handle potential moves
+         for added_path in adds:
+             if added_path in processed:
+                 continue  # pragma: no cover
+
+             # Skip directories for added paths
+             # We don't need to process directories, only the files inside them
+             # This prevents errors when trying to compute checksums or read directories as files
+             added_full_path = directory / added_path
+             if not added_full_path.exists() or added_full_path.is_dir():
+                 logger.debug("Skipping non-existent or directory path", path=added_path)
+                 processed.add(added_path)
+                 continue
+
+             for deleted_path in deletes:
+                 if deleted_path in processed:
+                     continue  # pragma: no cover
+
+                 # Skip directories for deleted paths (based on entity type in db)
+                 deleted_entity = await sync_service.entity_repository.get_by_file_path(deleted_path)
+                 if deleted_entity is None:
+                     # If this was a directory, it wouldn't have an entity
+                     logger.debug("Skipping unknown path for move detection", path=deleted_path)
+                     continue
+
+                 if added_path != deleted_path:
+                     # Compare checksums to detect moves
+                     try:
+                         added_checksum = await file_service.compute_checksum(added_path)
+
+                         if deleted_entity and deleted_entity.checksum == added_checksum:
+                             await sync_service.handle_move(deleted_path, added_path)
+                             self.state.add_event(
+                                 path=f"{deleted_path} -> {added_path}",
+                                 action="moved",
+                                 status="success",
+                             )
+                             self.console.print(f"[blue]→[/blue] {deleted_path} → {added_path}")
+                             logger.info(f"move: {deleted_path} -> {added_path}")
+                             processed.add(added_path)
+                             processed.add(deleted_path)
+                             break
+                     except Exception as e:  # pragma: no cover
+                         logger.warning(
+                             "Error checking for move",
+                             f"old_path={deleted_path}",
+                             f"new_path={added_path}",
+                             f"error={str(e)}",
+                         )
+
+         # Handle remaining changes - group them by type for concise output
+         moved_count = len([p for p in processed if p in deletes or p in adds])
+         delete_count = 0
+         add_count = 0
+         modify_count = 0
+
+         # Process deletes
+         for path in deletes:
+             if path not in processed:
+                 # Check if file still exists on disk (vim atomic write edge case)
+                 full_path = directory / path
+                 if full_path.exists() and full_path.is_file():
+                     # File still exists despite DELETE event - treat as modification
+                     logger.debug(
+                         "File exists despite DELETE event, treating as modification", path=path
+                     )
+                     entity, checksum = await sync_service.sync_file(path, new=False)
+                     self.state.add_event(
+                         path=path, action="modified", status="success", checksum=checksum
+                     )
+                     self.console.print(f"[yellow]✎[/yellow] {path} (atomic write)")
+                     logger.info(f"atomic write detected: {path}")
+                     processed.add(path)
+                     modify_count += 1
+                 else:
+                     # Check if this was a directory - skip if so
+                     # (we can't tell if the deleted path was a directory since it no longer exists,
+                     # so we check if there's an entity in the database for it)
+                     entity = await sync_service.entity_repository.get_by_file_path(path)
+                     if entity is None:
+                         # No entity means this was likely a directory - skip it
+                         logger.debug(
+                             f"Skipping deleted path with no entity (likely directory), path={path}"
+                         )
+                         processed.add(path)
+                         continue
+
+                     # File truly deleted
+                     logger.debug("Processing deleted file", path=path)
+                     await sync_service.handle_delete(path)
+                     self.state.add_event(path=path, action="deleted", status="success")
+                     self.console.print(f"[red]✕[/red] {path}")
+                     logger.info(f"deleted: {path}")
+                     processed.add(path)
+                     delete_count += 1
+
+         # Process adds
+         for path in adds:
+             if path not in processed:
+                 # Skip directories - only process files
+                 full_path = directory / path
+                 if not full_path.exists() or full_path.is_dir():
+                     logger.debug(
+                         f"Skipping non-existent or directory path, path={path}"
+                     )  # pragma: no cover
+                     processed.add(path)  # pragma: no cover
+                     continue  # pragma: no cover
+
+                 logger.debug(f"Processing new file, path={path}")
+                 entity, checksum = await sync_service.sync_file(path, new=True)
+                 if checksum:
+                     self.state.add_event(
+                         path=path, action="new", status="success", checksum=checksum
+                     )
+                     self.console.print(f"[green]✓[/green] {path}")
+                     logger.info(
+                         "new file processed",
+                         f"path={path}",
+                         f"checksum={checksum}",
+                     )
+                     processed.add(path)
+                     add_count += 1
+                 else:  # pragma: no cover
+                     logger.warning(f"Error syncing new file, path={path}")  # pragma: no cover
+                     self.console.print(
+                         f"[orange]?[/orange] Error syncing: {path}"
+                     )  # pragma: no cover
+
+         # Process modifies - detect repeats
+         last_modified_path = None
+         repeat_count = 0
+
+         for path in modifies:
+             if path not in processed:
+                 # Skip directories - only process files
+                 full_path = directory / path
+                 if not full_path.exists() or full_path.is_dir():
+                     logger.debug("Skipping non-existent or directory path", path=path)
+                     processed.add(path)
+                     continue
+
+                 logger.debug(f"Processing modified file: path={path}")
+                 entity, checksum = await sync_service.sync_file(path, new=False)
+                 self.state.add_event(
+                     path=path, action="modified", status="success", checksum=checksum
+                 )
+
+                 # Check if this is a repeat of the last modified file
+                 if path == last_modified_path:  # pragma: no cover
+                     repeat_count += 1  # pragma: no cover
+                     # Only show a message for the first repeat
+                     if repeat_count == 1:  # pragma: no cover
+                         self.console.print(
+                             f"[yellow]...[/yellow] Repeated changes to {path}"
+                         )  # pragma: no cover
+                 else:
+                     # haven't processed this file
+                     self.console.print(f"[yellow]✎[/yellow] {path}")
+                     logger.info(f"modified: {path}")
+                     last_modified_path = path
+                     repeat_count = 0
+                     modify_count += 1
+
+                 logger.debug(  # pragma: no cover
+                     "Modified file processed, "
+                     f"path={path} "
+                     f"entity_id={entity.id if entity else None} "
+                     f"checksum={checksum}",
+                 )
+                 processed.add(path)
+
+         # Add a concise summary instead of a divider
+         if processed:
+             changes = []  # pyright: ignore
+             if add_count > 0:
+                 changes.append(f"[green]{add_count} added[/green]")  # pyright: ignore
+             if modify_count > 0:
+                 changes.append(f"[yellow]{modify_count} modified[/yellow]")  # pyright: ignore
+             if moved_count > 0:
+                 changes.append(f"[blue]{moved_count} moved[/blue]")  # pyright: ignore
+             if delete_count > 0:
+                 changes.append(f"[red]{delete_count} deleted[/red]")  # pyright: ignore
+
+             if changes:
+                 self.console.print(f"{', '.join(changes)}", style="dim")  # pyright: ignore
+                 logger.info(f"changes: {len(changes)}")
+
+         duration_ms = int((time.time() - start_time) * 1000)
          self.state.last_scan = datetime.now()
-         self.state.synced_files = report.total
+         self.state.synced_files += len(processed)

-         # Update stats
-         for path in report.new:
-             self.state.add_event(
-                 path=path, action="new", status="success", checksum=report.checksums[path]
-             )
-         for path in report.modified:
-             self.state.add_event(
-                 path=path, action="modified", status="success", checksum=report.checksums[path]
-             )
-         for old_path, new_path in report.moves.items():
-             self.state.add_event(
-                 path=f"{old_path} -> {new_path}",
-                 action="moved",
-                 status="success",
-                 checksum=report.checksums[new_path],
-             )
-         for path in report.deleted:
-             self.state.add_event(path=path, action="deleted", status="success")
+         logger.info(
+             "File change processing completed, "
+             f"processed_files={len(processed)}, "
+             f"total_synced_files={self.state.synced_files}, "
+             f"duration_ms={duration_ms}"
+         )

          await self.write_status()
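
Note on the new handle_changes logic above: watchfiles reports a rename as a delete plus an add, and the service collapses the pair into a single "moved" event when the added file's checksum matches the checksum previously recorded for the deleted path. The following is a standalone sketch of that pairing pattern, using plain hashlib and hypothetical helper names (compute_checksum, detect_moves) rather than the package's actual file_service.compute_checksum and entity repository:

import hashlib
from pathlib import Path
from typing import Dict, List, Set, Tuple


def compute_checksum(path: Path) -> str:
    """Hash file contents so renames can be matched by identical content."""
    return hashlib.sha256(path.read_bytes()).hexdigest()


def detect_moves(
    root: Path, added: List[str], deleted_checksums: Dict[str, str]
) -> List[Tuple[str, str]]:
    """Pair each added path with a deleted path whose recorded checksum matches.

    deleted_checksums maps a deleted relative path to the checksum recorded
    before the file disappeared (the real service reads this from its entity
    repository; here it is just a dict supplied by the caller).
    """
    moves: List[Tuple[str, str]] = []
    claimed: Set[str] = set()
    for new_path in added:
        new_checksum = compute_checksum(root / new_path)
        for old_path, old_checksum in deleted_checksums.items():
            if old_path not in claimed and old_checksum == new_checksum:
                moves.append((old_path, new_path))  # delete + add collapses to a move
                claimed.add(old_path)
                break  # each added file pairs with at most one deleted file
    return moves

In the diff itself this pairing is done inline in handle_changes, with additional guards that skip directories, temporary files, and paths already marked as processed.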