basic-memory 0.14.3-py3-none-any.whl → 0.15.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (90)
  1. basic_memory/__init__.py +1 -1
  2. basic_memory/alembic/versions/a1b2c3d4e5f6_fix_project_foreign_keys.py +49 -0
  3. basic_memory/api/app.py +10 -4
  4. basic_memory/api/routers/knowledge_router.py +25 -8
  5. basic_memory/api/routers/project_router.py +99 -4
  6. basic_memory/api/routers/resource_router.py +3 -3
  7. basic_memory/cli/app.py +9 -28
  8. basic_memory/cli/auth.py +277 -0
  9. basic_memory/cli/commands/cloud/__init__.py +5 -0
  10. basic_memory/cli/commands/cloud/api_client.py +112 -0
  11. basic_memory/cli/commands/cloud/bisync_commands.py +818 -0
  12. basic_memory/cli/commands/cloud/core_commands.py +288 -0
  13. basic_memory/cli/commands/cloud/mount_commands.py +295 -0
  14. basic_memory/cli/commands/cloud/rclone_config.py +288 -0
  15. basic_memory/cli/commands/cloud/rclone_installer.py +198 -0
  16. basic_memory/cli/commands/command_utils.py +60 -0
  17. basic_memory/cli/commands/import_memory_json.py +0 -4
  18. basic_memory/cli/commands/mcp.py +16 -4
  19. basic_memory/cli/commands/project.py +141 -145
  20. basic_memory/cli/commands/status.py +34 -22
  21. basic_memory/cli/commands/sync.py +45 -228
  22. basic_memory/cli/commands/tool.py +87 -16
  23. basic_memory/cli/main.py +1 -0
  24. basic_memory/config.py +96 -20
  25. basic_memory/db.py +104 -3
  26. basic_memory/deps.py +20 -3
  27. basic_memory/file_utils.py +89 -0
  28. basic_memory/ignore_utils.py +295 -0
  29. basic_memory/importers/chatgpt_importer.py +1 -1
  30. basic_memory/importers/utils.py +2 -2
  31. basic_memory/markdown/entity_parser.py +2 -2
  32. basic_memory/markdown/markdown_processor.py +2 -2
  33. basic_memory/markdown/plugins.py +39 -21
  34. basic_memory/markdown/utils.py +1 -1
  35. basic_memory/mcp/async_client.py +22 -10
  36. basic_memory/mcp/project_context.py +141 -0
  37. basic_memory/mcp/prompts/ai_assistant_guide.py +49 -4
  38. basic_memory/mcp/prompts/continue_conversation.py +1 -1
  39. basic_memory/mcp/prompts/recent_activity.py +116 -32
  40. basic_memory/mcp/prompts/search.py +1 -1
  41. basic_memory/mcp/prompts/utils.py +11 -4
  42. basic_memory/mcp/resources/ai_assistant_guide.md +179 -41
  43. basic_memory/mcp/resources/project_info.py +20 -6
  44. basic_memory/mcp/server.py +0 -37
  45. basic_memory/mcp/tools/__init__.py +5 -6
  46. basic_memory/mcp/tools/build_context.py +39 -19
  47. basic_memory/mcp/tools/canvas.py +19 -8
  48. basic_memory/mcp/tools/chatgpt_tools.py +178 -0
  49. basic_memory/mcp/tools/delete_note.py +67 -34
  50. basic_memory/mcp/tools/edit_note.py +55 -39
  51. basic_memory/mcp/tools/headers.py +44 -0
  52. basic_memory/mcp/tools/list_directory.py +18 -8
  53. basic_memory/mcp/tools/move_note.py +119 -41
  54. basic_memory/mcp/tools/project_management.py +77 -229
  55. basic_memory/mcp/tools/read_content.py +28 -12
  56. basic_memory/mcp/tools/read_note.py +97 -57
  57. basic_memory/mcp/tools/recent_activity.py +441 -42
  58. basic_memory/mcp/tools/search.py +82 -70
  59. basic_memory/mcp/tools/sync_status.py +5 -4
  60. basic_memory/mcp/tools/utils.py +19 -0
  61. basic_memory/mcp/tools/view_note.py +31 -6
  62. basic_memory/mcp/tools/write_note.py +65 -14
  63. basic_memory/models/knowledge.py +19 -2
  64. basic_memory/models/project.py +6 -2
  65. basic_memory/repository/entity_repository.py +31 -84
  66. basic_memory/repository/project_repository.py +1 -1
  67. basic_memory/repository/relation_repository.py +13 -0
  68. basic_memory/repository/repository.py +2 -2
  69. basic_memory/repository/search_repository.py +9 -3
  70. basic_memory/schemas/__init__.py +6 -0
  71. basic_memory/schemas/base.py +70 -12
  72. basic_memory/schemas/cloud.py +46 -0
  73. basic_memory/schemas/memory.py +99 -18
  74. basic_memory/schemas/project_info.py +9 -10
  75. basic_memory/schemas/sync_report.py +48 -0
  76. basic_memory/services/context_service.py +35 -11
  77. basic_memory/services/directory_service.py +7 -0
  78. basic_memory/services/entity_service.py +82 -52
  79. basic_memory/services/initialization.py +30 -11
  80. basic_memory/services/project_service.py +23 -33
  81. basic_memory/sync/sync_service.py +148 -24
  82. basic_memory/sync/watch_service.py +128 -44
  83. basic_memory/utils.py +181 -109
  84. {basic_memory-0.14.3.dist-info → basic_memory-0.15.0.dist-info}/METADATA +26 -96
  85. basic_memory-0.15.0.dist-info/RECORD +147 -0
  86. basic_memory/mcp/project_session.py +0 -120
  87. basic_memory-0.14.3.dist-info/RECORD +0 -132
  88. {basic_memory-0.14.3.dist-info → basic_memory-0.15.0.dist-info}/WHEEL +0 -0
  89. {basic_memory-0.14.3.dist-info → basic_memory-0.15.0.dist-info}/entry_points.txt +0 -0
  90. {basic_memory-0.14.3.dist-info → basic_memory-0.15.0.dist-info}/licenses/LICENSE +0 -0
basic_memory/services/initialization.py

@@ -11,7 +11,10 @@ from loguru import logger
 
 from basic_memory import db
 from basic_memory.config import BasicMemoryConfig
-from basic_memory.repository import ProjectRepository
+from basic_memory.models import Project
+from basic_memory.repository import (
+    ProjectRepository,
+)
 
 
 async def initialize_database(app_config: BasicMemoryConfig) -> None:
@@ -101,18 +104,20 @@ async def initialize_file_sync(
     # Get active projects
     active_projects = await project_repository.get_active_projects()
 
-    # First, sync all projects sequentially
-    for project in active_projects:
+    # Start sync for all projects as background tasks (non-blocking)
+    async def sync_project_background(project: Project):
+        """Sync a single project in the background."""
         # avoid circular imports
-        from basic_memory.cli.commands.sync import get_sync_service
-
-        logger.info(f"Starting sync for project: {project.name}")
-        sync_service = await get_sync_service(project)
-        sync_dir = Path(project.path)
+        from basic_memory.sync.sync_service import get_sync_service
 
+        logger.info(f"Starting background sync for project: {project.name}")
         try:
+            # Create sync service
+            sync_service = await get_sync_service(project)
+
+            sync_dir = Path(project.path)
             await sync_service.sync(sync_dir, project_name=project.name)
-            logger.info(f"Sync completed successfully for project: {project.name}")
+            logger.info(f"Background sync completed successfully for project: {project.name}")
 
             # Mark project as watching for changes after successful sync
             from basic_memory.services.sync_status_service import sync_status_tracker
@@ -120,12 +125,19 @@ async def initialize_file_sync(
             sync_status_tracker.start_project_watch(project.name)
             logger.info(f"Project {project.name} is now watching for changes")
         except Exception as e:  # pragma: no cover
-            logger.error(f"Error syncing project {project.name}: {e}")
+            logger.error(f"Error in background sync for project {project.name}: {e}")
             # Mark sync as failed for this project
             from basic_memory.services.sync_status_service import sync_status_tracker
 
             sync_status_tracker.fail_project_sync(project.name, str(e))
-            # Continue with other projects even if one fails
+
+    # Create background tasks for all project syncs (non-blocking)
+    sync_tasks = [
+        asyncio.create_task(sync_project_background(project)) for project in active_projects
+    ]
+    logger.info(f"Created {len(sync_tasks)} background sync tasks")
+
+    # Don't await the tasks - let them run in background while we continue
 
     # Then start the watch service in the background
     logger.info("Starting watch service for all projects")
@@ -169,9 +181,16 @@ def ensure_initialization(app_config: BasicMemoryConfig) -> None:
     This is a wrapper for the async initialize_app function that can be
     called from synchronous code like CLI entry points.
 
+    No-op if app_config.cloud_mode == True. Cloud basic memory manages it's own projects
+
     Args:
         app_config: The Basic Memory project configuration
     """
+    # Skip initialization in cloud mode - cloud manages its own projects
+    if app_config.cloud_mode_enabled:
+        logger.debug("Skipping initialization in cloud mode - projects managed by cloud")
+        return
+
     try:
         result = asyncio.run(initialize_app(app_config))
         logger.info(f"Initialization completed successfully: result={result}")
basic_memory/services/project_service.py

@@ -21,6 +21,9 @@ from basic_memory.config import WATCH_STATUS_JSON, ConfigManager, get_project_co
 from basic_memory.utils import generate_permalink
 
 
+config = ConfigManager().config
+
+
 class ProjectService:
     """Service for managing Basic Memory projects."""
 
@@ -96,11 +99,16 @@ class ProjectService:
         Raises:
             ValueError: If the project already exists
         """
-        if not self.repository:  # pragma: no cover
-            raise ValueError("Repository is required for add_project")
-
-        # Resolve to absolute path
-        resolved_path = os.path.abspath(os.path.expanduser(path))
+        # in cloud mode, don't allow arbitrary paths.
+        if config.cloud_mode:
+            basic_memory_home = os.getenv("BASIC_MEMORY_HOME")
+            assert basic_memory_home is not None
+            base_path = Path(basic_memory_home)
+
+            # Resolve to absolute path
+            resolved_path = Path(os.path.abspath(os.path.expanduser(base_path / path))).as_posix()
+        else:
+            resolved_path = Path(os.path.abspath(os.path.expanduser(path))).as_posix()
 
         # First add to config file (this will validate the project doesn't exist)
         project_config = self.config_manager.add_project(name, resolved_path)
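A small illustrative sketch of the path handling above (not the package's code): in cloud mode the requested path is anchored under BASIC_MEMORY_HOME, and both branches normalize the result with Path(...).as_posix() so stored project paths use forward slashes on every platform.

    import os
    from pathlib import Path

    def resolve_project_path(path: str, cloud_mode: bool) -> str:
        """Illustrative only: mirrors the branch structure shown in the hunk above."""
        if cloud_mode:
            base_path = Path(os.environ["BASIC_MEMORY_HOME"])  # must be set in cloud mode
            candidate = base_path / path
        else:
            candidate = Path(path)
        # expanduser handles "~", abspath resolves relative segments,
        # and as_posix() normalizes separators to "/" for storage.
        return Path(os.path.abspath(os.path.expanduser(candidate))).as_posix()

    os.environ.setdefault("BASIC_MEMORY_HOME", "/app/data")
    print(resolve_project_path("notes", cloud_mode=True))    # e.g. /app/data/notes
    print(resolve_project_path("~/notes", cloud_mode=False))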
@@ -139,8 +147,8 @@ class ProjectService:
         # First remove from config (this will validate the project exists and is not default)
         self.config_manager.remove_project(name)
 
-        # Then remove from database
-        project = await self.repository.get_by_name(name)
+        # Then remove from database using robust lookup
+        project = await self.get_project(name)
         if project:
             await self.repository.delete(project.id)
 
@@ -161,8 +169,8 @@ class ProjectService:
         # First update config file (this will validate the project exists)
         self.config_manager.set_default_project(name)
 
-        # Then update database
-        project = await self.repository.get_by_name(name)
+        # Then update database using the same lookup logic as get_project
+        project = await self.get_project(name)
         if project:
             await self.repository.set_as_default(project.id)
         else:
@@ -170,15 +178,6 @@ class ProjectService:
 
         logger.info(f"Project '{name}' set as default in configuration and database")
 
-        # Refresh MCP session to pick up the new default project
-        try:
-            from basic_memory.mcp.project_session import session
-            session.refresh_from_config()
-        except ImportError:  # pragma: no cover
-            # MCP components might not be available in all contexts (e.g., CLI-only usage)
-            logger.debug("MCP session not available, skipping session refresh")
-
-
     async def _ensure_single_default_project(self) -> None:
         """Ensure only one project has is_default=True.
 
@@ -300,15 +299,6 @@ class ProjectService:
 
         logger.info("Project synchronization complete")
 
-        # Refresh MCP session to ensure it's in sync with current config
-        try:
-            from basic_memory.mcp.project_session import session
-            session.refresh_from_config()
-        except ImportError:
-            # MCP components might not be available in all contexts
-            logger.debug("MCP session not available, skipping session refresh")
-
-
     async def move_project(self, name: str, new_path: str) -> None:
         """Move a project to a new location.
 
@@ -323,7 +313,7 @@ class ProjectService:
             raise ValueError("Repository is required for move_project")
 
         # Resolve to absolute path
-        resolved_path = os.path.abspath(os.path.expanduser(new_path))
+        resolved_path = Path(os.path.abspath(os.path.expanduser(new_path))).as_posix()
 
         # Validate project exists in config
         if name not in self.config_manager.projects:
@@ -338,8 +328,8 @@ class ProjectService:
         config.projects[name] = resolved_path
         self.config_manager.save_config(config)
 
-        # Update in database
-        project = await self.repository.get_by_name(name)
+        # Update in database using robust lookup
+        project = await self.get_project(name)
         if project:
             await self.repository.update_path(project.id, resolved_path)
             logger.info(f"Moved project '{name}' from {old_path} to {resolved_path}")
@@ -370,15 +360,15 @@ class ProjectService:
         if name not in self.config_manager.projects:
             raise ValueError(f"Project '{name}' not found in configuration")
 
-        # Get project from database
-        project = await self.repository.get_by_name(name)
+        # Get project from database using robust lookup
+        project = await self.get_project(name)
         if not project:
             logger.error(f"Project '{name}' exists in config but not in database")
             return
 
         # Update path if provided
         if updated_path:
-            resolved_path = os.path.abspath(os.path.expanduser(updated_path))
+            resolved_path = Path(os.path.abspath(os.path.expanduser(updated_path))).as_posix()
 
             # Update in config
             config = self.config_manager.load_config()
basic_memory/sync/sync_service.py

@@ -1,7 +1,9 @@
 """Service for syncing files between filesystem and database."""
 
+import asyncio
 import os
 import time
+from concurrent.futures import ThreadPoolExecutor
 from dataclasses import dataclass, field
 from datetime import datetime
 from pathlib import Path
@@ -10,12 +12,16 @@ from typing import Dict, Optional, Set, Tuple
 from loguru import logger
 from sqlalchemy.exc import IntegrityError
 
-from basic_memory.config import BasicMemoryConfig
+from basic_memory import db
+from basic_memory.config import BasicMemoryConfig, ConfigManager
 from basic_memory.file_utils import has_frontmatter
-from basic_memory.markdown import EntityParser
-from basic_memory.models import Entity
-from basic_memory.repository import EntityRepository, RelationRepository
+from basic_memory.ignore_utils import load_bmignore_patterns, should_ignore_path
+from basic_memory.markdown import EntityParser, MarkdownProcessor
+from basic_memory.models import Entity, Project
+from basic_memory.repository import EntityRepository, RelationRepository, ObservationRepository
+from basic_memory.repository.search_repository import SearchRepository
 from basic_memory.services import EntityService, FileService
+from basic_memory.services.link_resolver import LinkResolver
 from basic_memory.services.search_service import SearchService
 from basic_memory.services.sync_status_service import sync_status_tracker, SyncStatus
 
@@ -80,6 +86,43 @@ class SyncService:
         self.relation_repository = relation_repository
         self.search_service = search_service
         self.file_service = file_service
+        self._thread_pool = ThreadPoolExecutor(max_workers=app_config.sync_thread_pool_size)
+        # Load ignore patterns once at initialization for performance
+        self._ignore_patterns = load_bmignore_patterns()
+
+    async def _read_file_async(self, file_path: Path) -> str:
+        """Read file content in thread pool to avoid blocking the event loop."""
+        loop = asyncio.get_event_loop()
+        return await loop.run_in_executor(self._thread_pool, file_path.read_text, "utf-8")
+
+    async def _compute_checksum_async(self, path: str) -> str:
+        """Compute file checksum in thread pool to avoid blocking the event loop."""
+
+        def _sync_compute_checksum(path_str: str) -> str:
+            # Synchronous version for thread pool execution
+            path_obj = self.file_service.base_path / path_str
+
+            if self.file_service.is_markdown(path_str):
+                content = path_obj.read_text(encoding="utf-8")
+            else:
+                content = path_obj.read_bytes()
+
+            # Use the synchronous version of compute_checksum
+            import hashlib
+
+            if isinstance(content, str):
+                content_bytes = content.encode("utf-8")
+            else:
+                content_bytes = content
+            return hashlib.sha256(content_bytes).hexdigest()
+
+        loop = asyncio.get_event_loop()
+        return await loop.run_in_executor(self._thread_pool, _sync_compute_checksum, path)
+
+    def __del__(self):
+        """Cleanup thread pool when service is destroyed."""
+        if hasattr(self, "_thread_pool"):
+            self._thread_pool.shutdown(wait=False)
 
     async def sync(self, directory: Path, project_name: Optional[str] = None) -> SyncReport:
         """Sync all files with database."""
@@ -289,14 +332,14 @@ class SyncService:
         logger.debug(f"Parsing markdown file, path: {path}, new: {new}")
 
         file_path = self.entity_parser.base_path / path
-        file_content = file_path.read_text(encoding="utf-8")
+        file_content = await self._read_file_async(file_path)
         file_contains_frontmatter = has_frontmatter(file_content)
 
         # entity markdown will always contain front matter, so it can be used up create/update the entity
         entity_markdown = await self.entity_parser.parse_file(path)
 
-        # if the file contains frontmatter, resolve a permalink
-        if file_contains_frontmatter:
+        # if the file contains frontmatter, resolve a permalink (unless disabled)
+        if file_contains_frontmatter and not self.app_config.disable_permalinks:
             # Resolve permalink - this handles all the cases including conflicts
             permalink = await self.entity_service.resolve_permalink(path, markdown=entity_markdown)
 
@@ -326,7 +369,7 @@ class SyncService:
         # After updating relations, we need to compute the checksum again
         # This is necessary for files with wikilinks to ensure consistent checksums
         # after relation processing is complete
-        final_checksum = await self.file_service.compute_checksum(path)
+        final_checksum = await self._compute_checksum_async(path)
 
         # set checksum
         await self.entity_repository.update(entity.id, {"checksum": final_checksum})
@@ -350,15 +393,15 @@ class SyncService:
         Returns:
             Tuple of (entity, checksum)
         """
-        checksum = await self.file_service.compute_checksum(path)
+        checksum = await self._compute_checksum_async(path)
         if new:
             # Generate permalink from path
             await self.entity_service.resolve_permalink(path)
 
         # get file timestamps
         file_stats = self.file_service.file_stats(path)
-        created = datetime.fromtimestamp(file_stats.st_ctime)
-        modified = datetime.fromtimestamp(file_stats.st_mtime)
+        created = datetime.fromtimestamp(file_stats.st_ctime).astimezone()
+        modified = datetime.fromtimestamp(file_stats.st_mtime).astimezone()
 
         # get mime type
         content_type = self.file_service.content_type(path)
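A quick worked example of the timestamp change above: datetime.fromtimestamp() alone returns a naive local datetime (tzinfo is None), while chaining .astimezone() attaches the local UTC offset, which keeps stored created/modified times unambiguous when compared or serialized.

    from datetime import datetime
    from pathlib import Path

    stats = Path(__file__).stat()

    naive = datetime.fromtimestamp(stats.st_mtime)               # tzinfo is None
    aware = datetime.fromtimestamp(stats.st_mtime).astimezone()  # local UTC offset attached

    print(naive.isoformat())   # e.g. 2024-05-01T12:34:56.789012
    print(aware.isoformat())   # e.g. 2024-05-01T12:34:56.789012+02:00
    print(naive.tzinfo, aware.tzinfo)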
@@ -487,8 +530,10 @@ class SyncService:
         updates = {"file_path": new_path}
 
         # If configured, also update permalink to match new path
-        if self.app_config.update_permalinks_on_move and self.file_service.is_markdown(
-            new_path
+        if (
+            self.app_config.update_permalinks_on_move
+            and not self.app_config.disable_permalinks
+            and self.file_service.is_markdown(new_path)
         ):
             # generate new permalink value
             new_permalink = await self.entity_service.resolve_permalink(new_path)
@@ -548,12 +593,27 @@ class SyncService:
         # update search index
         await self.search_service.index_entity(updated)
 
-    async def resolve_relations(self):
-        """Try to resolve any unresolved relations"""
+    async def resolve_relations(self, entity_id: int | None = None):
+        """Try to resolve unresolved relations.
 
-        unresolved_relations = await self.relation_repository.find_unresolved_relations()
+        Args:
+            entity_id: If provided, only resolve relations for this specific entity.
+                Otherwise, resolve all unresolved relations in the database.
+        """
 
-        logger.info("Resolving forward references", count=len(unresolved_relations))
+        if entity_id:
+            # Only get unresolved relations for the specific entity
+            unresolved_relations = (
+                await self.relation_repository.find_unresolved_relations_for_entity(entity_id)
+            )
+            logger.info(
+                f"Resolving forward references for entity {entity_id}",
+                count=len(unresolved_relations),
+            )
+        else:
+            # Get all unresolved relations (original behavior)
+            unresolved_relations = await self.relation_repository.find_unresolved_relations()
+            logger.info("Resolving all forward references", count=len(unresolved_relations))
 
         for relation in unresolved_relations:
             logger.trace(
@@ -608,19 +668,35 @@ class SyncService:
 
         logger.debug(f"Scanning directory {directory}")
         result = ScanResult()
+        ignored_count = 0
 
         for root, dirnames, filenames in os.walk(str(directory)):
-            # Skip dot directories in-place
-            dirnames[:] = [d for d in dirnames if not d.startswith(".")]
+            # Convert root to Path for easier manipulation
+            root_path = Path(root)
+
+            # Filter out ignored directories in-place
+            dirnames_to_remove = []
+            for dirname in dirnames:
+                dir_path = root_path / dirname
+                if should_ignore_path(dir_path, directory, self._ignore_patterns):
+                    dirnames_to_remove.append(dirname)
+                    ignored_count += 1
+
+            # Remove ignored directories from dirnames to prevent os.walk from descending
+            for dirname in dirnames_to_remove:
+                dirnames.remove(dirname)
 
             for filename in filenames:
-                # Skip dot files
-                if filename.startswith("."):
+                path = root_path / filename
+
+                # Check if file should be ignored
+                if should_ignore_path(path, directory, self._ignore_patterns):
+                    ignored_count += 1
+                    logger.trace(f"Ignoring file per .bmignore: {path.relative_to(directory)}")
                     continue
 
-                path = Path(root) / filename
-                rel_path = str(path.relative_to(directory))
-                checksum = await self.file_service.compute_checksum(rel_path)
+                rel_path = path.relative_to(directory).as_posix()
+                checksum = await self._compute_checksum_async(rel_path)
                 result.files[rel_path] = checksum
                 result.checksums[checksum] = rel_path
 
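The scan above now consults ignore patterns (loaded once via load_bmignore_patterns and checked with should_ignore_path from the new basic_memory/ignore_utils.py) instead of only skipping dot-files. Those helpers' implementations are not shown in this diff, so the sketch below only approximates the idea with fnmatch-style matching; treat the helper as hypothetical.

    from fnmatch import fnmatch
    from pathlib import Path

    def should_ignore(path: Path, root: Path, patterns: set[str]) -> bool:
        """Approximation of .bmignore-style filtering: match any pattern against
        the path's components or its root-relative POSIX form."""
        rel = path.relative_to(root).as_posix()
        return any(
            fnmatch(rel, pattern) or any(fnmatch(part, pattern) for part in path.parts)
            for pattern in patterns
        )

    root = Path("/tmp/project")
    patterns = {".*", "node_modules", "*.tmp"}
    print(should_ignore(root / ".git" / "config", root, patterns))      # True (dot directory)
    print(should_ignore(root / "notes" / "draft.tmp", root, patterns))  # True (*.tmp)
    print(should_ignore(root / "notes" / "todo.md", root, patterns))    # False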
@@ -631,7 +707,55 @@ class SyncService:
             f"{directory} scan completed "
             f"directory={str(directory)} "
             f"files_found={len(result.files)} "
+            f"files_ignored={ignored_count} "
             f"duration_ms={duration_ms}"
         )
 
         return result
+
+
+async def get_sync_service(project: Project) -> SyncService:  # pragma: no cover
+    """Get sync service instance with all dependencies."""
+
+    app_config = ConfigManager().config
+    _, session_maker = await db.get_or_create_db(
+        db_path=app_config.database_path, db_type=db.DatabaseType.FILESYSTEM
+    )
+
+    project_path = Path(project.path)
+    entity_parser = EntityParser(project_path)
+    markdown_processor = MarkdownProcessor(entity_parser)
+    file_service = FileService(project_path, markdown_processor)
+
+    # Initialize repositories
+    entity_repository = EntityRepository(session_maker, project_id=project.id)
+    observation_repository = ObservationRepository(session_maker, project_id=project.id)
+    relation_repository = RelationRepository(session_maker, project_id=project.id)
+    search_repository = SearchRepository(session_maker, project_id=project.id)
+
+    # Initialize services
+    search_service = SearchService(search_repository, entity_repository, file_service)
+    link_resolver = LinkResolver(entity_repository, search_service)
+
+    # Initialize services
+    entity_service = EntityService(
+        entity_parser,
+        entity_repository,
+        observation_repository,
+        relation_repository,
+        file_service,
+        link_resolver,
+    )
+
+    # Create sync service
+    sync_service = SyncService(
+        app_config=app_config,
+        entity_service=entity_service,
+        entity_parser=entity_parser,
+        entity_repository=entity_repository,
+        relation_repository=relation_repository,
+        search_service=search_service,
+        file_service=file_service,
+    )
+
+    return sync_service
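A hedged usage sketch for the new module-level get_sync_service factory: given a Project record (which in the package would come from the project repository, not built by hand), it wires up the parser, repositories, and services, and the returned SyncService can then sync the project directory. The call is left commented out because it needs a real database and project on disk.

    import asyncio
    from pathlib import Path

    from basic_memory.sync.sync_service import get_sync_service

    async def run_one_sync(project) -> None:
        """`project` is a basic_memory.models.Project with id, name, and path set."""
        sync_service = await get_sync_service(project)
        report = await sync_service.sync(Path(project.path), project_name=project.name)
        print(f"sync of {project.name} finished: {report}")

    # asyncio.run(run_one_sync(project))  # with a Project loaded via ProjectRepository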