basic-memory 0.12.0__py3-none-any.whl → 0.12.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


basic_memory/__init__.py CHANGED
@@ -1,3 +1,3 @@
 """basic-memory - Local-first knowledge management combining Zettelkasten with knowledge graphs"""
 
-__version__ = "0.12.0"
+__version__ = "0.12.1"
basic_memory/api/app.py CHANGED
@@ -1,6 +1,5 @@
 """FastAPI application for basic-memory knowledge graph API."""
 
-import asyncio
 from contextlib import asynccontextmanager
 
 from fastapi import FastAPI, HTTPException
@@ -10,44 +9,14 @@ from loguru import logger
 from basic_memory import db
 from basic_memory.api.routers import knowledge, memory, project_info, resource, search
 from basic_memory.config import config as project_config
-from basic_memory.config import config_manager
-from basic_memory.sync import SyncService, WatchService
-
-
-async def run_background_sync(sync_service: SyncService, watch_service: WatchService):  # pragma: no cover
-    logger.info(f"Starting watch service to sync file changes in dir: {project_config.home}")
-    # full sync
-    await sync_service.sync(project_config.home, show_progress=False)
-
-    # watch changes
-    await watch_service.run()
+from basic_memory.services.initialization import initialize_app
 
 
 @asynccontextmanager
 async def lifespan(app: FastAPI):  # pragma: no cover
     """Lifecycle manager for the FastAPI app."""
-    await db.run_migrations(project_config)
-
-    # app config
-    basic_memory_config = config_manager.load_config()
-    logger.info(f"Sync changes enabled: {basic_memory_config.sync_changes}")
-    logger.info(f"Update permalinks on move enabled: {basic_memory_config.update_permalinks_on_move}")
-
-    watch_task = None
-    if basic_memory_config.sync_changes:
-        # import after migrations have run
-        from basic_memory.cli.commands.sync import get_sync_service
-
-        sync_service = await get_sync_service()
-        watch_service = WatchService(
-            sync_service=sync_service,
-            file_service=sync_service.entity_service.file_service,
-            config=project_config,
-        )
-        watch_task = asyncio.create_task(run_background_sync(sync_service, watch_service))
-    else:
-        logger.info("Sync changes disabled. Skipping watch service.")
-
+    # Initialize database and file sync services
+    watch_task = await initialize_app(project_config)
 
     # proceed with startup
     yield
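The net effect of this hunk is that all API startup logic now sits behind a single awaited call. A minimal sketch of the resulting pattern, assuming only what the diff shows (`initialize_app` runs migrations and returns an optional background watch task); the cancel-on-shutdown step mirrors the MCP server's `app_lifespan` later in this diff:

```python
from contextlib import asynccontextmanager

from fastapi import FastAPI

from basic_memory.config import config as project_config
from basic_memory.services.initialization import initialize_app


@asynccontextmanager
async def lifespan(app: FastAPI):
    # run migrations and, if sync is enabled, start the background watch task
    watch_task = await initialize_app(project_config)
    yield
    # cancel the watcher on shutdown (mirrors mcp/server.py's app_lifespan)
    if watch_task:
        watch_task.cancel()


app = FastAPI(lifespan=lifespan)
```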
basic_memory/cli/app.py CHANGED
@@ -7,8 +7,11 @@ def version_callback(value: bool) -> None:
     """Show version and exit."""
     if value:  # pragma: no cover
         import basic_memory
+        from basic_memory.config import config
 
         typer.echo(f"Basic Memory version: {basic_memory.__version__}")
+        typer.echo(f"Current project: {config.project}")
+        typer.echo(f"Project path: {config.home}")
         raise typer.Exit()
 
 
@@ -17,11 +20,12 @@ app = typer.Typer(name="basic-memory")
 
 @app.callback()
 def app_callback(
+    ctx: typer.Context,
     project: Optional[str] = typer.Option(
         None,
         "--project",
         "-p",
-        help="Specify which project to use",
+        help="Specify which project to use 1",
         envvar="BASIC_MEMORY_PROJECT",
     ),
     version: Optional[bool] = typer.Option(
@@ -34,6 +38,7 @@ def app_callback(
     ),
 ) -> None:
     """Basic Memory - Local-first personal knowledge management."""
+
     # We use the project option to set the BASIC_MEMORY_PROJECT environment variable
     # The config module will pick this up when loading
     if project:  # pragma: no cover
@@ -53,6 +58,13 @@ def app_callback(
 
     config = new_config
 
+    # Run migrations for every command unless --version was specified
+    if not version and ctx.invoked_subcommand is not None:
+        from basic_memory.config import config
+        from basic_memory.services.initialization import ensure_initialize_database
+
+        ensure_initialize_database(config)
+
 
 # Register sub-command groups
 import_app = typer.Typer(help="Import data from various sources")
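The `ctx.invoked_subcommand` guard above is standard Click behavior surfaced through Typer: the callback can inspect which subcommand (if any) is about to run and skip expensive setup such as migrations. A standalone sketch of the mechanism (illustrative app, not part of this package; `invoke_without_command=True` is needed for the callback to fire when no subcommand is given at all):

```python
import typer

app = typer.Typer()


@app.callback(invoke_without_command=True)
def main(ctx: typer.Context) -> None:
    if ctx.invoked_subcommand is None:
        typer.echo("no subcommand given; skipping setup")
    else:
        typer.echo(f"running setup before: {ctx.invoked_subcommand}")


@app.command()
def hello() -> None:
    typer.echo("hello")


if __name__ == "__main__":
    app()
```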
basic_memory/cli/commands/mcp.py CHANGED
@@ -1,10 +1,7 @@
 """MCP server command."""
 
-from loguru import logger
-
 import basic_memory
 from basic_memory.cli.app import app
-from basic_memory.config import config, config_manager
 
 # Import mcp instance
 from basic_memory.mcp.server import mcp as mcp_server  # pragma: no cover
@@ -15,19 +12,24 @@ import basic_memory.mcp.tools  # noqa: F401  # pragma: no cover
 
 @app.command()
 def mcp():  # pragma: no cover
-    """Run the MCP server for Claude Desktop integration."""
-    home_dir = config.home
-    project_name = config.project
+    """Run the MCP server"""
+    from basic_memory.config import config
+    import asyncio
+    from basic_memory.services.initialization import initialize_database
+
+    # First, run just the database migrations synchronously
+    asyncio.run(initialize_database(config))
+
+    # Load config to check if sync is enabled
+    from basic_memory.config import config_manager
 
-    # app config
     basic_memory_config = config_manager.load_config()
 
-    logger.info(f"Starting Basic Memory MCP server {basic_memory.__version__}")
-    logger.info(f"Project: {project_name}")
-    logger.info(f"Project directory: {home_dir}")
-    logger.info(f"Sync changes enabled: {basic_memory_config.sync_changes}")
-    logger.info(
-        f"Update permalinks on move enabled: {basic_memory_config.update_permalinks_on_move}"
-    )
+    if basic_memory_config.sync_changes:
+        # For now, we'll just log that sync will be handled by the MCP server
+        from loguru import logger
+
+        logger.info("File sync will be handled by the MCP server")
 
+    # Start the MCP server
     mcp_server.run()
basic_memory/cli/commands/sync.py CHANGED
@@ -179,14 +179,14 @@ async def run_sync(verbose: bool = False, watch: bool = False, console_status: b
         )
 
         # full sync - no progress bars in watch mode
-        await sync_service.sync(config.home, show_progress=False)
+        await sync_service.sync(config.home)
 
         # watch changes
         await watch_service.run()  # pragma: no cover
     else:
-        # one time sync - use progress bars for better UX
+        # one time sync
         logger.info("Running one-time sync")
-        knowledge_changes = await sync_service.sync(config.home, show_progress=True)
+        knowledge_changes = await sync_service.sync(config.home)
 
         # Log results
         duration_ms = int((time.time() - start_time) * 1000)
@@ -237,11 +237,11 @@ def sync(
         if not isinstance(e, typer.Exit):
             logger.exception(
                 "Sync command failed",
-                project=config.project,
-                error=str(e),
-                error_type=type(e).__name__,
-                watch_mode=watch,
-                directory=str(config.home),
+                f"project={config.project},"
+                f"error={str(e)},"
+                f"error_type={type(e).__name__},"
+                f"watch_mode={watch},"
+                f"directory={str(config.home)}",
             )
             typer.echo(f"Error during sync: {e}", err=True)
             raise typer.Exit(1)
basic_memory/cli/main.py CHANGED
@@ -1,9 +1,5 @@
 """Main CLI entry point for basic-memory."""  # pragma: no cover
 
-import asyncio
-
-import typer
-
 from basic_memory.cli.app import app  # pragma: no cover
 
 # Register commands
@@ -20,48 +16,11 @@ from basic_memory.cli.commands import (  # noqa: F401  # pragma: no cover
     tool,
 )
 from basic_memory.config import config
-from basic_memory.db import run_migrations as db_run_migrations
-
-
-# Version command
-@app.callback(invoke_without_command=True)
-def main(
-    ctx: typer.Context,
-    project: str = typer.Option(  # noqa
-        "main",
-        "--project",
-        "-p",
-        help="Specify which project to use",
-        envvar="BASIC_MEMORY_PROJECT",
-    ),
-    version: bool = typer.Option(
-        False,
-        "--version",
-        "-V",
-        help="Show version information and exit.",
-        is_eager=True,
-    ),
-):
-    """Basic Memory - Local-first personal knowledge management system."""
-    if version:  # pragma: no cover
-        from basic_memory import __version__
-        from basic_memory.config import config
-
-        typer.echo(f"Basic Memory v{__version__}")
-        typer.echo(f"Current project: {config.project}")
-        typer.echo(f"Project path: {config.home}")
-        raise typer.Exit()
-
-    # Handle project selection via environment variable
-    if project:
-        import os
-
-        os.environ["BASIC_MEMORY_PROJECT"] = project
-
+from basic_memory.services.initialization import ensure_initialization
 
 if __name__ == "__main__":  # pragma: no cover
-    # Run database migrations
-    asyncio.run(db_run_migrations(config))
+    # Run initialization if we are starting as a module
+    ensure_initialization(config)
 
     # start the app
     app()
basic_memory/config.py CHANGED
@@ -35,7 +35,7 @@ class ProjectConfig(BaseSettings):
 
     # Watch service configuration
     sync_delay: int = Field(
-        default=500, description="Milliseconds to wait after changes before syncing", gt=0
+        default=1000, description="Milliseconds to wait after changes before syncing", gt=0
     )
 
     # update permalinks on move
@@ -274,7 +274,7 @@ def setup_basic_memory_logging():  # pragma: no cover
         console=False,
     )
 
-    logger.info(f"Starting Basic Memory {basic_memory.__version__} (Project: {config.project})")
+    logger.info(f"Basic Memory {basic_memory.__version__} (Project: {config.project})")
     _LOGGING_SETUP = True
 
 
basic_memory/mcp/server.py CHANGED
@@ -1,11 +1,37 @@
 """Enhanced FastMCP server instance for Basic Memory."""
 
+import asyncio
+from contextlib import asynccontextmanager
+from typing import AsyncIterator, Optional
+
 from mcp.server.fastmcp import FastMCP
-from mcp.server.fastmcp.utilities.logging import configure_logging
+from mcp.server.fastmcp.utilities.logging import configure_logging as mcp_configure_logging
+from dataclasses import dataclass
+
+from basic_memory.config import config as project_config
+from basic_memory.services.initialization import initialize_app
 
 # mcp console logging
-configure_logging(level="ERROR")
+mcp_configure_logging(level="ERROR")
+
+
+@dataclass
+class AppContext:
+    watch_task: Optional[asyncio.Task]
+
+
+@asynccontextmanager
+async def app_lifespan(server: FastMCP) -> AsyncIterator[AppContext]:  # pragma: no cover
+    """Manage application lifecycle with type-safe context"""
+    # Initialize on startup
+    watch_task = await initialize_app(project_config)
+    try:
+        yield AppContext(watch_task=watch_task)
+    finally:
+        # Cleanup on shutdown
+        if watch_task:
+            watch_task.cancel()
 
 
 # Create the shared server instance
-mcp = FastMCP("Basic Memory", log_level="ERROR")
+mcp = FastMCP("Basic Memory", log_level="ERROR", lifespan=app_lifespan)
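FastMCP threads the object yielded by the lifespan manager through to request handlers, so a tool can reach the `AppContext` defined above. A hedged sketch (the `watch_status` tool is illustrative and not part of this release; `ctx.request_context.lifespan_context` is the MCP Python SDK's accessor for the lifespan value):

```python
from mcp.server.fastmcp import Context

from basic_memory.mcp.server import AppContext, mcp


@mcp.tool()
async def watch_status(ctx: Context) -> str:
    # the AppContext yielded by app_lifespan is available on every request
    app_context: AppContext = ctx.request_context.lifespan_context
    task = app_context.watch_task
    if task is None:
        return "file sync disabled"
    return "watch task stopped" if task.done() else "watch task running"
```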
basic_memory/repository/repository.py CHANGED
@@ -137,8 +137,6 @@ class Repository[T: Base]:
 
     async def find_one(self, query: Select[tuple[T]]) -> Optional[T]:
         """Execute a query and retrieve a single record."""
-        logger.debug(f"Finding one {self.Model.__name__} with query: {query}")
-
         # add in load options
         query = query.options(*self.get_load_options())
         result = await self.execute_query(query)
@@ -270,11 +268,9 @@ class Repository[T: Base]:
         """Execute a query asynchronously."""
 
         query = query.options(*self.get_load_options()) if use_query_options else query
-
         logger.debug(f"Executing query: {query}")
         async with db.scoped_session(self.session_maker) as session:
             result = await session.execute(query)
-            logger.debug("Query executed successfully")
             return result
 
     def get_load_options(self) -> List[LoaderOption]:
basic_memory/services/file_service.py CHANGED
@@ -60,7 +60,7 @@ class FileService:
         Returns:
             Raw content string without metadata sections
         """
-        logger.debug("Reading entity content", entity_id=entity.id, permalink=entity.permalink)
+        logger.debug(f"Reading entity content, entity_id={entity.id}, permalink={entity.permalink}")
 
         file_path = self.get_entity_path(entity)
         markdown = await self.markdown_processor.read_file(file_path)
basic_memory/services/initialization.py ADDED
@@ -0,0 +1,143 @@
+"""Shared initialization service for Basic Memory.
+
+This module provides shared initialization functions used by both CLI and API
+to ensure consistent application startup across all entry points.
+"""
+
+import asyncio
+from typing import Optional
+
+from loguru import logger
+
+from basic_memory import db
+from basic_memory.config import ProjectConfig, config_manager
+from basic_memory.sync import WatchService
+
+# Import this inside functions to avoid circular imports
+# from basic_memory.cli.commands.sync import get_sync_service
+
+
+async def initialize_database(app_config: ProjectConfig) -> None:
+    """Run database migrations to ensure schema is up to date.
+
+    Args:
+        app_config: The Basic Memory project configuration
+    """
+    try:
+        logger.info("Running database migrations...")
+        await db.run_migrations(app_config)
+        logger.info("Migrations completed successfully")
+    except Exception as e:
+        logger.error(f"Error running migrations: {e}")
+        # Allow application to continue - it might still work
+        # depending on what the error was, and will fail with a
+        # more specific error if the database is actually unusable
+
+
+async def initialize_file_sync(
+    app_config: ProjectConfig,
+) -> asyncio.Task:
+    """Initialize file synchronization services.
+
+    Args:
+        app_config: The Basic Memory project configuration
+
+    Returns:
+        Tuple of (sync_service, watch_service, watch_task) if sync is enabled,
+        or (None, None, None) if sync is disabled
+    """
+    # Load app configuration
+    # Import here to avoid circular imports
+    from basic_memory.cli.commands.sync import get_sync_service
+
+    # Initialize sync service
+    sync_service = await get_sync_service()
+
+    # Initialize watch service
+    watch_service = WatchService(
+        sync_service=sync_service,
+        file_service=sync_service.entity_service.file_service,
+        config=app_config,
+        quiet=True,
+    )
+
+    # Create the background task for running sync
+    async def run_background_sync():  # pragma: no cover
+        # Run initial full sync
+        await sync_service.sync(app_config.home)
+        logger.info("Sync completed successfully")
+
+        # Start background sync task
+        logger.info(f"Starting watch service to sync file changes in dir: {app_config.home}")
+
+        # Start watching for changes
+        await watch_service.run()
+
+    watch_task = asyncio.create_task(run_background_sync())
+    logger.info("Watch service started")
+    return watch_task
+
+
+async def initialize_app(
+    app_config: ProjectConfig,
+) -> Optional[asyncio.Task]:
+    """Initialize the Basic Memory application.
+
+    This function handles all initialization steps needed for both API and shor lived CLI commands.
+    For long running commands like mcp, a
+    - Running database migrations
+    - Setting up file synchronization
+
+    Args:
+        app_config: The Basic Memory project configuration
+    """
+    # Initialize database first
+    await initialize_database(app_config)
+
+    basic_memory_config = config_manager.load_config()
+    logger.info(f"Sync changes enabled: {basic_memory_config.sync_changes}")
+    logger.info(
+        f"Update permalinks on move enabled: {basic_memory_config.update_permalinks_on_move}"
+    )
+    if not basic_memory_config.sync_changes:  # pragma: no cover
+        logger.info("Sync changes disabled. Skipping watch service.")
+        return
+
+    # Initialize file sync services
+    return await initialize_file_sync(app_config)
+
+
+def ensure_initialization(app_config: ProjectConfig) -> None:
+    """Ensure initialization runs in a synchronous context.
+
+    This is a wrapper for the async initialize_app function that can be
+    called from synchronous code like CLI entry points.
+
+    Args:
+        app_config: The Basic Memory project configuration
+    """
+    try:
+        asyncio.run(initialize_app(app_config))
+    except Exception as e:
+        logger.error(f"Error during initialization: {e}")
+        # Continue execution even if initialization fails
+        # The command might still work, or will fail with a
+        # more specific error message
+
+
+def ensure_initialize_database(app_config: ProjectConfig) -> None:
+    """Ensure initialization runs in a synchronous context.
+
+    This is a wrapper for the async initialize_database function that can be
+    called from synchronous code like CLI entry points.
+
+    Args:
+        app_config: The Basic Memory project configuration
+    """
+    try:
+        asyncio.run(initialize_database(app_config))
+    except Exception as e:
+        logger.error(f"Error during initialization: {e}")
+        # Continue execution even if initialization fails
+        # The command might still work, or will fail with a
+        # more specific error message
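One subtlety in this new module: `ensure_initialization` wraps `initialize_app` in `asyncio.run`, so any watch task it spawns is torn down as soon as that short-lived event loop closes, which is fine for one-shot CLI commands but not for servers. Long-running processes (the API and MCP server in this release) instead await `initialize_app` from inside their own running loop, roughly like this sketch (the `main` coroutine is illustrative):

```python
import asyncio

from basic_memory.config import config
from basic_memory.services.initialization import initialize_app


async def main() -> None:
    # started inside a long-lived loop, so the watch task keeps running
    watch_task = await initialize_app(config)
    try:
        await asyncio.sleep(3600)  # stand-in for serving requests
    finally:
        if watch_task:
            watch_task.cancel()


if __name__ == "__main__":
    asyncio.run(main())
```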
basic_memory/sync/sync_service.py CHANGED
@@ -19,7 +19,6 @@ from basic_memory.repository import EntityRepository, RelationRepository
 from basic_memory.services import EntityService, FileService
 from basic_memory.services.search_service import SearchService
 import time
-from rich.progress import Progress, TextColumn, BarColumn, TaskProgressColumn
 
 
 @dataclass
@@ -83,145 +82,51 @@ class SyncService:
         self.search_service = search_service
         self.file_service = file_service
 
-    async def sync(self, directory: Path, show_progress: bool = True) -> SyncReport:
+    async def sync(self, directory: Path) -> SyncReport:
         """Sync all files with database."""
 
         start_time = time.time()
-        console = None
-        progress = None  # Will be initialized if show_progress is True
-
-        logger.info("Sync operation started", directory=str(directory))
+        logger.info(f"Sync operation started for directory: {directory}")
 
         # initial paths from db to sync
         # path -> checksum
-        if show_progress:
-            from rich.console import Console
-
-            console = Console()
-            console.print(f"Scanning directory: {directory}")
-
         report = await self.scan(directory)
 
         # Initialize progress tracking if requested
-        if show_progress and report.total > 0:
-            progress = Progress(
-                TextColumn("[bold blue]{task.description}"),
-                BarColumn(),
-                TaskProgressColumn(),
-                console=console,
-                expand=True,
-            )
-
         # order of sync matters to resolve relations effectively
         logger.info(
-            "Sync changes detected",
-            new_files=len(report.new),
-            modified_files=len(report.modified),
-            deleted_files=len(report.deleted),
-            moved_files=len(report.moves),
+            f"Sync changes detected: new_files={len(report.new)}, modified_files={len(report.modified)}, "
+            + f"deleted_files={len(report.deleted)}, moved_files={len(report.moves)}"
         )
 
-        if show_progress and report.total > 0:
-            with progress:  # pyright: ignore
-                # Track each category separately
-                move_task = None
-                if report.moves:  # pragma: no cover
-                    move_task = progress.add_task("[blue]Moving files...", total=len(report.moves))  # pyright: ignore
-
-                delete_task = None
-                if report.deleted:  # pragma: no cover
-                    delete_task = progress.add_task(  # pyright: ignore
-                        "[red]Deleting files...", total=len(report.deleted)
-                    )
-
-                new_task = None
-                if report.new:
-                    new_task = progress.add_task(  # pyright: ignore
-                        "[green]Adding new files...", total=len(report.new)
-                    )
-
-                modify_task = None
-                if report.modified:  # pragma: no cover
-                    modify_task = progress.add_task(  # pyright: ignore
-                        "[yellow]Updating modified files...", total=len(report.modified)
+        # sync moves first
+        for old_path, new_path in report.moves.items():
+            # in the case where a file has been deleted and replaced by another file
+            # it will show up in the move and modified lists, so handle it in modified
+            if new_path in report.modified:
+                report.modified.remove(new_path)
+                logger.debug(
+                    f"File marked as moved and modified: old_path={old_path}, new_path={new_path}"
                 )
+            else:
+                await self.handle_move(old_path, new_path)
 
-                # sync moves first
-                for i, (old_path, new_path) in enumerate(report.moves.items()):
-                    # in the case where a file has been deleted and replaced by another file
-                    # it will show up in the move and modified lists, so handle it in modified
-                    if new_path in report.modified:  # pragma: no cover
-                        report.modified.remove(new_path)
-                        logger.debug(
-                            "File marked as moved and modified",
-                            old_path=old_path,
-                            new_path=new_path,
-                            action="processing as modified",
-                        )
-                    else:  # pragma: no cover
-                        await self.handle_move(old_path, new_path)
-
-                    if move_task is not None:  # pragma: no cover
-                        progress.update(move_task, advance=1)  # pyright: ignore
-
-                # deleted next
-                for i, path in enumerate(report.deleted):  # pragma: no cover
-                    await self.handle_delete(path)
-                    if delete_task is not None:  # pragma: no cover
-                        progress.update(delete_task, advance=1)  # pyright: ignore
-
-                # then new and modified
-                for i, path in enumerate(report.new):
-                    await self.sync_file(path, new=True)
-                    if new_task is not None:
-                        progress.update(new_task, advance=1)  # pyright: ignore
-
-                for i, path in enumerate(report.modified):  # pragma: no cover
-                    await self.sync_file(path, new=False)
-                    if modify_task is not None:  # pragma: no cover
-                        progress.update(modify_task, advance=1)  # pyright: ignore
-
-                # Final step - resolving relations
-                if report.total > 0:
-                    relation_task = progress.add_task("[cyan]Resolving relations...", total=1)  # pyright: ignore
-                    await self.resolve_relations()
-                    progress.update(relation_task, advance=1)  # pyright: ignore
-        else:
-            # No progress display - proceed with normal sync
-            # sync moves first
-            for old_path, new_path in report.moves.items():
-                # in the case where a file has been deleted and replaced by another file
-                # it will show up in the move and modified lists, so handle it in modified
-                if new_path in report.modified:
-                    report.modified.remove(new_path)
-                    logger.debug(
-                        "File marked as moved and modified",
-                        old_path=old_path,
-                        new_path=new_path,
-                        action="processing as modified",
-                    )
-                else:
-                    await self.handle_move(old_path, new_path)
-
-            # deleted next
-            for path in report.deleted:
-                await self.handle_delete(path)
+        # deleted next
+        for path in report.deleted:
+            await self.handle_delete(path)
 
-            # then new and modified
-            for path in report.new:
-                await self.sync_file(path, new=True)
+        # then new and modified
+        for path in report.new:
+            await self.sync_file(path, new=True)
 
-            for path in report.modified:
-                await self.sync_file(path, new=False)
+        for path in report.modified:
+            await self.sync_file(path, new=False)
 
-            await self.resolve_relations()
+        await self.resolve_relations()
 
         duration_ms = int((time.time() - start_time) * 1000)
         logger.info(
-            "Sync operation completed",
-            directory=str(directory),
-            total_changes=report.total,
-            duration_ms=duration_ms,
+            f"Sync operation completed: directory={directory}, total_changes={report.total}, duration_ms={duration_ms}"
         )
 
         return report
@@ -230,6 +135,7 @@ class SyncService:
         """Scan directory for changes compared to database state."""
 
         db_paths = await self.get_db_file_state()
+        logger.debug(f"Found {len(db_paths)} db paths")
 
         # Track potentially moved files by checksum
         scan_result = await self.scan_directory(directory)
@@ -280,6 +186,7 @@ class SyncService:
         :param db_records: the data from the db
         """
         db_records = await self.entity_repository.find_all()
+        logger.info(f"Found {len(db_records)} db records")
         return {r.file_path: r.checksum or "" for r in db_records}
 
     async def sync_file(
@@ -296,10 +203,7 @@ class SyncService:
         """
         try:
             logger.debug(
-                "Syncing file",
-                path=path,
-                is_new=new,
-                is_markdown=self.file_service.is_markdown(path),
+                f"Syncing file path={path} is_new={new} is_markdown={self.file_service.is_markdown(path)}"
             )
 
             if self.file_service.is_markdown(path):
@@ -311,7 +215,7 @@ class SyncService:
                 await self.search_service.index_entity(entity)
 
                 logger.debug(
-                    "File sync completed", path=path, entity_id=entity.id, checksum=checksum
+                    f"File sync completed, path={path}, entity_id={entity.id}, checksum={checksum[:8]}"
                 )
                 return entity, checksum
 
@@ -330,7 +234,7 @@ class SyncService:
             Tuple of (entity, checksum)
         """
         # Parse markdown first to get any existing permalink
-        logger.debug("Parsing markdown file", path=path)
+        logger.debug(f"Parsing markdown file, path: {path}, new: {new}")
 
         file_path = self.entity_parser.base_path / path
         file_content = file_path.read_text()
@@ -347,10 +251,7 @@ class SyncService:
         # If permalink changed, update the file
         if permalink != entity_markdown.frontmatter.permalink:
             logger.info(
-                "Updating permalink",
-                path=path,
-                old_permalink=entity_markdown.frontmatter.permalink,
-                new_permalink=permalink,
+                f"Updating permalink for path: {path}, old_permalink: {entity_markdown.frontmatter.permalink}, new_permalink: {permalink}"
             )
 
             entity_markdown.frontmatter.metadata["permalink"] = permalink
@@ -359,12 +260,12 @@ class SyncService:
         # if the file is new, create an entity
         if new:
             # Create entity with final permalink
-            logger.debug("Creating new entity from markdown", path=path)
+            logger.debug(f"Creating new entity from markdown, path={path}")
             await self.entity_service.create_entity_from_markdown(Path(path), entity_markdown)
 
         # otherwise we need to update the entity and observations
         else:
-            logger.debug("Updating entity from markdown", path=path)
+            logger.debug(f"Updating entity from markdown, path={path}")
             await self.entity_service.update_entity_and_observations(Path(path), entity_markdown)
 
         # Update relations and search index
@@ -379,12 +280,9 @@ class SyncService:
         await self.entity_repository.update(entity.id, {"checksum": final_checksum})
 
         logger.debug(
-            "Markdown sync completed",
-            path=path,
-            entity_id=entity.id,
-            observation_count=len(entity.observations),
-            relation_count=len(entity.relations),
-            checksum=final_checksum,
+            f"Markdown sync completed: path={path}, entity_id={entity.id}, "
+            f"observation_count={len(entity.observations)}, relation_count={len(entity.relations)}, "
+            f"checksum={final_checksum[:8]}"
         )
 
         # Return the final checksum to ensure everything is consistent
@@ -429,7 +327,7 @@ class SyncService:
         else:
             entity = await self.entity_repository.get_by_file_path(path)
             if entity is None:  # pragma: no cover
-                logger.error("Entity not found for existing file", path=path)
+                logger.error(f"Entity not found for existing file, path={path}")
                 raise ValueError(f"Entity not found for existing file: {path}")
 
         updated = await self.entity_repository.update(
@@ -437,7 +335,7 @@ class SyncService:
         )
 
         if updated is None:  # pragma: no cover
-            logger.error("Failed to update entity", entity_id=entity.id, path=path)
+            logger.error(f"Failed to update entity, entity_id={entity.id}, path={path}")
             raise ValueError(f"Failed to update entity with ID {entity.id}")
 
         return updated, checksum
@@ -449,10 +347,7 @@ class SyncService:
         entity = await self.entity_repository.get_by_file_path(file_path)
         if entity:
             logger.info(
-                "Deleting entity",
-                file_path=file_path,
-                entity_id=entity.id,
-                permalink=entity.permalink,
+                f"Deleting entity with file_path={file_path}, entity_id={entity.id}, permalink={entity.permalink}"
             )
 
             # Delete from db (this cascades to observations/relations)
@@ -466,10 +361,8 @@ class SyncService:
             )
 
             logger.debug(
-                "Cleaning up search index",
-                entity_id=entity.id,
-                file_path=file_path,
-                index_entries=len(permalinks),
+                f"Cleaning up search index for entity_id={entity.id}, file_path={file_path}, "
+                f"index_entries={len(permalinks)}"
             )
 
             for permalink in permalinks:
@@ -479,7 +372,7 @@ class SyncService:
             await self.search_service.delete_by_entity_id(entity.id)
 
     async def handle_move(self, old_path, new_path):
-        logger.info("Moving entity", old_path=old_path, new_path=new_path)
+        logger.debug("Moving entity", old_path=old_path, new_path=new_path)
 
         entity = await self.entity_repository.get_by_file_path(old_path)
         if entity:
@@ -500,29 +393,28 @@ class SyncService:
                 updates["checksum"] = new_checksum
 
                 logger.info(
-                    "Updating permalink on move",
-                    old_permalink=entity.permalink,
-                    new_permalink=new_permalink,
-                    new_checksum=new_checksum,
+                    f"Updating permalink on move,old_permalink={entity.permalink}"
+                    f"new_permalink={new_permalink}"
+                    f"new_checksum={new_checksum}"
                 )
 
             updated = await self.entity_repository.update(entity.id, updates)
 
             if updated is None:  # pragma: no cover
                 logger.error(
-                    "Failed to update entity path",
-                    entity_id=entity.id,
-                    old_path=old_path,
-                    new_path=new_path,
+                    "Failed to update entity path"
+                    f"entity_id={entity.id}"
+                    f"old_path={old_path}"
+                    f"new_path={new_path}"
                 )
                 raise ValueError(f"Failed to update entity path for ID {entity.id}")
 
             logger.debug(
-                "Entity path updated",
-                entity_id=entity.id,
-                permalink=entity.permalink,
-                old_path=old_path,
-                new_path=new_path,
+                "Entity path updated"
+                f"entity_id={entity.id} "
+                f"permalink={entity.permalink} "
+                f"old_path={old_path} "
+                f"new_path={new_path} "
             )
 
             # update search index
@@ -537,10 +429,10 @@ class SyncService:
 
         for relation in unresolved_relations:
             logger.debug(
-                "Attempting to resolve relation",
-                relation_id=relation.id,
-                from_id=relation.from_id,
-                to_name=relation.to_name,
+                "Attempting to resolve relation "
+                f"relation_id={relation.id} "
+                f"from_id={relation.from_id} "
+                f"to_name={relation.to_name}"
             )
 
             resolved_entity = await self.entity_service.link_resolver.resolve_link(relation.to_name)
@@ -548,12 +440,12 @@ class SyncService:
             # ignore reference to self
             if resolved_entity and resolved_entity.id != relation.from_id:
                 logger.debug(
-                    "Resolved forward reference",
-                    relation_id=relation.id,
-                    from_id=relation.from_id,
-                    to_name=relation.to_name,
-                    resolved_id=resolved_entity.id,
-                    resolved_title=resolved_entity.title,
+                    "Resolved forward reference "
+                    f"relation_id={relation.id} "
+                    f"from_id={relation.from_id} "
+                    f"to_name={relation.to_name} "
+                    f"resolved_id={resolved_entity.id} "
+                    f"resolved_title={resolved_entity.title}",
                 )
                 try:
                     await self.relation_repository.update(
@@ -565,10 +457,10 @@ class SyncService:
                     )
                 except IntegrityError:  # pragma: no cover
                     logger.debug(
-                        "Ignoring duplicate relation",
-                        relation_id=relation.id,
-                        from_id=relation.from_id,
-                        to_name=relation.to_name,
+                        "Ignoring duplicate relation "
+                        f"relation_id={relation.id} "
+                        f"from_id={relation.from_id} "
+                        f"to_name={relation.to_name}"
                     )
 
         # update search index
@@ -586,7 +478,7 @@ class SyncService:
         """
         start_time = time.time()
 
-        logger.debug("Scanning directory", directory=str(directory))
+        logger.debug(f"Scanning directory {directory}")
         result = ScanResult()
 
         for root, dirnames, filenames in os.walk(str(directory)):
@@ -608,10 +500,10 @@ class SyncService:
 
         duration_ms = int((time.time() - start_time) * 1000)
         logger.debug(
-            "Directory scan completed",
-            directory=str(directory),
-            files_found=len(result.files),
-            duration_ms=duration_ms,
+            f"{directory} scan completed "
+            f"directory={str(directory)} "
+            f"files_found={len(result.files)} "
+            f"duration_ms={duration_ms}"
        )
 
         return result
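A pattern repeated across these logging hunks: 0.12.0 passed structured fields as keyword arguments (`logger.info("msg", path=path)`), but loguru treats such kwargs as `str.format()` parameters for the message rather than fields to render, so with no matching `{path}` placeholder the values never appeared in the output line. 0.12.1 inlines them into f-strings instead. A standalone illustration (hypothetical message, default loguru sink):

```python
from loguru import logger

path = "notes/todo.md"

# 0.12.0 style: the kwarg is only a format() argument; with no {path}
# placeholder in the message, the value is absent from the rendered line
logger.info("Syncing file", path=path)

# 0.12.1 style: the field is part of the message itself
logger.info(f"Syncing file path={path}")
```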
basic_memory/sync/watch_service.py CHANGED
@@ -70,22 +70,30 @@ class WatchServiceState(BaseModel):
 
 
 class WatchService:
-    def __init__(self, sync_service: SyncService, file_service: FileService, config: ProjectConfig):
+    def __init__(
+        self,
+        sync_service: SyncService,
+        file_service: FileService,
+        config: ProjectConfig,
+        quiet: bool = False,
+    ):
         self.sync_service = sync_service
         self.file_service = file_service
         self.config = config
         self.state = WatchServiceState()
         self.status_path = config.home / ".basic-memory" / WATCH_STATUS_JSON
         self.status_path.parent.mkdir(parents=True, exist_ok=True)
-        self.console = Console()
+
+        # quiet mode for mcp so it doesn't mess up stdout
+        self.console = Console(quiet=quiet)
 
     async def run(self):  # pragma: no cover
         """Watch for file changes and sync them"""
         logger.info(
             "Watch service started",
-            directory=str(self.config.home),
-            debounce_ms=self.config.sync_delay,
-            pid=os.getpid(),
+            f"directory={str(self.config.home)}",
+            f"debounce_ms={self.config.sync_delay}",
+            f"pid={os.getpid()}",
         )
 
         self.state.running = True
@@ -111,8 +119,8 @@ class WatchService:
         finally:
             logger.info(
                 "Watch service stopped",
-                directory=str(self.config.home),
-                runtime_seconds=int((datetime.now() - self.state.start_time).total_seconds()),
+                f"directory={str(self.config.home)}",
+                f"runtime_seconds={int((datetime.now() - self.state.start_time).total_seconds())}",
             )
 
         self.state.running = False
@@ -154,7 +162,7 @@ class WatchService:
 
         start_time = time.time()
 
-        logger.info("Processing file changes", change_count=len(changes), directory=str(directory))
+        logger.info(f"Processing file changes, change_count={len(changes)}, directory={directory}")
 
         # Group changes by type
         adds: List[str] = []
@@ -177,9 +185,17 @@ class WatchService:
             modifies.append(relative_path)
 
         logger.debug(
-            "Grouped file changes", added=len(adds), deleted=len(deletes), modified=len(modifies)
+            f"Grouped file changes, added={len(adds)}, deleted={len(deletes)}, modified={len(modifies)}"
         )
 
+        # because of our atomic writes on updates, an add may be an existing file
+        for added_path in adds:  # pragma: no cover TODO add test
+            entity = await self.sync_service.entity_repository.get_by_file_path(added_path)
+            if entity is not None:
+                logger.debug(f"Existing file will be processed as modified, path={added_path}")
+                adds.remove(added_path)
+                modifies.append(added_path)
+
         # Track processed files to avoid duplicates
         processed: Set[str] = set()
 
@@ -223,15 +239,16 @@ class WatchService:
                                 status="success",
                             )
                             self.console.print(f"[blue]→[/blue] {deleted_path} → {added_path}")
+                            logger.info(f"move: {deleted_path} -> {added_path}")
                             processed.add(added_path)
                             processed.add(deleted_path)
                             break
                     except Exception as e:  # pragma: no cover
                         logger.warning(
                             "Error checking for move",
-                            old_path=deleted_path,
-                            new_path=added_path,
-                            error=str(e),
+                            f"old_path={deleted_path}",
+                            f"new_path={added_path}",
+                            f"error={str(e)}",
                         )
 
         # Handle remaining changes - group them by type for concise output
@@ -247,6 +264,7 @@ class WatchService:
                 await self.sync_service.handle_delete(path)
                 self.state.add_event(path=path, action="deleted", status="success")
                 self.console.print(f"[red]✕[/red] {path}")
+                logger.info(f"deleted: {path}")
                 processed.add(path)
                 delete_count += 1
 
@@ -257,28 +275,27 @@ class WatchService:
                 full_path = directory / path
                 if not full_path.exists() or full_path.is_dir():
                     logger.debug(
-                        "Skipping non-existent or directory path", path=path
+                        f"Skipping non-existent or directory path, path={path}"
                     )  # pragma: no cover
                     processed.add(path)  # pragma: no cover
                     continue  # pragma: no cover
 
-                logger.debug("Processing new file", path=path)
+                logger.debug(f"Processing new file, path={path}")
                 entity, checksum = await self.sync_service.sync_file(path, new=True)
                 if checksum:
                     self.state.add_event(
                         path=path, action="new", status="success", checksum=checksum
                     )
                     self.console.print(f"[green]✓[/green] {path}")
-                    logger.debug(
-                        "Added file processed",
-                        path=path,
-                        entity_id=entity.id if entity else None,
-                        checksum=checksum,
+                    logger.info(
+                        "new file processed",
+                        f"path={path}",
+                        f"checksum={checksum}",
                     )
                     processed.add(path)
                     add_count += 1
                 else:  # pragma: no cover
-                    logger.warning("Error syncing new file", path=path)  # pragma: no cover
+                    logger.warning(f"Error syncing new file, path={path}")  # pragma: no cover
                     self.console.print(
                         f"[orange]?[/orange] Error syncing: {path}"
                     )  # pragma: no cover
@@ -296,7 +313,7 @@ class WatchService:
                     processed.add(path)
                     continue
 
-                logger.debug("Processing modified file", path=path)
+                logger.debug(f"Processing modified file: path={path}")
                 entity, checksum = await self.sync_service.sync_file(path, new=False)
                 self.state.add_event(
                     path=path, action="modified", status="success", checksum=checksum
@@ -311,17 +328,18 @@ class WatchService:
                         f"[yellow]...[/yellow] Repeated changes to {path}"
                     )  # pragma: no cover
                 else:
-                    # New file being modified
+                    # haven't processed this file
                     self.console.print(f"[yellow]✎[/yellow] {path}")
+                    logger.info(f"modified: {path}")
                     last_modified_path = path
                     repeat_count = 0
                     modify_count += 1
 
                 logger.debug(
-                    "Modified file processed",
-                    path=path,
-                    entity_id=entity.id if entity else None,
-                    checksum=checksum,
+                    "Modified file processed, "
+                    f"path={path} "
+                    f"entity_id={entity.id if entity else None} "
+                    f"checksum={checksum}",
                 )
                 processed.add(path)
 
@@ -339,16 +357,17 @@ class WatchService:
 
         if changes:
             self.console.print(f"{', '.join(changes)}", style="dim")  # pyright: ignore
+            logger.info(f"changes: {len(changes)}")
 
         duration_ms = int((time.time() - start_time) * 1000)
         self.state.last_scan = datetime.now()
         self.state.synced_files += len(processed)
 
         logger.info(
-            "File change processing completed",
-            processed_files=len(processed),
-            total_synced_files=self.state.synced_files,
-            duration_ms=duration_ms,
+            "File change processing completed, "
+            f"processed_files={len(processed)}, "
+            f"total_synced_files={self.state.synced_files}, "
+            f"duration_ms={duration_ms}"
         )
 
         await self.write_status()
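One behavioral note on the add-promotion loop in the `@@ -177,9 +185,17 @@` hunk above: it calls `adds.remove(added_path)` while iterating `adds` directly, and removing from a list during iteration skips the element that slides into the removed slot. A standalone demonstration of the pitfall and the usual fix (iterate over a snapshot):

```python
# removing while iterating skips the element after each removal
adds = ["a.md", "b.md", "c.md"]
for item in adds:
    adds.remove(item)
print(adds)  # ['b.md'] survives because it was skipped

# iterating over a copy visits every element
adds = ["a.md", "b.md", "c.md"]
for item in list(adds):
    adds.remove(item)
print(adds)  # []
```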
basic_memory-0.12.1.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: basic-memory
-Version: 0.12.0
+Version: 0.12.1
 Summary: Local-first knowledge management combining Zettelkasten with knowledge graphs
 Project-URL: Homepage, https://github.com/basicmachines-co/basic-memory
 Project-URL: Repository, https://github.com/basicmachines-co/basic-memory
@@ -187,7 +187,7 @@ The note embeds semantic content and links to other topics via simple Markdown f
 
 3. You see this file on your computer in real time in the current project directory (default `~/$HOME/basic-memory`).
 
-   - Realtime sync can be enabled via running `basic-memory sync --watch`
+   - Realtime sync is enabled by default with the v0.12.0 version
 
 4. In a chat with the LLM, you can reference a topic:
@@ -378,13 +378,7 @@ config:
 
 2. Sync your knowledge:
 
-   ```bash
-   # One-time sync of local knowledge updates
-   basic-memory sync
-
-   # Run realtime sync process (recommended)
-   basic-memory sync --watch
-   ```
+   Basic Memory will sync the files in your project in real time if you make manual edits.
 
 3. In Claude Desktop, the LLM can now use these tools:
basic_memory-0.12.1.dist-info/RECORD CHANGED
@@ -1,5 +1,5 @@
-basic_memory/__init__.py,sha256=RtfHqJWLL9RssuJgBRYNskTtP0k1jVVukaswAEsHjfU,123
-basic_memory/config.py,sha256=ONdKTjIS_W2d0k6epzV0dY0Gdv8OC_woBkK-ElXpxW8,9211
+basic_memory/__init__.py,sha256=45SW0xM0KuIkvy6KfeAwERAglEya26oqmJ0WvHoG_gA,123
+basic_memory/config.py,sha256=jZmBOj4Gl2l56pApiN88s6juPDaX1g2LcvuVUnUeG0Q,9203
 basic_memory/db.py,sha256=8SmrmNAlJlmYT9yIJiPwNq8SN8mB2rbW5t33Rqpyl2I,6052
 basic_memory/deps.py,sha256=yI6RL_5-8LXw7ywSJ_84BXAczDtv2h9GFLw-E9XDJFg,5770
 basic_memory/file_utils.py,sha256=eaxTKLLEbTIy_Mb_Iv_Dmt4IXAJSrZGVi-Knrpyci3E,6700
@@ -13,7 +13,7 @@ basic_memory/alembic/versions/502b60eaa905_remove_required_from_entity_permalink
 basic_memory/alembic/versions/b3c3938bacdb_relation_to_name_unique_index.py,sha256=RsGymQzfRXV1LSNKiyi0lMilTxW1NgwS9jR67ye2apI,1428
 basic_memory/alembic/versions/cc7172b46608_update_search_index_schema.py,sha256=Lbo3dEzdId_vKRFe3jMkGFF3dNQpblPIQa4Bh7np-zA,4020
 basic_memory/api/__init__.py,sha256=wCpj-21j1D0KzKl9Ql6unLBVFY0K1uGp_FeSZRKtqpk,72
-basic_memory/api/app.py,sha256=yoVghdAdkPrnYRdtSgUEJ1tS1FAjfxpGOh40DVL9myQ,2896
+basic_memory/api/app.py,sha256=GFFX3MOusEzbDaAVDECk3B46xybuinUfMt4Atw0Nr8c,1724
 basic_memory/api/routers/__init__.py,sha256=SKuL-weA58hYj0NOMCQRfmsaumlNjjyVHDXNpRO38bQ,305
 basic_memory/api/routers/knowledge_router.py,sha256=iYuBguMb6ERitAwoelSejBYJqLTGfjpkzAHrqwTKjVE,5876
 basic_memory/api/routers/memory_router.py,sha256=W_uHJe2c4XN96mFj6XNvUH6INVbl1BMxy0KUchLcbxU,5421
@@ -21,18 +21,18 @@ basic_memory/api/routers/project_info_router.py,sha256=Qv12_QL3SRpo7bPcpAjizJmkZ
 basic_memory/api/routers/resource_router.py,sha256=WEJEqEaY_yTKj5-U-rW4kXQKUcJflykgwI6_g_R41ck,8058
 basic_memory/api/routers/search_router.py,sha256=R_a5OF5_8rCjmoOMhmw3M4VLCy6I1KLGJ-otSLB0rbI,1953
 basic_memory/cli/__init__.py,sha256=arcKLAWRDhPD7x5t80MlviZeYzwHZ0GZigyy3NKVoGk,33
-basic_memory/cli/app.py,sha256=qTalo-Pk2zJCa6u4xQD0ORkCnu_e1Bjw0stPIJWdmxQ,1655
-basic_memory/cli/main.py,sha256=52mBF4HWBG0WGlqXzwn5pDpVFNap51aNEimybZ28lpM,1656
+basic_memory/cli/app.py,sha256=hFRYznTSI58t6FEDUwF_PUgKZF0V63sCHzBDDb5FOAk,2142
+basic_memory/cli/main.py,sha256=WhYOCKliF48DLDOukOg3QPiD16IP3AJfhdCIe7Wlh2g,666
 basic_memory/cli/commands/__init__.py,sha256=3oojcC-Y-4RPqff9vtwWziT_T4uvBVicL0pSHNilVkU,393
 basic_memory/cli/commands/db.py,sha256=-jgVH2fs_s1vvBNJx_FWspQVHv0F6Qd7V5ZPxtYn_jQ,1125
 basic_memory/cli/commands/import_chatgpt.py,sha256=M4_oUN9o_BaW5jpKQu2pTEybivB5ccVolhdZzmhLOsI,8162
 basic_memory/cli/commands/import_claude_conversations.py,sha256=D_4-0xFKkZka7xFvvW8OkgjLv3TFqsC_VuB2Z-Y3avU,6827
 basic_memory/cli/commands/import_claude_projects.py,sha256=KzUuf3wrlvJlqTWCzoLRrNxD3OYNteRXaTFj5IB1FA8,6649
 basic_memory/cli/commands/import_memory_json.py,sha256=zqpU4eCzQXx04aRsigddJAyhvklmTgSAzeRTuEdNw0c,5194
-basic_memory/cli/commands/mcp.py,sha256=01yBzmFAwxqDUvTQWu--aynvWlHD8P8D3syYUqGJFQw,998
+basic_memory/cli/commands/mcp.py,sha256=sWwRLRbY_FYUNxoy1a8risypnjS9YvZbnP3IjijiUZ0,1025
 basic_memory/cli/commands/project.py,sha256=BSjdz07xDM3R4CUXggv1qhrWLJsEgvGFir6aOUzdr2Q,11330
 basic_memory/cli/commands/status.py,sha256=nbs3myxaNtehEEJ4BBngPuKs-vqZTHNCCb0bTgDsE-s,5277
-basic_memory/cli/commands/sync.py,sha256=3MD99jZuIQ16cKx6mvOtsCZR7FGFPU9Bb1WUQAb6VVY,8374
+basic_memory/cli/commands/sync.py,sha256=3jwgabxkF4WyFZ-gRC1l8A8p8Z_aYrzHRXOtUfYy2yc,8324
 basic_memory/cli/commands/tool.py,sha256=7wte1TqjG__NcC7BB0BRLl8THB3t5eAngud0zVHBQ8k,9506
 basic_memory/markdown/__init__.py,sha256=DdzioCWtDnKaq05BHYLgL_78FawEHLpLXnp-kPSVfIc,501
 basic_memory/markdown/entity_parser.py,sha256=I4K1Uq8kLYBmShIZQuXI9FuQjzpoV924-jDH9dIiYb8,4420
@@ -43,7 +43,7 @@ basic_memory/markdown/utils.py,sha256=wr7KnDMThgnztkOoqSG_ANPhwNBoPkyjYP1sA1Wnxe
 basic_memory/mcp/__init__.py,sha256=dsDOhKqjYeIbCULbHIxfcItTbqudEuEg1Np86eq0GEQ,35
 basic_memory/mcp/async_client.py,sha256=Eo345wANiBRSM4u3j_Vd6Ax4YtMg7qbWd9PIoFfj61I,236
 basic_memory/mcp/main.py,sha256=0kbcyf1PxRC1bLnHv2zzParfJ6cOq7Am9ScF9UoI50U,703
-basic_memory/mcp/server.py,sha256=xrgG602nqrS6XZAUYl-KfpEFH78eKtJMO0WyKo4H9VE,307
+basic_memory/mcp/server.py,sha256=RgNIyRUsuBgoCntj_5Dn2_QNTBYQ1tjFSEn-Z1PoFzU,1099
 basic_memory/mcp/prompts/__init__.py,sha256=-Bl9Dgj2TD9PULjzggPqXuvPEjWCRy7S9Yg03h2-U7A,615
 basic_memory/mcp/prompts/ai_assistant_guide.py,sha256=8TI5xObiRVcwv6w9by1xQHlX0whvyE7-LGsiqDMRTFg,821
 basic_memory/mcp/prompts/continue_conversation.py,sha256=I1FdNXIsSBKsu2ABj8TRRr-mdZKZ1K8LMCUfAik5Iqs,4424
@@ -71,7 +71,7 @@ basic_memory/repository/entity_repository.py,sha256=VLKlQ97-_HhSqc-st_YToWUNE4pJ
 basic_memory/repository/observation_repository.py,sha256=BOcy4wARqCXu-thYyt7mPxt2A2C8TW0le3s_X9wrK6I,1701
 basic_memory/repository/project_info_repository.py,sha256=nHWzs0WBQ366WfzIYZgnAfU6tyQ_8slEszWNlDSeIlo,336
 basic_memory/repository/relation_repository.py,sha256=DwpTcn9z_1sZQcyMOUABz1k1VSwo_AU63x2zR7aerTk,2933
-basic_memory/repository/repository.py,sha256=cZFCjp7Q-fKwjEYe3ubG1rgYbPEQXsocAn9LgFNXCG0,12071
+basic_memory/repository/repository.py,sha256=X03U6FA3tpQ8FoULL7J7GByUeArSc2Ajb5GIJjZ8HBA,11934
 basic_memory/repository/search_repository.py,sha256=z6oX6wCLo2JaW2hawtgiyxmTsboKTjuO7cgXsFsQhmA,11607
 basic_memory/schemas/__init__.py,sha256=KHzF2lZhYXRsH2g6tV5Oivlk1EHFfrlbKuiRllqnBzs,1570
 basic_memory/schemas/base.py,sha256=dwnaI5fJXsdp81mdH0ZpmJ-WICY-0M7ZPWeW5OUgBG8,5685
@@ -85,15 +85,16 @@ basic_memory/services/__init__.py,sha256=oop6SKmzV4_NAYt9otGnupLGVCCKIVgxEcdRQWw
 basic_memory/services/context_service.py,sha256=vUo4j-_UDDlpL_PxoNTqmG5kZ2m7UfPyIz6FY3SCU7o,9715
 basic_memory/services/entity_service.py,sha256=lCdqRnAkaolt_pr48lFxRXAj2YRS-nasJTkepBf3zlU,12915
 basic_memory/services/exceptions.py,sha256=VGlCLd4UD2w5NWKqC7QpG4jOM_hA7jKRRM-MqvEVMNk,288
-basic_memory/services/file_service.py,sha256=7uLVkcFMFj20srdch8c6u9T6nO5X4wHgcXdL81pGV88,9935
+basic_memory/services/file_service.py,sha256=egoJnhoHBUX_wepjkLyyc6qZkPfOexlj8p0HRvtdxWw,9940
+basic_memory/services/initialization.py,sha256=T8KPFpFzeURORPjvfhvHE7Vnmx_TXUUGumBCEEiCJaM,4787
 basic_memory/services/link_resolver.py,sha256=3I3wp5HHpe17DNHhn1IG3_yWWHYtEZKRNL7I2j_AHos,3599
 basic_memory/services/search_service.py,sha256=1K1YuWFVreKjn6LkbOpl-zCmXYjqOQS1qB-yvkwu-zc,9817
 basic_memory/services/service.py,sha256=V-d_8gOV07zGIQDpL-Ksqs3ZN9l3qf3HZOK1f_YNTag,336
 basic_memory/sync/__init__.py,sha256=CVHguYH457h2u2xoM8KvOilJC71XJlZ-qUh8lHcjYj4,156
-basic_memory/sync/sync_service.py,sha256=R0mBgTRQP_o55ldzn4Il876D1eXVyJo32cgLcPjQPXk,23611
-basic_memory/sync/watch_service.py,sha256=IliisNN8HMKLnBaF95wGHpA5WqRWujprWJVxOnAQ2yc,13468
-basic_memory-0.12.0.dist-info/METADATA,sha256=jtcnzL11Nr6pAEBENRwEUu5Z0qfHmGM7dyrxtap8ULQ,15054
-basic_memory-0.12.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-basic_memory-0.12.0.dist-info/entry_points.txt,sha256=wvE2mRF6-Pg4weIYcfQ-86NOLZD4WJg7F7TIsRVFLb8,90
-basic_memory-0.12.0.dist-info/licenses/LICENSE,sha256=hIahDEOTzuHCU5J2nd07LWwkLW7Hko4UFO__ffsvB-8,34523
-basic_memory-0.12.0.dist-info/RECORD,,
+basic_memory/sync/sync_service.py,sha256=UYIHk6ACVLo34RLg08voo_vDosNh_pua3HEXY9JM2zw,19610
+basic_memory/sync/watch_service.py,sha256=ipkW9zK1MhisvdHambB9sesB6vNm0OapMZZM7w0GmsQ,14338
+basic_memory-0.12.1.dist-info/METADATA,sha256=zdQD7uRrb9DBoBS8XM362yUZ5nd-w3PFARc4E7BIMAI,14992
+basic_memory-0.12.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+basic_memory-0.12.1.dist-info/entry_points.txt,sha256=wvE2mRF6-Pg4weIYcfQ-86NOLZD4WJg7F7TIsRVFLb8,90
+basic_memory-0.12.1.dist-info/licenses/LICENSE,sha256=hIahDEOTzuHCU5J2nd07LWwkLW7Hko4UFO__ffsvB-8,34523
+basic_memory-0.12.1.dist-info/RECORD,,