basic-memory 0.12.0__py3-none-any.whl → 0.12.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of basic-memory has been flagged as potentially problematic by the registry.
- basic_memory/__init__.py +1 -1
- basic_memory/api/app.py +3 -34
- basic_memory/cli/app.py +13 -1
- basic_memory/cli/commands/import_memory_json.py +1 -1
- basic_memory/cli/commands/mcp.py +16 -14
- basic_memory/cli/commands/sync.py +8 -8
- basic_memory/cli/main.py +3 -44
- basic_memory/config.py +2 -2
- basic_memory/markdown/entity_parser.py +1 -1
- basic_memory/mcp/server.py +29 -3
- basic_memory/repository/repository.py +0 -4
- basic_memory/services/file_service.py +1 -1
- basic_memory/services/initialization.py +143 -0
- basic_memory/sync/sync_service.py +75 -185
- basic_memory/sync/watch_service.py +49 -30
- {basic_memory-0.12.0.dist-info → basic_memory-0.12.2.dist-info}/METADATA +3 -9
- {basic_memory-0.12.0.dist-info → basic_memory-0.12.2.dist-info}/RECORD +20 -19
- {basic_memory-0.12.0.dist-info → basic_memory-0.12.2.dist-info}/WHEEL +0 -0
- {basic_memory-0.12.0.dist-info → basic_memory-0.12.2.dist-info}/entry_points.txt +0 -0
- {basic_memory-0.12.0.dist-info → basic_memory-0.12.2.dist-info}/licenses/LICENSE +0 -0
basic_memory/__init__.py
CHANGED
basic_memory/api/app.py
CHANGED
@@ -1,6 +1,5 @@
 """FastAPI application for basic-memory knowledge graph API."""
 
-import asyncio
 from contextlib import asynccontextmanager
 
 from fastapi import FastAPI, HTTPException
@@ -10,44 +9,14 @@ from loguru import logger
 from basic_memory import db
 from basic_memory.api.routers import knowledge, memory, project_info, resource, search
 from basic_memory.config import config as project_config
-from basic_memory.
-from basic_memory.sync import SyncService, WatchService
-
-
-async def run_background_sync(sync_service: SyncService, watch_service: WatchService):  # pragma: no cover
-    logger.info(f"Starting watch service to sync file changes in dir: {project_config.home}")
-    # full sync
-    await sync_service.sync(project_config.home, show_progress=False)
-
-    # watch changes
-    await watch_service.run()
+from basic_memory.services.initialization import initialize_app
 
 
 @asynccontextmanager
 async def lifespan(app: FastAPI):  # pragma: no cover
     """Lifecycle manager for the FastAPI app."""
-
-
-    # app config
-    basic_memory_config = config_manager.load_config()
-    logger.info(f"Sync changes enabled: {basic_memory_config.sync_changes}")
-    logger.info(f"Update permalinks on move enabled: {basic_memory_config.update_permalinks_on_move}")
-
-    watch_task = None
-    if basic_memory_config.sync_changes:
-        # import after migrations have run
-        from basic_memory.cli.commands.sync import get_sync_service
-
-        sync_service = await get_sync_service()
-        watch_service = WatchService(
-            sync_service=sync_service,
-            file_service=sync_service.entity_service.file_service,
-            config=project_config,
-        )
-        watch_task = asyncio.create_task(run_background_sync(sync_service, watch_service))
-    else:
-        logger.info("Sync changes disabled. Skipping watch service.")
-
+    # Initialize database and file sync services
+    watch_task = await initialize_app(project_config)
 
     # proceed with startup
     yield
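Taken together, the FastAPI startup path is now just the standard lifespan pattern delegating to the shared initializer. A minimal sketch of that pattern (condensed from the diff above; not the package's verbatim file):

    from contextlib import asynccontextmanager
    from fastapi import FastAPI

    from basic_memory.config import config as project_config
    from basic_memory.services.initialization import initialize_app

    @asynccontextmanager
    async def lifespan(app: FastAPI):
        # Runs migrations, then starts the background watch task when
        # sync_changes is enabled (initialize_app returns None otherwise).
        watch_task = await initialize_app(project_config)
        yield  # the app serves requests here

    app = FastAPI(lifespan=lifespan)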
basic_memory/cli/app.py
CHANGED
@@ -7,8 +7,11 @@ def version_callback(value: bool) -> None:
     """Show version and exit."""
     if value:  # pragma: no cover
         import basic_memory
+        from basic_memory.config import config
 
         typer.echo(f"Basic Memory version: {basic_memory.__version__}")
+        typer.echo(f"Current project: {config.project}")
+        typer.echo(f"Project path: {config.home}")
         raise typer.Exit()
 
 
@@ -17,11 +20,12 @@ app = typer.Typer(name="basic-memory")
 
 @app.callback()
 def app_callback(
+    ctx: typer.Context,
     project: Optional[str] = typer.Option(
         None,
         "--project",
         "-p",
-        help="Specify which project to use",
+        help="Specify which project to use 1",
         envvar="BASIC_MEMORY_PROJECT",
     ),
     version: Optional[bool] = typer.Option(
@@ -34,6 +38,7 @@ def app_callback(
     ),
 ) -> None:
     """Basic Memory - Local-first personal knowledge management."""
+
     # We use the project option to set the BASIC_MEMORY_PROJECT environment variable
     # The config module will pick this up when loading
     if project:  # pragma: no cover
@@ -53,6 +58,13 @@ def app_callback(
 
         config = new_config
 
+    # Run migrations for every command unless --version was specified
+    if not version and ctx.invoked_subcommand is not None:
+        from basic_memory.config import config
+        from basic_memory.services.initialization import ensure_initialize_database
+
+        ensure_initialize_database(config)
+
 
 # Register sub-command groups
 import_app = typer.Typer(help="Import data from various sources")
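The migration hook above relies on Click's context to tell a real subcommand apart from a bare --version invocation. A self-contained sketch of the same Typer pattern (hypothetical command names, not from the package):

    import typer

    app = typer.Typer()

    @app.callback()
    def main(ctx: typer.Context, version: bool = typer.Option(False, "--version")) -> None:
        # ctx.invoked_subcommand is None when no subcommand was given,
        # so one-time setup only runs ahead of an actual command.
        if not version and ctx.invoked_subcommand is not None:
            typer.echo(f"running setup before: {ctx.invoked_subcommand}")

    @app.command()
    def hello() -> None:
        typer.echo("hello")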
basic_memory/cli/commands/import_memory_json.py
CHANGED
@@ -38,7 +38,7 @@ async def process_memory_json(
         read_task = progress.add_task("Reading memory.json...", total=None)
 
         # First pass - collect entities and relations
-        with open(json_path) as f:
+        with open(json_path, encoding="utf-8") as f:
             lines = f.readlines()
             progress.update(read_task, total=len(lines))
 
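The explicit encoding="utf-8" added here (and to the read_text() calls in entity_parser.py and sync_service.py below) matters because open() without an encoding falls back to the platform's locale encoding, commonly cp1252 on Windows, which can raise UnicodeDecodeError on UTF-8 notes. A minimal illustration:

    from pathlib import Path

    p = Path("note.md")
    p.write_text("héllo ünicode", encoding="utf-8")

    # text = p.read_text()  # uses the locale encoding; may fail on non-UTF-8 systems
    text = p.read_text(encoding="utf-8")  # explicit and portable
    print(text)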
basic_memory/cli/commands/mcp.py
CHANGED
@@ -1,10 +1,7 @@
 """MCP server command."""
 
-from loguru import logger
-
 import basic_memory
 from basic_memory.cli.app import app
-from basic_memory.config import config, config_manager
 
 # Import mcp instance
 from basic_memory.mcp.server import mcp as mcp_server  # pragma: no cover
@@ -15,19 +12,24 @@ import basic_memory.mcp.tools  # noqa: F401  # pragma: no cover
 
 @app.command()
 def mcp():  # pragma: no cover
-    """Run the MCP server
-
-
+    """Run the MCP server"""
+    from basic_memory.config import config
+    import asyncio
+    from basic_memory.services.initialization import initialize_database
+
+    # First, run just the database migrations synchronously
+    asyncio.run(initialize_database(config))
+
+    # Load config to check if sync is enabled
+    from basic_memory.config import config_manager
 
-    # app config
     basic_memory_config = config_manager.load_config()
 
-
-
-
-
-
-        f"Update permalinks on move enabled: {basic_memory_config.update_permalinks_on_move}"
-    )
+    if basic_memory_config.sync_changes:
+        # For now, we'll just log that sync will be handled by the MCP server
+        from loguru import logger
+
+        logger.info("File sync will be handled by the MCP server")
 
+    # Start the MCP server
     mcp_server.run()
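The command now drives the async migration step to completion with asyncio.run() before entering the blocking server loop, so the schema is current by the time the first tool call arrives. The general shape, as a sketch (placeholder names, not the package's code):

    import asyncio

    async def run_migrations() -> None:
        ...  # awaitable setup work; stands in for initialize_database(config)

    def serve() -> None:
        # asyncio.run() creates and closes its own event loop, so setup
        # fully completes before the blocking call below starts.
        asyncio.run(run_migrations())
        block_forever()  # stands in for mcp_server.run()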
basic_memory/cli/commands/sync.py
CHANGED
@@ -179,14 +179,14 @@ async def run_sync(verbose: bool = False, watch: bool = False, console_status: b
         )
 
         # full sync - no progress bars in watch mode
-        await sync_service.sync(config.home
+        await sync_service.sync(config.home)
 
         # watch changes
         await watch_service.run()  # pragma: no cover
     else:
-        # one time sync
+        # one time sync
         logger.info("Running one-time sync")
-        knowledge_changes = await sync_service.sync(config.home
+        knowledge_changes = await sync_service.sync(config.home)
 
         # Log results
         duration_ms = int((time.time() - start_time) * 1000)
@@ -237,11 +237,11 @@ def sync(
         if not isinstance(e, typer.Exit):
             logger.exception(
                 "Sync command failed",
-                project=config.project,
-                error=str(e),
-                error_type=type(e).__name__,
-                watch_mode=watch,
-                directory=str(config.home),
+                f"project={config.project},"
+                f"error={str(e)},"
+                f"error_type={type(e).__name__},"
+                f"watch_mode={watch},"
+                f"directory={str(config.home)}",
             )
             typer.echo(f"Error during sync: {e}", err=True)
             raise typer.Exit(1)
basic_memory/cli/main.py
CHANGED
@@ -1,9 +1,5 @@
 """Main CLI entry point for basic-memory."""  # pragma: no cover
 
-import asyncio
-
-import typer
-
 from basic_memory.cli.app import app  # pragma: no cover
 
 # Register commands
@@ -20,48 +16,11 @@ from basic_memory.cli.commands import (  # noqa: F401  # pragma: no cover
     tool,
 )
 from basic_memory.config import config
-from basic_memory.
-
-
-# Version command
-@app.callback(invoke_without_command=True)
-def main(
-    ctx: typer.Context,
-    project: str = typer.Option(  # noqa
-        "main",
-        "--project",
-        "-p",
-        help="Specify which project to use",
-        envvar="BASIC_MEMORY_PROJECT",
-    ),
-    version: bool = typer.Option(
-        False,
-        "--version",
-        "-V",
-        help="Show version information and exit.",
-        is_eager=True,
-    ),
-):
-    """Basic Memory - Local-first personal knowledge management system."""
-    if version:  # pragma: no cover
-        from basic_memory import __version__
-        from basic_memory.config import config
-
-        typer.echo(f"Basic Memory v{__version__}")
-        typer.echo(f"Current project: {config.project}")
-        typer.echo(f"Project path: {config.home}")
-        raise typer.Exit()
-
-    # Handle project selection via environment variable
-    if project:
-        import os
-
-        os.environ["BASIC_MEMORY_PROJECT"] = project
-
+from basic_memory.services.initialization import ensure_initialization
 
 if __name__ == "__main__":  # pragma: no cover
-    # Run
-
+    # Run initialization if we are starting as a module
+    ensure_initialization(config)
 
     # start the app
     app()
basic_memory/config.py
CHANGED
@@ -35,7 +35,7 @@ class ProjectConfig(BaseSettings):
 
     # Watch service configuration
     sync_delay: int = Field(
-        default=
+        default=1000, description="Milliseconds to wait after changes before syncing", gt=0
     )
 
     # update permalinks on move
@@ -274,7 +274,7 @@ def setup_basic_memory_logging():  # pragma: no cover
         console=False,
     )
 
-    logger.info(f"
+    logger.info(f"Basic Memory {basic_memory.__version__} (Project: {config.project})")
     _LOGGING_SETUP = True
 
 
basic_memory/markdown/entity_parser.py
CHANGED
@@ -104,7 +104,7 @@ class EntityParser:
         absolute_path = self.base_path / path
 
         # Parse frontmatter and content using python-frontmatter
-        file_content = absolute_path.read_text()
+        file_content = absolute_path.read_text(encoding="utf-8")
         return await self.parse_file_content(absolute_path, file_content)
 
     async def parse_file_content(self, absolute_path, file_content):
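The restored sync_delay default above uses a constrained pydantic Field; values that violate gt=0 are rejected at model construction. A small sketch of that behavior (assuming pydantic v2, which the package's BaseSettings usage suggests):

    from pydantic import BaseModel, Field, ValidationError

    class Cfg(BaseModel):
        sync_delay: int = Field(
            default=1000, description="Milliseconds to wait after changes before syncing", gt=0
        )

    print(Cfg().sync_delay)  # 1000
    try:
        Cfg(sync_delay=0)    # violates gt=0
    except ValidationError as err:
        print("rejected:", err.errors()[0]["type"])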
basic_memory/mcp/server.py
CHANGED
@@ -1,11 +1,37 @@
 """Enhanced FastMCP server instance for Basic Memory."""
 
+import asyncio
+from contextlib import asynccontextmanager
+from typing import AsyncIterator, Optional
+
 from mcp.server.fastmcp import FastMCP
-from mcp.server.fastmcp.utilities.logging import configure_logging
+from mcp.server.fastmcp.utilities.logging import configure_logging as mcp_configure_logging
+from dataclasses import dataclass
+
+from basic_memory.config import config as project_config
+from basic_memory.services.initialization import initialize_app
 
 # mcp console logging
-
+mcp_configure_logging(level="ERROR")
+
+
+@dataclass
+class AppContext:
+    watch_task: Optional[asyncio.Task]
+
+
+@asynccontextmanager
+async def app_lifespan(server: FastMCP) -> AsyncIterator[AppContext]:  # pragma: no cover
+    """Manage application lifecycle with type-safe context"""
+    # Initialize on startup
+    watch_task = await initialize_app(project_config)
+    try:
+        yield AppContext(watch_task=watch_task)
+    finally:
+        # Cleanup on shutdown
+        if watch_task:
+            watch_task.cancel()
 
 
 # Create the shared server instance
-mcp = FastMCP("Basic Memory", log_level="ERROR")
+mcp = FastMCP("Basic Memory", log_level="ERROR", lifespan=app_lifespan)
basic_memory/repository/repository.py
CHANGED
@@ -137,8 +137,6 @@ class Repository[T: Base]:
 
     async def find_one(self, query: Select[tuple[T]]) -> Optional[T]:
         """Execute a query and retrieve a single record."""
-        logger.debug(f"Finding one {self.Model.__name__} with query: {query}")
-
         # add in load options
         query = query.options(*self.get_load_options())
         result = await self.execute_query(query)
@@ -270,11 +268,9 @@ class Repository[T: Base]:
         """Execute a query asynchronously."""
 
         query = query.options(*self.get_load_options()) if use_query_options else query
-
         logger.debug(f"Executing query: {query}")
         async with db.scoped_session(self.session_maker) as session:
             result = await session.execute(query)
-            logger.debug("Query executed successfully")
             return result
 
     def get_load_options(self) -> List[LoaderOption]:
basic_memory/services/file_service.py
CHANGED
@@ -60,7 +60,7 @@ class FileService:
         Returns:
             Raw content string without metadata sections
         """
-        logger.debug("Reading entity content
+        logger.debug(f"Reading entity content, entity_id={entity.id}, permalink={entity.permalink}")
 
         file_path = self.get_entity_path(entity)
         markdown = await self.markdown_processor.read_file(file_path)
basic_memory/services/initialization.py
ADDED
@@ -0,0 +1,143 @@
+"""Shared initialization service for Basic Memory.
+
+This module provides shared initialization functions used by both CLI and API
+to ensure consistent application startup across all entry points.
+"""
+
+import asyncio
+from typing import Optional
+
+from loguru import logger
+
+from basic_memory import db
+from basic_memory.config import ProjectConfig, config_manager
+from basic_memory.sync import WatchService
+
+# Import this inside functions to avoid circular imports
+# from basic_memory.cli.commands.sync import get_sync_service
+
+
+async def initialize_database(app_config: ProjectConfig) -> None:
+    """Run database migrations to ensure schema is up to date.
+
+    Args:
+        app_config: The Basic Memory project configuration
+    """
+    try:
+        logger.info("Running database migrations...")
+        await db.run_migrations(app_config)
+        logger.info("Migrations completed successfully")
+    except Exception as e:
+        logger.error(f"Error running migrations: {e}")
+        # Allow application to continue - it might still work
+        # depending on what the error was, and will fail with a
+        # more specific error if the database is actually unusable
+
+
+async def initialize_file_sync(
+    app_config: ProjectConfig,
+) -> asyncio.Task:
+    """Initialize file synchronization services.
+
+    Args:
+        app_config: The Basic Memory project configuration
+
+    Returns:
+        Tuple of (sync_service, watch_service, watch_task) if sync is enabled,
+        or (None, None, None) if sync is disabled
+    """
+    # Load app configuration
+    # Import here to avoid circular imports
+    from basic_memory.cli.commands.sync import get_sync_service
+
+    # Initialize sync service
+    sync_service = await get_sync_service()
+
+    # Initialize watch service
+    watch_service = WatchService(
+        sync_service=sync_service,
+        file_service=sync_service.entity_service.file_service,
+        config=app_config,
+        quiet=True,
+    )
+
+    # Create the background task for running sync
+    async def run_background_sync():  # pragma: no cover
+        # Run initial full sync
+        await sync_service.sync(app_config.home)
+        logger.info("Sync completed successfully")
+
+        # Start background sync task
+        logger.info(f"Starting watch service to sync file changes in dir: {app_config.home}")
+
+        # Start watching for changes
+        await watch_service.run()
+
+    watch_task = asyncio.create_task(run_background_sync())
+    logger.info("Watch service started")
+    return watch_task
+
+
+async def initialize_app(
+    app_config: ProjectConfig,
+) -> Optional[asyncio.Task]:
+    """Initialize the Basic Memory application.
+
+    This function handles all initialization steps needed for both API and shor lived CLI commands.
+    For long running commands like mcp, a
+    - Running database migrations
+    - Setting up file synchronization
+
+    Args:
+        app_config: The Basic Memory project configuration
+    """
+    # Initialize database first
+    await initialize_database(app_config)
+
+    basic_memory_config = config_manager.load_config()
+    logger.info(f"Sync changes enabled: {basic_memory_config.sync_changes}")
+    logger.info(
+        f"Update permalinks on move enabled: {basic_memory_config.update_permalinks_on_move}"
+    )
+    if not basic_memory_config.sync_changes:  # pragma: no cover
+        logger.info("Sync changes disabled. Skipping watch service.")
+        return
+
+    # Initialize file sync services
+    return await initialize_file_sync(app_config)
+
+
+def ensure_initialization(app_config: ProjectConfig) -> None:
+    """Ensure initialization runs in a synchronous context.
+
+    This is a wrapper for the async initialize_app function that can be
+    called from synchronous code like CLI entry points.
+
+    Args:
+        app_config: The Basic Memory project configuration
+    """
+    try:
+        asyncio.run(initialize_app(app_config))
+    except Exception as e:
+        logger.error(f"Error during initialization: {e}")
+        # Continue execution even if initialization fails
+        # The command might still work, or will fail with a
+        # more specific error message
+
+
+def ensure_initialize_database(app_config: ProjectConfig) -> None:
+    """Ensure initialization runs in a synchronous context.
+
+    This is a wrapper for the async initialize_database function that can be
+    called from synchronous code like CLI entry points.
+
+    Args:
+        app_config: The Basic Memory project configuration
+    """
+    try:
+        asyncio.run(initialize_database(app_config))
+    except Exception as e:
+        logger.error(f"Error during initialization: {e}")
+        # Continue execution even if initialization fails
+        # The command might still work, or will fail with a
+        # more specific error message
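The new module gives every entry point one call to make. In synchronous code the ensure_* wrappers own the event loop via asyncio.run(); a usage sketch:

    from basic_memory.config import config
    from basic_memory.services.initialization import (
        ensure_initialization,        # migrations plus optional file sync
        ensure_initialize_database,   # migrations only
    )

    # Long-lived entry point (e.g. running the CLI as a module): full startup.
    ensure_initialization(config)

    # Short-lived CLI command: just bring the schema up to date.
    ensure_initialize_database(config)

Note that both wrappers deliberately log and swallow exceptions, so a failed initialization degrades to a later, more specific error instead of aborting the command outright.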
basic_memory/sync/sync_service.py
CHANGED
@@ -1,9 +1,8 @@
 """Service for syncing files between filesystem and database."""
 
 import os
-
-from dataclasses import dataclass
-from dataclasses import field
+import time
+from dataclasses import dataclass, field
 from datetime import datetime
 from pathlib import Path
 from typing import Dict, Optional, Set, Tuple
@@ -18,8 +17,6 @@ from basic_memory.models import Entity
 from basic_memory.repository import EntityRepository, RelationRepository
 from basic_memory.services import EntityService, FileService
 from basic_memory.services.search_service import SearchService
-import time
-from rich.progress import Progress, TextColumn, BarColumn, TaskProgressColumn
 
 
 @dataclass
@@ -83,145 +80,51 @@ class SyncService:
         self.search_service = search_service
         self.file_service = file_service
 
-    async def sync(self, directory: Path
+    async def sync(self, directory: Path) -> SyncReport:
         """Sync all files with database."""
 
         start_time = time.time()
-
-        progress = None  # Will be initialized if show_progress is True
-
-        logger.info("Sync operation started", directory=str(directory))
+        logger.info(f"Sync operation started for directory: {directory}")
 
         # initial paths from db to sync
         # path -> checksum
-        if show_progress:
-            from rich.console import Console
-
-            console = Console()
-            console.print(f"Scanning directory: {directory}")
-
         report = await self.scan(directory)
 
         # Initialize progress tracking if requested
-        if show_progress and report.total > 0:
-            progress = Progress(
-                TextColumn("[bold blue]{task.description}"),
-                BarColumn(),
-                TaskProgressColumn(),
-                console=console,
-                expand=True,
-            )
-
         # order of sync matters to resolve relations effectively
         logger.info(
-            "Sync changes detected"
-
-            modified_files=len(report.modified),
-            deleted_files=len(report.deleted),
-            moved_files=len(report.moves),
+            f"Sync changes detected: new_files={len(report.new)}, modified_files={len(report.modified)}, "
+            + f"deleted_files={len(report.deleted)}, moved_files={len(report.moves)}"
         )
 
-
-
-
-
-
-
-
-
-        if report.deleted:  # pragma: no cover
-            delete_task = progress.add_task(  # pyright: ignore
-                "[red]Deleting files...", total=len(report.deleted)
-            )
-
-        new_task = None
-        if report.new:
-            new_task = progress.add_task(  # pyright: ignore
-                "[green]Adding new files...", total=len(report.new)
-            )
-
-        modify_task = None
-        if report.modified:  # pragma: no cover
-            modify_task = progress.add_task(  # pyright: ignore
-                "[yellow]Updating modified files...", total=len(report.modified)
+        # sync moves first
+        for old_path, new_path in report.moves.items():
+            # in the case where a file has been deleted and replaced by another file
+            # it will show up in the move and modified lists, so handle it in modified
+            if new_path in report.modified:
+                report.modified.remove(new_path)
+                logger.debug(
+                    f"File marked as moved and modified: old_path={old_path}, new_path={new_path}"
                 )
+            else:
+                await self.handle_move(old_path, new_path)
 
-
-
-
-            # it will show up in the move and modified lists, so handle it in modified
-            if new_path in report.modified:  # pragma: no cover
-                report.modified.remove(new_path)
-                logger.debug(
-                    "File marked as moved and modified",
-                    old_path=old_path,
-                    new_path=new_path,
-                    action="processing as modified",
-                )
-            else:  # pragma: no cover
-                await self.handle_move(old_path, new_path)
-
-            if move_task is not None:  # pragma: no cover
-                progress.update(move_task, advance=1)  # pyright: ignore
-
-        # deleted next
-        for i, path in enumerate(report.deleted):  # pragma: no cover
-            await self.handle_delete(path)
-            if delete_task is not None:  # pragma: no cover
-                progress.update(delete_task, advance=1)  # pyright: ignore
-
-        # then new and modified
-        for i, path in enumerate(report.new):
-            await self.sync_file(path, new=True)
-            if new_task is not None:
-                progress.update(new_task, advance=1)  # pyright: ignore
-
-        for i, path in enumerate(report.modified):  # pragma: no cover
-            await self.sync_file(path, new=False)
-            if modify_task is not None:  # pragma: no cover
-                progress.update(modify_task, advance=1)  # pyright: ignore
-
-        # Final step - resolving relations
-        if report.total > 0:
-            relation_task = progress.add_task("[cyan]Resolving relations...", total=1)  # pyright: ignore
-            await self.resolve_relations()
-            progress.update(relation_task, advance=1)  # pyright: ignore
-        else:
-            # No progress display - proceed with normal sync
-            # sync moves first
-            for old_path, new_path in report.moves.items():
-                # in the case where a file has been deleted and replaced by another file
-                # it will show up in the move and modified lists, so handle it in modified
-                if new_path in report.modified:
-                    report.modified.remove(new_path)
-                    logger.debug(
-                        "File marked as moved and modified",
-                        old_path=old_path,
-                        new_path=new_path,
-                        action="processing as modified",
-                    )
-                else:
-                    await self.handle_move(old_path, new_path)
-
-            # deleted next
-            for path in report.deleted:
-                await self.handle_delete(path)
+        # deleted next
+        for path in report.deleted:
+            await self.handle_delete(path)
 
-
-
-
+        # then new and modified
+        for path in report.new:
+            await self.sync_file(path, new=True)
 
-
-
+        for path in report.modified:
+            await self.sync_file(path, new=False)
 
-
+        await self.resolve_relations()
 
         duration_ms = int((time.time() - start_time) * 1000)
         logger.info(
-            "Sync operation completed"
-            directory=str(directory),
-            total_changes=report.total,
-            duration_ms=duration_ms,
+            f"Sync operation completed: directory={directory}, total_changes={report.total}, duration_ms={duration_ms}"
        )
 
         return report
@@ -230,6 +133,7 @@ class SyncService:
         """Scan directory for changes compared to database state."""
 
         db_paths = await self.get_db_file_state()
+        logger.debug(f"Found {len(db_paths)} db paths")
 
         # Track potentially moved files by checksum
         scan_result = await self.scan_directory(directory)
@@ -280,6 +184,7 @@ class SyncService:
         :param db_records: the data from the db
         """
         db_records = await self.entity_repository.find_all()
+        logger.info(f"Found {len(db_records)} db records")
         return {r.file_path: r.checksum or "" for r in db_records}
 
     async def sync_file(
@@ -296,10 +201,7 @@ class SyncService:
         """
         try:
             logger.debug(
-                "Syncing file"
-                path=path,
-                is_new=new,
-                is_markdown=self.file_service.is_markdown(path),
+                f"Syncing file path={path} is_new={new} is_markdown={self.file_service.is_markdown(path)}"
             )
 
             if self.file_service.is_markdown(path):
@@ -311,7 +213,7 @@ class SyncService:
                 await self.search_service.index_entity(entity)
 
             logger.debug(
-                "File sync completed
+                f"File sync completed, path={path}, entity_id={entity.id}, checksum={checksum[:8]}"
             )
             return entity, checksum
 
@@ -330,10 +232,10 @@ class SyncService:
             Tuple of (entity, checksum)
         """
         # Parse markdown first to get any existing permalink
-        logger.debug("Parsing markdown file
+        logger.debug(f"Parsing markdown file, path: {path}, new: {new}")
 
         file_path = self.entity_parser.base_path / path
-        file_content = file_path.read_text()
+        file_content = file_path.read_text(encoding="utf-8")
         file_contains_frontmatter = has_frontmatter(file_content)
 
         # entity markdown will always contain front matter, so it can be used up create/update the entity
@@ -347,10 +249,7 @@ class SyncService:
         # If permalink changed, update the file
         if permalink != entity_markdown.frontmatter.permalink:
             logger.info(
-                "Updating permalink"
-                path=path,
-                old_permalink=entity_markdown.frontmatter.permalink,
-                new_permalink=permalink,
+                f"Updating permalink for path: {path}, old_permalink: {entity_markdown.frontmatter.permalink}, new_permalink: {permalink}"
             )
 
             entity_markdown.frontmatter.metadata["permalink"] = permalink
@@ -359,12 +258,12 @@ class SyncService:
         # if the file is new, create an entity
         if new:
             # Create entity with final permalink
-            logger.debug("Creating new entity from markdown
+            logger.debug(f"Creating new entity from markdown, path={path}")
             await self.entity_service.create_entity_from_markdown(Path(path), entity_markdown)
 
         # otherwise we need to update the entity and observations
         else:
-            logger.debug("Updating entity from markdown
+            logger.debug(f"Updating entity from markdown, path={path}")
             await self.entity_service.update_entity_and_observations(Path(path), entity_markdown)
 
         # Update relations and search index
@@ -379,12 +278,9 @@ class SyncService:
         await self.entity_repository.update(entity.id, {"checksum": final_checksum})
 
         logger.debug(
-            "Markdown sync completed"
-
-
-            observation_count=len(entity.observations),
-            relation_count=len(entity.relations),
-            checksum=final_checksum,
+            f"Markdown sync completed: path={path}, entity_id={entity.id}, "
+            f"observation_count={len(entity.observations)}, relation_count={len(entity.relations)}, "
+            f"checksum={final_checksum[:8]}"
         )
 
         # Return the final checksum to ensure everything is consistent
@@ -429,7 +325,7 @@ class SyncService:
         else:
             entity = await self.entity_repository.get_by_file_path(path)
             if entity is None:  # pragma: no cover
-                logger.error("Entity not found for existing file
+                logger.error(f"Entity not found for existing file, path={path}")
                 raise ValueError(f"Entity not found for existing file: {path}")
 
             updated = await self.entity_repository.update(
@@ -437,7 +333,7 @@ class SyncService:
             )
 
             if updated is None:  # pragma: no cover
-                logger.error("Failed to update entity
+                logger.error(f"Failed to update entity, entity_id={entity.id}, path={path}")
                 raise ValueError(f"Failed to update entity with ID {entity.id}")
 
             return updated, checksum
@@ -449,10 +345,7 @@ class SyncService:
         entity = await self.entity_repository.get_by_file_path(file_path)
         if entity:
             logger.info(
-                "Deleting entity"
-                file_path=file_path,
-                entity_id=entity.id,
-                permalink=entity.permalink,
+                f"Deleting entity with file_path={file_path}, entity_id={entity.id}, permalink={entity.permalink}"
             )
 
             # Delete from db (this cascades to observations/relations)
@@ -466,10 +359,8 @@ class SyncService:
             )
 
             logger.debug(
-                "Cleaning up search index"
-
-                file_path=file_path,
-                index_entries=len(permalinks),
+                f"Cleaning up search index for entity_id={entity.id}, file_path={file_path}, "
+                f"index_entries={len(permalinks)}"
             )
 
             for permalink in permalinks:
@@ -479,7 +370,7 @@ class SyncService:
                 await self.search_service.delete_by_entity_id(entity.id)
 
     async def handle_move(self, old_path, new_path):
-        logger.
+        logger.debug("Moving entity", old_path=old_path, new_path=new_path)
 
         entity = await self.entity_repository.get_by_file_path(old_path)
         if entity:
@@ -500,29 +391,28 @@ class SyncService:
                 updates["checksum"] = new_checksum
 
                 logger.info(
-                    "Updating permalink on move"
-
-
-                    new_checksum=new_checksum,
+                    f"Updating permalink on move,old_permalink={entity.permalink}"
+                    f"new_permalink={new_permalink}"
+                    f"new_checksum={new_checksum}"
                 )
 
             updated = await self.entity_repository.update(entity.id, updates)
 
             if updated is None:  # pragma: no cover
                 logger.error(
-                    "Failed to update entity path"
-                    entity_id=entity.id
-                    old_path=old_path
-                    new_path=new_path
+                    "Failed to update entity path"
+                    f"entity_id={entity.id}"
+                    f"old_path={old_path}"
+                    f"new_path={new_path}"
                 )
                 raise ValueError(f"Failed to update entity path for ID {entity.id}")
 
             logger.debug(
-                "Entity path updated"
-                entity_id=entity.id
-                permalink=entity.permalink
-                old_path=old_path
-                new_path=new_path
+                "Entity path updated"
+                f"entity_id={entity.id} "
+                f"permalink={entity.permalink} "
+                f"old_path={old_path} "
+                f"new_path={new_path} "
             )
 
             # update search index
@@ -537,10 +427,10 @@ class SyncService:
 
         for relation in unresolved_relations:
             logger.debug(
-                "Attempting to resolve relation"
-                relation_id=relation.id
-                from_id=relation.from_id
-                to_name=relation.to_name
+                "Attempting to resolve relation "
+                f"relation_id={relation.id} "
+                f"from_id={relation.from_id} "
+                f"to_name={relation.to_name}"
             )
 
             resolved_entity = await self.entity_service.link_resolver.resolve_link(relation.to_name)
@@ -548,12 +438,12 @@ class SyncService:
             # ignore reference to self
             if resolved_entity and resolved_entity.id != relation.from_id:
                 logger.debug(
-                    "Resolved forward reference"
-                    relation_id=relation.id
-                    from_id=relation.from_id
-                    to_name=relation.to_name
-                    resolved_id=resolved_entity.id
-                    resolved_title=resolved_entity.title,
+                    "Resolved forward reference "
+                    f"relation_id={relation.id} "
+                    f"from_id={relation.from_id} "
+                    f"to_name={relation.to_name} "
+                    f"resolved_id={resolved_entity.id} "
+                    f"resolved_title={resolved_entity.title}",
                 )
                 try:
                     await self.relation_repository.update(
@@ -565,10 +455,10 @@ class SyncService:
                     )
                 except IntegrityError:  # pragma: no cover
                     logger.debug(
-                        "Ignoring duplicate relation"
-                        relation_id=relation.id
-                        from_id=relation.from_id
-                        to_name=relation.to_name
+                        "Ignoring duplicate relation "
+                        f"relation_id={relation.id} "
+                        f"from_id={relation.from_id} "
+                        f"to_name={relation.to_name}"
                     )
 
         # update search index
@@ -586,7 +476,7 @@ class SyncService:
         """
         start_time = time.time()
 
-        logger.debug("Scanning directory
+        logger.debug(f"Scanning directory {directory}")
         result = ScanResult()
 
         for root, dirnames, filenames in os.walk(str(directory)):
@@ -608,10 +498,10 @@ class SyncService:
 
         duration_ms = int((time.time() - start_time) * 1000)
         logger.debug(
-            "
-            directory=str(directory)
-            files_found=len(result.files)
-            duration_ms=duration_ms
+            f"{directory} scan completed "
+            f"directory={str(directory)} "
+            f"files_found={len(result.files)} "
+            f"duration_ms={duration_ms}"
        )
 
        return result
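The rewrite drops the rich progress bars but keeps the ordering rationale: moves first, then deletes, then new files, then modified files, then relation resolution. The skeleton of the simplified flow, per the diff above:

    async def sync_flow(report):  # condensed sketch, not the full method
        for old_path, new_path in report.moves.items():
            if new_path in report.modified:
                # a deleted-and-replaced file appears as both a move and a
                # modification; it is pulled out of modified and the move skipped
                report.modified.remove(new_path)
            else:
                await handle_move(old_path, new_path)
        for path in report.deleted:
            await handle_delete(path)
        for path in report.new:
            await sync_file(path, new=True)
        for path in report.modified:
            await sync_file(path, new=False)
        await resolve_relations()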
basic_memory/sync/watch_service.py
CHANGED
@@ -70,22 +70,30 @@ class WatchServiceState(BaseModel):
 
 
 class WatchService:
-    def __init__(
+    def __init__(
+        self,
+        sync_service: SyncService,
+        file_service: FileService,
+        config: ProjectConfig,
+        quiet: bool = False,
+    ):
         self.sync_service = sync_service
         self.file_service = file_service
         self.config = config
         self.state = WatchServiceState()
         self.status_path = config.home / ".basic-memory" / WATCH_STATUS_JSON
         self.status_path.parent.mkdir(parents=True, exist_ok=True)
-
+
+        # quiet mode for mcp so it doesn't mess up stdout
+        self.console = Console(quiet=quiet)
 
     async def run(self):  # pragma: no cover
         """Watch for file changes and sync them"""
         logger.info(
             "Watch service started",
-            directory=str(self.config.home),
-            debounce_ms=self.config.sync_delay,
-            pid=os.getpid(),
+            f"directory={str(self.config.home)}",
+            f"debounce_ms={self.config.sync_delay}",
+            f"pid={os.getpid()}",
         )
 
         self.state.running = True
@@ -111,8 +119,8 @@ class WatchService:
         finally:
             logger.info(
                 "Watch service stopped",
-                directory=str(self.config.home),
-                runtime_seconds=int((datetime.now() - self.state.start_time).total_seconds()),
+                f"directory={str(self.config.home)}",
+                f"runtime_seconds={int((datetime.now() - self.state.start_time).total_seconds())}",
             )
 
             self.state.running = False
@@ -154,7 +162,7 @@ class WatchService:
 
         start_time = time.time()
 
-        logger.info("Processing file changes
+        logger.info(f"Processing file changes, change_count={len(changes)}, directory={directory}")
 
         # Group changes by type
         adds: List[str] = []
@@ -177,9 +185,17 @@ class WatchService:
                 modifies.append(relative_path)
 
         logger.debug(
-            "Grouped file changes
+            f"Grouped file changes, added={len(adds)}, deleted={len(deletes)}, modified={len(modifies)}"
        )
 
+        # because of our atomic writes on updates, an add may be an existing file
+        for added_path in adds:  # pragma: no cover TODO add test
+            entity = await self.sync_service.entity_repository.get_by_file_path(added_path)
+            if entity is not None:
+                logger.debug(f"Existing file will be processed as modified, path={added_path}")
+                adds.remove(added_path)
+                modifies.append(added_path)
+
         # Track processed files to avoid duplicates
         processed: Set[str] = set()
 
@@ -223,15 +239,16 @@ class WatchService:
                             status="success",
                         )
                         self.console.print(f"[blue]→[/blue] {deleted_path} → {added_path}")
+                        logger.info(f"move: {deleted_path} -> {added_path}")
                         processed.add(added_path)
                         processed.add(deleted_path)
                         break
                 except Exception as e:  # pragma: no cover
                     logger.warning(
                         "Error checking for move",
-                        old_path=deleted_path,
-                        new_path=added_path,
-                        error=str(e),
+                        f"old_path={deleted_path}",
+                        f"new_path={added_path}",
+                        f"error={str(e)}",
                     )
 
         # Handle remaining changes - group them by type for concise output
@@ -247,6 +264,7 @@ class WatchService:
                 await self.sync_service.handle_delete(path)
                 self.state.add_event(path=path, action="deleted", status="success")
                 self.console.print(f"[red]✕[/red] {path}")
+                logger.info(f"deleted: {path}")
                 processed.add(path)
                 delete_count += 1
 
@@ -257,28 +275,27 @@ class WatchService:
                 full_path = directory / path
                 if not full_path.exists() or full_path.is_dir():
                     logger.debug(
-                        "Skipping non-existent or directory path
+                        f"Skipping non-existent or directory path, path={path}"
                     )  # pragma: no cover
                     processed.add(path)  # pragma: no cover
                     continue  # pragma: no cover
 
-                logger.debug("Processing new file
+                logger.debug(f"Processing new file, path={path}")
                 entity, checksum = await self.sync_service.sync_file(path, new=True)
                 if checksum:
                     self.state.add_event(
                         path=path, action="new", status="success", checksum=checksum
                     )
                     self.console.print(f"[green]✓[/green] {path}")
-                    logger.
-                        "
-                        path=path,
-
-                        checksum=checksum,
+                    logger.info(
+                        "new file processed",
+                        f"path={path}",
+                        f"checksum={checksum}",
                    )
                    processed.add(path)
                    add_count += 1
                 else:  # pragma: no cover
-                    logger.warning("Error syncing new file
+                    logger.warning(f"Error syncing new file, path={path}")  # pragma: no cover
                     self.console.print(
                         f"[orange]?[/orange] Error syncing: {path}"
                     )  # pragma: no cover
@@ -296,7 +313,7 @@ class WatchService:
                     processed.add(path)
                     continue
 
-                logger.debug("Processing modified file
+                logger.debug(f"Processing modified file: path={path}")
                 entity, checksum = await self.sync_service.sync_file(path, new=False)
                 self.state.add_event(
                     path=path, action="modified", status="success", checksum=checksum
@@ -311,17 +328,18 @@ class WatchService:
                         f"[yellow]...[/yellow] Repeated changes to {path}"
                     )  # pragma: no cover
                 else:
-                    #
+                    # haven't processed this file
                     self.console.print(f"[yellow]✎[/yellow] {path}")
+                    logger.info(f"modified: {path}")
                     last_modified_path = path
                     repeat_count = 0
                 modify_count += 1
 
                 logger.debug(
-                    "Modified file processed"
-                    path=path
-                    entity_id=entity.id if entity else None
-                    checksum=checksum,
+                    "Modified file processed, "
+                    f"path={path} "
+                    f"entity_id={entity.id if entity else None} "
+                    f"checksum={checksum}",
                )
                processed.add(path)
 
@@ -339,16 +357,17 @@ class WatchService:
 
         if changes:
             self.console.print(f"{', '.join(changes)}", style="dim")  # pyright: ignore
+            logger.info(f"changes: {len(changes)}")
 
         duration_ms = int((time.time() - start_time) * 1000)
         self.state.last_scan = datetime.now()
         self.state.synced_files += len(processed)
 
         logger.info(
-            "File change processing completed"
-            processed_files=len(processed),
-            total_synced_files=self.state.synced_files,
-            duration_ms=duration_ms
+            "File change processing completed, "
+            f"processed_files={len(processed)}, "
+            f"total_synced_files={self.state.synced_files}, "
+            f"duration_ms={duration_ms}"
        )
 
        await self.write_status()
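One caveat in the new atomic-writes block: as rendered above, it removes entries from adds while iterating adds, and in Python that skips the element after each removal. A self-contained illustration of the pitfall and the usual fix (iterating a snapshot):

    adds = ["a.md", "b.md", "c.md"]
    for path in adds:        # mutating the list being iterated
        adds.remove(path)    # pretend every path already exists in the db
    print(adds)              # ['b.md'] - one element was never visited

    adds = ["a.md", "b.md", "c.md"]
    for path in list(adds):  # iterate a copy instead
        adds.remove(path)
    print(adds)              # []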
{basic_memory-0.12.0.dist-info → basic_memory-0.12.2.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: basic-memory
-Version: 0.12.0
+Version: 0.12.2
 Summary: Local-first knowledge management combining Zettelkasten with knowledge graphs
 Project-URL: Homepage, https://github.com/basicmachines-co/basic-memory
 Project-URL: Repository, https://github.com/basicmachines-co/basic-memory
@@ -187,7 +187,7 @@ The note embeds semantic content and links to other topics via simple Markdown f
 
 3. You see this file on your computer in real time in the current project directory (default `~/$HOME/basic-memory`).
 
-   - Realtime sync
+   - Realtime sync is enabled by default with the v0.12.0 version
 
 4. In a chat with the LLM, you can reference a topic:
 
@@ -378,13 +378,7 @@ config:
 
 2. Sync your knowledge:
 
-
-   # One-time sync of local knowledge updates
-   basic-memory sync
-
-   # Run realtime sync process (recommended)
-   basic-memory sync --watch
-   ```
+   Basic Memory will sync the files in your project in real time if you make manual edits.
 
 3. In Claude Desktop, the LLM can now use these tools:
 
{basic_memory-0.12.0.dist-info → basic_memory-0.12.2.dist-info}/RECORD
CHANGED
@@ -1,5 +1,5 @@
-basic_memory/__init__.py,sha256=
-basic_memory/config.py,sha256=
+basic_memory/__init__.py,sha256=9r3qVuD_n9X5pYE_X1n8qlu86LtJPBHz5Z4U15_KkCM,123
+basic_memory/config.py,sha256=jZmBOj4Gl2l56pApiN88s6juPDaX1g2LcvuVUnUeG0Q,9203
 basic_memory/db.py,sha256=8SmrmNAlJlmYT9yIJiPwNq8SN8mB2rbW5t33Rqpyl2I,6052
 basic_memory/deps.py,sha256=yI6RL_5-8LXw7ywSJ_84BXAczDtv2h9GFLw-E9XDJFg,5770
 basic_memory/file_utils.py,sha256=eaxTKLLEbTIy_Mb_Iv_Dmt4IXAJSrZGVi-Knrpyci3E,6700
@@ -13,7 +13,7 @@ basic_memory/alembic/versions/502b60eaa905_remove_required_from_entity_permalink
 basic_memory/alembic/versions/b3c3938bacdb_relation_to_name_unique_index.py,sha256=RsGymQzfRXV1LSNKiyi0lMilTxW1NgwS9jR67ye2apI,1428
 basic_memory/alembic/versions/cc7172b46608_update_search_index_schema.py,sha256=Lbo3dEzdId_vKRFe3jMkGFF3dNQpblPIQa4Bh7np-zA,4020
 basic_memory/api/__init__.py,sha256=wCpj-21j1D0KzKl9Ql6unLBVFY0K1uGp_FeSZRKtqpk,72
-basic_memory/api/app.py,sha256=
+basic_memory/api/app.py,sha256=GFFX3MOusEzbDaAVDECk3B46xybuinUfMt4Atw0Nr8c,1724
 basic_memory/api/routers/__init__.py,sha256=SKuL-weA58hYj0NOMCQRfmsaumlNjjyVHDXNpRO38bQ,305
 basic_memory/api/routers/knowledge_router.py,sha256=iYuBguMb6ERitAwoelSejBYJqLTGfjpkzAHrqwTKjVE,5876
 basic_memory/api/routers/memory_router.py,sha256=W_uHJe2c4XN96mFj6XNvUH6INVbl1BMxy0KUchLcbxU,5421
@@ -21,21 +21,21 @@ basic_memory/api/routers/project_info_router.py,sha256=Qv12_QL3SRpo7bPcpAjizJmkZ
 basic_memory/api/routers/resource_router.py,sha256=WEJEqEaY_yTKj5-U-rW4kXQKUcJflykgwI6_g_R41ck,8058
 basic_memory/api/routers/search_router.py,sha256=R_a5OF5_8rCjmoOMhmw3M4VLCy6I1KLGJ-otSLB0rbI,1953
 basic_memory/cli/__init__.py,sha256=arcKLAWRDhPD7x5t80MlviZeYzwHZ0GZigyy3NKVoGk,33
-basic_memory/cli/app.py,sha256=
-basic_memory/cli/main.py,sha256=
+basic_memory/cli/app.py,sha256=hFRYznTSI58t6FEDUwF_PUgKZF0V63sCHzBDDb5FOAk,2142
+basic_memory/cli/main.py,sha256=WhYOCKliF48DLDOukOg3QPiD16IP3AJfhdCIe7Wlh2g,666
 basic_memory/cli/commands/__init__.py,sha256=3oojcC-Y-4RPqff9vtwWziT_T4uvBVicL0pSHNilVkU,393
 basic_memory/cli/commands/db.py,sha256=-jgVH2fs_s1vvBNJx_FWspQVHv0F6Qd7V5ZPxtYn_jQ,1125
 basic_memory/cli/commands/import_chatgpt.py,sha256=M4_oUN9o_BaW5jpKQu2pTEybivB5ccVolhdZzmhLOsI,8162
 basic_memory/cli/commands/import_claude_conversations.py,sha256=D_4-0xFKkZka7xFvvW8OkgjLv3TFqsC_VuB2Z-Y3avU,6827
 basic_memory/cli/commands/import_claude_projects.py,sha256=KzUuf3wrlvJlqTWCzoLRrNxD3OYNteRXaTFj5IB1FA8,6649
-basic_memory/cli/commands/import_memory_json.py,sha256=
-basic_memory/cli/commands/mcp.py,sha256=
+basic_memory/cli/commands/import_memory_json.py,sha256=qA7at-JbpwjGIJ27hbhIOQU6HnhQn4PGK0OxxC0rV1I,5212
+basic_memory/cli/commands/mcp.py,sha256=sWwRLRbY_FYUNxoy1a8risypnjS9YvZbnP3IjijiUZ0,1025
 basic_memory/cli/commands/project.py,sha256=BSjdz07xDM3R4CUXggv1qhrWLJsEgvGFir6aOUzdr2Q,11330
 basic_memory/cli/commands/status.py,sha256=nbs3myxaNtehEEJ4BBngPuKs-vqZTHNCCb0bTgDsE-s,5277
-basic_memory/cli/commands/sync.py,sha256=
+basic_memory/cli/commands/sync.py,sha256=3jwgabxkF4WyFZ-gRC1l8A8p8Z_aYrzHRXOtUfYy2yc,8324
 basic_memory/cli/commands/tool.py,sha256=7wte1TqjG__NcC7BB0BRLl8THB3t5eAngud0zVHBQ8k,9506
 basic_memory/markdown/__init__.py,sha256=DdzioCWtDnKaq05BHYLgL_78FawEHLpLXnp-kPSVfIc,501
-basic_memory/markdown/entity_parser.py,sha256=
+basic_memory/markdown/entity_parser.py,sha256=vf0U2ABdnI4PS2rv7dlm-6WfSzdJlMEar55M79JSZJ0,4436
 basic_memory/markdown/markdown_processor.py,sha256=S5ny69zu2dlqO7tWJoLrpLSzg8emQIDq7Du7olpJUsk,4968
 basic_memory/markdown/plugins.py,sha256=gtIzKRjoZsyvBqLpVNnrmzl_cbTZ5ZGn8kcuXxQjRko,6639
 basic_memory/markdown/schemas.py,sha256=eyxYCr1hVyWmImcle0asE5It_DD6ARkqaBZYu1KK5n4,1896
@@ -43,7 +43,7 @@ basic_memory/markdown/utils.py,sha256=wr7KnDMThgnztkOoqSG_ANPhwNBoPkyjYP1sA1Wnxe
 basic_memory/mcp/__init__.py,sha256=dsDOhKqjYeIbCULbHIxfcItTbqudEuEg1Np86eq0GEQ,35
 basic_memory/mcp/async_client.py,sha256=Eo345wANiBRSM4u3j_Vd6Ax4YtMg7qbWd9PIoFfj61I,236
 basic_memory/mcp/main.py,sha256=0kbcyf1PxRC1bLnHv2zzParfJ6cOq7Am9ScF9UoI50U,703
-basic_memory/mcp/server.py,sha256=
+basic_memory/mcp/server.py,sha256=RgNIyRUsuBgoCntj_5Dn2_QNTBYQ1tjFSEn-Z1PoFzU,1099
 basic_memory/mcp/prompts/__init__.py,sha256=-Bl9Dgj2TD9PULjzggPqXuvPEjWCRy7S9Yg03h2-U7A,615
 basic_memory/mcp/prompts/ai_assistant_guide.py,sha256=8TI5xObiRVcwv6w9by1xQHlX0whvyE7-LGsiqDMRTFg,821
 basic_memory/mcp/prompts/continue_conversation.py,sha256=I1FdNXIsSBKsu2ABj8TRRr-mdZKZ1K8LMCUfAik5Iqs,4424
@@ -71,7 +71,7 @@ basic_memory/repository/entity_repository.py,sha256=VLKlQ97-_HhSqc-st_YToWUNE4pJ
 basic_memory/repository/observation_repository.py,sha256=BOcy4wARqCXu-thYyt7mPxt2A2C8TW0le3s_X9wrK6I,1701
 basic_memory/repository/project_info_repository.py,sha256=nHWzs0WBQ366WfzIYZgnAfU6tyQ_8slEszWNlDSeIlo,336
 basic_memory/repository/relation_repository.py,sha256=DwpTcn9z_1sZQcyMOUABz1k1VSwo_AU63x2zR7aerTk,2933
-basic_memory/repository/repository.py,sha256=
+basic_memory/repository/repository.py,sha256=X03U6FA3tpQ8FoULL7J7GByUeArSc2Ajb5GIJjZ8HBA,11934
 basic_memory/repository/search_repository.py,sha256=z6oX6wCLo2JaW2hawtgiyxmTsboKTjuO7cgXsFsQhmA,11607
 basic_memory/schemas/__init__.py,sha256=KHzF2lZhYXRsH2g6tV5Oivlk1EHFfrlbKuiRllqnBzs,1570
 basic_memory/schemas/base.py,sha256=dwnaI5fJXsdp81mdH0ZpmJ-WICY-0M7ZPWeW5OUgBG8,5685
@@ -85,15 +85,16 @@ basic_memory/services/__init__.py,sha256=oop6SKmzV4_NAYt9otGnupLGVCCKIVgxEcdRQWw
 basic_memory/services/context_service.py,sha256=vUo4j-_UDDlpL_PxoNTqmG5kZ2m7UfPyIz6FY3SCU7o,9715
 basic_memory/services/entity_service.py,sha256=lCdqRnAkaolt_pr48lFxRXAj2YRS-nasJTkepBf3zlU,12915
 basic_memory/services/exceptions.py,sha256=VGlCLd4UD2w5NWKqC7QpG4jOM_hA7jKRRM-MqvEVMNk,288
-basic_memory/services/file_service.py,sha256=
+basic_memory/services/file_service.py,sha256=egoJnhoHBUX_wepjkLyyc6qZkPfOexlj8p0HRvtdxWw,9940
+basic_memory/services/initialization.py,sha256=T8KPFpFzeURORPjvfhvHE7Vnmx_TXUUGumBCEEiCJaM,4787
 basic_memory/services/link_resolver.py,sha256=3I3wp5HHpe17DNHhn1IG3_yWWHYtEZKRNL7I2j_AHos,3599
 basic_memory/services/search_service.py,sha256=1K1YuWFVreKjn6LkbOpl-zCmXYjqOQS1qB-yvkwu-zc,9817
 basic_memory/services/service.py,sha256=V-d_8gOV07zGIQDpL-Ksqs3ZN9l3qf3HZOK1f_YNTag,336
 basic_memory/sync/__init__.py,sha256=CVHguYH457h2u2xoM8KvOilJC71XJlZ-qUh8lHcjYj4,156
-basic_memory/sync/sync_service.py,sha256=
-basic_memory/sync/watch_service.py,sha256=
-basic_memory-0.12.
-basic_memory-0.12.
-basic_memory-0.12.
-basic_memory-0.12.
-basic_memory-0.12.
+basic_memory/sync/sync_service.py,sha256=ZIgaukAsS8PRf5FBPYGT2lVdn--YuGLd8AJShA79IYk,19602
+basic_memory/sync/watch_service.py,sha256=ipkW9zK1MhisvdHambB9sesB6vNm0OapMZZM7w0GmsQ,14338
+basic_memory-0.12.2.dist-info/METADATA,sha256=An_KcV3Ns11AxyUjuKCTdRCjUSA-KFTyw4D4eubnJi4,14992
+basic_memory-0.12.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+basic_memory-0.12.2.dist-info/entry_points.txt,sha256=wvE2mRF6-Pg4weIYcfQ-86NOLZD4WJg7F7TIsRVFLb8,90
+basic_memory-0.12.2.dist-info/licenses/LICENSE,sha256=hIahDEOTzuHCU5J2nd07LWwkLW7Hko4UFO__ffsvB-8,34523
+basic_memory-0.12.2.dist-info/RECORD,,
{basic_memory-0.12.0.dist-info → basic_memory-0.12.2.dist-info}/WHEEL
File without changes
{basic_memory-0.12.0.dist-info → basic_memory-0.12.2.dist-info}/entry_points.txt
File without changes
{basic_memory-0.12.0.dist-info → basic_memory-0.12.2.dist-info}/licenses/LICENSE
File without changes