basic-memory 0.11.0__py3-none-any.whl → 0.12.1__py3-none-any.whl
This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two published versions.
Potentially problematic release: this version of basic-memory might be problematic.
- basic_memory/__init__.py +1 -1
- basic_memory/api/app.py +11 -3
- basic_memory/cli/app.py +12 -7
- basic_memory/cli/commands/mcp.py +18 -9
- basic_memory/cli/commands/sync.py +9 -8
- basic_memory/cli/commands/tool.py +28 -15
- basic_memory/cli/main.py +12 -44
- basic_memory/config.py +30 -6
- basic_memory/db.py +3 -1
- basic_memory/file_utils.py +3 -0
- basic_memory/markdown/entity_parser.py +16 -7
- basic_memory/markdown/utils.py +21 -13
- basic_memory/mcp/prompts/continue_conversation.py +4 -4
- basic_memory/mcp/prompts/search.py +2 -2
- basic_memory/mcp/server.py +29 -3
- basic_memory/mcp/tools/read_note.py +2 -3
- basic_memory/mcp/tools/search.py +64 -28
- basic_memory/mcp/tools/write_note.py +3 -1
- basic_memory/repository/repository.py +0 -4
- basic_memory/repository/search_repository.py +11 -11
- basic_memory/schemas/search.py +2 -2
- basic_memory/services/context_service.py +1 -1
- basic_memory/services/entity_service.py +10 -10
- basic_memory/services/file_service.py +1 -1
- basic_memory/services/initialization.py +143 -0
- basic_memory/services/link_resolver.py +8 -1
- basic_memory/services/search_service.py +3 -23
- basic_memory/sync/sync_service.py +120 -191
- basic_memory/sync/watch_service.py +49 -30
- basic_memory/utils.py +10 -2
- {basic_memory-0.11.0.dist-info → basic_memory-0.12.1.dist-info}/METADATA +42 -11
- {basic_memory-0.11.0.dist-info → basic_memory-0.12.1.dist-info}/RECORD +35 -34
- {basic_memory-0.11.0.dist-info → basic_memory-0.12.1.dist-info}/WHEEL +0 -0
- {basic_memory-0.11.0.dist-info → basic_memory-0.12.1.dist-info}/entry_points.txt +0 -0
- {basic_memory-0.11.0.dist-info → basic_memory-0.12.1.dist-info}/licenses/LICENSE +0 -0
basic_memory/mcp/tools/search.py
CHANGED
@@ -1,17 +1,27 @@
 """Search tools for Basic Memory MCP server."""
 
+from typing import List, Optional
+
 from loguru import logger
 
+from basic_memory.mcp.async_client import client
 from basic_memory.mcp.server import mcp
 from basic_memory.mcp.tools.utils import call_post
-from basic_memory.schemas.search import SearchQuery, SearchResponse
-from basic_memory.mcp.async_client import client
+from basic_memory.schemas.search import SearchItemType, SearchQuery, SearchResponse
 
 
 @mcp.tool(
     description="Search across all content in the knowledge base.",
 )
-async def search_notes(
+async def search_notes(
+    query: str,
+    page: int = 1,
+    page_size: int = 10,
+    search_type: str = "text",
+    types: Optional[List[str]] = None,
+    entity_types: Optional[List[str]] = None,
+    after_date: Optional[str] = None,
+) -> SearchResponse:
     """Search across all content in the knowledge base.
 
     This tool searches the knowledge base using full-text search, pattern matching,
@@ -19,59 +29,85 @@ async def search_notes(query: SearchQuery, page: int = 1, page_size: int = 10) -
     and date.
 
     Args:
-        query:
-            - text: Full-text search (e.g., "project planning")
-              Supports boolean operators: AND, OR, NOT and parentheses for grouping
-            - title: Search only in titles (e.g., "Meeting notes")
-            - permalink: Exact permalink match (e.g., "docs/meeting-notes")
-            - permalink_match: Pattern matching for permalinks (e.g., "docs/*-notes")
-            - types: Optional list of content types to search (e.g., ["entity", "observation"])
-            - entity_types: Optional list of entity types to filter by (e.g., ["note", "person"])
-            - after_date: Optional date filter for recent content (e.g., "1 week", "2d")
+        query: The search query string
         page: The page number of results to return (default 1)
         page_size: The number of results to return per page (default 10)
+        search_type: Type of search to perform, one of: "text", "title", "permalink" (default: "text")
+        types: Optional list of note types to search (e.g., ["note", "person"])
+        entity_types: Optional list of entity types to filter by (e.g., ["entity", "observation"])
+        after_date: Optional date filter for recent content (e.g., "1 week", "2d")
 
     Returns:
         SearchResponse with results and pagination info
 
     Examples:
         # Basic text search
-        results = await search_notes(
+        results = await search_notes("project planning")
 
         # Boolean AND search (both terms must be present)
-        results = await search_notes(
+        results = await search_notes("project AND planning")
 
         # Boolean OR search (either term can be present)
-        results = await search_notes(
+        results = await search_notes("project OR meeting")
 
         # Boolean NOT search (exclude terms)
-        results = await search_notes(
+        results = await search_notes("project NOT meeting")
 
         # Boolean search with grouping
-        results = await search_notes(
+        results = await search_notes("(project OR planning) AND notes")
 
         # Search with type filter
-        results = await search_notes(
-
+        results = await search_notes(
+            query="meeting notes",
+            types=["entity"],
+        )
+
+        # Search with entity type filter, e.g., note vs
+        results = await search_notes(
+            query="meeting notes",
             types=["entity"],
-        )
+        )
 
         # Search for recent content
-        results = await search_notes(
-
+        results = await search_notes(
+            query="bug report",
             after_date="1 week"
-        )
+        )
 
         # Pattern matching on permalinks
-        results = await search_notes(
-
-
+        results = await search_notes(
+            query="docs/meeting-*",
+            search_type="permalink"
+        )
     """
-
+    # Create a SearchQuery object based on the parameters
+    search_query = SearchQuery()
+
+    # Set the appropriate search field based on search_type
+    if search_type == "text":
+        search_query.text = query
+    elif search_type == "title":
+        search_query.title = query
+    elif search_type == "permalink" and "*" in query:
+        search_query.permalink_match = query
+    elif search_type == "permalink":
+        search_query.permalink = query
+    else:
+        search_query.text = query  # Default to text search
+
+    # Add optional filters if provided
+    if entity_types:
+        search_query.entity_types = [SearchItemType(t) for t in entity_types]
+    if types:
+        search_query.types = types
+    if after_date:
+        search_query.after_date = after_date
+
+    logger.info(f"Searching for {search_query}")
     response = await call_post(
         client,
         "/search/",
-        json=
+        json=search_query.model_dump(),
         params={"page": page, "page_size": page_size},
     )
     return SearchResponse.model_validate(response.json())
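The net effect of this change is that `search_notes` now takes plain keyword arguments instead of a `SearchQuery` object and builds the query server-side. A minimal sketch of the new call style, based on the docstring examples above; the import path follows the module layout in this wheel, and whether the decorated tool can be awaited directly outside an MCP session (and against a running Basic Memory API) depends on the FastMCP decorator, so treat this as illustrative only:

```python
import asyncio

# Illustrative direct import of the MCP tool shown above; in normal use the tool is
# invoked through an MCP client rather than called as a plain coroutine.
from basic_memory.mcp.tools.search import search_notes


async def main() -> None:
    # 0.11.x style (removed): search_notes(SearchQuery(text="project planning"), ...)
    # 0.12.x style: plain keyword arguments; the SearchQuery is built inside the tool.
    text_hits = await search_notes("project AND planning", page=1, page_size=10)
    permalink_hits = await search_notes(query="docs/meeting-*", search_type="permalink")
    recent = await search_notes(query="bug report", after_date="1 week")
    print(text_hits, permalink_hits, recent)


asyncio.run(main())
```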

basic_memory/mcp/tools/write_note.py
CHANGED
@@ -88,8 +88,10 @@ async def write_note(
     # Format semantic summary based on status code
     action = "Created" if response.status_code == 201 else "Updated"
     summary = [
-        f"# {action}
+        f"# {action} note",
+        f"file_path: {result.file_path}",
         f"permalink: {result.permalink}",
+        f"checksum: {result.checksum[:8] if result.checksum else 'unknown'}",
     ]
 
     # Count observations by category
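With these additions the semantic summary returned by `write_note` reports the file path and a truncated checksum alongside the permalink. A small stand-alone sketch of the shape of those lines, with placeholder values (the join is only for display here; it is not taken from the package):

```python
# Placeholder values, just to show the shape of the summary lines built above.
action = "Created"
file_path = "notes/meeting-notes.md"
permalink = "notes/meeting-notes"
checksum = "a1b2c3d4e5f6"

summary = [
    f"# {action} note",
    f"file_path: {file_path}",
    f"permalink: {permalink}",
    f"checksum: {checksum[:8] if checksum else 'unknown'}",
]
print("\n".join(summary))
# # Created note
# file_path: notes/meeting-notes.md
# permalink: notes/meeting-notes
# checksum: a1b2c3d4
```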

basic_memory/repository/repository.py
CHANGED
@@ -137,8 +137,6 @@ class Repository[T: Base]:
 
     async def find_one(self, query: Select[tuple[T]]) -> Optional[T]:
         """Execute a query and retrieve a single record."""
-        logger.debug(f"Finding one {self.Model.__name__} with query: {query}")
-
         # add in load options
         query = query.options(*self.get_load_options())
         result = await self.execute_query(query)
@@ -270,11 +268,9 @@ class Repository[T: Base]:
         """Execute a query asynchronously."""
 
         query = query.options(*self.get_load_options()) if use_query_options else query
-
         logger.debug(f"Executing query: {query}")
         async with db.scoped_session(self.session_maker) as session:
             result = await session.execute(query)
-            logger.debug("Query executed successfully")
             return result
 
     def get_load_options(self) -> List[LoaderOption]:

basic_memory/repository/search_repository.py
CHANGED
@@ -4,10 +4,10 @@ import json
 import time
 from dataclasses import dataclass
 from datetime import datetime
-from typing import
+from typing import Any, Dict, List, Optional
 
 from loguru import logger
-from sqlalchemy import
+from sqlalchemy import Executable, Result, text
 from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker
 
 from basic_memory import db
@@ -123,9 +123,9 @@ class SearchRepository:
         permalink: Optional[str] = None,
         permalink_match: Optional[str] = None,
         title: Optional[str] = None,
-        types: Optional[List[
+        types: Optional[List[str]] = None,
         after_date: Optional[datetime] = None,
-        entity_types: Optional[List[
+        entity_types: Optional[List[SearchItemType]] = None,
         limit: int = 10,
         offset: int = 0,
     ) -> List[SearchIndexRow]:
@@ -174,15 +174,15 @@ class SearchRepository:
         else:
             conditions.append("permalink MATCH :permalink")
 
-        # Handle type filter
-        if types:
-            type_list = ", ".join(f"'{t.value}'" for t in types)
-            conditions.append(f"type IN ({type_list})")
-
         # Handle entity type filter
         if entity_types:
-
-            conditions.append(f"
+            type_list = ", ".join(f"'{t.value}'" for t in entity_types)
+            conditions.append(f"type IN ({type_list})")
+
+        # Handle type filter
+        if types:
+            type_list = ", ".join(f"'{t}'" for t in types)
+            conditions.append(f"json_extract(metadata, '$.entity_type') IN ({type_list})")
 
         # Handle date filter using datetime() for proper comparison
         if after_date:
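In 0.12.x the two filters appear to swap roles: `entity_types` now constrains the search-index row type (the FTS `type` column), while `types` filters on the entity type stored in the row's JSON metadata. A small self-contained sketch of the WHERE fragments the new code builds, with a stand-in enum (the real `SearchItemType` lives in `basic_memory.schemas.search`):

```python
from enum import Enum


class SearchItemType(str, Enum):
    # Stand-in for basic_memory.schemas.search.SearchItemType; values assumed.
    ENTITY = "entity"
    OBSERVATION = "observation"


conditions = []
entity_types = [SearchItemType.ENTITY]  # filters the search-index row type
types = ["note", "person"]              # filters the entity's own type in metadata

if entity_types:
    type_list = ", ".join(f"'{t.value}'" for t in entity_types)
    conditions.append(f"type IN ({type_list})")

if types:
    type_list = ", ".join(f"'{t}'" for t in types)
    conditions.append(f"json_extract(metadata, '$.entity_type') IN ({type_list})")

print(" AND ".join(conditions))
# type IN ('entity') AND json_extract(metadata, '$.entity_type') IN ('note', 'person')
```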
basic_memory/schemas/search.py
CHANGED
@@ -49,8 +49,8 @@ class SearchQuery(BaseModel):
     title: Optional[str] = None  # title only search
 
     # Optional filters
-    types: Optional[List[
-    entity_types: Optional[List[
+    types: Optional[List[str]] = None  # Filter by type
+    entity_types: Optional[List[SearchItemType]] = None  # Filter by entity type
     after_date: Optional[Union[datetime, str]] = None  # Time-based filter
 
     @field_validator("after_date")
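So the schema-level filter fields change type: `types` becomes a list of plain strings and `entity_types` a list of `SearchItemType`. A short sketch of constructing a query against the 0.12.x schema; it assumes `SearchItemType` exposes an `ENTITY` member, as used elsewhere in this diff (link_resolver.py):

```python
from basic_memory.schemas.search import SearchItemType, SearchQuery

query = SearchQuery(
    text="project planning",
    types=["note", "person"],              # plain strings in 0.12.x
    entity_types=[SearchItemType.ENTITY],  # enum values in 0.12.x
    after_date="1 week",
)
print(query.model_dump())
```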

basic_memory/services/context_service.py
CHANGED
@@ -81,7 +81,7 @@ class ContextService:
         else:
             logger.debug(f"Build context for '{types}'")
             primary = await self.search_repository.search(
-
+                entity_types=types, after_date=since, limit=limit, offset=offset
             )
 
         # Get type_id pairs for traversal

basic_memory/services/entity_service.py
CHANGED
@@ -1,24 +1,24 @@
 """Service for managing entities in the database."""
 
 from pathlib import Path
-from typing import
+from typing import List, Optional, Sequence, Tuple, Union
 
 import frontmatter
 from loguru import logger
 from sqlalchemy.exc import IntegrityError
 
 from basic_memory.markdown import EntityMarkdown
+from basic_memory.markdown.entity_parser import EntityParser
 from basic_memory.markdown.utils import entity_model_from_markdown, schema_to_markdown
-from basic_memory.models import Entity as EntityModel
+from basic_memory.models import Entity as EntityModel
+from basic_memory.models import Observation, Relation
 from basic_memory.repository import ObservationRepository, RelationRepository
 from basic_memory.repository.entity_repository import EntityRepository
 from basic_memory.schemas import Entity as EntitySchema
 from basic_memory.schemas.base import Permalink
-from basic_memory.services
-from basic_memory.services import
-from basic_memory.services import BaseService
+from basic_memory.services import BaseService, FileService
+from basic_memory.services.exceptions import EntityCreationError, EntityNotFoundError
 from basic_memory.services.link_resolver import LinkResolver
-from basic_memory.markdown.entity_parser import EntityParser
 from basic_memory.utils import generate_permalink
 
 
@@ -89,7 +89,7 @@ class EntityService(BaseService[EntityModel]):
         logger.debug(f"Creating or updating entity: {schema}")
 
         # Try to find existing entity using smart resolution
-        existing = await self.link_resolver.resolve_link(schema.permalink)
+        existing = await self.link_resolver.resolve_link(schema.permalink or schema.file_path)
 
         if existing:
             logger.debug(f"Found existing entity: {existing.permalink}")
@@ -100,7 +100,7 @@ class EntityService(BaseService[EntityModel]):
 
     async def create_entity(self, schema: EntitySchema) -> EntityModel:
         """Create a new entity and write to filesystem."""
-        logger.debug(f"Creating entity: {schema.
+        logger.debug(f"Creating entity: {schema.title}")
 
         # Get file path and ensure it's a Path object
         file_path = Path(schema.file_path)
@@ -230,7 +230,7 @@ class EntityService(BaseService[EntityModel]):
         Creates the entity with null checksum to indicate sync not complete.
         Relations will be added in second pass.
         """
-        logger.debug(f"Creating entity: {markdown.frontmatter.title}")
+        logger.debug(f"Creating entity: {markdown.frontmatter.title} file_path: {file_path}")
         model = entity_model_from_markdown(file_path, markdown)
 
         # Mark as incomplete because we still need to add relations
@@ -315,7 +315,7 @@ class EntityService(BaseService[EntityModel]):
             except IntegrityError:
                 # Unique constraint violation - relation already exists
                 logger.debug(
-                    f"Skipping duplicate relation {rel.type} from {db_entity.permalink} target: {rel.target}
+                    f"Skipping duplicate relation {rel.type} from {db_entity.permalink} target: {rel.target}"
                 )
                 continue
 

basic_memory/services/file_service.py
CHANGED
@@ -60,7 +60,7 @@ class FileService:
         Returns:
             Raw content string without metadata sections
         """
-        logger.debug("Reading entity content
+        logger.debug(f"Reading entity content, entity_id={entity.id}, permalink={entity.permalink}")
 
         file_path = self.get_entity_path(entity)
         markdown = await self.markdown_processor.read_file(file_path)

basic_memory/services/initialization.py
ADDED
@@ -0,0 +1,143 @@
+"""Shared initialization service for Basic Memory.
+
+This module provides shared initialization functions used by both CLI and API
+to ensure consistent application startup across all entry points.
+"""
+
+import asyncio
+from typing import Optional
+
+from loguru import logger
+
+from basic_memory import db
+from basic_memory.config import ProjectConfig, config_manager
+from basic_memory.sync import WatchService
+
+# Import this inside functions to avoid circular imports
+# from basic_memory.cli.commands.sync import get_sync_service
+
+
+async def initialize_database(app_config: ProjectConfig) -> None:
+    """Run database migrations to ensure schema is up to date.
+
+    Args:
+        app_config: The Basic Memory project configuration
+    """
+    try:
+        logger.info("Running database migrations...")
+        await db.run_migrations(app_config)
+        logger.info("Migrations completed successfully")
+    except Exception as e:
+        logger.error(f"Error running migrations: {e}")
+        # Allow application to continue - it might still work
+        # depending on what the error was, and will fail with a
+        # more specific error if the database is actually unusable
+
+
+async def initialize_file_sync(
+    app_config: ProjectConfig,
+) -> asyncio.Task:
+    """Initialize file synchronization services.
+
+    Args:
+        app_config: The Basic Memory project configuration
+
+    Returns:
+        Tuple of (sync_service, watch_service, watch_task) if sync is enabled,
+        or (None, None, None) if sync is disabled
+    """
+    # Load app configuration
+    # Import here to avoid circular imports
+    from basic_memory.cli.commands.sync import get_sync_service
+
+    # Initialize sync service
+    sync_service = await get_sync_service()
+
+    # Initialize watch service
+    watch_service = WatchService(
+        sync_service=sync_service,
+        file_service=sync_service.entity_service.file_service,
+        config=app_config,
+        quiet=True,
+    )
+
+    # Create the background task for running sync
+    async def run_background_sync():  # pragma: no cover
+        # Run initial full sync
+        await sync_service.sync(app_config.home)
+        logger.info("Sync completed successfully")
+
+        # Start background sync task
+        logger.info(f"Starting watch service to sync file changes in dir: {app_config.home}")
+
+        # Start watching for changes
+        await watch_service.run()
+
+    watch_task = asyncio.create_task(run_background_sync())
+    logger.info("Watch service started")
+    return watch_task
+
+
+async def initialize_app(
+    app_config: ProjectConfig,
+) -> Optional[asyncio.Task]:
+    """Initialize the Basic Memory application.
+
+    This function handles all initialization steps needed for both API and short lived CLI commands.
+    For long running commands like mcp, a
+    - Running database migrations
+    - Setting up file synchronization
+
+    Args:
+        app_config: The Basic Memory project configuration
+    """
+    # Initialize database first
+    await initialize_database(app_config)
+
+    basic_memory_config = config_manager.load_config()
+    logger.info(f"Sync changes enabled: {basic_memory_config.sync_changes}")
+    logger.info(
+        f"Update permalinks on move enabled: {basic_memory_config.update_permalinks_on_move}"
+    )
+    if not basic_memory_config.sync_changes:  # pragma: no cover
+        logger.info("Sync changes disabled. Skipping watch service.")
+        return
+
+    # Initialize file sync services
+    return await initialize_file_sync(app_config)
+
+
+def ensure_initialization(app_config: ProjectConfig) -> None:
+    """Ensure initialization runs in a synchronous context.
+
+    This is a wrapper for the async initialize_app function that can be
+    called from synchronous code like CLI entry points.
+
+    Args:
+        app_config: The Basic Memory project configuration
+    """
+    try:
+        asyncio.run(initialize_app(app_config))
+    except Exception as e:
+        logger.error(f"Error during initialization: {e}")
+        # Continue execution even if initialization fails
+        # The command might still work, or will fail with a
+        # more specific error message
+
+
+def ensure_initialize_database(app_config: ProjectConfig) -> None:
+    """Ensure initialization runs in a synchronous context.
+
+    This is a wrapper for the async initialize_database function that can be
+    called from synchronous code like CLI entry points.
+
+    Args:
+        app_config: The Basic Memory project configuration
+    """
+    try:
+        asyncio.run(initialize_database(app_config))
+    except Exception as e:
+        logger.error(f"Error during initialization: {e}")
+        # Continue execution even if initialization fails
+        # The command might still work, or will fail with a
+        # more specific error message
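This new module gives CLI entry points a synchronous wrapper around the async startup path. A sketch of how a caller might use it; how the `ProjectConfig` instance is obtained is left abstract here, since that depends on `basic_memory.config`:

```python
# Sketch of using the new synchronous wrappers from CLI entry points.
from basic_memory.config import ProjectConfig
from basic_memory.services.initialization import (
    ensure_initialization,
    ensure_initialize_database,
)


def short_lived_command(app_config: ProjectConfig) -> None:
    # Short-lived commands only need the database schema to be current.
    ensure_initialize_database(app_config)


def long_running_command(app_config: ProjectConfig) -> None:
    # Runs migrations and, if sync_changes is enabled, starts the watch/sync task.
    ensure_initialization(app_config)
```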

basic_memory/services/link_resolver.py
CHANGED
@@ -46,10 +46,17 @@ class LinkResolver:
             logger.debug(f"Found title match: {entity.title}")
             return entity
 
+        # 3. Try file path
+        found_path = await self.entity_repository.get_by_file_path(clean_text)
+        if found_path:
+            logger.debug(f"Found entity with path: {found_path.file_path}")
+            return found_path
+
+        # search if indicated
         if use_search and "*" not in clean_text:
             # 3. Fall back to search for fuzzy matching on title
             results = await self.search_service.search(
-                query=SearchQuery(title=clean_text,
+                query=SearchQuery(title=clean_text, entity_types=[SearchItemType.ENTITY]),
             )
 
             if results:
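The resolver now tries an exact file-path lookup before falling back to fuzzy search, so the resolution order becomes: exact permalink, exact title, exact file path, then an optional title search restricted to entity rows. A self-contained illustration of that order using in-memory stand-ins (none of these classes are basic_memory API):

```python
import asyncio
from typing import Optional


class FakeEntityRepository:
    def __init__(self, by_path: dict):
        self.by_path = by_path

    async def get_by_file_path(self, path: str) -> Optional[str]:
        return self.by_path.get(path)


class FakeSearchService:
    async def search(self, title: str) -> list:
        return [f"fuzzy:{title}"]


async def resolve_link(text: str, repo: FakeEntityRepository, search: FakeSearchService):
    # 1-2. exact permalink / title lookups would come first (elided here)
    # 3. exact file-path lookup (new in 0.12.x)
    found = await repo.get_by_file_path(text)
    if found:
        return found
    # 4. fall back to fuzzy title search unless the text is a wildcard pattern
    if "*" not in text:
        results = await search.search(title=text)
        return results[0] if results else None
    return None


async def main() -> None:
    repo = FakeEntityRepository({"docs/meeting-notes.md": "entity:meeting-notes"})
    search = FakeSearchService()
    print(await resolve_link("docs/meeting-notes.md", repo, search))  # file-path hit
    print(await resolve_link("Meeting Notes", repo, search))          # fuzzy fallback


asyncio.run(main())
```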

basic_memory/services/search_service.py
CHANGED
@@ -181,17 +181,6 @@ class SearchService:
         Each type gets its own row in the search index with appropriate metadata.
         """
 
-        if entity.permalink is None:  # pragma: no cover
-            logger.error(
-                "Missing permalink for markdown entity",
-                entity_id=entity.id,
-                title=entity.title,
-                file_path=entity.file_path,
-            )
-            raise ValueError(
-                f"Entity permalink should not be None for markdown entity: {entity.id} ({entity.title})"
-            )
-
         content_stems = []
         content_snippet = ""
         title_variants = self._generate_variants(entity.title)
@@ -202,22 +191,13 @@
             content_stems.append(content)
             content_snippet = f"{content[:250]}"
 
-
+        if entity.permalink:
+            content_stems.extend(self._generate_variants(entity.permalink))
+
         content_stems.extend(self._generate_variants(entity.file_path))
 
         entity_content_stems = "\n".join(p for p in content_stems if p and p.strip())
 
-        if entity.permalink is None:  # pragma: no cover
-            logger.error(
-                "Missing permalink for markdown entity",
-                entity_id=entity.id,
-                title=entity.title,
-                file_path=entity.file_path,
-            )
-            raise ValueError(
-                f"Entity permalink should not be None for markdown entity: {entity.id} ({entity.title})"
-            )
-
         # Index entity
         await self.repository.index_item(
             SearchIndexRow(