basic_memory-0.17.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
- basic_memory/__init__.py +7 -0
- basic_memory/alembic/alembic.ini +119 -0
- basic_memory/alembic/env.py +185 -0
- basic_memory/alembic/migrations.py +24 -0
- basic_memory/alembic/script.py.mako +26 -0
- basic_memory/alembic/versions/314f1ea54dc4_add_postgres_full_text_search_support_.py +131 -0
- basic_memory/alembic/versions/3dae7c7b1564_initial_schema.py +93 -0
- basic_memory/alembic/versions/502b60eaa905_remove_required_from_entity_permalink.py +51 -0
- basic_memory/alembic/versions/5fe1ab1ccebe_add_projects_table.py +120 -0
- basic_memory/alembic/versions/647e7a75e2cd_project_constraint_fix.py +112 -0
- basic_memory/alembic/versions/9d9c1cb7d8f5_add_mtime_and_size_columns_to_entity_.py +49 -0
- basic_memory/alembic/versions/a1b2c3d4e5f6_fix_project_foreign_keys.py +49 -0
- basic_memory/alembic/versions/a2b3c4d5e6f7_add_search_index_entity_cascade.py +56 -0
- basic_memory/alembic/versions/b3c3938bacdb_relation_to_name_unique_index.py +44 -0
- basic_memory/alembic/versions/cc7172b46608_update_search_index_schema.py +113 -0
- basic_memory/alembic/versions/e7e1f4367280_add_scan_watermark_tracking_to_project.py +37 -0
- basic_memory/alembic/versions/f8a9b2c3d4e5_add_pg_trgm_for_fuzzy_link_resolution.py +239 -0
- basic_memory/api/__init__.py +5 -0
- basic_memory/api/app.py +131 -0
- basic_memory/api/routers/__init__.py +11 -0
- basic_memory/api/routers/directory_router.py +84 -0
- basic_memory/api/routers/importer_router.py +152 -0
- basic_memory/api/routers/knowledge_router.py +318 -0
- basic_memory/api/routers/management_router.py +80 -0
- basic_memory/api/routers/memory_router.py +90 -0
- basic_memory/api/routers/project_router.py +448 -0
- basic_memory/api/routers/prompt_router.py +260 -0
- basic_memory/api/routers/resource_router.py +249 -0
- basic_memory/api/routers/search_router.py +36 -0
- basic_memory/api/routers/utils.py +169 -0
- basic_memory/api/template_loader.py +292 -0
- basic_memory/api/v2/__init__.py +35 -0
- basic_memory/api/v2/routers/__init__.py +21 -0
- basic_memory/api/v2/routers/directory_router.py +93 -0
- basic_memory/api/v2/routers/importer_router.py +182 -0
- basic_memory/api/v2/routers/knowledge_router.py +413 -0
- basic_memory/api/v2/routers/memory_router.py +130 -0
- basic_memory/api/v2/routers/project_router.py +342 -0
- basic_memory/api/v2/routers/prompt_router.py +270 -0
- basic_memory/api/v2/routers/resource_router.py +286 -0
- basic_memory/api/v2/routers/search_router.py +73 -0
- basic_memory/cli/__init__.py +1 -0
- basic_memory/cli/app.py +84 -0
- basic_memory/cli/auth.py +277 -0
- basic_memory/cli/commands/__init__.py +18 -0
- basic_memory/cli/commands/cloud/__init__.py +6 -0
- basic_memory/cli/commands/cloud/api_client.py +112 -0
- basic_memory/cli/commands/cloud/bisync_commands.py +110 -0
- basic_memory/cli/commands/cloud/cloud_utils.py +101 -0
- basic_memory/cli/commands/cloud/core_commands.py +195 -0
- basic_memory/cli/commands/cloud/rclone_commands.py +371 -0
- basic_memory/cli/commands/cloud/rclone_config.py +110 -0
- basic_memory/cli/commands/cloud/rclone_installer.py +263 -0
- basic_memory/cli/commands/cloud/upload.py +233 -0
- basic_memory/cli/commands/cloud/upload_command.py +124 -0
- basic_memory/cli/commands/command_utils.py +77 -0
- basic_memory/cli/commands/db.py +44 -0
- basic_memory/cli/commands/format.py +198 -0
- basic_memory/cli/commands/import_chatgpt.py +84 -0
- basic_memory/cli/commands/import_claude_conversations.py +87 -0
- basic_memory/cli/commands/import_claude_projects.py +86 -0
- basic_memory/cli/commands/import_memory_json.py +87 -0
- basic_memory/cli/commands/mcp.py +76 -0
- basic_memory/cli/commands/project.py +889 -0
- basic_memory/cli/commands/status.py +174 -0
- basic_memory/cli/commands/telemetry.py +81 -0
- basic_memory/cli/commands/tool.py +341 -0
- basic_memory/cli/main.py +28 -0
- basic_memory/config.py +616 -0
- basic_memory/db.py +394 -0
- basic_memory/deps.py +705 -0
- basic_memory/file_utils.py +478 -0
- basic_memory/ignore_utils.py +297 -0
- basic_memory/importers/__init__.py +27 -0
- basic_memory/importers/base.py +79 -0
- basic_memory/importers/chatgpt_importer.py +232 -0
- basic_memory/importers/claude_conversations_importer.py +180 -0
- basic_memory/importers/claude_projects_importer.py +148 -0
- basic_memory/importers/memory_json_importer.py +108 -0
- basic_memory/importers/utils.py +61 -0
- basic_memory/markdown/__init__.py +21 -0
- basic_memory/markdown/entity_parser.py +279 -0
- basic_memory/markdown/markdown_processor.py +160 -0
- basic_memory/markdown/plugins.py +242 -0
- basic_memory/markdown/schemas.py +70 -0
- basic_memory/markdown/utils.py +117 -0
- basic_memory/mcp/__init__.py +1 -0
- basic_memory/mcp/async_client.py +139 -0
- basic_memory/mcp/project_context.py +141 -0
- basic_memory/mcp/prompts/__init__.py +19 -0
- basic_memory/mcp/prompts/ai_assistant_guide.py +70 -0
- basic_memory/mcp/prompts/continue_conversation.py +62 -0
- basic_memory/mcp/prompts/recent_activity.py +188 -0
- basic_memory/mcp/prompts/search.py +57 -0
- basic_memory/mcp/prompts/utils.py +162 -0
- basic_memory/mcp/resources/ai_assistant_guide.md +283 -0
- basic_memory/mcp/resources/project_info.py +71 -0
- basic_memory/mcp/server.py +81 -0
- basic_memory/mcp/tools/__init__.py +48 -0
- basic_memory/mcp/tools/build_context.py +120 -0
- basic_memory/mcp/tools/canvas.py +152 -0
- basic_memory/mcp/tools/chatgpt_tools.py +190 -0
- basic_memory/mcp/tools/delete_note.py +242 -0
- basic_memory/mcp/tools/edit_note.py +324 -0
- basic_memory/mcp/tools/list_directory.py +168 -0
- basic_memory/mcp/tools/move_note.py +551 -0
- basic_memory/mcp/tools/project_management.py +201 -0
- basic_memory/mcp/tools/read_content.py +281 -0
- basic_memory/mcp/tools/read_note.py +267 -0
- basic_memory/mcp/tools/recent_activity.py +534 -0
- basic_memory/mcp/tools/search.py +385 -0
- basic_memory/mcp/tools/utils.py +540 -0
- basic_memory/mcp/tools/view_note.py +78 -0
- basic_memory/mcp/tools/write_note.py +230 -0
- basic_memory/models/__init__.py +15 -0
- basic_memory/models/base.py +10 -0
- basic_memory/models/knowledge.py +226 -0
- basic_memory/models/project.py +87 -0
- basic_memory/models/search.py +85 -0
- basic_memory/repository/__init__.py +11 -0
- basic_memory/repository/entity_repository.py +503 -0
- basic_memory/repository/observation_repository.py +73 -0
- basic_memory/repository/postgres_search_repository.py +379 -0
- basic_memory/repository/project_info_repository.py +10 -0
- basic_memory/repository/project_repository.py +128 -0
- basic_memory/repository/relation_repository.py +146 -0
- basic_memory/repository/repository.py +385 -0
- basic_memory/repository/search_index_row.py +95 -0
- basic_memory/repository/search_repository.py +94 -0
- basic_memory/repository/search_repository_base.py +241 -0
- basic_memory/repository/sqlite_search_repository.py +439 -0
- basic_memory/schemas/__init__.py +86 -0
- basic_memory/schemas/base.py +297 -0
- basic_memory/schemas/cloud.py +50 -0
- basic_memory/schemas/delete.py +37 -0
- basic_memory/schemas/directory.py +30 -0
- basic_memory/schemas/importer.py +35 -0
- basic_memory/schemas/memory.py +285 -0
- basic_memory/schemas/project_info.py +212 -0
- basic_memory/schemas/prompt.py +90 -0
- basic_memory/schemas/request.py +112 -0
- basic_memory/schemas/response.py +229 -0
- basic_memory/schemas/search.py +117 -0
- basic_memory/schemas/sync_report.py +72 -0
- basic_memory/schemas/v2/__init__.py +27 -0
- basic_memory/schemas/v2/entity.py +129 -0
- basic_memory/schemas/v2/resource.py +46 -0
- basic_memory/services/__init__.py +8 -0
- basic_memory/services/context_service.py +601 -0
- basic_memory/services/directory_service.py +308 -0
- basic_memory/services/entity_service.py +864 -0
- basic_memory/services/exceptions.py +37 -0
- basic_memory/services/file_service.py +541 -0
- basic_memory/services/initialization.py +216 -0
- basic_memory/services/link_resolver.py +121 -0
- basic_memory/services/project_service.py +880 -0
- basic_memory/services/search_service.py +404 -0
- basic_memory/services/service.py +15 -0
- basic_memory/sync/__init__.py +6 -0
- basic_memory/sync/background_sync.py +26 -0
- basic_memory/sync/sync_service.py +1259 -0
- basic_memory/sync/watch_service.py +510 -0
- basic_memory/telemetry.py +249 -0
- basic_memory/templates/prompts/continue_conversation.hbs +110 -0
- basic_memory/templates/prompts/search.hbs +101 -0
- basic_memory/utils.py +468 -0
- basic_memory-0.17.1.dist-info/METADATA +617 -0
- basic_memory-0.17.1.dist-info/RECORD +171 -0
- basic_memory-0.17.1.dist-info/WHEEL +4 -0
- basic_memory-0.17.1.dist-info/entry_points.txt +3 -0
- basic_memory-0.17.1.dist-info/licenses/LICENSE +661 -0
basic_memory/repository/search_repository_base.py
@@ -0,0 +1,241 @@
+"""Abstract base class for search repository implementations."""
+
+from abc import ABC, abstractmethod
+from datetime import datetime
+from typing import Any, Dict, List, Optional
+
+
+from loguru import logger
+from sqlalchemy import Executable, Result, text
+from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker
+
+from basic_memory import db
+from basic_memory.schemas.search import SearchItemType
+from basic_memory.repository.search_index_row import SearchIndexRow
+
+
+class SearchRepositoryBase(ABC):
+    """Abstract base class for backend-specific search repository implementations.
+
+    This class defines the common interface that all search repositories must implement,
+    regardless of whether they use SQLite FTS5 or Postgres tsvector for full-text search.
+
+    Concrete implementations:
+    - SQLiteSearchRepository: Uses FTS5 virtual tables with MATCH queries
+    - PostgresSearchRepository: Uses tsvector/tsquery with GIN indexes
+    """
+
+    def __init__(self, session_maker: async_sessionmaker[AsyncSession], project_id: int):
+        """Initialize with session maker and project_id filter.
+
+        Args:
+            session_maker: SQLAlchemy session maker
+            project_id: Project ID to filter all operations by
+
+        Raises:
+            ValueError: If project_id is None or invalid
+        """
+        if project_id is None or project_id <= 0:  # pragma: no cover
+            raise ValueError("A valid project_id is required for SearchRepository")
+
+        self.session_maker = session_maker
+        self.project_id = project_id
+
+    @abstractmethod
+    async def init_search_index(self) -> None:
+        """Create or recreate the search index.
+
+        Backend-specific implementations:
+        - SQLite: CREATE VIRTUAL TABLE using FTS5
+        - Postgres: CREATE TABLE with tsvector column and GIN indexes
+        """
+        pass
+
+    @abstractmethod
+    def _prepare_search_term(self, term: str, is_prefix: bool = True) -> str:
+        """Prepare a search term for backend-specific query syntax.
+
+        Args:
+            term: The search term to prepare
+            is_prefix: Whether to add prefix search capability
+
+        Returns:
+            Formatted search term for the backend
+
+        Backend-specific implementations:
+        - SQLite: Quotes FTS5 special characters, adds * wildcards
+        - Postgres: Converts to tsquery syntax with :* prefix operator
+        """
+        pass
+
+    @abstractmethod
+    async def search(
+        self,
+        search_text: Optional[str] = None,
+        permalink: Optional[str] = None,
+        permalink_match: Optional[str] = None,
+        title: Optional[str] = None,
+        types: Optional[List[str]] = None,
+        after_date: Optional[datetime] = None,
+        search_item_types: Optional[List[SearchItemType]] = None,
+        limit: int = 10,
+        offset: int = 0,
+    ) -> List[SearchIndexRow]:
+        """Search across all indexed content.
+
+        Args:
+            search_text: Full-text search across title and content
+            permalink: Exact permalink match
+            permalink_match: Permalink pattern match (supports *)
+            title: Title search
+            types: Filter by entity types (from metadata.entity_type)
+            after_date: Filter by created_at > after_date
+            search_item_types: Filter by SearchItemType (ENTITY, OBSERVATION, RELATION)
+            limit: Maximum results to return
+            offset: Number of results to skip
+
+        Returns:
+            List of SearchIndexRow results with relevance scores
+
+        Backend-specific implementations:
+        - SQLite: Uses MATCH operator and bm25() for scoring
+        - Postgres: Uses @@ operator and ts_rank() for scoring
+        """
+        pass
+
+    async def index_item(self, search_index_row: SearchIndexRow) -> None:
+        """Index or update a single item.
+
+        This implementation is shared across backends as it uses standard SQL INSERT.
+        """
+
+        async with db.scoped_session(self.session_maker) as session:
+            # Delete existing record if any
+            await session.execute(
+                text(
+                    "DELETE FROM search_index WHERE permalink = :permalink AND project_id = :project_id"
+                ),
+                {"permalink": search_index_row.permalink, "project_id": self.project_id},
+            )
+
+            # When using text() raw SQL, always serialize JSON to string
+            # Both SQLite (TEXT) and Postgres (JSONB) accept JSON strings in raw SQL
+            # The database driver/column type will handle conversion
+            insert_data = search_index_row.to_insert(serialize_json=True)
+            insert_data["project_id"] = self.project_id
+
+            # Insert new record
+            await session.execute(
+                text("""
+                    INSERT INTO search_index (
+                        id, title, content_stems, content_snippet, permalink, file_path, type, metadata,
+                        from_id, to_id, relation_type,
+                        entity_id, category,
+                        created_at, updated_at,
+                        project_id
+                    ) VALUES (
+                        :id, :title, :content_stems, :content_snippet, :permalink, :file_path, :type, :metadata,
+                        :from_id, :to_id, :relation_type,
+                        :entity_id, :category,
+                        :created_at, :updated_at,
+                        :project_id
+                    )
+                """),
+                insert_data,
+            )
+            logger.debug(f"indexed row {search_index_row}")
+            await session.commit()
+
+    async def bulk_index_items(self, search_index_rows: List[SearchIndexRow]) -> None:
+        """Index multiple items in a single batch operation.
+
+        This implementation is shared across backends as it uses standard SQL INSERT.
+
+        Note: This method assumes that any existing records for the entity_id
+        have already been deleted (typically via delete_by_entity_id).
+
+        Args:
+            search_index_rows: List of SearchIndexRow objects to index
+        """
+
+        if not search_index_rows:
+            return
+
+        async with db.scoped_session(self.session_maker) as session:
+            # When using text() raw SQL, always serialize JSON to string
+            # Both SQLite (TEXT) and Postgres (JSONB) accept JSON strings in raw SQL
+            # The database driver/column type will handle conversion
+            insert_data_list = []
+            for row in search_index_rows:
+                insert_data = row.to_insert(serialize_json=True)
+                insert_data["project_id"] = self.project_id
+                insert_data_list.append(insert_data)
+
+            # Batch insert all records using executemany
+            await session.execute(
+                text("""
+                    INSERT INTO search_index (
+                        id, title, content_stems, content_snippet, permalink, file_path, type, metadata,
+                        from_id, to_id, relation_type,
+                        entity_id, category,
+                        created_at, updated_at,
+                        project_id
+                    ) VALUES (
+                        :id, :title, :content_stems, :content_snippet, :permalink, :file_path, :type, :metadata,
+                        :from_id, :to_id, :relation_type,
+                        :entity_id, :category,
+                        :created_at, :updated_at,
+                        :project_id
+                    )
+                """),
+                insert_data_list,
+            )
+            logger.debug(f"Bulk indexed {len(search_index_rows)} rows")
+            await session.commit()
+
+    async def delete_by_entity_id(self, entity_id: int) -> None:
+        """Delete all search index entries for an entity.
+
+        This implementation is shared across backends as it uses standard SQL DELETE.
+        """
+        async with db.scoped_session(self.session_maker) as session:
+            await session.execute(
+                text(
+                    "DELETE FROM search_index WHERE entity_id = :entity_id AND project_id = :project_id"
+                ),
+                {"entity_id": entity_id, "project_id": self.project_id},
+            )
+            await session.commit()
+
+    async def delete_by_permalink(self, permalink: str) -> None:
+        """Delete a search index entry by permalink.
+
+        This implementation is shared across backends as it uses standard SQL DELETE.
+        """
+        async with db.scoped_session(self.session_maker) as session:
+            await session.execute(
+                text(
+                    "DELETE FROM search_index WHERE permalink = :permalink AND project_id = :project_id"
+                ),
+                {"permalink": permalink, "project_id": self.project_id},
+            )
+            await session.commit()
+
+    async def execute_query(
+        self,
+        query: Executable,
+        params: Dict[str, Any],
+    ) -> Result[Any]:
+        """Execute a query asynchronously.
+
+        This implementation is shared across backends for utility query execution.
+        """
+        import time
+
+        async with db.scoped_session(self.session_maker) as session:
+            start_time = time.perf_counter()
+            result = await session.execute(query, params)
+            end_time = time.perf_counter()
+            elapsed_time = end_time - start_time
+            logger.debug(f"Query executed successfully in {elapsed_time:.2f}s.")
+            return result
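
The shared helpers above encode a re-indexing contract: `bulk_index_items` performs inserts only and, per its docstring, assumes stale rows for the entity were already removed, so callers pair it with `delete_by_entity_id`. Below is a minimal sketch of that pattern using only the interface shown in this hunk; the `reindex_entity` helper name is illustrative and not part of the package.

```python
from typing import List

from basic_memory.repository.search_index_row import SearchIndexRow
from basic_memory.repository.search_repository_base import SearchRepositoryBase


async def reindex_entity(
    repo: SearchRepositoryBase, entity_id: int, rows: List[SearchIndexRow]
) -> None:
    """Replace an entity's search rows: delete first, then batch-insert."""
    # bulk_index_items does not delete on its own, so clear stale rows first.
    await repo.delete_by_entity_id(entity_id)
    # One executemany-style INSERT for all replacement rows,
    # scoped to repo.project_id by the base class.
    await repo.bulk_index_items(rows)
```

Note that each helper opens its own scoped session and commits independently, as the shared implementations above show.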
basic_memory/repository/sqlite_search_repository.py
@@ -0,0 +1,439 @@
+"""SQLite FTS5-based search repository implementation."""
+
+import json
+import re
+from datetime import datetime
+from typing import List, Optional
+
+
+from loguru import logger
+from sqlalchemy import text
+
+from basic_memory import db
+from basic_memory.models.search import CREATE_SEARCH_INDEX
+from basic_memory.repository.search_index_row import SearchIndexRow
+from basic_memory.repository.search_repository_base import SearchRepositoryBase
+from basic_memory.schemas.search import SearchItemType
+
+
+class SQLiteSearchRepository(SearchRepositoryBase):
+    """SQLite FTS5 implementation of search repository.
+
+    Uses SQLite's FTS5 virtual tables for full-text search with:
+    - MATCH operator for queries
+    - bm25() function for relevance scoring
+    - Special character quoting for syntax safety
+    - Prefix wildcard matching with *
+    """
+
+    async def init_search_index(self):
+        """Create FTS5 virtual table for search.
+
+        Note: Drops any existing search_index table first to ensure FTS5 virtual table creation.
+        This is necessary because Base.metadata.create_all() might create a regular table.
+        """
+        logger.info("Initializing SQLite FTS5 search index")
+        try:
+            async with db.scoped_session(self.session_maker) as session:
+                # Drop any existing regular or virtual table first
+                await session.execute(text("DROP TABLE IF EXISTS search_index"))
+                # Create FTS5 virtual table
+                await session.execute(CREATE_SEARCH_INDEX)
+                await session.commit()
+        except Exception as e:  # pragma: no cover
+            logger.error(f"Error initializing search index: {e}")
+            raise e
+
+    def _prepare_boolean_query(self, query: str) -> str:
+        """Prepare a Boolean query by quoting individual terms while preserving operators.
+
+        Args:
+            query: A Boolean query like "tier1-test AND unicode" or "(hello OR world) NOT test"
+
+        Returns:
+            A properly formatted Boolean query with quoted terms that need quoting
+        """
+        # Define Boolean operators and their boundaries
+        boolean_pattern = r"(\bAND\b|\bOR\b|\bNOT\b)"
+
+        # Split the query by Boolean operators, keeping the operators
+        parts = re.split(boolean_pattern, query)
+
+        processed_parts = []
+        for part in parts:
+            part = part.strip()
+            if not part:
+                continue
+
+            # If it's a Boolean operator, keep it as is
+            if part in ["AND", "OR", "NOT"]:
+                processed_parts.append(part)
+            else:
+                # Handle parentheses specially - they should be preserved for grouping
+                if "(" in part or ")" in part:
+                    # Parse parenthetical expressions carefully
+                    processed_part = self._prepare_parenthetical_term(part)
+                    processed_parts.append(processed_part)
+                else:
+                    # This is a search term - for Boolean queries, don't add prefix wildcards
+                    prepared_term = self._prepare_single_term(part, is_prefix=False)
+                    processed_parts.append(prepared_term)
+
+        return " ".join(processed_parts)
+
+    def _prepare_parenthetical_term(self, term: str) -> str:
+        """Prepare a term that contains parentheses, preserving the parentheses for grouping.
+
+        Args:
+            term: A term that may contain parentheses like "(hello" or "world)" or "(hello OR world)"
+
+        Returns:
+            A properly formatted term with parentheses preserved
+        """
+        # Handle terms that start/end with parentheses but may contain quotable content
+        result = ""
+        i = 0
+        while i < len(term):
+            if term[i] in "()":
+                # Preserve parentheses as-is
+                result += term[i]
+                i += 1
+            else:
+                # Find the next parenthesis or end of string
+                start = i
+                while i < len(term) and term[i] not in "()":
+                    i += 1
+
+                # Extract the content between parentheses
+                content = term[start:i].strip()
+                if content:
+                    # Only quote if it actually needs quoting (has hyphens, special chars, etc)
+                    # but don't quote if it's just simple words
+                    if self._needs_quoting(content):
+                        escaped_content = content.replace('"', '""')
+                        result += f'"{escaped_content}"'
+                    else:
+                        result += content
+
+        return result
+
+    def _needs_quoting(self, term: str) -> bool:
+        """Check if a term needs to be quoted for FTS5 safety.
+
+        Args:
+            term: The term to check
+
+        Returns:
+            True if the term should be quoted
+        """
+        if not term or not term.strip():
+            return False
+
+        # Characters that indicate we should quote (excluding parentheses which are valid syntax)
+        needs_quoting_chars = [
+            " ",
+            ".",
+            ":",
+            ";",
+            ",",
+            "<",
+            ">",
+            "?",
+            "/",
+            "-",
+            "'",
+            '"',
+            "[",
+            "]",
+            "{",
+            "}",
+            "+",
+            "!",
+            "@",
+            "#",
+            "$",
+            "%",
+            "^",
+            "&",
+            "=",
+            "|",
+            "\\",
+            "~",
+            "`",
+        ]
+
+        return any(c in term for c in needs_quoting_chars)
+
+    def _prepare_single_term(self, term: str, is_prefix: bool = True) -> str:
+        """Prepare a single search term (no Boolean operators).
+
+        Args:
+            term: A single search term
+            is_prefix: Whether to add prefix search capability (* suffix)
+
+        Returns:
+            A properly formatted single term
+        """
+        if not term or not term.strip():
+            return term
+
+        term = term.strip()
+
+        # Check if term is already a proper wildcard pattern (alphanumeric + *)
+        # e.g., "hello*", "test*world" - these should be left alone
+        if "*" in term and all(c.isalnum() or c in "*_-" for c in term):
+            return term
+
+        # Characters that can cause FTS5 syntax errors when used as operators
+        # We're more conservative here - only quote when we detect problematic patterns
+        problematic_chars = [
+            '"',
+            "'",
+            "(",
+            ")",
+            "[",
+            "]",
+            "{",
+            "}",
+            "+",
+            "!",
+            "@",
+            "#",
+            "$",
+            "%",
+            "^",
+            "&",
+            "=",
+            "|",
+            "\\",
+            "~",
+            "`",
+        ]
+
+        # Characters that indicate we should quote (spaces, dots, colons, etc.)
+        # Adding hyphens here because FTS5 can have issues with hyphens followed by wildcards
+        needs_quoting_chars = [" ", ".", ":", ";", ",", "<", ">", "?", "/", "-"]
+
+        # Check if term needs quoting
+        has_problematic = any(c in term for c in problematic_chars)
+        has_spaces_or_special = any(c in term for c in needs_quoting_chars)
+
+        if has_problematic or has_spaces_or_special:
+            # Handle multi-word queries differently from special character queries
+            if " " in term and not any(c in term for c in problematic_chars):
+                # Check if any individual word contains special characters that need quoting
+                words = term.strip().split()
+                has_special_in_words = any(
+                    any(c in word for c in needs_quoting_chars if c != " ") for word in words
+                )
+
+                if not has_special_in_words:
+                    # For multi-word queries with simple words (like "emoji unicode"),
+                    # use boolean AND to handle word order variations
+                    if is_prefix:
+                        # Add prefix wildcard to each word for better matching
+                        prepared_words = [f"{word}*" for word in words if word]
+                    else:
+                        prepared_words = words
+                    term = " AND ".join(prepared_words)
+                else:
+                    # If any word has special characters, quote the entire phrase
+                    escaped_term = term.replace('"', '""')
+                    if is_prefix and not ("/" in term and term.endswith(".md")):
+                        term = f'"{escaped_term}"*'
+                    else:
+                        term = f'"{escaped_term}"'
+            else:
+                # For terms with problematic characters or file paths, use exact phrase matching
+                # Escape any existing quotes by doubling them
+                escaped_term = term.replace('"', '""')
+                # Quote the entire term to handle special characters safely
+                if is_prefix and not ("/" in term and term.endswith(".md")):
+                    # For search terms (not file paths), add prefix matching
+                    term = f'"{escaped_term}"*'
+                else:
+                    # For file paths, use exact matching
+                    term = f'"{escaped_term}"'
+        elif is_prefix:
+            # Only add wildcard for simple terms without special characters
+            term = f"{term}*"
+
+        return term
+
+    def _prepare_search_term(self, term: str, is_prefix: bool = True) -> str:
+        """Prepare a search term for FTS5 query.
+
+        Args:
+            term: The search term to prepare
+            is_prefix: Whether to add prefix search capability (* suffix)
+
+        For FTS5:
+        - Boolean operators (AND, OR, NOT) are preserved for complex queries
+        - Terms with FTS5 special characters are quoted to prevent syntax errors
+        - Simple terms get prefix wildcards for better matching
+        """
+        # Check for explicit boolean operators - if present, process as Boolean query
+        boolean_operators = [" AND ", " OR ", " NOT "]
+        if any(op in f" {term} " for op in boolean_operators):
+            return self._prepare_boolean_query(term)
+
+        # For non-Boolean queries, use the single term preparation logic
+        return self._prepare_single_term(term, is_prefix)
+
+    async def search(
+        self,
+        search_text: Optional[str] = None,
+        permalink: Optional[str] = None,
+        permalink_match: Optional[str] = None,
+        title: Optional[str] = None,
+        types: Optional[List[str]] = None,
+        after_date: Optional[datetime] = None,
+        search_item_types: Optional[List[SearchItemType]] = None,
+        limit: int = 10,
+        offset: int = 0,
+    ) -> List[SearchIndexRow]:
+        """Search across all indexed content using SQLite FTS5."""
+        conditions = []
+        params = {}
+        order_by_clause = ""
+
+        # Handle text search for title and content
+        if search_text:
+            # Skip FTS for wildcard-only queries that would cause "unknown special query" errors
+            if search_text.strip() == "*" or search_text.strip() == "":
+                # For wildcard searches, don't add any text conditions - return all results
+                pass
+            else:
+                # Use _prepare_search_term to handle both Boolean and non-Boolean queries
+                processed_text = self._prepare_search_term(search_text.strip())
+                params["text"] = processed_text
+                conditions.append("(title MATCH :text OR content_stems MATCH :text)")
+
+        # Handle title match search
+        if title:
+            title_text = self._prepare_search_term(title.strip(), is_prefix=False)
+            params["title_text"] = title_text
+            conditions.append("title MATCH :title_text")
+
+        # Handle permalink exact search
+        if permalink:
+            params["permalink"] = permalink
+            conditions.append("permalink = :permalink")
+
+        # Handle permalink match search, supports *
+        if permalink_match:
+            # For GLOB patterns, don't use _prepare_search_term as it will quote slashes
+            # GLOB patterns need to preserve their syntax
+            permalink_text = permalink_match.lower().strip()
+            params["permalink"] = permalink_text
+            if "*" in permalink_match:
+                conditions.append("permalink GLOB :permalink")
+            else:
+                # For exact matches without *, we can use FTS5 MATCH
+                # but only prepare the term if it doesn't look like a path
+                if "/" in permalink_text:
+                    conditions.append("permalink = :permalink")
+                else:
+                    permalink_text = self._prepare_search_term(permalink_text, is_prefix=False)
+                    params["permalink"] = permalink_text
+                    conditions.append("permalink MATCH :permalink")
+
+        # Handle entity type filter
+        if search_item_types:
+            type_list = ", ".join(f"'{t.value}'" for t in search_item_types)
+            conditions.append(f"type IN ({type_list})")
+
+        # Handle type filter
+        if types:
+            type_list = ", ".join(f"'{t}'" for t in types)
+            conditions.append(f"json_extract(metadata, '$.entity_type') IN ({type_list})")
+
+        # Handle date filter using datetime() for proper comparison
+        if after_date:
+            params["after_date"] = after_date
+            conditions.append("datetime(created_at) > datetime(:after_date)")
+
+            # order by most recent first
+            order_by_clause = ", updated_at DESC"
+
+        # Always filter by project_id
+        params["project_id"] = self.project_id
+        conditions.append("project_id = :project_id")
+
+        # set limit on search query
+        params["limit"] = limit
+        params["offset"] = offset
+
+        # Build WHERE clause
+        where_clause = " AND ".join(conditions) if conditions else "1=1"
+
+        sql = f"""
+            SELECT
+                project_id,
+                id,
+                title,
+                permalink,
+                file_path,
+                type,
+                metadata,
+                from_id,
+                to_id,
+                relation_type,
+                entity_id,
+                content_snippet,
+                category,
+                created_at,
+                updated_at,
+                bm25(search_index) as score
+            FROM search_index
+            WHERE {where_clause}
+            ORDER BY score ASC {order_by_clause}
+            LIMIT :limit
+            OFFSET :offset
+        """
+
+        logger.trace(f"Search {sql} params: {params}")
+        try:
+            async with db.scoped_session(self.session_maker) as session:
+                result = await session.execute(text(sql), params)
+                rows = result.fetchall()
+        except Exception as e:
+            # Handle FTS5 syntax errors and provide user-friendly feedback
+            if "fts5: syntax error" in str(e).lower():  # pragma: no cover
+                logger.warning(f"FTS5 syntax error for search term: {search_text}, error: {e}")
+                # Return empty results rather than crashing
+                return []
+            else:
+                # Re-raise other database errors
+                logger.error(f"Database error during search: {e}")
+                raise
+
+        results = [
+            SearchIndexRow(
+                project_id=self.project_id,
+                id=row.id,
+                title=row.title,
+                permalink=row.permalink,
+                file_path=row.file_path,
+                type=row.type,
+                score=row.score,
+                metadata=json.loads(row.metadata) if row.metadata else {},
+                from_id=row.from_id,
+                to_id=row.to_id,
+                relation_type=row.relation_type,
+                entity_id=row.entity_id,
+                content_snippet=row.content_snippet,
+                category=row.category,
+                created_at=row.created_at,
+                updated_at=row.updated_at,
+            )
+            for row in rows
+        ]
+
+        logger.trace(f"Found {len(results)} search results")
+        for r in results:
+            logger.trace(
+                f"Search result: project_id: {r.project_id} type:{r.type} title: {r.title} permalink: {r.permalink} score: {r.score}"
+            )
+
+        return results
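
For orientation, here is a hedged end-to-end sketch of how this repository is driven: an `async_sessionmaker` plus a positive `project_id` construct it, `init_search_index()` builds the FTS5 table, and `search()` runs the prepared MATCH query. The in-memory `sqlite+aiosqlite` URL and the sample query text are assumptions for illustration only; they are not taken from the package.

```python
import asyncio

from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine

from basic_memory.repository.sqlite_search_repository import SQLiteSearchRepository


async def main() -> None:
    # Any async SQLAlchemy engine works; an in-memory SQLite DB is assumed here.
    engine = create_async_engine("sqlite+aiosqlite:///:memory:")
    session_maker = async_sessionmaker(engine, expire_on_commit=False)

    # The base class rejects a missing or non-positive project_id.
    repo = SQLiteSearchRepository(session_maker, project_id=1)

    # Drops any stale search_index table and creates the FTS5 virtual table.
    await repo.init_search_index()

    # The text is run through _prepare_search_term, so simple terms get prefix
    # wildcards and multi-word input becomes an AND query; results are scoped
    # to project_id=1 and ranked by bm25 (lower score is a better match).
    for row in await repo.search(search_text="coffee roasting", limit=5):
        print(row.title, row.permalink, row.score)


if __name__ == "__main__":
    asyncio.run(main())
```

Against an empty index the call simply returns an empty list; rows reach the index through the shared `index_item` and `bulk_index_items` helpers on the base class.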