basic-memory 0.0.0__py3-none-any.whl → 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of basic-memory might be problematic.

basic_memory/api/app.py CHANGED
@@ -7,18 +7,40 @@ from fastapi.exception_handlers import http_exception_handler
 from loguru import logger
 
 from basic_memory import db
-from .routers import knowledge, search, memory, resource
+from basic_memory.api.routers import knowledge, search, memory, resource
+from basic_memory.config import config
+from basic_memory.services import DatabaseService
 
 
 @asynccontextmanager
 async def lifespan(app: FastAPI):
     """Lifecycle manager for the FastAPI app."""
     logger.info("Starting Basic Memory API")
+
+    # check the db state
+    await check_db(app)
     yield
     logger.info("Shutting down Basic Memory API")
     await db.shutdown_db()
 
 
+async def check_db(app: FastAPI):
+    logger.info("Checking database state")
+
+    # Initialize DB management service
+    db_service = DatabaseService(
+        config=config,
+    )
+
+    # Check and initialize DB if needed
+    if not await db_service.check_db():
+        raise RuntimeError("Database initialization failed")
+
+    # Clean up old backups on shutdown
+    await db_service.cleanup_backups()
+
+
+
 # Initialize FastAPI app
 app = FastAPI(
     title="Basic Memory API",
@@ -47,7 +47,7 @@ async def to_graph_context(context, entity_repository: EntityRepository):
 
     return RelationSummary(
         permalink=item.permalink,
-        type=item.type,
+        relation_type=item.type,
         from_id=from_entity.permalink,
         to_id=to_entity.permalink if to_entity else None,
     )
@@ -1,9 +1,5 @@
-"""CLI commands package."""
+"""Command module exports."""
 
-from . import init, status
+from . import init, status, sync, import_memory_json
 
-
-__all__ = [
-    "init",
-    "status",
-]
+__all__ = ["init", "status", "sync", "import_memory_json.py"]
@@ -0,0 +1,139 @@
+"""Import command for basic-memory CLI to import from JSON memory format."""
+
+import asyncio
+import json
+from pathlib import Path
+from typing import Dict, Any, List
+
+import typer
+from loguru import logger
+from rich.console import Console
+from rich.panel import Panel
+from rich.progress import Progress, SpinnerColumn, TextColumn, BarColumn
+
+from basic_memory.cli.app import app
+from basic_memory.config import config
+from basic_memory.markdown import EntityParser, MarkdownProcessor
+from basic_memory.markdown.schemas import EntityMarkdown, EntityFrontmatter, Observation, Relation
+
+
+console = Console()
+
+async def process_memory_json(json_path: Path, base_path: Path, markdown_processor: MarkdownProcessor):
+    """Import entities from memory.json using markdown processor."""
+
+    # First pass - collect all relations by source entity
+    entity_relations: Dict[str, List[Relation]] = {}
+    entities: Dict[str, Dict[str, Any]] = {}
+
+    with Progress(
+        SpinnerColumn(),
+        TextColumn("[progress.description]{task.description}"),
+        BarColumn(),
+        TextColumn("[progress.percentage]{task.percentage:>3.0f}%"),
+        console=console,
+    ) as progress:
+        read_task = progress.add_task("Reading memory.json...", total=None)
+
+        # First pass - collect entities and relations
+        with open(json_path) as f:
+            lines = f.readlines()
+            progress.update(read_task, total=len(lines))
+
+            for line in lines:
+                data = json.loads(line)
+                if data["type"] == "entity":
+                    entities[data["name"]] = data
+                elif data["type"] == "relation":
+                    # Store relation with its source entity
+                    source = data.get("from") or data.get("from_id")
+                    if source not in entity_relations:
+                        entity_relations[source] = []
+                    entity_relations[source].append(
+                        Relation(
+                            type=data.get("relationType") or data.get("relation_type"),
+                            target=data.get("to") or data.get("to_id")
+                        )
+                    )
+                progress.update(read_task, advance=1)
+
+        # Second pass - create and write entities
+        write_task = progress.add_task("Creating entities...", total=len(entities))
+
+        entities_created = 0
+        for name, entity_data in entities.items():
+            entity = EntityMarkdown(
+                frontmatter=EntityFrontmatter(
+                    metadata={
+                        "type": entity_data["entityType"],
+                        "title": name,
+                        "permalink": f"{entity_data['entityType']}/{name}"
+                    }
+                ),
+                content=f"# {name}\n",
+                observations=[
+                    Observation(content=obs)
+                    for obs in entity_data["observations"]
+                ],
+                relations=entity_relations.get(name, [])  # Add any relations where this entity is the source
+            )
+
+            # Let markdown processor handle writing
+            file_path = base_path / f"{entity_data['entityType']}/{name}.md"
+            await markdown_processor.write_file(file_path, entity)
+            entities_created += 1
+            progress.update(write_task, advance=1)
+
+    return {
+        "entities": entities_created,
+        "relations": sum(len(rels) for rels in entity_relations.values())
+    }
+
+
+async def get_markdown_processor() -> MarkdownProcessor:
+    """Get MarkdownProcessor instance."""
+    entity_parser = EntityParser(config.home)
+    return MarkdownProcessor(entity_parser)
+
+
+@app.command()
+def import_json(
+    json_path: Path = typer.Argument(..., help="Path to memory.json file to import"),
+):
+    """Import entities and relations from a memory.json file.
+
+    This command will:
+    1. Read entities and relations from the JSON file
+    2. Create markdown files for each entity
+    3. Include outgoing relations in each entity's markdown
+
+    After importing, run 'basic-memory sync' to index the new files.
+    """
+
+    if not json_path.exists():
+        typer.echo(f"Error: File not found: {json_path}", err=True)
+        raise typer.Exit(1)
+
+    try:
+        # Get markdown processor
+        markdown_processor = asyncio.run(get_markdown_processor())
+
+        # Process the file
+        base_path = config.home
+        console.print(f"\nImporting from {json_path}...writing to {base_path}")
+        results = asyncio.run(process_memory_json(json_path, base_path, markdown_processor))
+
+        # Show results
+        console.print(Panel(
+            f"[green]Import complete![/green]\n\n"
+            f"Created {results['entities']} entities\n"
+            f"Added {results['relations']} relations",
+            expand=False
+        ))
+
+        console.print("\nRun 'basic-memory sync' to index the new files.")
+
+    except Exception as e:
+        logger.exception("Import failed")
+        typer.echo(f"Error during import: {e}", err=True)
+        raise typer.Exit(1)
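For reference, a minimal sketch (not part of the diff) of exercising the new command in-process with Typer's test runner. The "import-json" command name assumes Typer's default underscore-to-hyphen mapping for the import_json function and is not confirmed by the package.

import asyncio
from typer.testing import CliRunner

from basic_memory.cli.app import app  # Typer app imported by the new module above

runner = CliRunner()
# Command name assumed: Typer typically exposes import_json as "import-json"
result = runner.invoke(app, ["import-json", "memory.json"])
print(result.output)  # import summary, followed by the reminder to run 'basic-memory sync'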
basic_memory/cli/main.py CHANGED
@@ -14,7 +14,7 @@ __all__ = ["init", "status", "sync"]
 from basic_memory.config import config
 
 
-def setup_logging(home_dir: str = config.home, log_file: str = "./basic-memory/basic-memory-tools.log"):
+def setup_logging(home_dir: str = config.home, log_file: str = ".basic-memory/basic-memory-tools.log"):
     """Configure logging for the application."""
 
     # Remove default handler and any existing handlers
basic_memory/config.py CHANGED
@@ -37,7 +37,11 @@ class ProjectConfig(BaseSettings):
     @property
     def database_path(self) -> Path:
         """Get SQLite database path."""
-        return self.home / DATA_DIR_NAME / DATABASE_NAME
+        database_path = self.home / DATA_DIR_NAME / DATABASE_NAME
+        if not database_path.exists():
+            database_path.parent.mkdir(parents=True, exist_ok=True)
+            database_path.touch()
+        return database_path
 
     @field_validator("home")
     @classmethod
basic_memory/db.py CHANGED
@@ -14,8 +14,7 @@ from sqlalchemy.ext.asyncio import (
     async_scoped_session,
 )
 
-from basic_memory.models import Base
-
+from basic_memory.models import Base, SCHEMA_VERSION
 
 # Module level state
 _engine: Optional[AsyncEngine] = None
@@ -69,12 +68,31 @@ async def scoped_session(
         await factory.remove()
 
 
-async def init_db(session: AsyncSession):
+async def init_db() -> None:
     """Initialize database with required tables."""
-    await session.execute(text("PRAGMA foreign_keys=ON"))
-    conn = await session.connection()
-    await conn.run_sync(Base.metadata.create_all)
-    await session.commit()
+
+    logger.info("Initializing database...")
+
+    async with scoped_session(_session_maker) as session:
+        await session.execute(text("PRAGMA foreign_keys=ON"))
+        conn = await session.connection()
+        await conn.run_sync(Base.metadata.create_all)
+
+        await session.commit()
+
+async def drop_db():
+    """Drop all database tables."""
+    global _engine, _session_maker
+
+    logger.info("Dropping tables...")
+    async with scoped_session(_session_maker) as session:
+        conn = await session.connection()
+        await conn.run_sync(Base.metadata.drop_all)
+        await session.commit()
+
+    # reset global engine and session_maker
+    _engine = None
+    _session_maker = None
 
 
 async def get_or_create_db(
@@ -83,7 +101,7 @@ async def get_or_create_db(
 ) -> tuple[AsyncEngine, async_sessionmaker[AsyncSession]]:
     """Get or create database engine and session maker."""
     global _engine, _session_maker
-
+
     if _engine is None:
         db_url = DatabaseType.get_db_url(db_path, db_type)
         logger.debug(f"Creating engine for db_url: {db_url}")
@@ -91,9 +109,7 @@
         _session_maker = async_sessionmaker(_engine, expire_on_commit=False)
 
         # Initialize database
-        logger.debug("Initializing database...")
-        async with scoped_session(_session_maker) as db_session:
-            await init_db(db_session)
+        await init_db()
 
     return _engine, _session_maker
 
@@ -101,35 +117,38 @@
 async def shutdown_db():
     """Clean up database connections."""
     global _engine, _session_maker
-
+
     if _engine:
         await _engine.dispose()
         _engine = None
         _session_maker = None
 
 
+
 @asynccontextmanager
 async def engine_session_factory(
     db_path: Path,
-    db_type: DatabaseType = DatabaseType.FILESYSTEM,
+    db_type: DatabaseType = DatabaseType.MEMORY,
     init: bool = True,
 ) -> AsyncGenerator[tuple[AsyncEngine, async_sessionmaker[AsyncSession]], None]:
     """Create engine and session factory.
-
+
     Note: This is primarily used for testing where we want a fresh database
     for each test. For production use, use get_or_create_db() instead.
     """
+
+    global _engine, _session_maker
+
     db_url = DatabaseType.get_db_url(db_path, db_type)
     logger.debug(f"Creating engine for db_url: {db_url}")
-    engine = create_async_engine(db_url, connect_args={"check_same_thread": False})
+
+    _engine = create_async_engine(db_url, connect_args={"check_same_thread": False})
     try:
-        factory = async_sessionmaker(engine, expire_on_commit=False)
+        _session_maker = async_sessionmaker(_engine, expire_on_commit=False)
 
         if init:
-            logger.debug("Initializing database...")
-            async with scoped_session(factory) as db_session:
-                await init_db(db_session)
+            await init_db()
 
-        yield engine, factory
+        yield _engine, _session_maker
     finally:
-        await engine.dispose()
+        await _engine.dispose()
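A minimal sketch (not from the package) of the test-oriented flow these db.py changes enable: engine_session_factory now stores the engine and session maker in module globals, defaults to an in-memory database, and lets init_db() run without a session argument.

import asyncio
from pathlib import Path

from basic_memory import db

async def demo() -> None:
    # Path argument is still required by the signature; assumed unused for the in-memory default.
    async with db.engine_session_factory(Path("test.db")) as (engine, session_maker):
        # init=True (the default) has already created the tables via init_db()
        async with db.scoped_session(session_maker) as session:
            ...  # run queries against the freshly initialized schema

asyncio.run(demo())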
@@ -170,79 +170,44 @@ def remove_frontmatter(content: str) -> str:
         raise
 
 
-def remove_frontmatter_lenient(content: str) -> str:
-    """
-    Remove frontmatter markers and anything between them without validation.
-
-    This is a more permissive version of remove_frontmatter that doesn't
-    try to validate the YAML content. It simply removes everything between
-    the first two '---' markers if they exist.
+async def update_frontmatter(path: Path, updates: Dict[str, Any]) -> str:
+    """Update frontmatter fields in a file while preserving all content.
+
+    Only modifies the frontmatter section, leaving all content untouched.
+    Creates frontmatter section if none exists.
+    Returns checksum of updated file.
 
     Args:
-        content: Content that may contain frontmatter
+        path: Path to markdown file
+        updates: Dict of frontmatter fields to update
 
     Returns:
-        Content with any frontmatter markers and content removed
-    """
-    content = content.strip()
-    if not content.startswith("---"):
-        return content
-
-    # Find the second marker
-    rest = content[3:].strip()
-    if "---" not in rest:
-        return content
+        Checksum of updated file
 
-    # Split on the second marker and take everything after
-    parts = rest.split("---", 1)
-    return parts[1].strip()
-
-
-async def add_frontmatter(content: str, frontmatter: Dict[str, Any]) -> str:
-    """
-    Add YAML frontmatter to content.
-
-    Args:
-        content: Main content text
-        frontmatter: Key-value pairs for frontmatter
-
-    Returns:
-        Content with YAML frontmatter prepended
-
     Raises:
-        ParseError: If YAML serialization fails
+        FileError: If file operations fail
+        ParseError: If frontmatter parsing fails
     """
     try:
-        yaml_fm = yaml.dump(frontmatter, sort_keys=False)
-        return f"---\n{yaml_fm}---\n\n{content.strip()}"
-    except yaml.YAMLError as e:
-        logger.error(f"Failed to add frontmatter: {e}")
-        raise ParseError(f"Failed to add frontmatter: {e}")
-
+        # Read current content
+        content = path.read_text()
 
-async def parse_content_with_frontmatter(content: str) -> Tuple[Dict[str, Any], str]:
-    """
-    Parse both frontmatter and content.
-
-    Args:
-        content: Text content with optional frontmatter
+        # Parse current frontmatter
+        current_fm = {}
+        if has_frontmatter(content):
+            current_fm = parse_frontmatter(content)
+            content = remove_frontmatter(content)
 
-    Returns:
-        Tuple of (frontmatter dict, content without frontmatter)
+        # Update frontmatter
+        new_fm = {**current_fm, **updates}
 
-    Raises:
-        ParseError: If parsing fails
-    """
-    try:
-        if not has_frontmatter(content):
-            return {}, content.strip()
+        # Write new file with updated frontmatter
+        yaml_fm = yaml.dump(new_fm, sort_keys=False)
+        final_content = f"---\n{yaml_fm}---\n\n{content.strip()}"
 
-        frontmatter = parse_frontmatter(content)
-        remaining = remove_frontmatter(content)
-        return frontmatter, remaining
+        await write_file_atomic(path, final_content)
+        return await compute_checksum(final_content)
 
     except Exception as e:
-        if not isinstance(e, ParseError):
-            logger.error(f"Failed to parse content with frontmatter: {e}")
-            raise ParseError(f"Failed to parse content with frontmatter: {e}")
-        raise
+        logger.error(f"Failed to update frontmatter in {path}: {e}")
+        raise FileError(f"Failed to update frontmatter: {e}")
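A minimal usage sketch for the new update_frontmatter() helper shown above; the basic_memory.file_utils module path is an assumption, since this hunk's file header is not shown in the diff.

import asyncio
from pathlib import Path

from basic_memory.file_utils import update_frontmatter  # module path assumed

# Merges the given fields into the file's existing frontmatter and returns the new checksum.
checksum = asyncio.run(
    update_frontmatter(Path("notes/example.md"), {"type": "note", "title": "Example"})
)
print(checksum)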
@@ -2,6 +2,7 @@
 
 from basic_memory.file_utils import ParseError
 from basic_memory.markdown.entity_parser import EntityParser
+from basic_memory.markdown.markdown_processor import MarkdownProcessor
 from basic_memory.markdown.schemas import (
     EntityMarkdown,
     EntityFrontmatter,
@@ -13,6 +14,7 @@ __all__ = [
     "EntityMarkdown",
     "EntityFrontmatter",
     "EntityParser",
+    "MarkdownProcessor",
     "Observation",
     "Relation",
     "ParseError",
@@ -116,7 +116,7 @@ class EntityParser:
 
         metadata = post.metadata
         metadata["title"] = post.metadata.get("title", file_path.name)
-        metadata["type"] = metadata.get("type", "note")
+        metadata["type"] = post.metadata.get("type", "note")
         metadata["tags"] = parse_tags(post.metadata.get("tags", []))
 
         # frontmatter
@@ -1,17 +1,3 @@
-"""Process markdown files with structured sections.
-
-This module follows a Read -> Modify -> Write pattern for all file operations:
-1. Read entire file and parse into EntityMarkdown schema
-2. Modify the schema (add relation, update content, etc)
-3. Write entire file atomically using temp file + swap
-
-No in-place updates are performed. Each write reconstructs the entire file from the schema.
-The file format has two distinct types of content:
-1. User content - Free form text that is preserved exactly as written
-2. Structured sections - Observations and Relations that are always formatted
-   in a standard way and can be overwritten since they're tracked in our schema
-"""
-
 from pathlib import Path
 from typing import Optional
 from collections import OrderedDict
@@ -33,6 +19,8 @@ class DirtyFileError(Exception):
 
 class MarkdownProcessor:
     """Process markdown files while preserving content and structure.
+
+    used only for import
 
     This class handles the file I/O aspects of our markdown processing. It:
     1. Uses EntityParser for reading/parsing files into our schema
@@ -68,7 +68,7 @@ def parse_observation(token: Token) -> Dict[str, Any]:
 
     return {
         'category': category,
-        'content': ' '.join(content_parts).strip(),
+        'content': content,
         'tags': list(tags) if tags else None,
        'context': context
    }
@@ -9,15 +9,13 @@ from pydantic import BaseModel
 class Observation(BaseModel):
     """An observation about an entity."""
 
-    category: Optional[str] = None
+    category: Optional[str] = "Note"
     content: str
     tags: Optional[List[str]] = None
     context: Optional[str] = None
 
     def __str__(self) -> str:
         obs_string = f"- [{self.category}] {self.content}"
-        if self.tags:
-            obs_string += " " + " ".join(f"#{tag}" for tag in sorted(self.tags))
         if self.context:
             obs_string += f" ({self.context})"
         return obs_string
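A tiny sketch of the revised Observation rendering: category now defaults to "Note", and __str__ no longer appends tags.

from basic_memory.markdown.schemas import Observation

obs = Observation(content="Uses SQLite for storage", context="architecture", tags=["storage"])
print(obs)  # -> "- [Note] Uses SQLite for storage (architecture)"; tags stay on the model but are not rendered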
@@ -1,6 +1,6 @@
 """Discussion context tools for Basic Memory MCP server."""
 
-from typing import Optional, Literal
+from typing import Optional, Literal, List
 
 from loguru import logger
 
@@ -9,6 +9,7 @@ from basic_memory.mcp.server import mcp
 from basic_memory.mcp.tools.utils import call_get
 from basic_memory.schemas.memory import GraphContext, MemoryUrl, memory_url, memory_url_path, normalize_memory_url
 from basic_memory.schemas.base import TimeFrame
+from basic_memory.schemas.search import SearchItemType
 
 
 @mcp.tool(
@@ -83,7 +84,7 @@ async def build_context(
     """,
 )
 async def recent_activity(
-    type: Literal["entity", "observation", "relation"] = None,
+    type: List[Literal["entity", "observation", "relation"]] = None,
     depth: Optional[int] = 1,
     timeframe: Optional[TimeFrame] = "7d",
     max_results: int = 10,
@@ -110,6 +111,9 @@ async def recent_activity(
         - metadata: Query details and statistics
 
     Examples:
+        # Get all entities for the last 10 days (default)
+        recent_activity()
+
         # Get all entities from yesterday
         recent_activity(type=["entity"], timeframe="yesterday")
 
@@ -131,8 +135,9 @@ async def recent_activity(
         "depth": depth,
         "timeframe": timeframe,
         "max_results": max_results,
-        "type": type if type else None,
     }
+    if type:
+        params["type"] = type
 
     response = await call_get(
         client,
@@ -1,12 +1,15 @@
 """Models package for basic-memory."""
 
+import basic_memory
 from basic_memory.models.base import Base
 from basic_memory.models.knowledge import Entity, Observation, Relation, ObservationCategory
 
+SCHEMA_VERSION = basic_memory.__version__ + "-" + "003"
+
 __all__ = [
-    'Base',
-    'Entity',
-    'Observation',
-    'ObservationCategory',
-    'Relation'
-]
+    "Base",
+    "Entity",
+    "Observation",
+    "ObservationCategory",
+    "Relation",
+    ]
@@ -1,9 +1,10 @@
 """Base model class for SQLAlchemy models."""
-
+from sqlalchemy import String, Integer
 from sqlalchemy.ext.asyncio import AsyncAttrs
-from sqlalchemy.orm import DeclarativeBase
+from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
 
 
 class Base(AsyncAttrs, DeclarativeBase):
     """Base class for all models"""
-    pass
+    pass
+
@@ -95,14 +95,15 @@ class SearchRepository:
         permalink_match: Optional[str] = None,
         title: Optional[str] = None,
         types: List[SearchItemType] = None,
-        after_date: datetime = None,
+        after_date: Optional[datetime] = None,
         entity_types: List[str] = None,
         limit: int = 10,
     ) -> List[SearchIndexRow]:
         """Search across all indexed content with fuzzy matching."""
         conditions = []
         params = {}
-
+        order_by_clause = ""
+
         # Handle text search for title and content
         if search_text:
             search_text = self._quote_search_term(search_text.lower().strip())
@@ -139,6 +140,9 @@ class SearchRepository:
         if after_date:
             params["after_date"] = after_date
             conditions.append("datetime(created_at) > datetime(:after_date)")
+
+            # order by most recent first
+            order_by_clause = ", updated_at DESC"
 
         # set limit on search query
         params["limit"] = limit
@@ -165,7 +169,7 @@ class SearchRepository:
                 bm25(search_index) as score
             FROM search_index
             WHERE {where_clause}
-            ORDER BY score ASC
+            ORDER BY score ASC {order_by_clause}
             LIMIT :limit
         """
 
@@ -197,6 +201,7 @@ class SearchRepository:
 
         #for r in results:
         #    logger.debug(f"Search result: type:{r.type} title: {r.title} permalink: {r.permalink} score: {r.score}")
+
         return results
 
     async def index_item(
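A short sketch of how the ordering change behaves: when after_date is supplied, the generated SQL now ends with "ORDER BY score ASC, updated_at DESC LIMIT :limit", so more recent rows win ties on relevance. The repository instance is taken as a parameter here because its construction is not shown in this diff.

from datetime import datetime, timedelta
from typing import List

async def recent_matches(search_repository, text: str) -> List:
    # Relevance first, then recency (the secondary ordering applies only because after_date is set)
    return await search_repository.search(
        search_text=text,
        after_date=datetime.now() - timedelta(days=7),
        limit=10,
    )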