basic-memory 0.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of basic-memory might be problematic. Click here for more details.

Files changed (73) hide show
  1. basic_memory/__init__.py +3 -0
  2. basic_memory/api/__init__.py +4 -0
  3. basic_memory/api/app.py +42 -0
  4. basic_memory/api/routers/__init__.py +8 -0
  5. basic_memory/api/routers/knowledge_router.py +168 -0
  6. basic_memory/api/routers/memory_router.py +123 -0
  7. basic_memory/api/routers/resource_router.py +34 -0
  8. basic_memory/api/routers/search_router.py +34 -0
  9. basic_memory/cli/__init__.py +1 -0
  10. basic_memory/cli/app.py +4 -0
  11. basic_memory/cli/commands/__init__.py +9 -0
  12. basic_memory/cli/commands/init.py +38 -0
  13. basic_memory/cli/commands/status.py +152 -0
  14. basic_memory/cli/commands/sync.py +254 -0
  15. basic_memory/cli/main.py +48 -0
  16. basic_memory/config.py +53 -0
  17. basic_memory/db.py +135 -0
  18. basic_memory/deps.py +182 -0
  19. basic_memory/file_utils.py +248 -0
  20. basic_memory/markdown/__init__.py +19 -0
  21. basic_memory/markdown/entity_parser.py +137 -0
  22. basic_memory/markdown/markdown_processor.py +153 -0
  23. basic_memory/markdown/plugins.py +236 -0
  24. basic_memory/markdown/schemas.py +73 -0
  25. basic_memory/markdown/utils.py +144 -0
  26. basic_memory/mcp/__init__.py +1 -0
  27. basic_memory/mcp/async_client.py +10 -0
  28. basic_memory/mcp/main.py +21 -0
  29. basic_memory/mcp/server.py +39 -0
  30. basic_memory/mcp/tools/__init__.py +34 -0
  31. basic_memory/mcp/tools/ai_edit.py +84 -0
  32. basic_memory/mcp/tools/knowledge.py +56 -0
  33. basic_memory/mcp/tools/memory.py +142 -0
  34. basic_memory/mcp/tools/notes.py +122 -0
  35. basic_memory/mcp/tools/search.py +28 -0
  36. basic_memory/mcp/tools/utils.py +154 -0
  37. basic_memory/models/__init__.py +12 -0
  38. basic_memory/models/base.py +9 -0
  39. basic_memory/models/knowledge.py +204 -0
  40. basic_memory/models/search.py +34 -0
  41. basic_memory/repository/__init__.py +7 -0
  42. basic_memory/repository/entity_repository.py +156 -0
  43. basic_memory/repository/observation_repository.py +40 -0
  44. basic_memory/repository/relation_repository.py +78 -0
  45. basic_memory/repository/repository.py +303 -0
  46. basic_memory/repository/search_repository.py +259 -0
  47. basic_memory/schemas/__init__.py +73 -0
  48. basic_memory/schemas/base.py +216 -0
  49. basic_memory/schemas/delete.py +38 -0
  50. basic_memory/schemas/discovery.py +25 -0
  51. basic_memory/schemas/memory.py +111 -0
  52. basic_memory/schemas/request.py +77 -0
  53. basic_memory/schemas/response.py +220 -0
  54. basic_memory/schemas/search.py +117 -0
  55. basic_memory/services/__init__.py +11 -0
  56. basic_memory/services/context_service.py +274 -0
  57. basic_memory/services/entity_service.py +281 -0
  58. basic_memory/services/exceptions.py +15 -0
  59. basic_memory/services/file_service.py +213 -0
  60. basic_memory/services/link_resolver.py +126 -0
  61. basic_memory/services/search_service.py +218 -0
  62. basic_memory/services/service.py +36 -0
  63. basic_memory/sync/__init__.py +5 -0
  64. basic_memory/sync/file_change_scanner.py +162 -0
  65. basic_memory/sync/sync_service.py +140 -0
  66. basic_memory/sync/utils.py +66 -0
  67. basic_memory/sync/watch_service.py +197 -0
  68. basic_memory/utils.py +78 -0
  69. basic_memory-0.0.0.dist-info/METADATA +71 -0
  70. basic_memory-0.0.0.dist-info/RECORD +73 -0
  71. basic_memory-0.0.0.dist-info/WHEEL +4 -0
  72. basic_memory-0.0.0.dist-info/entry_points.txt +2 -0
  73. basic_memory-0.0.0.dist-info/licenses/LICENSE +661 -0
@@ -0,0 +1,303 @@
1
+ """Base repository implementation."""
2
+
3
+ from datetime import datetime
4
+ from typing import Type, Optional, Any, Sequence, TypeVar, List
5
+
6
+ from loguru import logger
7
+ from sqlalchemy import (
8
+ select,
9
+ func,
10
+ Select,
11
+ Executable,
12
+ inspect,
13
+ Result,
14
+ Column,
15
+ and_,
16
+ delete,
17
+ )
18
+ from sqlalchemy.exc import NoResultFound
19
+ from sqlalchemy.ext.asyncio import async_sessionmaker, AsyncSession
20
+ from sqlalchemy.orm.interfaces import LoaderOption
21
+
22
+ from basic_memory import db
23
+ from basic_memory.models import Base
24
+
25
+ T = TypeVar("T", bound=Base)
26
+
27
+
28
class Repository[T: Base]:
    """Base repository implementation with generic CRUD operations.

    Wraps an ``async_sessionmaker`` plus a single mapped model class and
    provides async create/read/update/delete helpers.  Each public method
    opens its own scoped session via ``db.scoped_session``; the
    ``select_by_id``/``select_by_ids`` helpers instead reuse a
    caller-supplied session so returned instances stay attached to it.

    NOTE(review): the PEP 695 class type parameter ``T`` declared here
    shadows the module-level ``TypeVar("T", bound=Base)`` within the class
    body — presumably both are meant to be the same bound; confirm and
    keep only one.
    """

    def __init__(self, session_maker: async_sessionmaker[AsyncSession], Model: Type[T]) -> None:
        """Bind the repository to a session factory and a mapped model class."""
        self.session_maker = session_maker
        self.Model = Model
        # Introspect the mapper once up front so later calls can cheaply
        # consult the primary key and the mapped column names.
        self.mapper = inspect(self.Model).mapper
        # Only the first primary-key column is kept — assumes single-column
        # primary keys throughout the model layer.
        self.primary_key: Column[Any] = self.mapper.primary_key[0]
        # Column keys accepted by get_model_data()/update() filtering.
        self.valid_columns = [column.key for column in self.mapper.columns]

    def get_model_data(self, entity_data: dict) -> dict:
        """Filter ``entity_data`` down to keys that are mapped columns.

        Entries whose value is ``None`` are dropped as well, so callers
        cannot set a column to NULL through ``create``/``create_all``
        (contrast with ``update``, which does apply ``None`` values).
        """
        model_data = {
            k: v for k, v in entity_data.items() if k in self.valid_columns and v is not None
        }
        return model_data

    async def select_by_id(self, session: AsyncSession, entity_id: int) -> Optional[T]:
        """Select an entity by ID using an existing session.

        Returns ``None`` when no row matches; eager-loads whatever
        relationships ``get_load_options`` declares.
        """
        query = (
            select(self.Model)
            .filter(self.primary_key == entity_id)
            .options(*self.get_load_options())
        )
        result = await session.execute(query)
        return result.scalars().one_or_none()

    async def select_by_ids(self, session: AsyncSession, ids: List[int]) -> Sequence[T]:
        """Select multiple entities by IDs using an existing session.

        Rows that do not exist are silently omitted from the result.
        """
        query = (
            select(self.Model).where(self.primary_key.in_(ids)).options(*self.get_load_options())
        )
        result = await session.execute(query)
        return result.scalars().all()

    async def add(self, model: T) -> T:
        """
        Add a model to the repository. This will also add related objects.

        :param model: the model to add
        :return: the added model instance, re-selected within the same
            session so eager-load options are applied
        """
        async with db.scoped_session(self.session_maker) as session:
            session.add(model)
            # Flush so the database assigns the primary key before re-query.
            await session.flush()

            # Query within same session
            found = await self.select_by_id(session, model.id)  # pyright: ignore [reportAttributeAccessIssue]
            assert found is not None, "can't find model after session.add"
            return found

    async def add_all(self, models: List[T]) -> Sequence[T]:
        """
        Add a list of models to the repository. This will also add related objects.

        :param models: the models to add
        :return: the added model instances, re-selected in one query
        """
        async with db.scoped_session(self.session_maker) as session:
            session.add_all(models)
            # Flush so every model receives its generated primary key.
            await session.flush()

            # Query within same session
            return await self.select_by_ids(session, [m.id for m in models])  # pyright: ignore [reportAttributeAccessIssue]

    def select(self, *entities: Any) -> Select:
        """Create a new SELECT statement.

        Returns:
            A SQLAlchemy Select object configured with the provided entities
            or this repository's model if no entities provided.
        """
        if not entities:
            entities = (self.Model,)
        return select(*entities)

    async def refresh(self, instance: T, relationships: list[str] | None = None) -> None:
        """Refresh instance and optionally specified relationships.

        ``relationships`` names the relationship attributes to reload;
        ``None`` refreshes only column attributes.
        """
        logger.debug(f"Refreshing {self.Model.__name__} instance: {getattr(instance, 'id', None)}")
        async with db.scoped_session(self.session_maker) as session:
            await session.refresh(instance, relationships or [])
            logger.debug(f"Refreshed relationships: {relationships}")

    async def find_all(self, skip: int = 0, limit: Optional[int] = 0) -> Sequence[T]:
        """Fetch records from the database with pagination.

        Note: because of the truthiness check below, both ``limit=0`` and
        ``limit=None`` mean "no limit"; the default of 0 is therefore
        equivalent to unlimited.
        """
        logger.debug(f"Finding all {self.Model.__name__} (skip={skip}, limit={limit})")

        async with db.scoped_session(self.session_maker) as session:
            query = select(self.Model).offset(skip).options(*self.get_load_options())
            if limit:
                query = query.limit(limit)

            result = await session.execute(query)

            items = result.scalars().all()
            logger.debug(f"Found {len(items)} {self.Model.__name__} records")
            return items

    async def find_by_id(self, entity_id: int) -> Optional[T]:
        """Fetch an entity by its unique identifier, or ``None`` if absent."""
        logger.debug(f"Finding {self.Model.__name__} by ID: {entity_id}")

        async with db.scoped_session(self.session_maker) as session:
            return await self.select_by_id(session, entity_id)

    async def find_by_ids(self, ids: List[int]) -> Sequence[T]:
        """Fetch multiple entities by their identifiers in a single query."""
        logger.debug(f"Finding {self.Model.__name__} by IDs: {ids}")

        async with db.scoped_session(self.session_maker) as session:
            return await self.select_by_ids(session, ids)

    async def find_one(self, query: Select[tuple[T]]) -> Optional[T]:
        """Execute a query and retrieve a single record, or ``None``.

        NOTE(review): loader options are applied here AND again inside
        ``execute_query`` (whose ``use_query_options`` defaults to True) —
        appears redundant; confirm and drop one of the two.
        """
        logger.debug(f"Finding one {self.Model.__name__} with query: {query}")

        # add in load options
        query = query.options(*self.get_load_options())
        result = await self.execute_query(query)
        entity = result.scalars().one_or_none()

        if entity:
            logger.debug(f"Found {self.Model.__name__}: {getattr(entity, 'id', None)}")
        else:
            logger.debug(f"No {self.Model.__name__} found")
        return entity

    async def find_modified_since(self, since: datetime) -> Sequence[T]:
        """Find all records modified since the given timestamp.

        This method assumes the model has an updated_at column. Override
        in subclasses if a different column should be used.

        Args:
            since: Datetime to search from

        Returns:
            Sequence of records modified since the timestamp

        Raises:
            AttributeError: if the model has no ``updated_at`` column.
        """
        logger.debug(f"Finding {self.Model.__name__} modified since: {since}")

        if not hasattr(self.Model, "updated_at"):
            raise AttributeError(f"{self.Model.__name__} does not have updated_at column")

        query = (
            select(self.Model)
            # Inclusive comparison: rows updated exactly at ``since`` match.
            .filter(self.Model.updated_at >= since)
            .options(*self.get_load_options())
        )

        async with db.scoped_session(self.session_maker) as session:
            result = await session.execute(query)
            items = result.scalars().all()
            logger.debug(f"Found {len(items)} modified {self.Model.__name__} records")
            return items

    async def create(self, data: dict) -> T:
        """Create a new record from a dict of column values.

        Keys that are not mapped columns — and values that are ``None`` —
        are ignored (see ``get_model_data``).
        """
        logger.debug(f"Creating {self.Model.__name__} from entity_data: {data}")
        async with db.scoped_session(self.session_maker) as session:
            # Only include valid columns that are provided in entity_data
            model_data = self.get_model_data(data)
            model = self.Model(**model_data)
            session.add(model)
            # Flush to obtain the generated primary key for the re-select.
            await session.flush()

            return_instance = await self.select_by_id(session, model.id)  # pyright: ignore [reportAttributeAccessIssue]
            assert return_instance is not None, "can't find model after session.add"
            return return_instance

    async def create_all(self, data_list: List[dict]) -> Sequence[T]:
        """Create multiple records in a single transaction."""
        logger.debug(f"Bulk creating {len(data_list)} {self.Model.__name__} instances")

        async with db.scoped_session(self.session_maker) as session:
            # Only include valid columns that are provided in entity_data
            model_list = [
                self.Model(
                    **self.get_model_data(d),
                )
                for d in data_list
            ]
            session.add_all(model_list)
            await session.flush()

            return await self.select_by_ids(session, [model.id for model in model_list])  # pyright: ignore [reportAttributeAccessIssue]

    async def update(self, entity_id: int, entity_data: dict | T) -> Optional[T]:
        """Update an entity with the given data.

        Accepts either a dict (only keys in ``valid_columns`` are applied,
        and — unlike ``create`` — ``None`` values ARE written) or a model
        instance (every table column is copied over).  Returns ``None``
        when no row with ``entity_id`` exists.
        """
        logger.debug(f"Updating {self.Model.__name__} {entity_id} with data: {entity_data}")
        async with db.scoped_session(self.session_maker) as session:
            try:
                result = await session.execute(
                    select(self.Model).filter(self.primary_key == entity_id)
                )
                entity = result.scalars().one()

                if isinstance(entity_data, dict):
                    for key, value in entity_data.items():
                        if key in self.valid_columns:
                            setattr(entity, key, value)

                elif isinstance(entity_data, self.Model):
                    for column in self.Model.__table__.columns.keys():
                        setattr(entity, column, getattr(entity_data, column))

                await session.flush()  # Make sure changes are flushed
                await session.refresh(entity)  # Refresh

                logger.debug(f"Updated {self.Model.__name__}: {entity_id}")
                # Re-select so eager-load options apply to the returned row.
                return await self.select_by_id(session, entity.id)  # pyright: ignore [reportAttributeAccessIssue]

            except NoResultFound:
                logger.debug(f"No {self.Model.__name__} found to update: {entity_id}")
                return None

    async def delete(self, entity_id: int) -> bool:
        """Delete an entity from the database.

        Returns True if a row was deleted, False if none matched.
        Loading the ORM instance first (rather than a bulk DELETE) lets
        SQLAlchemy apply relationship cascade rules.
        """
        logger.debug(f"Deleting {self.Model.__name__}: {entity_id}")
        async with db.scoped_session(self.session_maker) as session:
            try:
                result = await session.execute(
                    select(self.Model).filter(self.primary_key == entity_id)
                )
                entity = result.scalars().one()
                await session.delete(entity)

                logger.debug(f"Deleted {self.Model.__name__}: {entity_id}")
                return True
            except NoResultFound:
                logger.debug(f"No {self.Model.__name__} found to delete: {entity_id}")
                return False

    async def delete_by_ids(self, ids: List[int]) -> int:
        """Delete records matching given IDs via a bulk DELETE.

        Returns the number of rows deleted.  Note: bulk DELETE bypasses
        ORM-level cascade handling, unlike ``delete``.
        """
        logger.debug(f"Deleting {self.Model.__name__} by ids: {ids}")
        async with db.scoped_session(self.session_maker) as session:
            query = delete(self.Model).where(self.primary_key.in_(ids))
            result = await session.execute(query)
            logger.debug(f"Deleted {result.rowcount} records")
            return result.rowcount

    async def delete_by_fields(self, **filters: Any) -> bool:
        """Delete records matching given field values (ANDed together).

        Returns True if at least one row was deleted.
        """
        logger.debug(f"Deleting {self.Model.__name__} by fields: {filters}")
        async with db.scoped_session(self.session_maker) as session:
            conditions = [getattr(self.Model, field) == value for field, value in filters.items()]
            query = delete(self.Model).where(and_(*conditions))
            result = await session.execute(query)
            deleted = result.rowcount > 0
            logger.debug(f"Deleted {result.rowcount} records")
            return deleted

    async def count(self, query: Executable | None = None) -> int:
        """Count entities in the database table.

        With no query, counts all rows of this repository's model;
        otherwise executes the supplied query and reads its scalar result.
        """
        async with db.scoped_session(self.session_maker) as session:
            if query is None:
                query = select(func.count()).select_from(self.Model)
            result = await session.execute(query)
            scalar = result.scalar()
            # A NULL scalar (no rows) is reported as zero.
            count = scalar if scalar is not None else 0
            logger.debug(f"Counted {count} {self.Model.__name__} records")
            return count

    async def execute_query(self, query: Executable, use_query_options: bool = True) -> Result[Any]:
        """Execute a query asynchronously.

        When ``use_query_options`` is True (the default), this repository's
        eager-load options are appended to the query before execution.
        """

        query = query.options(*self.get_load_options()) if use_query_options else query

        logger.debug(f"Executing query: {query}")
        async with db.scoped_session(self.session_maker) as session:
            result = await session.execute(query)
            logger.debug("Query executed successfully")
            return result

    def get_load_options(self) -> List[LoaderOption]:
        """Get list of loader options for eager loading relationships.
        Override in subclasses to specify what to load."""
        return []
@@ -0,0 +1,259 @@
1
+ """Repository for search operations."""
2
+
3
+ import json
4
+ import time
5
+ from dataclasses import dataclass
6
+ from datetime import datetime
7
+ from typing import List, Optional, Any, Dict
8
+
9
+ from loguru import logger
10
+ from sqlalchemy import text, Executable, Result
11
+ from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker
12
+
13
+ from basic_memory import db
14
+ from basic_memory.models.search import CREATE_SEARCH_INDEX
15
+ from basic_memory.schemas.search import SearchItemType
16
+
17
+
18
@dataclass
class SearchIndexRow:
    """One row of the ``search_index`` FTS table.

    Doubles as a search result: ``score`` is populated from ``bm25()`` when
    a row comes back from a query and is never written to the index.
    Field order and defaults are part of the constructor interface.
    """

    id: int
    type: str
    metadata: Optional[dict] = None

    # date values
    created_at: Optional[datetime] = None
    updated_at: Optional[datetime] = None

    # assigned in result
    score: Optional[float] = None

    # Common fields
    permalink: Optional[str] = None
    file_path: Optional[str] = None

    # Type-specific fields
    # (title/content were previously mis-annotated as Optional[int]; they
    # hold text — search() assigns row.title/row.content string columns.)
    title: Optional[str] = None  # entity
    content: Optional[str] = None  # entity, observation
    entity_id: Optional[int] = None  # observations
    category: Optional[str] = None  # observations
    from_id: Optional[int] = None  # relations
    to_id: Optional[int] = None  # relations
    relation_type: Optional[str] = None  # relations

    def to_insert(self) -> dict:
        """Return the bind-parameter dict for an INSERT into search_index.

        ``metadata`` is JSON-encoded (``None`` becomes the string "null");
        every other field passes through unchanged.  The previous
        ``x if x else None`` guards on the datetimes were no-ops, since a
        datetime instance is always truthy.
        """
        return {
            "id": self.id,
            "title": self.title,
            "content": self.content,
            "permalink": self.permalink,
            "file_path": self.file_path,
            "type": self.type,
            "metadata": json.dumps(self.metadata),
            "from_id": self.from_id,
            "to_id": self.to_id,
            "relation_type": self.relation_type,
            "entity_id": self.entity_id,
            "category": self.category,
            "created_at": self.created_at,
            "updated_at": self.updated_at,
        }
63
+
64
+
65
class SearchRepository:
    """Repository for search index operations.

    Talks directly to the SQLite FTS5 ``search_index`` table with raw SQL;
    each public method opens its own scoped session.
    """

    def __init__(self, session_maker: async_sessionmaker[AsyncSession]) -> None:
        self.session_maker = session_maker

    async def init_search_index(self):
        """Create or recreate the search index."""
        async with db.scoped_session(self.session_maker) as session:
            await session.execute(CREATE_SEARCH_INDEX)
            await session.commit()

    def _quote_search_term(self, term: str) -> str:
        """Add quotes if term contains special characters.
        For FTS5, special characters and phrases need to be quoted to be treated as a single token.
        """
        # List of special characters that need quoting
        special_chars = ['/', '*', '-', '.', ' ', '(', ')', '[', ']', '"', "'"]

        # Check if term contains any special characters
        if any(c in term for c in special_chars):
            # If the term already contains quotes, escape them
            # (FTS5 escapes an embedded double quote by doubling it).
            term = term.replace('"', '""')
            return f'"{term}"'
        return term

    async def search(
        self,
        search_text: Optional[str] = None,
        permalink: Optional[str] = None,
        permalink_match: Optional[str] = None,
        title: Optional[str] = None,
        types: Optional[List[SearchItemType]] = None,
        after_date: Optional[datetime] = None,
        entity_types: Optional[List[str]] = None,
        limit: int = 10,
    ) -> List[SearchIndexRow]:
        """Search across all indexed content with fuzzy matching.

        All filters are ANDed together; results are ordered by ascending
        bm25 score (lower is a better match in SQLite's convention).

        NOTE(review): ``search_text`` and ``title`` both bind the ``:text``
        parameter — if both are supplied, the title term silently overwrites
        the search_text term while both MATCH conditions remain; confirm
        whether they are meant to be mutually exclusive.
        """
        conditions = []
        params = {}

        # Handle text search for title and content
        if search_text:
            # Prefix search: trailing * makes FTS5 match term as a prefix.
            search_text = self._quote_search_term(search_text.lower().strip())
            params["text"] = f"{search_text}*"
            conditions.append("(title MATCH :text OR content MATCH :text)")

        # Handle title match search
        if title:
            title_text = self._quote_search_term(title.lower().strip())
            params["text"] = f"{title_text}*"
            conditions.append("title MATCH :text")

        # Handle permalink exact search
        if permalink:
            params["permalink"] = permalink
            conditions.append("permalink = :permalink")

        # Handle permalink match search, supports *
        if permalink_match:
            params["permalink"] = self._quote_search_term(permalink_match)
            conditions.append("permalink MATCH :permalink")

        # Handle type filter
        # NOTE(review): the IN-lists below are built by string interpolation
        # rather than bound parameters. ``types`` are enum values, but
        # ``entity_types`` is caller-supplied text — a value containing a
        # single quote breaks the SQL (injection risk); parameterize these.
        if types:
            type_list = ", ".join(f"'{t.value}'" for t in types)
            conditions.append(f"type IN ({type_list})")

        # Handle entity type filter
        if entity_types:
            entity_type_list = ", ".join(f"'{t}'" for t in entity_types)
            conditions.append(f"json_extract(metadata, '$.entity_type') IN ({entity_type_list})")

        # Handle date filter using datetime() for proper comparison
        if after_date:
            params["after_date"] = after_date
            conditions.append("datetime(created_at) > datetime(:after_date)")

        # set limit on search query
        params["limit"] = limit

        # Build WHERE clause
        where_clause = " AND ".join(conditions) if conditions else "1=1"

        sql = f"""
            SELECT
                id,
                title,
                permalink,
                file_path,
                type,
                metadata,
                from_id,
                to_id,
                relation_type,
                entity_id,
                content,
                category,
                created_at,
                updated_at,
                bm25(search_index) as score
            FROM search_index
            WHERE {where_clause}
            ORDER BY score ASC
            LIMIT :limit
        """

        # logger.debug(f"Search {sql} params: {params}")
        async with db.scoped_session(self.session_maker) as session:
            result = await session.execute(text(sql), params)
            rows = result.fetchall()

        # Rehydrate each raw row into a SearchIndexRow, decoding the
        # JSON-encoded metadata column back into a dict.
        results = [
            SearchIndexRow(
                id=row.id,
                title=row.title,
                permalink=row.permalink,
                file_path=row.file_path,
                type=row.type,
                score=row.score,
                metadata=json.loads(row.metadata),
                from_id=row.from_id,
                to_id=row.to_id,
                relation_type=row.relation_type,
                entity_id=row.entity_id,
                content=row.content,
                category=row.category,
                created_at=row.created_at,
                updated_at=row.updated_at,
            )
            for row in rows
        ]

        # for r in results:
        #     logger.debug(f"Search result: type:{r.type} title: {r.title} permalink: {r.permalink} score: {r.score}")
        return results

    async def index_item(
        self,
        search_index_row: SearchIndexRow,
    ):
        """Index or update a single item.

        Implemented as delete-then-insert keyed on permalink, so re-indexing
        an existing permalink replaces its row.
        """
        async with db.scoped_session(self.session_maker) as session:
            # Delete existing record if any
            await session.execute(
                text("DELETE FROM search_index WHERE permalink = :permalink"),
                {"permalink": search_index_row.permalink},
            )

            # Insert new record
            await session.execute(
                text("""
                    INSERT INTO search_index (
                        id, title, content, permalink, file_path, type, metadata,
                        from_id, to_id, relation_type,
                        entity_id, category,
                        created_at, updated_at
                    ) VALUES (
                        :id, :title, :content, :permalink, :file_path, :type, :metadata,
                        :from_id, :to_id, :relation_type,
                        :entity_id, :category,
                        :created_at, :updated_at
                    )
                """),
                search_index_row.to_insert(),
            )
            logger.debug(f"indexed permalink {search_index_row.permalink}")
            await session.commit()

    async def delete_by_permalink(self, permalink: str):
        """Delete an item from the search index."""
        async with db.scoped_session(self.session_maker) as session:
            await session.execute(
                text("DELETE FROM search_index WHERE permalink = :permalink"),
                {"permalink": permalink},
            )
            await session.commit()

    async def execute_query(
        self,
        query: Executable,
        params: Optional[Dict[str, Any]] = None,
    ) -> Result[Any]:
        """Execute a query asynchronously, logging its wall-clock duration."""
        # logger.debug(f"Executing query: {query}")
        async with db.scoped_session(self.session_maker) as session:
            start_time = time.perf_counter()
            if params:
                result = await session.execute(query, params)
            else:
                result = await session.execute(query)
            end_time = time.perf_counter()
            elapsed_time = end_time - start_time
            logger.debug(f"Query executed successfully in {elapsed_time:.2f}s.")
            return result
@@ -0,0 +1,73 @@
1
+ """Knowledge graph schema exports.
2
+
3
+ This module exports all schema classes to simplify imports.
4
+ Rather than importing from individual schema files, you can
5
+ import everything from basic_memory.schemas.
6
+ """
7
+
8
+ # Base types and models
9
+ from basic_memory.schemas.base import (
10
+ Observation,
11
+ EntityType,
12
+ RelationType,
13
+ Relation,
14
+ Entity,
15
+ )
16
+
17
+ # Delete operation models
18
+ from basic_memory.schemas.delete import (
19
+ DeleteEntitiesRequest,
20
+ )
21
+
22
+ # Request models
23
+ from basic_memory.schemas.request import (
24
+ SearchNodesRequest,
25
+ GetEntitiesRequest,
26
+ CreateRelationsRequest, UpdateEntityRequest,
27
+ )
28
+
29
+ # Response models
30
+ from basic_memory.schemas.response import (
31
+ SQLAlchemyModel,
32
+ ObservationResponse,
33
+ RelationResponse,
34
+ EntityResponse,
35
+ EntityListResponse,
36
+ SearchNodesResponse,
37
+ DeleteEntitiesResponse,
38
+ )
39
+
40
+ # Discovery and analytics models
41
+ from basic_memory.schemas.discovery import (
42
+ EntityTypeList,
43
+ ObservationCategoryList, TypedEntityList,
44
+ )
45
+
46
+ # For convenient imports, export all models
47
+ __all__ = [
48
+ # Base
49
+ "Observation",
50
+ "EntityType",
51
+ "RelationType",
52
+ "Relation",
53
+ "Entity",
54
+ # Requests
55
+ "SearchNodesRequest",
56
+ "GetEntitiesRequest",
57
+ "CreateRelationsRequest",
58
+ "UpdateEntityRequest",
59
+ # Responses
60
+ "SQLAlchemyModel",
61
+ "ObservationResponse",
62
+ "RelationResponse",
63
+ "EntityResponse",
64
+ "EntityListResponse",
65
+ "SearchNodesResponse",
66
+ "DeleteEntitiesResponse",
67
+ # Delete Operations
68
+ "DeleteEntitiesRequest",
69
+ # Discovery and Analytics
70
+ "EntityTypeList",
71
+ "ObservationCategoryList",
72
+ "TypedEntityList"
73
+ ]