basic-memory 0.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of basic-memory might be problematic. Click here for more details.

Files changed (73)
  1. basic_memory/__init__.py +3 -0
  2. basic_memory/api/__init__.py +4 -0
  3. basic_memory/api/app.py +42 -0
  4. basic_memory/api/routers/__init__.py +8 -0
  5. basic_memory/api/routers/knowledge_router.py +168 -0
  6. basic_memory/api/routers/memory_router.py +123 -0
  7. basic_memory/api/routers/resource_router.py +34 -0
  8. basic_memory/api/routers/search_router.py +34 -0
  9. basic_memory/cli/__init__.py +1 -0
  10. basic_memory/cli/app.py +4 -0
  11. basic_memory/cli/commands/__init__.py +9 -0
  12. basic_memory/cli/commands/init.py +38 -0
  13. basic_memory/cli/commands/status.py +152 -0
  14. basic_memory/cli/commands/sync.py +254 -0
  15. basic_memory/cli/main.py +48 -0
  16. basic_memory/config.py +53 -0
  17. basic_memory/db.py +135 -0
  18. basic_memory/deps.py +182 -0
  19. basic_memory/file_utils.py +248 -0
  20. basic_memory/markdown/__init__.py +19 -0
  21. basic_memory/markdown/entity_parser.py +137 -0
  22. basic_memory/markdown/markdown_processor.py +153 -0
  23. basic_memory/markdown/plugins.py +236 -0
  24. basic_memory/markdown/schemas.py +73 -0
  25. basic_memory/markdown/utils.py +144 -0
  26. basic_memory/mcp/__init__.py +1 -0
  27. basic_memory/mcp/async_client.py +10 -0
  28. basic_memory/mcp/main.py +21 -0
  29. basic_memory/mcp/server.py +39 -0
  30. basic_memory/mcp/tools/__init__.py +34 -0
  31. basic_memory/mcp/tools/ai_edit.py +84 -0
  32. basic_memory/mcp/tools/knowledge.py +56 -0
  33. basic_memory/mcp/tools/memory.py +142 -0
  34. basic_memory/mcp/tools/notes.py +122 -0
  35. basic_memory/mcp/tools/search.py +28 -0
  36. basic_memory/mcp/tools/utils.py +154 -0
  37. basic_memory/models/__init__.py +12 -0
  38. basic_memory/models/base.py +9 -0
  39. basic_memory/models/knowledge.py +204 -0
  40. basic_memory/models/search.py +34 -0
  41. basic_memory/repository/__init__.py +7 -0
  42. basic_memory/repository/entity_repository.py +156 -0
  43. basic_memory/repository/observation_repository.py +40 -0
  44. basic_memory/repository/relation_repository.py +78 -0
  45. basic_memory/repository/repository.py +303 -0
  46. basic_memory/repository/search_repository.py +259 -0
  47. basic_memory/schemas/__init__.py +73 -0
  48. basic_memory/schemas/base.py +216 -0
  49. basic_memory/schemas/delete.py +38 -0
  50. basic_memory/schemas/discovery.py +25 -0
  51. basic_memory/schemas/memory.py +111 -0
  52. basic_memory/schemas/request.py +77 -0
  53. basic_memory/schemas/response.py +220 -0
  54. basic_memory/schemas/search.py +117 -0
  55. basic_memory/services/__init__.py +11 -0
  56. basic_memory/services/context_service.py +274 -0
  57. basic_memory/services/entity_service.py +281 -0
  58. basic_memory/services/exceptions.py +15 -0
  59. basic_memory/services/file_service.py +213 -0
  60. basic_memory/services/link_resolver.py +126 -0
  61. basic_memory/services/search_service.py +218 -0
  62. basic_memory/services/service.py +36 -0
  63. basic_memory/sync/__init__.py +5 -0
  64. basic_memory/sync/file_change_scanner.py +162 -0
  65. basic_memory/sync/sync_service.py +140 -0
  66. basic_memory/sync/utils.py +66 -0
  67. basic_memory/sync/watch_service.py +197 -0
  68. basic_memory/utils.py +78 -0
  69. basic_memory-0.0.0.dist-info/METADATA +71 -0
  70. basic_memory-0.0.0.dist-info/RECORD +73 -0
  71. basic_memory-0.0.0.dist-info/WHEEL +4 -0
  72. basic_memory-0.0.0.dist-info/entry_points.txt +2 -0
  73. basic_memory-0.0.0.dist-info/licenses/LICENSE +661 -0
@@ -0,0 +1,117 @@
1
+ """Search schemas for Basic Memory.
2
+
3
+ The search system supports three primary modes:
4
+ 1. Exact permalink lookup
5
+ 2. Pattern matching with *
6
+ 3. Full-text search across content
7
+ """
8
+
9
+ from typing import Optional, List, Union
10
+ from datetime import datetime
11
+ from enum import Enum
12
+ from pydantic import BaseModel, field_validator
13
+
14
+
15
class SearchItemType(str, Enum):
    """Enumerates the kinds of items that can appear in search results."""

    ENTITY = "entity"  # a top-level knowledge entity
    OBSERVATION = "observation"  # an observation attached to an entity
    RELATION = "relation"  # a link between two entities
21
+
22
+
23
class SearchQuery(BaseModel):
    """Search query parameters.

    Use ONE of these primary search modes:
    - permalink: Exact permalink match
    - permalink_match: Path pattern with *
    - text: Full-text search of title/content
    - title: Title-only search

    Optionally filter results by:
    - types: Limit to specific item types
    - entity_types: Limit to specific entity types
    - after_date: Only items after date
    """

    # Primary search modes (use ONE of these)
    permalink: Optional[str] = None  # Exact permalink match
    permalink_match: Optional[str] = None  # Path pattern match with * wildcards
    text: Optional[str] = None  # Full-text search
    title: Optional[str] = None  # title only search

    # Optional filters
    types: Optional[List[SearchItemType]] = None  # Filter by item type
    entity_types: Optional[List[str]] = None  # Filter by entity type
    after_date: Optional[Union[datetime, str]] = None  # Time-based filter

    @field_validator("after_date")
    @classmethod
    def validate_date(cls, v: Optional[Union[datetime, str]]) -> Optional[str]:
        """Convert datetime to ISO format if needed."""
        if v is None:
            return None
        if isinstance(v, datetime):
            return v.isoformat()
        return v

    def no_criteria(self) -> bool:
        """Return True when no search mode and no filter has been set.

        Fix: `title` is a primary search mode but was previously omitted
        from this check, so a title-only query looked like an empty one.
        """
        return (
            self.permalink is None
            and self.permalink_match is None
            and self.text is None
            and self.title is None
            and self.after_date is None
            and self.types is None
            and self.entity_types is None
        )
67
+
68
+
69
class SearchResult(BaseModel):
    """Search result with score and metadata.

    One row from the search index. Only the fields relevant to the
    item's ``type`` are populated; the rest stay ``None``.
    """

    id: int
    type: SearchItemType
    score: Optional[float] = None  # relevance score, when supplied by the index
    metadata: Optional[dict] = None  # extra per-item data, shape not fixed here

    # Common fields
    permalink: Optional[str] = None
    file_path: Optional[str] = None

    # Type-specific fields
    entity_id: Optional[int] = None  # For observations
    category: Optional[str] = None  # For observations
    from_id: Optional[int] = None  # For relations
    to_id: Optional[int] = None  # For relations
    relation_type: Optional[str] = None  # For relations
87
+
88
+
89
class RelatedResult(BaseModel):
    """An item reached by traversing relations from a primary result.

    ``depth`` is the number of hops from the seed item and ``root_id``
    identifies the seed the traversal started from.
    """

    type: SearchItemType
    id: int
    title: str
    permalink: str
    depth: int  # hops from the seed result
    root_id: int  # id of the seed item this was reached from
    created_at: datetime
    from_id: Optional[int] = None  # For relations
    to_id: Optional[int] = None  # For relations
    relation_type: Optional[str] = None  # For relations
    category: Optional[str] = None  # For observations
    entity_id: Optional[int] = None  # For observations
102
+
103
+
104
class SearchResponse(BaseModel):
    """Wrapper for search results."""

    results: List[SearchResult]  # ordered list of matches for the query
108
+
109
+
110
# Schema for future advanced search endpoint
class AdvancedSearchQuery(BaseModel):
    """Advanced full-text search with explicit FTS5 syntax.

    Unlike ``SearchQuery``, the query string is passed through verbatim,
    so callers may use raw FTS5 operators.
    """

    query: str  # Raw FTS5 query (e.g., "foo AND bar")
    types: Optional[List[SearchItemType]] = None  # Filter by item type
    entity_types: Optional[List[str]] = None  # Filter by entity type
    after_date: Optional[Union[datetime, str]] = None  # Time-based filter
@@ -0,0 +1,11 @@
1
+ """Services package."""
2
+
3
+ from .service import BaseService
4
+ from .file_service import FileService
5
+ from .entity_service import EntityService
6
+
7
+ __all__ = [
8
+ "BaseService",
9
+ "FileService",
10
+ "EntityService",
11
+ ]
@@ -0,0 +1,274 @@
1
+ """Service for building rich context from the knowledge graph."""
2
+
3
+ from dataclasses import dataclass
4
+ from datetime import datetime, timezone
5
+ from typing import List, Optional, Tuple
6
+
7
+ from loguru import logger
8
+ from sqlalchemy import text
9
+
10
+ from basic_memory.repository.entity_repository import EntityRepository
11
+ from basic_memory.repository.search_repository import SearchRepository
12
+ from basic_memory.schemas.memory import MemoryUrl, memory_url_path
13
+ from basic_memory.schemas.search import SearchItemType
14
+
15
+
16
@dataclass
class ContextResultRow:
    """One row of the context query result (see ContextService.find_related).

    Field order matters: it defines the generated ``__init__`` signature.
    """

    type: str  # 'entity', 'observation', or 'relation'
    id: int
    title: str
    permalink: str
    file_path: str
    depth: int  # traversal distance from the seed item
    root_id: int  # id of the seed item this row was reached from
    created_at: datetime
    from_id: Optional[int] = None  # relations only
    to_id: Optional[int] = None  # relations only
    relation_type: Optional[str] = None  # relations only
    content: Optional[str] = None
    category: Optional[str] = None  # observations only
    entity_id: Optional[int] = None  # observations only
32
+
33
+
34
class ContextService:
    """Service for building rich context from memory:// URIs.

    Handles three types of context building:
    1. Direct permalink lookup - exact match on path
    2. Pattern matching - using * wildcards
    3. Special modes via params (e.g., 'related')
    """

    def __init__(
        self,
        search_repository: SearchRepository,
        entity_repository: EntityRepository,
    ):
        self.search_repository = search_repository
        self.entity_repository = entity_repository

    async def build_context(
        self,
        memory_url: Optional[MemoryUrl] = None,
        types: Optional[List[SearchItemType]] = None,
        depth: int = 1,
        since: Optional[datetime] = None,
        max_results: int = 10,
    ) -> dict:
        """Build rich context from a memory:// URI.

        Args:
            memory_url: URI to resolve; a ``*`` in the path triggers pattern
                search, otherwise an exact permalink lookup is done. When
                omitted, results are selected by ``types`` instead.
            types: item types to search for when no URL is given.
            depth: maximum relation-traversal depth for related items.
            since: only include items created at/after this time.
            max_results: cap on the number of related items returned.

        Returns:
            dict with ``primary_results``, ``related_results`` and ``metadata``.
        """
        logger.debug(
            f"Building context for URI: '{memory_url}' depth: '{depth}' since: '{since}' max_results: '{max_results}'"
        )

        if memory_url:
            path = memory_url_path(memory_url)
            # Pattern matching - use search
            if "*" in path:
                logger.debug(f"Pattern search for '{path}'")
                primary = await self.search_repository.search(
                    permalink_match=path
                )

            # Direct lookup for exact path
            else:
                logger.debug(f"Direct lookup for '{path}'")
                primary = await self.search_repository.search(permalink=path)
        else:
            logger.debug(f"Build context for '{types}'")
            primary = await self.search_repository.search(types=types)

        # Get type_id pairs for traversal

        type_id_pairs = [(r.type, r.id) for r in primary] if primary else []
        logger.debug(f"found primary type_id_pairs: {len(type_id_pairs)}")

        # Find related content
        related = await self.find_related(
            type_id_pairs, max_depth=depth, since=since, max_results=max_results
        )
        logger.debug(f"Found {len(related)} related results")
        for r in related:
            logger.debug(f"Found related {r.type}: {r.permalink}")

        # Build response
        return {
            "primary_results": primary,
            "related_results": related,
            "metadata": {
                "uri": memory_url_path(memory_url) if memory_url else None,
                "types": types if types else None,
                "depth": depth,
                "timeframe": since.isoformat() if since else None,
                "generated_at": datetime.now(timezone.utc).isoformat(),
                "matched_results": len(primary),
                "total_results": len(primary) + len(related),
                "total_relations": sum(1 for r in related if r.type == SearchItemType.RELATION),
            },
        }

    async def find_related(
        self,
        type_id_pairs: List[Tuple[str, int]],
        max_depth: int = 1,
        since: Optional[datetime] = None,
        max_results: int = 10,
    ) -> List[ContextResultRow]:
        """Find items connected through relations.

        Uses recursive CTE to find:
        - Connected entities
        - Their observations
        - Relations that connect them

        Seed rows are excluded from the output; the closest (MIN depth)
        occurrence of each reached item is kept.
        """
        if not type_id_pairs:
            return []

        logger.debug(f"Finding connected items for {len(type_id_pairs)} with depth {max_depth}")

        # Build the VALUES clause directly since SQLite doesn't handle parameterized IN well
        # NOTE(review): `t` and `i` are interpolated into SQL unescaped. They come
        # from the search index ('entity'/'observation'/'relation', int ids), so
        # this is safe today, but confirm callers can never pass arbitrary types.
        values = ", ".join([f"('{t}', {i})" for t, i in type_id_pairs])

        # Parameters for bindings
        params = {"max_depth": max_depth, "max_results": max_results}
        if since:
            params["since_date"] = since.isoformat()

        # Build date filter (each branch of the CTE needs its own alias)
        date_filter = "AND base.created_at >= :since_date" if since else ""
        r1_date_filter = "AND r.created_at >= :since_date" if since else ""
        related_date_filter = "AND e.created_at >= :since_date" if since else ""

        # NOTE(review): in the third branch below, `cg` is a relation row, so
        # `cg.from_id = cg.id` compares a relation's from_id with its own id —
        # presumably this was meant to pick the far end of the relation; verify
        # against the search_index schema.
        query = text(f"""
            WITH RECURSIVE context_graph AS (
                -- Base case: seed items (unchanged)
                SELECT
                    id,
                    type,
                    title,
                    permalink,
                    file_path,
                    from_id,
                    to_id,
                    relation_type,
                    content,
                    category,
                    entity_id,
                    0 as depth,
                    id as root_id,
                    created_at,
                    created_at as relation_date,
                    0 as is_incoming
                FROM search_index base
                WHERE (base.type, base.id) IN ({values})
                {date_filter}

                UNION -- Changed from UNION ALL

                -- Get relations from current entities
                SELECT DISTINCT
                    r.id,
                    r.type,
                    r.title,
                    r.permalink,
                    r.file_path,
                    r.from_id,
                    r.to_id,
                    r.relation_type,
                    r.content,
                    r.category,
                    r.entity_id,
                    cg.depth + 1,
                    cg.root_id,
                    r.created_at,
                    r.created_at as relation_date,
                    CASE WHEN r.from_id = cg.id THEN 0 ELSE 1 END as is_incoming
                FROM context_graph cg
                JOIN search_index r ON (
                    cg.type = 'entity' AND
                    r.type = 'relation' AND
                    (r.from_id = cg.id OR r.to_id = cg.id)
                    {r1_date_filter}
                )
                WHERE cg.depth < :max_depth

                UNION -- Changed from UNION ALL

                -- Get entities connected by relations
                SELECT DISTINCT
                    e.id,
                    e.type,
                    e.title,
                    e.permalink,
                    e.file_path,
                    e.from_id,
                    e.to_id,
                    e.relation_type,
                    e.content,
                    e.category,
                    e.entity_id,
                    cg.depth,
                    cg.root_id,
                    e.created_at,
                    cg.relation_date,
                    cg.is_incoming
                FROM context_graph cg
                JOIN search_index e ON (
                    cg.type = 'relation' AND
                    e.type = 'entity' AND
                    e.id = CASE
                        WHEN cg.from_id = cg.id THEN cg.to_id
                        ELSE cg.from_id
                    END
                    {related_date_filter}
                )
                WHERE cg.depth < :max_depth
            )
            SELECT DISTINCT
                type,
                id,
                title,
                permalink,
                file_path,
                from_id,
                to_id,
                relation_type,
                content,
                category,
                entity_id,
                MIN(depth) as depth,
                root_id,
                created_at
            FROM context_graph
            WHERE (type, id) NOT IN ({values})
            GROUP BY
                type, id, title, permalink, from_id, to_id,
                relation_type, category, entity_id,
                root_id, created_at
            ORDER BY depth, type, id
            LIMIT :max_results
        """)

        result = await self.search_repository.execute_query(query, params=params)
        rows = result.all()

        # Map raw rows onto the typed dataclass for callers.
        context_rows = [
            ContextResultRow(
                type=row.type,
                id=row.id,
                title=row.title,
                permalink=row.permalink,
                file_path=row.file_path,
                from_id=row.from_id,
                to_id=row.to_id,
                relation_type=row.relation_type,
                content=row.content,
                category=row.category,
                entity_id=row.entity_id,
                depth=row.depth,
                root_id=row.root_id,
                created_at=row.created_at,
            )
            for row in rows
        ]
        return context_rows
@@ -0,0 +1,281 @@
1
+ """Service for managing entities in the database."""
2
+
3
+ from pathlib import Path
4
+ from typing import Sequence, List, Optional
5
+
6
+ import frontmatter
7
+ from frontmatter import Post
8
+ from loguru import logger
9
+ from sqlalchemy.exc import IntegrityError
10
+
11
+ from basic_memory.markdown import EntityMarkdown
12
+ from basic_memory.markdown.utils import entity_model_from_markdown, schema_to_markdown
13
+ from basic_memory.models import Entity as EntityModel, Observation, Relation
14
+ from basic_memory.repository import ObservationRepository, RelationRepository
15
+ from basic_memory.repository.entity_repository import EntityRepository
16
+ from basic_memory.schemas import Entity as EntitySchema
17
+ from basic_memory.services.exceptions import EntityNotFoundError, EntityCreationError
18
+ from basic_memory.services import FileService
19
+ from basic_memory.services import BaseService
20
+ from basic_memory.services.link_resolver import LinkResolver
21
+ from basic_memory.markdown.entity_parser import EntityParser
22
+
23
+
24
+ class EntityService(BaseService[EntityModel]):
25
+ """Service for managing entities in the database."""
26
+
27
+ def __init__(
28
+ self,
29
+ entity_parser: EntityParser,
30
+ entity_repository: EntityRepository,
31
+ observation_repository: ObservationRepository,
32
+ relation_repository: RelationRepository,
33
+ file_service: FileService,
34
+ link_resolver: LinkResolver,
35
+ ):
36
+ super().__init__(entity_repository)
37
+ self.observation_repository = observation_repository
38
+ self.relation_repository = relation_repository
39
+ self.entity_parser = entity_parser
40
+ self.file_service = file_service
41
+ self.link_resolver = link_resolver
42
+
43
+ async def create_or_update_entity(self, schema: EntitySchema) -> (EntityModel, bool):
44
+ """Create new entity or update existing one.
45
+ if a new entity is created, the return value is (entity, True)
46
+ """
47
+
48
+ logger.debug(f"Creating or updating entity: {schema}")
49
+
50
+ # Try to find existing entity using smart resolution
51
+ existing = await self.link_resolver.resolve_link(schema.permalink)
52
+
53
+ if existing:
54
+ logger.debug(f"Found existing entity: {existing.permalink}")
55
+ return await self.update_entity(existing, schema), False
56
+ else:
57
+ # Create new entity
58
+ return await self.create_entity(schema), True
59
+
60
+ async def create_entity(self, schema: EntitySchema) -> EntityModel:
61
+ """Create a new entity and write to filesystem."""
62
+ logger.debug(f"Creating entity: {schema.permalink}")
63
+
64
+ # get file path
65
+ file_path = Path(schema.file_path)
66
+
67
+ if await self.file_service.exists(file_path):
68
+ raise EntityCreationError(
69
+ f"file_path {file_path} for entity {schema.permalink} already exists: {file_path}"
70
+ )
71
+
72
+ post = await schema_to_markdown(schema)
73
+
74
+ # write file
75
+ final_content = frontmatter.dumps(post, sort_keys=False)
76
+ checksum = await self.file_service.write_file(file_path, final_content)
77
+
78
+ # parse entity from file
79
+ entity_markdown = await self.entity_parser.parse_file(file_path)
80
+
81
+ # create entity
82
+ created_entity = await self.create_entity_from_markdown(
83
+ file_path, entity_markdown
84
+ )
85
+
86
+ # add relations
87
+ entity = await self.update_entity_relations(file_path, entity_markdown)
88
+
89
+ # Set final checksum to mark complete
90
+ return await self.repository.update(entity.id, {"checksum": checksum})
91
+
92
+
93
+ async def update_entity(self, entity: EntityModel, schema: EntitySchema) -> EntityModel:
94
+ """Update an entity's content and metadata."""
95
+ logger.debug(f"Updating entity with permalink: {entity.permalink}")
96
+
97
+ # get file path
98
+ file_path = Path(entity.file_path)
99
+
100
+ post = await schema_to_markdown(schema)
101
+
102
+ # write file
103
+ final_content = frontmatter.dumps(post)
104
+ checksum = await self.file_service.write_file(file_path, final_content)
105
+
106
+ # parse entity from file
107
+ entity_markdown = await self.entity_parser.parse_file(file_path)
108
+
109
+ # update entity in db
110
+ entity = await self.update_entity_and_observations(
111
+ file_path, entity_markdown
112
+ )
113
+
114
+ # add relations
115
+ await self.update_entity_relations(file_path, entity_markdown)
116
+
117
+ # Set final checksum to match file
118
+ entity = await self.repository.update(entity.id, {"checksum": checksum})
119
+
120
+ return entity
121
+
122
+ async def delete_entity(self, permalink: str) -> bool:
123
+ """Delete entity and its file."""
124
+ logger.debug(f"Deleting entity: {permalink}")
125
+
126
+ try:
127
+ # Get entity first for file deletion
128
+ entity = await self.get_by_permalink(permalink)
129
+
130
+ # Delete file first
131
+ await self.file_service.delete_entity_file(entity)
132
+
133
+ # Delete from DB (this will cascade to observations/relations)
134
+ return await self.repository.delete(entity.id)
135
+
136
+ except EntityNotFoundError:
137
+ logger.info(f"Entity not found: {permalink}")
138
+ return True # Already deleted
139
+
140
+ except Exception as e:
141
+ logger.error(f"Failed to delete entity: {e}")
142
+ raise
143
+
144
+ async def get_by_permalink(self, permalink: str) -> EntityModel:
145
+ """Get entity by type and name combination."""
146
+ logger.debug(f"Getting entity by permalink: {permalink}")
147
+ db_entity = await self.repository.get_by_permalink(permalink)
148
+ if not db_entity:
149
+ raise EntityNotFoundError(f"Entity not found: {permalink}")
150
+ return db_entity
151
+
152
+ async def get_all(self) -> Sequence[EntityModel]:
153
+ """Get all entities."""
154
+ return await self.repository.find_all()
155
+
156
+ async def get_entity_types(self) -> List[str]:
157
+ """Get list of all distinct entity types in the system."""
158
+ logger.debug("Getting all distinct entity types")
159
+ return await self.repository.get_entity_types()
160
+
161
+ async def list_entities(
162
+ self,
163
+ entity_type: Optional[str] = None,
164
+ sort_by: Optional[str] = "updated_at",
165
+ include_related: bool = False,
166
+ ) -> Sequence[EntityModel]:
167
+ """List entities with optional filtering and sorting."""
168
+ logger.debug(f"Listing entities: type={entity_type} sort={sort_by}")
169
+ return await self.repository.list_entities(entity_type=entity_type, sort_by=sort_by)
170
+
171
+ async def get_entities_by_permalinks(self, permalinks: List[str]) -> Sequence[EntityModel]:
172
+ """Get specific nodes and their relationships."""
173
+ logger.debug(f"Getting entities permalinks: {permalinks}")
174
+ return await self.repository.find_by_permalinks(permalinks)
175
+
176
+ async def delete_entity_by_file_path(self, file_path):
177
+ await self.repository.delete_by_file_path(file_path)
178
+
179
+ async def create_entity_from_markdown(
180
+ self, file_path: Path, markdown: EntityMarkdown
181
+ ) -> EntityModel:
182
+ """Create entity and observations only.
183
+
184
+ Creates the entity with null checksum to indicate sync not complete.
185
+ Relations will be added in second pass.
186
+ """
187
+ logger.debug(f"Creating entity: {markdown.frontmatter.title}")
188
+ model = entity_model_from_markdown(file_path, markdown)
189
+
190
+ # Mark as incomplete sync
191
+ model.checksum = None
192
+ return await self.add(model)
193
+
194
+ async def update_entity_and_observations(
195
+ self, file_path: Path | str, markdown: EntityMarkdown
196
+ ) -> EntityModel:
197
+ """Update entity fields and observations.
198
+
199
+ Updates everything except relations and sets null checksum
200
+ to indicate sync not complete.
201
+ """
202
+ logger.debug(f"Updating entity and observations: {file_path}")
203
+ file_path = str(file_path)
204
+
205
+ db_entity = await self.repository.get_by_file_path(file_path)
206
+ if not db_entity:
207
+ raise EntityNotFoundError(f"Entity not found: {file_path}")
208
+
209
+ # Clear observations for entity
210
+ await self.observation_repository.delete_by_fields(entity_id=db_entity.id)
211
+
212
+ # add new observations
213
+ observations = [
214
+ Observation(
215
+ entity_id=db_entity.id,
216
+ content=obs.content,
217
+ category=obs.category,
218
+ context=obs.context,
219
+ tags=obs.tags,
220
+ )
221
+ for obs in markdown.observations
222
+ ]
223
+ await self.observation_repository.add_all(observations)
224
+
225
+ # update values from markdown
226
+ db_entity = entity_model_from_markdown(file_path, markdown, db_entity)
227
+
228
+ # checksum value is None == not finished with sync
229
+ db_entity.checksum = None
230
+
231
+ # update entity
232
+ # checksum value is None == not finished with sync
233
+ return await self.repository.update(
234
+ db_entity.id,
235
+ db_entity,
236
+ )
237
+
238
+ async def update_entity_relations(
239
+ self,
240
+ file_path: Path | str,
241
+ markdown: EntityMarkdown,
242
+ ) -> EntityModel:
243
+ """Update relations for entity"""
244
+ logger.debug(f"Updating relations for entity: {file_path}")
245
+
246
+ file_path = str(file_path)
247
+ db_entity = await self.repository.get_by_file_path(file_path)
248
+
249
+ # Clear existing relations first
250
+ await self.relation_repository.delete_outgoing_relations_from_entity(db_entity.id)
251
+
252
+ # Process each relation
253
+ for rel in markdown.relations:
254
+ # Resolve the target permalink
255
+ target_entity = await self.link_resolver.resolve_link(
256
+ rel.target,
257
+ )
258
+
259
+ # if the target is found, store the id
260
+ target_id = target_entity.id if target_entity else None
261
+ # if the target is found, store the title, otherwise add the target for a "forward link"
262
+ target_name = target_entity.title if target_entity else rel.target
263
+
264
+ # Create the relation
265
+ relation = Relation(
266
+ from_id=db_entity.id,
267
+ to_id=target_id,
268
+ to_name=target_name,
269
+ relation_type=rel.type,
270
+ context=rel.context,
271
+ )
272
+ try:
273
+ await self.relation_repository.add(relation)
274
+ except IntegrityError:
275
+ # Unique constraint violation - relation already exists
276
+ logger.debug(
277
+ f"Skipping duplicate relation {rel.type} from {db_entity.permalink} target: {rel.target}, type: {rel.type}"
278
+ )
279
+ continue
280
+
281
+ return await self.repository.get_by_file_path(file_path)
@@ -0,0 +1,15 @@
1
class FileOperationError(Exception):
    """Signals a failure while reading, writing, or deleting a file."""
5
+
6
+
7
class EntityNotFoundError(Exception):
    """Signals that a lookup for an entity produced no match."""
11
+
12
class EntityCreationError(Exception):
    """Signals that creating a new entity failed."""