basic-memory 0.0.0__py3-none-any.whl → 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of basic-memory might be problematic. Click here for more details.

@@ -181,6 +181,9 @@ class Entity(BaseModel):
181
181
  - Optional relations to other entities
182
182
  - Optional description for high-level overview
183
183
  """
184
+
185
+ # private field to override permalink
186
+ _permalink: Optional[str] = None
184
187
 
185
188
  title: str
186
189
  content: Optional[str] = None
@@ -199,8 +202,8 @@ class Entity(BaseModel):
199
202
 
200
203
  @property
201
204
  def permalink(self) -> PathId:
202
- """Get the path ID in format {snake_case_title}."""
203
- return generate_permalink(self.file_path)
205
+ """Get a url friendly path}."""
206
+ return self._permalink or generate_permalink(self.file_path)
204
207
 
205
208
  @model_validator(mode="after")
206
209
  @classmethod
@@ -59,6 +59,7 @@ def memory_url_path(url: memory_url) -> str:
59
59
  class EntitySummary(BaseModel):
60
60
  """Simplified entity representation."""
61
61
 
62
+ type: str = "entity"
62
63
  permalink: str
63
64
  title: str
64
65
  file_path: str
@@ -68,8 +69,9 @@ class EntitySummary(BaseModel):
68
69
  class RelationSummary(BaseModel):
69
70
  """Simplified relation representation."""
70
71
 
72
+ type: str = "relation"
71
73
  permalink: str
72
- type: str
74
+ relation_type: str
73
75
  from_id: str
74
76
  to_id: Optional[str] = None
75
77
 
@@ -77,6 +79,7 @@ class RelationSummary(BaseModel):
77
79
  class ObservationSummary(BaseModel):
78
80
  """Simplified observation representation."""
79
81
 
82
+ type: str = "observation"
80
83
  permalink: str
81
84
  category: str
82
85
  content: str
@@ -1,5 +1,5 @@
1
1
  """Services package."""
2
-
2
+ from .database_service import DatabaseService
3
3
  from .service import BaseService
4
4
  from .file_service import FileService
5
5
  from .entity_service import EntityService
@@ -8,4 +8,5 @@ __all__ = [
8
8
  "BaseService",
9
9
  "FileService",
10
10
  "EntityService",
11
+ "DatabaseService"
11
12
  ]
@@ -76,7 +76,7 @@ class ContextService:
76
76
  primary = await self.search_repository.search(permalink=path)
77
77
  else:
78
78
  logger.debug(f"Build context for '{types}'")
79
- primary = await self.search_repository.search(types=types)
79
+ primary = await self.search_repository.search(types=types, after_date=since)
80
80
 
81
81
  # Get type_id pairs for traversal
82
82
 
@@ -0,0 +1,158 @@
1
+ """Service for managing database lifecycle and schema validation."""
2
+
3
+ from datetime import datetime
4
+ from pathlib import Path
5
+ from typing import Optional, Tuple, List
6
+
7
+ from alembic.runtime.migration import MigrationContext
8
+ from alembic.autogenerate import compare_metadata
9
+ from loguru import logger
10
+ from sqlalchemy import MetaData
11
+ from sqlalchemy.ext.asyncio import AsyncSession
12
+
13
+ from basic_memory import db
14
+ from basic_memory.config import ProjectConfig
15
+ from basic_memory.models import Base
16
+
17
+
18
+ async def check_schema_matches_models(session: AsyncSession) -> Tuple[bool, List[str]]:
19
+ """Check if database schema matches SQLAlchemy models.
20
+
21
+ Returns:
22
+ tuple[bool, list[str]]: (matches, list of differences)
23
+ """
24
+ # Get current DB schema via migration context
25
+ conn = await session.connection()
26
+
27
+ def _compare_schemas(connection):
28
+ context = MigrationContext.configure(connection)
29
+ return compare_metadata(context, Base.metadata)
30
+
31
+ # Run comparison in sync context
32
+ differences = await conn.run_sync(_compare_schemas)
33
+
34
+ if not differences:
35
+ return True, []
36
+
37
+ # Format differences into readable messages
38
+ diff_messages = []
39
+ for diff in differences:
40
+ if diff[0] == 'add_table':
41
+ diff_messages.append(f"Missing table: {diff[1].name}")
42
+ elif diff[0] == 'remove_table':
43
+ diff_messages.append(f"Extra table: {diff[1].name}")
44
+ elif diff[0] == 'add_column':
45
+ diff_messages.append(f"Missing column: {diff[3]} in table {diff[2]}")
46
+ elif diff[0] == 'remove_column':
47
+ diff_messages.append(f"Extra column: {diff[3]} in table {diff[2]}")
48
+ elif diff[0] == 'modify_type':
49
+ diff_messages.append(f"Column type mismatch: {diff[3]} in table {diff[2]}")
50
+
51
+ return False, diff_messages
52
+
53
+
54
+ class DatabaseService:
55
+ """Manages database lifecycle including schema validation and backups."""
56
+
57
+ def __init__(
58
+ self,
59
+ config: ProjectConfig,
60
+ db_type: db.DatabaseType = db.DatabaseType.FILESYSTEM,
61
+ ):
62
+ self.config = config
63
+ self.db_path = Path(config.database_path)
64
+ self.db_type = db_type
65
+
66
+ async def create_backup(self) -> Optional[Path]:
67
+ """Create backup of existing database file.
68
+
69
+ Returns:
70
+ Optional[Path]: Path to backup file if created, None if no DB exists
71
+ """
72
+ if self.db_type == db.DatabaseType.MEMORY:
73
+ return None # Skip backups for in-memory DB
74
+
75
+ if not self.db_path.exists():
76
+ return None
77
+
78
+ # Create backup with timestamp
79
+ timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
80
+ backup_path = self.db_path.with_suffix(f".{timestamp}.backup")
81
+
82
+ try:
83
+ self.db_path.rename(backup_path)
84
+ logger.info(f"Created database backup: {backup_path}")
85
+
86
+ # make a new empty file
87
+ self.db_path.touch()
88
+ return backup_path
89
+ except Exception as e:
90
+ logger.error(f"Failed to create database backup: {e}")
91
+ return None
92
+
93
+ async def initialize_db(self):
94
+ """Initialize database with current schema."""
95
+ logger.info("Initializing database...")
96
+
97
+ if self.db_type == db.DatabaseType.FILESYSTEM:
98
+ await self.create_backup()
99
+
100
+ # Drop existing tables if any
101
+ await db.drop_db()
102
+
103
+ # Create tables with current schema
104
+ await db.get_or_create_db(
105
+ db_path=self.db_path,
106
+ db_type=self.db_type
107
+ )
108
+
109
+ logger.info("Database initialized with current schema")
110
+
111
+ async def check_db(self) -> bool:
112
+ """Check database state and rebuild if schema doesn't match models.
113
+
114
+ Returns:
115
+ bool: True if DB is ready for use, False if initialization failed
116
+ """
117
+ try:
118
+ _, session_maker = await db.get_or_create_db(
119
+ db_path=self.db_path,
120
+ db_type=self.db_type
121
+ )
122
+ async with db.scoped_session(session_maker) as db_session:
123
+ # Check actual schema matches
124
+ matches, differences = await check_schema_matches_models(db_session)
125
+ if not matches:
126
+ logger.warning("Database schema does not match models:")
127
+ for diff in differences:
128
+ logger.warning(f" {diff}")
129
+ logger.info("Rebuilding database to match current models...")
130
+ await self.initialize_db()
131
+ return True
132
+
133
+ logger.info("Database schema matches models")
134
+ return True
135
+
136
+ except Exception as e:
137
+ logger.error(f"Database initialization failed: {e}")
138
+ return False
139
+
140
+ async def cleanup_backups(self, keep_count: int = 5):
141
+ """Clean up old database backups, keeping the N most recent."""
142
+ if self.db_type == db.DatabaseType.MEMORY:
143
+ return # Skip cleanup for in-memory DB
144
+
145
+ backup_pattern = "*.backup" # Use relative pattern
146
+ backups = sorted(
147
+ self.db_path.parent.glob(backup_pattern),
148
+ key=lambda p: p.stat().st_mtime,
149
+ reverse=True,
150
+ )
151
+
152
+ # Remove old backups
153
+ for backup in backups[keep_count:]:
154
+ try:
155
+ backup.unlink()
156
+ logger.debug(f"Removed old backup: {backup}")
157
+ except Exception as e:
158
+ logger.error(f"Failed to remove backup {backup}: {e}")
@@ -19,6 +19,7 @@ from basic_memory.services import FileService
19
19
  from basic_memory.services import BaseService
20
20
  from basic_memory.services.link_resolver import LinkResolver
21
21
  from basic_memory.markdown.entity_parser import EntityParser
22
+ from basic_memory.utils import generate_permalink
22
23
 
23
24
 
24
25
  class EntityService(BaseService[EntityModel]):
@@ -40,6 +41,51 @@ class EntityService(BaseService[EntityModel]):
40
41
  self.file_service = file_service
41
42
  self.link_resolver = link_resolver
42
43
 
44
+ async def resolve_permalink(
45
+ self,
46
+ file_path: Path,
47
+ markdown: Optional[EntityMarkdown] = None
48
+ ) -> str:
49
+ """Get or generate unique permalink for an entity.
50
+
51
+ Priority:
52
+ 1. If markdown has permalink and it's not used by another file -> use as is
53
+ 2. If markdown has permalink but it's used by another file -> make unique
54
+ 3. For existing files, keep current permalink from db
55
+ 4. Generate new unique permalink from file path
56
+ """
57
+ file_path = str(file_path)
58
+
59
+ # If markdown has explicit permalink, try to validate it
60
+ if markdown and markdown.frontmatter.permalink:
61
+ desired_permalink = markdown.frontmatter.permalink
62
+ existing = await self.repository.get_by_permalink(desired_permalink)
63
+
64
+ # If no conflict or it's our own file, use as is
65
+ if not existing or existing.file_path == file_path:
66
+ return desired_permalink
67
+
68
+ # For existing files, try to find current permalink
69
+ existing = await self.repository.get_by_file_path(file_path)
70
+ if existing:
71
+ return existing.permalink
72
+
73
+ # New file - generate permalink
74
+ if markdown and markdown.frontmatter.permalink:
75
+ desired_permalink = markdown.frontmatter.permalink
76
+ else:
77
+ desired_permalink = generate_permalink(file_path)
78
+
79
+ # Make unique if needed
80
+ permalink = desired_permalink
81
+ suffix = 1
82
+ while await self.repository.get_by_permalink(permalink):
83
+ permalink = f"{desired_permalink}-{suffix}"
84
+ suffix += 1
85
+ logger.debug(f"creating unique permalink: {permalink}")
86
+
87
+ return permalink
88
+
43
89
  async def create_or_update_entity(self, schema: EntitySchema) -> (EntityModel, bool):
44
90
  """Create new entity or update existing one.
45
91
  if a new entity is created, the return value is (entity, True)
@@ -66,9 +112,13 @@ class EntityService(BaseService[EntityModel]):
66
112
 
67
113
  if await self.file_service.exists(file_path):
68
114
  raise EntityCreationError(
69
- f"file_path {file_path} for entity {schema.permalink} already exists: {file_path}"
115
+ f"file for entity {schema.folder}/{schema.title} already exists: {file_path}"
70
116
  )
71
117
 
118
+ # Get unique permalink
119
+ permalink = await self.resolve_permalink(schema.permalink or file_path)
120
+ schema._permalink = permalink
121
+
72
122
  post = await schema_to_markdown(schema)
73
123
 
74
124
  # write file
@@ -184,7 +234,7 @@ class EntityService(BaseService[EntityModel]):
184
234
  Creates the entity with null checksum to indicate sync not complete.
185
235
  Relations will be added in second pass.
186
236
  """
187
- logger.debug(f"Creating entity: {markdown.frontmatter.title}")
237
+ logger.debug(f"Creating entity: {markdown.frontmatter.title}")
188
238
  model = entity_model_from_markdown(file_path, markdown)
189
239
 
190
240
  # Mark as incomplete sync
@@ -35,7 +35,6 @@ class FileService:
35
35
  """Generate absolute filesystem path for entity."""
36
36
  return self.base_path / f"{entity.file_path}"
37
37
 
38
- # TODO move to tests
39
38
  async def write_entity_file(
40
39
  self,
41
40
  entity: EntityModel,
@@ -5,6 +5,7 @@ from typing import Dict
5
5
 
6
6
  from loguru import logger
7
7
 
8
+ from basic_memory import file_utils
8
9
  from basic_memory.markdown import EntityParser, EntityMarkdown
9
10
  from basic_memory.repository import EntityRepository, RelationRepository
10
11
  from basic_memory.services import EntityService
@@ -92,8 +93,32 @@ class SyncService:
92
93
  # First pass: Create/update entities
93
94
  # entities will have a null checksum to indicate they are not complete
94
95
  for file_path, entity_markdown in parsed_entities.items():
96
+
97
+ # Get unique permalink and update markdown if needed
98
+ permalink = await self.entity_service.resolve_permalink(
99
+ file_path,
100
+ markdown=entity_markdown
101
+ )
102
+
103
+ if permalink != entity_markdown.frontmatter.permalink:
104
+ # Add/update permalink in frontmatter
105
+ logger.info(f"Adding permalink '{permalink}' to file: {file_path}")
106
+
107
+ # update markdown
108
+ entity_markdown.frontmatter.metadata["permalink"] = permalink
109
+
110
+ # update file frontmatter
111
+ updated_checksum = await file_utils.update_frontmatter(
112
+ directory / file_path,
113
+ {"permalink": permalink}
114
+ )
115
+
116
+ # Update checksum in changes report since file was modified
117
+ changes.checksums[file_path] = updated_checksum
118
+
95
119
  # if the file is new, create an entity
96
120
  if file_path in changes.new:
121
+ # Create entity with final permalink
97
122
  logger.debug(f"Creating new entity_markdown: {file_path}")
98
123
  await self.entity_service.create_entity_from_markdown(
99
124
  file_path, entity_markdown
@@ -0,0 +1,296 @@
1
+ Metadata-Version: 2.4
2
+ Name: basic-memory
3
+ Version: 0.1.0
4
+ Summary: Local-first knowledge management combining Zettelkasten with knowledge graphs
5
+ Project-URL: Homepage, https://github.com/basicmachines-co/basic-memory
6
+ Project-URL: Repository, https://github.com/basicmachines-co/basic-memory
7
+ Project-URL: Documentation, https://github.com/basicmachines-co/basic-memory#readme
8
+ Author-email: Basic Machines <hello@basic-machines.co>
9
+ License: AGPL-3.0-or-later
10
+ License-File: LICENSE
11
+ Requires-Python: >=3.12.1
12
+ Requires-Dist: aiosqlite>=0.20.0
13
+ Requires-Dist: alembic>=1.14.1
14
+ Requires-Dist: dateparser>=1.2.0
15
+ Requires-Dist: fastapi[standard]>=0.115.8
16
+ Requires-Dist: greenlet>=3.1.1
17
+ Requires-Dist: icecream>=2.1.3
18
+ Requires-Dist: loguru>=0.7.3
19
+ Requires-Dist: markdown-it-py>=3.0.0
20
+ Requires-Dist: mcp>=1.2.0
21
+ Requires-Dist: pydantic-settings>=2.6.1
22
+ Requires-Dist: pydantic[email,timezone]>=2.10.3
23
+ Requires-Dist: pyright>=1.1.390
24
+ Requires-Dist: python-frontmatter>=1.1.0
25
+ Requires-Dist: pyyaml>=6.0.1
26
+ Requires-Dist: rich>=13.9.4
27
+ Requires-Dist: sqlalchemy>=2.0.0
28
+ Requires-Dist: typer>=0.9.0
29
+ Requires-Dist: unidecode>=1.3.8
30
+ Requires-Dist: watchfiles>=1.0.4
31
+ Provides-Extra: dev
32
+ Requires-Dist: pytest-asyncio>=0.24.0; extra == 'dev'
33
+ Requires-Dist: pytest-cov>=4.1.0; extra == 'dev'
34
+ Requires-Dist: pytest-mock>=3.12.0; extra == 'dev'
35
+ Requires-Dist: pytest>=8.3.4; extra == 'dev'
36
+ Requires-Dist: ruff>=0.1.6; extra == 'dev'
37
+ Description-Content-Type: text/markdown
38
+
39
+ # Basic Memory
40
+
41
+ Basic Memory lets you build persistent knowledge through natural conversations with Large Language Models (LLMs) like
42
+ Claude, while keeping everything in simple markdown files on your computer. It uses the Model Context Protocol (MCP) to
43
+ enable any compatible LLM to read and write to your local knowledge base.
44
+
45
+ ## What is Basic Memory?
46
+
47
+ Most people use LLMs like calculators - paste in some text, expect to get an answer back, repeat. Each conversation
48
+ starts fresh,
49
+ and any knowledge or context is lost. Some try to work around this by:
50
+
51
+ - Saving chat histories (but they're hard to reference)
52
+ - Copying and pasting previous conversations (messy and repetitive)
53
+ - Using RAG systems to query documents (complex and often cloud-based)
54
+
55
+ Basic Memory takes a different approach by letting both humans and LLMs read and write knowledge naturally using
56
+ standard markdown files. This means:
57
+
58
+ - Your knowledge stays in files you control
59
+ - Both you and the LLM can read and write notes
60
+ - Context persists across conversations
61
+ - Context stays local and user controlled
62
+
63
+ ## How It Works in Practice
64
+
65
+ Let's say you're working on a new project and want to capture design decisions. Here's how it works:
66
+
67
+ 1. Start by chatting normally:
68
+
69
+ ```markdown
70
+ We need to design a new auth system, some key features:
71
+
72
+ - local first, don't delegate users to third party system
73
+ - support multiple platforms via jwt
74
+ - want to keep it simple but secure
75
+ ```
76
+
77
+ ... continue conversation.
78
+
79
+ 2. Ask Claude to help structure this knowledge:
80
+
81
+ ```
82
+ "Lets write a note about the auth system design."
83
+ ```
84
+
85
+ Claude creates a new markdown file on your system (which you can see instantly in Obsidian or your editor):
86
+
87
+ ```markdown
88
+ ---
89
+ title: Auth System Design
90
+ permalink: auth-system-design
91
+ tags:
92
+ - design
93
+ - auth
94
+ ---
95
+
96
+ # Auth System Design
97
+
98
+ ## Observations
99
+
100
+ - [requirement] Local-first authentication without third party delegation
101
+ - [tech] JWT-based auth for cross-platform support
102
+ - [principle] Balance simplicity with security
103
+
104
+ ## Relations
105
+
106
+ - implements [[Security Requirements]]
107
+ - relates_to [[Platform Support]]
108
+ - referenced_by [[JWT Implementation]]
109
+ ```
110
+
111
+ The note embeds semantic content (Observations) and links to other topics (Relations) via simple markdown formatting.
112
+
113
+ 3. You can edit this file directly in your editor in real time:
114
+
115
+ ```markdown
116
+ # Auth System Design
117
+
118
+ ## Observations
119
+
120
+ - [requirement] Local-first authentication without third party delegation
121
+ - [tech] JWT-based auth for cross-platform support
122
+ - [principle] Balance simplicity with security
123
+ - [decision] Will use bcrypt for password hashing # Added by you
124
+
125
+ ## Relations
126
+
127
+ - implements [[Security Requirements]]
128
+ - relates_to [[Platform Support]]
129
+ - referenced_by [[JWT Implementation]]
130
+ - blocks [[User Service]] # Added by you
131
+ ```
132
+
133
+ 4. In a new chat with Claude, you can reference this knowledge:
134
+
135
+ ```
136
+ "Claude, look at memory://auth-system-design for context about our auth system"
137
+ ```
138
+
139
+ Claude can now build rich context from the knowledge graph. For example:
140
+
141
+ ```
142
+ Following relation 'implements [[Security Requirements]]':
143
+ - Found authentication best practices
144
+ - OWASP guidelines for JWT
145
+ - Rate limiting requirements
146
+
147
+ Following relation 'relates_to [[Platform Support]]':
148
+ - Mobile auth requirements
149
+ - Browser security considerations
150
+ - JWT storage strategies
151
+ ```
152
+
153
+ Each related document can lead to more context, building a rich semantic understanding of your knowledge base. All of
154
+ this context comes from standard markdown files that both humans and LLMs can read and write.
155
+
156
+ Everything stays in local markdown files that you can:
157
+
158
+ - Edit in any text editor
159
+ - Version via git
160
+ - Back up normally
161
+ - Share when you want to
162
+
163
+ ## Technical Implementation
164
+
165
+ Under the hood, Basic Memory:
166
+
167
+ 1. Stores everything in markdown files
168
+ 2. Uses a SQLite database just for searching and indexing
169
+ 3. Extracts semantic meaning from simple markdown patterns
170
+ 4. Maintains a local knowledge graph from file content
171
+
172
+ The file format is just markdown with some simple markup:
173
+
174
+ Frontmatter
175
+
176
+ - title
177
+ - type
178
+ - permalink
179
+ - optional metadata
180
+
181
+ Observations
182
+
183
+ - facts about a topic
184
+
185
+ ```markdown
186
+ - [category] content #tag (optional context)
187
+ ```
188
+
189
+ Relations
190
+
191
+ - links to other topics
192
+
193
+ ```markdown
194
+ - relation_type [[WikiLink]] (optional context)
195
+ ```
196
+
197
+ Example:
198
+
199
+ ```markdown
200
+ ---
201
+ title: Note title
202
+ type: note
203
+ permalink: unique/stable/id # Added automatically
204
+ tags:
205
+ - tag1
206
+ - tag2
207
+ ---
208
+
209
+ # Note Title
210
+
211
+ Regular markdown content...
212
+
213
+ ## Observations
214
+
215
+ - [category] Structured knowledge #tag (optional context)
216
+ - [idea] Another observation
217
+
218
+ ## Relations
219
+
220
+ - links_to [[Other Note]]
221
+ - implements [[Some Spec]]
222
+ ```
223
+
224
+ Basic Memory will parse the markdown and derive the semantic relationships in the content. When you run
225
+ `basic-memory sync`:
226
+
227
+ 1. New and changed files are detected
228
+ 2. Markdown patterns become semantic knowledge:
229
+
230
+ - `[tech]` becomes a categorized observation
231
+ - `[[WikiLink]]` creates a relation in the knowledge graph
232
+ - Tags and metadata are indexed for search
233
+
234
+ 3. A SQLite database maintains these relationships for fast querying
235
+ 4. Claude and other MCP-compatible LLMs can access this knowledge via memory:// URLs
236
+
237
+ This creates a two-way flow where:
238
+
239
+ - Humans write and edit markdown files
240
+ - LLMs read and write through the MCP protocol
241
+ - Sync keeps everything consistent
242
+ - All knowledge stays in local files.
243
+
244
+ ## Using with Claude
245
+
246
+ Basic Memory works with the Claude desktop app (https://claude.ai/):
247
+
248
+ 1. Install Basic Memory locally:
249
+
250
+ ```bash
251
+ {
252
+ "mcpServers": {
253
+ "basic-memory": {
254
+ "command": "uvx",
255
+ "args": [
256
+ "basic-memory"
257
+ ]
258
+ }
259
+ }
260
+ ```
261
+
262
+ 2. Add to Claude Desktop:
263
+
264
+ ```
265
+ Basic Memory is available with these tools:
266
+ - write_note() for creating/updating notes
267
+ - read_note() for loading notes
268
+ - build_context() to load notes via memory:// URLs
269
+ - recent_activity() to find recently updated information
270
+ - search() to search information in the knowledge base
271
+ ```
272
+
273
+ 3. Install via uv
274
+
275
+ ```bash
276
+ uv add basic-memory
277
+
278
+ # sync local knowledge updates
279
+ basic-memory sync
280
+
281
+ # run realtime sync process
282
+ basic-memory sync --watch
283
+ ```
284
+
285
+ ## Design Philosophy
286
+
287
+ Basic Memory is built on some key ideas:
288
+
289
+ - Your knowledge should stay in files you control
290
+ - Both humans and AI should use natural formats
291
+ - Simple text patterns can capture rich meaning
292
+ - Local-first doesn't mean feature-poor
293
+
294
+ ## License
295
+
296
+ AGPL-3.0