basic-memory 0.13.5__py3-none-any.whl → 0.13.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of basic-memory might be problematic.

basic_memory/__init__.py CHANGED
@@ -1,7 +1,7 @@
  """basic-memory - Local-first knowledge management combining Zettelkasten with knowledge graphs"""

  # Package version - updated by release automation
- __version__ = "0.13.5"
+ __version__ = "0.13.6"

  # API version for FastAPI - independent of package version
  __api_version__ = "v0"
basic_memory/alembic/env.py CHANGED
@@ -8,12 +8,12 @@ from sqlalchemy import pool

  from alembic import context

- from basic_memory.models import Base
-
  # set config.env to "test" for pytest to prevent logging to file in utils.setup_logging()
  os.environ["BASIC_MEMORY_ENV"] = "test"

- from basic_memory.config import app_config
+ # Import after setting environment variable # noqa: E402
+ from basic_memory.config import app_config  # noqa: E402
+ from basic_memory.models import Base  # noqa: E402

  # this is the Alembic Config object, which provides
  # access to the values within the .ini file in use.
basic_memory/db.py CHANGED
@@ -23,6 +23,7 @@ from basic_memory.repository.search_repository import SearchRepository
  # Module level state
  _engine: Optional[AsyncEngine] = None
  _session_maker: Optional[async_sessionmaker[AsyncSession]] = None
+ _migrations_completed: bool = False


  class DatabaseType(Enum):
@@ -72,18 +73,35 @@ async def scoped_session(
          await factory.remove()


+ def _create_engine_and_session(
+     db_path: Path, db_type: DatabaseType = DatabaseType.FILESYSTEM
+ ) -> tuple[AsyncEngine, async_sessionmaker[AsyncSession]]:
+     """Internal helper to create engine and session maker."""
+     db_url = DatabaseType.get_db_url(db_path, db_type)
+     logger.debug(f"Creating engine for db_url: {db_url}")
+     engine = create_async_engine(db_url, connect_args={"check_same_thread": False})
+     session_maker = async_sessionmaker(engine, expire_on_commit=False)
+     return engine, session_maker
+
+
  async def get_or_create_db(
      db_path: Path,
      db_type: DatabaseType = DatabaseType.FILESYSTEM,
+     ensure_migrations: bool = True,
+     app_config: Optional["BasicMemoryConfig"] = None,
  ) -> tuple[AsyncEngine, async_sessionmaker[AsyncSession]]:  # pragma: no cover
      """Get or create database engine and session maker."""
      global _engine, _session_maker

      if _engine is None:
-         db_url = DatabaseType.get_db_url(db_path, db_type)
-         logger.debug(f"Creating engine for db_url: {db_url}")
-         _engine = create_async_engine(db_url, connect_args={"check_same_thread": False})
-         _session_maker = async_sessionmaker(_engine, expire_on_commit=False)
+         _engine, _session_maker = _create_engine_and_session(db_path, db_type)
+
+     # Run migrations automatically unless explicitly disabled
+     if ensure_migrations:
+         if app_config is None:
+             from basic_memory.config import app_config as global_app_config
+             app_config = global_app_config
+         await run_migrations(app_config, db_type)

      # These checks should never fail since we just created the engine and session maker
      # if they were None, but we'll check anyway for the type checker
@@ -100,12 +118,13 @@ async def get_or_create_db(

  async def shutdown_db() -> None:  # pragma: no cover
      """Clean up database connections."""
-     global _engine, _session_maker
+     global _engine, _session_maker, _migrations_completed

      if _engine:
          await _engine.dispose()
          _engine = None
          _session_maker = None
+         _migrations_completed = False


  @asynccontextmanager
@@ -119,7 +138,7 @@ async def engine_session_factory(
      for each test. For production use, use get_or_create_db() instead.
      """

-     global _engine, _session_maker
+     global _engine, _session_maker, _migrations_completed

      db_url = DatabaseType.get_db_url(db_path, db_type)
      logger.debug(f"Creating engine for db_url: {db_url}")
@@ -143,12 +162,20 @@
          await _engine.dispose()
          _engine = None
          _session_maker = None
+         _migrations_completed = False


  async def run_migrations(
-     app_config: BasicMemoryConfig, database_type=DatabaseType.FILESYSTEM
+     app_config: BasicMemoryConfig, database_type=DatabaseType.FILESYSTEM, force: bool = False
  ):  # pragma: no cover
      """Run any pending alembic migrations."""
+     global _migrations_completed
+
+     # Skip if migrations already completed unless forced
+     if _migrations_completed and not force:
+         logger.debug("Migrations already completed in this session, skipping")
+         return
+
      logger.info("Running database migrations...")
      try:
          # Get the absolute path to the alembic directory relative to this file
@@ -170,11 +197,18 @@ async def run_migrations(
          command.upgrade(config, "head")
          logger.info("Migrations completed successfully")

-         _, session_maker = await get_or_create_db(app_config.database_path, database_type)
+         # Get session maker - ensure we don't trigger recursive migration calls
+         if _session_maker is None:
+             _, session_maker = _create_engine_and_session(app_config.database_path, database_type)
+         else:
+             session_maker = _session_maker

          # initialize the search Index schema
          # the project_id is not used for init_search_index, so we pass a dummy value
          await SearchRepository(session_maker, 1).init_search_index()
+
+         # Mark migrations as completed
+         _migrations_completed = True
      except Exception as e:  # pragma: no cover
          logger.error(f"Error running migrations: {e}")
          raise
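Taken together, the new _create_engine_and_session helper, the ensure_migrations flag, and the _migrations_completed guard mean migrations now run at most once per process by default. A rough sketch of the intended call pattern (the open_database wrapper is invented; db, app_config, get_or_create_db, and run_migrations match the hunks above):

```python
from basic_memory import db
from basic_memory.config import app_config


async def open_database() -> None:
    # First call creates the engine and runs Alembic migrations once;
    # the module-level _migrations_completed flag then short-circuits later calls.
    engine, session_maker = await db.get_or_create_db(app_config.database_path)

    # force=True bypasses the guard when an explicit re-run is wanted.
    await db.run_migrations(app_config, force=True)
```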
basic_memory/mcp/tools/write_note.py CHANGED
@@ -27,6 +27,7 @@ async def write_note(
      content: str,
      folder: str,
      tags=None,  # Remove type hint completely to avoid schema issues
+     entity_type: str = "note",
      project: Optional[str] = None,
  ) -> str:
      """Write a markdown note to the knowledge base.
@@ -58,6 +59,7 @@
              Use forward slashes (/) as separators. Examples: "notes", "projects/2025", "research/ml"
          tags: Tags to categorize the note. Can be a list of strings, a comma-separated string, or None.
              Note: If passing from external MCP clients, use a string format (e.g. "tag1,tag2,tag3")
+         entity_type: Type of entity to create. Defaults to "note". Can be "guide", "report", "config", etc.
          project: Optional project name to write to. If not provided, uses current active project.

      Returns:
@@ -84,7 +86,7 @@
      entity = Entity(
          title=title,
          folder=folder,
-         entity_type="note",
+         entity_type=entity_type,
          content_type="text/markdown",
          content=content,
          entity_metadata=metadata,
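A hedged usage sketch of the new entity_type parameter, shown as a plain async call; the import path follows the wheel's module layout, and the title, folder, tags, and content values are invented:

```python
from basic_memory.mcp.tools.write_note import write_note


async def save_guide() -> str:
    # entity_type is new in 0.13.6 and defaults to "note".
    return await write_note(
        title="Release checklist",
        content="# Release checklist\n\n- [ ] bump version\n- [ ] run migrations",
        folder="guides",
        tags="ops,release",
        entity_type="guide",
    )
```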
basic_memory/repository/entity_repository.py CHANGED
@@ -3,10 +3,13 @@
  from pathlib import Path
  from typing import List, Optional, Sequence, Union

+ from sqlalchemy import select
+ from sqlalchemy.exc import IntegrityError
  from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker
  from sqlalchemy.orm import selectinload
  from sqlalchemy.orm.interfaces import LoaderOption

+ from basic_memory import db
  from basic_memory.models.knowledge import Entity, Observation, Relation
  from basic_memory.repository.repository import Repository

@@ -96,3 +99,153 @@ class EntityRepository(Repository[Entity]):

          result = await self.execute_query(query)
          return list(result.scalars().all())
+
+     async def upsert_entity(self, entity: Entity) -> Entity:
+         """Insert or update entity using a hybrid approach.
+
+         This method provides a cleaner alternative to the try/catch approach
+         for handling permalink and file_path conflicts. It first tries direct
+         insertion, then handles conflicts intelligently.
+
+         Args:
+             entity: The entity to insert or update
+
+         Returns:
+             The inserted or updated entity
+         """
+
+         async with db.scoped_session(self.session_maker) as session:
+             # Set project_id if applicable and not already set
+             self._set_project_id_if_needed(entity)
+
+             # Check for existing entity with same file_path first
+             existing_by_path = await session.execute(
+                 select(Entity).where(
+                     Entity.file_path == entity.file_path,
+                     Entity.project_id == entity.project_id
+                 )
+             )
+             existing_path_entity = existing_by_path.scalar_one_or_none()
+
+             if existing_path_entity:
+                 # Update existing entity with same file path
+                 for key, value in {
+                     'title': entity.title,
+                     'entity_type': entity.entity_type,
+                     'entity_metadata': entity.entity_metadata,
+                     'content_type': entity.content_type,
+                     'permalink': entity.permalink,
+                     'checksum': entity.checksum,
+                     'updated_at': entity.updated_at,
+                 }.items():
+                     setattr(existing_path_entity, key, value)
+
+                 await session.flush()
+                 # Return with relationships loaded
+                 query = (
+                     select(Entity)
+                     .where(Entity.file_path == entity.file_path)
+                     .options(*self.get_load_options())
+                 )
+                 result = await session.execute(query)
+                 found = result.scalar_one_or_none()
+                 if not found:  # pragma: no cover
+                     raise RuntimeError(f"Failed to retrieve entity after update: {entity.file_path}")
+                 return found
+
+             # No existing entity with same file_path, try insert
+             try:
+                 # Simple insert for new entity
+                 session.add(entity)
+                 await session.flush()
+
+                 # Return with relationships loaded
+                 query = (
+                     select(Entity)
+                     .where(Entity.file_path == entity.file_path)
+                     .options(*self.get_load_options())
+                 )
+                 result = await session.execute(query)
+                 found = result.scalar_one_or_none()
+                 if not found:  # pragma: no cover
+                     raise RuntimeError(f"Failed to retrieve entity after insert: {entity.file_path}")
+                 return found
+
+             except IntegrityError:
+                 # Could be either file_path or permalink conflict
+                 await session.rollback()
+
+                 # Check if it's a file_path conflict (race condition)
+                 existing_by_path_check = await session.execute(
+                     select(Entity).where(
+                         Entity.file_path == entity.file_path,
+                         Entity.project_id == entity.project_id
+                     )
+                 )
+                 race_condition_entity = existing_by_path_check.scalar_one_or_none()
+
+                 if race_condition_entity:
+                     # Race condition: file_path conflict detected after our initial check
+                     # Update the existing entity instead
+                     for key, value in {
+                         'title': entity.title,
+                         'entity_type': entity.entity_type,
+                         'entity_metadata': entity.entity_metadata,
+                         'content_type': entity.content_type,
+                         'permalink': entity.permalink,
+                         'checksum': entity.checksum,
+                         'updated_at': entity.updated_at,
+                     }.items():
+                         setattr(race_condition_entity, key, value)
+
+                     await session.flush()
+                     # Return the updated entity with relationships loaded
+                     query = (
+                         select(Entity)
+                         .where(Entity.file_path == entity.file_path)
+                         .options(*self.get_load_options())
+                     )
+                     result = await session.execute(query)
+                     found = result.scalar_one_or_none()
+                     if not found:  # pragma: no cover
+                         raise RuntimeError(f"Failed to retrieve entity after race condition update: {entity.file_path}")
+                     return found
+                 else:
+                     # Must be permalink conflict - generate unique permalink
+                     return await self._handle_permalink_conflict(entity, session)
+
+     async def _handle_permalink_conflict(self, entity: Entity, session: AsyncSession) -> Entity:
+         """Handle permalink conflicts by generating a unique permalink."""
+         base_permalink = entity.permalink
+         suffix = 1
+
+         # Find a unique permalink
+         while True:
+             test_permalink = f"{base_permalink}-{suffix}"
+             existing = await session.execute(
+                 select(Entity).where(
+                     Entity.permalink == test_permalink,
+                     Entity.project_id == entity.project_id
+                 )
+             )
+             if existing.scalar_one_or_none() is None:
+                 # Found unique permalink
+                 entity.permalink = test_permalink
+                 break
+             suffix += 1
+
+         # Insert with unique permalink (no conflict possible now)
+         session.add(entity)
+         await session.flush()
+
+         # Return the inserted entity with relationships loaded
+         query = (
+             select(Entity)
+             .where(Entity.file_path == entity.file_path)
+             .options(*self.get_load_options())
+         )
+         result = await session.execute(query)
+         found = result.scalar_one_or_none()
+         if not found:  # pragma: no cover
+             raise RuntimeError(f"Failed to retrieve entity after insert: {entity.file_path}")
+         return found
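A brief usage sketch, assuming an already constructed EntityRepository instance named repo; the Entity keyword values are placeholders, and the comments describe the conflict handling the method above implements:

```python
from basic_memory.models.knowledge import Entity


async def save(repo) -> Entity:
    entity = Entity(
        title="Release checklist",
        entity_type="guide",
        content_type="text/markdown",
        file_path="guides/release-checklist.md",
        permalink="guides/release-checklist",
    )
    # First call inserts; a later call with the same file_path updates the existing
    # row in place, and a permalink clash falls back to _handle_permalink_conflict,
    # which retries with "guides/release-checklist-1", "-2", and so on.
    return await repo.upsert_entity(entity)
```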
basic_memory/services/entity_service.py CHANGED
@@ -117,10 +117,15 @@ class EntityService(BaseService[EntityModel]):
                  f"file for entity {schema.folder}/{schema.title} already exists: {file_path}"
              )

-         # Parse content frontmatter to check for user-specified permalink
+         # Parse content frontmatter to check for user-specified permalink and entity_type
          content_markdown = None
          if schema.content and has_frontmatter(schema.content):
              content_frontmatter = parse_frontmatter(schema.content)
+
+             # If content has entity_type/type, use it to override the schema entity_type
+             if "type" in content_frontmatter:
+                 schema.entity_type = content_frontmatter["type"]
+
              if "permalink" in content_frontmatter:
                  # Create a minimal EntityMarkdown object for permalink resolution
                  from basic_memory.markdown.schemas import EntityFrontmatter
@@ -172,10 +177,15 @@ class EntityService(BaseService[EntityModel]):
          # Read existing frontmatter from the file if it exists
          existing_markdown = await self.entity_parser.parse_file(file_path)

-         # Parse content frontmatter to check for user-specified permalink
+         # Parse content frontmatter to check for user-specified permalink and entity_type
          content_markdown = None
          if schema.content and has_frontmatter(schema.content):
              content_frontmatter = parse_frontmatter(schema.content)
+
+             # If content has entity_type/type, use it to override the schema entity_type
+             if "type" in content_frontmatter:
+                 schema.entity_type = content_frontmatter["type"]
+
              if "permalink" in content_frontmatter:
                  # Create a minimal EntityMarkdown object for permalink resolution
                  from basic_memory.markdown.schemas import EntityFrontmatter
@@ -292,27 +302,21 @@ class EntityService(BaseService[EntityModel]):

          Creates the entity with null checksum to indicate sync not complete.
          Relations will be added in second pass.
+
+         Uses UPSERT approach to handle permalink/file_path conflicts cleanly.
          """
          logger.debug(f"Creating entity: {markdown.frontmatter.title} file_path: {file_path}")
          model = entity_model_from_markdown(file_path, markdown)

          # Mark as incomplete because we still need to add relations
          model.checksum = None
-         # Repository will set project_id automatically
+
+         # Use UPSERT to handle conflicts cleanly
          try:
-             return await self.repository.add(model)
-         except IntegrityError as e:
-             # Handle race condition where entity was created by another process
-             if "UNIQUE constraint failed: entity.file_path" in str(
-                 e
-             ) or "UNIQUE constraint failed: entity.permalink" in str(e):
-                 logger.info(
-                     f"Entity already exists for file_path={file_path} (file_path or permalink conflict), updating instead of creating"
-                 )
-                 return await self.update_entity_and_observations(file_path, markdown)
-             else:
-                 # Re-raise if it's a different integrity error
-                 raise
+             return await self.repository.upsert_entity(model)
+         except Exception as e:
+             logger.error(f"Failed to upsert entity for {file_path}: {e}")
+             raise EntityCreationError(f"Failed to create entity: {str(e)}") from e

      async def update_entity_and_observations(
          self, file_path: Path, markdown: EntityMarkdown
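A small sketch of the frontmatter override: when incoming content carries a "type" key, the service uses it over the entity_type argument. Only the "type" handling comes from the hunks above; the note content, title, and folder are invented, and write_note is imported as in the earlier sketch:

```python
from basic_memory.mcp.tools.write_note import write_note


async def save_with_frontmatter() -> str:
    content = """---
type: guide
permalink: guides/backup-strategy
---
# Backup strategy

Nightly snapshots are enough for this project.
"""
    # Even with entity_type="note", the frontmatter "type: guide" wins once
    # EntityService parses the content.
    return await write_note(
        title="Backup strategy", content=content, folder="guides", entity_type="note"
    )
```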
basic_memory/services/initialization.py CHANGED
@@ -17,17 +17,21 @@ from basic_memory.repository import ProjectRepository


  async def initialize_database(app_config: BasicMemoryConfig) -> None:
-     """Run database migrations to ensure schema is up to date.
+     """Initialize database with migrations handled automatically by get_or_create_db.

      Args:
          app_config: The Basic Memory project configuration
+
+     Note:
+         Database migrations are now handled automatically when the database
+         connection is first established via get_or_create_db().
      """
+     # Trigger database initialization and migrations by getting the database connection
      try:
-         logger.info("Running database migrations...")
-         await db.run_migrations(app_config)
-         logger.info("Migrations completed successfully")
+         await db.get_or_create_db(app_config.database_path)
+         logger.info("Database initialization completed")
      except Exception as e:
-         logger.error(f"Error running migrations: {e}")
+         logger.error(f"Error initializing database: {e}")
          # Allow application to continue - it might still work
          # depending on what the error was, and will fail with a
          # more specific error if the database is actually unusable
@@ -44,9 +48,9 @@ async def reconcile_projects_with_config(app_config: BasicMemoryConfig):
      """
      logger.info("Reconciling projects from config with database...")

-     # Get database session
+     # Get database session - migrations handled centrally
      _, session_maker = await db.get_or_create_db(
-         db_path=app_config.database_path, db_type=db.DatabaseType.FILESYSTEM
+         db_path=app_config.database_path, db_type=db.DatabaseType.FILESYSTEM, ensure_migrations=False
      )
      project_repository = ProjectRepository(session_maker)

@@ -65,9 +69,9 @@ async def reconcile_projects_with_config(app_config: BasicMemoryConfig):


  async def migrate_legacy_projects(app_config: BasicMemoryConfig):
-     # Get database session
+     # Get database session - migrations handled centrally
      _, session_maker = await db.get_or_create_db(
-         db_path=app_config.database_path, db_type=db.DatabaseType.FILESYSTEM
+         db_path=app_config.database_path, db_type=db.DatabaseType.FILESYSTEM, ensure_migrations=False
      )
      logger.info("Migrating legacy projects...")
      project_repository = ProjectRepository(session_maker)
@@ -134,9 +138,9 @@ async def initialize_file_sync(
      # delay import
      from basic_memory.sync import WatchService

-     # Load app configuration
+     # Load app configuration - migrations handled centrally
      _, session_maker = await db.get_or_create_db(
-         db_path=app_config.database_path, db_type=db.DatabaseType.FILESYSTEM
+         db_path=app_config.database_path, db_type=db.DatabaseType.FILESYSTEM, ensure_migrations=False
      )
      project_repository = ProjectRepository(session_maker)
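The same pattern repeats across the three helpers above: initialize_database() triggers migrations once through get_or_create_db(), and every later call opts out. A minimal sketch of that opt-out path, assuming the imports shown in the hunks (the wrapper function name is invented):

```python
from basic_memory import db
from basic_memory.config import app_config
from basic_memory.repository import ProjectRepository


async def get_project_repository() -> ProjectRepository:
    # Migrations already ran in initialize_database(), so skip the check here.
    _, session_maker = await db.get_or_create_db(
        db_path=app_config.database_path,
        db_type=db.DatabaseType.FILESYSTEM,
        ensure_migrations=False,
    )
    return ProjectRepository(session_maker)
```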
 
basic_memory/templates/prompts/continue_conversation.hbs CHANGED
@@ -96,7 +96,7 @@ You can also:
  You can:
  - Explore more with: `search_notes("{{ topic }}")`
  - See what's changed: `recent_activity(timeframe="{{default timeframe "7d"}}")`
- - **Record new learnings or decisions from this conversation:** `write_note(folder="[Chose a folder]" title="[Create a meaningful title]", content="[Content with observations and relations]")`
+ - **Record new learnings or decisions from this conversation:** `write_note(folder="[Choose a folder]" title="[Create a meaningful title]", content="[Content with observations and relations]")`

  ## Knowledge Capture Recommendation

{basic_memory-0.13.5.dist-info → basic_memory-0.13.6.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: basic-memory
- Version: 0.13.5
+ Version: 0.13.6
  Summary: Local-first knowledge management combining Zettelkasten with knowledge graphs
  Project-URL: Homepage, https://github.com/basicmachines-co/basic-memory
  Project-URL: Repository, https://github.com/basicmachines-co/basic-memory
@@ -51,8 +51,11 @@ Basic Memory lets you build persistent knowledge through natural conversations w
  Claude, while keeping everything in simple Markdown files on your computer. It uses the Model Context Protocol (MCP) to
  enable any compatible LLM to read and write to your local knowledge base.

- - Website: https://basicmachines.co
+ - Website: https://basicmemory.com
+ - Company: https://basicmachines.co
  - Documentation: https://memory.basicmachines.co
+ - Discord: https://discord.gg/tyvKNccgqN
+ - YouTube: https://www.youtube.com/@basicmachines-co

  ## Pick up your conversation right where you left off

@@ -99,8 +102,7 @@ Memory for Claude Desktop:
  npx -y @smithery/cli install @basicmachines-co/basic-memory --client claude
  ```

- This installs and configures Basic Memory without requiring manual edits to the Claude Desktop configuration file. The
- Smithery server hosts the MCP server component, while your data remains stored locally as Markdown files.
+ This installs and configures Basic Memory without requiring manual edits to the Claude Desktop configuration file. Note: The Smithery installation uses their hosted MCP server, while your data remains stored locally as Markdown files.

  ### Glama.ai

@@ -191,7 +193,8 @@ The note embeds semantic content and links to other topics via simple Markdown f

  3. You see this file on your computer in real time in the current project directory (default `~/$HOME/basic-memory`).

- - Realtime sync is enabled by default with the v0.12.0 version
+ - Realtime sync is enabled by default starting with v0.12.0
+ - Project switching during conversations is supported starting with v0.13.0

  4. In a chat with the LLM, you can reference a topic:

@@ -389,10 +392,20 @@ Basic Memory will sync the files in your project in real time if you make manual
  ```
  write_note(title, content, folder, tags) - Create or update notes
  read_note(identifier, page, page_size) - Read notes by title or permalink
+ edit_note(identifier, operation, content) - Edit notes incrementally (append, prepend, find/replace)
+ move_note(identifier, destination_path) - Move notes with database consistency
+ view_note(identifier) - Display notes as formatted artifacts for better readability
  build_context(url, depth, timeframe) - Navigate knowledge graph via memory:// URLs
  search_notes(query, page, page_size) - Search across your knowledge base
  recent_activity(type, depth, timeframe) - Find recently updated information
  canvas(nodes, edges, title, folder) - Generate knowledge visualizations
+ list_memory_projects() - List all available projects with status
+ switch_project(project_name) - Switch to different project context
+ get_current_project() - Show current project and statistics
+ create_memory_project(name, path, set_default) - Create new projects
+ delete_project(name) - Delete projects from configuration
+ set_default_project(name) - Set default project
+ sync_status() - Check file synchronization status
  ```

  5. Example prompts to try:
@@ -403,6 +416,10 @@ canvas(nodes, edges, title, folder) - Generate knowledge visualizations
  "Create a canvas visualization of my project components"
  "Read my notes on the authentication system"
  "What have I been working on in the past week?"
+ "Switch to my work-notes project"
+ "List all my available projects"
+ "Edit my coffee brewing note to add a new technique"
+ "Move my old meeting notes to the archive folder"
  ```

  ## Futher info
{basic_memory-0.13.5.dist-info → basic_memory-0.13.6.dist-info}/RECORD RENAMED
@@ -1,11 +1,11 @@
- basic_memory/__init__.py,sha256=WHPxSfUeC15lusNJ4Ju9qybUhppI3x4uo1yfuNB5Y6k,256
+ basic_memory/__init__.py,sha256=jY-dYXPDVPu0kvc-gebJZdd6zU-oKyz-d5OfbALCEes,256
  basic_memory/config.py,sha256=OP8ygXZuiAEpKp2EvudMFWnJDiEf3ZWAUZqvqzkEo6g,11700
- basic_memory/db.py,sha256=X4-uyEZdJXVLfFDTpcNZxWzawRZXhDdKoEFWAGgE4Lk,6193
+ basic_memory/db.py,sha256=UCN37mfGgK3EKCSpKDZjqxkbQVZqBsTOfkQ5OE8sFuc,7609
  basic_memory/deps.py,sha256=zXOhqXCoSVIa1iIcO8U6uUiofJn5eT4ycwJkH9I2kX4,12102
  basic_memory/file_utils.py,sha256=eaxTKLLEbTIy_Mb_Iv_Dmt4IXAJSrZGVi-Knrpyci3E,6700
  basic_memory/utils.py,sha256=BL6DDRiMF1gNcDr_guRAYflooSrSlDniJh96ApdzuDY,7555
  basic_memory/alembic/alembic.ini,sha256=IEZsnF8CbbZnkwBr67LzKKNobHuzTaQNUvM8Psop5xc,3733
- basic_memory/alembic/env.py,sha256=2izCQuFbw0hhlx0L8dHgQ98QM_tzMj6WbgrznKrbxv8,2727
+ basic_memory/alembic/env.py,sha256=gECjMcc--Hhacy3od1WNIAFyHzv6MUi7F_eQG7k3bRQ,2812
  basic_memory/alembic/migrations.py,sha256=lriHPXDdBLSNXEW3QTpU0SJKuVd1V-8NrVkpN3qfsUQ,718
  basic_memory/alembic/script.py.mako,sha256=MEqL-2qATlST9TAOeYgscMn1uy6HUS9NFvDgl93dMj8,635
  basic_memory/alembic/versions/3dae7c7b1564_initial_schema.py,sha256=lTbWlAnd1es7xU99DoJgfaRe1_Kte8TL98riqeKGV80,4363
@@ -87,14 +87,14 @@ basic_memory/mcp/tools/search.py,sha256=22sLHed6z53mH9NQqBv37Xi4d6AtOTyrUvKs2Myc
  basic_memory/mcp/tools/sync_status.py,sha256=mt0DdcaAlyiKW4NK4gy6psajSqcez0bOm_4MzG1NOdg,10486
  basic_memory/mcp/tools/utils.py,sha256=wsfrgiBScacMilODu85AXbUUKA5fJi4_6phDIC9dQRs,19702
  basic_memory/mcp/tools/view_note.py,sha256=ddNXxyETsdA5SYflIaQVj_Cbd7I7CLVs3atRRDMbGmg,2499
- basic_memory/mcp/tools/write_note.py,sha256=TW_7-4QfX8GYZ-FU_iSSYAm1lucE7NeOcpZUypRXKOk,5912
+ basic_memory/mcp/tools/write_note.py,sha256=dPC2bqxnuWD1NgDn8hXx4mMtQ8T91WoS2kNmHtKsm70,6056
  basic_memory/models/__init__.py,sha256=j0C4dtFi-FOEaQKR8dQWEG-dJtdQ15NBTiJg4nbIXNU,333
  basic_memory/models/base.py,sha256=4hAXJ8CE1RnjKhb23lPd-QM7G_FXIdTowMJ9bRixspU,225
  basic_memory/models/knowledge.py,sha256=AFxfKS8fRa43Kq3EjJCAufpte4VNC7fs9YfshDrB4o0,7087
  basic_memory/models/project.py,sha256=oUrQaUOu7_muSl-i38Dh0HzmCFrMAtwgxALDUTt9k5c,2773
  basic_memory/models/search.py,sha256=PhQ8w4taApSvjh1DpPhB4cH9GTt2E2po-DFZzhnoZkY,1300
  basic_memory/repository/__init__.py,sha256=MWK-o8QikqzOpe5SyPbKQ2ioB5BWA0Upz65tgg-E0DU,327
- basic_memory/repository/entity_repository.py,sha256=larjP7r6Sc7YyPD1BloC_m96McYsjHTf6doUQy3gSY4,3776
+ basic_memory/repository/entity_repository.py,sha256=ABEFD-ZJbJ05OwQTcZWpECxYtzlfVWfzXlMRIFOuuyQ,10520
  basic_memory/repository/observation_repository.py,sha256=qhMvHLSjaoT3Fa_cQOKsT5jYPj66GXSytEBMwLAgygQ,2943
  basic_memory/repository/project_info_repository.py,sha256=8XLVAYKkBWQ6GbKj1iqA9OK0FGPHdTlOs7ZtfeUf9t8,338
  basic_memory/repository/project_repository.py,sha256=sgdKxKTSiiOZTzABwUNqli7K5mbXiPiQEAc5r0RD_jQ,3159
@@ -115,10 +115,10 @@ basic_memory/schemas/search.py,sha256=ywMsDGAQK2sO2TT5lc-da_k67OKW1x1TenXormHHWv
  basic_memory/services/__init__.py,sha256=XGt8WX3fX_0K9L37Msy8HF8nlMZYIG3uQ6mUX6_iJtg,259
  basic_memory/services/context_service.py,sha256=4ReLAF5qifA9ayOePGsVKusw1TWj8oBzRECjrsFiKPI,14462
  basic_memory/services/directory_service.py,sha256=_YOPXseQM4knd7PIFAho9LV_E-FljVE5WVJKQ0uflZs,6017
- basic_memory/services/entity_service.py,sha256=KemsDkKkA7KItVtfsdAlYaGyOR8ryZQCu_O9GhkJucc,30103
+ basic_memory/services/entity_service.py,sha256=R6iXgkGepWscbtErFKHWwsqk2IXWVOmst15IRZ6eBCA,30181
  basic_memory/services/exceptions.py,sha256=oVjQr50XQqnFq1-MNKBilI2ShtHDxypavyDk1UeyHhw,390
  basic_memory/services/file_service.py,sha256=jCrmnEkTQ4t9HF7L_M6BL7tdDqjjzty9hpTo9AzwhvM,10059
- basic_memory/services/initialization.py,sha256=6ZeuTInPksyre4pjmiK_GXi5o_mJk3mfqGGH6apHxko,9271
+ basic_memory/services/initialization.py,sha256=Lbnq5qH2S2vkQQHPSHU8UsTt2d9x0oj-Rvmty0s8WLQ,9675
  basic_memory/services/link_resolver.py,sha256=1-_VFsvqdT5rVBHe8Jrq63U59XQ0hxGezxY8c24Tiow,4594
  basic_memory/services/migration_service.py,sha256=pFJCSD7UgHLx1CHvtN4Df1CzDEp-CZ9Vqx4XYn1m1M0,6096
  basic_memory/services/project_service.py,sha256=YDZl_e7R36D6KcObpBeMqIiM05oh9nOIfZFIFgIRxbY,27151
@@ -129,10 +129,10 @@ basic_memory/sync/__init__.py,sha256=CVHguYH457h2u2xoM8KvOilJC71XJlZ-qUh8lHcjYj4
  basic_memory/sync/background_sync.py,sha256=4CEx8oP6-qD33uCeowhpzhA8wivmWxaCmSBP37h3Fs8,714
  basic_memory/sync/sync_service.py,sha256=AxC5J1YTcPWTmA0HdzvOZBthi4-_LZ44kNF0KQoDRPw,23387
  basic_memory/sync/watch_service.py,sha256=JAumrHUjV1lF9NtEK32jgg0myWBfLXotNXxONeIV9SM,15316
- basic_memory/templates/prompts/continue_conversation.hbs,sha256=begMFHOPN3aCm5sHz5PlKMLOfZ8hlpFxFJ-hgy0T9K4,3075
+ basic_memory/templates/prompts/continue_conversation.hbs,sha256=trrDHSXA5S0JCbInMoUJL04xvCGRB_ku1RHNQHtl6ZI,3076
  basic_memory/templates/prompts/search.hbs,sha256=H1cCIsHKp4VC1GrH2KeUB8pGe5vXFPqb2VPotypmeCA,3098
- basic_memory-0.13.5.dist-info/METADATA,sha256=LgTv7id00iSZnHBGYLIWAujW_PDC7rAw_veLBCd4-_E,15469
- basic_memory-0.13.5.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- basic_memory-0.13.5.dist-info/entry_points.txt,sha256=wvE2mRF6-Pg4weIYcfQ-86NOLZD4WJg7F7TIsRVFLb8,90
- basic_memory-0.13.5.dist-info/licenses/LICENSE,sha256=hIahDEOTzuHCU5J2nd07LWwkLW7Hko4UFO__ffsvB-8,34523
- basic_memory-0.13.5.dist-info/RECORD,,
+ basic_memory-0.13.6.dist-info/METADATA,sha256=vf2t-jK5fi2Pi_y-TkTiyhrY85Bx7Ah0tIGCGIZ0S0A,16532
+ basic_memory-0.13.6.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ basic_memory-0.13.6.dist-info/entry_points.txt,sha256=wvE2mRF6-Pg4weIYcfQ-86NOLZD4WJg7F7TIsRVFLb8,90
+ basic_memory-0.13.6.dist-info/licenses/LICENSE,sha256=hIahDEOTzuHCU5J2nd07LWwkLW7Hko4UFO__ffsvB-8,34523
+ basic_memory-0.13.6.dist-info/RECORD,,