basic-memory 0.2.21__py3-none-any.whl → 0.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

basic_memory/__init__.py CHANGED
@@ -1,3 +1,3 @@
  """basic-memory - Local-first knowledge management combining Zettelkasten with knowledge graphs"""

- __version__ = "0.0.1"
+ __version__ = "0.4.0"
basic_memory/api/app.py CHANGED
@@ -6,38 +6,17 @@ from fastapi import FastAPI, HTTPException
  from fastapi.exception_handlers import http_exception_handler
  from loguru import logger

+ import basic_memory
  from basic_memory import db
  from basic_memory.config import config as app_config
  from basic_memory.api.routers import knowledge, search, memory, resource
- from alembic import command
- from alembic.config import Config
-
- from basic_memory.db import DatabaseType
- from basic_memory.repository.search_repository import SearchRepository
-
-
- async def run_migrations(): # pragma: no cover
-     """Run any pending alembic migrations."""
-     logger.info("Running database migrations...")
-     try:
-         config = Config("alembic.ini")
-         command.upgrade(config, "head")
-         logger.info("Migrations completed successfully")
-
-         _, session_maker = await db.get_or_create_db(
-             app_config.database_path, DatabaseType.FILESYSTEM
-         )
-         await SearchRepository(session_maker).init_search_index()
-     except Exception as e:
-         logger.error(f"Error running migrations: {e}")
-         raise


  @asynccontextmanager
  async def lifespan(app: FastAPI): # pragma: no cover
      """Lifecycle manager for the FastAPI app."""
-     logger.info("Starting Basic Memory API")
-     await run_migrations()
+     logger.info(f"Starting Basic Memory API {basic_memory.__version__}")
+     await db.run_migrations(app_config)
      yield
      logger.info("Shutting down Basic Memory API")
      await db.shutdown_db()
basic_memory/cli/app.py CHANGED
@@ -1,3 +1,20 @@
+ import asyncio
+
  import typer

- app = typer.Typer()
+ from basic_memory import db
+ from basic_memory.config import config
+ from basic_memory.utils import setup_logging
+
+ setup_logging(log_file=".basic-memory/basic-memory-cli.log") # pragma: no cover
+
+ asyncio.run(db.run_migrations(config))
+
+ app = typer.Typer(name="basic-memory")
+
+ import_app = typer.Typer()
+ app.add_typer(import_app, name="import")
+
+
+ claude_app = typer.Typer()
+ import_app.add_typer(claude_app, name="claude")
basic_memory/cli/commands/db.py CHANGED
@@ -22,4 +22,4 @@ def reset(
      from basic_memory.cli.commands.sync import sync

      logger.info("Rebuilding search index from filesystem...")
-     asyncio.run(sync()) # pyright: ignore
+     sync(watch=False) # pyright: ignore
basic_memory/cli/commands/import_chatgpt.py ADDED
@@ -0,0 +1,255 @@
+ """Import command for ChatGPT conversations."""
+
+ import asyncio
+ import json
+ from datetime import datetime
+ from pathlib import Path
+ from typing import Dict, Any, List, Annotated, Set, Optional
+
+ import typer
+ from loguru import logger
+ from rich.console import Console
+ from rich.panel import Panel
+ from rich.progress import Progress, SpinnerColumn, TextColumn, BarColumn
+
+ from basic_memory.cli.app import import_app
+ from basic_memory.config import config
+ from basic_memory.markdown import EntityParser, MarkdownProcessor
+ from basic_memory.markdown.schemas import EntityMarkdown, EntityFrontmatter
+
+ console = Console()
+
+
+ def clean_filename(text: str) -> str:
+     """Convert text to safe filename."""
+     clean = "".join(c if c.isalnum() else "-" for c in text.lower()).strip("-")
+     return clean
+
+
+ def format_timestamp(ts: float) -> str:
+     """Format Unix timestamp for display."""
+     dt = datetime.fromtimestamp(ts)
+     return dt.strftime("%Y-%m-%d %H:%M:%S")
+
+
+ def get_message_content(message: Dict[str, Any]) -> str:
+     """Extract clean message content."""
+     if not message or "content" not in message:
+         return "" # pragma: no cover
+
+     content = message["content"]
+     if content.get("content_type") == "text":
+         return "\n".join(content.get("parts", []))
+     elif content.get("content_type") == "code":
+         return f"```{content.get('language', '')}\n{content.get('text', '')}\n```"
+     return "" # pragma: no cover
+
+
+ def traverse_messages(
+     mapping: Dict[str, Any], root_id: Optional[str], seen: Set[str]
+ ) -> List[Dict[str, Any]]:
+     """Traverse message tree and return messages in order."""
+     messages = []
+     node = mapping.get(root_id) if root_id else None
+
+     while node:
+         if node["id"] not in seen and node.get("message"):
+             seen.add(node["id"])
+             messages.append(node["message"])
+
+         # Follow children
+         children = node.get("children", [])
+         for child_id in children:
+             child_msgs = traverse_messages(mapping, child_id, seen)
+             messages.extend(child_msgs)
+
+         break  # Don't follow siblings
+
+     return messages
+
+
+ def format_chat_markdown(
+     title: str, mapping: Dict[str, Any], root_id: Optional[str], created_at: float, modified_at: float
+ ) -> str:
+     """Format chat as clean markdown."""
+
+     # Start with title
+     lines = [f"# {title}\n"]
+
+     # Traverse message tree
+     seen_msgs = set()
+     messages = traverse_messages(mapping, root_id, seen_msgs)
+
+     # Format each message
+     for msg in messages:
+         # Skip hidden messages
+         if msg.get("metadata", {}).get("is_visually_hidden_from_conversation"):
+             continue
+
+         # Get author and timestamp
+         author = msg["author"]["role"].title()
+         ts = format_timestamp(msg["create_time"]) if msg.get("create_time") else ""
+
+         # Add message header
+         lines.append(f"### {author} ({ts})")
+
+         # Add message content
+         content = get_message_content(msg)
+         if content:
+             lines.append(content)
+
+         # Add spacing
+         lines.append("")
+
+     return "\n".join(lines)
+
+
+ def format_chat_content(folder: str, conversation: Dict[str, Any]) -> EntityMarkdown:
+     """Convert chat conversation to Basic Memory entity."""
+
+     # Extract timestamps
+     created_at = conversation["create_time"]
+     modified_at = conversation["update_time"]
+
+     root_id = None
+     # Find root message
+     for node_id, node in conversation["mapping"].items():
+         if node.get("parent") is None:
+             root_id = node_id
+             break
+
+     # Generate permalink
+     date_prefix = datetime.fromtimestamp(created_at).strftime("%Y%m%d")
+     clean_title = clean_filename(conversation["title"])
+
+     # Format content
+     content = format_chat_markdown(
+         title=conversation["title"],
+         mapping=conversation["mapping"],
+         root_id=root_id,
+         created_at=created_at,
+         modified_at=modified_at,
+     )
+
+     # Create entity
+     entity = EntityMarkdown(
+         frontmatter=EntityFrontmatter(
+             metadata={
+                 "type": "conversation",
+                 "title": conversation["title"],
+                 "created": format_timestamp(created_at),
+                 "modified": format_timestamp(modified_at),
+                 "permalink": f"{folder}/{date_prefix}-{clean_title}",
+             }
+         ),
+         content=content,
+     )
+
+     return entity
+
+
+ async def process_chatgpt_json(
+     json_path: Path, folder: str, markdown_processor: MarkdownProcessor
+ ) -> Dict[str, int]:
+     """Import conversations from ChatGPT JSON format."""
+
+     with Progress(
+         SpinnerColumn(),
+         TextColumn("[progress.description]{task.description}"),
+         BarColumn(),
+         TextColumn("[progress.percentage]{task.percentage:>3.0f}%"),
+         console=console,
+     ) as progress:
+         read_task = progress.add_task("Reading chat data...", total=None)
+
+         # Read conversations
+         conversations = json.loads(json_path.read_text())
+         progress.update(read_task, total=len(conversations))
+
+         # Process each conversation
+         messages_imported = 0
+         chats_imported = 0
+
+         for chat in conversations:
+             # Convert to entity
+             entity = format_chat_content(folder, chat)
+
+             # Write file
+             file_path = config.home / f"{entity.frontmatter.metadata['permalink']}.md"
+             # logger.info(f"Writing file: {file_path.absolute()}")
+             await markdown_processor.write_file(file_path, entity)
+
+             # Count messages
+             msg_count = sum(
+                 1
+                 for node in chat["mapping"].values()
+                 if node.get("message")
+                 and not node.get("message", {})
+                 .get("metadata", {})
+                 .get("is_visually_hidden_from_conversation")
+             )
+
+             chats_imported += 1
+             messages_imported += msg_count
+             progress.update(read_task, advance=1)
+
+         return {"conversations": chats_imported, "messages": messages_imported}
+
+
+ async def get_markdown_processor() -> MarkdownProcessor:
+     """Get MarkdownProcessor instance."""
+     entity_parser = EntityParser(config.home)
+     return MarkdownProcessor(entity_parser)
+
+
+ @import_app.command(name="chatgpt", help="Import conversations from ChatGPT JSON export.")
+ def import_chatgpt(
+     conversations_json: Annotated[
+         Path, typer.Option(..., help="Path to ChatGPT conversations.json file")
+     ] = Path("conversations.json"),
+     folder: Annotated[
+         str, typer.Option(help="The folder to place the files in.")
+     ] = "conversations",
+ ):
+     """Import chat conversations from ChatGPT JSON format.
+
+     This command will:
+     1. Read the complex tree structure of messages
+     2. Convert them to linear markdown conversations
+     3. Save as clean, readable markdown files
+
+     After importing, run 'basic-memory sync' to index the new files.
+     """
+
+     try:
+         if conversations_json:
+             if not conversations_json.exists():
+                 typer.echo(f"Error: File not found: {conversations_json}", err=True)
+                 raise typer.Exit(1)
+
+         # Get markdown processor
+         markdown_processor = asyncio.run(get_markdown_processor())
+
+         # Process the file
+         base_path = config.home / folder
+         console.print(f"\nImporting chats from {conversations_json}...writing to {base_path}")
+         results = asyncio.run(
+             process_chatgpt_json(conversations_json, folder, markdown_processor)
+         )
+
+         # Show results
+         console.print(
+             Panel(
+                 f"[green]Import complete![/green]\n\n"
+                 f"Imported {results['conversations']} conversations\n"
+                 f"Containing {results['messages']} messages",
+                 expand=False,
+             )
+         )
+
+         console.print("\nRun 'basic-memory sync' to index the new files.")
+
+     except Exception as e:
+         logger.error("Import failed")
+         typer.echo(f"Error during import: {e}", err=True)
+         raise typer.Exit(1)
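For orientation, here is a minimal sketch of the `conversations.json` shape the functions above expect. The field names are inferred only from the dictionary accesses in this hunk (`mapping`, `parent`/`children`, `author.role`, `content.parts`, the `is_visually_hidden_from_conversation` metadata flag); it is not an official ChatGPT export schema, and real exports carry many more keys.

```python
# Hypothetical, trimmed example of one exported conversation
# (fields inferred from import_chatgpt.py above; values are placeholders).
conversation = {
    "title": "Trip planning",
    "create_time": 1700000000.0,   # Unix timestamp (float), used for the date prefix
    "update_time": 1700000100.0,
    "mapping": {                   # node_id -> tree node; traversal starts at the parentless node
        "root": {
            "id": "root",
            "parent": None,
            "children": ["msg-1"],
            "message": None,
        },
        "msg-1": {
            "id": "msg-1",
            "parent": "root",
            "children": [],
            "message": {
                "author": {"role": "user"},
                "create_time": 1700000000.0,
                "metadata": {},    # may carry is_visually_hidden_from_conversation
                "content": {"content_type": "text", "parts": ["Hello!"]},
            },
        },
    },
}
```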
basic_memory/cli/commands/import_claude_conversations.py ADDED
@@ -0,0 +1,211 @@
+ """Import command for basic-memory CLI to import chat data from conversations2.json format."""
+
+ import asyncio
+ import json
+ from datetime import datetime
+ from pathlib import Path
+ from typing import Dict, Any, List, Annotated
+
+ import typer
+ from loguru import logger
+ from rich.console import Console
+ from rich.panel import Panel
+ from rich.progress import Progress, SpinnerColumn, TextColumn, BarColumn
+
+ from basic_memory.cli.app import claude_app
+ from basic_memory.config import config
+ from basic_memory.markdown import EntityParser, MarkdownProcessor
+ from basic_memory.markdown.schemas import EntityMarkdown, EntityFrontmatter
+
+ console = Console()
+
+
+ def clean_filename(text: str) -> str:
+     """Convert text to safe filename."""
+     # Remove invalid characters and convert spaces
+     clean = "".join(c if c.isalnum() else "-" for c in text.lower()).strip("-")
+     return clean
+
+
+ def format_timestamp(ts: str) -> str:
+     """Format ISO timestamp for display."""
+     dt = datetime.fromisoformat(ts.replace("Z", "+00:00"))
+     return dt.strftime("%Y-%m-%d %H:%M:%S")
+
+
+ def format_chat_markdown(
+     name: str, messages: List[Dict[str, Any]], created_at: str, modified_at: str, permalink: str
+ ) -> str:
+     """Format chat as clean markdown."""
+
+     # Start with frontmatter and title
+     lines = [
+         f"# {name}\n",
+     ]
+
+     # Add messages
+     for msg in messages:
+         # Format timestamp
+         ts = format_timestamp(msg["created_at"])
+
+         # Add message header
+         lines.append(f"### {msg['sender'].title()} ({ts})")
+
+         # Handle message content
+         content = msg.get("text", "")
+         if msg.get("content"):
+             content = " ".join(c.get("text", "") for c in msg["content"])
+         lines.append(content)
+
+         # Handle attachments
+         attachments = msg.get("attachments", [])
+         for attachment in attachments:
+             if "file_name" in attachment:
+                 lines.append(f"\n**Attachment: {attachment['file_name']}**")
+                 if "extracted_content" in attachment:
+                     lines.append("```")
+                     lines.append(attachment["extracted_content"])
+                     lines.append("```")
+
+         # Add spacing between messages
+         lines.append("")
+
+     return "\n".join(lines)
+
+
+ def format_chat_content(
+     base_path: Path, name: str, messages: List[Dict[str, Any]], created_at: str, modified_at: str
+ ) -> EntityMarkdown:
+     """Convert chat messages to Basic Memory entity format."""
+
+     # Generate permalink
+     date_prefix = datetime.fromisoformat(created_at.replace("Z", "+00:00")).strftime("%Y%m%d")
+     clean_title = clean_filename(name)
+     permalink = f"{base_path}/{date_prefix}-{clean_title}"
+
+     # Format content
+     content = format_chat_markdown(
+         name=name,
+         messages=messages,
+         created_at=created_at,
+         modified_at=modified_at,
+         permalink=permalink,
+     )
+
+     # Create entity
+     entity = EntityMarkdown(
+         frontmatter=EntityFrontmatter(
+             metadata={
+                 "type": "conversation",
+                 "title": name,
+                 "created": created_at,
+                 "modified": modified_at,
+                 "permalink": permalink,
+             }
+         ),
+         content=content,
+     )
+
+     return entity
+
+
+ async def process_conversations_json(
+     json_path: Path, base_path: Path, markdown_processor: MarkdownProcessor
+ ) -> Dict[str, int]:
+     """Import chat data from conversations2.json format."""
+
+     with Progress(
+         SpinnerColumn(),
+         TextColumn("[progress.description]{task.description}"),
+         BarColumn(),
+         TextColumn("[progress.percentage]{task.percentage:>3.0f}%"),
+         console=console,
+     ) as progress:
+         read_task = progress.add_task("Reading chat data...", total=None)
+
+         # Read chat data - handle array of arrays format
+         data = json.loads(json_path.read_text())
+         conversations = [chat for chat in data]
+         progress.update(read_task, total=len(conversations))
+
+         # Process each conversation
+         messages_imported = 0
+         chats_imported = 0
+
+         for chat in conversations:
+             # Convert to entity
+             entity = format_chat_content(
+                 base_path=base_path,
+                 name=chat["name"],
+                 messages=chat["chat_messages"],
+                 created_at=chat["created_at"],
+                 modified_at=chat["updated_at"],
+             )
+
+             # Write file
+             file_path = Path(f"{entity.frontmatter.metadata['permalink']}.md")
+             await markdown_processor.write_file(file_path, entity)
+
+             chats_imported += 1
+             messages_imported += len(chat["chat_messages"])
+             progress.update(read_task, advance=1)
+
+         return {"conversations": chats_imported, "messages": messages_imported}
+
+
+ async def get_markdown_processor() -> MarkdownProcessor:
+     """Get MarkdownProcessor instance."""
+     entity_parser = EntityParser(config.home)
+     return MarkdownProcessor(entity_parser)
+
+
+ @claude_app.command(name="conversations", help="Import chat conversations from Claude.ai.")
+ def import_claude(
+     conversations_json: Annotated[
+         Path, typer.Argument(..., help="Path to conversations.json file")
+     ] = Path("conversations.json"),
+     folder: Annotated[
+         str, typer.Option(help="The folder to place the files in.")
+     ] = "conversations",
+ ):
+     """Import chat conversations from conversations2.json format.
+
+     This command will:
+     1. Read chat data and nested messages
+     2. Create markdown files for each conversation
+     3. Format content in clean, readable markdown
+
+     After importing, run 'basic-memory sync' to index the new files.
+     """
+
+     try:
+         if not conversations_json.exists():
+             typer.echo(f"Error: File not found: {conversations_json}", err=True)
+             raise typer.Exit(1)
+
+         # Get markdown processor
+         markdown_processor = asyncio.run(get_markdown_processor())
+
+         # Process the file
+         base_path = config.home / folder
+         console.print(f"\nImporting chats from {conversations_json}...writing to {base_path}")
+         results = asyncio.run(
+             process_conversations_json(conversations_json, base_path, markdown_processor)
+         )
+
+         # Show results
+         console.print(
+             Panel(
+                 f"[green]Import complete![/green]\n\n"
+                 f"Imported {results['conversations']} conversations\n"
+                 f"Containing {results['messages']} messages",
+                 expand=False,
+             )
+         )
+
+         console.print("\nRun 'basic-memory sync' to index the new files.")
+
+     except Exception as e:
+         logger.error("Import failed")
+         typer.echo(f"Error during import: {e}", err=True)
+         raise typer.Exit(1)
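For reference, a sketch of the per-conversation record this importer reads. The field names are taken only from the accesses above (`name`, `created_at`, `updated_at`, `chat_messages`, `sender`, `text`/`content`, `attachments`); it is not an official Claude.ai export schema.

```python
# Hypothetical, trimmed record from the Claude.ai conversations export
# (fields inferred from import_claude_conversations.py above; values are placeholders).
chat = {
    "name": "Database schema review",
    "created_at": "2024-01-15T10:00:00Z",   # ISO-8601; "Z" is rewritten to "+00:00" before parsing
    "updated_at": "2024-01-15T11:30:00Z",
    "chat_messages": [
        {
            "sender": "human",
            "created_at": "2024-01-15T10:00:00Z",
            "text": "Can you review this schema?",
            "content": [{"text": "Can you review this schema?"}],  # optional; joined when present
            "attachments": [
                {"file_name": "schema.sql", "extracted_content": "CREATE TABLE ..."}
            ],
        },
    ],
}
```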
basic_memory/cli/commands/import_claude_projects.py ADDED
@@ -0,0 +1,195 @@
+ """Import command for basic-memory CLI to import project data from Claude.ai."""
+
+ import asyncio
+ import json
+ from pathlib import Path
+ from typing import Dict, Any, Annotated, Optional
+
+ import typer
+ from loguru import logger
+ from rich.console import Console
+ from rich.panel import Panel
+ from rich.progress import Progress, SpinnerColumn, TextColumn, BarColumn
+
+ from basic_memory.cli.app import claude_app
+ from basic_memory.config import config
+ from basic_memory.markdown import EntityParser, MarkdownProcessor
+ from basic_memory.markdown.schemas import EntityMarkdown, EntityFrontmatter
+
+ console = Console()
+
+
+ def clean_filename(text: str) -> str:
+     """Convert text to safe filename."""
+     clean = "".join(c if c.isalnum() else "-" for c in text.lower()).strip("-")
+     return clean
+
+
+ def format_project_markdown(project: Dict[str, Any], doc: Dict[str, Any]) -> EntityMarkdown:
+     """Format a project document as a Basic Memory entity."""
+
+     # Extract timestamps
+     created_at = doc.get("created_at") or project["created_at"]
+     modified_at = project["updated_at"]
+
+     # Generate clean names for organization
+     project_dir = clean_filename(project["name"])
+     doc_file = clean_filename(doc["filename"])
+
+     # Create entity
+     entity = EntityMarkdown(
+         frontmatter=EntityFrontmatter(
+             metadata={
+                 "type": "project_doc",
+                 "title": doc["filename"],
+                 "created": created_at,
+                 "modified": modified_at,
+                 "permalink": f"{project_dir}/docs/{doc_file}",
+                 "project_name": project["name"],
+                 "project_uuid": project["uuid"],
+                 "doc_uuid": doc["uuid"],
+             }
+         ),
+         content=doc["content"],
+     )
+
+     return entity
+
+
+ def format_prompt_markdown(project: Dict[str, Any]) -> Optional[EntityMarkdown]:
+     """Format project prompt template as a Basic Memory entity."""
+
+     if not project.get("prompt_template"):
+         return None
+
+     # Extract timestamps
+     created_at = project["created_at"]
+     modified_at = project["updated_at"]
+
+     # Generate clean project directory name
+     project_dir = clean_filename(project["name"])
+
+     # Create entity
+     entity = EntityMarkdown(
+         frontmatter=EntityFrontmatter(
+             metadata={
+                 "type": "prompt_template",
+                 "title": f"Prompt Template: {project['name']}",
+                 "created": created_at,
+                 "modified": modified_at,
+                 "permalink": f"{project_dir}/prompt-template",
+                 "project_name": project["name"],
+                 "project_uuid": project["uuid"],
+             }
+         ),
+         content=f"# Prompt Template: {project['name']}\n\n{project['prompt_template']}",
+     )
+
+     return entity
+
+
+ async def process_projects_json(
+     json_path: Path, base_path: Path, markdown_processor: MarkdownProcessor
+ ) -> Dict[str, int]:
+     """Import project data from Claude.ai projects.json format."""
+
+     with Progress(
+         SpinnerColumn(),
+         TextColumn("[progress.description]{task.description}"),
+         BarColumn(),
+         TextColumn("[progress.percentage]{task.percentage:>3.0f}%"),
+         console=console,
+     ) as progress:
+         read_task = progress.add_task("Reading project data...", total=None)
+
+         # Read project data
+         data = json.loads(json_path.read_text())
+         progress.update(read_task, total=len(data))
+
+         # Track import counts
+         docs_imported = 0
+         prompts_imported = 0
+
+         # Process each project
+         for project in data:
+             project_dir = clean_filename(project["name"])
+
+             # Create project directories
+             docs_dir = base_path / project_dir / "docs"
+             docs_dir.mkdir(parents=True, exist_ok=True)
+
+             # Import prompt template if it exists
+             if prompt_entity := format_prompt_markdown(project):
+                 file_path = base_path / f"{prompt_entity.frontmatter.metadata['permalink']}.md"
+                 await markdown_processor.write_file(file_path, prompt_entity)
+                 prompts_imported += 1
+
+             # Import project documents
+             for doc in project.get("docs", []):
+                 entity = format_project_markdown(project, doc)
+                 file_path = base_path / f"{entity.frontmatter.metadata['permalink']}.md"
+                 await markdown_processor.write_file(file_path, entity)
+                 docs_imported += 1
+
+             progress.update(read_task, advance=1)
+
+         return {"documents": docs_imported, "prompts": prompts_imported}
+
+
+ async def get_markdown_processor() -> MarkdownProcessor:
+     """Get MarkdownProcessor instance."""
+     entity_parser = EntityParser(config.home)
+     return MarkdownProcessor(entity_parser)
+
+
+ @claude_app.command(name="projects", help="Import projects from Claude.ai.")
+ def import_projects(
+     projects_json: Annotated[Path, typer.Argument(..., help="Path to projects.json file")] = Path(
+         "projects.json"
+     ),
+     base_folder: Annotated[
+         str, typer.Option(help="The base folder to place project files in.")
+     ] = "projects",
+ ):
+     """Import project data from Claude.ai.
+
+     This command will:
+     1. Create a directory for each project
+     2. Store docs in a docs/ subdirectory
+     3. Place prompt template in project root
+
+     After importing, run 'basic-memory sync' to index the new files.
+     """
+
+     try:
+         if projects_json:
+             if not projects_json.exists():
+                 typer.echo(f"Error: File not found: {projects_json}", err=True)
+                 raise typer.Exit(1)
+
+         # Get markdown processor
+         markdown_processor = asyncio.run(get_markdown_processor())
+
+         # Process the file
+         base_path = config.home / base_folder if base_folder else config.home
+         console.print(f"\nImporting projects from {projects_json}...writing to {base_path}")
+         results = asyncio.run(
+             process_projects_json(projects_json, base_path, markdown_processor)
+         )
+
+         # Show results
+         console.print(
+             Panel(
+                 f"[green]Import complete![/green]\n\n"
+                 f"Imported {results['documents']} project documents\n"
+                 f"Imported {results['prompts']} prompt templates",
+                 expand=False,
+             )
+         )
+
+         console.print("\nRun 'basic-memory sync' to index the new files.")
+
+     except Exception as e:
+         logger.error("Import failed")
+         typer.echo(f"Error during import: {e}", err=True)
+         raise typer.Exit(1)
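Likewise, a sketch of the per-project record this importer reads. The field names come only from the accesses above (`name`, `uuid`, timestamps, optional `prompt_template`, and a `docs` list); it is not an official Claude.ai export schema.

```python
# Hypothetical, trimmed record from a Claude.ai projects.json export
# (fields inferred from import_claude_projects.py above; values are placeholders).
project = {
    "name": "Basic Memory",
    "uuid": "project-uuid-placeholder",
    "created_at": "2024-01-10T09:00:00Z",
    "updated_at": "2024-02-01T17:45:00Z",
    "prompt_template": "You are helping with this project...",  # optional; becomes prompt-template.md
    "docs": [
        {
            "uuid": "doc-uuid-placeholder",
            "filename": "Architecture Notes",
            "content": "# Architecture\n...",
            "created_at": "2024-01-12T08:30:00Z",  # optional; falls back to the project created_at
        }
    ],
}
```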
basic_memory/cli/commands/import_memory_json.py CHANGED
@@ -3,7 +3,7 @@
  import asyncio
  import json
  from pathlib import Path
- from typing import Dict, Any, List
+ from typing import Dict, Any, List, Annotated

  import typer
  from loguru import logger
@@ -11,12 +11,11 @@ from rich.console import Console
  from rich.panel import Panel
  from rich.progress import Progress, SpinnerColumn, TextColumn, BarColumn

- from basic_memory.cli.app import app
+ from basic_memory.cli.app import import_app
  from basic_memory.config import config
  from basic_memory.markdown import EntityParser, MarkdownProcessor
  from basic_memory.markdown.schemas import EntityMarkdown, EntityFrontmatter, Observation, Relation

-
  console = Console()


@@ -98,9 +97,11 @@ async def get_markdown_processor() -> MarkdownProcessor:
      return MarkdownProcessor(entity_parser)


- @app.command()
- def import_json(
-     json_path: Path = typer.Argument(..., help="Path to memory.json file to import"),
+ @import_app.command()
+ def memory_json(
+     json_path: Annotated[Path, typer.Argument(..., help="Path to memory.json file")] = Path(
+         "memory.json"
+     ),
  ):
      """Import entities and relations from a memory.json file.

@@ -138,6 +139,6 @@ def import_json(
          console.print("\nRun 'basic-memory sync' to index the new files.")

      except Exception as e:
-         logger.exception("Import failed")
+         logger.error("Import failed")
          typer.echo(f"Error during import: {e}", err=True)
          raise typer.Exit(1)
basic_memory/cli/commands/mcp.py CHANGED
@@ -12,9 +12,9 @@ import basic_memory.mcp.tools # noqa: F401 # pragma: no cover


  @app.command()
- def mcp():
+ def mcp(): # pragma: no cover
      """Run the MCP server for Claude Desktop integration."""
      home_dir = config.home
-     logger.info("Starting Basic Memory MCP server")
+     logger.info(f"Starting Basic Memory MCP server {basic_memory.__version__}")
      logger.info(f"Home directory: {home_dir}")
      mcp_server.run()
basic_memory/cli/commands/status.py CHANGED
@@ -25,13 +25,11 @@ async def get_file_change_scanner(
      db_type=DatabaseType.FILESYSTEM,
  ) -> FileChangeScanner: # pragma: no cover
      """Get sync service instance."""
-     async with db.engine_session_factory(db_path=config.database_path, db_type=db_type) as (
-         engine,
-         session_maker,
-     ):
-         entity_repository = EntityRepository(session_maker)
-         file_change_scanner = FileChangeScanner(entity_repository)
-         return file_change_scanner
+     _, session_maker = await db.get_or_create_db(db_path=config.database_path, db_type=db_type)
+
+     entity_repository = EntityRepository(session_maker)
+     file_change_scanner = FileChangeScanner(entity_repository)
+     return file_change_scanner


  def add_files_to_tree(
basic_memory/cli/commands/sync.py CHANGED
@@ -14,7 +14,6 @@ from rich.tree import Tree
  from basic_memory import db
  from basic_memory.cli.app import app
  from basic_memory.config import config
- from basic_memory.db import DatabaseType
  from basic_memory.markdown import EntityParser
  from basic_memory.markdown.markdown_processor import MarkdownProcessor
  from basic_memory.repository import (
@@ -39,50 +38,50 @@ class ValidationIssue:
      error: str


- async def get_sync_service(db_type=DatabaseType.FILESYSTEM): # pragma: no cover
+ async def get_sync_service(): # pragma: no cover
      """Get sync service instance with all dependencies."""
-     async with db.engine_session_factory(db_path=config.database_path, db_type=db_type) as (
-         engine,
-         session_maker,
-     ):
-         entity_parser = EntityParser(config.home)
-         markdown_processor = MarkdownProcessor(entity_parser)
-         file_service = FileService(config.home, markdown_processor)
-
-         # Initialize repositories
-         entity_repository = EntityRepository(session_maker)
-         observation_repository = ObservationRepository(session_maker)
-         relation_repository = RelationRepository(session_maker)
-         search_repository = SearchRepository(session_maker)
-
-         # Initialize services
-         search_service = SearchService(search_repository, entity_repository, file_service)
-         link_resolver = LinkResolver(entity_repository, search_service)
-
-         # Initialize scanner
-         file_change_scanner = FileChangeScanner(entity_repository)
-
-         # Initialize services
-         entity_service = EntityService(
-             entity_parser,
-             entity_repository,
-             observation_repository,
-             relation_repository,
-             file_service,
-             link_resolver,
-         )
-
-         # Create sync service
-         sync_service = SyncService(
-             scanner=file_change_scanner,
-             entity_service=entity_service,
-             entity_parser=entity_parser,
-             entity_repository=entity_repository,
-             relation_repository=relation_repository,
-             search_service=search_service,
-         )
-
-         return sync_service
+     _, session_maker = await db.get_or_create_db(
+         db_path=config.database_path, db_type=db.DatabaseType.FILESYSTEM
+     )
+
+     entity_parser = EntityParser(config.home)
+     markdown_processor = MarkdownProcessor(entity_parser)
+     file_service = FileService(config.home, markdown_processor)
+
+     # Initialize repositories
+     entity_repository = EntityRepository(session_maker)
+     observation_repository = ObservationRepository(session_maker)
+     relation_repository = RelationRepository(session_maker)
+     search_repository = SearchRepository(session_maker)
+
+     # Initialize services
+     search_service = SearchService(search_repository, entity_repository, file_service)
+     link_resolver = LinkResolver(entity_repository, search_service)
+
+     # Initialize scanner
+     file_change_scanner = FileChangeScanner(entity_repository)
+
+     # Initialize services
+     entity_service = EntityService(
+         entity_parser,
+         entity_repository,
+         observation_repository,
+         relation_repository,
+         file_service,
+         link_resolver,
+     )
+
+     # Create sync service
+     sync_service = SyncService(
+         scanner=file_change_scanner,
+         entity_service=entity_service,
+         entity_parser=entity_parser,
+         entity_repository=entity_repository,
+         relation_repository=relation_repository,
+         search_service=search_service,
+     )
+
+     return sync_service


  def group_issues_by_directory(issues: List[ValidationIssue]) -> Dict[str, List[ValidationIssue]]:
@@ -154,6 +153,7 @@ def display_detailed_sync_results(knowledge: SyncReport):

  async def run_sync(verbose: bool = False, watch: bool = False):
      """Run sync operation."""
+
      sync_service = await get_sync_service()

      # Start watching if requested
basic_memory/cli/main.py CHANGED
@@ -4,7 +4,16 @@ from basic_memory.cli.app import app # pragma: no cover
  from basic_memory.utils import setup_logging # pragma: no cover

  # Register commands
- from basic_memory.cli.commands import status, sync, db, import_memory_json, mcp # noqa: F401 # pragma: no cover
+ from basic_memory.cli.commands import ( # noqa: F401 # pragma: no cover
+     status,
+     sync,
+     db,
+     import_memory_json,
+     mcp,
+     import_claude_conversations,
+     import_claude_projects,
+     import_chatgpt,
+ )


  # Set up logging when module is imported
basic_memory/db.py CHANGED
@@ -4,6 +4,10 @@ from enum import Enum, auto
  from pathlib import Path
  from typing import AsyncGenerator, Optional

+ from basic_memory.config import ProjectConfig
+ from alembic import command
+ from alembic.config import Config
+
  from loguru import logger
  from sqlalchemy import text
  from sqlalchemy.ext.asyncio import (
@@ -14,8 +18,7 @@ from sqlalchemy.ext.asyncio import (
      async_scoped_session,
  )

- from basic_memory.models import Base
- from basic_memory.models.search import CREATE_SEARCH_INDEX
+ from basic_memory.repository.search_repository import SearchRepository

  # Module level state
  _engine: Optional[AsyncEngine] = None
@@ -35,7 +38,7 @@ class DatabaseType(Enum):
              logger.info("Using in-memory SQLite database")
              return "sqlite+aiosqlite://"

-         return f"sqlite+aiosqlite:///{db_path}"
+         return f"sqlite+aiosqlite:///{db_path}" # pragma: no cover


  def get_scoped_session_factory(
@@ -69,24 +72,6 @@ async def scoped_session(
          await factory.remove()


- async def init_db() -> None:
-     """Initialize database with required tables."""
-     if _session_maker is None: # pragma: no cover
-         raise RuntimeError("Database session maker not initialized")
-
-     logger.info("Initializing database...")
-
-     async with scoped_session(_session_maker) as session:
-         await session.execute(text("PRAGMA foreign_keys=ON"))
-         conn = await session.connection()
-         await conn.run_sync(Base.metadata.create_all)
-
-         # recreate search index
-         await session.execute(CREATE_SEARCH_INDEX)
-
-         await session.commit()
-
-
  async def get_or_create_db(
      db_path: Path,
      db_type: DatabaseType = DatabaseType.FILESYSTEM,
@@ -100,9 +85,6 @@ async def get_or_create_db(
          _engine = create_async_engine(db_url, connect_args={"check_same_thread": False})
          _session_maker = async_sessionmaker(_engine, expire_on_commit=False)

-         # Initialize database
-         await init_db()
-
      assert _engine is not None # for type checker
      assert _session_maker is not None # for type checker
      return _engine, _session_maker
@@ -122,7 +104,6 @@ async def shutdown_db() -> None: # pragma: no cover
  async def engine_session_factory(
      db_path: Path,
      db_type: DatabaseType = DatabaseType.MEMORY,
-     init: bool = True,
  ) -> AsyncGenerator[tuple[AsyncEngine, async_sessionmaker[AsyncSession]], None]:
      """Create engine and session factory.

@@ -139,9 +120,6 @@ async def engine_session_factory(
      try:
          _session_maker = async_sessionmaker(_engine, expire_on_commit=False)

-         if init:
-             await init_db()
-
          assert _engine is not None # for type checker
          assert _session_maker is not None # for type checker
          yield _engine, _session_maker
@@ -150,3 +128,18 @@ async def engine_session_factory(
          await _engine.dispose()
          _engine = None
          _session_maker = None
+
+
+ async def run_migrations(app_config: ProjectConfig, database_type=DatabaseType.FILESYSTEM):
+     """Run any pending alembic migrations."""
+     logger.info("Running database migrations...")
+     try:
+         config = Config("alembic.ini")
+         command.upgrade(config, "head")
+         logger.info("Migrations completed successfully")
+
+         _, session_maker = await get_or_create_db(app_config.database_path, database_type)
+         await SearchRepository(session_maker).init_search_index()
+     except Exception as e: # pragma: no cover
+         logger.error(f"Error running migrations: {e}")
+         raise
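Both entry points in this release now delegate startup database setup to this single helper; a minimal usage sketch, mirroring the cli/app.py hunk earlier (api/app.py awaits the same call inside its lifespan handler):

```python
import asyncio

from basic_memory import db
from basic_memory.config import config

# Applies pending Alembic migrations up to "head", then (re)builds the search index.
asyncio.run(db.run_migrations(config))
```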
basic_memory/mcp/async_client.py CHANGED
@@ -2,7 +2,7 @@ from httpx import ASGITransport, AsyncClient

  from basic_memory.api.app import app as fastapi_app

- BASE_URL = "http://test"
+ BASE_URL = "memory://"

  # Create shared async client
  client = AsyncClient(transport=ASGITransport(app=fastapi_app), base_url=BASE_URL)
basic_memory/mcp/tools/notes.py CHANGED
@@ -13,6 +13,7 @@ from basic_memory.mcp.async_client import client
  from basic_memory.schemas import EntityResponse, DeleteEntitiesResponse
  from basic_memory.schemas.base import Entity
  from basic_memory.mcp.tools.utils import call_get, call_put, call_delete
+ from basic_memory.schemas.memory import memory_url_path


  @mcp.tool(
@@ -96,7 +97,9 @@ async def read_note(identifier: str) -> str:
      Raises:
          ValueError: If the note cannot be found
      """
-     response = await call_get(client, f"/resource/{identifier}")
+     logger.info(f"Reading note {identifier}")
+     url = memory_url_path(identifier)
+     response = await call_get(client, f"/resource/{url}")
      return response.text


basic_memory/sync/utils.py CHANGED
@@ -1,10 +1,7 @@
  """Types and utilities for file sync."""

  from dataclasses import dataclass, field
- from typing import Set, Dict, Optional
-
- from watchfiles import Change
-
+ from typing import Set, Dict


  @dataclass
basic_memory-{0.2.21 → 0.4.0}.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: basic-memory
- Version: 0.2.21
+ Version: 0.4.0
  Summary: Local-first knowledge management combining Zettelkasten with knowledge graphs
  Project-URL: Homepage, https://github.com/basicmachines-co/basic-memory
  Project-URL: Repository, https://github.com/basicmachines-co/basic-memory
@@ -286,6 +286,92 @@ Basic Memory is built on some key ideas:
  - Simple text patterns can capture rich meaning
  - Local-first doesn't mean feature-poor

+ ## Importing data
+
+ Basic memory has cli commands to import data from several formats into Markdown files
+
+ ### Claude.ai
+
+ First, request an export of your data from your Claude account. The data will be emailed to you in several files,
+ including
+ `conversations.json` and `projects.json`.
+
+ Import Claude.ai conversation data
+
+ ```bash
+ basic-memory import claude conversations conversations.json
+ ```
+
+ The conversations will be turned into Markdown files and placed in the "conversations" folder by default (this can be
+ changed with the --folder arg).
+
+ Example:
+
+ ```bash
+ Importing chats from conversations.json...writing to .../basic-memory
+ Reading chat data... ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 100%
+ ╭────────────────────────────╮
+ │ Import complete! │
+ │ │
+ │ Imported 307 conversations │
+ │ Containing 7769 messages │
+ ╰────────────────────────────╯
+ ```
+
+ Next, you can run the `sync` command to import the data into basic-memory
+
+ ```bash
+ basic-memory sync
+ ```
+
+ You can also import project data from Claude.ai
+
+ ```bash
+ ➜ basic-memory import claude projects
+ Importing projects from projects.json...writing to .../basic-memory/projects
+ Reading project data... ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 100%
+ ╭────────────────────────────────╮
+ │ Import complete! │
+ │ │
+ │ Imported 101 project documents │
+ │ Imported 32 prompt templates │
+ ╰────────────────────────────────╯
+
+ Run 'basic-memory sync' to index the new files.
+ ```
+
+ ### Chat Gpt
+
+ ```bash
+ ➜ basic-memory import chatgpt
+ Importing chats from conversations.json...writing to .../basic-memory/conversations
+
+ Reading chat data... ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 100%
+ ╭────────────────────────────╮
+ │ Import complete! │
+ │ │
+ │ Imported 198 conversations │
+ │ Containing 11777 messages │
+ ╰────────────────────────────╯
+
+
+ ```
+
+ ### Memory json
+
+ ```bash
+ ➜ basic-memory import memory-json
+ Importing from memory.json...writing to .../basic-memory
+ Reading memory.json... ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 100%
+ Creating entities... ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 100%
+ ╭──────────────────────╮
+ │ Import complete! │
+ │ │
+ │ Created 126 entities │
+ │ Added 252 relations │
+ ╰──────────────────────╯
+ ```
+
  ## License

  AGPL-3.0
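The README added above does not show what `memory.json` contains. Judging only from the import_memory_json.py hunk (it imports Observation and Relation schema types) and the entity/relation counts in the sample output, a plausible shape is a JSON-lines knowledge-graph dump; the sketch below is an assumption for illustration, not a documented format.

```python
# Hypothetical memory.json contents (one JSON object per line) -- an assumed
# shape, not confirmed by this diff.
import json

records = [
    {"type": "entity", "name": "basic-memory", "entityType": "project",
     "observations": ["Local-first knowledge management tool"]},
    {"type": "relation", "from": "basic-memory", "to": "SQLite", "relationType": "uses"},
]

with open("memory.json", "w") as f:
    for record in records:
        f.write(json.dumps(record) + "\n")
```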
basic_memory-{0.2.21 → 0.4.0}.dist-info/RECORD RENAMED
@@ -1,6 +1,6 @@
- basic_memory/__init__.py,sha256=_ij75bUYM3LqRQYHrJ1kLnDuUyauuHilEBF96OFw9hA,122
+ basic_memory/__init__.py,sha256=-gENVBDpKGZOdTlwqKdZ7bGfjTyvAEkavJkI0MLiQIc,122
  basic_memory/config.py,sha256=PZA2qgwKACvKfRcM3H-BPB_8FYVhgZAwTmlKJ3ROfhU,1643
- basic_memory/db.py,sha256=BFZCp4aJ7Xj9_ZCMz0rnSBuCy5xIMvvWjSImmuKzdWg,4605
+ basic_memory/db.py,sha256=cN8g_5dTMjQZKCvy_NwWs3dXV5lZZL7DCeu99guIK5k,4613
  basic_memory/deps.py,sha256=UzivBw6e6iYcU_8SQ8LNCmSsmFyHfjdzfWvnfNzqbRc,5375
  basic_memory/file_utils.py,sha256=gp7RCFWaddFnELIyTc1E19Rk8jJsrKshG2n8ZZR-kKA,5751
  basic_memory/utils.py,sha256=HiLorP5_YCQeNeTcDqvnkrwY7OBaFRS3i_hdV9iWKLs,2374
@@ -10,21 +10,24 @@ basic_memory/alembic/migrations.py,sha256=CIbkMHEKZ60aDUhFGSQjv8kDNM7sazfvEYHGGc
  basic_memory/alembic/script.py.mako,sha256=MEqL-2qATlST9TAOeYgscMn1uy6HUS9NFvDgl93dMj8,635
  basic_memory/alembic/versions/3dae7c7b1564_initial_schema.py,sha256=lTbWlAnd1es7xU99DoJgfaRe1_Kte8TL98riqeKGV80,4363
  basic_memory/api/__init__.py,sha256=wCpj-21j1D0KzKl9Ql6unLBVFY0K1uGp_FeSZRKtqpk,72
- basic_memory/api/app.py,sha256=AEHcslN4SBq5Ni7q7wkG4jDH0-SwMWV2DeTdaUSQKns,2083
+ basic_memory/api/app.py,sha256=3ddcWTxVjMy_5SUq89kROhMwosZqcr67Q5evOlSR9GE,1389
  basic_memory/api/routers/__init__.py,sha256=iviQ1QVYobC8huUuyRhEjcA0BDjrOUm1lXHXhJkxP9A,239
  basic_memory/api/routers/knowledge_router.py,sha256=cMLhRczOfSRnsZdyR0bSS8PENPRTu70dlwaV27O34bs,5705
  basic_memory/api/routers/memory_router.py,sha256=pF0GzmWoxmjhtxZM8jCmfLwqjey_fmXER5vYbD8fsQw,4556
  basic_memory/api/routers/resource_router.py,sha256=_Gp5HSJr-L-GUkQKbEP2bAZvCY8Smd-sBNWpGyqXS4c,1056
  basic_memory/api/routers/search_router.py,sha256=dCRnBbp3r966U8UYwgAaxZBbg7yX7pC8QJqagdACUi0,1086
  basic_memory/cli/__init__.py,sha256=arcKLAWRDhPD7x5t80MlviZeYzwHZ0GZigyy3NKVoGk,33
- basic_memory/cli/app.py,sha256=hF4MgYCgFql4J6qi3lguqc6HQdP2gm6PpvtSxKBSjZc,34
- basic_memory/cli/main.py,sha256=Vvpmh33MSZJftCENEjzJH3yBbxD4B40Pl6IBIumiVX4,505
+ basic_memory/cli/app.py,sha256=NG6gs_UzyXBiQLHbiZRZlew3nb7G7i_8gwPh1383EnA,450
+ basic_memory/cli/main.py,sha256=_x9Tvjv5Xl26Bhn6dO2A2-5yu5ckiLiPZr0yFeDYB2w,611
  basic_memory/cli/commands/__init__.py,sha256=OQGLaKTsOdPsp2INM_pHzmOlbVfdL0sytBNgvqTqCDY,159
- basic_memory/cli/commands/db.py,sha256=I92CRufPskvHl9c90f5Eg7U7D0uIzLBiwngQuAh5cLk,772
- basic_memory/cli/commands/import_memory_json.py,sha256=ZXSRHH_3GgJzmMLvDulakKIpzsKxrZIUmEuWgJmwMOE,5138
- basic_memory/cli/commands/mcp.py,sha256=a0v54iFL01_eykODHuWIupTHCn-COm-WZGdSO5iinc0,563
- basic_memory/cli/commands/status.py,sha256=aNpP8u-ECoVTiL5MIb-D2cXXLJtv6z2z8CMCh5nt2KY,5782
- basic_memory/cli/commands/sync.py,sha256=sb6OGl9IVZLmGfHUm0-aexD365BRTaHJhpwqt0O5yxk,7035
+ basic_memory/cli/commands/db.py,sha256=XW2ujzas5j2Gf01NOPQI89L4NK-21GksO_OIekKxv6c,770
+ basic_memory/cli/commands/import_chatgpt.py,sha256=zzp0I3vu5zekYlvBf7YzPTfNq9SumULwwL-Ky5rEjA4,8133
+ basic_memory/cli/commands/import_claude_conversations.py,sha256=Ba97fH5yfW642yrkxay3YkyDdgIYCeru-MUIZfEGblo,6812
+ basic_memory/cli/commands/import_claude_projects.py,sha256=euht03ydbI6c5IO_VeArlk9YUYMXNZGXekaa7uG8i7g,6635
+ basic_memory/cli/commands/import_memory_json.py,sha256=zqpU4eCzQXx04aRsigddJAyhvklmTgSAzeRTuEdNw0c,5194
+ basic_memory/cli/commands/mcp.py,sha256=BPdThcufdriIvrDskc87a0oCC1BkZ0PZsgNao_-oNKk,611
+ basic_memory/cli/commands/status.py,sha256=G7aAdbCuiFe38VFxMTcAfY2DKqol3WIQxYa491ui4yM,5728
+ basic_memory/cli/commands/sync.py,sha256=LpoEPstcguhRPT2bwsbAI3ypiY0cDoNn_gxUmLpX21Q,6827
  basic_memory/markdown/__init__.py,sha256=DdzioCWtDnKaq05BHYLgL_78FawEHLpLXnp-kPSVfIc,501
  basic_memory/markdown/entity_parser.py,sha256=sJk8TRUd9cAaIjATiJn7dBQRorrYngRbd7MRVfc0Oc4,3781
  basic_memory/markdown/markdown_processor.py,sha256=mV3pYoDTaQMEl1tA5n_XztBvNlYyH2SzKs4vnKdAet4,4952
@@ -32,12 +35,12 @@ basic_memory/markdown/plugins.py,sha256=gtIzKRjoZsyvBqLpVNnrmzl_cbTZ5ZGn8kcuXxQj
  basic_memory/markdown/schemas.py,sha256=mzVEDUhH98kwETMknjkKw5H697vg_zUapsJkJVi17ho,1894
  basic_memory/markdown/utils.py,sha256=ZtHa-dG--ZwFEUC3jfl04KZGhM_ZWo5b-8d8KpJ90gY,2758
  basic_memory/mcp/__init__.py,sha256=dsDOhKqjYeIbCULbHIxfcItTbqudEuEg1Np86eq0GEQ,35
- basic_memory/mcp/async_client.py,sha256=Eo345wANiBRSM4u3j_Vd6Ax4YtMg7qbWd9PIoFfj61I,236
+ basic_memory/mcp/async_client.py,sha256=vMN5nApPA428Oz4Siq2mNTiBjTcM5A5OSZTnX7_sDxE,234
  basic_memory/mcp/server.py,sha256=L92Vit7llaKT9NlPZfxdp67C33niObmRH2QFyUhmnD0,355
  basic_memory/mcp/tools/__init__.py,sha256=MHZmWw016N0qbtC3f186Jg1tPzh2g88_ZsCKJ0oyrrs,873
  basic_memory/mcp/tools/knowledge.py,sha256=2U8YUKCizsAETHCC1mBVKMfCEef6tlc_pa2wOmA9mD4,2016
  basic_memory/mcp/tools/memory.py,sha256=gl4MBm9l2lMOfu_xmUqjoZacWSIHOAYZiAm8z7oDuY8,5203
- basic_memory/mcp/tools/notes.py,sha256=4GKnhDK53UkeZtpZENQ9id9XdemKxLzGwMQJeuX-Kok,3772
+ basic_memory/mcp/tools/notes.py,sha256=pe7n0f0_nrkjnq6E4PCr7L8oOvzMnQgthfJNy9Vr3DE,3905
  basic_memory/mcp/tools/search.py,sha256=tx6aIuB2FWmmrvzu3RHSQvszlk-zHcwrWhkLLHWjuZc,1105
  basic_memory/mcp/tools/utils.py,sha256=icm-Xyqw3GxooGYkXqjEjoZvIGy_Z3CPw-uUYBxR_YQ,4831
  basic_memory/models/__init__.py,sha256=Bf0xXV_ryndogvZDiVM_Wb6iV2fHUxYNGMZNWNcZi0s,307
@@ -69,10 +72,10 @@ basic_memory/services/service.py,sha256=V-d_8gOV07zGIQDpL-Ksqs3ZN9l3qf3HZOK1f_YN
  basic_memory/sync/__init__.py,sha256=ko0xLQv1S5U7sAOmIP2XKl03akVPzoY-a9m3TFPcMh4,193
  basic_memory/sync/file_change_scanner.py,sha256=4whJej6t9sxwUp1ox93efJ0bBHSnAr6STpk_PsKU6to,5784
  basic_memory/sync/sync_service.py,sha256=nAOX4N90lbpRJeq5tRR_7PYptIoWwhXMUljE7yrneF4,7087
- basic_memory/sync/utils.py,sha256=uc7VLK34HufKyKavGwTPGU-ARfoQr_jYbjs4fsmUvuo,1233
+ basic_memory/sync/utils.py,sha256=wz1Fe7Mb_M5N9vYRQnDKGODiMGcj5MEK16KVJ3eoQ9g,1191
  basic_memory/sync/watch_service.py,sha256=CtKBrP1imI3ZSEgJl7Ffi-JZ_oDGKrhiyGgs41h5QYI,7563
- basic_memory-0.2.21.dist-info/METADATA,sha256=QaWTyvDXjBu1VPbqUJcbncos7QQEsy6MGTFVTyyIkQQ,7540
- basic_memory-0.2.21.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- basic_memory-0.2.21.dist-info/entry_points.txt,sha256=IDQa_VmVTzmvMrpnjhEfM0S3F--XsVGEj3MpdJfuo-Q,59
- basic_memory-0.2.21.dist-info/licenses/LICENSE,sha256=hIahDEOTzuHCU5J2nd07LWwkLW7Hko4UFO__ffsvB-8,34523
- basic_memory-0.2.21.dist-info/RECORD,,
+ basic_memory-0.4.0.dist-info/METADATA,sha256=l3T2eBVksZjQjczYOgl6M4WQrt5t4NKOJjzvTuoBrEM,10809
+ basic_memory-0.4.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ basic_memory-0.4.0.dist-info/entry_points.txt,sha256=IDQa_VmVTzmvMrpnjhEfM0S3F--XsVGEj3MpdJfuo-Q,59
+ basic_memory-0.4.0.dist-info/licenses/LICENSE,sha256=hIahDEOTzuHCU5J2nd07LWwkLW7Hko4UFO__ffsvB-8,34523
+ basic_memory-0.4.0.dist-info/RECORD,,