basic-memory 0.2.12__py3-none-any.whl → 0.16.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of basic-memory might be problematic. Click here for more details.
- basic_memory/__init__.py +5 -1
- basic_memory/alembic/alembic.ini +119 -0
- basic_memory/alembic/env.py +27 -3
- basic_memory/alembic/migrations.py +4 -9
- basic_memory/alembic/versions/502b60eaa905_remove_required_from_entity_permalink.py +51 -0
- basic_memory/alembic/versions/5fe1ab1ccebe_add_projects_table.py +108 -0
- basic_memory/alembic/versions/647e7a75e2cd_project_constraint_fix.py +104 -0
- basic_memory/alembic/versions/9d9c1cb7d8f5_add_mtime_and_size_columns_to_entity_.py +49 -0
- basic_memory/alembic/versions/a1b2c3d4e5f6_fix_project_foreign_keys.py +49 -0
- basic_memory/alembic/versions/b3c3938bacdb_relation_to_name_unique_index.py +44 -0
- basic_memory/alembic/versions/cc7172b46608_update_search_index_schema.py +100 -0
- basic_memory/alembic/versions/e7e1f4367280_add_scan_watermark_tracking_to_project.py +37 -0
- basic_memory/api/app.py +63 -31
- basic_memory/api/routers/__init__.py +4 -1
- basic_memory/api/routers/directory_router.py +84 -0
- basic_memory/api/routers/importer_router.py +152 -0
- basic_memory/api/routers/knowledge_router.py +165 -28
- basic_memory/api/routers/management_router.py +80 -0
- basic_memory/api/routers/memory_router.py +28 -67
- basic_memory/api/routers/project_router.py +406 -0
- basic_memory/api/routers/prompt_router.py +260 -0
- basic_memory/api/routers/resource_router.py +219 -14
- basic_memory/api/routers/search_router.py +21 -13
- basic_memory/api/routers/utils.py +130 -0
- basic_memory/api/template_loader.py +292 -0
- basic_memory/cli/app.py +52 -1
- basic_memory/cli/auth.py +277 -0
- basic_memory/cli/commands/__init__.py +13 -2
- basic_memory/cli/commands/cloud/__init__.py +6 -0
- basic_memory/cli/commands/cloud/api_client.py +112 -0
- basic_memory/cli/commands/cloud/bisync_commands.py +110 -0
- basic_memory/cli/commands/cloud/cloud_utils.py +101 -0
- basic_memory/cli/commands/cloud/core_commands.py +195 -0
- basic_memory/cli/commands/cloud/rclone_commands.py +301 -0
- basic_memory/cli/commands/cloud/rclone_config.py +110 -0
- basic_memory/cli/commands/cloud/rclone_installer.py +249 -0
- basic_memory/cli/commands/cloud/upload.py +233 -0
- basic_memory/cli/commands/cloud/upload_command.py +124 -0
- basic_memory/cli/commands/command_utils.py +51 -0
- basic_memory/cli/commands/db.py +26 -7
- basic_memory/cli/commands/import_chatgpt.py +83 -0
- basic_memory/cli/commands/import_claude_conversations.py +86 -0
- basic_memory/cli/commands/import_claude_projects.py +85 -0
- basic_memory/cli/commands/import_memory_json.py +35 -92
- basic_memory/cli/commands/mcp.py +84 -10
- basic_memory/cli/commands/project.py +876 -0
- basic_memory/cli/commands/status.py +47 -30
- basic_memory/cli/commands/tool.py +341 -0
- basic_memory/cli/main.py +13 -6
- basic_memory/config.py +481 -22
- basic_memory/db.py +192 -32
- basic_memory/deps.py +252 -22
- basic_memory/file_utils.py +113 -58
- basic_memory/ignore_utils.py +297 -0
- basic_memory/importers/__init__.py +27 -0
- basic_memory/importers/base.py +79 -0
- basic_memory/importers/chatgpt_importer.py +232 -0
- basic_memory/importers/claude_conversations_importer.py +177 -0
- basic_memory/importers/claude_projects_importer.py +148 -0
- basic_memory/importers/memory_json_importer.py +108 -0
- basic_memory/importers/utils.py +58 -0
- basic_memory/markdown/entity_parser.py +143 -23
- basic_memory/markdown/markdown_processor.py +3 -3
- basic_memory/markdown/plugins.py +39 -21
- basic_memory/markdown/schemas.py +1 -1
- basic_memory/markdown/utils.py +28 -13
- basic_memory/mcp/async_client.py +134 -4
- basic_memory/mcp/project_context.py +141 -0
- basic_memory/mcp/prompts/__init__.py +19 -0
- basic_memory/mcp/prompts/ai_assistant_guide.py +70 -0
- basic_memory/mcp/prompts/continue_conversation.py +62 -0
- basic_memory/mcp/prompts/recent_activity.py +188 -0
- basic_memory/mcp/prompts/search.py +57 -0
- basic_memory/mcp/prompts/utils.py +162 -0
- basic_memory/mcp/resources/ai_assistant_guide.md +283 -0
- basic_memory/mcp/resources/project_info.py +71 -0
- basic_memory/mcp/server.py +7 -13
- basic_memory/mcp/tools/__init__.py +33 -21
- basic_memory/mcp/tools/build_context.py +120 -0
- basic_memory/mcp/tools/canvas.py +130 -0
- basic_memory/mcp/tools/chatgpt_tools.py +187 -0
- basic_memory/mcp/tools/delete_note.py +225 -0
- basic_memory/mcp/tools/edit_note.py +320 -0
- basic_memory/mcp/tools/list_directory.py +167 -0
- basic_memory/mcp/tools/move_note.py +545 -0
- basic_memory/mcp/tools/project_management.py +200 -0
- basic_memory/mcp/tools/read_content.py +271 -0
- basic_memory/mcp/tools/read_note.py +255 -0
- basic_memory/mcp/tools/recent_activity.py +534 -0
- basic_memory/mcp/tools/search.py +369 -14
- basic_memory/mcp/tools/utils.py +374 -16
- basic_memory/mcp/tools/view_note.py +77 -0
- basic_memory/mcp/tools/write_note.py +207 -0
- basic_memory/models/__init__.py +3 -2
- basic_memory/models/knowledge.py +67 -15
- basic_memory/models/project.py +87 -0
- basic_memory/models/search.py +10 -6
- basic_memory/repository/__init__.py +2 -0
- basic_memory/repository/entity_repository.py +229 -7
- basic_memory/repository/observation_repository.py +35 -3
- basic_memory/repository/project_info_repository.py +10 -0
- basic_memory/repository/project_repository.py +103 -0
- basic_memory/repository/relation_repository.py +21 -2
- basic_memory/repository/repository.py +147 -29
- basic_memory/repository/search_repository.py +437 -59
- basic_memory/schemas/__init__.py +22 -9
- basic_memory/schemas/base.py +97 -8
- basic_memory/schemas/cloud.py +50 -0
- basic_memory/schemas/directory.py +30 -0
- basic_memory/schemas/importer.py +35 -0
- basic_memory/schemas/memory.py +188 -23
- basic_memory/schemas/project_info.py +211 -0
- basic_memory/schemas/prompt.py +90 -0
- basic_memory/schemas/request.py +57 -3
- basic_memory/schemas/response.py +9 -1
- basic_memory/schemas/search.py +33 -35
- basic_memory/schemas/sync_report.py +72 -0
- basic_memory/services/__init__.py +2 -1
- basic_memory/services/context_service.py +251 -106
- basic_memory/services/directory_service.py +295 -0
- basic_memory/services/entity_service.py +595 -60
- basic_memory/services/exceptions.py +21 -0
- basic_memory/services/file_service.py +284 -30
- basic_memory/services/initialization.py +191 -0
- basic_memory/services/link_resolver.py +50 -56
- basic_memory/services/project_service.py +863 -0
- basic_memory/services/search_service.py +172 -34
- basic_memory/sync/__init__.py +3 -2
- basic_memory/sync/background_sync.py +26 -0
- basic_memory/sync/sync_service.py +1176 -96
- basic_memory/sync/watch_service.py +412 -135
- basic_memory/templates/prompts/continue_conversation.hbs +110 -0
- basic_memory/templates/prompts/search.hbs +101 -0
- basic_memory/utils.py +388 -28
- basic_memory-0.16.1.dist-info/METADATA +493 -0
- basic_memory-0.16.1.dist-info/RECORD +148 -0
- {basic_memory-0.2.12.dist-info → basic_memory-0.16.1.dist-info}/entry_points.txt +1 -0
- basic_memory/alembic/README +0 -1
- basic_memory/cli/commands/sync.py +0 -203
- basic_memory/mcp/tools/knowledge.py +0 -56
- basic_memory/mcp/tools/memory.py +0 -151
- basic_memory/mcp/tools/notes.py +0 -122
- basic_memory/schemas/discovery.py +0 -28
- basic_memory/sync/file_change_scanner.py +0 -158
- basic_memory/sync/utils.py +0 -34
- basic_memory-0.2.12.dist-info/METADATA +0 -291
- basic_memory-0.2.12.dist-info/RECORD +0 -78
- {basic_memory-0.2.12.dist-info → basic_memory-0.16.1.dist-info}/WHEEL +0 -0
- {basic_memory-0.2.12.dist-info → basic_memory-0.16.1.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,233 @@
|
|
|
1
|
+
"""WebDAV upload functionality for basic-memory projects."""
|
|
2
|
+
|
|
3
|
+
import os
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
|
|
6
|
+
import aiofiles
|
|
7
|
+
import httpx
|
|
8
|
+
|
|
9
|
+
from basic_memory.ignore_utils import load_gitignore_patterns, should_ignore_path
|
|
10
|
+
from basic_memory.mcp.async_client import get_client
|
|
11
|
+
from basic_memory.mcp.tools.utils import call_put
|
|
12
|
+
|
|
13
|
+
# Archive file extensions that should be skipped during upload
|
|
14
|
+
ARCHIVE_EXTENSIONS = {".zip", ".tar", ".gz", ".bz2", ".xz", ".7z", ".rar", ".tgz", ".tbz2"}
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
async def upload_path(
    local_path: Path,
    project_name: str,
    verbose: bool = False,
    use_gitignore: bool = True,
    dry_run: bool = False,
) -> bool:
    """
    Upload a file or directory to cloud project via WebDAV.

    Args:
        local_path: Path to local file or directory
        project_name: Name of cloud project (destination)
        verbose: Show detailed information about filtering and upload
        use_gitignore: If False, skip .gitignore patterns (still use .bmignore)
        dry_run: If True, show what would be uploaded without uploading

    Returns:
        True if upload succeeded, False otherwise
    """
    try:
        # Resolve path
        local_path = local_path.resolve()

        # Check if path exists
        if not local_path.exists():
            print(f"Error: Path does not exist: {local_path}")
            return False

        # Get files to upload
        if local_path.is_file():
            files_to_upload = [(local_path, local_path.name)]
            if verbose:
                print(f"Uploading single file: {local_path.name}")
        else:
            files_to_upload = _get_files_to_upload(local_path, verbose, use_gitignore)

        if not files_to_upload:
            print("No files found to upload")
            if verbose:
                print(
                    "\nTip: Use --verbose to see which files are being filtered, "
                    "or --no-gitignore to skip .gitignore patterns"
                )
            return True

        print(f"Found {len(files_to_upload)} file(s) to upload")

        # Total size counts only files that will actually be transferred.
        # Archives are excluded here so the final summary matches what is
        # (or would be) uploaded, instead of including skipped files.
        total_bytes = sum(
            file_path.stat().st_size
            for file_path, _ in files_to_upload
            if not _is_archive_file(file_path)
        )
        skipped_count = 0

        # If dry run, just show what would be uploaded
        if dry_run:
            print("\nFiles that would be uploaded:")
            for file_path, relative_path in files_to_upload:
                # Skip archive files
                if _is_archive_file(file_path):
                    print(f" [SKIP] {relative_path} (archive file)")
                    skipped_count += 1
                    continue

                print(f" {relative_path} ({_format_size(file_path.stat().st_size)})")
        else:
            # Upload files using httpx
            async with get_client() as client:
                for i, (file_path, relative_path) in enumerate(files_to_upload, 1):
                    # Skip archive files (zip, tar, gz, etc.)
                    if _is_archive_file(file_path):
                        print(
                            f"Skipping archive file: {relative_path} ({i}/{len(files_to_upload)})"
                        )
                        skipped_count += 1
                        continue

                    # Build remote path: /webdav/{project_name}/{relative_path}
                    remote_path = f"/webdav/{project_name}/{relative_path}"
                    print(f"Uploading {relative_path} ({i}/{len(files_to_upload)})")

                    # Get file modification time
                    mtime = int(file_path.stat().st_mtime)

                    # Read file content asynchronously
                    async with aiofiles.open(file_path, "rb") as f:
                        content = await f.read()

                    # Upload via HTTP PUT to WebDAV endpoint with mtime header
                    # Using X-OC-Mtime (ownCloud/Nextcloud standard)
                    response = await call_put(
                        client, remote_path, content=content, headers={"X-OC-Mtime": str(mtime)}
                    )
                    response.raise_for_status()

        size_str = _format_size(total_bytes)
        uploaded_count = len(files_to_upload) - skipped_count
        if dry_run:
            print(f"\nTotal: {uploaded_count} file(s) ({size_str})")
            if skipped_count > 0:
                print(f" Would skip {skipped_count} archive file(s)")
        else:
            print(f"✓ Upload complete: {uploaded_count} file(s) ({size_str})")
            if skipped_count > 0:
                print(f" Skipped {skipped_count} archive file(s)")

        return True

    except httpx.HTTPStatusError as e:
        print(f"Upload failed: HTTP {e.response.status_code} - {e.response.text}")
        return False
    except Exception as e:
        print(f"Upload failed: {e}")
        return False


def _format_size(num_bytes: int) -> str:
    """Format a byte count as a human-readable bytes/KB/MB string."""
    if num_bytes < 1024:
        return f"{num_bytes} bytes"
    if num_bytes < 1024 * 1024:
        return f"{num_bytes / 1024:.1f} KB"
    return f"{num_bytes / (1024 * 1024):.1f} MB"
|
|
144
|
+
|
|
145
|
+
|
|
146
|
+
def _is_archive_file(file_path: Path) -> bool:
    """Return True when *file_path* carries an archive extension.

    The check is case-insensitive and driven by the module-level
    ARCHIVE_EXTENSIONS set.

    Args:
        file_path: Path to the file to check

    Returns:
        True if file is an archive, False otherwise
    """
    suffix = file_path.suffix.lower()
    return suffix in ARCHIVE_EXTENSIONS
|
|
157
|
+
|
|
158
|
+
|
|
159
|
+
def _get_files_to_upload(
    directory: Path, verbose: bool = False, use_gitignore: bool = True
) -> list[tuple[Path, str]]:
    """
    Collect the files under *directory* that should be uploaded.

    Filtering honors .bmignore and, optionally, .gitignore patterns.

    Args:
        directory: Directory to scan
        verbose: Show detailed filtering information
        use_gitignore: If False, skip .gitignore patterns (still use .bmignore)

    Returns:
        List of (absolute_path, relative_path) tuples
    """
    selected: list[tuple[Path, str]] = []
    ignored: list[str] = []

    # Patterns come from .bmignore and (optionally) .gitignore.
    ignore_patterns = load_gitignore_patterns(directory, use_gitignore=use_gitignore)

    if verbose:
        has_gitignore = (directory / ".gitignore").exists() and use_gitignore
        print(f"\nScanning directory: {directory}")
        print("Using .bmignore: Yes")
        print(f"Using .gitignore: {'Yes' if has_gitignore else 'No'}")
        print(f"Ignore patterns loaded: {len(ignore_patterns)}")
        if ignore_patterns and len(ignore_patterns) <= 20:
            print(f"Patterns: {', '.join(sorted(ignore_patterns))}")
        print()

    for root, dirs, filenames in os.walk(directory):
        root_path = Path(root)

        # Prune ignored subdirectories in place so os.walk never descends
        # into them.
        kept_dirs = []
        for name in dirs:
            subdir = root_path / name
            if should_ignore_path(subdir, directory, ignore_patterns):
                if verbose:
                    print(f" [IGNORED DIR] {subdir.relative_to(directory)}/")
            else:
                kept_dirs.append(name)
        dirs[:] = kept_dirs

        for filename in filenames:
            file_path = root_path / filename

            # Relative path, normalized to forward slashes for display and
            # for use as the remote WebDAV path.
            remote_path = str(file_path.relative_to(directory)).replace("\\", "/")

            if should_ignore_path(file_path, directory, ignore_patterns):
                ignored.append(remote_path)
                if verbose:
                    print(f" [IGNORED] {remote_path}")
                continue

            if verbose:
                print(f" [INCLUDE] {remote_path}")
            selected.append((file_path, remote_path))

    if verbose:
        print("\nSummary:")
        print(f" Files to upload: {len(selected)}")
        print(f" Files ignored: {len(ignored)}")

    return selected
|
|
@@ -0,0 +1,124 @@
|
|
|
1
|
+
"""Upload CLI commands for basic-memory projects."""
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
|
|
6
|
+
import typer
|
|
7
|
+
from rich.console import Console
|
|
8
|
+
|
|
9
|
+
from basic_memory.cli.app import cloud_app
|
|
10
|
+
from basic_memory.cli.commands.cloud.cloud_utils import (
|
|
11
|
+
create_cloud_project,
|
|
12
|
+
project_exists,
|
|
13
|
+
sync_project,
|
|
14
|
+
)
|
|
15
|
+
from basic_memory.cli.commands.cloud.upload import upload_path
|
|
16
|
+
|
|
17
|
+
console = Console()
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
@cloud_app.command("upload")
def upload(
    path: Path = typer.Argument(
        ...,
        help="Path to local file or directory to upload",
        exists=True,
        readable=True,
        resolve_path=True,
    ),
    project: str = typer.Option(
        ...,
        "--project",
        "-p",
        help="Cloud project name (destination)",
    ),
    create_project: bool = typer.Option(
        False,
        "--create-project",
        "-c",
        help="Create project if it doesn't exist",
    ),
    sync: bool = typer.Option(
        True,
        "--sync/--no-sync",
        help="Sync project after upload (default: true)",
    ),
    verbose: bool = typer.Option(
        False,
        "--verbose",
        "-v",
        help="Show detailed information about file filtering and upload",
    ),
    no_gitignore: bool = typer.Option(
        False,
        "--no-gitignore",
        help="Skip .gitignore patterns (still respects .bmignore)",
    ),
    dry_run: bool = typer.Option(
        False,
        "--dry-run",
        help="Show what would be uploaded without actually uploading",
    ),
) -> None:
    """Upload local files or directories to cloud project via WebDAV.

    Examples:
        bm cloud upload ~/my-notes --project research
        bm cloud upload notes.md --project research --create-project
        bm cloud upload ~/docs --project work --no-sync
        bm cloud upload ./history --project proto --verbose
        bm cloud upload ./notes --project work --no-gitignore
        bm cloud upload ./files --project test --dry-run
    """

    async def _run() -> None:
        # Ensure the destination project exists, creating it when asked to.
        if not await project_exists(project):
            if not create_project:
                console.print(
                    f"[red]Project '{project}' does not exist.[/red]\n"
                    f"[yellow]Options:[/yellow]\n"
                    f" 1. Create it first: bm project add {project}\n"
                    f" 2. Use --create-project flag to create automatically"
                )
                raise typer.Exit(1)
            console.print(f"[blue]Creating cloud project '{project}'...[/blue]")
            try:
                await create_cloud_project(project)
            except Exception as e:
                console.print(f"[red]Failed to create project: {e}[/red]")
                raise typer.Exit(1)
            console.print(f"[green]Created project '{project}'[/green]")

        # Announce what is about to happen (real upload vs. preview).
        if dry_run:
            console.print(
                f"[yellow]DRY RUN: Showing what would be uploaded to '{project}'[/yellow]"
            )
        else:
            console.print(f"[blue]Uploading {path} to project '{project}'...[/blue]")

        ok = await upload_path(
            path, project, verbose=verbose, use_gitignore=not no_gitignore, dry_run=dry_run
        )
        if not ok:
            console.print("[red]Upload failed[/red]")
            raise typer.Exit(1)

        if dry_run:
            console.print("[yellow]DRY RUN complete - no files were uploaded[/yellow]")
            return
        console.print(f"[green]Successfully uploaded to '{project}'[/green]")

        # Sync project if requested (dry runs return above).
        # Force full scan after bisync to ensure database is up-to-date with synced files
        if sync:
            console.print(f"[blue]Syncing project '{project}'...[/blue]")
            try:
                await sync_project(project, force_full=True)
            except Exception as e:
                console.print(f"[yellow]Warning: Sync failed: {e}[/yellow]")
                console.print("[dim]Files uploaded but may not be indexed yet[/dim]")

    asyncio.run(_run())
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
"""utility functions for commands"""
|
|
2
|
+
|
|
3
|
+
from typing import Optional
|
|
4
|
+
|
|
5
|
+
from mcp.server.fastmcp.exceptions import ToolError
|
|
6
|
+
import typer
|
|
7
|
+
|
|
8
|
+
from rich.console import Console
|
|
9
|
+
|
|
10
|
+
from basic_memory.mcp.async_client import get_client
|
|
11
|
+
|
|
12
|
+
from basic_memory.mcp.tools.utils import call_post, call_get
|
|
13
|
+
from basic_memory.mcp.project_context import get_active_project
|
|
14
|
+
from basic_memory.schemas import ProjectInfoResponse
|
|
15
|
+
|
|
16
|
+
console = Console()
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
async def run_sync(project: Optional[str] = None, force_full: bool = False):
    """Run sync operation via the API's /project/sync endpoint.

    Args:
        project: Optional project name
        force_full: If True, force a full scan bypassing watermark optimization
    """
    try:
        async with get_client() as client:
            active = await get_active_project(client, project, None)
            # Append the force_full flag as a query parameter when requested.
            query = "?force_full=true" if force_full else ""
            response = await call_post(client, f"{active.project_url}/project/sync{query}")
            payload = response.json()
            console.print(f"[green]{payload['message']}[/green]")
    except (ToolError, ValueError) as e:
        console.print(f"[red]Sync failed: {e}[/red]")
        raise typer.Exit(1)
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
async def get_project_info(project: str):
    """Get project information via the API's /project/info endpoint.

    Args:
        project: Project name to look up.

    Returns:
        ProjectInfoResponse parsed from the API response.

    Raises:
        typer.Exit: If resolving the project or fetching its info fails.
    """

    try:
        async with get_client() as client:
            project_item = await get_active_project(client, project, None)
            response = await call_get(client, f"{project_item.project_url}/project/info")
            return ProjectInfoResponse.model_validate(response.json())
    except (ToolError, ValueError) as e:
        # Previously said "Sync failed" — copy/paste from run_sync; report
        # the operation that actually failed.
        console.print(f"[red]Failed to get project info: {e}[/red]")
        raise typer.Exit(1)
|
basic_memory/cli/commands/db.py
CHANGED
|
@@ -1,25 +1,44 @@
|
|
|
1
1
|
"""Database management commands."""
|
|
2
2
|
|
|
3
3
|
import asyncio
|
|
4
|
+
|
|
4
5
|
import typer
|
|
5
6
|
from loguru import logger
|
|
6
7
|
|
|
7
|
-
from basic_memory
|
|
8
|
+
from basic_memory import db
|
|
8
9
|
from basic_memory.cli.app import app
|
|
10
|
+
from basic_memory.config import ConfigManager, BasicMemoryConfig, save_basic_memory_config
|
|
9
11
|
|
|
10
12
|
|
|
11
13
|
@app.command()
def reset(
    reindex: bool = typer.Option(False, "--reindex", help="Rebuild db index from filesystem"),
):  # pragma: no cover
    """Reset database (drop all tables and recreate).

    Deletes the application database file, restores the default project
    configuration, and re-runs migrations to create a fresh empty database.
    With --reindex, also rebuilds the search index from the filesystem.
    """
    if typer.confirm("This will delete all data in your db. Are you sure?"):
        logger.info("Resetting database...")
        config_manager = ConfigManager()
        app_config = config_manager.config
        # Get database path
        db_path = app_config.app_database_path

        # Delete the database file if it exists
        if db_path.exists():
            db_path.unlink()
            logger.info(f"Database file deleted: {db_path}")

        # Reset project configuration
        config = BasicMemoryConfig()
        save_basic_memory_config(config_manager.config_file, config)
        logger.info("Project configuration reset to default")

        # Create a new empty database
        asyncio.run(db.run_migrations(app_config))
        logger.info("Database reset complete")

        if reindex:
            # Run database sync directly
            # Imported here (not at module top) to avoid a circular import
            # between the CLI command modules — TODO confirm.
            from basic_memory.cli.commands.command_utils import run_sync

            logger.info("Rebuilding search index from filesystem...")
            asyncio.run(run_sync(project=None))
|
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
"""Import command for ChatGPT conversations."""
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
import json
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import Annotated
|
|
7
|
+
|
|
8
|
+
import typer
|
|
9
|
+
from basic_memory.cli.app import import_app
|
|
10
|
+
from basic_memory.config import get_project_config
|
|
11
|
+
from basic_memory.importers import ChatGPTImporter
|
|
12
|
+
from basic_memory.markdown import EntityParser, MarkdownProcessor
|
|
13
|
+
from loguru import logger
|
|
14
|
+
from rich.console import Console
|
|
15
|
+
from rich.panel import Panel
|
|
16
|
+
|
|
17
|
+
console = Console()
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
async def get_markdown_processor() -> MarkdownProcessor:
    """Get MarkdownProcessor instance."""
    project_config = get_project_config()
    parser = EntityParser(project_config.home)
    return MarkdownProcessor(parser)
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
@import_app.command(name="chatgpt", help="Import conversations from ChatGPT JSON export.")
def import_chatgpt(
    conversations_json: Annotated[
        Path, typer.Argument(help="Path to ChatGPT conversations.json file")
    ] = Path("conversations.json"),
    folder: Annotated[
        str, typer.Option(help="The folder to place the files in.")
    ] = "conversations",
):
    """Import chat conversations from ChatGPT JSON format.

    This command will:
    1. Read the complex tree structure of messages
    2. Convert them to linear markdown conversations
    3. Save as clean, readable markdown files

    After importing, run 'basic-memory sync' to index the new files.
    """

    try:
        if not conversations_json.exists():  # pragma: no cover
            typer.echo(f"Error: File not found: {conversations_json}", err=True)
            raise typer.Exit(1)

        # Get markdown processor
        markdown_processor = asyncio.run(get_markdown_processor())
        config = get_project_config()
        # Process the file
        base_path = config.home / folder
        console.print(f"\nImporting chats from {conversations_json}...writing to {base_path}")

        # Create importer and run import
        importer = ChatGPTImporter(config.home, markdown_processor)
        with conversations_json.open("r", encoding="utf-8") as file:
            json_data = json.load(file)
        result = asyncio.run(importer.import_data(json_data, folder))

        if not result.success:  # pragma: no cover
            typer.echo(f"Error during import: {result.error_message}", err=True)
            raise typer.Exit(1)

        # Show results
        console.print(
            Panel(
                f"[green]Import complete![/green]\n\n"
                f"Imported {result.conversations} conversations\n"
                f"Containing {result.messages} messages",
                expand=False,
            )
        )

        console.print("\nRun 'basic-memory sync' to index the new files.")

    except typer.Exit:
        # typer.Exit subclasses Exception (via click's Exit/RuntimeError), so
        # without this pass-through the deliberate exits above would be caught
        # by the generic handler below and reported as a second bogus error.
        raise
    except Exception as e:
        # logger.exception records the traceback; plain error() loses it.
        logger.exception("Import failed")
        typer.echo(f"Error during import: {e}", err=True)
        raise typer.Exit(1)
|
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
"""Import command for basic-memory CLI to import chat data from conversations2.json format."""
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
import json
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import Annotated
|
|
7
|
+
|
|
8
|
+
import typer
|
|
9
|
+
from basic_memory.cli.app import claude_app
|
|
10
|
+
from basic_memory.config import get_project_config
|
|
11
|
+
from basic_memory.importers.claude_conversations_importer import ClaudeConversationsImporter
|
|
12
|
+
from basic_memory.markdown import EntityParser, MarkdownProcessor
|
|
13
|
+
from loguru import logger
|
|
14
|
+
from rich.console import Console
|
|
15
|
+
from rich.panel import Panel
|
|
16
|
+
|
|
17
|
+
console = Console()
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
async def get_markdown_processor() -> MarkdownProcessor:
    """Get MarkdownProcessor instance."""
    project_config = get_project_config()
    parser = EntityParser(project_config.home)
    return MarkdownProcessor(parser)
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
@claude_app.command(name="conversations", help="Import chat conversations from Claude.ai.")
def import_claude(
    conversations_json: Annotated[
        Path, typer.Argument(..., help="Path to conversations.json file")
    ] = Path("conversations.json"),
    folder: Annotated[
        str, typer.Option(help="The folder to place the files in.")
    ] = "conversations",
):
    """Import chat conversations from conversations2.json format.

    This command will:
    1. Read chat data and nested messages
    2. Create markdown files for each conversation
    3. Format content in clean, readable markdown

    After importing, run 'basic-memory sync' to index the new files.
    """

    config = get_project_config()
    try:
        if not conversations_json.exists():
            typer.echo(f"Error: File not found: {conversations_json}", err=True)
            raise typer.Exit(1)

        # Get markdown processor
        markdown_processor = asyncio.run(get_markdown_processor())

        # Create the importer
        importer = ClaudeConversationsImporter(config.home, markdown_processor)

        # Process the file
        base_path = config.home / folder
        console.print(f"\nImporting chats from {conversations_json}...writing to {base_path}")

        # Run the import
        with conversations_json.open("r", encoding="utf-8") as file:
            json_data = json.load(file)
        result = asyncio.run(importer.import_data(json_data, folder))

        if not result.success:  # pragma: no cover
            typer.echo(f"Error during import: {result.error_message}", err=True)
            raise typer.Exit(1)

        # Show results
        console.print(
            Panel(
                f"[green]Import complete![/green]\n\n"
                f"Imported {result.conversations} conversations\n"
                f"Containing {result.messages} messages",
                expand=False,
            )
        )

        console.print("\nRun 'basic-memory sync' to index the new files.")

    except typer.Exit:
        # typer.Exit subclasses Exception (via click's Exit/RuntimeError), so
        # without this pass-through the deliberate exits above would be caught
        # by the generic handler below and reported as a second bogus error.
        raise
    except Exception as e:
        # logger.exception records the traceback; plain error() loses it.
        logger.exception("Import failed")
        typer.echo(f"Error during import: {e}", err=True)
        raise typer.Exit(1)
|