basic-memory 0.7.0__py3-none-any.whl → 0.8.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of basic-memory might be problematic.

Files changed (58)
  1. basic_memory/__init__.py +1 -1
  2. basic_memory/alembic/alembic.ini +119 -0
  3. basic_memory/alembic/env.py +23 -1
  4. basic_memory/alembic/versions/502b60eaa905_remove_required_from_entity_permalink.py +51 -0
  5. basic_memory/alembic/versions/b3c3938bacdb_relation_to_name_unique_index.py +44 -0
  6. basic_memory/api/app.py +0 -4
  7. basic_memory/api/routers/knowledge_router.py +1 -1
  8. basic_memory/api/routers/memory_router.py +16 -16
  9. basic_memory/api/routers/resource_router.py +105 -4
  10. basic_memory/cli/app.py +0 -2
  11. basic_memory/cli/commands/status.py +9 -21
  12. basic_memory/cli/commands/sync.py +12 -16
  13. basic_memory/cli/commands/tools.py +36 -13
  14. basic_memory/cli/main.py +0 -1
  15. basic_memory/config.py +15 -1
  16. basic_memory/file_utils.py +6 -4
  17. basic_memory/markdown/entity_parser.py +3 -3
  18. basic_memory/mcp/async_client.py +1 -1
  19. basic_memory/mcp/main.py +25 -0
  20. basic_memory/mcp/prompts/__init__.py +15 -0
  21. basic_memory/mcp/prompts/ai_assistant_guide.py +28 -0
  22. basic_memory/mcp/prompts/continue_conversation.py +172 -0
  23. basic_memory/mcp/prompts/json_canvas_spec.py +25 -0
  24. basic_memory/mcp/prompts/recent_activity.py +46 -0
  25. basic_memory/mcp/prompts/search.py +127 -0
  26. basic_memory/mcp/prompts/utils.py +98 -0
  27. basic_memory/mcp/server.py +3 -7
  28. basic_memory/mcp/tools/__init__.py +6 -4
  29. basic_memory/mcp/tools/canvas.py +99 -0
  30. basic_memory/mcp/tools/memory.py +12 -5
  31. basic_memory/mcp/tools/notes.py +1 -2
  32. basic_memory/mcp/tools/resource.py +192 -0
  33. basic_memory/mcp/tools/utils.py +2 -1
  34. basic_memory/models/knowledge.py +27 -11
  35. basic_memory/repository/repository.py +1 -1
  36. basic_memory/repository/search_repository.py +14 -4
  37. basic_memory/schemas/__init__.py +0 -11
  38. basic_memory/schemas/base.py +4 -1
  39. basic_memory/schemas/memory.py +11 -2
  40. basic_memory/schemas/search.py +2 -1
  41. basic_memory/services/entity_service.py +19 -12
  42. basic_memory/services/file_service.py +69 -2
  43. basic_memory/services/link_resolver.py +12 -9
  44. basic_memory/services/search_service.py +56 -12
  45. basic_memory/sync/__init__.py +3 -2
  46. basic_memory/sync/sync_service.py +294 -123
  47. basic_memory/sync/watch_service.py +125 -129
  48. basic_memory/utils.py +24 -9
  49. {basic_memory-0.7.0.dist-info → basic_memory-0.8.0.dist-info}/METADATA +2 -1
  50. basic_memory-0.8.0.dist-info/RECORD +91 -0
  51. basic_memory/alembic/README +0 -1
  52. basic_memory/schemas/discovery.py +0 -28
  53. basic_memory/sync/file_change_scanner.py +0 -158
  54. basic_memory/sync/utils.py +0 -31
  55. basic_memory-0.7.0.dist-info/RECORD +0 -82
  56. {basic_memory-0.7.0.dist-info → basic_memory-0.8.0.dist-info}/WHEEL +0 -0
  57. {basic_memory-0.7.0.dist-info → basic_memory-0.8.0.dist-info}/entry_points.txt +0 -0
  58. {basic_memory-0.7.0.dist-info → basic_memory-0.8.0.dist-info}/licenses/LICENSE +0 -0
basic_memory/sync/watch_service.py CHANGED
@@ -1,22 +1,20 @@
 """Watch service for Basic Memory."""
 
 import dataclasses
-
-from loguru import logger
-from pydantic import BaseModel
+import os
 from datetime import datetime
 from pathlib import Path
-from typing import List, Optional
+from typing import List, Optional, Set
 
+from loguru import logger
+from pydantic import BaseModel
 from rich.console import Console
-from rich.live import Live
-from rich.table import Table
-from watchfiles import awatch, Change
-import os
+from watchfiles import awatch
+from watchfiles.main import FileChange, Change
 
 from basic_memory.config import ProjectConfig
-from basic_memory.sync.sync_service import SyncService
 from basic_memory.services.file_service import FileService
+from basic_memory.sync.sync_service import SyncService
 
 
 class WatchEvent(BaseModel):
@@ -81,138 +79,136 @@ class WatchService:
         self.status_path.parent.mkdir(parents=True, exist_ok=True)
         self.console = Console()
 
-    def generate_table(self) -> Table:
-        """Generate status display table"""
-        table = Table()
-
-        # Add status row
-        table.add_column("Status", style="cyan")
-        table.add_column("Last Scan", style="cyan")
-        table.add_column("Files", style="cyan")
-        table.add_column("Errors", style="red")
-
-        # Add main status row
-        table.add_row(
-            "✓ Running" if self.state.running else "✗ Stopped",
-            self.state.last_scan.strftime("%H:%M:%S") if self.state.last_scan else "-",
-            str(self.state.synced_files),
-            f"{self.state.error_count} ({self.state.last_error.strftime('%H:%M:%S') if self.state.last_error else 'none'})",
-        )
-
-        if self.state.recent_events:
-            # Add recent events
-            table.add_section()
-            table.add_row("Recent Events", "", "", "")
-
-            for event in self.state.recent_events[:5]:  # Show last 5 events
-                color = {
-                    "new": "green",
-                    "modified": "yellow",
-                    "moved": "blue",
-                    "deleted": "red",
-                    "error": "red",
-                }.get(event.action, "white")
-
-                icon = {
-                    "new": "✚",
-                    "modified": "✎",
-                    "moved": "→",
-                    "deleted": "✖",
-                    "error": "!",
-                }.get(event.action, "*")
-
-                table.add_row(
-                    f"[{color}]{icon} {event.action}[/{color}]",
-                    event.timestamp.strftime("%H:%M:%S"),
-                    f"[{color}]{event.path}[/{color}]",
-                    f"[dim]{event.checksum[:8] if event.checksum else ''}[/dim]",
-                )
-
-        return table
-
-    async def run(self, console_status: bool = False):  # pragma: no cover
+    async def run(self):  # pragma: no cover
         """Watch for file changes and sync them"""
         logger.info("Watching for sync changes")
         self.state.running = True
         self.state.start_time = datetime.now()
         await self.write_status()
+        try:
+            async for changes in awatch(
+                self.config.home,
+                debounce=self.config.sync_delay,
+                watch_filter=self.filter_changes,
+                recursive=True,
+            ):
+                await self.handle_changes(self.config.home, changes)
+
+        except Exception as e:
+            self.state.record_error(str(e))
+            await self.write_status()
+            raise
+        finally:
+            self.state.running = False
+            await self.write_status()
+
+    def filter_changes(self, change: Change, path: str) -> bool:
+        """Filter to only watch non-hidden files and directories.
+
+        Returns:
+            True if the file should be watched, False if it should be ignored
+        """
+        # Skip if path is invalid
+        try:
+            relative_path = Path(path).relative_to(self.config.home)
+        except ValueError:
+            return False
+
+        # Skip hidden directories and files
+        path_parts = relative_path.parts
+        for part in path_parts:
+            if part.startswith("."):
+                return False
 
-        if console_status:
-            with Live(self.generate_table(), refresh_per_second=4, console=self.console) as live:
-                try:
-                    async for changes in awatch(
-                        self.config.home,
-                        watch_filter=self.filter_changes,
-                        debounce=self.config.sync_delay,
-                        recursive=True,
-                    ):
-                        # Process changes
-                        await self.handle_changes(self.config.home)
-                        # Update display
-                        live.update(self.generate_table())
-
-                except Exception as e:
-                    self.state.record_error(str(e))
-                    await self.write_status()
-                    raise
-                finally:
-                    self.state.running = False
-                    await self.write_status()
-
-        else:
-            try:
-                async for changes in awatch(
-                    self.config.home,
-                    watch_filter=self.filter_changes,
-                    debounce=self.config.sync_delay,
-                    recursive=True,
-                ):
-                    # Process changes
-                    await self.handle_changes(self.config.home)
-                    # Update display
-
-            except Exception as e:
-                self.state.record_error(str(e))
-                await self.write_status()
-                raise
-            finally:
-                self.state.running = False
-                await self.write_status()
+        return True
 
     async def write_status(self):
         """Write current state to status file"""
         self.status_path.write_text(WatchServiceState.model_dump_json(self.state, indent=2))
 
-    def filter_changes(self, change: Change, path: str) -> bool:
-        """Filter to only watch markdown files"""
-        return path.endswith(".md") and not Path(path).name.startswith(".")
-
-    async def handle_changes(self, directory: Path):
+    async def handle_changes(self, directory: Path, changes: Set[FileChange]):
         """Process a batch of file changes"""
+        logger.debug(f"handling {len(changes)} changes in directory: {directory} ...")
+
+        # Group changes by type
+        adds = []
+        deletes = []
+        modifies = []
+
+        for change, path in changes:
+            # convert to relative path
+            relative_path = str(Path(path).relative_to(directory))
+            if change == Change.added:
+                adds.append(relative_path)
+            elif change == Change.deleted:
+                deletes.append(relative_path)
+            elif change == Change.modified:
+                modifies.append(relative_path)
+
+        # Track processed files to avoid duplicates
+        processed = set()
+
+        # First handle potential moves
+        for added_path in adds:
+            if added_path in processed:
+                continue  # pragma: no cover
+
+            for deleted_path in deletes:
+                if deleted_path in processed:
+                    continue  # pragma: no cover
+
+                if added_path != deleted_path:
+                    # Compare checksums to detect moves
+                    try:
+                        added_checksum = await self.file_service.compute_checksum(added_path)
+                        deleted_entity = await self.sync_service.entity_repository.get_by_file_path(
+                            deleted_path
+                        )
+
+                        if deleted_entity and deleted_entity.checksum == added_checksum:
+                            await self.sync_service.handle_move(deleted_path, added_path)
+                            self.state.add_event(
+                                path=f"{deleted_path} -> {added_path}",
+                                action="moved",
+                                status="success",
+                            )
+                            self.console.print(
+                                f"[blue]→[/blue] Moved: {deleted_path} → {added_path}"
+                            )
+                            processed.add(added_path)
+                            processed.add(deleted_path)
+                            break
+                    except Exception as e:  # pragma: no cover
+                        logger.warning(f"Error checking for move: {e}")
+
+        # Handle remaining changes
+        for path in deletes:
+            if path not in processed:
+                await self.sync_service.handle_delete(path)
+                self.state.add_event(path=path, action="deleted", status="success")
+                self.console.print(f"[red]✕[/red] Deleted: {path}")
+                processed.add(path)
+
+        for path in adds:
+            if path not in processed:
+                _, checksum = await self.sync_service.sync_file(path, new=True)
+                self.state.add_event(path=path, action="new", status="success", checksum=checksum)
+                self.console.print(f"[green]✓[/green] Added: {path}")
+                processed.add(path)
+
+        for path in modifies:
+            if path not in processed:
+                _, checksum = await self.sync_service.sync_file(path, new=False)
+                self.state.add_event(
+                    path=path, action="modified", status="success", checksum=checksum
                )
+                self.console.print(f"[yellow]✎[/yellow] Modified: {path}")
+                processed.add(path)
 
-        logger.debug(f"handling change in directory: {directory} ...")
-        # Process changes with timeout
-        report = await self.sync_service.sync(directory)
-        self.state.last_scan = datetime.now()
-        self.state.synced_files = report.total
-
-        # Update stats
-        for path in report.new:
-            self.state.add_event(
-                path=path, action="new", status="success", checksum=report.checksums[path]
-            )
-        for path in report.modified:
-            self.state.add_event(
-                path=path, action="modified", status="success", checksum=report.checksums[path]
-            )
-        for old_path, new_path in report.moves.items():
-            self.state.add_event(
-                path=f"{old_path} -> {new_path}",
-                action="moved",
-                status="success",
-                checksum=report.checksums[new_path],
-            )
-        for path in report.deleted:
-            self.state.add_event(path=path, action="deleted", status="success")
+        # Add a divider if we processed any files
+        if processed:
+            self.console.print("─" * 50, style="dim")
 
+        self.state.last_scan = datetime.now()
+        self.state.synced_files += len(processed)
         await self.write_status()
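
For context on the rewrite above: the watcher now consumes debounced batches of change sets from watchfiles instead of re-scanning the whole directory on every wake-up. The sketch below shows that consumption pattern in isolation. It is illustrative only: the watch root and handle_batch() callback are stand-ins, while awatch, Change, watch_filter, debounce, and recursive are the real watchfiles API the new run() relies on.

# Illustrative sketch only: a standalone consumer of watchfiles change batches,
# mirroring the shape of the new WatchService.run()/filter_changes().
import asyncio
from pathlib import Path

from watchfiles import Change, awatch

WATCH_DIR = Path.home() / "basic-memory"  # stand-in for config.home


def not_hidden(change: Change, path: str) -> bool:
    # Same idea as the new filter_changes: ignore anything under a hidden
    # file or directory relative to the watch root.
    try:
        relative = Path(path).relative_to(WATCH_DIR)
    except ValueError:
        return False
    return not any(part.startswith(".") for part in relative.parts)


async def handle_batch(changes: set[tuple[Change, str]]) -> None:
    # Stand-in for WatchService.handle_changes(): each batch is a set of
    # (Change, absolute_path) tuples, already debounced by watchfiles.
    for change, path in changes:
        print(change.name, path)


async def main() -> None:
    async for changes in awatch(
        WATCH_DIR,
        watch_filter=not_hidden,
        debounce=1000,  # milliseconds, analogous to config.sync_delay
        recursive=True,
    ):
        await handle_batch(changes)


if __name__ == "__main__":
    asyncio.run(main())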
basic_memory/utils.py CHANGED
@@ -1,5 +1,6 @@
 """Utility functions for basic-memory."""
 
+import logging
 import os
 import re
 import sys
@@ -10,7 +11,6 @@ from loguru import logger
 from unidecode import unidecode
 
 import basic_memory
-from basic_memory.config import config
 
 
 import logfire
@@ -65,7 +65,11 @@ def generate_permalink(file_path: Union[Path, str]) -> str:
 
 
 def setup_logging(
-    home_dir: Path = config.home, log_file: Optional[str] = None, console: bool = True
+    env: str,
+    home_dir: Path,
+    log_file: Optional[str] = None,
+    log_level: str = "INFO",
+    console: bool = True,
 ) -> None:  # pragma: no cover
     """
     Configure logging for the application.
@@ -79,15 +83,14 @@
     logger.remove()
 
     # Add file handler if we are not running tests
-    if log_file and config.env != "test":
+    if log_file and env != "test":
         # enable pydantic logfire
         logfire.configure(
             code_source=logfire.CodeSource(
                 repository="https://github.com/basicmachines-co/basic-memory",
                 revision=basic_memory.__version__,
-                root_path="/src/basic_memory",
             ),
-            environment=config.env,
+            environment=env,
             console=False,
         )
         logger.configure(handlers=[logfire.loguru_handler()])
@@ -100,7 +103,7 @@ def setup_logging(
         log_path = home_dir / log_file
         logger.add(
            str(log_path),
-            level=config.log_level,
+            level=log_level,
            rotation="100 MB",
            retention="10 days",
            backtrace=True,
@@ -109,7 +112,19 @@
            colorize=False,
        )
 
-    # Add stderr handler
-    logger.add(sys.stderr, level=config.log_level, backtrace=True, diagnose=True, colorize=True)
+    if env == "test" or console:
+        # Add stderr handler
+        logger.add(sys.stderr, level=log_level, backtrace=True, diagnose=True, colorize=True)
 
-    logger.info(f"ENV: '{config.env}' Log level: '{config.log_level}' Logging to {log_file}")
+    logger.info(f"ENV: '{env}' Log level: '{log_level}' Logging to {log_file}")
+
+    # Get the logger for 'httpx'
+    httpx_logger = logging.getLogger("httpx")
+    # Set the logging level to WARNING to ignore INFO and DEBUG logs
+    httpx_logger.setLevel(logging.WARNING)
+
+    # turn watchfiles to WARNING
+    logging.getLogger("watchfiles.main").setLevel(logging.WARNING)
+
+    # disable open telemetry warning
+    logging.getLogger("instrumentor").setLevel(logging.ERROR)
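
One practical effect of the utils.py change: setup_logging() no longer reads the module-level config at import time, so callers now pass the environment, home directory, and log level explicitly. A hypothetical call site is sketched below; the signature comes from the diff above, but the argument values are illustrative and would normally come from the project configuration.

# Hypothetical call site for the new, config-free setup_logging() signature.
# Values below are illustrative, not basic-memory defaults.
from pathlib import Path

from basic_memory.utils import setup_logging

setup_logging(
    env="dev",
    home_dir=Path.home() / "basic-memory",
    log_file="basic-memory.log",
    log_level="DEBUG",
    console=True,
)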
{basic_memory-0.7.0.dist-info → basic_memory-0.8.0.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: basic-memory
-Version: 0.7.0
+Version: 0.8.0
 Summary: Local-first knowledge management combining Zettelkasten with knowledge graphs
 Project-URL: Homepage, https://github.com/basicmachines-co/basic-memory
 Project-URL: Repository, https://github.com/basicmachines-co/basic-memory
@@ -19,6 +19,7 @@ Requires-Dist: logfire[fastapi,httpx,sqlalchemy,sqlite3]>=3.6.0
 Requires-Dist: loguru>=0.7.3
 Requires-Dist: markdown-it-py>=3.0.0
 Requires-Dist: mcp>=1.2.0
+Requires-Dist: pillow>=11.1.0
 Requires-Dist: pydantic-settings>=2.6.1
 Requires-Dist: pydantic[email,timezone]>=2.10.3
 Requires-Dist: pyright>=1.1.390
basic_memory-0.8.0.dist-info/RECORD ADDED
@@ -0,0 +1,91 @@
+basic_memory/__init__.py,sha256=1Vkhl1i_QQpYLtQxxfrs_tJ3ZUbmJC0C4zxgmVy7C60,122
+basic_memory/config.py,sha256=kh3LC9slFHoxfIufziyqafbI2tS9NXpEgJ8y3bH1Ixk,2129
+basic_memory/db.py,sha256=EVX3pgA2rah4tZpxy5wKvZSN8vVcrvo5l-hKjAXBb0U,5284
+basic_memory/deps.py,sha256=8LkcfppQEJpiflYZxLk-SmQuL04qknbsdfzIkM_ctuY,5530
+basic_memory/file_utils.py,sha256=Kx2WVewno_SnLMnorHdiWwsZztY6esKRVirmDQSiv2w,5864
+basic_memory/utils.py,sha256=2GOo14HUcnzpft6tkSCr8QTESlS5baxfm8oZJ6vdVds,3711
+basic_memory/alembic/alembic.ini,sha256=IEZsnF8CbbZnkwBr67LzKKNobHuzTaQNUvM8Psop5xc,3733
+basic_memory/alembic/env.py,sha256=GyQpEpQu84flqAdelxR0-H9nbkHrVoCboYGfmltBDoA,2737
+basic_memory/alembic/migrations.py,sha256=CIbkMHEKZ60aDUhFGSQjv8kDNM7sazfvEYHGGcy1DBk,858
+basic_memory/alembic/script.py.mako,sha256=MEqL-2qATlST9TAOeYgscMn1uy6HUS9NFvDgl93dMj8,635
+basic_memory/alembic/versions/3dae7c7b1564_initial_schema.py,sha256=lTbWlAnd1es7xU99DoJgfaRe1_Kte8TL98riqeKGV80,4363
+basic_memory/alembic/versions/502b60eaa905_remove_required_from_entity_permalink.py,sha256=k6xYTmYPM9Ros-7CA7BwZBKYwoK_gmVdC-2n8FAjdoE,1840
+basic_memory/alembic/versions/b3c3938bacdb_relation_to_name_unique_index.py,sha256=RsGymQzfRXV1LSNKiyi0lMilTxW1NgwS9jR67ye2apI,1428
+basic_memory/api/__init__.py,sha256=wCpj-21j1D0KzKl9Ql6unLBVFY0K1uGp_FeSZRKtqpk,72
+basic_memory/api/app.py,sha256=Ma-kLPkwlHUQ1dutExIqedMiFurzgBcazusQDq7IDsE,1374
+basic_memory/api/routers/__init__.py,sha256=iviQ1QVYobC8huUuyRhEjcA0BDjrOUm1lXHXhJkxP9A,239
+basic_memory/api/routers/knowledge_router.py,sha256=2encEkZw7gKyfwa2HjFtTJh5aXhl96DFLrSB2pk-HfI,5291
+basic_memory/api/routers/memory_router.py,sha256=wGoAWm1ehWSlDYI66eUtp99b7uPJtymh2RMaZiohh0Y,5371
+basic_memory/api/routers/resource_router.py,sha256=WEJEqEaY_yTKj5-U-rW4kXQKUcJflykgwI6_g_R41ck,8058
+basic_memory/api/routers/search_router.py,sha256=WCLuAkEZ-DpwHU8RsnZg63_LDiSYbbLLlBb9Avbc2fA,1164
+basic_memory/cli/__init__.py,sha256=arcKLAWRDhPD7x5t80MlviZeYzwHZ0GZigyy3NKVoGk,33
+basic_memory/cli/app.py,sha256=EyE-qactiI4QMGXZzmYcVNtn3-16zy1jT_G-a-yx2AU,324
+basic_memory/cli/main.py,sha256=KH2JDvQNIwV7VRKnKZ2jTkMSlTECziUmk1xgmU6ezfw,433
+basic_memory/cli/commands/__init__.py,sha256=OQGLaKTsOdPsp2INM_pHzmOlbVfdL0sytBNgvqTqCDY,159
+basic_memory/cli/commands/db.py,sha256=BW3VmoTKNfzkl85m9lyrx8lkk1IRb6wuAmrPKFQxNE8,906
+basic_memory/cli/commands/import_chatgpt.py,sha256=py3q9iMlB85olQBsBcEpUt0X03hHvAmRy8iQl2dbbuc,8394
+basic_memory/cli/commands/import_claude_conversations.py,sha256=_7n-nn1tbsaarwR25QXjYxSC_K2M0cXBTzthnCZ_4-w,7030
+basic_memory/cli/commands/import_claude_projects.py,sha256=fvXu6wwlmfA2wJCcp8IoJnz3INefkVcFhqAX8KhnCtc,6851
+basic_memory/cli/commands/import_memory_json.py,sha256=cS-1rxGYUC0-QsETIbA0QqbB1Cl74YcnoJRNCkMkM-o,5395
+basic_memory/cli/commands/mcp.py,sha256=BPdThcufdriIvrDskc87a0oCC1BkZ0PZsgNao_-oNKk,611
+basic_memory/cli/commands/status.py,sha256=YEw6SA82Y8GbtTIn-h6m15zjX5bGMx5abAbu19LXVjs,5398
+basic_memory/cli/commands/sync.py,sha256=uSzGPdjbMX7PKh8iA_Oo06zLc0tixX_cV6i_JWph95I,6766
+basic_memory/cli/commands/tools.py,sha256=P5EMifFHDlaTFje97Yxp9GawoRlCOML5Dy6wBBjMnjo,6117
+basic_memory/markdown/__init__.py,sha256=DdzioCWtDnKaq05BHYLgL_78FawEHLpLXnp-kPSVfIc,501
+basic_memory/markdown/entity_parser.py,sha256=LnjG_wg38LVN8JndsZJV2UVGPIaoIV5sGs94iQ9PL6k,3781
+basic_memory/markdown/markdown_processor.py,sha256=mV3pYoDTaQMEl1tA5n_XztBvNlYyH2SzKs4vnKdAet4,4952
+basic_memory/markdown/plugins.py,sha256=gtIzKRjoZsyvBqLpVNnrmzl_cbTZ5ZGn8kcuXxQjRko,6639
+basic_memory/markdown/schemas.py,sha256=mzVEDUhH98kwETMknjkKw5H697vg_zUapsJkJVi17ho,1894
+basic_memory/markdown/utils.py,sha256=ZtHa-dG--ZwFEUC3jfl04KZGhM_ZWo5b-8d8KpJ90gY,2758
+basic_memory/mcp/__init__.py,sha256=dsDOhKqjYeIbCULbHIxfcItTbqudEuEg1Np86eq0GEQ,35
+basic_memory/mcp/async_client.py,sha256=Eo345wANiBRSM4u3j_Vd6Ax4YtMg7qbWd9PIoFfj61I,236
+basic_memory/mcp/main.py,sha256=p_zjCDSeT9EsjNmlzDfEQ7xI8Y_bBLmMG0g-y-DakTA,703
+basic_memory/mcp/server.py,sha256=vlhURihp7Ly1eVyI01J9zvYhbz7wgHkhoNLNq6KOGug,286
+basic_memory/mcp/prompts/__init__.py,sha256=BsjRBgxYjS-JXK8RqJQQd1xPEoWWgQ4Hw3BynaYxGwA,670
+basic_memory/mcp/prompts/ai_assistant_guide.py,sha256=e2DoP2Dv2HSDd00m6xGjGMWxUIqYJceJhw3RxuuJdeA,925
+basic_memory/mcp/prompts/continue_conversation.py,sha256=tXE626Qay8aY4tDQhAmvZYO8nf7tTfAMRqE24JShTvc,6666
+basic_memory/mcp/prompts/json_canvas_spec.py,sha256=yKO_9_ojF4tCEWSMuHn5YNHFkAibzPdOgluAKJ-K924,827
+basic_memory/mcp/prompts/recent_activity.py,sha256=3b9_jxPQ-c4lQH05Ig2a6k4ApWW5d9WhcArhMU0S6BM,1552
+basic_memory/mcp/prompts/search.py,sha256=1E8dHcPRCy19Emv7r4MWJ2ZYsuQNa4qRmIy2fnuWuoA,4496
+basic_memory/mcp/prompts/utils.py,sha256=cPdlJl1D5bS36hQBoBgf0RACE2ToUy-HgHukO8eRkZ4,3734
+basic_memory/mcp/tools/__init__.py,sha256=_MXdi0AJr5WB5C20WSQAwHhWOyhspyzbgWL9ijQh3to,953
+basic_memory/mcp/tools/canvas.py,sha256=wbSg-NauRmz_reNH3SUY2kLWNUKMA5kPvSoz_6ec7uw,3162
+basic_memory/mcp/tools/knowledge.py,sha256=JotFMQpMwidx0WLvOG4yWpwWwLmyp-PoO6bVjpQseYQ,2671
+basic_memory/mcp/tools/memory.py,sha256=rteQJSXIUGsJbA-AzZN1dyHKr98EELHGc-rvMXlG2JI,6474
+basic_memory/mcp/tools/notes.py,sha256=vAiQsJqJvxevYUPxVib10Jd5qvEMATeiSeu1sGe32nM,7415
+basic_memory/mcp/tools/resource.py,sha256=Imbl8exE27qPmWEZ4DXIeCdpQXSYYv_ul8whTu3d_cE,6526
+basic_memory/mcp/tools/search.py,sha256=UFPBDzfZ60SrvAgvISO3Jt6WdNwEQKsvibQdPxC7dOg,1511
+basic_memory/mcp/tools/utils.py,sha256=h_iDGduofU4GyQyyARkI4L-OJ9Rwppkx48BDSEMYsLY,4866
+basic_memory/models/__init__.py,sha256=Bf0xXV_ryndogvZDiVM_Wb6iV2fHUxYNGMZNWNcZi0s,307
+basic_memory/models/base.py,sha256=4hAXJ8CE1RnjKhb23lPd-QM7G_FXIdTowMJ9bRixspU,225
+basic_memory/models/knowledge.py,sha256=lbKd8VOOVPqXtIhNMY30bIokoQutFjLpHwLD5At90MY,6644
+basic_memory/models/search.py,sha256=IB-ySJUqlQq9FqLGfWnraIFcB_brWa9eBwsQP1rVTeI,1164
+basic_memory/repository/__init__.py,sha256=TnscLXARq2iOgQZFvQoT9X1Bn9SB_7s1xw2fOqRs3Jg,252
+basic_memory/repository/entity_repository.py,sha256=VFLymzJ1W6AZru_s1S3U6nlqSprBrVV5Toy0-qysIfw,3524
+basic_memory/repository/observation_repository.py,sha256=BOcy4wARqCXu-thYyt7mPxt2A2C8TW0le3s_X9wrK6I,1701
+basic_memory/repository/relation_repository.py,sha256=DwpTcn9z_1sZQcyMOUABz1k1VSwo_AU63x2zR7aerTk,2933
+basic_memory/repository/repository.py,sha256=X59h8JevRn9sEZz6R41NvtyXo9Gbq22mY-XxsoIEwrQ,11324
+basic_memory/repository/search_repository.py,sha256=3AOrfTihzLKiXZTGerrKyRUpITNtXyLrp_XWEVpY3h0,10503
+basic_memory/schemas/__init__.py,sha256=WXQ2okYPC-OFa2xmCWq-EP52lSuqFX9Sx-zMJUPv8to,1318
+basic_memory/schemas/base.py,sha256=dwnaI5fJXsdp81mdH0ZpmJ-WICY-0M7ZPWeW5OUgBG8,5685
+basic_memory/schemas/delete.py,sha256=UAR2JK99WMj3gP-yoGWlHD3eZEkvlTSRf8QoYIE-Wfw,1180
+basic_memory/schemas/memory.py,sha256=ap8lKHxUfM6WTiAFlObs_rZWz3sQxhiGlcvc_NVA9f8,3094
+basic_memory/schemas/request.py,sha256=58r9mPGc4Am9rR_zGzo-yqXcsrl5I6n3M5LjGK5gFFk,1626
+basic_memory/schemas/response.py,sha256=lVYR31DTtSeFRddGWX_wQWnQgyiwX0LEpNJ4f4lKpTM,6440
+basic_memory/schemas/search.py,sha256=riODPc4EWgXTM4QRBJZ9vZga9lP9lYRF7Nfbtlw_gjg,3344
+basic_memory/services/__init__.py,sha256=oop6SKmzV4_NAYt9otGnupLGVCCKIVgxEcdRQWwh25I,197
+basic_memory/services/context_service.py,sha256=y2Kd9YRPdQbJ6uWcY71z2qCZZUt8Sb2Dy52dh2OMJxo,9651
+basic_memory/services/entity_service.py,sha256=cMas6BjtWJLt3QMUsR1U21Js_vpFFKz1SMwSnXD8Suk,12133
+basic_memory/services/exceptions.py,sha256=VGlCLd4UD2w5NWKqC7QpG4jOM_hA7jKRRM-MqvEVMNk,288
+basic_memory/services/file_service.py,sha256=Z81qnaIHEQd9M9SMCQwvo5Wga3R3hfP43hEllt5KTmc,8008
+basic_memory/services/link_resolver.py,sha256=m1ycdKrU9tnVHaMn1vuEN4_2YJ5f912Us61PTB2O7QI,4708
+basic_memory/services/search_service.py,sha256=OYnv15DVUZGI3WBVwdW3VARlWR377hwb5GH2eNts400,9336
+basic_memory/services/service.py,sha256=V-d_8gOV07zGIQDpL-Ksqs3ZN9l3qf3HZOK1f_YNTag,336
+basic_memory/sync/__init__.py,sha256=CVHguYH457h2u2xoM8KvOilJC71XJlZ-qUh8lHcjYj4,156
+basic_memory/sync/sync_service.py,sha256=1lytLbJO1RxKhihv3n7zsuek4MpbJ_bc2aCcqGl7hh0,13724
+basic_memory/sync/watch_service.py,sha256=FtamKOp_aTMdTdd8fNL60Bn3VU2IjHJJh8wzp3-T5DQ,7662
+basic_memory-0.8.0.dist-info/METADATA,sha256=7z2Vt7lYowomq0uUxzNT6aHcwfz8IseOPcmYCbZcZfE,10885
+basic_memory-0.8.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+basic_memory-0.8.0.dist-info/entry_points.txt,sha256=IDQa_VmVTzmvMrpnjhEfM0S3F--XsVGEj3MpdJfuo-Q,59
+basic_memory-0.8.0.dist-info/licenses/LICENSE,sha256=hIahDEOTzuHCU5J2nd07LWwkLW7Hko4UFO__ffsvB-8,34523
+basic_memory-0.8.0.dist-info/RECORD,,
basic_memory/alembic/README DELETED
@@ -1 +0,0 @@
-Generic single-database configuration.
basic_memory/schemas/discovery.py DELETED
@@ -1,28 +0,0 @@
-"""Schemas for knowledge discovery and analytics endpoints."""
-
-from typing import List, Optional
-from pydantic import BaseModel, Field
-
-from basic_memory.schemas.response import EntityResponse
-
-
-class EntityTypeList(BaseModel):
-    """List of unique entity types in the system."""
-
-    types: List[str]
-
-
-class ObservationCategoryList(BaseModel):
-    """List of unique observation categories in the system."""
-
-    categories: List[str]
-
-
-class TypedEntityList(BaseModel):
-    """List of entities of a specific type."""
-
-    entity_type: str = Field(..., description="Type of entities in the list")
-    entities: List[EntityResponse]
-    total: int = Field(..., description="Total number of entities")
-    sort_by: Optional[str] = Field(None, description="Field used for sorting")
-    include_related: bool = Field(False, description="Whether related entities are included")
basic_memory/sync/file_change_scanner.py DELETED
@@ -1,158 +0,0 @@
-"""Service for detecting changes between filesystem and database."""
-
-from dataclasses import dataclass, field
-from pathlib import Path
-from typing import Dict, Sequence
-
-from loguru import logger
-
-from basic_memory.file_utils import compute_checksum
-from basic_memory.models import Entity
-from basic_memory.repository.entity_repository import EntityRepository
-from basic_memory.sync.utils import SyncReport
-
-
-@dataclass
-class FileState:
-    """State of a file including file path, permalink and checksum info."""
-
-    file_path: str
-    permalink: str
-    checksum: str
-
-
-@dataclass
-class ScanResult:
-    """Result of scanning a directory."""
-
-    # file_path -> checksum
-    files: Dict[str, str] = field(default_factory=dict)
-    # file_path -> error message
-    errors: Dict[str, str] = field(default_factory=dict)
-
-
-class FileChangeScanner:
-    """
-    Service for detecting changes between filesystem and database.
-    The filesystem is treated as the source of truth.
-    """
-
-    def __init__(self, entity_repository: EntityRepository):
-        self.entity_repository = entity_repository
-
-    async def scan_directory(self, directory: Path) -> ScanResult:
-        """
-        Scan directory for markdown files and their checksums.
-        Only processes .md files, logs and skips others.
-
-        Args:
-            directory: Directory to scan
-
-        Returns:
-            ScanResult containing found files and any errors
-        """
-        logger.debug(f"Scanning directory: {directory}")
-        result = ScanResult()
-
-        if not directory.exists():
-            logger.debug(f"Directory does not exist: {directory}")
-            return result
-
-        for path in directory.rglob("*"):
-            if not path.is_file() or not path.name.endswith(".md"):
-                if path.is_file():
-                    logger.debug(f"Skipping non-markdown file: {path}")
-                continue
-
-            try:
-                # Get relative path first - used in error reporting if needed
-                rel_path = str(path.relative_to(directory))
-                content = path.read_text()
-                checksum = await compute_checksum(content)
-                result.files[rel_path] = checksum
-
-            except Exception as e:
-                rel_path = str(path.relative_to(directory))
-                result.errors[rel_path] = str(e)
-                logger.error(f"Failed to read {rel_path}: {e}")
-
-        logger.debug(f"Found {len(result.files)} markdown files")
-        if result.errors:
-            logger.warning(f"Encountered {len(result.errors)} errors while scanning")
-
-        return result
-
-    async def find_changes(
-        self, directory: Path, db_file_state: Dict[str, FileState]
-    ) -> SyncReport:
-        """Find changes between filesystem and database."""
-        # Get current files and checksums
-        scan_result = await self.scan_directory(directory)
-        current_files = scan_result.files
-
-        # Build report
-        report = SyncReport(total=len(current_files))
-
-        # Track potentially moved files by checksum
-        files_by_checksum = {}  # checksum -> file_path
-
-        # First find potential new files and record checksums
-        for file_path, checksum in current_files.items():
-            logger.debug(f"{file_path} ({checksum[:8]})")
-
-            if file_path not in db_file_state:
-                # Could be new or could be the destination of a move
-                report.new.add(file_path)
-                files_by_checksum[checksum] = file_path
-            elif checksum != db_file_state[file_path].checksum:
-                report.modified.add(file_path)
-
-            report.checksums[file_path] = checksum
-
-        # Now detect moves and deletions
-        for db_file_path, db_state in db_file_state.items():
-            if db_file_path not in current_files:
-                if db_state.checksum in files_by_checksum:
-                    # Found a move - file exists at new path with same checksum
-                    new_path = files_by_checksum[db_state.checksum]
-                    report.moves[db_file_path] = new_path
-                    # Remove from new files since it's a move
-                    report.new.remove(new_path)
-                else:
-                    # Actually deleted
-                    report.deleted.add(db_file_path)
-
-        # Log summary
-        logger.debug(f"Total files: {report.total}")
-        logger.debug(f"Changes found: {report.total_changes}")
-        logger.debug(f"  New: {len(report.new)}")
-        logger.debug(f"  Modified: {len(report.modified)}")
-        logger.debug(f"  Moved: {len(report.moves)}")
-        logger.debug(f"  Deleted: {len(report.deleted)}")
-
-        if scan_result.errors:  # pragma: no cover
-            logger.warning("Files skipped due to errors:")
-            for file_path, error in scan_result.errors.items():
-                logger.warning(f"  {file_path}: {error}")
-
-        return report
-
-    async def get_db_file_state(self, db_records: Sequence[Entity]) -> Dict[str, FileState]:
-        """Get file_path and checksums from database.
-        Args:
-            db_records: database records
-        Returns:
-            Dict mapping file paths to FileState
-        :param db_records: the data from the db
-        """
-        return {
-            r.file_path: FileState(
-                file_path=r.file_path, permalink=r.permalink, checksum=r.checksum or ""
-            )
-            for r in db_records
-        }
-
-    async def find_knowledge_changes(self, directory: Path) -> SyncReport:
-        """Find changes in knowledge directory."""
-        db_file_state = await self.get_db_file_state(await self.entity_repository.find_all())
-        return await self.find_changes(directory=directory, db_file_state=db_file_state)