basic-memory 0.8.0__py3-none-any.whl → 0.9.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of basic-memory might be problematic; see the registry's advisory for more details.

Files changed (75)
  1. basic_memory/__init__.py +1 -1
  2. basic_memory/alembic/migrations.py +4 -9
  3. basic_memory/alembic/versions/cc7172b46608_update_search_index_schema.py +106 -0
  4. basic_memory/api/app.py +9 -6
  5. basic_memory/api/routers/__init__.py +2 -1
  6. basic_memory/api/routers/knowledge_router.py +30 -4
  7. basic_memory/api/routers/memory_router.py +3 -2
  8. basic_memory/api/routers/project_info_router.py +275 -0
  9. basic_memory/api/routers/search_router.py +22 -4
  10. basic_memory/cli/app.py +54 -3
  11. basic_memory/cli/commands/__init__.py +15 -2
  12. basic_memory/cli/commands/db.py +9 -13
  13. basic_memory/cli/commands/import_chatgpt.py +26 -30
  14. basic_memory/cli/commands/import_claude_conversations.py +27 -29
  15. basic_memory/cli/commands/import_claude_projects.py +29 -31
  16. basic_memory/cli/commands/import_memory_json.py +26 -28
  17. basic_memory/cli/commands/mcp.py +7 -1
  18. basic_memory/cli/commands/project.py +119 -0
  19. basic_memory/cli/commands/project_info.py +167 -0
  20. basic_memory/cli/commands/status.py +7 -9
  21. basic_memory/cli/commands/sync.py +54 -9
  22. basic_memory/cli/commands/{tools.py → tool.py} +92 -19
  23. basic_memory/cli/main.py +40 -1
  24. basic_memory/config.py +155 -7
  25. basic_memory/db.py +19 -4
  26. basic_memory/deps.py +10 -3
  27. basic_memory/file_utils.py +32 -16
  28. basic_memory/markdown/utils.py +5 -0
  29. basic_memory/mcp/main.py +1 -2
  30. basic_memory/mcp/prompts/__init__.py +6 -2
  31. basic_memory/mcp/prompts/ai_assistant_guide.py +6 -8
  32. basic_memory/mcp/prompts/continue_conversation.py +65 -126
  33. basic_memory/mcp/prompts/recent_activity.py +55 -13
  34. basic_memory/mcp/prompts/search.py +72 -17
  35. basic_memory/mcp/prompts/utils.py +139 -82
  36. basic_memory/mcp/server.py +1 -1
  37. basic_memory/mcp/tools/__init__.py +11 -22
  38. basic_memory/mcp/tools/build_context.py +85 -0
  39. basic_memory/mcp/tools/canvas.py +17 -19
  40. basic_memory/mcp/tools/delete_note.py +28 -0
  41. basic_memory/mcp/tools/project_info.py +51 -0
  42. basic_memory/mcp/tools/{resource.py → read_content.py} +42 -5
  43. basic_memory/mcp/tools/read_note.py +190 -0
  44. basic_memory/mcp/tools/recent_activity.py +100 -0
  45. basic_memory/mcp/tools/search.py +56 -17
  46. basic_memory/mcp/tools/utils.py +245 -17
  47. basic_memory/mcp/tools/write_note.py +124 -0
  48. basic_memory/models/search.py +2 -1
  49. basic_memory/repository/entity_repository.py +3 -2
  50. basic_memory/repository/project_info_repository.py +9 -0
  51. basic_memory/repository/repository.py +23 -6
  52. basic_memory/repository/search_repository.py +33 -10
  53. basic_memory/schemas/__init__.py +12 -0
  54. basic_memory/schemas/memory.py +3 -2
  55. basic_memory/schemas/project_info.py +96 -0
  56. basic_memory/schemas/search.py +27 -32
  57. basic_memory/services/context_service.py +3 -3
  58. basic_memory/services/entity_service.py +8 -2
  59. basic_memory/services/file_service.py +105 -53
  60. basic_memory/services/link_resolver.py +5 -45
  61. basic_memory/services/search_service.py +45 -16
  62. basic_memory/sync/sync_service.py +274 -39
  63. basic_memory/sync/watch_service.py +160 -30
  64. basic_memory/utils.py +40 -40
  65. basic_memory-0.9.0.dist-info/METADATA +736 -0
  66. basic_memory-0.9.0.dist-info/RECORD +99 -0
  67. basic_memory/mcp/prompts/json_canvas_spec.py +0 -25
  68. basic_memory/mcp/tools/knowledge.py +0 -68
  69. basic_memory/mcp/tools/memory.py +0 -177
  70. basic_memory/mcp/tools/notes.py +0 -201
  71. basic_memory-0.8.0.dist-info/METADATA +0 -379
  72. basic_memory-0.8.0.dist-info/RECORD +0 -91
  73. {basic_memory-0.8.0.dist-info → basic_memory-0.9.0.dist-info}/WHEEL +0 -0
  74. {basic_memory-0.8.0.dist-info → basic_memory-0.9.0.dist-info}/entry_points.txt +0 -0
  75. {basic_memory-0.8.0.dist-info → basic_memory-0.9.0.dist-info}/licenses/LICENSE +0 -0
@@ -1,6 +1,5 @@
1
1
  """Watch service for Basic Memory."""
2
2
 
3
- import dataclasses
4
3
  import os
5
4
  from datetime import datetime
6
5
  from pathlib import Path
@@ -16,6 +15,8 @@ from basic_memory.config import ProjectConfig
16
15
  from basic_memory.services.file_service import FileService
17
16
  from basic_memory.sync.sync_service import SyncService
18
17
 
18
+ WATCH_STATUS_JSON = "watch-status.json"
19
+
19
20
 
20
21
  class WatchEvent(BaseModel):
21
22
  timestamp: datetime
@@ -29,8 +30,8 @@ class WatchEvent(BaseModel):
29
30
  class WatchServiceState(BaseModel):
30
31
  # Service status
31
32
  running: bool = False
32
- start_time: datetime = dataclasses.field(default_factory=datetime.now)
33
- pid: int = dataclasses.field(default_factory=os.getpid)
33
+ start_time: datetime = datetime.now() # Use directly with Pydantic model
34
+ pid: int = os.getpid() # Use directly with Pydantic model
34
35
 
35
36
  # Stats
36
37
  error_count: int = 0
@@ -41,7 +42,7 @@ class WatchServiceState(BaseModel):
41
42
  synced_files: int = 0
42
43
 
43
44
  # Recent activity
44
- recent_events: List[WatchEvent] = dataclasses.field(default_factory=list)
45
+ recent_events: List[WatchEvent] = [] # Use directly with Pydantic model
45
46
 
46
47
  def add_event(
47
48
  self,
@@ -75,16 +76,23 @@ class WatchService:
75
76
  self.file_service = file_service
76
77
  self.config = config
77
78
  self.state = WatchServiceState()
78
- self.status_path = config.home / ".basic-memory" / "watch-status.json"
79
+ self.status_path = config.home / ".basic-memory" / WATCH_STATUS_JSON
79
80
  self.status_path.parent.mkdir(parents=True, exist_ok=True)
80
81
  self.console = Console()
81
82
 
82
83
  async def run(self): # pragma: no cover
83
84
  """Watch for file changes and sync them"""
84
- logger.info("Watching for sync changes")
85
+ logger.info(
86
+ "Watch service started",
87
+ directory=str(self.config.home),
88
+ debounce_ms=self.config.sync_delay,
89
+ pid=os.getpid(),
90
+ )
91
+
85
92
  self.state.running = True
86
93
  self.state.start_time = datetime.now()
87
94
  await self.write_status()
95
+
88
96
  try:
89
97
  async for changes in awatch(
90
98
  self.config.home,
@@ -95,14 +103,23 @@ class WatchService:
95
103
  await self.handle_changes(self.config.home, changes)
96
104
 
97
105
  except Exception as e:
106
+ logger.exception("Watch service error", error=str(e), directory=str(self.config.home))
107
+
98
108
  self.state.record_error(str(e))
99
109
  await self.write_status()
100
110
  raise
111
+
101
112
  finally:
113
+ logger.info(
114
+ "Watch service stopped",
115
+ directory=str(self.config.home),
116
+ runtime_seconds=int((datetime.now() - self.state.start_time).total_seconds()),
117
+ )
118
+
102
119
  self.state.running = False
103
120
  await self.write_status()
104
121
 
105
- def filter_changes(self, change: Change, path: str) -> bool:
122
+ def filter_changes(self, change: Change, path: str) -> bool: # pragma: no cover
106
123
  """Filter to only watch non-hidden files and directories.
107
124
 
108
125
  Returns:
@@ -112,6 +129,7 @@ class WatchService:
112
129
  try:
113
130
  relative_path = Path(path).relative_to(self.config.home)
114
131
  except ValueError:
132
+ # This is a defensive check for paths outside our home directory
115
133
  return False
116
134
 
117
135
  # Skip hidden directories and files
@@ -128,12 +146,17 @@ class WatchService:
128
146
 
129
147
  async def handle_changes(self, directory: Path, changes: Set[FileChange]):
130
148
  """Process a batch of file changes"""
131
- logger.debug(f"handling {len(changes)} changes in directory: {directory} ...")
149
+ import time
150
+ from typing import List, Set
151
+
152
+ start_time = time.time()
153
+
154
+ logger.info("Processing file changes", change_count=len(changes), directory=str(directory))
132
155
 
133
156
  # Group changes by type
134
- adds = []
135
- deletes = []
136
- modifies = []
157
+ adds: List[str] = []
158
+ deletes: List[str] = []
159
+ modifies: List[str] = []
137
160
 
138
161
  for change, path in changes:
139
162
  # convert to relative path
@@ -145,25 +168,44 @@ class WatchService:
145
168
  elif change == Change.modified:
146
169
  modifies.append(relative_path)
147
170
 
171
+ logger.debug(
172
+ "Grouped file changes", added=len(adds), deleted=len(deletes), modified=len(modifies)
173
+ )
174
+
148
175
  # Track processed files to avoid duplicates
149
- processed = set()
176
+ processed: Set[str] = set()
150
177
 
151
178
  # First handle potential moves
152
179
  for added_path in adds:
153
180
  if added_path in processed:
154
181
  continue # pragma: no cover
155
182
 
183
+ # Skip directories for added paths
184
+ # We don't need to process directories, only the files inside them
185
+ # This prevents errors when trying to compute checksums or read directories as files
186
+ added_full_path = directory / added_path
187
+ if added_full_path.is_dir():
188
+ logger.debug("Skipping directory for move detection", path=added_path)
189
+ processed.add(added_path)
190
+ continue
191
+
156
192
  for deleted_path in deletes:
157
193
  if deleted_path in processed:
158
194
  continue # pragma: no cover
159
195
 
196
+ # Skip directories for deleted paths (based on entity type in db)
197
+ deleted_entity = await self.sync_service.entity_repository.get_by_file_path(
198
+ deleted_path
199
+ )
200
+ if deleted_entity is None:
201
+ # If this was a directory, it wouldn't have an entity
202
+ logger.debug("Skipping unknown path for move detection", path=deleted_path)
203
+ continue
204
+
160
205
  if added_path != deleted_path:
161
206
  # Compare checksums to detect moves
162
207
  try:
163
208
  added_checksum = await self.file_service.compute_checksum(added_path)
164
- deleted_entity = await self.sync_service.entity_repository.get_by_file_path(
165
- deleted_path
166
- )
167
209
 
168
210
  if deleted_entity and deleted_entity.checksum == added_checksum:
169
211
  await self.sync_service.handle_move(deleted_path, added_path)
@@ -172,43 +214,131 @@ class WatchService:
172
214
  action="moved",
173
215
  status="success",
174
216
  )
175
- self.console.print(
176
- f"[blue]→[/blue] Moved: {deleted_path} → {added_path}"
177
- )
217
+ self.console.print(f"[blue]→[/blue] {deleted_path} → {added_path}")
178
218
  processed.add(added_path)
179
219
  processed.add(deleted_path)
180
220
  break
181
221
  except Exception as e: # pragma: no cover
182
- logger.warning(f"Error checking for move: {e}")
222
+ logger.warning(
223
+ "Error checking for move",
224
+ old_path=deleted_path,
225
+ new_path=added_path,
226
+ error=str(e),
227
+ )
228
+
229
+ # Handle remaining changes - group them by type for concise output
230
+ moved_count = len([p for p in processed if p in deletes or p in adds])
231
+ delete_count = 0
232
+ add_count = 0
233
+ modify_count = 0
183
234
 
184
- # Handle remaining changes
235
+ # Process deletes
185
236
  for path in deletes:
186
237
  if path not in processed:
238
+ logger.debug("Processing deleted file", path=path)
187
239
  await self.sync_service.handle_delete(path)
188
240
  self.state.add_event(path=path, action="deleted", status="success")
189
- self.console.print(f"[red]✕[/red] Deleted: {path}")
241
+ self.console.print(f"[red]✕[/red] {path}")
190
242
  processed.add(path)
243
+ delete_count += 1
191
244
 
245
+ # Process adds
192
246
  for path in adds:
193
247
  if path not in processed:
194
- _, checksum = await self.sync_service.sync_file(path, new=True)
195
- self.state.add_event(path=path, action="new", status="success", checksum=checksum)
196
- self.console.print(f"[green]✓[/green] Added: {path}")
197
- processed.add(path)
248
+ # Skip directories - only process files
249
+ full_path = directory / path
250
+ if full_path.is_dir(): # pragma: no cover
251
+ logger.debug("Skipping directory", path=path)
252
+ processed.add(path)
253
+ continue
254
+
255
+ logger.debug("Processing new file", path=path)
256
+ entity, checksum = await self.sync_service.sync_file(path, new=True)
257
+ if checksum:
258
+ self.state.add_event(
259
+ path=path, action="new", status="success", checksum=checksum
260
+ )
261
+ self.console.print(f"[green]✓[/green] {path}")
262
+ logger.debug(
263
+ "Added file processed",
264
+ path=path,
265
+ entity_id=entity.id if entity else None,
266
+ checksum=checksum,
267
+ )
268
+ processed.add(path)
269
+ add_count += 1
270
+ else: # pragma: no cover
271
+ logger.warning("Error syncing new file", path=path) # pragma: no cover
272
+ self.console.print(
273
+ f"[orange]?[/orange] Error syncing: {path}"
274
+ ) # pragma: no cover
275
+
276
+ # Process modifies - detect repeats
277
+ last_modified_path = None
278
+ repeat_count = 0
198
279
 
199
280
  for path in modifies:
200
281
  if path not in processed:
201
- _, checksum = await self.sync_service.sync_file(path, new=False)
282
+ # Skip directories - only process files
283
+ full_path = directory / path
284
+ if full_path.is_dir():
285
+ logger.debug("Skipping directory", path=path)
286
+ processed.add(path)
287
+ continue
288
+
289
+ logger.debug("Processing modified file", path=path)
290
+ entity, checksum = await self.sync_service.sync_file(path, new=False)
202
291
  self.state.add_event(
203
292
  path=path, action="modified", status="success", checksum=checksum
204
293
  )
205
- self.console.print(f"[yellow]✎[/yellow] Modified: {path}")
294
+
295
+ # Check if this is a repeat of the last modified file
296
+ if path == last_modified_path: # pragma: no cover
297
+ repeat_count += 1 # pragma: no cover
298
+ # Only show a message for the first repeat
299
+ if repeat_count == 1: # pragma: no cover
300
+ self.console.print(
301
+ f"[yellow]...[/yellow] Repeated changes to {path}"
302
+ ) # pragma: no cover
303
+ else:
304
+ # New file being modified
305
+ self.console.print(f"[yellow]✎[/yellow] {path}")
306
+ last_modified_path = path
307
+ repeat_count = 0
308
+ modify_count += 1
309
+
310
+ logger.debug(
311
+ "Modified file processed",
312
+ path=path,
313
+ entity_id=entity.id if entity else None,
314
+ checksum=checksum,
315
+ )
206
316
  processed.add(path)
207
317
 
208
- # Add a divider if we processed any files
318
+ # Add a concise summary instead of a divider
209
319
  if processed:
210
- self.console.print("─" * 50, style="dim")
211
-
320
+ changes = [] # pyright: ignore
321
+ if add_count > 0:
322
+ changes.append(f"[green]{add_count} added[/green]") # pyright: ignore
323
+ if modify_count > 0:
324
+ changes.append(f"[yellow]{modify_count} modified[/yellow]") # pyright: ignore
325
+ if moved_count > 0:
326
+ changes.append(f"[blue]{moved_count} moved[/blue]") # pyright: ignore
327
+ if delete_count > 0:
328
+ changes.append(f"[red]{delete_count} deleted[/red]") # pyright: ignore
329
+
330
+ if changes:
331
+ self.console.print(f"{', '.join(changes)}", style="dim") # pyright: ignore
332
+
333
+ duration_ms = int((time.time() - start_time) * 1000)
212
334
  self.state.last_scan = datetime.now()
213
335
  self.state.synced_files += len(processed)
336
+
337
+ logger.info(
338
+ "File change processing completed",
339
+ processed_files=len(processed),
340
+ total_synced_files=self.state.synced_files,
341
+ duration_ms=duration_ms,
342
+ )
343
+
214
344
  await self.write_status()
basic_memory/utils.py CHANGED
@@ -1,25 +1,37 @@
1
1
  """Utility functions for basic-memory."""
2
2
 
3
- import logging
4
3
  import os
4
+
5
+ import logging
5
6
  import re
6
7
  import sys
7
8
  from pathlib import Path
8
- from typing import Optional, Union
9
+ from typing import Optional, Protocol, Union, runtime_checkable
9
10
 
10
11
  from loguru import logger
11
12
  from unidecode import unidecode
12
13
 
13
- import basic_memory
14
14
 
15
- import logfire
15
+ @runtime_checkable
16
+ class PathLike(Protocol):
17
+ """Protocol for objects that can be used as paths."""
18
+
19
+ def __str__(self) -> str: ...
16
20
 
17
21
 
18
- def generate_permalink(file_path: Union[Path, str]) -> str:
22
+ # In type annotations, use Union[Path, str] instead of FilePath for now
23
+ # This preserves compatibility with existing code while we migrate
24
+ FilePath = Union[Path, str]
25
+
26
+ # Disable the "Queue is full" warning
27
+ logging.getLogger("opentelemetry.sdk.metrics._internal.instrument").setLevel(logging.ERROR)
28
+
29
+
30
+ def generate_permalink(file_path: Union[Path, str, PathLike]) -> str:
19
31
  """Generate a stable permalink from a file path.
20
32
 
21
33
  Args:
22
- file_path: Original file path
34
+ file_path: Original file path (str, Path, or PathLike)
23
35
 
24
36
  Returns:
25
37
  Normalized permalink that matches validation rules. Converts spaces and underscores
@@ -73,38 +85,25 @@ def setup_logging(
73
85
  ) -> None: # pragma: no cover
74
86
  """
75
87
  Configure logging for the application.
76
- :param home_dir: the root directory for the application
77
- :param log_file: the name of the log file to write to
78
- :param app: the fastapi application instance
79
- :param console: whether to log to the console
80
- """
81
88
 
89
+ Args:
90
+ env: The environment name (dev, test, prod)
91
+ home_dir: The root directory for the application
92
+ log_file: The name of the log file to write to
93
+ log_level: The logging level to use
94
+ console: Whether to log to the console
95
+ """
82
96
  # Remove default handler and any existing handlers
83
97
  logger.remove()
84
98
 
85
- # Add file handler if we are not running tests
99
+ # Add file handler if we are not running tests and a log file is specified
86
100
  if log_file and env != "test":
87
- # enable pydantic logfire
88
- logfire.configure(
89
- code_source=logfire.CodeSource(
90
- repository="https://github.com/basicmachines-co/basic-memory",
91
- revision=basic_memory.__version__,
92
- ),
93
- environment=env,
94
- console=False,
95
- )
96
- logger.configure(handlers=[logfire.loguru_handler()])
97
-
98
- # instrument code spans
99
- logfire.instrument_sqlite3()
100
- logfire.instrument_httpx()
101
-
102
- # setup logger
101
+ # Setup file logger
103
102
  log_path = home_dir / log_file
104
103
  logger.add(
105
104
  str(log_path),
106
105
  level=log_level,
107
- rotation="100 MB",
106
+ rotation="10 MB",
108
107
  retention="10 days",
109
108
  backtrace=True,
110
109
  diagnose=True,
@@ -112,19 +111,20 @@ def setup_logging(
112
111
  colorize=False,
113
112
  )
114
113
 
114
+ # Add console logger if requested or in test mode
115
115
  if env == "test" or console:
116
- # Add stderr handler
117
116
  logger.add(sys.stderr, level=log_level, backtrace=True, diagnose=True, colorize=True)
118
117
 
119
118
  logger.info(f"ENV: '{env}' Log level: '{log_level}' Logging to {log_file}")
120
119
 
121
- # Get the logger for 'httpx'
122
- httpx_logger = logging.getLogger("httpx")
123
- # Set the logging level to WARNING to ignore INFO and DEBUG logs
124
- httpx_logger.setLevel(logging.WARNING)
125
-
126
- # turn watchfiles to WARNING
127
- logging.getLogger("watchfiles.main").setLevel(logging.WARNING)
128
-
129
- # disable open telemetry warning
130
- logging.getLogger("instrumentor").setLevel(logging.ERROR)
120
+ # Reduce noise from third-party libraries
121
+ noisy_loggers = {
122
+ # HTTP client logs
123
+ "httpx": logging.WARNING,
124
+ # File watching logs
125
+ "watchfiles.main": logging.WARNING,
126
+ }
127
+
128
+ # Set log levels for noisy loggers
129
+ for logger_name, level in noisy_loggers.items():
130
+ logging.getLogger(logger_name).setLevel(level)