basic-memory 0.7.0__py3-none-any.whl → 0.9.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of basic-memory might be problematic; consult the registry's advisory details for more information.

Files changed (89)
  1. basic_memory/__init__.py +1 -1
  2. basic_memory/alembic/alembic.ini +119 -0
  3. basic_memory/alembic/env.py +23 -1
  4. basic_memory/alembic/migrations.py +4 -9
  5. basic_memory/alembic/versions/502b60eaa905_remove_required_from_entity_permalink.py +51 -0
  6. basic_memory/alembic/versions/b3c3938bacdb_relation_to_name_unique_index.py +44 -0
  7. basic_memory/alembic/versions/cc7172b46608_update_search_index_schema.py +106 -0
  8. basic_memory/api/app.py +9 -10
  9. basic_memory/api/routers/__init__.py +2 -1
  10. basic_memory/api/routers/knowledge_router.py +31 -5
  11. basic_memory/api/routers/memory_router.py +18 -17
  12. basic_memory/api/routers/project_info_router.py +275 -0
  13. basic_memory/api/routers/resource_router.py +105 -4
  14. basic_memory/api/routers/search_router.py +22 -4
  15. basic_memory/cli/app.py +54 -5
  16. basic_memory/cli/commands/__init__.py +15 -2
  17. basic_memory/cli/commands/db.py +9 -13
  18. basic_memory/cli/commands/import_chatgpt.py +26 -30
  19. basic_memory/cli/commands/import_claude_conversations.py +27 -29
  20. basic_memory/cli/commands/import_claude_projects.py +29 -31
  21. basic_memory/cli/commands/import_memory_json.py +26 -28
  22. basic_memory/cli/commands/mcp.py +7 -1
  23. basic_memory/cli/commands/project.py +119 -0
  24. basic_memory/cli/commands/project_info.py +167 -0
  25. basic_memory/cli/commands/status.py +14 -28
  26. basic_memory/cli/commands/sync.py +63 -22
  27. basic_memory/cli/commands/tool.py +253 -0
  28. basic_memory/cli/main.py +39 -1
  29. basic_memory/config.py +166 -4
  30. basic_memory/db.py +19 -4
  31. basic_memory/deps.py +10 -3
  32. basic_memory/file_utils.py +37 -19
  33. basic_memory/markdown/entity_parser.py +3 -3
  34. basic_memory/markdown/utils.py +5 -0
  35. basic_memory/mcp/async_client.py +1 -1
  36. basic_memory/mcp/main.py +24 -0
  37. basic_memory/mcp/prompts/__init__.py +19 -0
  38. basic_memory/mcp/prompts/ai_assistant_guide.py +26 -0
  39. basic_memory/mcp/prompts/continue_conversation.py +111 -0
  40. basic_memory/mcp/prompts/recent_activity.py +88 -0
  41. basic_memory/mcp/prompts/search.py +182 -0
  42. basic_memory/mcp/prompts/utils.py +155 -0
  43. basic_memory/mcp/server.py +2 -6
  44. basic_memory/mcp/tools/__init__.py +12 -21
  45. basic_memory/mcp/tools/build_context.py +85 -0
  46. basic_memory/mcp/tools/canvas.py +97 -0
  47. basic_memory/mcp/tools/delete_note.py +28 -0
  48. basic_memory/mcp/tools/project_info.py +51 -0
  49. basic_memory/mcp/tools/read_content.py +229 -0
  50. basic_memory/mcp/tools/read_note.py +190 -0
  51. basic_memory/mcp/tools/recent_activity.py +100 -0
  52. basic_memory/mcp/tools/search.py +56 -17
  53. basic_memory/mcp/tools/utils.py +245 -16
  54. basic_memory/mcp/tools/write_note.py +124 -0
  55. basic_memory/models/knowledge.py +27 -11
  56. basic_memory/models/search.py +2 -1
  57. basic_memory/repository/entity_repository.py +3 -2
  58. basic_memory/repository/project_info_repository.py +9 -0
  59. basic_memory/repository/repository.py +24 -7
  60. basic_memory/repository/search_repository.py +47 -14
  61. basic_memory/schemas/__init__.py +10 -9
  62. basic_memory/schemas/base.py +4 -1
  63. basic_memory/schemas/memory.py +14 -4
  64. basic_memory/schemas/project_info.py +96 -0
  65. basic_memory/schemas/search.py +29 -33
  66. basic_memory/services/context_service.py +3 -3
  67. basic_memory/services/entity_service.py +26 -13
  68. basic_memory/services/file_service.py +145 -26
  69. basic_memory/services/link_resolver.py +9 -46
  70. basic_memory/services/search_service.py +95 -22
  71. basic_memory/sync/__init__.py +3 -2
  72. basic_memory/sync/sync_service.py +523 -117
  73. basic_memory/sync/watch_service.py +258 -132
  74. basic_memory/utils.py +51 -36
  75. basic_memory-0.9.0.dist-info/METADATA +736 -0
  76. basic_memory-0.9.0.dist-info/RECORD +99 -0
  77. basic_memory/alembic/README +0 -1
  78. basic_memory/cli/commands/tools.py +0 -157
  79. basic_memory/mcp/tools/knowledge.py +0 -68
  80. basic_memory/mcp/tools/memory.py +0 -170
  81. basic_memory/mcp/tools/notes.py +0 -202
  82. basic_memory/schemas/discovery.py +0 -28
  83. basic_memory/sync/file_change_scanner.py +0 -158
  84. basic_memory/sync/utils.py +0 -31
  85. basic_memory-0.7.0.dist-info/METADATA +0 -378
  86. basic_memory-0.7.0.dist-info/RECORD +0 -82
  87. {basic_memory-0.7.0.dist-info → basic_memory-0.9.0.dist-info}/WHEEL +0 -0
  88. {basic_memory-0.7.0.dist-info → basic_memory-0.9.0.dist-info}/entry_points.txt +0 -0
  89. {basic_memory-0.7.0.dist-info → basic_memory-0.9.0.dist-info}/licenses/LICENSE +0 -0
@@ -1,22 +1,21 @@
1
1
  """Watch service for Basic Memory."""
2
2
 
3
- import dataclasses
4
-
5
- from loguru import logger
6
- from pydantic import BaseModel
3
+ import os
7
4
  from datetime import datetime
8
5
  from pathlib import Path
9
- from typing import List, Optional
6
+ from typing import List, Optional, Set
10
7
 
8
+ from loguru import logger
9
+ from pydantic import BaseModel
11
10
  from rich.console import Console
12
- from rich.live import Live
13
- from rich.table import Table
14
- from watchfiles import awatch, Change
15
- import os
11
+ from watchfiles import awatch
12
+ from watchfiles.main import FileChange, Change
16
13
 
17
14
  from basic_memory.config import ProjectConfig
18
- from basic_memory.sync.sync_service import SyncService
19
15
  from basic_memory.services.file_service import FileService
16
+ from basic_memory.sync.sync_service import SyncService
17
+
18
+ WATCH_STATUS_JSON = "watch-status.json"
20
19
 
21
20
 
22
21
  class WatchEvent(BaseModel):
@@ -31,8 +30,8 @@ class WatchEvent(BaseModel):
31
30
  class WatchServiceState(BaseModel):
32
31
  # Service status
33
32
  running: bool = False
34
- start_time: datetime = dataclasses.field(default_factory=datetime.now)
35
- pid: int = dataclasses.field(default_factory=os.getpid)
33
+ start_time: datetime = datetime.now() # Use directly with Pydantic model
34
+ pid: int = os.getpid() # Use directly with Pydantic model
36
35
 
37
36
  # Stats
38
37
  error_count: int = 0
@@ -43,7 +42,7 @@ class WatchServiceState(BaseModel):
43
42
  synced_files: int = 0
44
43
 
45
44
  # Recent activity
46
- recent_events: List[WatchEvent] = dataclasses.field(default_factory=list)
45
+ recent_events: List[WatchEvent] = [] # Use directly with Pydantic model
47
46
 
48
47
  def add_event(
49
48
  self,
@@ -77,142 +76,269 @@ class WatchService:
77
76
  self.file_service = file_service
78
77
  self.config = config
79
78
  self.state = WatchServiceState()
80
- self.status_path = config.home / ".basic-memory" / "watch-status.json"
79
+ self.status_path = config.home / ".basic-memory" / WATCH_STATUS_JSON
81
80
  self.status_path.parent.mkdir(parents=True, exist_ok=True)
82
81
  self.console = Console()
83
82
 
84
- def generate_table(self) -> Table:
85
- """Generate status display table"""
86
- table = Table()
87
-
88
- # Add status row
89
- table.add_column("Status", style="cyan")
90
- table.add_column("Last Scan", style="cyan")
91
- table.add_column("Files", style="cyan")
92
- table.add_column("Errors", style="red")
93
-
94
- # Add main status row
95
- table.add_row(
96
- "✓ Running" if self.state.running else "✗ Stopped",
97
- self.state.last_scan.strftime("%H:%M:%S") if self.state.last_scan else "-",
98
- str(self.state.synced_files),
99
- f"{self.state.error_count} ({self.state.last_error.strftime('%H:%M:%S') if self.state.last_error else 'none'})",
83
+ async def run(self): # pragma: no cover
84
+ """Watch for file changes and sync them"""
85
+ logger.info(
86
+ "Watch service started",
87
+ directory=str(self.config.home),
88
+ debounce_ms=self.config.sync_delay,
89
+ pid=os.getpid(),
100
90
  )
101
91
 
102
- if self.state.recent_events:
103
- # Add recent events
104
- table.add_section()
105
- table.add_row("Recent Events", "", "", "")
106
-
107
- for event in self.state.recent_events[:5]: # Show last 5 events
108
- color = {
109
- "new": "green",
110
- "modified": "yellow",
111
- "moved": "blue",
112
- "deleted": "red",
113
- "error": "red",
114
- }.get(event.action, "white")
115
-
116
- icon = {
117
- "new": "✚",
118
- "modified": "✎",
119
- "moved": "→",
120
- "deleted": "✖",
121
- "error": "!",
122
- }.get(event.action, "*")
123
-
124
- table.add_row(
125
- f"[{color}]{icon} {event.action}[/{color}]",
126
- event.timestamp.strftime("%H:%M:%S"),
127
- f"[{color}]{event.path}[/{color}]",
128
- f"[dim]{event.checksum[:8] if event.checksum else ''}[/dim]",
129
- )
130
-
131
- return table
132
-
133
- async def run(self, console_status: bool = False): # pragma: no cover
134
- """Watch for file changes and sync them"""
135
- logger.info("Watching for sync changes")
136
92
  self.state.running = True
137
93
  self.state.start_time = datetime.now()
138
94
  await self.write_status()
139
95
 
140
- if console_status:
141
- with Live(self.generate_table(), refresh_per_second=4, console=self.console) as live:
142
- try:
143
- async for changes in awatch(
144
- self.config.home,
145
- watch_filter=self.filter_changes,
146
- debounce=self.config.sync_delay,
147
- recursive=True,
148
- ):
149
- # Process changes
150
- await self.handle_changes(self.config.home)
151
- # Update display
152
- live.update(self.generate_table())
153
-
154
- except Exception as e:
155
- self.state.record_error(str(e))
156
- await self.write_status()
157
- raise
158
- finally:
159
- self.state.running = False
160
- await self.write_status()
161
-
162
- else:
163
- try:
164
- async for changes in awatch(
165
- self.config.home,
166
- watch_filter=self.filter_changes,
167
- debounce=self.config.sync_delay,
168
- recursive=True,
169
- ):
170
- # Process changes
171
- await self.handle_changes(self.config.home)
172
- # Update display
173
-
174
- except Exception as e:
175
- self.state.record_error(str(e))
176
- await self.write_status()
177
- raise
178
- finally:
179
- self.state.running = False
180
- await self.write_status()
96
+ try:
97
+ async for changes in awatch(
98
+ self.config.home,
99
+ debounce=self.config.sync_delay,
100
+ watch_filter=self.filter_changes,
101
+ recursive=True,
102
+ ):
103
+ await self.handle_changes(self.config.home, changes)
104
+
105
+ except Exception as e:
106
+ logger.exception("Watch service error", error=str(e), directory=str(self.config.home))
107
+
108
+ self.state.record_error(str(e))
109
+ await self.write_status()
110
+ raise
111
+
112
+ finally:
113
+ logger.info(
114
+ "Watch service stopped",
115
+ directory=str(self.config.home),
116
+ runtime_seconds=int((datetime.now() - self.state.start_time).total_seconds()),
117
+ )
118
+
119
+ self.state.running = False
120
+ await self.write_status()
121
+
122
+ def filter_changes(self, change: Change, path: str) -> bool: # pragma: no cover
123
+ """Filter to only watch non-hidden files and directories.
124
+
125
+ Returns:
126
+ True if the file should be watched, False if it should be ignored
127
+ """
128
+ # Skip if path is invalid
129
+ try:
130
+ relative_path = Path(path).relative_to(self.config.home)
131
+ except ValueError:
132
+ # This is a defensive check for paths outside our home directory
133
+ return False
134
+
135
+ # Skip hidden directories and files
136
+ path_parts = relative_path.parts
137
+ for part in path_parts:
138
+ if part.startswith("."):
139
+ return False
140
+
141
+ return True
181
142
 
182
143
  async def write_status(self):
183
144
  """Write current state to status file"""
184
145
  self.status_path.write_text(WatchServiceState.model_dump_json(self.state, indent=2))
185
146
 
186
- def filter_changes(self, change: Change, path: str) -> bool:
187
- """Filter to only watch markdown files"""
188
- return path.endswith(".md") and not Path(path).name.startswith(".")
189
-
190
- async def handle_changes(self, directory: Path):
147
+ async def handle_changes(self, directory: Path, changes: Set[FileChange]):
191
148
  """Process a batch of file changes"""
149
+ import time
150
+ from typing import List, Set
151
+
152
+ start_time = time.time()
153
+
154
+ logger.info("Processing file changes", change_count=len(changes), directory=str(directory))
155
+
156
+ # Group changes by type
157
+ adds: List[str] = []
158
+ deletes: List[str] = []
159
+ modifies: List[str] = []
160
+
161
+ for change, path in changes:
162
+ # convert to relative path
163
+ relative_path = str(Path(path).relative_to(directory))
164
+ if change == Change.added:
165
+ adds.append(relative_path)
166
+ elif change == Change.deleted:
167
+ deletes.append(relative_path)
168
+ elif change == Change.modified:
169
+ modifies.append(relative_path)
170
+
171
+ logger.debug(
172
+ "Grouped file changes", added=len(adds), deleted=len(deletes), modified=len(modifies)
173
+ )
192
174
 
193
- logger.debug(f"handling change in directory: {directory} ...")
194
- # Process changes with timeout
195
- report = await self.sync_service.sync(directory)
175
+ # Track processed files to avoid duplicates
176
+ processed: Set[str] = set()
177
+
178
+ # First handle potential moves
179
+ for added_path in adds:
180
+ if added_path in processed:
181
+ continue # pragma: no cover
182
+
183
+ # Skip directories for added paths
184
+ # We don't need to process directories, only the files inside them
185
+ # This prevents errors when trying to compute checksums or read directories as files
186
+ added_full_path = directory / added_path
187
+ if added_full_path.is_dir():
188
+ logger.debug("Skipping directory for move detection", path=added_path)
189
+ processed.add(added_path)
190
+ continue
191
+
192
+ for deleted_path in deletes:
193
+ if deleted_path in processed:
194
+ continue # pragma: no cover
195
+
196
+ # Skip directories for deleted paths (based on entity type in db)
197
+ deleted_entity = await self.sync_service.entity_repository.get_by_file_path(
198
+ deleted_path
199
+ )
200
+ if deleted_entity is None:
201
+ # If this was a directory, it wouldn't have an entity
202
+ logger.debug("Skipping unknown path for move detection", path=deleted_path)
203
+ continue
204
+
205
+ if added_path != deleted_path:
206
+ # Compare checksums to detect moves
207
+ try:
208
+ added_checksum = await self.file_service.compute_checksum(added_path)
209
+
210
+ if deleted_entity and deleted_entity.checksum == added_checksum:
211
+ await self.sync_service.handle_move(deleted_path, added_path)
212
+ self.state.add_event(
213
+ path=f"{deleted_path} -> {added_path}",
214
+ action="moved",
215
+ status="success",
216
+ )
217
+ self.console.print(f"[blue]→[/blue] {deleted_path} → {added_path}")
218
+ processed.add(added_path)
219
+ processed.add(deleted_path)
220
+ break
221
+ except Exception as e: # pragma: no cover
222
+ logger.warning(
223
+ "Error checking for move",
224
+ old_path=deleted_path,
225
+ new_path=added_path,
226
+ error=str(e),
227
+ )
228
+
229
+ # Handle remaining changes - group them by type for concise output
230
+ moved_count = len([p for p in processed if p in deletes or p in adds])
231
+ delete_count = 0
232
+ add_count = 0
233
+ modify_count = 0
234
+
235
+ # Process deletes
236
+ for path in deletes:
237
+ if path not in processed:
238
+ logger.debug("Processing deleted file", path=path)
239
+ await self.sync_service.handle_delete(path)
240
+ self.state.add_event(path=path, action="deleted", status="success")
241
+ self.console.print(f"[red]✕[/red] {path}")
242
+ processed.add(path)
243
+ delete_count += 1
244
+
245
+ # Process adds
246
+ for path in adds:
247
+ if path not in processed:
248
+ # Skip directories - only process files
249
+ full_path = directory / path
250
+ if full_path.is_dir(): # pragma: no cover
251
+ logger.debug("Skipping directory", path=path)
252
+ processed.add(path)
253
+ continue
254
+
255
+ logger.debug("Processing new file", path=path)
256
+ entity, checksum = await self.sync_service.sync_file(path, new=True)
257
+ if checksum:
258
+ self.state.add_event(
259
+ path=path, action="new", status="success", checksum=checksum
260
+ )
261
+ self.console.print(f"[green]✓[/green] {path}")
262
+ logger.debug(
263
+ "Added file processed",
264
+ path=path,
265
+ entity_id=entity.id if entity else None,
266
+ checksum=checksum,
267
+ )
268
+ processed.add(path)
269
+ add_count += 1
270
+ else: # pragma: no cover
271
+ logger.warning("Error syncing new file", path=path) # pragma: no cover
272
+ self.console.print(
273
+ f"[orange]?[/orange] Error syncing: {path}"
274
+ ) # pragma: no cover
275
+
276
+ # Process modifies - detect repeats
277
+ last_modified_path = None
278
+ repeat_count = 0
279
+
280
+ for path in modifies:
281
+ if path not in processed:
282
+ # Skip directories - only process files
283
+ full_path = directory / path
284
+ if full_path.is_dir():
285
+ logger.debug("Skipping directory", path=path)
286
+ processed.add(path)
287
+ continue
288
+
289
+ logger.debug("Processing modified file", path=path)
290
+ entity, checksum = await self.sync_service.sync_file(path, new=False)
291
+ self.state.add_event(
292
+ path=path, action="modified", status="success", checksum=checksum
293
+ )
294
+
295
+ # Check if this is a repeat of the last modified file
296
+ if path == last_modified_path: # pragma: no cover
297
+ repeat_count += 1 # pragma: no cover
298
+ # Only show a message for the first repeat
299
+ if repeat_count == 1: # pragma: no cover
300
+ self.console.print(
301
+ f"[yellow]...[/yellow] Repeated changes to {path}"
302
+ ) # pragma: no cover
303
+ else:
304
+ # New file being modified
305
+ self.console.print(f"[yellow]✎[/yellow] {path}")
306
+ last_modified_path = path
307
+ repeat_count = 0
308
+ modify_count += 1
309
+
310
+ logger.debug(
311
+ "Modified file processed",
312
+ path=path,
313
+ entity_id=entity.id if entity else None,
314
+ checksum=checksum,
315
+ )
316
+ processed.add(path)
317
+
318
+ # Add a concise summary instead of a divider
319
+ if processed:
320
+ changes = [] # pyright: ignore
321
+ if add_count > 0:
322
+ changes.append(f"[green]{add_count} added[/green]") # pyright: ignore
323
+ if modify_count > 0:
324
+ changes.append(f"[yellow]{modify_count} modified[/yellow]") # pyright: ignore
325
+ if moved_count > 0:
326
+ changes.append(f"[blue]{moved_count} moved[/blue]") # pyright: ignore
327
+ if delete_count > 0:
328
+ changes.append(f"[red]{delete_count} deleted[/red]") # pyright: ignore
329
+
330
+ if changes:
331
+ self.console.print(f"{', '.join(changes)}", style="dim") # pyright: ignore
332
+
333
+ duration_ms = int((time.time() - start_time) * 1000)
196
334
  self.state.last_scan = datetime.now()
197
- self.state.synced_files = report.total
335
+ self.state.synced_files += len(processed)
198
336
 
199
- # Update stats
200
- for path in report.new:
201
- self.state.add_event(
202
- path=path, action="new", status="success", checksum=report.checksums[path]
203
- )
204
- for path in report.modified:
205
- self.state.add_event(
206
- path=path, action="modified", status="success", checksum=report.checksums[path]
207
- )
208
- for old_path, new_path in report.moves.items():
209
- self.state.add_event(
210
- path=f"{old_path} -> {new_path}",
211
- action="moved",
212
- status="success",
213
- checksum=report.checksums[new_path],
214
- )
215
- for path in report.deleted:
216
- self.state.add_event(path=path, action="deleted", status="success")
337
+ logger.info(
338
+ "File change processing completed",
339
+ processed_files=len(processed),
340
+ total_synced_files=self.state.synced_files,
341
+ duration_ms=duration_ms,
342
+ )
217
343
 
218
344
  await self.write_status()
basic_memory/utils.py CHANGED
@@ -1,25 +1,37 @@
1
1
  """Utility functions for basic-memory."""
2
2
 
3
3
  import os
4
+
5
+ import logging
4
6
  import re
5
7
  import sys
6
8
  from pathlib import Path
7
- from typing import Optional, Union
9
+ from typing import Optional, Protocol, Union, runtime_checkable
8
10
 
9
11
  from loguru import logger
10
12
  from unidecode import unidecode
11
13
 
12
- import basic_memory
13
- from basic_memory.config import config
14
14
 
15
- import logfire
15
+ @runtime_checkable
16
+ class PathLike(Protocol):
17
+ """Protocol for objects that can be used as paths."""
18
+
19
+ def __str__(self) -> str: ...
20
+
21
+
22
+ # In type annotations, use Union[Path, str] instead of FilePath for now
23
+ # This preserves compatibility with existing code while we migrate
24
+ FilePath = Union[Path, str]
16
25
 
26
+ # Disable the "Queue is full" warning
27
+ logging.getLogger("opentelemetry.sdk.metrics._internal.instrument").setLevel(logging.ERROR)
17
28
 
18
- def generate_permalink(file_path: Union[Path, str]) -> str:
29
+
30
+ def generate_permalink(file_path: Union[Path, str, PathLike]) -> str:
19
31
  """Generate a stable permalink from a file path.
20
32
 
21
33
  Args:
22
- file_path: Original file path
34
+ file_path: Original file path (str, Path, or PathLike)
23
35
 
24
36
  Returns:
25
37
  Normalized permalink that matches validation rules. Converts spaces and underscores
@@ -65,43 +77,33 @@ def generate_permalink(file_path: Union[Path, str]) -> str:
65
77
 
66
78
 
67
79
  def setup_logging(
68
- home_dir: Path = config.home, log_file: Optional[str] = None, console: bool = True
80
+ env: str,
81
+ home_dir: Path,
82
+ log_file: Optional[str] = None,
83
+ log_level: str = "INFO",
84
+ console: bool = True,
69
85
  ) -> None: # pragma: no cover
70
86
  """
71
87
  Configure logging for the application.
72
- :param home_dir: the root directory for the application
73
- :param log_file: the name of the log file to write to
74
- :param app: the fastapi application instance
75
- :param console: whether to log to the console
76
- """
77
88
 
89
+ Args:
90
+ env: The environment name (dev, test, prod)
91
+ home_dir: The root directory for the application
92
+ log_file: The name of the log file to write to
93
+ log_level: The logging level to use
94
+ console: Whether to log to the console
95
+ """
78
96
  # Remove default handler and any existing handlers
79
97
  logger.remove()
80
98
 
81
- # Add file handler if we are not running tests
82
- if log_file and config.env != "test":
83
- # enable pydantic logfire
84
- logfire.configure(
85
- code_source=logfire.CodeSource(
86
- repository="https://github.com/basicmachines-co/basic-memory",
87
- revision=basic_memory.__version__,
88
- root_path="/src/basic_memory",
89
- ),
90
- environment=config.env,
91
- console=False,
92
- )
93
- logger.configure(handlers=[logfire.loguru_handler()])
94
-
95
- # instrument code spans
96
- logfire.instrument_sqlite3()
97
- logfire.instrument_httpx()
98
-
99
- # setup logger
99
+ # Add file handler if we are not running tests and a log file is specified
100
+ if log_file and env != "test":
101
+ # Setup file logger
100
102
  log_path = home_dir / log_file
101
103
  logger.add(
102
104
  str(log_path),
103
- level=config.log_level,
104
- rotation="100 MB",
105
+ level=log_level,
106
+ rotation="10 MB",
105
107
  retention="10 days",
106
108
  backtrace=True,
107
109
  diagnose=True,
@@ -109,7 +111,20 @@ def setup_logging(
109
111
  colorize=False,
110
112
  )
111
113
 
112
- # Add stderr handler
113
- logger.add(sys.stderr, level=config.log_level, backtrace=True, diagnose=True, colorize=True)
114
+ # Add console logger if requested or in test mode
115
+ if env == "test" or console:
116
+ logger.add(sys.stderr, level=log_level, backtrace=True, diagnose=True, colorize=True)
117
+
118
+ logger.info(f"ENV: '{env}' Log level: '{log_level}' Logging to {log_file}")
119
+
120
+ # Reduce noise from third-party libraries
121
+ noisy_loggers = {
122
+ # HTTP client logs
123
+ "httpx": logging.WARNING,
124
+ # File watching logs
125
+ "watchfiles.main": logging.WARNING,
126
+ }
114
127
 
115
- logger.info(f"ENV: '{config.env}' Log level: '{config.log_level}' Logging to {log_file}")
128
+ # Set log levels for noisy loggers
129
+ for logger_name, level in noisy_loggers.items():
130
+ logging.getLogger(logger_name).setLevel(level)