basic-memory 0.8.0__py3-none-any.whl → 0.10.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
- basic_memory/__init__.py +1 -1
- basic_memory/alembic/migrations.py +4 -9
- basic_memory/alembic/versions/cc7172b46608_update_search_index_schema.py +106 -0
- basic_memory/api/app.py +9 -6
- basic_memory/api/routers/__init__.py +2 -1
- basic_memory/api/routers/knowledge_router.py +30 -4
- basic_memory/api/routers/memory_router.py +3 -2
- basic_memory/api/routers/project_info_router.py +274 -0
- basic_memory/api/routers/search_router.py +22 -4
- basic_memory/cli/app.py +54 -3
- basic_memory/cli/commands/__init__.py +15 -2
- basic_memory/cli/commands/db.py +9 -13
- basic_memory/cli/commands/import_chatgpt.py +31 -36
- basic_memory/cli/commands/import_claude_conversations.py +32 -35
- basic_memory/cli/commands/import_claude_projects.py +34 -37
- basic_memory/cli/commands/import_memory_json.py +26 -28
- basic_memory/cli/commands/mcp.py +7 -1
- basic_memory/cli/commands/project.py +119 -0
- basic_memory/cli/commands/project_info.py +167 -0
- basic_memory/cli/commands/status.py +7 -9
- basic_memory/cli/commands/sync.py +54 -9
- basic_memory/cli/commands/{tools.py → tool.py} +92 -19
- basic_memory/cli/main.py +40 -1
- basic_memory/config.py +157 -10
- basic_memory/db.py +19 -4
- basic_memory/deps.py +10 -3
- basic_memory/file_utils.py +34 -18
- basic_memory/markdown/markdown_processor.py +1 -1
- basic_memory/markdown/utils.py +5 -0
- basic_memory/mcp/main.py +1 -2
- basic_memory/mcp/prompts/__init__.py +6 -2
- basic_memory/mcp/prompts/ai_assistant_guide.py +9 -10
- basic_memory/mcp/prompts/continue_conversation.py +65 -126
- basic_memory/mcp/prompts/recent_activity.py +55 -13
- basic_memory/mcp/prompts/search.py +72 -17
- basic_memory/mcp/prompts/utils.py +139 -82
- basic_memory/mcp/server.py +1 -1
- basic_memory/mcp/tools/__init__.py +11 -22
- basic_memory/mcp/tools/build_context.py +85 -0
- basic_memory/mcp/tools/canvas.py +17 -19
- basic_memory/mcp/tools/delete_note.py +28 -0
- basic_memory/mcp/tools/project_info.py +51 -0
- basic_memory/mcp/tools/{resource.py → read_content.py} +42 -5
- basic_memory/mcp/tools/read_note.py +190 -0
- basic_memory/mcp/tools/recent_activity.py +100 -0
- basic_memory/mcp/tools/search.py +56 -17
- basic_memory/mcp/tools/utils.py +245 -17
- basic_memory/mcp/tools/write_note.py +124 -0
- basic_memory/models/search.py +2 -1
- basic_memory/repository/entity_repository.py +3 -2
- basic_memory/repository/project_info_repository.py +9 -0
- basic_memory/repository/repository.py +23 -6
- basic_memory/repository/search_repository.py +33 -10
- basic_memory/schemas/__init__.py +12 -0
- basic_memory/schemas/memory.py +3 -2
- basic_memory/schemas/project_info.py +96 -0
- basic_memory/schemas/search.py +27 -32
- basic_memory/services/context_service.py +3 -3
- basic_memory/services/entity_service.py +8 -2
- basic_memory/services/file_service.py +107 -57
- basic_memory/services/link_resolver.py +5 -45
- basic_memory/services/search_service.py +45 -16
- basic_memory/sync/sync_service.py +274 -39
- basic_memory/sync/watch_service.py +174 -34
- basic_memory/utils.py +40 -40
- basic_memory-0.10.0.dist-info/METADATA +386 -0
- basic_memory-0.10.0.dist-info/RECORD +99 -0
- basic_memory/mcp/prompts/json_canvas_spec.py +0 -25
- basic_memory/mcp/tools/knowledge.py +0 -68
- basic_memory/mcp/tools/memory.py +0 -177
- basic_memory/mcp/tools/notes.py +0 -201
- basic_memory-0.8.0.dist-info/METADATA +0 -379
- basic_memory-0.8.0.dist-info/RECORD +0 -91
- {basic_memory-0.8.0.dist-info → basic_memory-0.10.0.dist-info}/WHEEL +0 -0
- {basic_memory-0.8.0.dist-info → basic_memory-0.10.0.dist-info}/entry_points.txt +0 -0
- {basic_memory-0.8.0.dist-info → basic_memory-0.10.0.dist-info}/licenses/LICENSE +0 -0
basic_memory/sync/watch_service.py
CHANGED

@@ -1,20 +1,20 @@
 """Watch service for Basic Memory."""

-import dataclasses
 import os
 from datetime import datetime
 from pathlib import Path
 from typing import List, Optional, Set

+from basic_memory.config import ProjectConfig
+from basic_memory.services.file_service import FileService
+from basic_memory.sync.sync_service import SyncService
 from loguru import logger
 from pydantic import BaseModel
 from rich.console import Console
 from watchfiles import awatch
 from watchfiles.main import FileChange, Change

-
-from basic_memory.services.file_service import FileService
-from basic_memory.sync.sync_service import SyncService
+WATCH_STATUS_JSON = "watch-status.json"


 class WatchEvent(BaseModel):

@@ -29,8 +29,8 @@ class WatchEvent(BaseModel):
 class WatchServiceState(BaseModel):
     # Service status
     running: bool = False
-    start_time: datetime =
-    pid: int =
+    start_time: datetime = datetime.now()  # Use directly with Pydantic model
+    pid: int = os.getpid()  # Use directly with Pydantic model

     # Stats
     error_count: int = 0

@@ -41,7 +41,7 @@ class WatchServiceState(BaseModel):
     synced_files: int = 0

     # Recent activity
-    recent_events: List[WatchEvent] =
+    recent_events: List[WatchEvent] = []  # Use directly with Pydantic model

     def add_event(
         self,
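Note: the two defaults above are evaluated once, when the class body runs, while `run()` reassigns `start_time` per invocation. A minimal sketch (illustrative only, not part of the package) of when each style of Pydantic default is evaluated:

```python
# Illustrative sketch, not basic-memory code: when Pydantic defaults are evaluated.
from datetime import datetime
from typing import List

from pydantic import BaseModel, Field


class ExampleState(BaseModel):
    # Evaluated once, when the class body runs; instances share this value until
    # it is reassigned (the watch service reassigns start_time in run()).
    started_at_class_definition: datetime = datetime.now()

    # Evaluated again for every new instance.
    started_per_instance: datetime = Field(default_factory=datetime.now)

    # Pydantic copies mutable defaults for each instance, so the bare [] above is safe.
    recent_events: List[str] = []


a, b = ExampleState(), ExampleState()
assert a.started_at_class_definition == b.started_at_class_definition
```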
@@ -75,16 +75,23 @@ class WatchService:
         self.file_service = file_service
         self.config = config
         self.state = WatchServiceState()
-        self.status_path = config.home / ".basic-memory" /
+        self.status_path = config.home / ".basic-memory" / WATCH_STATUS_JSON
         self.status_path.parent.mkdir(parents=True, exist_ok=True)
         self.console = Console()

     async def run(self):  # pragma: no cover
         """Watch for file changes and sync them"""
-        logger.info(
+        logger.info(
+            "Watch service started",
+            directory=str(self.config.home),
+            debounce_ms=self.config.sync_delay,
+            pid=os.getpid(),
+        )
+
         self.state.running = True
         self.state.start_time = datetime.now()
         await self.write_status()
+
         try:
             async for changes in awatch(
                 self.config.home,

@@ -95,14 +102,23 @@ class WatchService:
                 await self.handle_changes(self.config.home, changes)

         except Exception as e:
+            logger.exception("Watch service error", error=str(e), directory=str(self.config.home))
+
             self.state.record_error(str(e))
             await self.write_status()
             raise
+
         finally:
+            logger.info(
+                "Watch service stopped",
+                directory=str(self.config.home),
+                runtime_seconds=int((datetime.now() - self.state.start_time).total_seconds()),
+            )
+
             self.state.running = False
             await self.write_status()

-    def filter_changes(self, change: Change, path: str) -> bool:
+    def filter_changes(self, change: Change, path: str) -> bool:  # pragma: no cover
         """Filter to only watch non-hidden files and directories.

         Returns:

@@ -112,6 +128,7 @@ class WatchService:
         try:
             relative_path = Path(path).relative_to(self.config.home)
         except ValueError:
+            # This is a defensive check for paths outside our home directory
             return False

         # Skip hidden directories and files

@@ -120,6 +137,10 @@ class WatchService:
             if part.startswith("."):
                 return False

+        # Skip temp files used in atomic operations
+        if path.endswith(".tmp"):
+            return False
+
         return True

     async def write_status(self):
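Note: the `.tmp` filter above exists because atomic writes stage content in a temporary sibling file before renaming it into place. A rough sketch of that write pattern (the helper is hypothetical, not a basic-memory API):

```python
# Illustrative sketch of the atomic-write pattern that produces ".tmp" files;
# the helper name is hypothetical, not a basic-memory API.
from pathlib import Path


def atomic_write(target: Path, content: str) -> None:
    """Write to a temporary sibling file, then rename it over the target.

    The rename is atomic on POSIX filesystems, so readers and the watcher
    never observe a half-written file at the final path.
    """
    tmp = target.with_name(target.name + ".tmp")
    tmp.write_text(content, encoding="utf-8")
    tmp.replace(target)  # atomic rename over the destination
```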
@@ -128,16 +149,26 @@ class WatchService:

     async def handle_changes(self, directory: Path, changes: Set[FileChange]):
         """Process a batch of file changes"""
-
+        import time
+        from typing import List, Set
+
+        start_time = time.time()
+
+        logger.info("Processing file changes", change_count=len(changes), directory=str(directory))

         # Group changes by type
-        adds = []
-        deletes = []
-        modifies = []
+        adds: List[str] = []
+        deletes: List[str] = []
+        modifies: List[str] = []

         for change, path in changes:
             # convert to relative path
             relative_path = str(Path(path).relative_to(directory))
+
+            # Skip .tmp files - they're temporary and shouldn't be synced
+            if relative_path.endswith(".tmp"):
+                continue
+
             if change == Change.added:
                 adds.append(relative_path)
             elif change == Change.deleted:

@@ -145,25 +176,44 @@ class WatchService:
             elif change == Change.modified:
                 modifies.append(relative_path)

+        logger.debug(
+            "Grouped file changes", added=len(adds), deleted=len(deletes), modified=len(modifies)
+        )
+
         # Track processed files to avoid duplicates
-        processed = set()
+        processed: Set[str] = set()

         # First handle potential moves
         for added_path in adds:
             if added_path in processed:
                 continue  # pragma: no cover

+            # Skip directories for added paths
+            # We don't need to process directories, only the files inside them
+            # This prevents errors when trying to compute checksums or read directories as files
+            added_full_path = directory / added_path
+            if not added_full_path.exists() or added_full_path.is_dir():
+                logger.debug("Skipping non-existent or directory path", path=added_path)
+                processed.add(added_path)
+                continue
+
             for deleted_path in deletes:
                 if deleted_path in processed:
                     continue  # pragma: no cover

+                # Skip directories for deleted paths (based on entity type in db)
+                deleted_entity = await self.sync_service.entity_repository.get_by_file_path(
+                    deleted_path
+                )
+                if deleted_entity is None:
+                    # If this was a directory, it wouldn't have an entity
+                    logger.debug("Skipping unknown path for move detection", path=deleted_path)
+                    continue
+
                 if added_path != deleted_path:
                     # Compare checksums to detect moves
                     try:
                         added_checksum = await self.file_service.compute_checksum(added_path)
-                        deleted_entity = await self.sync_service.entity_repository.get_by_file_path(
-                            deleted_path
-                        )

                         if deleted_entity and deleted_entity.checksum == added_checksum:
                             await self.sync_service.handle_move(deleted_path, added_path)

@@ -172,43 +222,133 @@ class WatchService:
                                 action="moved",
                                 status="success",
                             )
-                            self.console.print(
-                                f"[blue]→[/blue] Moved: {deleted_path} → {added_path}"
-                            )
+                            self.console.print(f"[blue]→[/blue] {deleted_path} → {added_path}")
                             processed.add(added_path)
                             processed.add(deleted_path)
                             break
                     except Exception as e:  # pragma: no cover
-                        logger.warning(
+                        logger.warning(
+                            "Error checking for move",
+                            old_path=deleted_path,
+                            new_path=added_path,
+                            error=str(e),
+                        )

-        # Handle remaining changes
+        # Handle remaining changes - group them by type for concise output
+        moved_count = len([p for p in processed if p in deletes or p in adds])
+        delete_count = 0
+        add_count = 0
+        modify_count = 0
+
+        # Process deletes
         for path in deletes:
             if path not in processed:
+                logger.debug("Processing deleted file", path=path)
                 await self.sync_service.handle_delete(path)
                 self.state.add_event(path=path, action="deleted", status="success")
-                self.console.print(f"[red]✕[/red]
+                self.console.print(f"[red]✕[/red] {path}")
                 processed.add(path)
+                delete_count += 1

+        # Process adds
         for path in adds:
             if path not in processed:
-
-
-
-
+                # Skip directories - only process files
+                full_path = directory / path
+                if not full_path.exists() or full_path.is_dir():
+                    logger.debug(
+                        "Skipping non-existent or directory path", path=path
+                    )  # pragma: no cover
+                    processed.add(path)  # pragma: no cover
+                    continue  # pragma: no cover
+
+                logger.debug("Processing new file", path=path)
+                entity, checksum = await self.sync_service.sync_file(path, new=True)
+                if checksum:
+                    self.state.add_event(
+                        path=path, action="new", status="success", checksum=checksum
+                    )
+                    self.console.print(f"[green]✓[/green] {path}")
+                    logger.debug(
+                        "Added file processed",
+                        path=path,
+                        entity_id=entity.id if entity else None,
+                        checksum=checksum,
+                    )
+                    processed.add(path)
+                    add_count += 1
+                else:  # pragma: no cover
+                    logger.warning("Error syncing new file", path=path)  # pragma: no cover
+                    self.console.print(
+                        f"[orange]?[/orange] Error syncing: {path}"
+                    )  # pragma: no cover
+
+        # Process modifies - detect repeats
+        last_modified_path = None
+        repeat_count = 0

         for path in modifies:
             if path not in processed:
-
+                # Skip directories - only process files
+                full_path = directory / path
+                if not full_path.exists() or full_path.is_dir():
+                    logger.debug("Skipping non-existent or directory path", path=path)
+                    processed.add(path)
+                    continue
+
+                logger.debug("Processing modified file", path=path)
+                entity, checksum = await self.sync_service.sync_file(path, new=False)
                 self.state.add_event(
                     path=path, action="modified", status="success", checksum=checksum
                 )
-
+
+                # Check if this is a repeat of the last modified file
+                if path == last_modified_path:  # pragma: no cover
+                    repeat_count += 1  # pragma: no cover
+                    # Only show a message for the first repeat
+                    if repeat_count == 1:  # pragma: no cover
+                        self.console.print(
+                            f"[yellow]...[/yellow] Repeated changes to {path}"
+                        )  # pragma: no cover
+                else:
+                    # New file being modified
+                    self.console.print(f"[yellow]✎[/yellow] {path}")
+                    last_modified_path = path
+                    repeat_count = 0
+                modify_count += 1
+
+                logger.debug(
+                    "Modified file processed",
+                    path=path,
+                    entity_id=entity.id if entity else None,
+                    checksum=checksum,
+                )
                 processed.add(path)

-        # Add a
+        # Add a concise summary instead of a divider
         if processed:
-
-
+            changes = []  # pyright: ignore
+            if add_count > 0:
+                changes.append(f"[green]{add_count} added[/green]")  # pyright: ignore
+            if modify_count > 0:
+                changes.append(f"[yellow]{modify_count} modified[/yellow]")  # pyright: ignore
+            if moved_count > 0:
+                changes.append(f"[blue]{moved_count} moved[/blue]")  # pyright: ignore
+            if delete_count > 0:
+                changes.append(f"[red]{delete_count} deleted[/red]")  # pyright: ignore
+
+            if changes:
+                self.console.print(f"{', '.join(changes)}", style="dim")  # pyright: ignore
+
+        duration_ms = int((time.time() - start_time) * 1000)
         self.state.last_scan = datetime.now()
         self.state.synced_files += len(processed)
-
+
+        logger.info(
+            "File change processing completed",
+            processed_files=len(processed),
+            total_synced_files=self.state.synced_files,
+            duration_ms=duration_ms,
+        )
+
+        await self.write_status()
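Note: the hunks above add checksum-based move detection: a delete paired with an add whose content checksum matches the checksum stored for the deleted entity is reported as a single move. A self-contained sketch of that idea (the SHA-256 choice is an assumption; basic-memory computes checksums via `FileService.compute_checksum`):

```python
# Illustrative sketch only, not the package's implementation.
import hashlib
from pathlib import Path
from typing import Dict, List, Tuple


def file_checksum(path: Path) -> str:
    """Content hash for a file (algorithm choice here is an assumption)."""
    return hashlib.sha256(path.read_bytes()).hexdigest()


def pair_moves(added: List[Path], deleted: Dict[str, str]) -> List[Tuple[str, Path]]:
    """Pair deleted paths with added files whose checksums match.

    `deleted` maps a deleted relative path to the checksum previously recorded
    for it (in basic-memory this comes from the entity row in the database);
    a match is treated as a move rather than a delete plus a create.
    """
    moves: List[Tuple[str, Path]] = []
    for new_file in added:
        new_sum = file_checksum(new_file)
        for old_path, old_sum in deleted.items():
            if old_sum == new_sum:
                moves.append((old_path, new_file))
                break
    return moves
```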
basic_memory/utils.py
CHANGED
@@ -1,25 +1,37 @@
 """Utility functions for basic-memory."""

-import logging
 import os
+
+import logging
 import re
 import sys
 from pathlib import Path
-from typing import Optional, Union
+from typing import Optional, Protocol, Union, runtime_checkable

 from loguru import logger
 from unidecode import unidecode

-import basic_memory

-
+@runtime_checkable
+class PathLike(Protocol):
+    """Protocol for objects that can be used as paths."""
+
+    def __str__(self) -> str: ...


-
+# In type annotations, use Union[Path, str] instead of FilePath for now
+# This preserves compatibility with existing code while we migrate
+FilePath = Union[Path, str]
+
+# Disable the "Queue is full" warning
+logging.getLogger("opentelemetry.sdk.metrics._internal.instrument").setLevel(logging.ERROR)
+
+
+def generate_permalink(file_path: Union[Path, str, PathLike]) -> str:
     """Generate a stable permalink from a file path.

     Args:
-        file_path: Original file path
+        file_path: Original file path (str, Path, or PathLike)

     Returns:
         Normalized permalink that matches validation rules. Converts spaces and underscores
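Note: a short usage sketch (hypothetical, not from the package) of the `PathLike` protocol and `FilePath` alias added above; `normalize` is an illustrative helper only:

```python
# Hypothetical usage of the PathLike protocol and FilePath alias shown above.
from pathlib import Path
from typing import Protocol, Union, runtime_checkable


@runtime_checkable
class PathLike(Protocol):
    """Protocol for objects that can be used as paths."""

    def __str__(self) -> str: ...


FilePath = Union[Path, str]


def normalize(file_path: FilePath) -> str:
    # Both str and Path satisfy FilePath; the protocol also admits custom objects
    # that simply know how to render themselves as a path string.
    return str(file_path).replace("\\", "/")


assert isinstance(Path("notes/idea.md"), PathLike)  # runtime_checkable enables isinstance checks
print(normalize(Path("notes") / "idea.md"))  # notes/idea.md
```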
@@ -73,38 +85,25 @@ def setup_logging(
 ) -> None:  # pragma: no cover
     """
     Configure logging for the application.
-    :param home_dir: the root directory for the application
-    :param log_file: the name of the log file to write to
-    :param app: the fastapi application instance
-    :param console: whether to log to the console
-    """

+    Args:
+        env: The environment name (dev, test, prod)
+        home_dir: The root directory for the application
+        log_file: The name of the log file to write to
+        log_level: The logging level to use
+        console: Whether to log to the console
+    """
     # Remove default handler and any existing handlers
     logger.remove()

-    # Add file handler if we are not running tests
+    # Add file handler if we are not running tests and a log file is specified
     if log_file and env != "test":
-        #
-        logfire.configure(
-            code_source=logfire.CodeSource(
-                repository="https://github.com/basicmachines-co/basic-memory",
-                revision=basic_memory.__version__,
-            ),
-            environment=env,
-            console=False,
-        )
-        logger.configure(handlers=[logfire.loguru_handler()])
-
-        # instrument code spans
-        logfire.instrument_sqlite3()
-        logfire.instrument_httpx()
-
-        # setup logger
+        # Setup file logger
         log_path = home_dir / log_file
         logger.add(
             str(log_path),
             level=log_level,
-            rotation="
+            rotation="10 MB",
             retention="10 days",
             backtrace=True,
             diagnose=True,

@@ -112,19 +111,20 @@ def setup_logging(
             colorize=False,
         )

+    # Add console logger if requested or in test mode
     if env == "test" or console:
-        # Add stderr handler
         logger.add(sys.stderr, level=log_level, backtrace=True, diagnose=True, colorize=True)

     logger.info(f"ENV: '{env}' Log level: '{log_level}' Logging to {log_file}")

-    #
-
-
-
-
-
-
-
-    #
-
+    # Reduce noise from third-party libraries
+    noisy_loggers = {
+        # HTTP client logs
+        "httpx": logging.WARNING,
+        # File watching logs
+        "watchfiles.main": logging.WARNING,
+    }
+
+    # Set log levels for noisy loggers
+    for logger_name, level in noisy_loggers.items():
+        logging.getLogger(logger_name).setLevel(level)