basic-memory 0.14.4__py3-none-any.whl → 0.15.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of basic-memory might be problematic.
- basic_memory/__init__.py +1 -1
- basic_memory/alembic/versions/a1b2c3d4e5f6_fix_project_foreign_keys.py +5 -9
- basic_memory/api/app.py +10 -4
- basic_memory/api/routers/knowledge_router.py +25 -8
- basic_memory/api/routers/project_router.py +99 -4
- basic_memory/cli/app.py +9 -28
- basic_memory/cli/auth.py +277 -0
- basic_memory/cli/commands/cloud/__init__.py +5 -0
- basic_memory/cli/commands/cloud/api_client.py +112 -0
- basic_memory/cli/commands/cloud/bisync_commands.py +818 -0
- basic_memory/cli/commands/cloud/core_commands.py +288 -0
- basic_memory/cli/commands/cloud/mount_commands.py +295 -0
- basic_memory/cli/commands/cloud/rclone_config.py +288 -0
- basic_memory/cli/commands/cloud/rclone_installer.py +198 -0
- basic_memory/cli/commands/command_utils.py +60 -0
- basic_memory/cli/commands/import_memory_json.py +0 -4
- basic_memory/cli/commands/mcp.py +16 -4
- basic_memory/cli/commands/project.py +139 -142
- basic_memory/cli/commands/status.py +34 -22
- basic_memory/cli/commands/sync.py +45 -228
- basic_memory/cli/commands/tool.py +87 -16
- basic_memory/cli/main.py +1 -0
- basic_memory/config.py +76 -12
- basic_memory/db.py +104 -3
- basic_memory/deps.py +20 -3
- basic_memory/file_utils.py +37 -13
- basic_memory/ignore_utils.py +295 -0
- basic_memory/markdown/plugins.py +9 -7
- basic_memory/mcp/async_client.py +22 -10
- basic_memory/mcp/project_context.py +141 -0
- basic_memory/mcp/prompts/ai_assistant_guide.py +49 -4
- basic_memory/mcp/prompts/continue_conversation.py +1 -1
- basic_memory/mcp/prompts/recent_activity.py +116 -32
- basic_memory/mcp/prompts/search.py +1 -1
- basic_memory/mcp/prompts/utils.py +11 -4
- basic_memory/mcp/resources/ai_assistant_guide.md +179 -41
- basic_memory/mcp/resources/project_info.py +20 -6
- basic_memory/mcp/server.py +0 -37
- basic_memory/mcp/tools/__init__.py +5 -6
- basic_memory/mcp/tools/build_context.py +29 -19
- basic_memory/mcp/tools/canvas.py +19 -8
- basic_memory/mcp/tools/chatgpt_tools.py +178 -0
- basic_memory/mcp/tools/delete_note.py +67 -34
- basic_memory/mcp/tools/edit_note.py +55 -39
- basic_memory/mcp/tools/headers.py +44 -0
- basic_memory/mcp/tools/list_directory.py +18 -8
- basic_memory/mcp/tools/move_note.py +119 -41
- basic_memory/mcp/tools/project_management.py +61 -228
- basic_memory/mcp/tools/read_content.py +28 -12
- basic_memory/mcp/tools/read_note.py +83 -46
- basic_memory/mcp/tools/recent_activity.py +441 -42
- basic_memory/mcp/tools/search.py +82 -70
- basic_memory/mcp/tools/sync_status.py +5 -4
- basic_memory/mcp/tools/utils.py +19 -0
- basic_memory/mcp/tools/view_note.py +31 -6
- basic_memory/mcp/tools/write_note.py +65 -14
- basic_memory/models/knowledge.py +12 -6
- basic_memory/models/project.py +6 -2
- basic_memory/repository/entity_repository.py +29 -82
- basic_memory/repository/relation_repository.py +13 -0
- basic_memory/repository/repository.py +2 -2
- basic_memory/repository/search_repository.py +4 -2
- basic_memory/schemas/__init__.py +6 -0
- basic_memory/schemas/base.py +39 -11
- basic_memory/schemas/cloud.py +46 -0
- basic_memory/schemas/memory.py +90 -21
- basic_memory/schemas/project_info.py +9 -10
- basic_memory/schemas/sync_report.py +48 -0
- basic_memory/services/context_service.py +25 -11
- basic_memory/services/entity_service.py +75 -45
- basic_memory/services/initialization.py +30 -11
- basic_memory/services/project_service.py +13 -23
- basic_memory/sync/sync_service.py +145 -21
- basic_memory/sync/watch_service.py +101 -40
- basic_memory/utils.py +14 -4
- {basic_memory-0.14.4.dist-info → basic_memory-0.15.0.dist-info}/METADATA +7 -6
- basic_memory-0.15.0.dist-info/RECORD +147 -0
- basic_memory/mcp/project_session.py +0 -120
- basic_memory-0.14.4.dist-info/RECORD +0 -133
- {basic_memory-0.14.4.dist-info → basic_memory-0.15.0.dist-info}/WHEEL +0 -0
- {basic_memory-0.14.4.dist-info → basic_memory-0.15.0.dist-info}/entry_points.txt +0 -0
- {basic_memory-0.14.4.dist-info → basic_memory-0.15.0.dist-info}/licenses/LICENSE +0 -0
basic_memory/sync/sync_service.py CHANGED

@@ -1,7 +1,9 @@
 """Service for syncing files between filesystem and database."""
 
+import asyncio
 import os
 import time
+from concurrent.futures import ThreadPoolExecutor
 from dataclasses import dataclass, field
 from datetime import datetime
 from pathlib import Path
@@ -10,12 +12,16 @@ from typing import Dict, Optional, Set, Tuple
 from loguru import logger
 from sqlalchemy.exc import IntegrityError
 
-from basic_memory
+from basic_memory import db
+from basic_memory.config import BasicMemoryConfig, ConfigManager
 from basic_memory.file_utils import has_frontmatter
-from basic_memory.
-from basic_memory.
-from basic_memory.
+from basic_memory.ignore_utils import load_bmignore_patterns, should_ignore_path
+from basic_memory.markdown import EntityParser, MarkdownProcessor
+from basic_memory.models import Entity, Project
+from basic_memory.repository import EntityRepository, RelationRepository, ObservationRepository
+from basic_memory.repository.search_repository import SearchRepository
 from basic_memory.services import EntityService, FileService
+from basic_memory.services.link_resolver import LinkResolver
 from basic_memory.services.search_service import SearchService
 from basic_memory.services.sync_status_service import sync_status_tracker, SyncStatus
 
@@ -80,6 +86,43 @@ class SyncService:
         self.relation_repository = relation_repository
         self.search_service = search_service
         self.file_service = file_service
+        self._thread_pool = ThreadPoolExecutor(max_workers=app_config.sync_thread_pool_size)
+        # Load ignore patterns once at initialization for performance
+        self._ignore_patterns = load_bmignore_patterns()
+
+    async def _read_file_async(self, file_path: Path) -> str:
+        """Read file content in thread pool to avoid blocking the event loop."""
+        loop = asyncio.get_event_loop()
+        return await loop.run_in_executor(self._thread_pool, file_path.read_text, "utf-8")
+
+    async def _compute_checksum_async(self, path: str) -> str:
+        """Compute file checksum in thread pool to avoid blocking the event loop."""
+
+        def _sync_compute_checksum(path_str: str) -> str:
+            # Synchronous version for thread pool execution
+            path_obj = self.file_service.base_path / path_str
+
+            if self.file_service.is_markdown(path_str):
+                content = path_obj.read_text(encoding="utf-8")
+            else:
+                content = path_obj.read_bytes()
+
+            # Use the synchronous version of compute_checksum
+            import hashlib
+
+            if isinstance(content, str):
+                content_bytes = content.encode("utf-8")
+            else:
+                content_bytes = content
+            return hashlib.sha256(content_bytes).hexdigest()
+
+        loop = asyncio.get_event_loop()
+        return await loop.run_in_executor(self._thread_pool, _sync_compute_checksum, path)
+
+    def __del__(self):
+        """Cleanup thread pool when service is destroyed."""
+        if hasattr(self, "_thread_pool"):
+            self._thread_pool.shutdown(wait=False)
 
     async def sync(self, directory: Path, project_name: Optional[str] = None) -> SyncReport:
         """Sync all files with database."""
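The pattern in this hunk — pushing blocking file reads and SHA-256 hashing onto a dedicated `ThreadPoolExecutor` so the event loop stays responsive during large syncs — works the same way in isolation. A minimal standalone sketch (the pool size and helper names are illustrative, not basic-memory's API):

```python
import asyncio
import hashlib
from concurrent.futures import ThreadPoolExecutor
from pathlib import Path

pool = ThreadPoolExecutor(max_workers=4)  # illustrative size; the service reads its size from config

def _checksum(path: Path) -> str:
    # Blocking work: read the file and hash its bytes.
    return hashlib.sha256(path.read_bytes()).hexdigest()

async def checksum_async(path: Path) -> str:
    # run_in_executor schedules the blocking call on the pool and yields
    # control back to the event loop until the result is ready.
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(pool, _checksum, path)

if __name__ == "__main__":
    print(asyncio.run(checksum_async(Path(__file__))))
```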
@@ -289,14 +332,14 @@ class SyncService:
         logger.debug(f"Parsing markdown file, path: {path}, new: {new}")
 
         file_path = self.entity_parser.base_path / path
-        file_content =
+        file_content = await self._read_file_async(file_path)
         file_contains_frontmatter = has_frontmatter(file_content)
 
         # entity markdown will always contain front matter, so it can be used up create/update the entity
         entity_markdown = await self.entity_parser.parse_file(path)
 
-        # if the file contains frontmatter, resolve a permalink
-        if file_contains_frontmatter:
+        # if the file contains frontmatter, resolve a permalink (unless disabled)
+        if file_contains_frontmatter and not self.app_config.disable_permalinks:
             # Resolve permalink - this handles all the cases including conflicts
             permalink = await self.entity_service.resolve_permalink(path, markdown=entity_markdown)
 
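The new `disable_permalinks` flag only changes behavior for files that carry YAML frontmatter, as detected by `has_frontmatter`. As a rough sketch of what such a check can look like (an assumption about the helper's behavior, not its actual implementation):

```python
def has_frontmatter(content: str) -> bool:
    # YAML frontmatter is conventionally delimited by '---' lines at the
    # very top of the file; require an opening and a closing delimiter.
    lines = content.splitlines()
    if not lines or lines[0].strip() != "---":
        return False
    return any(line.strip() == "---" for line in lines[1:])

print(has_frontmatter("---\ntitle: Note\n---\nbody text"))  # True
print(has_frontmatter("body text only"))                    # False
```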
@@ -326,7 +369,7 @@ class SyncService:
         # After updating relations, we need to compute the checksum again
         # This is necessary for files with wikilinks to ensure consistent checksums
         # after relation processing is complete
-        final_checksum = await self.
+        final_checksum = await self._compute_checksum_async(path)
 
         # set checksum
         await self.entity_repository.update(entity.id, {"checksum": final_checksum})
@@ -350,7 +393,7 @@ class SyncService:
         Returns:
             Tuple of (entity, checksum)
         """
-        checksum = await self.
+        checksum = await self._compute_checksum_async(path)
         if new:
             # Generate permalink from path
             await self.entity_service.resolve_permalink(path)
@@ -487,8 +530,10 @@ class SyncService:
         updates = {"file_path": new_path}
 
         # If configured, also update permalink to match new path
-        if
-
+        if (
+            self.app_config.update_permalinks_on_move
+            and not self.app_config.disable_permalinks
+            and self.file_service.is_markdown(new_path)
         ):
             # generate new permalink value
             new_permalink = await self.entity_service.resolve_permalink(new_path)
@@ -548,12 +593,27 @@ class SyncService:
         # update search index
         await self.search_service.index_entity(updated)
 
-    async def resolve_relations(self):
-        """Try to resolve
+    async def resolve_relations(self, entity_id: int | None = None):
+        """Try to resolve unresolved relations.
 
-
+        Args:
+            entity_id: If provided, only resolve relations for this specific entity.
+                Otherwise, resolve all unresolved relations in the database.
+        """
 
-
+        if entity_id:
+            # Only get unresolved relations for the specific entity
+            unresolved_relations = (
+                await self.relation_repository.find_unresolved_relations_for_entity(entity_id)
+            )
+            logger.info(
+                f"Resolving forward references for entity {entity_id}",
+                count=len(unresolved_relations),
+            )
+        else:
+            # Get all unresolved relations (original behavior)
+            unresolved_relations = await self.relation_repository.find_unresolved_relations()
+            logger.info("Resolving all forward references", count=len(unresolved_relations))
 
         for relation in unresolved_relations:
             logger.trace(
@@ -608,19 +668,35 @@ class SyncService:
 
         logger.debug(f"Scanning directory {directory}")
         result = ScanResult()
+        ignored_count = 0
 
         for root, dirnames, filenames in os.walk(str(directory)):
-            #
-
+            # Convert root to Path for easier manipulation
+            root_path = Path(root)
+
+            # Filter out ignored directories in-place
+            dirnames_to_remove = []
+            for dirname in dirnames:
+                dir_path = root_path / dirname
+                if should_ignore_path(dir_path, directory, self._ignore_patterns):
+                    dirnames_to_remove.append(dirname)
+                    ignored_count += 1
+
+            # Remove ignored directories from dirnames to prevent os.walk from descending
+            for dirname in dirnames_to_remove:
+                dirnames.remove(dirname)
 
             for filename in filenames:
-
-
+                path = root_path / filename
+
+                # Check if file should be ignored
+                if should_ignore_path(path, directory, self._ignore_patterns):
+                    ignored_count += 1
+                    logger.trace(f"Ignoring file per .bmignore: {path.relative_to(directory)}")
                     continue
 
-                path = Path(root) / filename
                 rel_path = path.relative_to(directory).as_posix()
-                checksum = await self.
+                checksum = await self._compute_checksum_async(rel_path)
                 result.files[rel_path] = checksum
                 result.checksums[checksum] = rel_path
 
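Pruning `dirnames` in place is what makes the scan cheap: `os.walk` never descends into an ignored tree, instead of visiting it and discarding files one by one. The same idea in isolation, with a toy ignore set standing in for the `.bmignore` pattern matching that lives in `ignore_utils`:

```python
import os
from pathlib import Path

IGNORED_DIRS = {".git", "node_modules", ".venv"}  # toy stand-in for .bmignore patterns

def walk_files(root: Path) -> list[Path]:
    found: list[Path] = []
    for dirpath, dirnames, filenames in os.walk(root):
        # Mutating dirnames in place tells os.walk not to descend into
        # the removed directories at all.
        dirnames[:] = [d for d in dirnames if d not in IGNORED_DIRS]
        found.extend(Path(dirpath) / name for name in filenames)
    return found

if __name__ == "__main__":
    for f in walk_files(Path(".")):
        print(f)
```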
@@ -631,7 +707,55 @@ class SyncService:
             f"{directory} scan completed "
             f"directory={str(directory)} "
             f"files_found={len(result.files)} "
+            f"files_ignored={ignored_count} "
             f"duration_ms={duration_ms}"
         )
 
         return result
+
+
+async def get_sync_service(project: Project) -> SyncService:  # pragma: no cover
+    """Get sync service instance with all dependencies."""
+
+    app_config = ConfigManager().config
+    _, session_maker = await db.get_or_create_db(
+        db_path=app_config.database_path, db_type=db.DatabaseType.FILESYSTEM
+    )
+
+    project_path = Path(project.path)
+    entity_parser = EntityParser(project_path)
+    markdown_processor = MarkdownProcessor(entity_parser)
+    file_service = FileService(project_path, markdown_processor)
+
+    # Initialize repositories
+    entity_repository = EntityRepository(session_maker, project_id=project.id)
+    observation_repository = ObservationRepository(session_maker, project_id=project.id)
+    relation_repository = RelationRepository(session_maker, project_id=project.id)
+    search_repository = SearchRepository(session_maker, project_id=project.id)
+
+    # Initialize services
+    search_service = SearchService(search_repository, entity_repository, file_service)
+    link_resolver = LinkResolver(entity_repository, search_service)
+
+    # Initialize services
+    entity_service = EntityService(
+        entity_parser,
+        entity_repository,
+        observation_repository,
+        relation_repository,
+        file_service,
+        link_resolver,
+    )
+
+    # Create sync service
+    sync_service = SyncService(
+        app_config=app_config,
+        entity_service=entity_service,
+        entity_parser=entity_parser,
+        entity_repository=entity_repository,
+        relation_repository=relation_repository,
+        search_service=search_service,
+        file_service=file_service,
+    )
+
+    return sync_service
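With the factory moved into `sync_service.py`, a caller only needs a `Project` row to run a full sync. A hedged usage sketch (assuming `Project` exposes `path` and `name` attributes, which the factory and the `sync()` signature above imply):

```python
from pathlib import Path

from basic_memory.sync.sync_service import get_sync_service

async def sync_one(project) -> None:
    # `project` is a basic_memory.models.Project row loaded elsewhere;
    # get_sync_service wires up the repositories and services shown above.
    sync_service = await get_sync_service(project)
    report = await sync_service.sync(Path(project.path), project_name=project.name)
    print(report)
```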
basic_memory/sync/watch_service.py CHANGED

@@ -5,9 +5,10 @@ import os
 from collections import defaultdict
 from datetime import datetime
 from pathlib import Path
-from typing import List, Optional, Set
+from typing import List, Optional, Set, Sequence
 
 from basic_memory.config import BasicMemoryConfig, WATCH_STATUS_JSON
+from basic_memory.ignore_utils import load_gitignore_patterns, should_ignore_path
 from basic_memory.models import Project
 from basic_memory.repository import ProjectRepository
 from loguru import logger
@@ -15,6 +16,7 @@ from pydantic import BaseModel
 from rich.console import Console
 from watchfiles import awatch
 from watchfiles.main import FileChange, Change
+import time
 
 
 class WatchEvent(BaseModel):
@@ -81,54 +83,110 @@ class WatchService:
         self.state = WatchServiceState()
         self.status_path = Path.home() / ".basic-memory" / WATCH_STATUS_JSON
         self.status_path.parent.mkdir(parents=True, exist_ok=True)
+        self._ignore_patterns_cache: dict[Path, Set[str]] = {}
 
         # quiet mode for mcp so it doesn't mess up stdout
         self.console = Console(quiet=quiet)
 
+    async def _schedule_restart(self, stop_event: asyncio.Event):
+        """Schedule a restart of the watch service after the configured interval."""
+        await asyncio.sleep(self.app_config.watch_project_reload_interval)
+        stop_event.set()
+
+    def _get_ignore_patterns(self, project_path: Path) -> Set[str]:
+        """Get or load ignore patterns for a project path."""
+        if project_path not in self._ignore_patterns_cache:
+            self._ignore_patterns_cache[project_path] = load_gitignore_patterns(project_path)
+        return self._ignore_patterns_cache[project_path]
+
+    async def _watch_projects_cycle(self, projects: Sequence[Project], stop_event: asyncio.Event):
+        """Run one cycle of watching the given projects until stop_event is set."""
+        project_paths = [project.path for project in projects]
+
+        async for changes in awatch(
+            *project_paths,
+            debounce=self.app_config.sync_delay,
+            watch_filter=self.filter_changes,
+            recursive=True,
+            stop_event=stop_event,
+        ):
+            # group changes by project and filter using ignore patterns
+            project_changes = defaultdict(list)
+            for change, path in changes:
+                for project in projects:
+                    if self.is_project_path(project, path):
+                        # Check if the file should be ignored based on gitignore patterns
+                        project_path = Path(project.path)
+                        file_path = Path(path)
+                        ignore_patterns = self._get_ignore_patterns(project_path)
+
+                        if should_ignore_path(file_path, project_path, ignore_patterns):
+                            logger.trace(
+                                f"Ignoring watched file change: {file_path.relative_to(project_path)}"
+                            )
+                            continue
+
+                        project_changes[project].append((change, path))
+                        break
+
+            # create coroutines to handle changes
+            change_handlers = [
+                self.handle_changes(project, changes)  # pyright: ignore
+                for project, changes in project_changes.items()
+            ]
+
+            # process changes
+            await asyncio.gather(*change_handlers)
+
     async def run(self):  # pragma: no cover
         """Watch for file changes and sync them"""
 
-
-
+        self.state.running = True
+        self.state.start_time = datetime.now()
+        await self.write_status()
 
         logger.info(
             "Watch service started",
-            f"directories={project_paths}",
             f"debounce_ms={self.app_config.sync_delay}",
             f"pid={os.getpid()}",
         )
 
-        self.state.running = True
-        self.state.start_time = datetime.now()
-        await self.write_status()
-
         try:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            while self.state.running:
+                # Clear ignore patterns cache to pick up any .gitignore changes
+                self._ignore_patterns_cache.clear()
+
+                # Reload projects to catch any new/removed projects
+                projects = await self.project_repository.get_active_projects()
+
+                project_paths = [project.path for project in projects]
+                logger.debug(f"Starting watch cycle for directories: {project_paths}")
+
+                # Create stop event for this watch cycle
+                stop_event = asyncio.Event()
+
+                # Schedule restart after configured interval to reload projects
+                timer_task = asyncio.create_task(self._schedule_restart(stop_event))
+
+                try:
+                    await self._watch_projects_cycle(projects, stop_event)
+                except Exception as e:
+                    logger.exception("Watch service error during cycle", error=str(e))
+                    self.state.record_error(str(e))
+                    await self.write_status()
+                    # Continue to next cycle instead of exiting
+                    await asyncio.sleep(5)  # Brief pause before retry
+                finally:
+                    # Cancel timer task if it's still running
+                    if not timer_task.done():
+                        timer_task.cancel()
+                        try:
+                            await timer_task
+                        except asyncio.CancelledError:
+                            pass
 
         except Exception as e:
             logger.exception("Watch service error", error=str(e))
-
             self.state.record_error(str(e))
             await self.write_status()
             raise
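The restart mechanism deserves a note: `awatch` accepts a `stop_event`, so a timer task can end one watch cycle cleanly and let the outer loop re-read the project list and ignore patterns. The pattern in miniature (path and interval are placeholders):

```python
import asyncio

from watchfiles import awatch

async def schedule_stop(stop_event: asyncio.Event, interval: float) -> None:
    await asyncio.sleep(interval)
    stop_event.set()

async def watch_forever(path: str) -> None:
    while True:
        stop_event = asyncio.Event()
        timer = asyncio.create_task(schedule_stop(stop_event, interval=30.0))
        try:
            # awatch returns once stop_event is set, ending this cycle;
            # the while loop then starts a fresh one.
            async for changes in awatch(path, stop_event=stop_event):
                print(changes)
        finally:
            if not timer.done():
                timer.cancel()

if __name__ == "__main__":
    asyncio.run(watch_forever("."))
```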
@@ -175,11 +233,8 @@ class WatchService:
 
     async def handle_changes(self, project: Project, changes: Set[FileChange]) -> None:
         """Process a batch of file changes"""
-
-        from
-
-        # Lazily initialize sync service for project changes
-        from basic_memory.cli.commands.sync import get_sync_service
+        # avoid circular imports
+        from basic_memory.sync.sync_service import get_sync_service
 
         sync_service = await get_sync_service(project)
         file_service = sync_service.file_service
@@ -288,9 +343,13 @@ class WatchService:
             full_path = directory / path
             if full_path.exists() and full_path.is_file():
                 # File still exists despite DELETE event - treat as modification
-                logger.debug(
+                logger.debug(
+                    "File exists despite DELETE event, treating as modification", path=path
+                )
                 entity, checksum = await sync_service.sync_file(path, new=False)
-                self.state.add_event(
+                self.state.add_event(
+                    path=path, action="modified", status="success", checksum=checksum
+                )
                 self.console.print(f"[yellow]✎[/yellow] {path} (atomic write)")
                 logger.info(f"atomic write detected: {path}")
                 processed.add(path)
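The atomic-write handling is worth spelling out: editors that save via write-temp-then-rename emit a DELETE event for a path that immediately exists again, so the handler re-checks the filesystem before treating it as a real deletion. The decision in isolation (a sketch, not the service's code):

```python
from pathlib import Path

def classify_delete_event(directory: Path, rel_path: str) -> str:
    # A DELETE event for a file that still exists means the editor
    # replaced it atomically; treat it as a modification instead.
    full_path = directory / rel_path
    if full_path.exists() and full_path.is_file():
        return "modified"
    return "deleted"
```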
@@ -302,10 +361,12 @@ class WatchService:
             entity = await sync_service.entity_repository.get_by_file_path(path)
             if entity is None:
                 # No entity means this was likely a directory - skip it
-                logger.debug(
+                logger.debug(
+                    f"Skipping deleted path with no entity (likely directory), path={path}"
+                )
                 processed.add(path)
                 continue
-
+
             # File truly deleted
             logger.debug("Processing deleted file", path=path)
             await sync_service.handle_delete(path)
basic_memory/utils.py CHANGED

@@ -223,7 +223,8 @@ def parse_tags(tags: Union[List[str], str, None]) -> List[str]:
     if isinstance(tags, str):
         # Check if it's a JSON array string (common issue from AI assistants)
         import json
-
+
+        if tags.strip().startswith("[") and tags.strip().endswith("]"):
             try:
                 # Try to parse as JSON array
                 parsed_json = json.loads(tags)
@@ -233,7 +234,7 @@ def parse_tags(tags: Union[List[str], str, None]) -> List[str]:
             except json.JSONDecodeError:
                 # Not valid JSON, fall through to comma-separated parsing
                 pass
-
+
     # Split by comma, strip whitespace, then strip leading '#' characters
     return [tag.strip().lstrip("#") for tag in tags.split(",") if tag and tag.strip()]
 
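Together, the two branches mean `parse_tags` now accepts both a JSON-array string (a common AI-assistant mistake) and plain comma-separated tags. A standalone re-implementation of the logic shown in these hunks, for illustration only:

```python
import json
from typing import List, Union

def parse_tags_sketch(tags: Union[List[str], str, None]) -> List[str]:
    if tags is None:
        return []
    if isinstance(tags, list):
        return [str(t).strip().lstrip("#") for t in tags if str(t).strip()]
    stripped = tags.strip()
    if stripped.startswith("[") and stripped.endswith("]"):
        try:
            parsed = json.loads(stripped)
            if isinstance(parsed, list):
                return [str(t).strip().lstrip("#") for t in parsed if str(t).strip()]
        except json.JSONDecodeError:
            pass  # not valid JSON; fall through to comma-separated parsing
    return [t.strip().lstrip("#") for t in tags.split(",") if t and t.strip()]

print(parse_tags_sketch('["python", "#mcp"]'))  # ['python', 'mcp']
print(parse_tags_sketch("python, #mcp"))        # ['python', 'mcp']
```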
@@ -330,8 +331,8 @@ def detect_potential_file_conflicts(file_path: str, existing_paths: List[str]) -
     return conflicts
 
 
-def
-    """Ensure path
+def valid_project_path_value(path: str):
+    """Ensure project path is valid."""
     # Allow empty strings as they resolve to the project root
     if not path:
         return True
@@ -352,6 +353,15 @@ def validate_project_path(path: str, project_path: Path) -> bool:
     if path.strip() and any(ord(c) < 32 and c not in [" ", "\t"] for c in path):
         return False
 
+    return True
+
+
+def validate_project_path(path: str, project_path: Path) -> bool:
+    """Ensure path is valid and stays within project boundaries."""
+
+    if not valid_project_path_value(path):
+        return False
+
     try:
         resolved = (project_path / path).resolve()
         return resolved.is_relative_to(project_path.resolve())
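Splitting the character-level checks (`valid_project_path_value`) from the containment check (`validate_project_path`) keeps the traversal guard in one place: resolve the joined path, then require it to stay under the project root. The containment idea on its own:

```python
from pathlib import Path

def stays_within(base: Path, candidate: str) -> bool:
    # Resolve '..' segments and symlinks, then require the result to sit
    # under the (also resolved) base directory.
    try:
        resolved = (base / candidate).resolve()
        return resolved.is_relative_to(base.resolve())
    except (OSError, ValueError):
        return False

print(stays_within(Path("/tmp/project"), "notes/todo.md"))     # True
print(stays_within(Path("/tmp/project"), "../../etc/passwd"))  # False
```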
{basic_memory-0.14.4.dist-info → basic_memory-0.15.0.dist-info}/METADATA CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: basic-memory
-Version: 0.14.4
+Version: 0.15.0
 Summary: Local-first knowledge management combining Zettelkasten with knowledge graphs
 Project-URL: Homepage, https://github.com/basicmachines-co/basic-memory
 Project-URL: Repository, https://github.com/basicmachines-co/basic-memory
@@ -9,11 +9,12 @@ Author-email: Basic Machines <hello@basic-machines.co>
 License: AGPL-3.0-or-later
 License-File: LICENSE
 Requires-Python: >=3.12.1
+Requires-Dist: aiofiles>=24.1.0
 Requires-Dist: aiosqlite>=0.20.0
 Requires-Dist: alembic>=1.14.1
 Requires-Dist: dateparser>=1.2.0
 Requires-Dist: fastapi[standard]>=0.115.8
-Requires-Dist: fastmcp
+Requires-Dist: fastmcp>=2.10.2
 Requires-Dist: greenlet>=3.1.1
 Requires-Dist: icecream>=2.1.3
 Requires-Dist: loguru>=0.7.3
@@ -409,10 +410,10 @@ canvas(nodes, edges, title, folder) - Generate knowledge visualizations
 
 See the [Documentation](https://memory.basicmachines.co/) for more info, including:
 
-- [Complete User Guide](https://
-- [CLI tools](https://
-- [Managing multiple Projects](https://
-- [Importing data from OpenAI/Claude Projects](https://
+- [Complete User Guide](https://docs.basicmemory.com/user-guide/)
+- [CLI tools](https://docs.basicmemory.com/guides/cli-reference/)
+- [Managing multiple Projects](https://docs.basicmemory.com/guides/cli-reference/#project)
+- [Importing data from OpenAI/Claude Projects](https://docs.basicmemory.com/guides/cli-reference/#import)
 
 ## License
 