basic-memory 0.13.0b4__py3-none-any.whl → 0.13.0b5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of basic-memory might be problematic.
- basic_memory/__init__.py +1 -7
- basic_memory/api/routers/knowledge_router.py +13 -0
- basic_memory/api/routers/memory_router.py +3 -4
- basic_memory/api/routers/project_router.py +6 -5
- basic_memory/api/routers/prompt_router.py +2 -2
- basic_memory/cli/commands/project.py +2 -2
- basic_memory/cli/commands/status.py +1 -1
- basic_memory/cli/commands/sync.py +1 -1
- basic_memory/mcp/prompts/__init__.py +2 -0
- basic_memory/mcp/prompts/sync_status.py +116 -0
- basic_memory/mcp/server.py +6 -6
- basic_memory/mcp/tools/__init__.py +4 -0
- basic_memory/mcp/tools/build_context.py +32 -7
- basic_memory/mcp/tools/canvas.py +2 -1
- basic_memory/mcp/tools/delete_note.py +159 -4
- basic_memory/mcp/tools/edit_note.py +17 -11
- basic_memory/mcp/tools/move_note.py +252 -40
- basic_memory/mcp/tools/project_management.py +35 -3
- basic_memory/mcp/tools/read_note.py +9 -2
- basic_memory/mcp/tools/search.py +180 -8
- basic_memory/mcp/tools/sync_status.py +254 -0
- basic_memory/mcp/tools/utils.py +47 -0
- basic_memory/mcp/tools/view_note.py +66 -0
- basic_memory/mcp/tools/write_note.py +13 -2
- basic_memory/repository/search_repository.py +99 -26
- basic_memory/schemas/base.py +33 -5
- basic_memory/schemas/memory.py +58 -1
- basic_memory/services/entity_service.py +4 -4
- basic_memory/services/initialization.py +32 -5
- basic_memory/services/link_resolver.py +20 -5
- basic_memory/services/migration_service.py +168 -0
- basic_memory/services/project_service.py +97 -47
- basic_memory/services/sync_status_service.py +181 -0
- basic_memory/sync/sync_service.py +55 -2
- {basic_memory-0.13.0b4.dist-info → basic_memory-0.13.0b5.dist-info}/METADATA +2 -2
- {basic_memory-0.13.0b4.dist-info → basic_memory-0.13.0b5.dist-info}/RECORD +39 -34
- {basic_memory-0.13.0b4.dist-info → basic_memory-0.13.0b5.dist-info}/WHEEL +0 -0
- {basic_memory-0.13.0b4.dist-info → basic_memory-0.13.0b5.dist-info}/entry_points.txt +0 -0
- {basic_memory-0.13.0b4.dist-info → basic_memory-0.13.0b5.dist-info}/licenses/LICENSE +0 -0
basic_memory/services/project_service.py

@@ -159,7 +159,9 @@ class ProjectService:
         multiple projects might have is_default=True or no project is marked as default.
         """
         if not self.repository:
-            raise ValueError(
+            raise ValueError(
+                "Repository is required for _ensure_single_default_project"
+            )  # pragma: no cover

         # Get all projects with is_default=True
         db_projects = await self.repository.find_all()
@@ -309,8 +311,11 @@ class ProjectService:
                 f"Changed default project to '{new_default.name}' as '{name}' was deactivated"
             )

-    async def get_project_info(self) -> ProjectInfoResponse:
-        """Get comprehensive information about the
+    async def get_project_info(self, project_name: Optional[str] = None) -> ProjectInfoResponse:
+        """Get comprehensive information about the specified Basic Memory project.
+
+        Args:
+            project_name: Name of the project to get info for. If None, uses the current config project.

         Returns:
             Comprehensive project information and statistics
@@ -318,19 +323,27 @@ class ProjectService:
         if not self.repository:  # pragma: no cover
             raise ValueError("Repository is required for get_project_info")

-        #
-
+        # Use specified project or fall back to config project
+        project_name = project_name or config.project
+        # Get project path from configuration
+        project_path = config_manager.projects.get(project_name)
+        if not project_path:  # pragma: no cover
+            raise ValueError(f"Project '{project_name}' not found in configuration")
+
+        # Get project from database to get project_id
+        db_project = await self.repository.get_by_name(project_name)
+        if not db_project:  # pragma: no cover
+            raise ValueError(f"Project '{project_name}' not found in database")
+
+        # Get statistics for the specified project
+        statistics = await self.get_statistics(db_project.id)

-        # Get activity metrics
-        activity = await self.get_activity_metrics()
+        # Get activity metrics for the specified project
+        activity = await self.get_activity_metrics(db_project.id)

         # Get system status
         system = self.get_system_status()

-        # Get current project information from config
-        project_name = config.project
-        project_path = str(config.home)
-
         # Get enhanced project information from database
         db_projects = await self.repository.get_active_projects()
         db_projects_by_name = {p.name: p for p in db_projects}
@@ -361,60 +374,85 @@ class ProjectService:
             system=system,
         )

-    async def get_statistics(self) -> ProjectStatistics:
-        """Get statistics about the
+    async def get_statistics(self, project_id: int) -> ProjectStatistics:
+        """Get statistics about the specified project.
+
+        Args:
+            project_id: ID of the project to get statistics for (required).
+        """
         if not self.repository:  # pragma: no cover
             raise ValueError("Repository is required for get_statistics")

         # Get basic counts
         entity_count_result = await self.repository.execute_query(
-            text("SELECT COUNT(*) FROM entity")
+            text("SELECT COUNT(*) FROM entity WHERE project_id = :project_id"),
+            {"project_id": project_id},
         )
         total_entities = entity_count_result.scalar() or 0

         observation_count_result = await self.repository.execute_query(
-            text(
+            text(
+                "SELECT COUNT(*) FROM observation o JOIN entity e ON o.entity_id = e.id WHERE e.project_id = :project_id"
+            ),
+            {"project_id": project_id},
         )
         total_observations = observation_count_result.scalar() or 0

         relation_count_result = await self.repository.execute_query(
-            text(
+            text(
+                "SELECT COUNT(*) FROM relation r JOIN entity e ON r.from_id = e.id WHERE e.project_id = :project_id"
+            ),
+            {"project_id": project_id},
         )
         total_relations = relation_count_result.scalar() or 0

         unresolved_count_result = await self.repository.execute_query(
-            text(
+            text(
+                "SELECT COUNT(*) FROM relation r JOIN entity e ON r.from_id = e.id WHERE r.to_id IS NULL AND e.project_id = :project_id"
+            ),
+            {"project_id": project_id},
         )
         total_unresolved = unresolved_count_result.scalar() or 0

         # Get entity counts by type
         entity_types_result = await self.repository.execute_query(
-            text(
+            text(
+                "SELECT entity_type, COUNT(*) FROM entity WHERE project_id = :project_id GROUP BY entity_type"
+            ),
+            {"project_id": project_id},
         )
         entity_types = {row[0]: row[1] for row in entity_types_result.fetchall()}

         # Get observation counts by category
         category_result = await self.repository.execute_query(
-            text(
+            text(
+                "SELECT o.category, COUNT(*) FROM observation o JOIN entity e ON o.entity_id = e.id WHERE e.project_id = :project_id GROUP BY o.category"
+            ),
+            {"project_id": project_id},
         )
         observation_categories = {row[0]: row[1] for row in category_result.fetchall()}

         # Get relation counts by type
         relation_types_result = await self.repository.execute_query(
-            text(
+            text(
+                "SELECT r.relation_type, COUNT(*) FROM relation r JOIN entity e ON r.from_id = e.id WHERE e.project_id = :project_id GROUP BY r.relation_type"
+            ),
+            {"project_id": project_id},
         )
         relation_types = {row[0]: row[1] for row in relation_types_result.fetchall()}

-        # Find most connected entities (most outgoing relations)
+        # Find most connected entities (most outgoing relations) - project filtered
         connected_result = await self.repository.execute_query(
             text("""
-                SELECT e.id, e.title, e.permalink, COUNT(r.id) AS relation_count, file_path
+                SELECT e.id, e.title, e.permalink, COUNT(r.id) AS relation_count, e.file_path
                 FROM entity e
                 JOIN relation r ON e.id = r.from_id
+                WHERE e.project_id = :project_id
                 GROUP BY e.id
                 ORDER BY relation_count DESC
                 LIMIT 10
-            """)
+            """),
+            {"project_id": project_id},
         )
         most_connected = [
             {
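The statistics queries above all follow the same change: each raw COUNT query gains a `project_id = :project_id` filter and the value is handed to `execute_query` as a separate parameter dictionary rather than being interpolated into the SQL string. Below is a minimal standalone sketch of that bound-parameter pattern with SQLAlchemy; the in-memory SQLite engine and the one-column schema are illustrative only and are not part of basic-memory.

from sqlalchemy import create_engine, text

def count_entities(project_id: int) -> int:
    # Throwaway in-memory database so the snippet is self-contained.
    engine = create_engine("sqlite:///:memory:")
    with engine.begin() as conn:
        conn.execute(text("CREATE TABLE entity (id INTEGER PRIMARY KEY, project_id INTEGER)"))
        conn.execute(text("INSERT INTO entity (project_id) VALUES (1), (1), (2)"))
        # Same shape as the new queries: SQL with a named parameter,
        # values supplied separately instead of interpolated.
        result = conn.execute(
            text("SELECT COUNT(*) FROM entity WHERE project_id = :project_id"),
            {"project_id": project_id},
        )
        return result.scalar() or 0

print(count_entities(1))  # -> 2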
@@ -427,15 +465,16 @@ class ProjectService:
             for row in connected_result.fetchall()
         ]

-        # Count isolated entities (no relations)
+        # Count isolated entities (no relations) - project filtered
         isolated_result = await self.repository.execute_query(
             text("""
                 SELECT COUNT(e.id)
                 FROM entity e
                 LEFT JOIN relation r1 ON e.id = r1.from_id
                 LEFT JOIN relation r2 ON e.id = r2.to_id
-                WHERE r1.id IS NULL AND r2.id IS NULL
-            """)
+                WHERE e.project_id = :project_id AND r1.id IS NULL AND r2.id IS NULL
+            """),
+            {"project_id": project_id},
         )
         isolated_count = isolated_result.scalar() or 0

@@ -451,19 +490,25 @@ class ProjectService:
             isolated_entities=isolated_count,
         )

-    async def get_activity_metrics(self) -> ActivityMetrics:
-        """Get activity metrics for the
+    async def get_activity_metrics(self, project_id: int) -> ActivityMetrics:
+        """Get activity metrics for the specified project.
+
+        Args:
+            project_id: ID of the project to get activity metrics for (required).
+        """
         if not self.repository:  # pragma: no cover
             raise ValueError("Repository is required for get_activity_metrics")

-        # Get recently created entities
+        # Get recently created entities (project filtered)
         created_result = await self.repository.execute_query(
             text("""
                 SELECT id, title, permalink, entity_type, created_at, file_path
                 FROM entity
+                WHERE project_id = :project_id
                 ORDER BY created_at DESC
                 LIMIT 10
-            """)
+            """),
+            {"project_id": project_id},
         )
         recently_created = [
             {
@@ -477,14 +522,16 @@ class ProjectService:
             for row in created_result.fetchall()
         ]

-        # Get recently updated entities
+        # Get recently updated entities (project filtered)
         updated_result = await self.repository.execute_query(
             text("""
                 SELECT id, title, permalink, entity_type, updated_at, file_path
                 FROM entity
+                WHERE project_id = :project_id
                 ORDER BY updated_at DESC
                 LIMIT 10
-            """)
+            """),
+            {"project_id": project_id},
         )
         recently_updated = [
             {
@@ -505,47 +552,50 @@ class ProjectService:
             now.year - (1 if now.month <= 6 else 0), ((now.month - 6) % 12) or 12, 1
         )

-        # Query for monthly entity creation
+        # Query for monthly entity creation (project filtered)
         entity_growth_result = await self.repository.execute_query(
-            text(
+            text("""
                 SELECT
                     strftime('%Y-%m', created_at) AS month,
                     COUNT(*) AS count
                 FROM entity
-                WHERE created_at >=
+                WHERE created_at >= :six_months_ago AND project_id = :project_id
                 GROUP BY month
                 ORDER BY month
-            """)
+            """),
+            {"six_months_ago": six_months_ago.isoformat(), "project_id": project_id},
         )
         entity_growth = {row[0]: row[1] for row in entity_growth_result.fetchall()}

-        # Query for monthly observation creation
+        # Query for monthly observation creation (project filtered)
         observation_growth_result = await self.repository.execute_query(
-            text(
+            text("""
                 SELECT
-                    strftime('%Y-%m', created_at) AS month,
+                    strftime('%Y-%m', entity.created_at) AS month,
                     COUNT(*) AS count
                 FROM observation
                 INNER JOIN entity ON observation.entity_id = entity.id
-                WHERE entity.created_at >=
+                WHERE entity.created_at >= :six_months_ago AND entity.project_id = :project_id
                 GROUP BY month
                 ORDER BY month
-            """)
+            """),
+            {"six_months_ago": six_months_ago.isoformat(), "project_id": project_id},
         )
         observation_growth = {row[0]: row[1] for row in observation_growth_result.fetchall()}

-        # Query for monthly relation creation
+        # Query for monthly relation creation (project filtered)
         relation_growth_result = await self.repository.execute_query(
-            text(
+            text("""
                 SELECT
-                    strftime('%Y-%m', created_at) AS month,
+                    strftime('%Y-%m', entity.created_at) AS month,
                     COUNT(*) AS count
                 FROM relation
                 INNER JOIN entity ON relation.from_id = entity.id
-                WHERE entity.created_at >=
+                WHERE entity.created_at >= :six_months_ago AND entity.project_id = :project_id
                 GROUP BY month
                 ORDER BY month
-            """)
+            """),
+            {"six_months_ago": six_months_ago.isoformat(), "project_id": project_id},
         )
         relation_growth = {row[0]: row[1] for row in relation_growth_result.fetchall()}

@@ -597,4 +647,4 @@ class ProjectService:
             database_size=db_size_readable,
             watch_status=watch_status,
             timestamp=datetime.now(),
-        )
+        )
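Taken together, the project_service.py changes above thread an explicit project through the info, statistics, and activity lookups instead of always reading the current config project. A minimal sketch of how a caller might drive the reworked API follows; the already-constructed service and repository objects and the project name "main" are assumptions for illustration, not values from the diff.

async def show_project(project_service, repository) -> None:
    # get_project_info() now takes an optional project name and falls back to
    # the configured default project when it is None.
    info = await project_service.get_project_info(project_name="main")

    # get_statistics() and get_activity_metrics() now require a project_id,
    # which get_project_info() resolves internally via repository.get_by_name().
    db_project = await repository.get_by_name("main")
    stats = await project_service.get_statistics(db_project.id)
    activity = await project_service.get_activity_metrics(db_project.id)
    print(info, stats, activity)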
basic_memory/services/sync_status_service.py (new file)

@@ -0,0 +1,181 @@
+"""Simple sync status tracking service."""
+
+from dataclasses import dataclass
+from enum import Enum
+from typing import Dict, Optional
+
+
+class SyncStatus(Enum):
+    """Status of sync operations."""
+
+    IDLE = "idle"
+    SCANNING = "scanning"
+    SYNCING = "syncing"
+    COMPLETED = "completed"
+    FAILED = "failed"
+    WATCHING = "watching"
+
+
+@dataclass
+class ProjectSyncStatus:
+    """Sync status for a single project."""
+
+    project_name: str
+    status: SyncStatus
+    message: str = ""
+    files_total: int = 0
+    files_processed: int = 0
+    error: Optional[str] = None
+
+
+class SyncStatusTracker:
+    """Global tracker for all sync operations."""
+
+    def __init__(self):
+        self._project_statuses: Dict[str, ProjectSyncStatus] = {}
+        self._global_status: SyncStatus = SyncStatus.IDLE
+
+    def start_project_sync(self, project_name: str, files_total: int = 0) -> None:
+        """Start tracking sync for a project."""
+        self._project_statuses[project_name] = ProjectSyncStatus(
+            project_name=project_name,
+            status=SyncStatus.SCANNING,
+            message="Scanning files",
+            files_total=files_total,
+            files_processed=0,
+        )
+        self._update_global_status()
+
+    def update_project_progress(  # pragma: no cover
+        self,
+        project_name: str,
+        status: SyncStatus,
+        message: str = "",
+        files_processed: int = 0,
+        files_total: Optional[int] = None,
+    ) -> None:
+        """Update progress for a project."""
+        if project_name not in self._project_statuses:  # pragma: no cover
+            return
+
+        project_status = self._project_statuses[project_name]
+        project_status.status = status
+        project_status.message = message
+        project_status.files_processed = files_processed
+
+        if files_total is not None:
+            project_status.files_total = files_total
+
+        self._update_global_status()
+
+    def complete_project_sync(self, project_name: str) -> None:
+        """Mark project sync as completed."""
+        if project_name in self._project_statuses:
+            self._project_statuses[project_name].status = SyncStatus.COMPLETED
+            self._project_statuses[project_name].message = "Sync completed"
+            self._update_global_status()
+
+    def fail_project_sync(self, project_name: str, error: str) -> None:
+        """Mark project sync as failed."""
+        if project_name in self._project_statuses:
+            self._project_statuses[project_name].status = SyncStatus.FAILED
+            self._project_statuses[project_name].error = error
+            self._update_global_status()
+
+    def start_project_watch(self, project_name: str) -> None:
+        """Mark project as watching for changes (steady state after sync)."""
+        if project_name in self._project_statuses:
+            self._project_statuses[project_name].status = SyncStatus.WATCHING
+            self._project_statuses[project_name].message = "Watching for changes"
+            self._update_global_status()
+        else:
+            # Create new status if project isn't tracked yet
+            self._project_statuses[project_name] = ProjectSyncStatus(
+                project_name=project_name,
+                status=SyncStatus.WATCHING,
+                message="Watching for changes",
+                files_total=0,
+                files_processed=0,
+            )
+            self._update_global_status()
+
+    def _update_global_status(self) -> None:
+        """Update global status based on project statuses."""
+        if not self._project_statuses:  # pragma: no cover
+            self._global_status = SyncStatus.IDLE
+            return
+
+        statuses = [p.status for p in self._project_statuses.values()]
+
+        if any(s == SyncStatus.FAILED for s in statuses):
+            self._global_status = SyncStatus.FAILED
+        elif any(s in (SyncStatus.SCANNING, SyncStatus.SYNCING) for s in statuses):
+            self._global_status = SyncStatus.SYNCING
+        elif all(s in (SyncStatus.COMPLETED, SyncStatus.WATCHING) for s in statuses):
+            self._global_status = SyncStatus.COMPLETED
+        else:
+            self._global_status = SyncStatus.SYNCING
+
+    @property
+    def global_status(self) -> SyncStatus:
+        """Get overall sync status."""
+        return self._global_status
+
+    @property
+    def is_syncing(self) -> bool:
+        """Check if any sync operation is in progress."""
+        return self._global_status in (SyncStatus.SCANNING, SyncStatus.SYNCING)
+
+    @property
+    def is_ready(self) -> bool:  # pragma: no cover
+        """Check if system is ready (no sync in progress)."""
+        return self._global_status in (SyncStatus.IDLE, SyncStatus.COMPLETED)
+
+    def get_project_status(self, project_name: str) -> Optional[ProjectSyncStatus]:
+        """Get status for a specific project."""
+        return self._project_statuses.get(project_name)
+
+    def get_all_projects(self) -> Dict[str, ProjectSyncStatus]:
+        """Get all project statuses."""
+        return self._project_statuses.copy()
+
+    def get_summary(self) -> str:  # pragma: no cover
+        """Get a user-friendly summary of sync status."""
+        if self._global_status == SyncStatus.IDLE:
+            return "✅ System ready"
+        elif self._global_status == SyncStatus.COMPLETED:
+            return "✅ All projects synced successfully"
+        elif self._global_status == SyncStatus.FAILED:
+            failed_projects = [
+                p.project_name
+                for p in self._project_statuses.values()
+                if p.status == SyncStatus.FAILED
+            ]
+            return f"❌ Sync failed for: {', '.join(failed_projects)}"
+        else:
+            active_projects = [
+                p.project_name
+                for p in self._project_statuses.values()
+                if p.status in (SyncStatus.SCANNING, SyncStatus.SYNCING)
+            ]
+            total_files = sum(p.files_total for p in self._project_statuses.values())
+            processed_files = sum(p.files_processed for p in self._project_statuses.values())
+
+            if total_files > 0:
+                progress_pct = (processed_files / total_files) * 100
+                return f"🔄 Syncing {len(active_projects)} projects ({processed_files}/{total_files} files, {progress_pct:.0f}%)"
+            else:
+                return f"🔄 Syncing {len(active_projects)} projects"
+
+    def clear_completed(self) -> None:
+        """Remove completed project statuses to clean up memory."""
+        self._project_statuses = {
+            name: status
+            for name, status in self._project_statuses.items()
+            if status.status != SyncStatus.COMPLETED
+        }
+        self._update_global_status()
+
+
+# Global sync status tracker instance
+sync_status_tracker = SyncStatusTracker()
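The new module is a plain in-process tracker exposed through a module-level singleton. A short usage sketch follows; every call matches the API added above, while the project name "notes" and the file counts are made-up example values.

from basic_memory.services.sync_status_service import SyncStatus, sync_status_tracker

# Begin tracking a sync run for one project.
sync_status_tracker.start_project_sync("notes", files_total=42)
sync_status_tracker.update_project_progress(
    project_name="notes",
    status=SyncStatus.SYNCING,
    message="Processing new files",
    files_processed=10,
)
print(sync_status_tracker.is_syncing)     # True
print(sync_status_tracker.get_summary())  # "🔄 Syncing 1 projects (10/42 files, 24%)"

# Finish the run, then settle into the steady watching state.
sync_status_tracker.complete_project_sync("notes")
sync_status_tracker.start_project_watch("notes")
print(sync_status_tracker.global_status)  # SyncStatus.COMPLETED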
basic_memory/sync/sync_service.py

@@ -17,6 +17,7 @@ from basic_memory.models import Entity
 from basic_memory.repository import EntityRepository, RelationRepository
 from basic_memory.services import EntityService, FileService
 from basic_memory.services.search_service import SearchService
+from basic_memory.services.sync_status_service import sync_status_tracker, SyncStatus


 @dataclass
@@ -80,23 +81,38 @@ class SyncService:
         self.search_service = search_service
         self.file_service = file_service

-    async def sync(self, directory: Path) -> SyncReport:
+    async def sync(self, directory: Path, project_name: Optional[str] = None) -> SyncReport:
         """Sync all files with database."""

         start_time = time.time()
         logger.info(f"Sync operation started for directory: {directory}")

+        # Start tracking sync for this project if project name provided
+        if project_name:
+            sync_status_tracker.start_project_sync(project_name)
+
         # initial paths from db to sync
         # path -> checksum
         report = await self.scan(directory)

-        #
+        # Update progress with file counts
+        if project_name:
+            sync_status_tracker.update_project_progress(
+                project_name=project_name,
+                status=SyncStatus.SYNCING,
+                message="Processing file changes",
+                files_total=report.total,
+                files_processed=0,
+            )
+
         # order of sync matters to resolve relations effectively
         logger.info(
             f"Sync changes detected: new_files={len(report.new)}, modified_files={len(report.modified)}, "
             + f"deleted_files={len(report.deleted)}, moved_files={len(report.moves)}"
         )

+        files_processed = 0
+
         # sync moves first
         for old_path, new_path in report.moves.items():
             # in the case where a file has been deleted and replaced by another file
@@ -109,19 +125,56 @@ class SyncService:
             else:
                 await self.handle_move(old_path, new_path)

+            files_processed += 1
+            if project_name:
+                sync_status_tracker.update_project_progress(  # pragma: no cover
+                    project_name=project_name,
+                    status=SyncStatus.SYNCING,
+                    message="Processing moves",
+                    files_processed=files_processed,
+                )
+
         # deleted next
         for path in report.deleted:
             await self.handle_delete(path)
+            files_processed += 1
+            if project_name:
+                sync_status_tracker.update_project_progress(  # pragma: no cover
+                    project_name=project_name,
+                    status=SyncStatus.SYNCING,
+                    message="Processing deletions",
+                    files_processed=files_processed,
+                )

         # then new and modified
         for path in report.new:
             await self.sync_file(path, new=True)
+            files_processed += 1
+            if project_name:
+                sync_status_tracker.update_project_progress(
+                    project_name=project_name,
+                    status=SyncStatus.SYNCING,
+                    message="Processing new files",
+                    files_processed=files_processed,
+                )

         for path in report.modified:
             await self.sync_file(path, new=False)
+            files_processed += 1
+            if project_name:
+                sync_status_tracker.update_project_progress(  # pragma: no cover
+                    project_name=project_name,
+                    status=SyncStatus.SYNCING,
+                    message="Processing modified files",
+                    files_processed=files_processed,
+                )

         await self.resolve_relations()

+        # Mark sync as completed
+        if project_name:
+            sync_status_tracker.complete_project_sync(project_name)
+
         duration_ms = int((time.time() - start_time) * 1000)
         logger.info(
             f"Sync operation completed: directory={directory}, total_changes={report.total}, duration_ms={duration_ms}"
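With the new optional project_name argument, a caller that wants progress reporting passes the project through so the tracker reflects each phase of the run, while a caller that omits it keeps the previous behaviour. A minimal sketch, assuming sync_service is an already-built SyncService and that the path and project name are illustrative:

from pathlib import Path

async def run_sync(sync_service) -> None:
    # Passing project_name routes progress through sync_status_tracker;
    # leaving it as None skips status tracking entirely.
    report = await sync_service.sync(Path("~/notes").expanduser(), project_name="notes")
    print(f"Synced {report.total} changes")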
{basic_memory-0.13.0b4.dist-info → basic_memory-0.13.0b5.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: basic-memory
-Version: 0.13.0b4
+Version: 0.13.0b5
 Summary: Local-first knowledge management combining Zettelkasten with knowledge graphs
 Project-URL: Homepage, https://github.com/basicmachines-co/basic-memory
 Project-URL: Repository, https://github.com/basicmachines-co/basic-memory
@@ -25,10 +25,10 @@ Requires-Dist: pydantic-settings>=2.6.1
 Requires-Dist: pydantic[email,timezone]>=2.10.3
 Requires-Dist: pyjwt>=2.10.1
 Requires-Dist: pyright>=1.1.390
+Requires-Dist: pytest-aio>=1.9.0
 Requires-Dist: python-dotenv>=1.1.0
 Requires-Dist: python-frontmatter>=1.1.0
 Requires-Dist: pyyaml>=6.0.1
-Requires-Dist: qasync>=0.27.1
 Requires-Dist: rich>=13.9.4
 Requires-Dist: sqlalchemy>=2.0.0
 Requires-Dist: typer>=0.9.0