basic-memory 0.13.0b4__py3-none-any.whl → 0.13.0b6__py3-none-any.whl
This diff shows the contents of two publicly released package versions as they appear in their public registries. It is provided for informational purposes only.
Potentially problematic release: this version of basic-memory has been flagged as possibly problematic by the registry.
- basic_memory/__init__.py +2 -7
- basic_memory/api/routers/knowledge_router.py +13 -0
- basic_memory/api/routers/memory_router.py +3 -4
- basic_memory/api/routers/project_router.py +6 -5
- basic_memory/api/routers/prompt_router.py +2 -2
- basic_memory/cli/commands/project.py +3 -3
- basic_memory/cli/commands/status.py +1 -1
- basic_memory/cli/commands/sync.py +1 -1
- basic_memory/cli/commands/tool.py +6 -6
- basic_memory/mcp/prompts/__init__.py +2 -0
- basic_memory/mcp/prompts/recent_activity.py +1 -1
- basic_memory/mcp/prompts/sync_status.py +116 -0
- basic_memory/mcp/server.py +6 -6
- basic_memory/mcp/tools/__init__.py +4 -0
- basic_memory/mcp/tools/build_context.py +32 -7
- basic_memory/mcp/tools/canvas.py +2 -1
- basic_memory/mcp/tools/delete_note.py +159 -4
- basic_memory/mcp/tools/edit_note.py +17 -11
- basic_memory/mcp/tools/move_note.py +252 -40
- basic_memory/mcp/tools/project_management.py +35 -3
- basic_memory/mcp/tools/read_note.py +11 -4
- basic_memory/mcp/tools/search.py +180 -8
- basic_memory/mcp/tools/sync_status.py +254 -0
- basic_memory/mcp/tools/utils.py +47 -0
- basic_memory/mcp/tools/view_note.py +66 -0
- basic_memory/mcp/tools/write_note.py +13 -2
- basic_memory/repository/search_repository.py +116 -38
- basic_memory/schemas/base.py +33 -5
- basic_memory/schemas/memory.py +58 -1
- basic_memory/services/entity_service.py +18 -5
- basic_memory/services/initialization.py +32 -5
- basic_memory/services/link_resolver.py +20 -5
- basic_memory/services/migration_service.py +168 -0
- basic_memory/services/project_service.py +121 -50
- basic_memory/services/sync_status_service.py +181 -0
- basic_memory/sync/sync_service.py +91 -13
- {basic_memory-0.13.0b4.dist-info → basic_memory-0.13.0b6.dist-info}/METADATA +2 -2
- {basic_memory-0.13.0b4.dist-info → basic_memory-0.13.0b6.dist-info}/RECORD +41 -36
- {basic_memory-0.13.0b4.dist-info → basic_memory-0.13.0b6.dist-info}/WHEEL +0 -0
- {basic_memory-0.13.0b4.dist-info → basic_memory-0.13.0b6.dist-info}/entry_points.txt +0 -0
- {basic_memory-0.13.0b4.dist-info → basic_memory-0.13.0b6.dist-info}/licenses/LICENSE +0 -0
basic_memory/services/migration_service.py (new file)
@@ -0,0 +1,168 @@
+"""Migration service for handling background migrations and status tracking."""
+
+import asyncio
+from dataclasses import dataclass
+from enum import Enum
+from pathlib import Path
+from typing import Optional
+
+from loguru import logger
+
+from basic_memory.config import BasicMemoryConfig
+
+
+class MigrationStatus(Enum):
+    """Status of migration operations."""
+
+    NOT_NEEDED = "not_needed"
+    PENDING = "pending"
+    IN_PROGRESS = "in_progress"
+    COMPLETED = "completed"
+    FAILED = "failed"
+
+
+@dataclass
+class MigrationState:
+    """Current state of migration operations."""
+
+    status: MigrationStatus
+    message: str
+    progress: Optional[str] = None
+    error: Optional[str] = None
+    projects_migrated: int = 0
+    projects_total: int = 0
+
+
+class MigrationManager:
+    """Manages background migration operations and status tracking."""
+
+    def __init__(self):
+        self._state = MigrationState(
+            status=MigrationStatus.NOT_NEEDED, message="No migration required"
+        )
+        self._migration_task: Optional[asyncio.Task] = None
+
+    @property
+    def state(self) -> MigrationState:
+        """Get current migration state."""
+        return self._state
+
+    @property
+    def is_ready(self) -> bool:
+        """Check if the system is ready for normal operations."""
+        return self._state.status in (MigrationStatus.NOT_NEEDED, MigrationStatus.COMPLETED)
+
+    @property
+    def status_message(self) -> str:
+        """Get a user-friendly status message."""
+        if self._state.status == MigrationStatus.IN_PROGRESS:
+            progress = (
+                f" ({self._state.projects_migrated}/{self._state.projects_total})"
+                if self._state.projects_total > 0
+                else ""
+            )
+            return f"🔄 File sync in progress{progress}: {self._state.message}. Use sync_status() tool for details."
+        elif self._state.status == MigrationStatus.FAILED:
+            return f"❌ File sync failed: {self._state.error or 'Unknown error'}. Use sync_status() tool for details."
+        elif self._state.status == MigrationStatus.COMPLETED:
+            return "✅ File sync completed successfully"
+        else:
+            return "✅ System ready"
+
+    async def check_migration_needed(self, app_config: BasicMemoryConfig) -> bool:
+        """Check if migration is needed without performing it."""
+        from basic_memory import db
+        from basic_memory.repository import ProjectRepository
+
+        try:
+            # Get database session
+            _, session_maker = await db.get_or_create_db(
+                db_path=app_config.database_path, db_type=db.DatabaseType.FILESYSTEM
+            )
+            project_repository = ProjectRepository(session_maker)
+
+            # Check for legacy projects
+            legacy_projects = []
+            for project_name, project_path in app_config.projects.items():
+                legacy_dir = Path(project_path) / ".basic-memory"
+                if legacy_dir.exists():
+                    project = await project_repository.get_by_name(project_name)
+                    if project:
+                        legacy_projects.append(project)
+
+            if legacy_projects:
+                self._state = MigrationState(
+                    status=MigrationStatus.PENDING,
+                    message="Legacy projects detected",
+                    projects_total=len(legacy_projects),
+                )
+                return True
+            else:
+                self._state = MigrationState(
+                    status=MigrationStatus.NOT_NEEDED, message="No migration required"
+                )
+                return False
+
+        except Exception as e:
+            logger.error(f"Error checking migration status: {e}")
+            self._state = MigrationState(
+                status=MigrationStatus.FAILED, message="Migration check failed", error=str(e)
+            )
+            return False
+
+    async def start_background_migration(self, app_config: BasicMemoryConfig) -> None:
+        """Start migration in background if needed."""
+        if not await self.check_migration_needed(app_config):
+            return
+
+        if self._migration_task and not self._migration_task.done():
+            logger.info("Migration already in progress")
+            return
+
+        logger.info("Starting background migration")
+        self._migration_task = asyncio.create_task(self._run_migration(app_config))
+
+    async def _run_migration(self, app_config: BasicMemoryConfig) -> None:
+        """Run the actual migration process."""
+        try:
+            self._state.status = MigrationStatus.IN_PROGRESS
+            self._state.message = "Migrating legacy projects"
+
+            # Import here to avoid circular imports
+            from basic_memory.services.initialization import migrate_legacy_projects
+
+            # Run the migration
+            await migrate_legacy_projects(app_config)
+
+            self._state = MigrationState(
+                status=MigrationStatus.COMPLETED, message="Migration completed successfully"
+            )
+            logger.info("Background migration completed successfully")
+
+        except Exception as e:
+            logger.error(f"Background migration failed: {e}")
+            self._state = MigrationState(
+                status=MigrationStatus.FAILED, message="Migration failed", error=str(e)
+            )
+
+    async def wait_for_completion(self, timeout: Optional[float] = None) -> bool:
+        """Wait for migration to complete."""
+        if self.is_ready:
+            return True
+
+        if not self._migration_task:
+            return False
+
+        try:
+            await asyncio.wait_for(self._migration_task, timeout=timeout)
+            return self.is_ready
+        except asyncio.TimeoutError:
+            return False
+
+    def mark_completed(self, message: str = "Migration completed") -> None:
+        """Mark migration as completed externally."""
+        self._state = MigrationState(status=MigrationStatus.COMPLETED, message=message)
+
+
+# Global migration manager instance
+migration_manager = MigrationManager()
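The `migration_manager` singleton above is meant to be driven from an async entry point: callers kick off the background task, then either poll `status_message` or await `wait_for_completion()`. Below is a minimal usage sketch, not taken from the diff; the `main()` wrapper and the default construction of `BasicMemoryConfig()` are assumptions for illustration, while the manager API itself comes from the code above.

```python
import asyncio

from basic_memory.config import BasicMemoryConfig
from basic_memory.services.migration_service import migration_manager


async def main() -> None:
    app_config = BasicMemoryConfig()  # assumed: default construction for illustration

    # Kick off the legacy-project migration without blocking startup.
    await migration_manager.start_background_migration(app_config)

    # Callers can poll a user-friendly status line while the task runs...
    print(migration_manager.status_message)

    # ...or block (with a timeout) until the background task finishes.
    if await migration_manager.wait_for_completion(timeout=60):
        print("ready:", migration_manager.is_ready)
    else:
        print("still migrating:", migration_manager.status_message)


if __name__ == "__main__":
    asyncio.run(main())
```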
basic_memory/services/project_service.py
@@ -159,7 +159,9 @@ class ProjectService:
         multiple projects might have is_default=True or no project is marked as default.
         """
         if not self.repository:
-            raise ValueError(
+            raise ValueError(
+                "Repository is required for _ensure_single_default_project"
+            )  # pragma: no cover

         # Get all projects with is_default=True
         db_projects = await self.repository.find_all()
@@ -207,8 +209,29 @@ class ProjectService:
         db_projects = await self.repository.get_active_projects()
         db_projects_by_name = {p.name: p for p in db_projects}

-        # Get all projects from configuration
-        config_projects = config_manager.projects
+        # Get all projects from configuration and normalize names if needed
+        config_projects = config_manager.projects.copy()
+        updated_config = {}
+        config_updated = False
+
+        for name, path in config_projects.items():
+            # Generate normalized name (what the database expects)
+            normalized_name = generate_permalink(name)
+
+            if normalized_name != name:
+                logger.info(f"Normalizing project name in config: '{name}' -> '{normalized_name}'")
+                config_updated = True
+
+            updated_config[normalized_name] = path
+
+        # Update the configuration if any changes were made
+        if config_updated:
+            config_manager.config.projects = updated_config
+            config_manager.save_config(config_manager.config)
+            logger.info("Config updated with normalized project names")
+
+        # Use the normalized config for further processing
+        config_projects = updated_config

         # Add projects that exist in config but not in DB
         for name, path in config_projects.items():
@@ -217,7 +240,7 @@ class ProjectService:
                 project_data = {
                     "name": name,
                     "path": path,
-                    "permalink": name
+                    "permalink": generate_permalink(name),
                     "is_active": True,
                     # Don't set is_default here - let the enforcement logic handle it
                 }
@@ -309,8 +332,11 @@ class ProjectService:
                 f"Changed default project to '{new_default.name}' as '{name}' was deactivated"
             )

-    async def get_project_info(self) -> ProjectInfoResponse:
-        """Get comprehensive information about the
+    async def get_project_info(self, project_name: Optional[str] = None) -> ProjectInfoResponse:
+        """Get comprehensive information about the specified Basic Memory project.
+
+        Args:
+            project_name: Name of the project to get info for. If None, uses the current config project.

         Returns:
             Comprehensive project information and statistics
@@ -318,19 +344,27 @@ class ProjectService:
         if not self.repository:  # pragma: no cover
             raise ValueError("Repository is required for get_project_info")

-        #
-
+        # Use specified project or fall back to config project
+        project_name = project_name or config.project
+        # Get project path from configuration
+        project_path = config_manager.projects.get(project_name)
+        if not project_path:  # pragma: no cover
+            raise ValueError(f"Project '{project_name}' not found in configuration")

-        # Get
-
+        # Get project from database to get project_id
+        db_project = await self.repository.get_by_name(project_name)
+        if not db_project:  # pragma: no cover
+            raise ValueError(f"Project '{project_name}' not found in database")
+
+        # Get statistics for the specified project
+        statistics = await self.get_statistics(db_project.id)
+
+        # Get activity metrics for the specified project
+        activity = await self.get_activity_metrics(db_project.id)

         # Get system status
         system = self.get_system_status()

-        # Get current project information from config
-        project_name = config.project
-        project_path = str(config.home)
-
         # Get enhanced project information from database
         db_projects = await self.repository.get_active_projects()
         db_projects_by_name = {p.name: p for p in db_projects}
@@ -361,60 +395,85 @@ class ProjectService:
             system=system,
         )

-    async def get_statistics(self) -> ProjectStatistics:
-        """Get statistics about the
+    async def get_statistics(self, project_id: int) -> ProjectStatistics:
+        """Get statistics about the specified project.
+
+        Args:
+            project_id: ID of the project to get statistics for (required).
+        """
         if not self.repository:  # pragma: no cover
             raise ValueError("Repository is required for get_statistics")

         # Get basic counts
         entity_count_result = await self.repository.execute_query(
-            text("SELECT COUNT(*) FROM entity")
+            text("SELECT COUNT(*) FROM entity WHERE project_id = :project_id"),
+            {"project_id": project_id},
         )
         total_entities = entity_count_result.scalar() or 0

         observation_count_result = await self.repository.execute_query(
-            text(
+            text(
+                "SELECT COUNT(*) FROM observation o JOIN entity e ON o.entity_id = e.id WHERE e.project_id = :project_id"
+            ),
+            {"project_id": project_id},
         )
         total_observations = observation_count_result.scalar() or 0

         relation_count_result = await self.repository.execute_query(
-            text(
+            text(
+                "SELECT COUNT(*) FROM relation r JOIN entity e ON r.from_id = e.id WHERE e.project_id = :project_id"
+            ),
+            {"project_id": project_id},
         )
         total_relations = relation_count_result.scalar() or 0

         unresolved_count_result = await self.repository.execute_query(
-            text(
+            text(
+                "SELECT COUNT(*) FROM relation r JOIN entity e ON r.from_id = e.id WHERE r.to_id IS NULL AND e.project_id = :project_id"
+            ),
+            {"project_id": project_id},
         )
         total_unresolved = unresolved_count_result.scalar() or 0

         # Get entity counts by type
         entity_types_result = await self.repository.execute_query(
-            text(
+            text(
+                "SELECT entity_type, COUNT(*) FROM entity WHERE project_id = :project_id GROUP BY entity_type"
+            ),
+            {"project_id": project_id},
        )
         entity_types = {row[0]: row[1] for row in entity_types_result.fetchall()}

         # Get observation counts by category
         category_result = await self.repository.execute_query(
-            text(
+            text(
+                "SELECT o.category, COUNT(*) FROM observation o JOIN entity e ON o.entity_id = e.id WHERE e.project_id = :project_id GROUP BY o.category"
+            ),
+            {"project_id": project_id},
         )
         observation_categories = {row[0]: row[1] for row in category_result.fetchall()}

         # Get relation counts by type
         relation_types_result = await self.repository.execute_query(
-            text(
+            text(
+                "SELECT r.relation_type, COUNT(*) FROM relation r JOIN entity e ON r.from_id = e.id WHERE e.project_id = :project_id GROUP BY r.relation_type"
+            ),
+            {"project_id": project_id},
         )
         relation_types = {row[0]: row[1] for row in relation_types_result.fetchall()}

-        # Find most connected entities (most outgoing relations)
+        # Find most connected entities (most outgoing relations) - project filtered
         connected_result = await self.repository.execute_query(
             text("""
-            SELECT e.id, e.title, e.permalink, COUNT(r.id) AS relation_count, file_path
+            SELECT e.id, e.title, e.permalink, COUNT(r.id) AS relation_count, e.file_path
             FROM entity e
             JOIN relation r ON e.id = r.from_id
+            WHERE e.project_id = :project_id
             GROUP BY e.id
             ORDER BY relation_count DESC
             LIMIT 10
-            """)
+            """),
+            {"project_id": project_id},
         )
         most_connected = [
             {
@@ -427,15 +486,16 @@ class ProjectService:
             for row in connected_result.fetchall()
         ]

-        # Count isolated entities (no relations)
+        # Count isolated entities (no relations) - project filtered
         isolated_result = await self.repository.execute_query(
             text("""
             SELECT COUNT(e.id)
             FROM entity e
             LEFT JOIN relation r1 ON e.id = r1.from_id
             LEFT JOIN relation r2 ON e.id = r2.to_id
-            WHERE r1.id IS NULL AND r2.id IS NULL
-            """)
+            WHERE e.project_id = :project_id AND r1.id IS NULL AND r2.id IS NULL
+            """),
+            {"project_id": project_id},
         )
         isolated_count = isolated_result.scalar() or 0

@@ -451,19 +511,25 @@ class ProjectService:
             isolated_entities=isolated_count,
         )

-    async def get_activity_metrics(self) -> ActivityMetrics:
-        """Get activity metrics for the
+    async def get_activity_metrics(self, project_id: int) -> ActivityMetrics:
+        """Get activity metrics for the specified project.
+
+        Args:
+            project_id: ID of the project to get activity metrics for (required).
+        """
         if not self.repository:  # pragma: no cover
             raise ValueError("Repository is required for get_activity_metrics")

-        # Get recently created entities
+        # Get recently created entities (project filtered)
         created_result = await self.repository.execute_query(
             text("""
             SELECT id, title, permalink, entity_type, created_at, file_path
             FROM entity
+            WHERE project_id = :project_id
             ORDER BY created_at DESC
             LIMIT 10
-            """)
+            """),
+            {"project_id": project_id},
         )
         recently_created = [
             {
@@ -477,14 +543,16 @@ class ProjectService:
             for row in created_result.fetchall()
         ]

-        # Get recently updated entities
+        # Get recently updated entities (project filtered)
         updated_result = await self.repository.execute_query(
             text("""
             SELECT id, title, permalink, entity_type, updated_at, file_path
             FROM entity
+            WHERE project_id = :project_id
             ORDER BY updated_at DESC
             LIMIT 10
-            """)
+            """),
+            {"project_id": project_id},
         )
         recently_updated = [
             {
@@ -505,47 +573,50 @@ class ProjectService:
             now.year - (1 if now.month <= 6 else 0), ((now.month - 6) % 12) or 12, 1
         )

-        # Query for monthly entity creation
+        # Query for monthly entity creation (project filtered)
         entity_growth_result = await self.repository.execute_query(
-            text(
+            text("""
             SELECT
                 strftime('%Y-%m', created_at) AS month,
                 COUNT(*) AS count
             FROM entity
-            WHERE created_at >=
+            WHERE created_at >= :six_months_ago AND project_id = :project_id
             GROUP BY month
             ORDER BY month
-            """)
+            """),
+            {"six_months_ago": six_months_ago.isoformat(), "project_id": project_id},
         )
         entity_growth = {row[0]: row[1] for row in entity_growth_result.fetchall()}

-        # Query for monthly observation creation
+        # Query for monthly observation creation (project filtered)
         observation_growth_result = await self.repository.execute_query(
-            text(
+            text("""
             SELECT
-                strftime('%Y-%m', created_at) AS month,
+                strftime('%Y-%m', entity.created_at) AS month,
                 COUNT(*) AS count
             FROM observation
             INNER JOIN entity ON observation.entity_id = entity.id
-            WHERE entity.created_at >=
+            WHERE entity.created_at >= :six_months_ago AND entity.project_id = :project_id
             GROUP BY month
             ORDER BY month
-            """)
+            """),
+            {"six_months_ago": six_months_ago.isoformat(), "project_id": project_id},
         )
         observation_growth = {row[0]: row[1] for row in observation_growth_result.fetchall()}

-        # Query for monthly relation creation
+        # Query for monthly relation creation (project filtered)
         relation_growth_result = await self.repository.execute_query(
-            text(
+            text("""
             SELECT
-                strftime('%Y-%m', created_at) AS month,
+                strftime('%Y-%m', entity.created_at) AS month,
                 COUNT(*) AS count
             FROM relation
             INNER JOIN entity ON relation.from_id = entity.id
-            WHERE entity.created_at >=
+            WHERE entity.created_at >= :six_months_ago AND entity.project_id = :project_id
             GROUP BY month
             ORDER BY month
-            """)
+            """),
+            {"six_months_ago": six_months_ago.isoformat(), "project_id": project_id},
         )
         relation_growth = {row[0]: row[1] for row in relation_growth_result.fetchall()}

@@ -597,4 +668,4 @@ class ProjectService:
             database_size=db_size_readable,
             watch_status=watch_status,
             timestamp=datetime.now(),
-        )
+        )
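The `ProjectService` changes above replace unscoped SQL with project-scoped queries whose `project_id` (and `six_months_ago`) values are passed to `text()` as bound parameters rather than interpolated into the statement. The snippet below is a small, self-contained sketch of that bound-parameter pattern using plain SQLAlchemy against an in-memory SQLite database; the table here is a stand-in for illustration, not the project's real `entity` schema or its `execute_query` helper.

```python
from sqlalchemy import create_engine, text

engine = create_engine("sqlite:///:memory:")

with engine.begin() as conn:
    # Stand-in schema: just enough of an "entity" table to demonstrate the pattern.
    conn.execute(text("CREATE TABLE entity (id INTEGER PRIMARY KEY, project_id INTEGER, entity_type TEXT)"))
    conn.execute(
        text("INSERT INTO entity (project_id, entity_type) VALUES (:pid, :etype)"),
        [{"pid": 1, "etype": "note"}, {"pid": 1, "etype": "note"}, {"pid": 2, "etype": "canvas"}],
    )

    # Same shape as the updated get_statistics(): the project id is a bound parameter,
    # never string-formatted into the SQL text.
    total = conn.execute(
        text("SELECT COUNT(*) FROM entity WHERE project_id = :project_id"),
        {"project_id": 1},
    ).scalar()
    print(total)  # -> 2
```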
basic_memory/services/sync_status_service.py (new file)
@@ -0,0 +1,181 @@
+"""Simple sync status tracking service."""
+
+from dataclasses import dataclass
+from enum import Enum
+from typing import Dict, Optional
+
+
+class SyncStatus(Enum):
+    """Status of sync operations."""
+
+    IDLE = "idle"
+    SCANNING = "scanning"
+    SYNCING = "syncing"
+    COMPLETED = "completed"
+    FAILED = "failed"
+    WATCHING = "watching"
+
+
+@dataclass
+class ProjectSyncStatus:
+    """Sync status for a single project."""
+
+    project_name: str
+    status: SyncStatus
+    message: str = ""
+    files_total: int = 0
+    files_processed: int = 0
+    error: Optional[str] = None
+
+
+class SyncStatusTracker:
+    """Global tracker for all sync operations."""
+
+    def __init__(self):
+        self._project_statuses: Dict[str, ProjectSyncStatus] = {}
+        self._global_status: SyncStatus = SyncStatus.IDLE
+
+    def start_project_sync(self, project_name: str, files_total: int = 0) -> None:
+        """Start tracking sync for a project."""
+        self._project_statuses[project_name] = ProjectSyncStatus(
+            project_name=project_name,
+            status=SyncStatus.SCANNING,
+            message="Scanning files",
+            files_total=files_total,
+            files_processed=0,
+        )
+        self._update_global_status()
+
+    def update_project_progress(  # pragma: no cover
+        self,
+        project_name: str,
+        status: SyncStatus,
+        message: str = "",
+        files_processed: int = 0,
+        files_total: Optional[int] = None,
+    ) -> None:
+        """Update progress for a project."""
+        if project_name not in self._project_statuses:  # pragma: no cover
+            return
+
+        project_status = self._project_statuses[project_name]
+        project_status.status = status
+        project_status.message = message
+        project_status.files_processed = files_processed
+
+        if files_total is not None:
+            project_status.files_total = files_total
+
+        self._update_global_status()
+
+    def complete_project_sync(self, project_name: str) -> None:
+        """Mark project sync as completed."""
+        if project_name in self._project_statuses:
+            self._project_statuses[project_name].status = SyncStatus.COMPLETED
+            self._project_statuses[project_name].message = "Sync completed"
+            self._update_global_status()
+
+    def fail_project_sync(self, project_name: str, error: str) -> None:
+        """Mark project sync as failed."""
+        if project_name in self._project_statuses:
+            self._project_statuses[project_name].status = SyncStatus.FAILED
+            self._project_statuses[project_name].error = error
+            self._update_global_status()
+
+    def start_project_watch(self, project_name: str) -> None:
+        """Mark project as watching for changes (steady state after sync)."""
+        if project_name in self._project_statuses:
+            self._project_statuses[project_name].status = SyncStatus.WATCHING
+            self._project_statuses[project_name].message = "Watching for changes"
+            self._update_global_status()
+        else:
+            # Create new status if project isn't tracked yet
+            self._project_statuses[project_name] = ProjectSyncStatus(
+                project_name=project_name,
+                status=SyncStatus.WATCHING,
+                message="Watching for changes",
+                files_total=0,
+                files_processed=0,
+            )
+            self._update_global_status()
+
+    def _update_global_status(self) -> None:
+        """Update global status based on project statuses."""
+        if not self._project_statuses:  # pragma: no cover
+            self._global_status = SyncStatus.IDLE
+            return
+
+        statuses = [p.status for p in self._project_statuses.values()]
+
+        if any(s == SyncStatus.FAILED for s in statuses):
+            self._global_status = SyncStatus.FAILED
+        elif any(s in (SyncStatus.SCANNING, SyncStatus.SYNCING) for s in statuses):
+            self._global_status = SyncStatus.SYNCING
+        elif all(s in (SyncStatus.COMPLETED, SyncStatus.WATCHING) for s in statuses):
+            self._global_status = SyncStatus.COMPLETED
+        else:
+            self._global_status = SyncStatus.SYNCING
+
+    @property
+    def global_status(self) -> SyncStatus:
+        """Get overall sync status."""
+        return self._global_status
+
+    @property
+    def is_syncing(self) -> bool:
+        """Check if any sync operation is in progress."""
+        return self._global_status in (SyncStatus.SCANNING, SyncStatus.SYNCING)
+
+    @property
+    def is_ready(self) -> bool:  # pragma: no cover
+        """Check if system is ready (no sync in progress)."""
+        return self._global_status in (SyncStatus.IDLE, SyncStatus.COMPLETED)
+
+    def get_project_status(self, project_name: str) -> Optional[ProjectSyncStatus]:
+        """Get status for a specific project."""
+        return self._project_statuses.get(project_name)
+
+    def get_all_projects(self) -> Dict[str, ProjectSyncStatus]:
+        """Get all project statuses."""
+        return self._project_statuses.copy()
+
+    def get_summary(self) -> str:  # pragma: no cover
+        """Get a user-friendly summary of sync status."""
+        if self._global_status == SyncStatus.IDLE:
+            return "✅ System ready"
+        elif self._global_status == SyncStatus.COMPLETED:
+            return "✅ All projects synced successfully"
+        elif self._global_status == SyncStatus.FAILED:
+            failed_projects = [
+                p.project_name
+                for p in self._project_statuses.values()
+                if p.status == SyncStatus.FAILED
+            ]
+            return f"❌ Sync failed for: {', '.join(failed_projects)}"
+        else:
+            active_projects = [
+                p.project_name
+                for p in self._project_statuses.values()
+                if p.status in (SyncStatus.SCANNING, SyncStatus.SYNCING)
+            ]
+            total_files = sum(p.files_total for p in self._project_statuses.values())
+            processed_files = sum(p.files_processed for p in self._project_statuses.values())
+
+            if total_files > 0:
+                progress_pct = (processed_files / total_files) * 100
+                return f"🔄 Syncing {len(active_projects)} projects ({processed_files}/{total_files} files, {progress_pct:.0f}%)"
+            else:
+                return f"🔄 Syncing {len(active_projects)} projects"
+
+    def clear_completed(self) -> None:
+        """Remove completed project statuses to clean up memory."""
+        self._project_statuses = {
+            name: status
+            for name, status in self._project_statuses.items()
+            if status.status != SyncStatus.COMPLETED
+        }
+        self._update_global_status()
+
+
+# Global sync status tracker instance
+sync_status_tracker = SyncStatusTracker()
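The module exposes a single `sync_status_tracker` instance that the sync service and the new `sync_status` MCP tool read from. Below is a minimal lifecycle sketch of how a sync run would drive it; the project name "main" and the file counts are illustrative assumptions, while the method names come from the code above.

```python
from basic_memory.services.sync_status_service import SyncStatus, sync_status_tracker

# A sync run registers itself, reports progress, then settles into the watching state.
sync_status_tracker.start_project_sync("main", files_total=120)
sync_status_tracker.update_project_progress(
    "main", status=SyncStatus.SYNCING, message="Indexing notes", files_processed=45
)
print(sync_status_tracker.get_summary())  # e.g. "🔄 Syncing 1 projects (45/120 files, 38%)"

sync_status_tracker.complete_project_sync("main")
sync_status_tracker.start_project_watch("main")
print(sync_status_tracker.is_ready)       # True once nothing is scanning or syncing
print(sync_status_tracker.get_summary())  # "✅ All projects synced successfully"
```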