basic-memory 0.16.1__py3-none-any.whl → 0.17.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of basic-memory might be problematic.
- basic_memory/__init__.py +1 -1
- basic_memory/alembic/env.py +112 -26
- basic_memory/alembic/versions/314f1ea54dc4_add_postgres_full_text_search_support_.py +131 -0
- basic_memory/alembic/versions/5fe1ab1ccebe_add_projects_table.py +15 -3
- basic_memory/alembic/versions/647e7a75e2cd_project_constraint_fix.py +44 -36
- basic_memory/alembic/versions/6830751f5fb6_merge_multiple_heads.py +24 -0
- basic_memory/alembic/versions/a2b3c4d5e6f7_add_search_index_entity_cascade.py +56 -0
- basic_memory/alembic/versions/cc7172b46608_update_search_index_schema.py +13 -0
- basic_memory/alembic/versions/f8a9b2c3d4e5_add_pg_trgm_for_fuzzy_link_resolution.py +239 -0
- basic_memory/alembic/versions/g9a0b3c4d5e6_add_external_id_to_project_and_entity.py +173 -0
- basic_memory/api/app.py +45 -24
- basic_memory/api/container.py +133 -0
- basic_memory/api/routers/knowledge_router.py +17 -5
- basic_memory/api/routers/project_router.py +68 -14
- basic_memory/api/routers/resource_router.py +37 -27
- basic_memory/api/routers/utils.py +53 -14
- basic_memory/api/v2/__init__.py +35 -0
- basic_memory/api/v2/routers/__init__.py +21 -0
- basic_memory/api/v2/routers/directory_router.py +93 -0
- basic_memory/api/v2/routers/importer_router.py +181 -0
- basic_memory/api/v2/routers/knowledge_router.py +427 -0
- basic_memory/api/v2/routers/memory_router.py +130 -0
- basic_memory/api/v2/routers/project_router.py +359 -0
- basic_memory/api/v2/routers/prompt_router.py +269 -0
- basic_memory/api/v2/routers/resource_router.py +286 -0
- basic_memory/api/v2/routers/search_router.py +73 -0
- basic_memory/cli/app.py +43 -7
- basic_memory/cli/auth.py +27 -4
- basic_memory/cli/commands/__init__.py +3 -1
- basic_memory/cli/commands/cloud/api_client.py +20 -5
- basic_memory/cli/commands/cloud/cloud_utils.py +13 -6
- basic_memory/cli/commands/cloud/rclone_commands.py +110 -14
- basic_memory/cli/commands/cloud/rclone_installer.py +18 -4
- basic_memory/cli/commands/cloud/upload.py +10 -3
- basic_memory/cli/commands/command_utils.py +52 -4
- basic_memory/cli/commands/db.py +78 -19
- basic_memory/cli/commands/format.py +198 -0
- basic_memory/cli/commands/import_chatgpt.py +12 -8
- basic_memory/cli/commands/import_claude_conversations.py +12 -8
- basic_memory/cli/commands/import_claude_projects.py +12 -8
- basic_memory/cli/commands/import_memory_json.py +12 -8
- basic_memory/cli/commands/mcp.py +8 -26
- basic_memory/cli/commands/project.py +22 -9
- basic_memory/cli/commands/status.py +3 -2
- basic_memory/cli/commands/telemetry.py +81 -0
- basic_memory/cli/container.py +84 -0
- basic_memory/cli/main.py +7 -0
- basic_memory/config.py +177 -77
- basic_memory/db.py +183 -77
- basic_memory/deps/__init__.py +293 -0
- basic_memory/deps/config.py +26 -0
- basic_memory/deps/db.py +56 -0
- basic_memory/deps/importers.py +200 -0
- basic_memory/deps/projects.py +238 -0
- basic_memory/deps/repositories.py +179 -0
- basic_memory/deps/services.py +480 -0
- basic_memory/deps.py +14 -409
- basic_memory/file_utils.py +212 -3
- basic_memory/ignore_utils.py +5 -5
- basic_memory/importers/base.py +40 -19
- basic_memory/importers/chatgpt_importer.py +17 -4
- basic_memory/importers/claude_conversations_importer.py +27 -12
- basic_memory/importers/claude_projects_importer.py +50 -14
- basic_memory/importers/memory_json_importer.py +36 -16
- basic_memory/importers/utils.py +5 -2
- basic_memory/markdown/entity_parser.py +62 -23
- basic_memory/markdown/markdown_processor.py +67 -4
- basic_memory/markdown/plugins.py +4 -2
- basic_memory/markdown/utils.py +10 -1
- basic_memory/mcp/async_client.py +1 -0
- basic_memory/mcp/clients/__init__.py +28 -0
- basic_memory/mcp/clients/directory.py +70 -0
- basic_memory/mcp/clients/knowledge.py +176 -0
- basic_memory/mcp/clients/memory.py +120 -0
- basic_memory/mcp/clients/project.py +89 -0
- basic_memory/mcp/clients/resource.py +71 -0
- basic_memory/mcp/clients/search.py +65 -0
- basic_memory/mcp/container.py +110 -0
- basic_memory/mcp/project_context.py +47 -33
- basic_memory/mcp/prompts/ai_assistant_guide.py +2 -2
- basic_memory/mcp/prompts/recent_activity.py +2 -2
- basic_memory/mcp/prompts/utils.py +3 -3
- basic_memory/mcp/server.py +58 -0
- basic_memory/mcp/tools/build_context.py +14 -14
- basic_memory/mcp/tools/canvas.py +34 -12
- basic_memory/mcp/tools/chatgpt_tools.py +4 -1
- basic_memory/mcp/tools/delete_note.py +31 -7
- basic_memory/mcp/tools/edit_note.py +14 -9
- basic_memory/mcp/tools/list_directory.py +7 -17
- basic_memory/mcp/tools/move_note.py +35 -31
- basic_memory/mcp/tools/project_management.py +29 -25
- basic_memory/mcp/tools/read_content.py +13 -3
- basic_memory/mcp/tools/read_note.py +24 -14
- basic_memory/mcp/tools/recent_activity.py +32 -38
- basic_memory/mcp/tools/search.py +17 -10
- basic_memory/mcp/tools/utils.py +28 -0
- basic_memory/mcp/tools/view_note.py +2 -1
- basic_memory/mcp/tools/write_note.py +37 -14
- basic_memory/models/knowledge.py +15 -2
- basic_memory/models/project.py +7 -1
- basic_memory/models/search.py +58 -2
- basic_memory/project_resolver.py +222 -0
- basic_memory/repository/entity_repository.py +210 -3
- basic_memory/repository/observation_repository.py +1 -0
- basic_memory/repository/postgres_search_repository.py +451 -0
- basic_memory/repository/project_repository.py +38 -1
- basic_memory/repository/relation_repository.py +58 -2
- basic_memory/repository/repository.py +1 -0
- basic_memory/repository/search_index_row.py +95 -0
- basic_memory/repository/search_repository.py +77 -615
- basic_memory/repository/search_repository_base.py +241 -0
- basic_memory/repository/sqlite_search_repository.py +437 -0
- basic_memory/runtime.py +61 -0
- basic_memory/schemas/base.py +36 -6
- basic_memory/schemas/directory.py +2 -1
- basic_memory/schemas/memory.py +9 -2
- basic_memory/schemas/project_info.py +2 -0
- basic_memory/schemas/response.py +84 -27
- basic_memory/schemas/search.py +5 -0
- basic_memory/schemas/sync_report.py +1 -1
- basic_memory/schemas/v2/__init__.py +27 -0
- basic_memory/schemas/v2/entity.py +133 -0
- basic_memory/schemas/v2/resource.py +47 -0
- basic_memory/services/context_service.py +219 -43
- basic_memory/services/directory_service.py +26 -11
- basic_memory/services/entity_service.py +68 -33
- basic_memory/services/file_service.py +131 -16
- basic_memory/services/initialization.py +51 -26
- basic_memory/services/link_resolver.py +1 -0
- basic_memory/services/project_service.py +68 -43
- basic_memory/services/search_service.py +75 -16
- basic_memory/sync/__init__.py +2 -1
- basic_memory/sync/coordinator.py +160 -0
- basic_memory/sync/sync_service.py +135 -115
- basic_memory/sync/watch_service.py +32 -12
- basic_memory/telemetry.py +249 -0
- basic_memory/utils.py +96 -75
- {basic_memory-0.16.1.dist-info → basic_memory-0.17.4.dist-info}/METADATA +129 -5
- basic_memory-0.17.4.dist-info/RECORD +193 -0
- {basic_memory-0.16.1.dist-info → basic_memory-0.17.4.dist-info}/WHEEL +1 -1
- basic_memory-0.16.1.dist-info/RECORD +0 -148
- {basic_memory-0.16.1.dist-info → basic_memory-0.17.4.dist-info}/entry_points.txt +0 -0
- {basic_memory-0.16.1.dist-info → basic_memory-0.17.4.dist-info}/licenses/LICENSE +0 -0
--- a/basic_memory/services/context_service.py
+++ b/basic_memory/services/context_service.py
@@ -4,11 +4,13 @@ from dataclasses import dataclass, field
 from datetime import datetime, timezone
 from typing import List, Optional, Tuple
 
+
 from loguru import logger
 from sqlalchemy import text
 
 from basic_memory.repository.entity_repository import EntityRepository
 from basic_memory.repository.observation_repository import ObservationRepository
+from basic_memory.repository.postgres_search_repository import PostgresSearchRepository
 from basic_memory.repository.search_repository import SearchRepository, SearchIndexRow
 from basic_memory.schemas.memory import MemoryUrl, memory_url_path
 from basic_memory.schemas.search import SearchItemType
@@ -252,9 +254,6 @@ class ContextService:
         # Build the VALUES clause for entity IDs
         entity_id_values = ", ".join([str(i) for i in entity_ids])
 
-        # For compatibility with the old query, we still need this for filtering
-        values = ", ".join([f"('{t}', {i})" for t, i in type_id_pairs])
-
         # Parameters for bindings - include project_id for security filtering
         params = {
             "max_depth": max_depth,
@@ -264,7 +263,14 @@ class ContextService:
 
         # Build date and timeframe filters conditionally based on since parameter
         if since:
-
+            # SQLite accepts ISO strings, but Postgres/asyncpg requires datetime objects
+            if isinstance(self.search_repository, PostgresSearchRepository):  # pragma: no cover
+                # asyncpg expects timezone-NAIVE datetime in UTC for DateTime(timezone=True) columns
+                # even though the column stores timezone-aware values
+                since_utc = since.astimezone(timezone.utc) if since.tzinfo else since  # pragma: no cover
+                params["since_date"] = since_utc.replace(tzinfo=None)  # pyright: ignore # pragma: no cover
+            else:
+                params["since_date"] = since.isoformat()  # pyright: ignore
             date_filter = "AND e.created_at >= :since_date"
             relation_date_filter = "AND e_from.created_at >= :since_date"
             timeframe_condition = "AND eg.relation_date >= :since_date"
@@ -279,13 +285,210 @@ class ContextService:
 
         # Use a CTE that operates directly on entity and relation tables
        # This avoids the overhead of the search_index virtual table
-
+        # Note: Postgres and SQLite have different CTE limitations:
+        # - Postgres: doesn't allow multiple UNION ALL branches referencing the CTE
+        # - SQLite: doesn't support LATERAL joins
+        # So we need different queries for each database backend
+
+        # Detect database backend
+        is_postgres = isinstance(self.search_repository, PostgresSearchRepository)
+
+        if is_postgres:  # pragma: no cover
+            query = self._build_postgres_query(
+                entity_id_values,
+                date_filter,
+                project_filter,
+                relation_date_filter,
+                relation_project_filter,
+                timeframe_condition,
+            )
+        else:
+            # SQLite needs VALUES clause for exclusion (not needed for Postgres)
+            values = ", ".join([f"('{t}', {i})" for t, i in type_id_pairs])
+            query = self._build_sqlite_query(
+                entity_id_values,
+                date_filter,
+                project_filter,
+                relation_date_filter,
+                relation_project_filter,
+                timeframe_condition,
+                values,
+            )
+
+        result = await self.search_repository.execute_query(query, params=params)
+        rows = result.all()
+
+        context_rows = [
+            ContextResultRow(
+                type=row.type,
+                id=row.id,
+                title=row.title,
+                permalink=row.permalink,
+                file_path=row.file_path,
+                from_id=row.from_id,
+                to_id=row.to_id,
+                relation_type=row.relation_type,
+                content=row.content,
+                category=row.category,
+                entity_id=row.entity_id,
+                depth=row.depth,
+                root_id=row.root_id,
+                created_at=row.created_at,
+            )
+            for row in rows
+        ]
+        return context_rows
+
+    def _build_postgres_query(  # pragma: no cover
+        self,
+        entity_id_values: str,
+        date_filter: str,
+        project_filter: str,
+        relation_date_filter: str,
+        relation_project_filter: str,
+        timeframe_condition: str,
+    ):
+        """Build Postgres-specific CTE query using LATERAL joins."""
+        return text(f"""
             WITH RECURSIVE entity_graph AS (
                 -- Base case: seed entities
-                SELECT
+                SELECT
                     e.id,
                     'entity' as type,
-                    e.title,
+                    e.title,
+                    e.permalink,
+                    e.file_path,
+                    CAST(NULL AS INTEGER) as from_id,
+                    CAST(NULL AS INTEGER) as to_id,
+                    CAST(NULL AS TEXT) as relation_type,
+                    CAST(NULL AS TEXT) as content,
+                    CAST(NULL AS TEXT) as category,
+                    CAST(NULL AS INTEGER) as entity_id,
+                    0 as depth,
+                    e.id as root_id,
+                    e.created_at,
+                    e.created_at as relation_date
+                FROM entity e
+                WHERE e.id IN ({entity_id_values})
+                {date_filter}
+                {project_filter}
+
+                UNION ALL
+
+                -- Fetch BOTH relations AND connected entities in a single recursive step
+                -- Postgres only allows ONE reference to the recursive CTE in the recursive term
+                -- We use CROSS JOIN LATERAL to generate two rows (relation + entity) from each traversal
+                SELECT
+                    CASE
+                        WHEN step_type = 1 THEN r.id
+                        ELSE e.id
+                    END as id,
+                    CASE
+                        WHEN step_type = 1 THEN 'relation'
+                        ELSE 'entity'
+                    END as type,
+                    CASE
+                        WHEN step_type = 1 THEN r.relation_type || ': ' || r.to_name
+                        ELSE e.title
+                    END as title,
+                    CASE
+                        WHEN step_type = 1 THEN ''
+                        ELSE COALESCE(e.permalink, '')
+                    END as permalink,
+                    CASE
+                        WHEN step_type = 1 THEN e_from.file_path
+                        ELSE e.file_path
+                    END as file_path,
+                    CASE
+                        WHEN step_type = 1 THEN r.from_id
+                        ELSE NULL
+                    END as from_id,
+                    CASE
+                        WHEN step_type = 1 THEN r.to_id
+                        ELSE NULL
+                    END as to_id,
+                    CASE
+                        WHEN step_type = 1 THEN r.relation_type
+                        ELSE NULL
+                    END as relation_type,
+                    CAST(NULL AS TEXT) as content,
+                    CAST(NULL AS TEXT) as category,
+                    CAST(NULL AS INTEGER) as entity_id,
+                    eg.depth + step_type as depth,
+                    eg.root_id,
+                    CASE
+                        WHEN step_type = 1 THEN e_from.created_at
+                        ELSE e.created_at
+                    END as created_at,
+                    CASE
+                        WHEN step_type = 1 THEN e_from.created_at
+                        ELSE eg.relation_date
+                    END as relation_date
+                FROM entity_graph eg
+                CROSS JOIN LATERAL (VALUES (1), (2)) AS steps(step_type)
+                JOIN relation r ON (
+                    eg.type = 'entity' AND
+                    (r.from_id = eg.id OR r.to_id = eg.id)
+                )
+                JOIN entity e_from ON (
+                    r.from_id = e_from.id
+                    {relation_project_filter}
+                )
+                LEFT JOIN entity e ON (
+                    step_type = 2 AND
+                    e.id = CASE
+                        WHEN r.from_id = eg.id THEN r.to_id
+                        ELSE r.from_id
+                    END
+                    {date_filter}
+                    {project_filter}
+                )
+                WHERE eg.depth < :max_depth
+                AND (step_type = 1 OR (step_type = 2 AND e.id IS NOT NULL AND e.id != eg.id))
+                {timeframe_condition}
+            )
+            -- Materialize and filter
+            SELECT DISTINCT
+                type,
+                id,
+                title,
+                permalink,
+                file_path,
+                from_id,
+                to_id,
+                relation_type,
+                content,
+                category,
+                entity_id,
+                MIN(depth) as depth,
+                root_id,
+                created_at
+            FROM entity_graph
+            WHERE depth > 0
+            GROUP BY type, id, title, permalink, file_path, from_id, to_id,
+                     relation_type, content, category, entity_id, root_id, created_at
+            ORDER BY depth, type, id
+            LIMIT :max_results
+        """)
+
+    def _build_sqlite_query(
+        self,
+        entity_id_values: str,
+        date_filter: str,
+        project_filter: str,
+        relation_date_filter: str,
+        relation_project_filter: str,
+        timeframe_condition: str,
+        values: str,
+    ):
+        """Build SQLite-specific CTE query using multiple UNION ALL branches."""
+        return text(f"""
+            WITH RECURSIVE entity_graph AS (
+                -- Base case: seed entities
+                SELECT
+                    e.id,
+                    'entity' as type,
+                    e.title,
                     e.permalink,
                     e.file_path,
                     NULL as from_id,
@@ -311,7 +514,6 @@ class ContextService:
                     r.id,
                     'relation' as type,
                     r.relation_type || ': ' || r.to_name as title,
-                    -- Relation model doesn't have permalink column - we'll generate it at runtime
                     '' as permalink,
                     e_from.file_path,
                     r.from_id,
@@ -322,7 +524,7 @@ class ContextService:
                     NULL as entity_id,
                     eg.depth + 1,
                     eg.root_id,
-                    e_from.created_at,
+                    e_from.created_at,
                     e_from.created_at as relation_date,
                     CASE WHEN r.from_id = eg.id THEN 0 ELSE 1 END as is_incoming
                 FROM entity_graph eg
@@ -337,7 +539,6 @@ class ContextService:
                 )
                 LEFT JOIN entity e_to ON (r.to_id = e_to.id)
                 WHERE eg.depth < :max_depth
-                -- Ensure to_entity (if exists) also belongs to same project
                 AND (r.to_id IS NULL OR e_to.project_id = :project_id)
 
                 UNION ALL
@@ -347,9 +548,9 @@ class ContextService:
                     e.id,
                     'entity' as type,
                     e.title,
-                    CASE
-                        WHEN e.permalink IS NULL THEN ''
-                        ELSE e.permalink
+                    CASE
+                        WHEN e.permalink IS NULL THEN ''
+                        ELSE e.permalink
                     END as permalink,
                     e.file_path,
                     NULL as from_id,
@@ -366,7 +567,7 @@ class ContextService:
                 FROM entity_graph eg
                 JOIN entity e ON (
                     eg.type = 'relation' AND
-                    e.id = CASE
+                    e.id = CASE
                         WHEN eg.is_incoming = 0 THEN eg.to_id
                         ELSE eg.from_id
                     END
@@ -374,10 +575,9 @@ class ContextService:
                     {project_filter}
                 )
                 WHERE eg.depth < :max_depth
-                -- Only include entities connected by relations within timeframe if specified
                 {timeframe_condition}
             )
-            SELECT DISTINCT
+            SELECT DISTINCT
                 type,
                 id,
                 title,
@@ -393,33 +593,9 @@ class ContextService:
                 root_id,
                 created_at
             FROM entity_graph
-            WHERE
-            GROUP BY
-
+            WHERE depth > 0
+            GROUP BY type, id, title, permalink, file_path, from_id, to_id,
+                     relation_type, content, category, entity_id, root_id, created_at
             ORDER BY depth, type, id
             LIMIT :max_results
         """)
-
-        result = await self.search_repository.execute_query(query, params=params)
-        rows = result.all()
-
-        context_rows = [
-            ContextResultRow(
-                type=row.type,
-                id=row.id,
-                title=row.title,
-                permalink=row.permalink,
-                file_path=row.file_path,
-                from_id=row.from_id,
-                to_id=row.to_id,
-                relation_type=row.relation_type,
-                content=row.content,
-                category=row.category,
-                entity_id=row.entity_id,
-                depth=row.depth,
-                root_id=row.root_id,
-                created_at=row.created_at,
-            )
-            for row in rows
-        ]
-        return context_rows
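The `since` handling above is the subtle part of the new dual-backend support: SQLite happily binds ISO-8601 strings for datetime comparisons, while asyncpg wants a timezone-naive UTC `datetime` for `DateTime(timezone=True)` columns. A minimal standalone sketch of that normalization follows; `normalize_since_param` and the `is_postgres` flag are illustrative stand-ins for the `isinstance(self.search_repository, PostgresSearchRepository)` check shown in the diff, not the package's API.

```python
from datetime import datetime, timezone
from typing import Union


def normalize_since_param(since: datetime, is_postgres: bool) -> Union[datetime, str]:
    """Sketch of the since-parameter normalization used in ContextService above."""
    if is_postgres:
        # asyncpg: convert aware datetimes to UTC, then drop tzinfo.
        since_utc = since.astimezone(timezone.utc) if since.tzinfo else since
        return since_utc.replace(tzinfo=None)
    # SQLite binds ISO-8601 strings directly.
    return since.isoformat()


# An aware timestamp becomes naive UTC for Postgres, an ISO string for SQLite.
ts = datetime(2025, 1, 2, 3, 4, 5, tzinfo=timezone.utc)
assert normalize_since_param(ts, is_postgres=True) == datetime(2025, 1, 2, 3, 4, 5)
assert normalize_since_param(ts, is_postgres=False) == "2025-01-02T03:04:05+00:00"
```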
--- a/basic_memory/services/directory_service.py
+++ b/basic_memory/services/directory_service.py
@@ -3,8 +3,10 @@
 import fnmatch
 import logging
 import os
+from datetime import datetime
 from typing import Dict, List, Optional, Sequence
 
+
 from basic_memory.models import Entity
 from basic_memory.repository import EntityRepository
 from basic_memory.schemas.directory import DirectoryNode
@@ -12,6 +14,17 @@ from basic_memory.schemas.directory import DirectoryNode
 logger = logging.getLogger(__name__)
 
 
+def _mtime_to_datetime(entity: Entity) -> datetime:
+    """Convert entity mtime (file modification time) to datetime.
+
+    Returns the file's actual modification time, falling back to updated_at
+    if mtime is not available.
+    """
+    if entity.mtime:  # pragma: no cover
+        return datetime.fromtimestamp(entity.mtime).astimezone()  # pragma: no cover
+    return entity.updated_at
+
+
 class DirectoryService:
     """Service for working with directory trees."""
 
@@ -74,10 +87,11 @@ class DirectoryService:
                 type="file",
                 title=file.title,
                 permalink=file.permalink,
+                external_id=file.external_id,  # UUID for v2 API
                 entity_id=file.id,
                 entity_type=file.entity_type,
                 content_type=file.content_type,
-                updated_at=file
+                updated_at=_mtime_to_datetime(file),
             )
 
             # Add to parent directory's children
@@ -173,7 +187,7 @@ class DirectoryService:
         # Find the target directory node
         target_node = self._find_directory_node(root_tree, dir_name)
         if not target_node:
-            return []
+            return []  # pragma: no cover
 
         # Collect nodes with depth and glob filtering
         result = []
@@ -238,18 +252,19 @@ class DirectoryService:
                 type="file",
                 title=file.title,
                 permalink=file.permalink,
+                external_id=file.external_id,  # UUID for v2 API
                 entity_id=file.id,
                 entity_type=file.entity_type,
                 content_type=file.content_type,
-                updated_at=file
+                updated_at=_mtime_to_datetime(file),
             )
 
             # Add to parent directory's children
             if directory_path in dir_map:
                 dir_map[directory_path].children.append(file_node)
-            elif root_path in dir_map:
+            elif root_path in dir_map:  # pragma: no cover
                 # Fallback to root if parent not found
-                dir_map[root_path].children.append(file_node)
+                dir_map[root_path].children.append(file_node)  # pragma: no cover
 
         return root_node
 
@@ -260,13 +275,13 @@ class DirectoryService:
         if root.directory_path == target_path:
             return root
 
-        for child in root.children:
-            if child.type == "directory":
-                found = self._find_directory_node(child, target_path)
-                if found:
-                    return found
+        for child in root.children:  # pragma: no cover
+            if child.type == "directory":  # pragma: no cover
+                found = self._find_directory_node(child, target_path)  # pragma: no cover
+                if found:  # pragma: no cover
+                    return found  # pragma: no cover
 
-        return None
+        return None  # pragma: no cover
 
     def _collect_nodes_recursive(
         self,
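The `_mtime_to_datetime` helper added above exists because `datetime.fromtimestamp()` returns a naive local time; chaining `.astimezone()` attaches the local timezone so directory listings carry an aware timestamp, and entities without an mtime fall back to `updated_at`. A small sketch of the same fallback using a plain `Optional[float]` instead of an `Entity` (hypothetical helper, for illustration only):

```python
from datetime import datetime, timezone
from typing import Optional


def mtime_or_fallback(mtime: Optional[float], fallback: datetime) -> datetime:
    """Sketch of the mtime-to-datetime fallback shown in the diff above."""
    if mtime:
        # fromtimestamp() is naive local time; astimezone() makes it timezone-aware.
        return datetime.fromtimestamp(mtime).astimezone()
    return fallback


stored = datetime(2025, 6, 1, tzinfo=timezone.utc)
print(mtime_or_fallback(1717200000.0, stored))  # aware local time derived from the mtime
print(mtime_or_fallback(None, stored))          # falls back to the stored updated_at
```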
--- a/basic_memory/services/entity_service.py
+++ b/basic_memory/services/entity_service.py
@@ -8,6 +8,7 @@ import yaml
 from loguru import logger
 from sqlalchemy.exc import IntegrityError
 
+
 from basic_memory.config import ProjectConfig, BasicMemoryConfig
 from basic_memory.file_utils import (
     has_frontmatter,
@@ -28,6 +29,7 @@ from basic_memory.schemas.base import Permalink
 from basic_memory.services import BaseService, FileService
 from basic_memory.services.exceptions import EntityCreationError, EntityNotFoundError
 from basic_memory.services.link_resolver import LinkResolver
+from basic_memory.services.search_service import SearchService
 from basic_memory.utils import generate_permalink
 
 
@@ -42,6 +44,7 @@ class EntityService(BaseService[EntityModel]):
         relation_repository: RelationRepository,
         file_service: FileService,
         link_resolver: LinkResolver,
+        search_service: Optional[SearchService] = None,
         app_config: Optional[BasicMemoryConfig] = None,
     ):
         super().__init__(entity_repository)
@@ -50,6 +53,7 @@ class EntityService(BaseService[EntityModel]):
         self.entity_parser = entity_parser
         self.file_service = file_service
         self.link_resolver = link_resolver
+        self.search_service = search_service
         self.app_config = app_config
 
     async def detect_file_path_conflicts(
@@ -106,6 +110,9 @@ class EntityService(BaseService[EntityModel]):
         4. Generate new unique permalink from file path
 
         Enhanced to detect and handle character-related conflicts.
+
+        Note: Uses lightweight repository methods that skip eager loading of
+        observations and relations for better performance during bulk operations.
         """
         file_path_str = Path(file_path).as_posix()
 
@@ -122,16 +129,20 @@ class EntityService(BaseService[EntityModel]):
         # If markdown has explicit permalink, try to validate it
         if markdown and markdown.frontmatter.permalink:
             desired_permalink = markdown.frontmatter.permalink
-
+            # Use lightweight method - we only need to check file_path
+            existing_file_path = await self.repository.get_file_path_for_permalink(
+                desired_permalink
+            )
 
             # If no conflict or it's our own file, use as is
-            if not
+            if not existing_file_path or existing_file_path == file_path_str:
                 return desired_permalink
 
         # For existing files, try to find current permalink
-
-
-
+        # Use lightweight method - we only need the permalink
+        existing_permalink = await self.repository.get_permalink_for_file_path(file_path_str)
+        if existing_permalink:
+            return existing_permalink
 
         # New file - generate permalink
         if markdown and markdown.frontmatter.permalink:
@@ -140,9 +151,10 @@ class EntityService(BaseService[EntityModel]):
             desired_permalink = generate_permalink(file_path_str)
 
         # Make unique if needed - enhanced to handle character conflicts
+        # Use lightweight existence check instead of loading full entity
         permalink = desired_permalink
         suffix = 1
-        while await self.repository.
+        while await self.repository.permalink_exists(permalink):
             permalink = f"{desired_permalink}-{suffix}"
             suffix += 1
         logger.debug(f"creating unique permalink: {permalink}")
@@ -224,8 +236,11 @@ class EntityService(BaseService[EntityModel]):
         final_content = dump_frontmatter(post)
         checksum = await self.file_service.write_file(file_path, final_content)
 
-        # parse entity from file
-        entity_markdown = await self.entity_parser.
+        # parse entity from content we just wrote (avoids re-reading file for cloud compatibility)
+        entity_markdown = await self.entity_parser.parse_markdown_content(
+            file_path=file_path,
+            content=final_content,
+        )
 
         # create entity
         created = await self.create_entity_from_markdown(file_path, entity_markdown)
@@ -245,8 +260,12 @@ class EntityService(BaseService[EntityModel]):
         # Convert file path string to Path
         file_path = Path(entity.file_path)
 
-        # Read existing
-
+        # Read existing content via file_service (for cloud compatibility)
+        existing_content = await self.file_service.read_file_content(file_path)
+        existing_markdown = await self.entity_parser.parse_markdown_content(
+            file_path=file_path,
+            content=existing_content,
+        )
 
         # Parse content frontmatter to check for user-specified permalink and entity_type
         content_markdown = None
@@ -302,8 +321,11 @@ class EntityService(BaseService[EntityModel]):
         final_content = dump_frontmatter(merged_post)
         checksum = await self.file_service.write_file(file_path, final_content)
 
-        # parse entity from file
-        entity_markdown = await self.entity_parser.
+        # parse entity from content we just wrote (avoids re-reading file for cloud compatibility)
+        entity_markdown = await self.entity_parser.parse_markdown_content(
+            file_path=file_path,
+            content=final_content,
+        )
 
         # update entity in db
         entity = await self.update_entity_and_observations(file_path, entity_markdown)
@@ -335,7 +357,11 @@ class EntityService(BaseService[EntityModel]):
         )
         entity = entities[0]
 
-        # Delete
+        # Delete from search index first (if search_service is available)
+        if self.search_service:
+            await self.search_service.handle_delete(entity)
+
+        # Delete file
         await self.file_service.delete_entity_file(entity)
 
         # Delete from DB (this will cascade to observations/relations)
@@ -378,7 +404,9 @@ class EntityService(BaseService[EntityModel]):
         Uses UPSERT approach to handle permalink/file_path conflicts cleanly.
         """
         logger.debug(f"Creating entity: {markdown.frontmatter.title} file_path: {file_path}")
-        model = entity_model_from_markdown(
+        model = entity_model_from_markdown(
+            file_path, markdown, project_id=self.repository.project_id
+        )
 
         # Mark as incomplete because we still need to add relations
         model.checksum = None
@@ -408,6 +436,7 @@ class EntityService(BaseService[EntityModel]):
         # add new observations
         observations = [
             Observation(
+                project_id=self.observation_repository.project_id,
                 entity_id=db_entity.id,
                 content=obs.content,
                 category=obs.category,
@@ -448,8 +477,11 @@ class EntityService(BaseService[EntityModel]):
         import asyncio
 
         # Create tasks for all relation lookups
+        # Use strict=True to disable fuzzy search - only exact matches should create resolved relations
+        # This ensures forward references (links to non-existent entities) remain unresolved (to_id=NULL)
         lookup_tasks = [
-            self.link_resolver.resolve_link(rel.target)
+            self.link_resolver.resolve_link(rel.target, strict=True)
+            for rel in markdown.relations
         ]
 
         # Execute all lookups in parallel
@@ -471,6 +503,7 @@ class EntityService(BaseService[EntityModel]):
 
         # Create the relation
         relation = Relation(
+            project_id=self.relation_repository.project_id,
             from_id=db_entity.id,
             to_id=target_id,
             to_name=target_name,
@@ -543,8 +576,11 @@ class EntityService(BaseService[EntityModel]):
         # Write the updated content back to the file
         checksum = await self.file_service.write_file(file_path, new_content)
 
-        # Parse the
-        entity_markdown = await self.entity_parser.
+        # Parse the content we just wrote (avoids re-reading file for cloud compatibility)
+        entity_markdown = await self.entity_parser.parse_markdown_content(
+            file_path=file_path,
+            content=new_content,
+        )
 
         # Update entity and its relationships
         entity = await self.update_entity_and_observations(file_path, entity_markdown)
@@ -763,23 +799,20 @@ class EntityService(BaseService[EntityModel]):
             raise ValueError(f"Invalid destination path: {destination_path}")
 
         # 3. Validate paths
-
-
-
-        # Validate source exists
-        if not source_file.exists():
+        # NOTE: In tenantless/cloud mode, we cannot rely on local filesystem paths.
+        # Use FileService for existence checks and moving.
+        if not await self.file_service.exists(current_path):
             raise ValueError(f"Source file not found: {current_path}")
 
-
-        if destination_file.exists():
+        if await self.file_service.exists(destination_path):
             raise ValueError(f"Destination already exists: {destination_path}")
 
         try:
-            # 4.
-
+            # 4. Ensure destination directory if needed (no-op for S3)
+            await self.file_service.ensure_directory(Path(destination_path).parent)
 
-            # 5. Move physical file
-
+            # 5. Move physical file via FileService (filesystem rename or cloud move)
+            await self.file_service.move_file(current_path, destination_path)
             logger.info(f"Moved file: {current_path} -> {destination_path}")
 
             # 6. Prepare database updates
@@ -818,12 +851,14 @@ class EntityService(BaseService[EntityModel]):
 
         except Exception as e:
             # Rollback: try to restore original file location if move succeeded
-
-
-
+            try:
+                if await self.file_service.exists(
+                    destination_path
+                ) and not await self.file_service.exists(current_path):
+                    await self.file_service.move_file(destination_path, current_path)
                     logger.info(f"Rolled back file move: {destination_path} -> {current_path}")
-
-
+            except Exception as rollback_error:  # pragma: no cover
+                logger.error(f"Failed to rollback file move: {rollback_error}")
 
             # Re-raise the original error with context
             raise ValueError(f"Move failed: {str(e)}") from e
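The move_entity changes above route the existence checks, directory creation, and the move itself through FileService, so the same code path can serve a local filesystem or cloud storage, and the rollback branch only undoes the move if it actually completed. A condensed, hypothetical sketch of that move-with-rollback shape; `InMemoryStorage` and `move_with_rollback` are illustrations of the pattern, not the project's API.

```python
import asyncio


class InMemoryStorage:
    """Tiny stand-in for a FileService-like object, used only to exercise the sketch."""

    def __init__(self, files: dict):
        self.files = files

    async def exists(self, path: str) -> bool:
        return path in self.files

    async def move_file(self, src: str, dest: str) -> None:
        self.files[dest] = self.files.pop(src)


async def move_with_rollback(storage, src: str, dest: str, apply_db_updates) -> None:
    """Sketch of the FileService-based move + rollback pattern in the diff above."""
    if not await storage.exists(src):
        raise ValueError(f"Source file not found: {src}")
    if await storage.exists(dest):
        raise ValueError(f"Destination already exists: {dest}")

    await storage.move_file(src, dest)
    try:
        await apply_db_updates()
    except Exception as e:
        # Roll back the physical move only if it actually completed.
        if await storage.exists(dest) and not await storage.exists(src):
            await storage.move_file(dest, src)
        raise ValueError(f"Move failed: {e}") from e


async def main() -> None:
    storage = InMemoryStorage({"notes/a.md": "# A"})

    async def failing_db_update() -> None:
        raise RuntimeError("db error")

    try:
        await move_with_rollback(storage, "notes/a.md", "archive/a.md", failing_db_update)
    except ValueError:
        pass
    assert "notes/a.md" in storage.files  # the file move was rolled back


asyncio.run(main())
```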