basic-memory 0.1.1__py3-none-any.whl → 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of basic-memory might be problematic. Click here for more details.

Files changed (77) hide show
  1. basic_memory/__init__.py +1 -1
  2. basic_memory/alembic/README +1 -0
  3. basic_memory/alembic/env.py +75 -0
  4. basic_memory/alembic/migrations.py +29 -0
  5. basic_memory/alembic/script.py.mako +26 -0
  6. basic_memory/alembic/versions/3dae7c7b1564_initial_schema.py +93 -0
  7. basic_memory/api/__init__.py +2 -1
  8. basic_memory/api/app.py +26 -24
  9. basic_memory/api/routers/knowledge_router.py +28 -26
  10. basic_memory/api/routers/memory_router.py +17 -11
  11. basic_memory/api/routers/search_router.py +6 -12
  12. basic_memory/cli/__init__.py +1 -1
  13. basic_memory/cli/app.py +0 -1
  14. basic_memory/cli/commands/__init__.py +3 -3
  15. basic_memory/cli/commands/db.py +25 -0
  16. basic_memory/cli/commands/import_memory_json.py +35 -31
  17. basic_memory/cli/commands/mcp.py +20 -0
  18. basic_memory/cli/commands/status.py +10 -6
  19. basic_memory/cli/commands/sync.py +5 -56
  20. basic_memory/cli/main.py +5 -38
  21. basic_memory/config.py +3 -3
  22. basic_memory/db.py +15 -22
  23. basic_memory/deps.py +3 -4
  24. basic_memory/file_utils.py +36 -35
  25. basic_memory/markdown/entity_parser.py +13 -30
  26. basic_memory/markdown/markdown_processor.py +7 -7
  27. basic_memory/markdown/plugins.py +109 -123
  28. basic_memory/markdown/schemas.py +7 -8
  29. basic_memory/markdown/utils.py +70 -121
  30. basic_memory/mcp/__init__.py +1 -1
  31. basic_memory/mcp/async_client.py +0 -2
  32. basic_memory/mcp/server.py +3 -27
  33. basic_memory/mcp/tools/__init__.py +5 -3
  34. basic_memory/mcp/tools/knowledge.py +2 -2
  35. basic_memory/mcp/tools/memory.py +8 -4
  36. basic_memory/mcp/tools/search.py +2 -1
  37. basic_memory/mcp/tools/utils.py +1 -1
  38. basic_memory/models/__init__.py +1 -2
  39. basic_memory/models/base.py +3 -3
  40. basic_memory/models/knowledge.py +23 -60
  41. basic_memory/models/search.py +1 -1
  42. basic_memory/repository/__init__.py +5 -3
  43. basic_memory/repository/entity_repository.py +34 -98
  44. basic_memory/repository/relation_repository.py +0 -7
  45. basic_memory/repository/repository.py +2 -39
  46. basic_memory/repository/search_repository.py +20 -25
  47. basic_memory/schemas/__init__.py +4 -4
  48. basic_memory/schemas/base.py +21 -62
  49. basic_memory/schemas/delete.py +2 -3
  50. basic_memory/schemas/discovery.py +4 -1
  51. basic_memory/schemas/memory.py +12 -13
  52. basic_memory/schemas/request.py +4 -23
  53. basic_memory/schemas/response.py +10 -9
  54. basic_memory/schemas/search.py +4 -7
  55. basic_memory/services/__init__.py +2 -7
  56. basic_memory/services/context_service.py +116 -110
  57. basic_memory/services/entity_service.py +25 -62
  58. basic_memory/services/exceptions.py +1 -0
  59. basic_memory/services/file_service.py +73 -109
  60. basic_memory/services/link_resolver.py +9 -9
  61. basic_memory/services/search_service.py +22 -15
  62. basic_memory/services/service.py +3 -24
  63. basic_memory/sync/__init__.py +2 -2
  64. basic_memory/sync/file_change_scanner.py +3 -7
  65. basic_memory/sync/sync_service.py +35 -40
  66. basic_memory/sync/utils.py +6 -38
  67. basic_memory/sync/watch_service.py +26 -5
  68. basic_memory/utils.py +42 -33
  69. {basic_memory-0.1.1.dist-info → basic_memory-0.2.0.dist-info}/METADATA +2 -7
  70. basic_memory-0.2.0.dist-info/RECORD +78 -0
  71. basic_memory/mcp/main.py +0 -21
  72. basic_memory/mcp/tools/ai_edit.py +0 -84
  73. basic_memory/services/database_service.py +0 -159
  74. basic_memory-0.1.1.dist-info/RECORD +0 -74
  75. {basic_memory-0.1.1.dist-info → basic_memory-0.2.0.dist-info}/WHEEL +0 -0
  76. {basic_memory-0.1.1.dist-info → basic_memory-0.2.0.dist-info}/entry_points.txt +0 -0
  77. {basic_memory-0.1.1.dist-info → basic_memory-0.2.0.dist-info}/licenses/LICENSE +0 -0
basic_memory/__init__.py CHANGED
@@ -1,3 +1,3 @@
1
1
  """basic-memory - Local-first knowledge management combining Zettelkasten with knowledge graphs"""
2
2
 
3
- __version__ = "0.0.1"
3
+ __version__ = "0.2.0"
@@ -0,0 +1 @@
1
+ Generic single-database configuration.
@@ -0,0 +1,75 @@
1
+ """Alembic environment configuration."""
2
+
3
+ from logging.config import fileConfig
4
+
5
+ from sqlalchemy import engine_from_config
6
+ from sqlalchemy import pool
7
+
8
+ from alembic import context
9
+
10
+ from basic_memory.models import Base
11
+ from basic_memory.config import config as app_config
12
+
13
+ # this is the Alembic Config object, which provides
14
+ # access to the values within the .ini file in use.
15
+ config = context.config
16
+
17
+ # Set the SQLAlchemy URL from our app config
18
+ sqlalchemy_url = f"sqlite:///{app_config.database_path}"
19
+ config.set_main_option("sqlalchemy.url", sqlalchemy_url)
20
+
21
+ # Interpret the config file for Python logging.
22
+ if config.config_file_name is not None:
23
+ fileConfig(config.config_file_name)
24
+
25
+ # add your model's MetaData object here
26
+ # for 'autogenerate' support
27
+ target_metadata = Base.metadata
28
+
29
+
30
+ def run_migrations_offline() -> None:
31
+ """Run migrations in 'offline' mode.
32
+
33
+ This configures the context with just a URL
34
+ and not an Engine, though an Engine is acceptable
35
+ here as well. By skipping the Engine creation
36
+ we don't even need a DBAPI to be available.
37
+
38
+ Calls to context.execute() here emit the given string to the
39
+ script output.
40
+ """
41
+ url = config.get_main_option("sqlalchemy.url")
42
+ context.configure(
43
+ url=url,
44
+ target_metadata=target_metadata,
45
+ literal_binds=True,
46
+ dialect_opts={"paramstyle": "named"},
47
+ )
48
+
49
+ with context.begin_transaction():
50
+ context.run_migrations()
51
+
52
+
53
+ def run_migrations_online() -> None:
54
+ """Run migrations in 'online' mode.
55
+
56
+ In this scenario we need to create an Engine
57
+ and associate a connection with the context.
58
+ """
59
+ connectable = engine_from_config(
60
+ config.get_section(config.config_ini_section, {}),
61
+ prefix="sqlalchemy.",
62
+ poolclass=pool.NullPool,
63
+ )
64
+
65
+ with connectable.connect() as connection:
66
+ context.configure(connection=connection, target_metadata=target_metadata)
67
+
68
+ with context.begin_transaction():
69
+ context.run_migrations()
70
+
71
+
72
+ if context.is_offline_mode():
73
+ run_migrations_offline()
74
+ else:
75
+ run_migrations_online()
@@ -0,0 +1,29 @@
1
+ """Functions for managing database migrations."""
2
+
3
+ import asyncio
4
+ from pathlib import Path
5
+ from loguru import logger
6
+ from alembic.config import Config
7
+ from alembic import command
8
+
9
+
10
+ def get_alembic_config() -> Config: # pragma: no cover
11
+ """Get alembic config with correct paths."""
12
+ migrations_path = Path(__file__).parent
13
+ alembic_ini = migrations_path.parent.parent.parent / "alembic.ini"
14
+
15
+ config = Config(alembic_ini)
16
+ config.set_main_option("script_location", str(migrations_path))
17
+ return config
18
+
19
+
20
+ async def reset_database(): # pragma: no cover
21
+ """Drop and recreate all tables."""
22
+ logger.info("Resetting database...")
23
+ config = get_alembic_config()
24
+
25
+ def _reset(cfg):
26
+ command.downgrade(cfg, "base")
27
+ command.upgrade(cfg, "head")
28
+
29
+ await asyncio.get_event_loop().run_in_executor(None, _reset, config)
@@ -0,0 +1,26 @@
1
+ """${message}
2
+
3
+ Revision ID: ${up_revision}
4
+ Revises: ${down_revision | comma,n}
5
+ Create Date: ${create_date}
6
+
7
+ """
8
+ from typing import Sequence, Union
9
+
10
+ from alembic import op
11
+ import sqlalchemy as sa
12
+ ${imports if imports else ""}
13
+
14
+ # revision identifiers, used by Alembic.
15
+ revision: str = ${repr(up_revision)}
16
+ down_revision: Union[str, None] = ${repr(down_revision)}
17
+ branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
18
+ depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
19
+
20
+
21
+ def upgrade() -> None:
22
+ ${upgrades if upgrades else "pass"}
23
+
24
+
25
+ def downgrade() -> None:
26
+ ${downgrades if downgrades else "pass"}
@@ -0,0 +1,93 @@
1
+ """initial schema
2
+
3
+ Revision ID: 3dae7c7b1564
4
+ Revises:
5
+ Create Date: 2025-02-12 21:23:00.336344
6
+
7
+ """
8
+
9
+ from typing import Sequence, Union
10
+
11
+ from alembic import op
12
+ import sqlalchemy as sa
13
+
14
+
15
+ # revision identifiers, used by Alembic.
16
+ revision: str = "3dae7c7b1564"
17
+ down_revision: Union[str, None] = None
18
+ branch_labels: Union[str, Sequence[str], None] = None
19
+ depends_on: Union[str, Sequence[str], None] = None
20
+
21
+
22
+ def upgrade() -> None:
23
+ # ### commands auto generated by Alembic - please adjust! ###
24
+ op.create_table(
25
+ "entity",
26
+ sa.Column("id", sa.Integer(), nullable=False),
27
+ sa.Column("title", sa.String(), nullable=False),
28
+ sa.Column("entity_type", sa.String(), nullable=False),
29
+ sa.Column("entity_metadata", sa.JSON(), nullable=True),
30
+ sa.Column("content_type", sa.String(), nullable=False),
31
+ sa.Column("permalink", sa.String(), nullable=False),
32
+ sa.Column("file_path", sa.String(), nullable=False),
33
+ sa.Column("checksum", sa.String(), nullable=True),
34
+ sa.Column("created_at", sa.DateTime(), nullable=False),
35
+ sa.Column("updated_at", sa.DateTime(), nullable=False),
36
+ sa.PrimaryKeyConstraint("id"),
37
+ sa.UniqueConstraint("permalink", name="uix_entity_permalink"),
38
+ )
39
+ op.create_index("ix_entity_created_at", "entity", ["created_at"], unique=False)
40
+ op.create_index(op.f("ix_entity_file_path"), "entity", ["file_path"], unique=True)
41
+ op.create_index(op.f("ix_entity_permalink"), "entity", ["permalink"], unique=True)
42
+ op.create_index("ix_entity_title", "entity", ["title"], unique=False)
43
+ op.create_index("ix_entity_type", "entity", ["entity_type"], unique=False)
44
+ op.create_index("ix_entity_updated_at", "entity", ["updated_at"], unique=False)
45
+ op.create_table(
46
+ "observation",
47
+ sa.Column("id", sa.Integer(), nullable=False),
48
+ sa.Column("entity_id", sa.Integer(), nullable=False),
49
+ sa.Column("content", sa.Text(), nullable=False),
50
+ sa.Column("category", sa.String(), nullable=False),
51
+ sa.Column("context", sa.Text(), nullable=True),
52
+ sa.Column("tags", sa.JSON(), server_default="[]", nullable=True),
53
+ sa.ForeignKeyConstraint(["entity_id"], ["entity.id"], ondelete="CASCADE"),
54
+ sa.PrimaryKeyConstraint("id"),
55
+ )
56
+ op.create_index("ix_observation_category", "observation", ["category"], unique=False)
57
+ op.create_index("ix_observation_entity_id", "observation", ["entity_id"], unique=False)
58
+ op.create_table(
59
+ "relation",
60
+ sa.Column("id", sa.Integer(), nullable=False),
61
+ sa.Column("from_id", sa.Integer(), nullable=False),
62
+ sa.Column("to_id", sa.Integer(), nullable=True),
63
+ sa.Column("to_name", sa.String(), nullable=False),
64
+ sa.Column("relation_type", sa.String(), nullable=False),
65
+ sa.Column("context", sa.Text(), nullable=True),
66
+ sa.ForeignKeyConstraint(["from_id"], ["entity.id"], ondelete="CASCADE"),
67
+ sa.ForeignKeyConstraint(["to_id"], ["entity.id"], ondelete="CASCADE"),
68
+ sa.PrimaryKeyConstraint("id"),
69
+ sa.UniqueConstraint("from_id", "to_id", "relation_type", name="uix_relation"),
70
+ )
71
+ op.create_index("ix_relation_from_id", "relation", ["from_id"], unique=False)
72
+ op.create_index("ix_relation_to_id", "relation", ["to_id"], unique=False)
73
+ op.create_index("ix_relation_type", "relation", ["relation_type"], unique=False)
74
+ # ### end Alembic commands ###
75
+
76
+
77
+ def downgrade() -> None:
78
+ # ### commands auto generated by Alembic - please adjust! ###
79
+ op.drop_index("ix_relation_type", table_name="relation")
80
+ op.drop_index("ix_relation_to_id", table_name="relation")
81
+ op.drop_index("ix_relation_from_id", table_name="relation")
82
+ op.drop_table("relation")
83
+ op.drop_index("ix_observation_entity_id", table_name="observation")
84
+ op.drop_index("ix_observation_category", table_name="observation")
85
+ op.drop_table("observation")
86
+ op.drop_index("ix_entity_updated_at", table_name="entity")
87
+ op.drop_index("ix_entity_type", table_name="entity")
88
+ op.drop_index("ix_entity_title", table_name="entity")
89
+ op.drop_index(op.f("ix_entity_permalink"), table_name="entity")
90
+ op.drop_index(op.f("ix_entity_file_path"), table_name="entity")
91
+ op.drop_index("ix_entity_created_at", table_name="entity")
92
+ op.drop_table("entity")
93
+ # ### end Alembic commands ###
@@ -1,4 +1,5 @@
1
1
  """Basic Memory API module."""
2
+
2
3
  from .app import app
3
4
 
4
- __all__ = ["app"]
5
+ __all__ = ["app"]
basic_memory/api/app.py CHANGED
@@ -7,40 +7,42 @@ from fastapi.exception_handlers import http_exception_handler
7
7
  from loguru import logger
8
8
 
9
9
  from basic_memory import db
10
+ from basic_memory.config import config as app_config
10
11
  from basic_memory.api.routers import knowledge, search, memory, resource
11
- from basic_memory.config import config
12
- from basic_memory.services import DatabaseService
12
+ from alembic import command
13
+ from alembic.config import Config
14
+
15
+ from basic_memory.db import DatabaseType
16
+ from basic_memory.repository.search_repository import SearchRepository
17
+
18
+
19
+ async def run_migrations(): # pragma: no cover
20
+ """Run any pending alembic migrations."""
21
+ logger.info("Running database migrations...")
22
+ try:
23
+ config = Config("alembic.ini")
24
+ command.upgrade(config, "head")
25
+ logger.info("Migrations completed successfully")
26
+
27
+ _, session_maker = await db.get_or_create_db(
28
+ app_config.database_path, DatabaseType.FILESYSTEM
29
+ )
30
+ await SearchRepository(session_maker).init_search_index()
31
+ except Exception as e:
32
+ logger.error(f"Error running migrations: {e}")
33
+ raise
13
34
 
14
35
 
15
36
  @asynccontextmanager
16
- async def lifespan(app: FastAPI):
37
+ async def lifespan(app: FastAPI): # pragma: no cover
17
38
  """Lifecycle manager for the FastAPI app."""
18
39
  logger.info("Starting Basic Memory API")
19
-
20
- # check the db state
21
- await check_db(app)
40
+ await run_migrations()
22
41
  yield
23
42
  logger.info("Shutting down Basic Memory API")
24
43
  await db.shutdown_db()
25
44
 
26
45
 
27
- async def check_db(app: FastAPI):
28
- logger.info("Checking database state")
29
-
30
- # Initialize DB management service
31
- db_service = DatabaseService(
32
- config=config,
33
- )
34
-
35
- # Check and initialize DB if needed
36
- if not await db_service.check_db():
37
- raise RuntimeError("Database initialization failed")
38
-
39
- # Clean up old backups on shutdown
40
- await db_service.cleanup_backups()
41
-
42
-
43
-
44
46
  # Initialize FastAPI app
45
47
  app = FastAPI(
46
48
  title="Basic Memory API",
@@ -57,7 +59,7 @@ app.include_router(resource.router)
57
59
 
58
60
 
59
61
  @app.exception_handler(Exception)
60
- async def exception_handler(request, exc):
62
+ async def exception_handler(request, exc): # pragma: no cover
61
63
  logger.exception(
62
64
  f"An unhandled exception occurred for request '{request.url}', exception: {exc}"
63
65
  )
@@ -17,7 +17,7 @@ from basic_memory.schemas import (
17
17
  DeleteEntitiesResponse,
18
18
  DeleteEntitiesRequest,
19
19
  )
20
- from basic_memory.schemas.base import PathId, Entity
20
+ from basic_memory.schemas.base import Permalink, Entity
21
21
  from basic_memory.services.exceptions import EntityNotFoundError
22
22
 
23
23
  router = APIRouter(prefix="/knowledge", tags=["knowledge"])
@@ -27,10 +27,10 @@ router = APIRouter(prefix="/knowledge", tags=["knowledge"])
27
27
 
28
28
  @router.post("/entities", response_model=EntityResponse)
29
29
  async def create_entity(
30
- data: Entity,
31
- background_tasks: BackgroundTasks,
32
- entity_service: EntityServiceDep,
33
- search_service: SearchServiceDep,
30
+ data: Entity,
31
+ background_tasks: BackgroundTasks,
32
+ entity_service: EntityServiceDep,
33
+ search_service: SearchServiceDep,
34
34
  ) -> EntityResponse:
35
35
  """Create an entity."""
36
36
  logger.info(f"request: create_entity with data={data}")
@@ -47,12 +47,12 @@ async def create_entity(
47
47
 
48
48
  @router.put("/entities/{permalink:path}", response_model=EntityResponse)
49
49
  async def create_or_update_entity(
50
- permalink: PathId,
51
- data: Entity,
52
- response: Response,
53
- background_tasks: BackgroundTasks,
54
- entity_service: EntityServiceDep,
55
- search_service: SearchServiceDep,
50
+ permalink: Permalink,
51
+ data: Entity,
52
+ response: Response,
53
+ background_tasks: BackgroundTasks,
54
+ entity_service: EntityServiceDep,
55
+ search_service: SearchServiceDep,
56
56
  ) -> EntityResponse:
57
57
  """Create or update an entity. If entity exists, it will be updated, otherwise created."""
58
58
  logger.info(f"request: create_or_update_entity with permalink={permalink}, data={data}")
@@ -69,7 +69,9 @@ async def create_or_update_entity(
69
69
  await search_service.index_entity(entity, background_tasks=background_tasks)
70
70
  result = EntityResponse.model_validate(entity)
71
71
 
72
- logger.info(f"response: create_or_update_entity with result={result}, status_code={response.status_code}")
72
+ logger.info(
73
+ f"response: create_or_update_entity with result={result}, status_code={response.status_code}"
74
+ )
73
75
  return result
74
76
 
75
77
 
@@ -78,8 +80,8 @@ async def create_or_update_entity(
78
80
 
79
81
  @router.get("/entities/{permalink:path}", response_model=EntityResponse)
80
82
  async def get_entity(
81
- entity_service: EntityServiceDep,
82
- permalink: str,
83
+ entity_service: EntityServiceDep,
84
+ permalink: str,
83
85
  ) -> EntityResponse:
84
86
  """Get a specific entity by ID.
85
87
 
@@ -102,13 +104,13 @@ async def get_entity(
102
104
 
103
105
  @router.get("/entities", response_model=EntityListResponse)
104
106
  async def get_entities(
105
- entity_service: EntityServiceDep,
106
- permalink: Annotated[list[str] | None, Query()] = None,
107
+ entity_service: EntityServiceDep,
108
+ permalink: Annotated[list[str] | None, Query()] = None,
107
109
  ) -> EntityListResponse:
108
110
  """Open specific entities"""
109
111
  logger.info(f"request: get_entities with permalinks={permalink}")
110
112
 
111
- entities = await entity_service.get_entities_by_permalinks(permalink)
113
+ entities = await entity_service.get_entities_by_permalinks(permalink) if permalink else []
112
114
  result = EntityListResponse(
113
115
  entities=[EntityResponse.model_validate(entity) for entity in entities]
114
116
  )
@@ -122,11 +124,11 @@ async def get_entities(
122
124
 
123
125
  @router.delete("/entities/{identifier:path}", response_model=DeleteEntitiesResponse)
124
126
  async def delete_entity(
125
- identifier: str,
126
- background_tasks: BackgroundTasks,
127
- entity_service: EntityServiceDep,
128
- link_resolver: LinkResolverDep,
129
- search_service=Depends(get_search_service),
127
+ identifier: str,
128
+ background_tasks: BackgroundTasks,
129
+ entity_service: EntityServiceDep,
130
+ link_resolver: LinkResolverDep,
131
+ search_service=Depends(get_search_service),
130
132
  ) -> DeleteEntitiesResponse:
131
133
  """Delete a single entity and remove from search index."""
132
134
  logger.info(f"request: delete_entity with identifier={identifier}")
@@ -149,10 +151,10 @@ async def delete_entity(
149
151
 
150
152
  @router.post("/entities/delete", response_model=DeleteEntitiesResponse)
151
153
  async def delete_entities(
152
- data: DeleteEntitiesRequest,
153
- background_tasks: BackgroundTasks,
154
- entity_service: EntityServiceDep,
155
- search_service=Depends(get_search_service),
154
+ data: DeleteEntitiesRequest,
155
+ background_tasks: BackgroundTasks,
156
+ entity_service: EntityServiceDep,
157
+ search_service=Depends(get_search_service),
156
158
  ) -> DeleteEntitiesResponse:
157
159
  """Delete entities and remove from search index."""
158
160
  logger.info(f"request: delete_entities with data={data}")
@@ -1,13 +1,11 @@
1
1
  """Routes for memory:// URI operations."""
2
2
 
3
- from datetime import datetime, timedelta
4
- from typing import Optional, List, Annotated
3
+ from typing import Annotated
5
4
 
6
5
  from dateparser import parse
7
6
  from fastapi import APIRouter, Query
8
7
  from loguru import logger
9
8
 
10
- from basic_memory.config import config
11
9
  from basic_memory.deps import ContextServiceDep, EntityRepositoryDep
12
10
  from basic_memory.repository import EntityRepository
13
11
  from basic_memory.repository.search_repository import SearchIndexRow
@@ -17,7 +15,8 @@ from basic_memory.schemas.memory import (
17
15
  RelationSummary,
18
16
  EntitySummary,
19
17
  ObservationSummary,
20
- MemoryMetadata, normalize_memory_url,
18
+ MemoryMetadata,
19
+ normalize_memory_url,
21
20
  )
22
21
  from basic_memory.schemas.search import SearchItemType
23
22
  from basic_memory.services.context_service import ContextResultRow
@@ -25,12 +24,14 @@ from basic_memory.services.context_service import ContextResultRow
25
24
  router = APIRouter(prefix="/memory", tags=["memory"])
26
25
 
27
26
 
28
-
29
27
  async def to_graph_context(context, entity_repository: EntityRepository):
30
28
  # return results
31
29
  async def to_summary(item: SearchIndexRow | ContextResultRow):
32
30
  match item.type:
33
31
  case SearchItemType.ENTITY:
32
+ assert item.title is not None
33
+ assert item.created_at is not None
34
+
34
35
  return EntitySummary(
35
36
  title=item.title,
36
37
  permalink=item.permalink,
@@ -38,12 +39,18 @@ async def to_graph_context(context, entity_repository: EntityRepository):
38
39
  created_at=item.created_at,
39
40
  )
40
41
  case SearchItemType.OBSERVATION:
42
+ assert item.category is not None
43
+ assert item.content is not None
44
+
41
45
  return ObservationSummary(
42
46
  category=item.category, content=item.content, permalink=item.permalink
43
47
  )
44
48
  case SearchItemType.RELATION:
49
+ assert item.from_id is not None
45
50
  from_entity = await entity_repository.find_by_id(item.from_id)
46
- to_entity = await entity_repository.find_by_id(item.to_id)
51
+ assert from_entity is not None
52
+
53
+ to_entity = await entity_repository.find_by_id(item.to_id) if item.to_id else None
47
54
 
48
55
  return RelationSummary(
49
56
  permalink=item.permalink,
@@ -51,6 +58,8 @@ async def to_graph_context(context, entity_repository: EntityRepository):
51
58
  from_id=from_entity.permalink,
52
59
  to_id=to_entity.permalink if to_entity else None,
53
60
  )
61
+ case _: # pragma: no cover
62
+ raise ValueError(f"Unexpected type: {item.type}")
54
63
 
55
64
  primary_results = [await to_summary(r) for r in context["primary_results"]]
56
65
  related_results = [await to_summary(r) for r in context["related_results"]]
@@ -61,7 +70,6 @@ async def to_graph_context(context, entity_repository: EntityRepository):
61
70
  )
62
71
 
63
72
 
64
-
65
73
  @router.get("/recent", response_model=GraphContext)
66
74
  async def recent(
67
75
  context_service: ContextServiceDep,
@@ -91,7 +99,8 @@ async def recent(
91
99
  return await to_graph_context(context, entity_repository=entity_repository)
92
100
 
93
101
 
94
- # get_memory_context needs to be declared last so other paths can match
102
+ # get_memory_context needs to be declared last so other paths can match
103
+
95
104
 
96
105
  @router.get("/{uri:path}", response_model=GraphContext)
97
106
  async def get_memory_context(
@@ -118,6 +127,3 @@ async def get_memory_context(
118
127
  memory_url, depth=depth, since=since, max_results=max_results
119
128
  )
120
129
  return await to_graph_context(context, entity_repository=entity_repository)
121
-
122
-
123
-
@@ -1,34 +1,28 @@
1
1
  """Router for search operations."""
2
+
2
3
  from dataclasses import asdict
3
4
 
4
5
  from fastapi import APIRouter, Depends, BackgroundTasks
5
- from typing import List
6
6
 
7
- from loguru import logger
8
7
  from basic_memory.services.search_service import SearchService
9
8
  from basic_memory.schemas.search import SearchQuery, SearchResult, SearchResponse
10
9
  from basic_memory.deps import get_search_service
11
10
 
12
11
  router = APIRouter(prefix="/search", tags=["search"])
13
12
 
13
+
14
14
  @router.post("/", response_model=SearchResponse)
15
- async def search(
16
- query: SearchQuery,
17
- search_service: SearchService = Depends(get_search_service)
18
- ):
15
+ async def search(query: SearchQuery, search_service: SearchService = Depends(get_search_service)):
19
16
  """Search across all knowledge and documents."""
20
17
  results = await search_service.search(query)
21
18
  search_results = [SearchResult.model_validate(asdict(r)) for r in results]
22
19
  return SearchResponse(results=search_results)
23
20
 
21
+
24
22
  @router.post("/reindex")
25
23
  async def reindex(
26
- background_tasks: BackgroundTasks,
27
- search_service: SearchService = Depends(get_search_service)
24
+ background_tasks: BackgroundTasks, search_service: SearchService = Depends(get_search_service)
28
25
  ):
29
26
  """Recreate and populate the search index."""
30
27
  await search_service.reindex_all(background_tasks=background_tasks)
31
- return {
32
- "status": "ok",
33
- "message": "Reindex initiated"
34
- }
28
+ return {"status": "ok", "message": "Reindex initiated"}
@@ -1 +1 @@
1
- """CLI tools for basic-memory"""
1
+ """CLI tools for basic-memory"""
basic_memory/cli/app.py CHANGED
@@ -1,4 +1,3 @@
1
1
  import typer
2
2
 
3
3
  app = typer.Typer()
4
-
@@ -1,5 +1,5 @@
1
- """Command module exports."""
1
+ """CLI commands for basic-memory."""
2
2
 
3
- from . import status, sync, import_memory_json
3
+ from . import status, sync, db, import_memory_json, mcp
4
4
 
5
- __all__ = [ "status", "sync", "import_memory_json.py"]
5
+ __all__ = ["status", "sync", "db", "import_memory_json", "mcp"]
@@ -0,0 +1,25 @@
1
+ """Database management commands."""
2
+
3
+ import asyncio
4
+ import typer
5
+ from loguru import logger
6
+
7
+ from basic_memory.alembic import migrations
8
+ from basic_memory.cli.app import app
9
+
10
+
11
+ @app.command()
12
+ def reset(
13
+ reindex: bool = typer.Option(False, "--reindex", help="Rebuild indices from filesystem"),
14
+ ): # pragma: no cover
15
+ """Reset database (drop all tables and recreate)."""
16
+ if typer.confirm("This will delete all data. Are you sure?"):
17
+ logger.info("Resetting database...")
18
+ asyncio.run(migrations.reset_database())
19
+
20
+ if reindex:
21
+ # Import and run sync
22
+ from basic_memory.cli.commands.sync import sync
23
+
24
+ logger.info("Rebuilding search index from filesystem...")
25
+ asyncio.run(sync()) # pyright: ignore