basic-memory 0.7.0__py3-none-any.whl → 0.9.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of basic-memory might be problematic.
- basic_memory/__init__.py +1 -1
- basic_memory/alembic/alembic.ini +119 -0
- basic_memory/alembic/env.py +23 -1
- basic_memory/alembic/migrations.py +4 -9
- basic_memory/alembic/versions/502b60eaa905_remove_required_from_entity_permalink.py +51 -0
- basic_memory/alembic/versions/b3c3938bacdb_relation_to_name_unique_index.py +44 -0
- basic_memory/alembic/versions/cc7172b46608_update_search_index_schema.py +106 -0
- basic_memory/api/app.py +9 -10
- basic_memory/api/routers/__init__.py +2 -1
- basic_memory/api/routers/knowledge_router.py +31 -5
- basic_memory/api/routers/memory_router.py +18 -17
- basic_memory/api/routers/project_info_router.py +275 -0
- basic_memory/api/routers/resource_router.py +105 -4
- basic_memory/api/routers/search_router.py +22 -4
- basic_memory/cli/app.py +54 -5
- basic_memory/cli/commands/__init__.py +15 -2
- basic_memory/cli/commands/db.py +9 -13
- basic_memory/cli/commands/import_chatgpt.py +26 -30
- basic_memory/cli/commands/import_claude_conversations.py +27 -29
- basic_memory/cli/commands/import_claude_projects.py +29 -31
- basic_memory/cli/commands/import_memory_json.py +26 -28
- basic_memory/cli/commands/mcp.py +7 -1
- basic_memory/cli/commands/project.py +119 -0
- basic_memory/cli/commands/project_info.py +167 -0
- basic_memory/cli/commands/status.py +14 -28
- basic_memory/cli/commands/sync.py +63 -22
- basic_memory/cli/commands/tool.py +253 -0
- basic_memory/cli/main.py +39 -1
- basic_memory/config.py +166 -4
- basic_memory/db.py +19 -4
- basic_memory/deps.py +10 -3
- basic_memory/file_utils.py +37 -19
- basic_memory/markdown/entity_parser.py +3 -3
- basic_memory/markdown/utils.py +5 -0
- basic_memory/mcp/async_client.py +1 -1
- basic_memory/mcp/main.py +24 -0
- basic_memory/mcp/prompts/__init__.py +19 -0
- basic_memory/mcp/prompts/ai_assistant_guide.py +26 -0
- basic_memory/mcp/prompts/continue_conversation.py +111 -0
- basic_memory/mcp/prompts/recent_activity.py +88 -0
- basic_memory/mcp/prompts/search.py +182 -0
- basic_memory/mcp/prompts/utils.py +155 -0
- basic_memory/mcp/server.py +2 -6
- basic_memory/mcp/tools/__init__.py +12 -21
- basic_memory/mcp/tools/build_context.py +85 -0
- basic_memory/mcp/tools/canvas.py +97 -0
- basic_memory/mcp/tools/delete_note.py +28 -0
- basic_memory/mcp/tools/project_info.py +51 -0
- basic_memory/mcp/tools/read_content.py +229 -0
- basic_memory/mcp/tools/read_note.py +190 -0
- basic_memory/mcp/tools/recent_activity.py +100 -0
- basic_memory/mcp/tools/search.py +56 -17
- basic_memory/mcp/tools/utils.py +245 -16
- basic_memory/mcp/tools/write_note.py +124 -0
- basic_memory/models/knowledge.py +27 -11
- basic_memory/models/search.py +2 -1
- basic_memory/repository/entity_repository.py +3 -2
- basic_memory/repository/project_info_repository.py +9 -0
- basic_memory/repository/repository.py +24 -7
- basic_memory/repository/search_repository.py +47 -14
- basic_memory/schemas/__init__.py +10 -9
- basic_memory/schemas/base.py +4 -1
- basic_memory/schemas/memory.py +14 -4
- basic_memory/schemas/project_info.py +96 -0
- basic_memory/schemas/search.py +29 -33
- basic_memory/services/context_service.py +3 -3
- basic_memory/services/entity_service.py +26 -13
- basic_memory/services/file_service.py +145 -26
- basic_memory/services/link_resolver.py +9 -46
- basic_memory/services/search_service.py +95 -22
- basic_memory/sync/__init__.py +3 -2
- basic_memory/sync/sync_service.py +523 -117
- basic_memory/sync/watch_service.py +258 -132
- basic_memory/utils.py +51 -36
- basic_memory-0.9.0.dist-info/METADATA +736 -0
- basic_memory-0.9.0.dist-info/RECORD +99 -0
- basic_memory/alembic/README +0 -1
- basic_memory/cli/commands/tools.py +0 -157
- basic_memory/mcp/tools/knowledge.py +0 -68
- basic_memory/mcp/tools/memory.py +0 -170
- basic_memory/mcp/tools/notes.py +0 -202
- basic_memory/schemas/discovery.py +0 -28
- basic_memory/sync/file_change_scanner.py +0 -158
- basic_memory/sync/utils.py +0 -31
- basic_memory-0.7.0.dist-info/METADATA +0 -378
- basic_memory-0.7.0.dist-info/RECORD +0 -82
- {basic_memory-0.7.0.dist-info → basic_memory-0.9.0.dist-info}/WHEEL +0 -0
- {basic_memory-0.7.0.dist-info → basic_memory-0.9.0.dist-info}/entry_points.txt +0 -0
- {basic_memory-0.7.0.dist-info → basic_memory-0.9.0.dist-info}/licenses/LICENSE +0 -0
basic_memory/__init__.py
CHANGED

basic_memory/alembic/alembic.ini
ADDED
@@ -0,0 +1,119 @@
+# A generic, single database configuration.
+
+[alembic]
+# path to migration scripts
+# Use forward slashes (/) also on windows to provide an os agnostic path
+script_location = .
+
+# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
+# Uncomment the line below if you want the files to be prepended with date and time
+# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
+# for all available tokens
+# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
+
+# sys.path path, will be prepended to sys.path if present.
+# defaults to the current working directory.
+prepend_sys_path = .
+
+# timezone to use when rendering the date within the migration file
+# as well as the filename.
+# If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library.
+# Any required deps can installed by adding `alembic[tz]` to the pip requirements
+# string value is passed to ZoneInfo()
+# leave blank for localtime
+# timezone =
+
+# max length of characters to apply to the "slug" field
+# truncate_slug_length = 40
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+# revision_environment = false
+
+# set to 'true' to allow .pyc and .pyo files without
+# a source .py file to be detected as revisions in the
+# versions/ directory
+# sourceless = false
+
+# version location specification; This defaults
+# to migrations/versions. When using multiple version
+# directories, initial revisions must be specified with --version-path.
+# The path separator used here should be the separator specified by "version_path_separator" below.
+# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions
+
+# version path separator; As mentioned above, this is the character used to split
+# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
+# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
+# Valid values for version_path_separator are:
+#
+# version_path_separator = :
+# version_path_separator = ;
+# version_path_separator = space
+# version_path_separator = newline
+#
+# Use os.pathsep. Default configuration used for new projects.
+version_path_separator = os
+
+# set to 'true' to search source files recursively
+# in each "version_locations" directory
+# new in Alembic version 1.10
+# recursive_version_locations = false
+
+# the output encoding used when revision files
+# are written from script.py.mako
+# output_encoding = utf-8
+
+sqlalchemy.url = driver://user:pass@localhost/dbname
+
+
+[post_write_hooks]
+# post_write_hooks defines scripts or Python functions that are run
+# on newly generated revision scripts. See the documentation for further
+# detail and examples
+
+# format using "black" - use the console_scripts runner, against the "black" entrypoint
+# hooks = black
+# black.type = console_scripts
+# black.entrypoint = black
+# black.options = -l 79 REVISION_SCRIPT_FILENAME
+
+# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
+# hooks = ruff
+# ruff.type = exec
+# ruff.executable = %(here)s/.venv/bin/ruff
+# ruff.options = --fix REVISION_SCRIPT_FILENAME
+
+# Logging configuration
+[loggers]
+keys = root,sqlalchemy,alembic
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = WARNING
+handlers = console
+qualname =
+
+[logger_sqlalchemy]
+level = WARNING
+handlers =
+qualname = sqlalchemy.engine
+
+[logger_alembic]
+level = INFO
+handlers =
+qualname = alembic
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatter_generic]
+format = %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %H:%M:%S
basic_memory/alembic/env.py
CHANGED
@@ -1,5 +1,6 @@
 """Alembic environment configuration."""

+import os
 from logging.config import fileConfig

 from sqlalchemy import engine_from_config
@@ -8,6 +9,10 @@ from sqlalchemy import pool
 from alembic import context

 from basic_memory.models import Base
+
+# set config.env to "test" for pytest to prevent logging to file in utils.setup_logging()
+os.environ["BASIC_MEMORY_ENV"] = "test"
+
 from basic_memory.config import config as app_config

 # this is the Alembic Config object, which provides
@@ -18,6 +23,8 @@ config = context.config
 sqlalchemy_url = f"sqlite:///{app_config.database_path}"
 config.set_main_option("sqlalchemy.url", sqlalchemy_url)

+# print(f"Using SQLAlchemy URL: {sqlalchemy_url}")
+
 # Interpret the config file for Python logging.
 if config.config_file_name is not None:
     fileConfig(config.config_file_name)
@@ -27,6 +34,14 @@ if config.config_file_name is not None:
 target_metadata = Base.metadata


+# Add this function to tell Alembic what to include/exclude
+def include_object(object, name, type_, reflected, compare_to):
+    # Ignore SQLite FTS tables
+    if type_ == "table" and name.startswith("search_index"):
+        return False
+    return True
+
+
 def run_migrations_offline() -> None:
     """Run migrations in 'offline' mode.

@@ -44,6 +59,8 @@ def run_migrations_offline() -> None:
         target_metadata=target_metadata,
         literal_binds=True,
         dialect_opts={"paramstyle": "named"},
+        include_object=include_object,
+        render_as_batch=True,
     )

     with context.begin_transaction():
@@ -63,7 +80,12 @@ def run_migrations_online() -> None:
     )

     with connectable.connect() as connection:
-        context.configure(
+        context.configure(
+            connection=connection,
+            target_metadata=target_metadata,
+            include_object=include_object,
+            render_as_batch=True,
+        )

         with context.begin_transaction():
             context.run_migrations()
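
The include_object hook matters here because an FTS5 virtual table brings along shadow tables (search_index_data, search_index_idx, and so on) that autogenerate would otherwise flag as unknown and try to drop. A minimal sketch of the filter's effect, runnable on its own (the table names below are illustrative, not taken from the package):

def include_object(object, name, type_, reflected, compare_to):
    # The FTS5 virtual table and all of its shadow tables share the prefix.
    if type_ == "table" and name.startswith("search_index"):
        return False
    return True

for name in ["entity", "relation", "search_index", "search_index_data", "search_index_idx"]:
    print(name, "->", "included" if include_object(None, name, "table", True, None) else "skipped")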
basic_memory/alembic/migrations.py
CHANGED

@@ -1,6 +1,5 @@
 """Functions for managing database migrations."""

-import asyncio
 from pathlib import Path
 from loguru import logger
 from alembic.config import Config
@@ -10,20 +9,16 @@ from alembic import command
 def get_alembic_config() -> Config:  # pragma: no cover
     """Get alembic config with correct paths."""
     migrations_path = Path(__file__).parent
-    alembic_ini = migrations_path
+    alembic_ini = migrations_path / "alembic.ini"

     config = Config(alembic_ini)
     config.set_main_option("script_location", str(migrations_path))
     return config


-async def reset_database():  # pragma: no cover
+def reset_database():  # pragma: no cover
     """Drop and recreate all tables."""
     logger.info("Resetting database...")
     config = get_alembic_config()
-
-    def _reset(cfg):
-        command.downgrade(cfg, "base")
-        command.upgrade(cfg, "head")
-
-    await asyncio.get_event_loop().run_in_executor(None, _reset, config)
+    command.downgrade(config, "base")
+    command.upgrade(config, "head")
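
With the asyncio indirection gone, reset_database() is a plain synchronous helper. A minimal sketch of driving the same Alembic commands directly, assuming the package layout shown above:

from alembic import command
from basic_memory.alembic.migrations import get_alembic_config

config = get_alembic_config()
command.downgrade(config, "base")  # drop everything managed by migrations
command.upgrade(config, "head")    # recreate the schema at the latest revision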
basic_memory/alembic/versions/502b60eaa905_remove_required_from_entity_permalink.py
ADDED

@@ -0,0 +1,51 @@
+"""remove required from entity.permalink
+
+Revision ID: 502b60eaa905
+Revises: b3c3938bacdb
+Create Date: 2025-02-24 13:33:09.790951
+
+"""
+
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision: str = "502b60eaa905"
+down_revision: Union[str, None] = "b3c3938bacdb"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("entity", schema=None) as batch_op:
+        batch_op.alter_column("permalink", existing_type=sa.VARCHAR(), nullable=True)
+        batch_op.drop_index("ix_entity_permalink")
+        batch_op.create_index(batch_op.f("ix_entity_permalink"), ["permalink"], unique=False)
+        batch_op.drop_constraint("uix_entity_permalink", type_="unique")
+        batch_op.create_index(
+            "uix_entity_permalink",
+            ["permalink"],
+            unique=True,
+            sqlite_where=sa.text("content_type = 'text/markdown' AND permalink IS NOT NULL"),
+        )
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("entity", schema=None) as batch_op:
+        batch_op.drop_index(
+            "uix_entity_permalink",
+            sqlite_where=sa.text("content_type = 'text/markdown' AND permalink IS NOT NULL"),
+        )
+        batch_op.create_unique_constraint("uix_entity_permalink", ["permalink"])
+        batch_op.drop_index(batch_op.f("ix_entity_permalink"))
+        batch_op.create_index("ix_entity_permalink", ["permalink"], unique=1)
+        batch_op.alter_column("permalink", existing_type=sa.VARCHAR(), nullable=False)
+
+    # ### end Alembic commands ###
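
The sqlite_where clause turns uix_entity_permalink into a partial unique index: uniqueness is enforced only for markdown entities with a non-NULL permalink, which is what allows the column itself to become nullable. A self-contained sketch of that behavior (schema abbreviated for illustration, not the package's actual table):

import sqlite3

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE entity (permalink VARCHAR, content_type VARCHAR)")
con.execute(
    "CREATE UNIQUE INDEX uix_entity_permalink ON entity (permalink) "
    "WHERE content_type = 'text/markdown' AND permalink IS NOT NULL"
)
con.execute("INSERT INTO entity VALUES ('notes/a', 'text/markdown')")
con.execute("INSERT INTO entity VALUES ('notes/a', 'image/png')")  # ok: outside the partial index
con.execute("INSERT INTO entity VALUES (NULL, 'text/markdown')")   # ok: NULL rows are excluded
try:
    con.execute("INSERT INTO entity VALUES ('notes/a', 'text/markdown')")  # duplicate markdown permalink
except sqlite3.IntegrityError as exc:
    print("rejected:", exc)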
basic_memory/alembic/versions/b3c3938bacdb_relation_to_name_unique_index.py
ADDED

@@ -0,0 +1,44 @@
+"""relation to_name unique index
+
+Revision ID: b3c3938bacdb
+Revises: 3dae7c7b1564
+Create Date: 2025-02-22 14:59:30.668466
+
+"""
+
+from typing import Sequence, Union
+
+from alembic import op
+
+
+# revision identifiers, used by Alembic.
+revision: str = "b3c3938bacdb"
+down_revision: Union[str, None] = "3dae7c7b1564"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    # SQLite doesn't support constraint changes through ALTER
+    # Need to recreate table with desired constraints
+    with op.batch_alter_table("relation") as batch_op:
+        # Drop existing unique constraint
+        batch_op.drop_constraint("uix_relation", type_="unique")
+
+        # Add new constraints
+        batch_op.create_unique_constraint(
+            "uix_relation_from_id_to_id", ["from_id", "to_id", "relation_type"]
+        )
+        batch_op.create_unique_constraint(
+            "uix_relation_from_id_to_name", ["from_id", "to_name", "relation_type"]
+        )
+
+
+def downgrade() -> None:
+    with op.batch_alter_table("relation") as batch_op:
+        # Drop new constraints
+        batch_op.drop_constraint("uix_relation_from_id_to_name", type_="unique")
+        batch_op.drop_constraint("uix_relation_from_id_to_id", type_="unique")
+
+        # Restore original constraint
+        batch_op.create_unique_constraint("uix_relation", ["from_id", "to_id", "relation_type"])
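
As the comment in upgrade() notes, SQLite cannot change constraints via ALTER TABLE, so batch_alter_table falls back to Alembic's "move and copy" recipe. A simplified sketch of that recipe using sqlite3 directly (the column list is illustrative; real batch mode also carries over indexes and foreign keys):

import sqlite3

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE relation (from_id INTEGER, to_id INTEGER, to_name TEXT, relation_type TEXT)")
# Build the replacement table with the new unique constraints...
con.execute(
    "CREATE TABLE _alembic_tmp_relation ("
    "from_id INTEGER, to_id INTEGER, to_name TEXT, relation_type TEXT, "
    "UNIQUE (from_id, to_id, relation_type), "
    "UNIQUE (from_id, to_name, relation_type))"
)
# ...copy the rows across, then swap the tables.
con.execute("INSERT INTO _alembic_tmp_relation SELECT * FROM relation")
con.execute("DROP TABLE relation")
con.execute("ALTER TABLE _alembic_tmp_relation RENAME TO relation")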
basic_memory/alembic/versions/cc7172b46608_update_search_index_schema.py
ADDED

@@ -0,0 +1,106 @@
+"""Update search index schema
+
+Revision ID: cc7172b46608
+Revises: 502b60eaa905
+Create Date: 2025-02-28 18:48:23.244941
+
+"""
+
+from typing import Sequence, Union
+
+from alembic import op
+
+
+# revision identifiers, used by Alembic.
+revision: str = "cc7172b46608"
+down_revision: Union[str, None] = "502b60eaa905"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    """Upgrade database schema to use new search index with content_stems and content_snippet."""
+
+    # First, drop the existing search_index table
+    op.execute("DROP TABLE IF EXISTS search_index")
+
+    # Create new search_index with updated schema
+    op.execute("""
+        CREATE VIRTUAL TABLE IF NOT EXISTS search_index USING fts5(
+            -- Core entity fields
+            id UNINDEXED,              -- Row ID
+            title,                     -- Title for searching
+            content_stems,             -- Main searchable content split into stems
+            content_snippet,           -- File content snippet for display
+            permalink,                 -- Stable identifier (now indexed for path search)
+            file_path UNINDEXED,       -- Physical location
+            type UNINDEXED,            -- entity/relation/observation
+
+            -- Relation fields
+            from_id UNINDEXED,         -- Source entity
+            to_id UNINDEXED,           -- Target entity
+            relation_type UNINDEXED,   -- Type of relation
+
+            -- Observation fields
+            entity_id UNINDEXED,       -- Parent entity
+            category UNINDEXED,        -- Observation category
+
+            -- Common fields
+            metadata UNINDEXED,        -- JSON metadata
+            created_at UNINDEXED,      -- Creation timestamp
+            updated_at UNINDEXED,      -- Last update
+
+            -- Configuration
+            tokenize='unicode61 tokenchars 0x2F',  -- Hex code for /
+            prefix='1,2,3,4'           -- Support longer prefixes for paths
+        );
+    """)
+
+    # Print instruction to manually reindex after migration
+    print("\n------------------------------------------------------------------")
+    print("IMPORTANT: After migration completes, manually run the reindex command:")
+    print("basic-memory sync")
+    print("------------------------------------------------------------------\n")
+
+
+def downgrade() -> None:
+    """Downgrade database schema to use old search index."""
+    # Drop the updated search_index table
+    op.execute("DROP TABLE IF EXISTS search_index")
+
+    # Recreate the original search_index schema
+    op.execute("""
+        CREATE VIRTUAL TABLE IF NOT EXISTS search_index USING fts5(
+            -- Core entity fields
+            id UNINDEXED,              -- Row ID
+            title,                     -- Title for searching
+            content,                   -- Main searchable content
+            permalink,                 -- Stable identifier (now indexed for path search)
+            file_path UNINDEXED,       -- Physical location
+            type UNINDEXED,            -- entity/relation/observation
+
+            -- Relation fields
+            from_id UNINDEXED,         -- Source entity
+            to_id UNINDEXED,           -- Target entity
+            relation_type UNINDEXED,   -- Type of relation
+
+            -- Observation fields
+            entity_id UNINDEXED,       -- Parent entity
+            category UNINDEXED,        -- Observation category
+
+            -- Common fields
+            metadata UNINDEXED,        -- JSON metadata
+            created_at UNINDEXED,      -- Creation timestamp
+            updated_at UNINDEXED,      -- Last update
+
+            -- Configuration
+            tokenize='unicode61 tokenchars 0x2F',  -- Hex code for /
+            prefix='1,2,3,4'           -- Support longer prefixes for paths
+        );
+    """)
+
+    # Print instruction to manually reindex after migration
+    print("\n------------------------------------------------------------------")
+    print("IMPORTANT: After downgrade completes, manually run the reindex command:")
+    print("basic-memory sync")
+    print("------------------------------------------------------------------\n")
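
The new schema splits searchable text (content_stems) from display text (content_snippet), indexes permalink, and keeps the prefix indexes for path-style lookups. A minimal, self-contained sketch of a prefix query against an FTS5 table of this shape (columns abbreviated; assumes an SQLite build with FTS5 enabled):

import sqlite3

con = sqlite3.connect(":memory:")
con.execute(
    "CREATE VIRTUAL TABLE search_index USING fts5("
    "title, content_stems, permalink, prefix='1,2,3,4')"
)
con.execute(
    "INSERT INTO search_index VALUES ('Coffee Notes', 'coffee brewing methods', 'notes/coffee')"
)
# 'brew*' is a prefix token query; the prefix= option builds indexes that serve it.
rows = con.execute("SELECT title FROM search_index WHERE search_index MATCH 'brew*'").fetchall()
print(rows)  # [('Coffee Notes',)]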
basic_memory/api/app.py
CHANGED
@@ -2,23 +2,18 @@

 from contextlib import asynccontextmanager

-import logfire
 from fastapi import FastAPI, HTTPException
 from fastapi.exception_handlers import http_exception_handler
 from loguru import logger

-import basic_memory
 from basic_memory import db
 from basic_memory.config import config as app_config
-from basic_memory.api.routers import knowledge, search, memory, resource
-from basic_memory.utils import setup_logging
+from basic_memory.api.routers import knowledge, search, memory, resource, project_info


 @asynccontextmanager
 async def lifespan(app: FastAPI):  # pragma: no cover
     """Lifecycle manager for the FastAPI app."""
-    setup_logging(log_file=".basic-memory/basic-memory.log")
-    logger.info(f"Starting Basic Memory API {basic_memory.__version__}")
     await db.run_migrations(app_config)
     yield
     logger.info("Shutting down Basic Memory API")
@@ -33,20 +28,24 @@ app = FastAPI(
     lifespan=lifespan,
 )

-if app_config != "test":
-    logfire.instrument_fastapi(app)
-

 # Include routers
 app.include_router(knowledge.router)
 app.include_router(search.router)
 app.include_router(memory.router)
 app.include_router(resource.router)
+app.include_router(project_info.router)


 @app.exception_handler(Exception)
 async def exception_handler(request, exc):  # pragma: no cover
     logger.exception(
-
+        "API unhandled exception",
+        url=str(request.url),
+        method=request.method,
+        client=request.client.host if request.client else None,
+        path=request.url.path,
+        error_type=type(exc).__name__,
+        error=str(exc),
     )
     return await http_exception_handler(request, HTTPException(status_code=500, detail=str(exc)))
basic_memory/api/routers/__init__.py
CHANGED

@@ -4,5 +4,6 @@ from . import knowledge_router as knowledge
 from . import memory_router as memory
 from . import resource_router as resource
 from . import search_router as search
+from . import project_info_router as project_info

-__all__ = ["knowledge", "memory", "resource", "search"]
+__all__ = ["knowledge", "memory", "resource", "search", "project_info"]
basic_memory/api/routers/knowledge_router.py
CHANGED

@@ -33,7 +33,9 @@ async def create_entity(
     search_service: SearchServiceDep,
 ) -> EntityResponse:
     """Create an entity."""
-    logger.info(
+    logger.info(
+        "API request", endpoint="create_entity", entity_type=data.entity_type, title=data.title
+    )

     entity = await entity_service.create_entity(data)

@@ -41,7 +43,13 @@ async def create_entity(
     await search_service.index_entity(entity, background_tasks=background_tasks)
     result = EntityResponse.model_validate(entity)

-    logger.info(
+    logger.info(
+        "API response",
+        endpoint="create_entity",
+        title=result.title,
+        permalink=result.permalink,
+        status_code=201,
+    )
     return result


@@ -55,10 +63,23 @@ async def create_or_update_entity(
     search_service: SearchServiceDep,
 ) -> EntityResponse:
     """Create or update an entity. If entity exists, it will be updated, otherwise created."""
-    logger.info(
+    logger.info(
+        "API request",
+        endpoint="create_or_update_entity",
+        permalink=permalink,
+        entity_type=data.entity_type,
+        title=data.title,
+    )

     # Validate permalink matches
     if data.permalink != permalink:
+        logger.warning(
+            "API validation error",
+            endpoint="create_or_update_entity",
+            permalink=permalink,
+            data_permalink=data.permalink,
+            error="Permalink mismatch",
+        )
         raise HTTPException(status_code=400, detail="Entity permalink must match URL path")

     # Try create_or_update operation
@@ -70,7 +91,12 @@ async def create_or_update_entity(
     result = EntityResponse.model_validate(entity)

     logger.info(
-
+        "API response",
+        endpoint="create_or_update_entity",
+        title=result.title,
+        permalink=result.permalink,
+        created=created,
+        status_code=response.status_code,
     )
     return result

@@ -133,7 +159,7 @@ async def delete_entity(
         return DeleteEntitiesResponse(deleted=False)

     # Delete the entity
-    deleted = await entity_service.delete_entity(entity.permalink)
+    deleted = await entity_service.delete_entity(entity.permalink or entity.id)

     # Remove from search index
     background_tasks.add_task(search_service.delete_by_permalink, entity.permalink)
basic_memory/api/routers/memory_router.py
CHANGED

@@ -29,34 +29,33 @@ async def to_graph_context(context, entity_repository: EntityRepository, page: i
     async def to_summary(item: SearchIndexRow | ContextResultRow):
         match item.type:
             case SearchItemType.ENTITY:
-                assert item.title is not None
-                assert item.created_at is not None
-
                 return EntitySummary(
-                    title=item.title,
+                    title=item.title,  # pyright: ignore
                     permalink=item.permalink,
+                    content=item.content,
                     file_path=item.file_path,
                     created_at=item.created_at,
                 )
             case SearchItemType.OBSERVATION:
-                assert item.category is not None
-                assert item.content is not None
-
                 return ObservationSummary(
-
+                    title=item.title,  # pyright: ignore
+                    file_path=item.file_path,
+                    category=item.category,  # pyright: ignore
+                    content=item.content,  # pyright: ignore
+                    permalink=item.permalink,  # pyright: ignore
+                    created_at=item.created_at,
                 )
             case SearchItemType.RELATION:
-
-                from_entity = await entity_repository.find_by_id(item.from_id)
-                assert from_entity is not None
-
+                from_entity = await entity_repository.find_by_id(item.from_id)  # pyright: ignore
                 to_entity = await entity_repository.find_by_id(item.to_id) if item.to_id else None
-
                 return RelationSummary(
-
+                    title=item.title,  # pyright: ignore
+                    file_path=item.file_path,
+                    permalink=item.permalink,  # pyright: ignore
                     relation_type=item.type,
-
-
+                    from_entity=from_entity.permalink,  # pyright: ignore
+                    to_entity=to_entity.permalink if to_entity else None,
+                    created_at=item.created_at,
                 )
             case _:  # pragma: no cover
                 raise ValueError(f"Unexpected type: {item.type}")
@@ -104,9 +103,11 @@ async def recent(
     context = await context_service.build_context(
         types=types, depth=depth, since=since, limit=limit, offset=offset, max_related=max_related
     )
-
+    recent_context = await to_graph_context(
         context, entity_repository=entity_repository, page=page, page_size=page_size
     )
+    logger.debug(f"Recent context: {recent_context.model_dump_json()}")
+    return recent_context


 # get_memory_context needs to be declared last so other paths can match