basic-memory 0.17.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- basic_memory/__init__.py +7 -0
- basic_memory/alembic/alembic.ini +119 -0
- basic_memory/alembic/env.py +185 -0
- basic_memory/alembic/migrations.py +24 -0
- basic_memory/alembic/script.py.mako +26 -0
- basic_memory/alembic/versions/314f1ea54dc4_add_postgres_full_text_search_support_.py +131 -0
- basic_memory/alembic/versions/3dae7c7b1564_initial_schema.py +93 -0
- basic_memory/alembic/versions/502b60eaa905_remove_required_from_entity_permalink.py +51 -0
- basic_memory/alembic/versions/5fe1ab1ccebe_add_projects_table.py +120 -0
- basic_memory/alembic/versions/647e7a75e2cd_project_constraint_fix.py +112 -0
- basic_memory/alembic/versions/9d9c1cb7d8f5_add_mtime_and_size_columns_to_entity_.py +49 -0
- basic_memory/alembic/versions/a1b2c3d4e5f6_fix_project_foreign_keys.py +49 -0
- basic_memory/alembic/versions/a2b3c4d5e6f7_add_search_index_entity_cascade.py +56 -0
- basic_memory/alembic/versions/b3c3938bacdb_relation_to_name_unique_index.py +44 -0
- basic_memory/alembic/versions/cc7172b46608_update_search_index_schema.py +113 -0
- basic_memory/alembic/versions/e7e1f4367280_add_scan_watermark_tracking_to_project.py +37 -0
- basic_memory/alembic/versions/f8a9b2c3d4e5_add_pg_trgm_for_fuzzy_link_resolution.py +239 -0
- basic_memory/api/__init__.py +5 -0
- basic_memory/api/app.py +131 -0
- basic_memory/api/routers/__init__.py +11 -0
- basic_memory/api/routers/directory_router.py +84 -0
- basic_memory/api/routers/importer_router.py +152 -0
- basic_memory/api/routers/knowledge_router.py +318 -0
- basic_memory/api/routers/management_router.py +80 -0
- basic_memory/api/routers/memory_router.py +90 -0
- basic_memory/api/routers/project_router.py +448 -0
- basic_memory/api/routers/prompt_router.py +260 -0
- basic_memory/api/routers/resource_router.py +249 -0
- basic_memory/api/routers/search_router.py +36 -0
- basic_memory/api/routers/utils.py +169 -0
- basic_memory/api/template_loader.py +292 -0
- basic_memory/api/v2/__init__.py +35 -0
- basic_memory/api/v2/routers/__init__.py +21 -0
- basic_memory/api/v2/routers/directory_router.py +93 -0
- basic_memory/api/v2/routers/importer_router.py +182 -0
- basic_memory/api/v2/routers/knowledge_router.py +413 -0
- basic_memory/api/v2/routers/memory_router.py +130 -0
- basic_memory/api/v2/routers/project_router.py +342 -0
- basic_memory/api/v2/routers/prompt_router.py +270 -0
- basic_memory/api/v2/routers/resource_router.py +286 -0
- basic_memory/api/v2/routers/search_router.py +73 -0
- basic_memory/cli/__init__.py +1 -0
- basic_memory/cli/app.py +84 -0
- basic_memory/cli/auth.py +277 -0
- basic_memory/cli/commands/__init__.py +18 -0
- basic_memory/cli/commands/cloud/__init__.py +6 -0
- basic_memory/cli/commands/cloud/api_client.py +112 -0
- basic_memory/cli/commands/cloud/bisync_commands.py +110 -0
- basic_memory/cli/commands/cloud/cloud_utils.py +101 -0
- basic_memory/cli/commands/cloud/core_commands.py +195 -0
- basic_memory/cli/commands/cloud/rclone_commands.py +371 -0
- basic_memory/cli/commands/cloud/rclone_config.py +110 -0
- basic_memory/cli/commands/cloud/rclone_installer.py +263 -0
- basic_memory/cli/commands/cloud/upload.py +233 -0
- basic_memory/cli/commands/cloud/upload_command.py +124 -0
- basic_memory/cli/commands/command_utils.py +77 -0
- basic_memory/cli/commands/db.py +44 -0
- basic_memory/cli/commands/format.py +198 -0
- basic_memory/cli/commands/import_chatgpt.py +84 -0
- basic_memory/cli/commands/import_claude_conversations.py +87 -0
- basic_memory/cli/commands/import_claude_projects.py +86 -0
- basic_memory/cli/commands/import_memory_json.py +87 -0
- basic_memory/cli/commands/mcp.py +76 -0
- basic_memory/cli/commands/project.py +889 -0
- basic_memory/cli/commands/status.py +174 -0
- basic_memory/cli/commands/telemetry.py +81 -0
- basic_memory/cli/commands/tool.py +341 -0
- basic_memory/cli/main.py +28 -0
- basic_memory/config.py +616 -0
- basic_memory/db.py +394 -0
- basic_memory/deps.py +705 -0
- basic_memory/file_utils.py +478 -0
- basic_memory/ignore_utils.py +297 -0
- basic_memory/importers/__init__.py +27 -0
- basic_memory/importers/base.py +79 -0
- basic_memory/importers/chatgpt_importer.py +232 -0
- basic_memory/importers/claude_conversations_importer.py +180 -0
- basic_memory/importers/claude_projects_importer.py +148 -0
- basic_memory/importers/memory_json_importer.py +108 -0
- basic_memory/importers/utils.py +61 -0
- basic_memory/markdown/__init__.py +21 -0
- basic_memory/markdown/entity_parser.py +279 -0
- basic_memory/markdown/markdown_processor.py +160 -0
- basic_memory/markdown/plugins.py +242 -0
- basic_memory/markdown/schemas.py +70 -0
- basic_memory/markdown/utils.py +117 -0
- basic_memory/mcp/__init__.py +1 -0
- basic_memory/mcp/async_client.py +139 -0
- basic_memory/mcp/project_context.py +141 -0
- basic_memory/mcp/prompts/__init__.py +19 -0
- basic_memory/mcp/prompts/ai_assistant_guide.py +70 -0
- basic_memory/mcp/prompts/continue_conversation.py +62 -0
- basic_memory/mcp/prompts/recent_activity.py +188 -0
- basic_memory/mcp/prompts/search.py +57 -0
- basic_memory/mcp/prompts/utils.py +162 -0
- basic_memory/mcp/resources/ai_assistant_guide.md +283 -0
- basic_memory/mcp/resources/project_info.py +71 -0
- basic_memory/mcp/server.py +81 -0
- basic_memory/mcp/tools/__init__.py +48 -0
- basic_memory/mcp/tools/build_context.py +120 -0
- basic_memory/mcp/tools/canvas.py +152 -0
- basic_memory/mcp/tools/chatgpt_tools.py +190 -0
- basic_memory/mcp/tools/delete_note.py +242 -0
- basic_memory/mcp/tools/edit_note.py +324 -0
- basic_memory/mcp/tools/list_directory.py +168 -0
- basic_memory/mcp/tools/move_note.py +551 -0
- basic_memory/mcp/tools/project_management.py +201 -0
- basic_memory/mcp/tools/read_content.py +281 -0
- basic_memory/mcp/tools/read_note.py +267 -0
- basic_memory/mcp/tools/recent_activity.py +534 -0
- basic_memory/mcp/tools/search.py +385 -0
- basic_memory/mcp/tools/utils.py +540 -0
- basic_memory/mcp/tools/view_note.py +78 -0
- basic_memory/mcp/tools/write_note.py +230 -0
- basic_memory/models/__init__.py +15 -0
- basic_memory/models/base.py +10 -0
- basic_memory/models/knowledge.py +226 -0
- basic_memory/models/project.py +87 -0
- basic_memory/models/search.py +85 -0
- basic_memory/repository/__init__.py +11 -0
- basic_memory/repository/entity_repository.py +503 -0
- basic_memory/repository/observation_repository.py +73 -0
- basic_memory/repository/postgres_search_repository.py +379 -0
- basic_memory/repository/project_info_repository.py +10 -0
- basic_memory/repository/project_repository.py +128 -0
- basic_memory/repository/relation_repository.py +146 -0
- basic_memory/repository/repository.py +385 -0
- basic_memory/repository/search_index_row.py +95 -0
- basic_memory/repository/search_repository.py +94 -0
- basic_memory/repository/search_repository_base.py +241 -0
- basic_memory/repository/sqlite_search_repository.py +439 -0
- basic_memory/schemas/__init__.py +86 -0
- basic_memory/schemas/base.py +297 -0
- basic_memory/schemas/cloud.py +50 -0
- basic_memory/schemas/delete.py +37 -0
- basic_memory/schemas/directory.py +30 -0
- basic_memory/schemas/importer.py +35 -0
- basic_memory/schemas/memory.py +285 -0
- basic_memory/schemas/project_info.py +212 -0
- basic_memory/schemas/prompt.py +90 -0
- basic_memory/schemas/request.py +112 -0
- basic_memory/schemas/response.py +229 -0
- basic_memory/schemas/search.py +117 -0
- basic_memory/schemas/sync_report.py +72 -0
- basic_memory/schemas/v2/__init__.py +27 -0
- basic_memory/schemas/v2/entity.py +129 -0
- basic_memory/schemas/v2/resource.py +46 -0
- basic_memory/services/__init__.py +8 -0
- basic_memory/services/context_service.py +601 -0
- basic_memory/services/directory_service.py +308 -0
- basic_memory/services/entity_service.py +864 -0
- basic_memory/services/exceptions.py +37 -0
- basic_memory/services/file_service.py +541 -0
- basic_memory/services/initialization.py +216 -0
- basic_memory/services/link_resolver.py +121 -0
- basic_memory/services/project_service.py +880 -0
- basic_memory/services/search_service.py +404 -0
- basic_memory/services/service.py +15 -0
- basic_memory/sync/__init__.py +6 -0
- basic_memory/sync/background_sync.py +26 -0
- basic_memory/sync/sync_service.py +1259 -0
- basic_memory/sync/watch_service.py +510 -0
- basic_memory/telemetry.py +249 -0
- basic_memory/templates/prompts/continue_conversation.hbs +110 -0
- basic_memory/templates/prompts/search.hbs +101 -0
- basic_memory/utils.py +468 -0
- basic_memory-0.17.1.dist-info/METADATA +617 -0
- basic_memory-0.17.1.dist-info/RECORD +171 -0
- basic_memory-0.17.1.dist-info/WHEEL +4 -0
- basic_memory-0.17.1.dist-info/entry_points.txt +3 -0
- basic_memory-0.17.1.dist-info/licenses/LICENSE +661 -0
|
@@ -0,0 +1,239 @@
|
|
|
1
|
+
"""Add project_id to relation/observation and pg_trgm for fuzzy link resolution
|
|
2
|
+
|
|
3
|
+
Revision ID: f8a9b2c3d4e5
|
|
4
|
+
Revises: 314f1ea54dc4
|
|
5
|
+
Create Date: 2025-12-01 12:00:00.000000
|
|
6
|
+
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
from typing import Sequence, Union
|
|
10
|
+
|
|
11
|
+
import sqlalchemy as sa
|
|
12
|
+
from alembic import op
|
|
13
|
+
from sqlalchemy import text
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def column_exists(connection, table: str, column: str) -> bool:
    """Return True if *column* is already present on *table*.

    Used to make this migration idempotent: steps that add columns are
    skipped when a previous (partial) run already created them.
    """
    if connection.dialect.name == "postgresql":
        # information_schema is the portable way to inspect columns on Postgres
        found = connection.execute(
            text(
                "SELECT 1 FROM information_schema.columns "
                "WHERE table_name = :table AND column_name = :column"
            ),
            {"table": table, "column": column},
        ).fetchone()
        return found is not None
    # SQLite: PRAGMA table_info rows are (cid, name, type, notnull, dflt_value, pk)
    rows = connection.execute(text(f"PRAGMA table_info({table})"))
    return any(row[1] == column for row in rows)
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def index_exists(connection, index_name: str) -> bool:
    """Return True if an index named *index_name* exists.

    Supports idempotent migrations: index-creation steps are skipped when a
    previous run already created them.
    """
    if connection.dialect.name == "postgresql":
        # Postgres exposes index metadata through the pg_indexes view
        query = text("SELECT 1 FROM pg_indexes WHERE indexname = :index_name")
    else:
        # SQLite keeps index metadata in the sqlite_master catalog table
        query = text("SELECT 1 FROM sqlite_master WHERE type='index' AND name = :index_name")
    found = connection.execute(query, {"index_name": index_name}).fetchone()
    return found is not None
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
# revision identifiers, used by Alembic.
revision: str = "f8a9b2c3d4e5"  # this migration's unique id
down_revision: Union[str, None] = "314f1ea54dc4"  # parent revision in the chain
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def upgrade() -> None:
    """Add project_id to relation and observation tables, plus pg_trgm indexes.

    This migration:
    1. Adds project_id column to relation and observation tables (denormalization)
    2. Backfills project_id from the associated entity
    3. Enables pg_trgm extension for trigram-based fuzzy matching (Postgres only)
    4. Creates GIN indexes on entity title and permalink for fast similarity searches
    5. Creates partial index on unresolved relations for efficient bulk resolution

    Steps are guarded by column_exists/index_exists (or CREATE ... IF NOT EXISTS)
    so a partially applied run can be safely retried.
    """
    connection = op.get_bind()
    dialect = connection.dialect.name

    # -------------------------------------------------------------------------
    # Add project_id to relation table
    # -------------------------------------------------------------------------

    # Step 1: Add project_id column as nullable first (idempotent).
    # Nullable so existing rows are valid until the backfill below completes.
    if not column_exists(connection, "relation", "project_id"):
        op.add_column("relation", sa.Column("project_id", sa.Integer(), nullable=True))

    # Step 2: Backfill project_id from entity.project_id via from_id
    if dialect == "postgresql":
        # Postgres supports the UPDATE ... FROM join form
        op.execute("""
            UPDATE relation
            SET project_id = entity.project_id
            FROM entity
            WHERE relation.from_id = entity.id
        """)
    else:
        # SQLite syntax: correlated subquery instead of UPDATE ... FROM
        op.execute("""
            UPDATE relation
            SET project_id = (
                SELECT entity.project_id
                FROM entity
                WHERE entity.id = relation.from_id
            )
        """)

    # Step 3: Make project_id NOT NULL and add foreign key
    if dialect == "postgresql":
        op.alter_column("relation", "project_id", nullable=False)
        op.create_foreign_key(
            "fk_relation_project_id",
            "relation",
            "project",
            ["project_id"],
            ["id"],
        )
    else:
        # SQLite requires batch operations (table rebuild) for ALTER COLUMN
        with op.batch_alter_table("relation") as batch_op:
            batch_op.alter_column("project_id", nullable=False)
            batch_op.create_foreign_key(
                "fk_relation_project_id",
                "project",
                ["project_id"],
                ["id"],
            )

    # Step 4: Create index on relation.project_id (idempotent)
    if not index_exists(connection, "ix_relation_project_id"):
        op.create_index("ix_relation_project_id", "relation", ["project_id"])

    # -------------------------------------------------------------------------
    # Add project_id to observation table (mirrors the relation steps above,
    # but joins on observation.entity_id instead of relation.from_id)
    # -------------------------------------------------------------------------

    # Step 1: Add project_id column as nullable first (idempotent)
    if not column_exists(connection, "observation", "project_id"):
        op.add_column("observation", sa.Column("project_id", sa.Integer(), nullable=True))

    # Step 2: Backfill project_id from entity.project_id via entity_id
    if dialect == "postgresql":
        op.execute("""
            UPDATE observation
            SET project_id = entity.project_id
            FROM entity
            WHERE observation.entity_id = entity.id
        """)
    else:
        # SQLite syntax
        op.execute("""
            UPDATE observation
            SET project_id = (
                SELECT entity.project_id
                FROM entity
                WHERE entity.id = observation.entity_id
            )
        """)

    # Step 3: Make project_id NOT NULL and add foreign key
    if dialect == "postgresql":
        op.alter_column("observation", "project_id", nullable=False)
        op.create_foreign_key(
            "fk_observation_project_id",
            "observation",
            "project",
            ["project_id"],
            ["id"],
        )
    else:
        # SQLite requires batch operations for ALTER COLUMN
        with op.batch_alter_table("observation") as batch_op:
            batch_op.alter_column("project_id", nullable=False)
            batch_op.create_foreign_key(
                "fk_observation_project_id",
                "project",
                ["project_id"],
                ["id"],
            )

    # Step 4: Create index on observation.project_id (idempotent)
    if not index_exists(connection, "ix_observation_project_id"):
        op.create_index("ix_observation_project_id", "observation", ["project_id"])

    # Postgres-specific: pg_trgm and GIN indexes
    if dialect == "postgresql":
        # Enable pg_trgm extension for fuzzy string matching
        op.execute("CREATE EXTENSION IF NOT EXISTS pg_trgm")

        # Create trigram indexes on entity table for fuzzy matching.
        # GIN indexes with gin_trgm_ops support similarity searches.
        op.execute("""
            CREATE INDEX IF NOT EXISTS idx_entity_title_trgm
            ON entity USING gin (title gin_trgm_ops)
        """)

        op.execute("""
            CREATE INDEX IF NOT EXISTS idx_entity_permalink_trgm
            ON entity USING gin (permalink gin_trgm_ops)
        """)

        # Create partial index on unresolved relations for efficient bulk resolution.
        # This makes "WHERE to_id IS NULL AND project_id = X" queries very fast.
        op.execute("""
            CREATE INDEX IF NOT EXISTS idx_relation_unresolved
            ON relation (project_id, to_name)
            WHERE to_id IS NULL
        """)

        # Create index on relation.to_name for join performance in bulk resolution
        op.execute("""
            CREATE INDEX IF NOT EXISTS idx_relation_to_name
            ON relation (to_name)
        """)
|
|
205
|
+
|
|
206
|
+
|
|
207
|
+
def downgrade() -> None:
    """Remove project_id from relation/observation and pg_trgm indexes.

    Reverses upgrade() in the opposite order: indexes first, then foreign
    keys, then the columns themselves.
    """
    connection = op.get_bind()
    dialect = connection.dialect.name

    if dialect == "postgresql":
        # Drop Postgres-specific indexes (reverse of creation order)
        op.execute("DROP INDEX IF EXISTS idx_relation_to_name")
        op.execute("DROP INDEX IF EXISTS idx_relation_unresolved")
        op.execute("DROP INDEX IF EXISTS idx_entity_permalink_trgm")
        op.execute("DROP INDEX IF EXISTS idx_entity_title_trgm")
        # Note: We don't drop the pg_trgm extension as other code may depend on it

        # Drop project_id from observation
        op.drop_index("ix_observation_project_id", table_name="observation")
        op.drop_constraint("fk_observation_project_id", "observation", type_="foreignkey")
        op.drop_column("observation", "project_id")

        # Drop project_id from relation
        op.drop_index("ix_relation_project_id", table_name="relation")
        op.drop_constraint("fk_relation_project_id", "relation", type_="foreignkey")
        op.drop_column("relation", "project_id")
    else:
        # SQLite requires batch operations (table rebuild) to drop
        # constraints/columns
        op.drop_index("ix_observation_project_id", table_name="observation")
        with op.batch_alter_table("observation") as batch_op:
            batch_op.drop_constraint("fk_observation_project_id", type_="foreignkey")
            batch_op.drop_column("project_id")

        op.drop_index("ix_relation_project_id", table_name="relation")
        with op.batch_alter_table("relation") as batch_op:
            batch_op.drop_constraint("fk_relation_project_id", type_="foreignkey")
            batch_op.drop_column("project_id")
|
basic_memory/api/app.py
ADDED
|
@@ -0,0 +1,131 @@
|
|
|
1
|
+
"""FastAPI application for basic-memory knowledge graph API."""
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
from contextlib import asynccontextmanager
|
|
5
|
+
|
|
6
|
+
from fastapi import FastAPI, HTTPException
|
|
7
|
+
from fastapi.exception_handlers import http_exception_handler
|
|
8
|
+
from loguru import logger
|
|
9
|
+
|
|
10
|
+
from basic_memory import __version__ as version
|
|
11
|
+
from basic_memory import db
|
|
12
|
+
from basic_memory.api.routers import (
|
|
13
|
+
directory_router,
|
|
14
|
+
importer_router,
|
|
15
|
+
knowledge,
|
|
16
|
+
management,
|
|
17
|
+
memory,
|
|
18
|
+
project,
|
|
19
|
+
resource,
|
|
20
|
+
search,
|
|
21
|
+
prompt_router,
|
|
22
|
+
)
|
|
23
|
+
from basic_memory.api.v2.routers import (
|
|
24
|
+
knowledge_router as v2_knowledge,
|
|
25
|
+
project_router as v2_project,
|
|
26
|
+
memory_router as v2_memory,
|
|
27
|
+
search_router as v2_search,
|
|
28
|
+
resource_router as v2_resource,
|
|
29
|
+
directory_router as v2_directory,
|
|
30
|
+
prompt_router as v2_prompt,
|
|
31
|
+
importer_router as v2_importer,
|
|
32
|
+
)
|
|
33
|
+
from basic_memory.config import ConfigManager, init_api_logging
|
|
34
|
+
from basic_memory.services.initialization import initialize_file_sync, initialize_app
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
@asynccontextmanager
async def lifespan(app: FastAPI):  # pragma: no cover
    """Lifecycle manager for the FastAPI app. Not called in stdio mcp mode.

    Startup: initialize logging, run app initialization, cache DB engine and
    session maker on app.state, and (optionally) launch the background file
    sync task. Shutdown: cancel the sync task and close DB connections.
    """

    # Initialize logging for API (stdout in cloud mode, file otherwise)
    init_api_logging()

    app_config = ConfigManager().config
    logger.info("Starting Basic Memory API")

    await initialize_app(app_config)

    # Cache database connections in app state for performance
    logger.info("Initializing database and caching connections...")
    engine, session_maker = await db.get_or_create_db(app_config.database_path)
    app.state.engine = engine
    app.state.session_maker = session_maker
    logger.info("Database connections cached in app state")

    # Start file sync if enabled (never in test environments)
    if app_config.sync_changes and not app_config.is_test_env:
        logger.info(f"Sync changes enabled: {app_config.sync_changes}")

        # start file sync task in background
        async def _file_sync_runner() -> None:
            await initialize_file_sync(app_config)

        app.state.sync_task = asyncio.create_task(_file_sync_runner())
    else:
        if app_config.is_test_env:
            logger.info("Test environment detected. Skipping file sync service.")
        else:
            logger.info("Sync changes disabled. Skipping file sync service.")
        # Always set the attribute so shutdown below can test it safely
        app.state.sync_task = None

    # proceed with startup; code after the yield runs on shutdown
    yield

    logger.info("Shutting down Basic Memory API")
    if app.state.sync_task:
        logger.info("Stopping sync...")
        app.state.sync_task.cancel()  # pyright: ignore
        try:
            # Await so cancellation completes before tearing down the DB
            await app.state.sync_task
        except asyncio.CancelledError:
            logger.info("Sync task cancelled successfully")

    await db.shutdown_db()
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
# Initialize FastAPI app
app = FastAPI(
    title="Basic Memory API",
    description="Knowledge graph API for basic-memory",
    version=version,
    lifespan=lifespan,
)

# Include v1 routers — name-based paths: every router is mounted under a
# /{project} path parameter.
app.include_router(knowledge.router, prefix="/{project}")
app.include_router(memory.router, prefix="/{project}")
app.include_router(resource.router, prefix="/{project}")
app.include_router(search.router, prefix="/{project}")
app.include_router(project.project_router, prefix="/{project}")
app.include_router(directory_router.router, prefix="/{project}")
app.include_router(prompt_router.router, prefix="/{project}")
app.include_router(importer_router.router, prefix="/{project}")

# Include v2 routers (ID-based paths: /v2/projects/{project_id}/...)
app.include_router(v2_knowledge, prefix="/v2/projects/{project_id}")
app.include_router(v2_memory, prefix="/v2/projects/{project_id}")
app.include_router(v2_search, prefix="/v2/projects/{project_id}")
app.include_router(v2_resource, prefix="/v2/projects/{project_id}")
app.include_router(v2_directory, prefix="/v2/projects/{project_id}")
app.include_router(v2_prompt, prefix="/v2/projects/{project_id}")
app.include_router(v2_importer, prefix="/v2/projects/{project_id}")
# v2 project management endpoints live at /v2 (not scoped to a project)
app.include_router(v2_project, prefix="/v2")

# Project resource router works across projects (no project prefix)
app.include_router(project.project_resource_router)
app.include_router(management.router)
|
|
118
|
+
|
|
119
|
+
|
|
120
|
+
@app.exception_handler(Exception)
async def exception_handler(request, exc):  # pragma: no cover
    """Catch-all handler: log any unhandled error and return a 500 response."""
    detail = str(exc)
    logger.exception(
        "API unhandled exception",
        url=str(request.url),
        method=request.method,
        client=request.client.host if request.client else None,
        path=request.url.path,
        error_type=type(exc).__name__,
        error=detail,
    )
    # Delegate response formatting to FastAPI's standard HTTP handler
    wrapped = HTTPException(status_code=500, detail=detail)
    return await http_exception_handler(request, wrapped)
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
"""API routers."""
|
|
2
|
+
|
|
3
|
+
from . import knowledge_router as knowledge
|
|
4
|
+
from . import management_router as management
|
|
5
|
+
from . import memory_router as memory
|
|
6
|
+
from . import project_router as project
|
|
7
|
+
from . import resource_router as resource
|
|
8
|
+
from . import search_router as search
|
|
9
|
+
from . import prompt_router as prompt
|
|
10
|
+
|
|
11
|
+
__all__ = ["knowledge", "management", "memory", "project", "resource", "search", "prompt"]
|
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
"""Router for directory tree operations."""
|
|
2
|
+
|
|
3
|
+
from typing import List, Optional
|
|
4
|
+
|
|
5
|
+
from fastapi import APIRouter, Query
|
|
6
|
+
|
|
7
|
+
from basic_memory.deps import DirectoryServiceDep, ProjectIdDep
|
|
8
|
+
from basic_memory.schemas.directory import DirectoryNode
|
|
9
|
+
|
|
10
|
+
# Directory endpoints; mounted by the API app under a project-scoped prefix.
router = APIRouter(prefix="/directory", tags=["directory"])
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
@router.get("/tree", response_model=DirectoryNode, response_model_exclude_none=True)
|
|
14
|
+
async def get_directory_tree(
|
|
15
|
+
directory_service: DirectoryServiceDep,
|
|
16
|
+
project_id: ProjectIdDep,
|
|
17
|
+
):
|
|
18
|
+
"""Get hierarchical directory structure from the knowledge base.
|
|
19
|
+
|
|
20
|
+
Args:
|
|
21
|
+
directory_service: Service for directory operations
|
|
22
|
+
project_id: ID of the current project
|
|
23
|
+
|
|
24
|
+
Returns:
|
|
25
|
+
DirectoryNode representing the root of the hierarchical tree structure
|
|
26
|
+
"""
|
|
27
|
+
# Get a hierarchical directory tree for the specific project
|
|
28
|
+
tree = await directory_service.get_directory_tree()
|
|
29
|
+
|
|
30
|
+
# Return the hierarchical tree
|
|
31
|
+
return tree
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
@router.get("/structure", response_model=DirectoryNode, response_model_exclude_none=True)
|
|
35
|
+
async def get_directory_structure(
|
|
36
|
+
directory_service: DirectoryServiceDep,
|
|
37
|
+
project_id: ProjectIdDep,
|
|
38
|
+
):
|
|
39
|
+
"""Get folder structure for navigation (no files).
|
|
40
|
+
|
|
41
|
+
Optimized endpoint for folder tree navigation. Returns only directory nodes
|
|
42
|
+
without file metadata. For full tree with files, use /directory/tree.
|
|
43
|
+
|
|
44
|
+
Args:
|
|
45
|
+
directory_service: Service for directory operations
|
|
46
|
+
project_id: ID of the current project
|
|
47
|
+
|
|
48
|
+
Returns:
|
|
49
|
+
DirectoryNode tree containing only folders (type="directory")
|
|
50
|
+
"""
|
|
51
|
+
structure = await directory_service.get_directory_structure()
|
|
52
|
+
return structure
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
@router.get("/list", response_model=List[DirectoryNode], response_model_exclude_none=True)
|
|
56
|
+
async def list_directory(
|
|
57
|
+
directory_service: DirectoryServiceDep,
|
|
58
|
+
project_id: ProjectIdDep,
|
|
59
|
+
dir_name: str = Query("/", description="Directory path to list"),
|
|
60
|
+
depth: int = Query(1, ge=1, le=10, description="Recursion depth (1-10)"),
|
|
61
|
+
file_name_glob: Optional[str] = Query(
|
|
62
|
+
None, description="Glob pattern for filtering file names"
|
|
63
|
+
),
|
|
64
|
+
):
|
|
65
|
+
"""List directory contents with filtering and depth control.
|
|
66
|
+
|
|
67
|
+
Args:
|
|
68
|
+
directory_service: Service for directory operations
|
|
69
|
+
project_id: ID of the current project
|
|
70
|
+
dir_name: Directory path to list (default: root "/")
|
|
71
|
+
depth: Recursion depth (1-10, default: 1 for immediate children only)
|
|
72
|
+
file_name_glob: Optional glob pattern for filtering file names (e.g., "*.md", "*meeting*")
|
|
73
|
+
|
|
74
|
+
Returns:
|
|
75
|
+
List of DirectoryNode objects matching the criteria
|
|
76
|
+
"""
|
|
77
|
+
# Get directory listing with filtering
|
|
78
|
+
nodes = await directory_service.list_directory(
|
|
79
|
+
dir_name=dir_name,
|
|
80
|
+
depth=depth,
|
|
81
|
+
file_name_glob=file_name_glob,
|
|
82
|
+
)
|
|
83
|
+
|
|
84
|
+
return nodes
|
|
@@ -0,0 +1,152 @@
|
|
|
1
|
+
"""Import router for Basic Memory API."""
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import logging
|
|
5
|
+
|
|
6
|
+
from fastapi import APIRouter, Form, HTTPException, UploadFile, status
|
|
7
|
+
|
|
8
|
+
from basic_memory.deps import (
|
|
9
|
+
ChatGPTImporterDep,
|
|
10
|
+
ClaudeConversationsImporterDep,
|
|
11
|
+
ClaudeProjectsImporterDep,
|
|
12
|
+
MemoryJsonImporterDep,
|
|
13
|
+
)
|
|
14
|
+
from basic_memory.importers import Importer
|
|
15
|
+
from basic_memory.schemas.importer import (
|
|
16
|
+
ChatImportResult,
|
|
17
|
+
EntityImportResult,
|
|
18
|
+
ProjectImportResult,
|
|
19
|
+
)
|
|
20
|
+
|
|
21
|
+
logger = logging.getLogger(__name__)
|
|
22
|
+
|
|
23
|
+
router = APIRouter(prefix="/import", tags=["import"])
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
@router.post("/chatgpt", response_model=ChatImportResult)
|
|
27
|
+
async def import_chatgpt(
|
|
28
|
+
importer: ChatGPTImporterDep,
|
|
29
|
+
file: UploadFile,
|
|
30
|
+
folder: str = Form("conversations"),
|
|
31
|
+
) -> ChatImportResult:
|
|
32
|
+
"""Import conversations from ChatGPT JSON export.
|
|
33
|
+
|
|
34
|
+
Args:
|
|
35
|
+
file: The ChatGPT conversations.json file.
|
|
36
|
+
folder: The folder to place the files in.
|
|
37
|
+
markdown_processor: MarkdownProcessor instance.
|
|
38
|
+
|
|
39
|
+
Returns:
|
|
40
|
+
ChatImportResult with import statistics.
|
|
41
|
+
|
|
42
|
+
Raises:
|
|
43
|
+
HTTPException: If import fails.
|
|
44
|
+
"""
|
|
45
|
+
return await import_file(importer, file, folder)
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
@router.post("/claude/conversations", response_model=ChatImportResult)
|
|
49
|
+
async def import_claude_conversations(
|
|
50
|
+
importer: ClaudeConversationsImporterDep,
|
|
51
|
+
file: UploadFile,
|
|
52
|
+
folder: str = Form("conversations"),
|
|
53
|
+
) -> ChatImportResult:
|
|
54
|
+
"""Import conversations from Claude conversations.json export.
|
|
55
|
+
|
|
56
|
+
Args:
|
|
57
|
+
file: The Claude conversations.json file.
|
|
58
|
+
folder: The folder to place the files in.
|
|
59
|
+
markdown_processor: MarkdownProcessor instance.
|
|
60
|
+
|
|
61
|
+
Returns:
|
|
62
|
+
ChatImportResult with import statistics.
|
|
63
|
+
|
|
64
|
+
Raises:
|
|
65
|
+
HTTPException: If import fails.
|
|
66
|
+
"""
|
|
67
|
+
return await import_file(importer, file, folder)
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
@router.post("/claude/projects", response_model=ProjectImportResult)
|
|
71
|
+
async def import_claude_projects(
|
|
72
|
+
importer: ClaudeProjectsImporterDep,
|
|
73
|
+
file: UploadFile,
|
|
74
|
+
folder: str = Form("projects"),
|
|
75
|
+
) -> ProjectImportResult:
|
|
76
|
+
"""Import projects from Claude projects.json export.
|
|
77
|
+
|
|
78
|
+
Args:
|
|
79
|
+
file: The Claude projects.json file.
|
|
80
|
+
base_folder: The base folder to place the files in.
|
|
81
|
+
markdown_processor: MarkdownProcessor instance.
|
|
82
|
+
|
|
83
|
+
Returns:
|
|
84
|
+
ProjectImportResult with import statistics.
|
|
85
|
+
|
|
86
|
+
Raises:
|
|
87
|
+
HTTPException: If import fails.
|
|
88
|
+
"""
|
|
89
|
+
return await import_file(importer, file, folder)
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
@router.post("/memory-json", response_model=EntityImportResult)
|
|
93
|
+
async def import_memory_json(
|
|
94
|
+
importer: MemoryJsonImporterDep,
|
|
95
|
+
file: UploadFile,
|
|
96
|
+
folder: str = Form("conversations"),
|
|
97
|
+
) -> EntityImportResult:
|
|
98
|
+
"""Import entities and relations from a memory.json file.
|
|
99
|
+
|
|
100
|
+
Args:
|
|
101
|
+
file: The memory.json file.
|
|
102
|
+
destination_folder: Optional destination folder within the project.
|
|
103
|
+
markdown_processor: MarkdownProcessor instance.
|
|
104
|
+
|
|
105
|
+
Returns:
|
|
106
|
+
EntityImportResult with import statistics.
|
|
107
|
+
|
|
108
|
+
Raises:
|
|
109
|
+
HTTPException: If import fails.
|
|
110
|
+
"""
|
|
111
|
+
try:
|
|
112
|
+
file_data = []
|
|
113
|
+
file_bytes = await file.read()
|
|
114
|
+
file_str = file_bytes.decode("utf-8")
|
|
115
|
+
for line in file_str.splitlines():
|
|
116
|
+
json_data = json.loads(line)
|
|
117
|
+
file_data.append(json_data)
|
|
118
|
+
|
|
119
|
+
result = await importer.import_data(file_data, folder)
|
|
120
|
+
if not result.success: # pragma: no cover
|
|
121
|
+
raise HTTPException(
|
|
122
|
+
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
|
123
|
+
detail=result.error_message or "Import failed",
|
|
124
|
+
)
|
|
125
|
+
except Exception as e:
|
|
126
|
+
logger.exception("Import failed")
|
|
127
|
+
raise HTTPException(
|
|
128
|
+
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
|
129
|
+
detail=f"Import failed: {str(e)}",
|
|
130
|
+
)
|
|
131
|
+
return result
|
|
132
|
+
|
|
133
|
+
|
|
134
|
+
async def import_file(importer: Importer, file: UploadFile, destination_folder: str):
    """Parse an uploaded JSON file and run *importer* over it.

    Shared helper for the ChatGPT / Claude import endpoints.

    Args:
        importer: Importer implementation to run.
        file: Uploaded JSON file.
        destination_folder: Folder to place the generated files in.

    Returns:
        The importer's result object (success already verified).

    Raises:
        HTTPException: 500 if parsing fails or the importer reports failure.
    """
    try:
        # Process file
        json_data = json.load(file.file)
        result = await importer.import_data(json_data, destination_folder)
        if not result.success:  # pragma: no cover
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail=result.error_message or "Import failed",
            )

        return result

    except HTTPException:
        # Re-raise as-is: otherwise the broad handler below re-wraps it and
        # doubles the "Import failed" prefix in the detail message.
        raise
    except Exception as e:
        logger.exception("Import failed")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Import failed: {str(e)}",
        )
|