synapto 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- synapto/__init__.py +3 -0
- synapto/cli.py +620 -0
- synapto/config.py +125 -0
- synapto/db/__init__.py +1 -0
- synapto/db/migrations.py +300 -0
- synapto/db/postgres.py +88 -0
- synapto/db/redis_cache.py +103 -0
- synapto/decay/__init__.py +1 -0
- synapto/decay/maintenance.py +58 -0
- synapto/decay/scoring.py +50 -0
- synapto/embeddings/__init__.py +1 -0
- synapto/embeddings/base.py +31 -0
- synapto/embeddings/openai_provider.py +56 -0
- synapto/embeddings/registry.py +59 -0
- synapto/embeddings/sentence_transformer.py +53 -0
- synapto/graph/__init__.py +1 -0
- synapto/graph/entities.py +97 -0
- synapto/graph/relations.py +65 -0
- synapto/hrr/__init__.py +1 -0
- synapto/hrr/banks.py +74 -0
- synapto/hrr/core.py +160 -0
- synapto/hrr/retrieval.py +249 -0
- synapto/repositories/__init__.py +1 -0
- synapto/repositories/banks.py +54 -0
- synapto/repositories/entities.py +122 -0
- synapto/repositories/memories.py +204 -0
- synapto/repositories/relations.py +134 -0
- synapto/search/__init__.py +1 -0
- synapto/search/graph.py +178 -0
- synapto/search/hybrid.py +240 -0
- synapto/server.py +388 -0
- synapto-0.1.0.dist-info/METADATA +269 -0
- synapto-0.1.0.dist-info/RECORD +36 -0
- synapto-0.1.0.dist-info/WHEEL +4 -0
- synapto-0.1.0.dist-info/entry_points.txt +2 -0
- synapto-0.1.0.dist-info/licenses/LICENSE +21 -0
synapto/__init__.py
ADDED
synapto/cli.py
ADDED
|
@@ -0,0 +1,620 @@
|
|
|
1
|
+
"""Synapto CLI — command-line interface for managing the memory graph."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import asyncio
|
|
6
|
+
import json
|
|
7
|
+
import logging
|
|
8
|
+
|
|
9
|
+
import click
|
|
10
|
+
|
|
11
|
+
from synapto import __version__
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def _run(coro):
|
|
15
|
+
"""Run an async function synchronously."""
|
|
16
|
+
return asyncio.run(coro)
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
@click.group()
@click.version_option(version=__version__, prog_name="synapto")
@click.option("--verbose", "-v", is_flag=True, help="enable debug logging")
def main(verbose: bool) -> None:
    """Synapto — persistent memory graph for AI coding agents."""
    # Root CLI group: its only job is configuring logging before any
    # subcommand runs.
    if verbose:
        chosen_level = logging.DEBUG
    else:
        chosen_level = logging.INFO
    logging.basicConfig(
        level=chosen_level,
        datefmt="%H:%M:%S",
        format="%(asctime)s [%(name)s] %(levelname)s: %(message)s",
    )
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
@main.command()
@click.option("--pg-dsn", envvar="SYNAPTO_PG_DSN", default="postgresql://localhost/synapto")
@click.option("--interactive", "-i", is_flag=True, help="interactively configure synapto")
def init(pg_dsn: str, interactive: bool) -> None:
    """Initialize the database — create tables, indexes, and extensions."""
    # Heavy dependencies are imported lazily so `synapto --help` stays fast.
    import tomli_w

    from synapto.config import CONFIG_DIR, CONFIG_FILE, save_default_config

    if interactive:
        # Interactive path: prompt for every setting, write the TOML config,
        # run migrations, warm the embedding model, and offer MCP client
        # configuration. Returns early — the non-interactive path below is
        # never reached in this branch.
        click.echo("synapto interactive setup\n")

        pg_dsn = click.prompt("postgresql dsn", default=pg_dsn)
        redis_url = click.prompt("redis url", default="redis://localhost:6379/0")
        tenant = click.prompt("default tenant name", default="default")
        provider = click.prompt(
            "embedding provider",
            default="sentence-transformers",
            type=click.Choice(["sentence-transformers", "openai"], case_sensitive=False),
        )

        # write config — tomli_w requires a binary file handle
        CONFIG_DIR.mkdir(parents=True, exist_ok=True)
        data = {
            "postgresql": {"dsn": pg_dsn},
            "redis": {"url": redis_url},
            "embeddings": {"provider": provider, "model": ""},
            "defaults": {"tenant": tenant},
            "decay": {"ephemeral_max_age_hours": 24, "purge_after_days": 30},
            "server": {"name": "synapto"},
        }
        with open(CONFIG_FILE, "wb") as f:
            tomli_w.dump(data, f)
        click.echo(f"\nconfig written: {CONFIG_FILE}")

        # run migrations against the freshly configured DSN
        async def _init_interactive():
            from synapto.db.migrations import get_schema_version, run_migrations
            from synapto.db.postgres import PostgresClient

            client = PostgresClient(pg_dsn)
            await client.connect()
            await run_migrations(client)
            version = await get_schema_version(client)
            await client.close()
            return version

        version = _run(_init_interactive())
        click.echo(f"database initialized (schema v{version})")

        # check embedding model availability (may download model weights on
        # first use, hence the progress message before get_provider)
        click.echo(f"loading embedding model ({provider})...")
        from synapto.embeddings.registry import get_provider

        p = get_provider(provider)
        click.echo(f"embedding model ready: {p.name} (dim={p.dimension})")

        # offer to write MCP client config (Claude Code / Cursor, if present)
        _offer_mcp_config(tenant)

        # summary
        click.echo("\n--- setup complete ---")
        click.echo(f" postgresql: {pg_dsn}")
        click.echo(f" redis: {redis_url}")
        click.echo(f" tenant: {tenant}")
        click.echo(f" embeddings: {p.name}")
        click.echo(f" config: {CONFIG_FILE}")
        return

    # Non-interactive path: write the default config (if absent) and apply
    # migrations using the DSN from the flag/env var.
    config_path = save_default_config()
    click.echo(f"config: {config_path}")

    async def _init():
        from synapto.db.migrations import get_schema_version, run_migrations
        from synapto.db.postgres import PostgresClient

        client = PostgresClient(pg_dsn)
        await client.connect()

        version = await get_schema_version(client)
        if version:
            # Migrations are expected to be idempotent, so re-running is safe.
            click.echo(f"schema already at version {version}, re-applying...")

        await run_migrations(client)
        version = await get_schema_version(client)
        click.echo(f"synapto database initialized (schema v{version})")

        await client.close()

    _run(_init())
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
@main.command()
def serve() -> None:
    """Start the Synapto MCP server (stdio transport)."""
    # Imported lazily so other CLI commands never pay server start-up costs.
    # mcp.run() blocks until the client disconnects.
    from synapto.server import mcp
    mcp.run()
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
@main.command()
@click.argument("query")
@click.option("--tenant", "-t", default=None, help="tenant/project scope")
@click.option("--limit", "-n", default=10, help="max results")
@click.option("--depth", "-d", default=None, help="depth layer filter")
def search(query: str, tenant: str | None, limit: int, depth: str | None) -> None:
    """Search memories from the command line."""

    async def _search():
        # Lazy imports keep CLI start-up cheap for unrelated commands.
        from synapto.config import load_config
        from synapto.db.migrations import ensure_hnsw_index
        from synapto.db.postgres import PostgresClient
        from synapto.embeddings.registry import get_provider
        from synapto.search.hybrid import hybrid_search

        config = load_config()
        # Explicit --tenant wins; otherwise fall back to the configured default.
        t = tenant or config.default_tenant

        client = PostgresClient(config.pg_dsn)
        await client.connect()

        provider = get_provider(config.embedding_provider)
        # Index dimension must match the active embedding provider's output.
        await ensure_hnsw_index(client, provider.dimension)

        results = await hybrid_search(
            client, provider, query, tenant=t, depth_layer=depth, limit=limit
        )

        if not results:
            click.echo("no memories found")
        else:
            for r in results:
                # rrf_score: reciprocal-rank-fusion score from hybrid_search.
                click.echo(f"\n[{r.depth_layer}] ({r.type}) score={r.rrf_score:.4f}")
                # Content is truncated for terminal display.
                click.echo(f" {r.content[:200]}")
                click.echo(f" id={r.id}")

        await client.close()

    _run(_search())
|
|
170
|
+
|
|
171
|
+
|
|
172
|
+
@main.command()
@click.option("--tenant", "-t", default=None, help="tenant/project scope")
def stats(tenant: str | None) -> None:
    """Show memory statistics."""

    async def _stats():
        from synapto.config import load_config
        from synapto.db.postgres import PostgresClient
        from synapto.repositories.entities import EntityRepository
        from synapto.repositories.memories import MemoryRepository

        config = load_config()
        client = PostgresClient(config.pg_dsn)
        await client.connect()

        t = tenant or config.default_tenant
        mem_repo = MemoryRepository(client)
        ent_repo = EntityRepository(client)

        # NOTE(review): when --tenant is omitted the repos receive None
        # (presumably "all tenants") and `t` above goes unused — confirm
        # whether stats should instead default to config.default_tenant.
        by_type = await mem_repo.count_by_type(t if tenant else None)
        by_depth = await mem_repo.count_by_depth(t if tenant else None)
        # Total derived from the per-type breakdown rather than a second query.
        total = sum(r["cnt"] for r in by_type)
        entity_count = await ent_repo.count(t if tenant else None)

        click.echo(f"total memories: {total}")
        click.echo(f"total entities: {entity_count}")
        click.echo("\nby type:")
        for r in by_type:
            click.echo(f" {r['type']}: {r['cnt']}")
        click.echo("\nby depth layer:")
        for r in by_depth:
            click.echo(f" {r['depth_layer']}: {r['cnt']}")

        await client.close()

    _run(_stats())
|
|
208
|
+
|
|
209
|
+
|
|
210
|
+
@main.command()
def doctor() -> None:
    """Check system health — PostgreSQL, Redis, embeddings, config, and schema."""

    # Small formatters for consistent, color-coded status lines.
    def _green(msg: str) -> str:
        return click.style(f"[ok] {msg}", fg="green")

    def _warn(msg: str, fix: str) -> str:
        return click.style(f"[warn] {msg}", fg="yellow") + f"\n fix: {fix}"

    def _fail(msg: str, fix: str) -> str:
        return click.style(f"[fail] {msg}", fg="red") + f"\n fix: {fix}"

    from synapto.config import CONFIG_FILE, load_config

    click.echo("synapto doctor\n")

    # 1. config file
    if CONFIG_FILE.exists():
        click.echo(_green(f"config file exists: {CONFIG_FILE}"))
    else:
        click.echo(_warn(f"config file missing: {CONFIG_FILE}", "run: synapto init"))

    # load_config is expected to fall back to defaults when the file is
    # missing, so the remaining checks can still run.
    config = load_config()

    # 2. postgresql connectivity
    # Each check below opens its own single-connection pool so a failure in
    # one check cannot poison the others.
    async def _check_pg():
        from synapto.db.postgres import PostgresClient

        client = PostgresClient(config.pg_dsn, min_size=1, max_size=1)
        await client.connect()
        row = await client.execute_one("SELECT version() AS v;")
        await client.close()
        return row["v"]

    try:
        pg_version = _run(_check_pg())
        # version() returns e.g. "PostgreSQL 16.2, compiled by ..." — keep
        # only the part before the first comma.
        short = pg_version.split(",")[0] if pg_version else "unknown"
        click.echo(_green(f"postgresql: {short}"))
    except Exception as e:
        click.echo(_fail(f"postgresql: {e}", f"check connection to {config.pg_dsn}"))

    # 3. pgvector extension
    async def _check_pgvector():
        from synapto.db.postgres import PostgresClient

        client = PostgresClient(config.pg_dsn, min_size=1, max_size=1)
        await client.connect()
        row = await client.execute_one(
            "SELECT extversion FROM pg_extension WHERE extname = 'vector';"
        )
        await client.close()
        return row

    try:
        ext_row = _run(_check_pgvector())
        if ext_row:
            click.echo(_green(f"pgvector extension: v{ext_row['extversion']}"))
        else:
            click.echo(_warn("pgvector extension not installed", "run: CREATE EXTENSION vector;"))
    except Exception as e:
        click.echo(_fail(f"pgvector check: {e}", "ensure postgresql is reachable"))

    # 4. redis connectivity
    async def _check_redis():
        import redis.asyncio as aioredis

        client = aioredis.from_url(config.redis_url, decode_responses=True)
        info = await client.info("server")
        await client.aclose()
        return info.get("redis_version", "unknown")

    try:
        redis_ver = _run(_check_redis())
        click.echo(_green(f"redis: v{redis_ver}"))
    except Exception as e:
        click.echo(_fail(f"redis: {e}", f"check connection to {config.redis_url}"))

    # 5. embedding model — get_provider may load model weights, so failures
    # here usually mean a missing model or bad provider name.
    try:
        from synapto.embeddings.registry import get_provider

        provider = get_provider(config.embedding_provider)
        click.echo(_green(f"embedding model: {provider.name} (dim={provider.dimension})"))
    except Exception as e:
        click.echo(_fail(f"embedding model: {e}", "check embedding provider config"))

    # 6. schema version
    async def _check_schema():
        from synapto.db.migrations import get_schema_version
        from synapto.db.postgres import PostgresClient

        client = PostgresClient(config.pg_dsn, min_size=1, max_size=1)
        await client.connect()
        version = await get_schema_version(client)
        await client.close()
        return version

    try:
        schema_v = _run(_check_schema())
        if schema_v:
            click.echo(_green(f"schema version: v{schema_v}"))
        else:
            click.echo(_warn("schema not initialized", "run: synapto init"))
    except Exception:
        # Downgraded to warn: if we got here, check #2 already reported the
        # underlying postgresql failure.
        click.echo(_warn("schema check skipped (postgresql unreachable)", "fix postgresql first"))

    click.echo()
|
|
318
|
+
|
|
319
|
+
|
|
320
|
+
@main.group()
def migrate() -> None:
    """Database migration management."""
    # Group body intentionally empty — subcommands carry all behavior.


# NOTE(review): @main.group() already attaches `migrate` to `main`, so this
# explicit add_command is redundant (though harmless).
main.add_command(migrate)
|
|
326
|
+
|
|
327
|
+
|
|
328
|
+
@migrate.command(name="up")
@click.option("--pg-dsn", envvar="SYNAPTO_PG_DSN", default=None)
@click.option("--to", "target", default=None, type=int, help="apply up to this version")
def migrate_up(pg_dsn: str | None, target: int | None) -> None:
    """Apply all pending migrations."""

    async def _up():
        from synapto.config import load_config
        # NOTE: this import shadows the command function `migrate_up` inside
        # _up's local scope only — intentional, not a collision.
        from synapto.db.migrations import get_schema_version, migrate_up
        from synapto.db.postgres import PostgresClient

        config = load_config()
        # CLI flag / env var wins over the config file.
        dsn = pg_dsn or config.pg_dsn
        client = PostgresClient(dsn, min_size=1, max_size=2)
        await client.connect()
        applied = await migrate_up(client, target_version=target)
        version = await get_schema_version(client)
        await client.close()
        # applied: list of migration filenames that were run this invocation.
        return applied, version

    applied, version = _run(_up())
    if applied:
        for f in applied:
            click.echo(click.style(f" applied: {f}", fg="green"))
        click.echo(f"\nschema now at v{version}")
    else:
        click.echo(f"all migrations up to date (v{version})")
|
|
355
|
+
|
|
356
|
+
|
|
357
|
+
@migrate.command(name="down")
@click.option("--pg-dsn", envvar="SYNAPTO_PG_DSN", default=None)
@click.option("--to", "target", default=0, type=int, help="rollback to this version (exclusive)")
def migrate_down(pg_dsn: str | None, target: int) -> None:
    """Rollback migrations to a target version."""

    async def _down():
        from synapto.config import load_config
        from synapto.db.migrations import get_schema_version
        # Aliased to avoid shadowing confusion with the command function.
        from synapto.db.migrations import migrate_down as _migrate_down
        from synapto.db.postgres import PostgresClient

        config = load_config()
        # CLI flag / env var wins over the config file.
        dsn = pg_dsn or config.pg_dsn
        client = PostgresClient(dsn, min_size=1, max_size=2)
        await client.connect()
        rolled_back = await _migrate_down(client, target_version=target)
        version = await get_schema_version(client)
        await client.close()
        # rolled_back: list of migration filenames reverted this invocation.
        return rolled_back, version

    rolled_back, version = _run(_down())
    if rolled_back:
        for f in rolled_back:
            click.echo(click.style(f" rolled back: {f}", fg="yellow"))
        # version may be None after rolling everything back — display 0.
        click.echo(f"\nschema now at v{version or 0}")
    else:
        click.echo("nothing to roll back")
|
|
385
|
+
|
|
386
|
+
|
|
387
|
+
@migrate.command(name="status")
@click.option("--pg-dsn", envvar="SYNAPTO_PG_DSN", default=None)
def migrate_status(pg_dsn: str | None) -> None:
    """Show migration status."""

    async def _fetch():
        from synapto.config import load_config
        from synapto.db.migrations import get_migration_status
        from synapto.db.postgres import PostgresClient

        cfg = load_config()
        dsn = pg_dsn or cfg.pg_dsn
        client = PostgresClient(dsn, min_size=1, max_size=2)
        await client.connect()
        result = await get_migration_status(client)
        await client.close()
        return result

    statuses = _run(_fetch())
    if not statuses:
        click.echo("no migrations found")
        return

    # One line per migration file, colored by state.
    for entry in statuses:
        if entry["status"] != "applied":
            click.echo(click.style(f" [pending] {entry['filename']}", fg="yellow"))
            continue
        # Flag applied migrations whose file has changed since they ran.
        checksum = " (checksum mismatch!)" if entry["checksum_ok"] is False else ""
        click.echo(click.style(f" [applied] {entry['filename']}{checksum}", fg="green"))
|
|
416
|
+
|
|
417
|
+
|
|
418
|
+
@main.command(name="export")
@click.option("--tenant", "-t", default=None, help="tenant/project scope")
@click.option("--output", "-o", default="-", help="output file (- for stdout)")
def export_cmd(tenant: str | None, output: str) -> None:
    """Export memories as JSON."""

    async def _export():
        from synapto.config import load_config
        from synapto.db.postgres import PostgresClient

        config = load_config()
        client = PostgresClient(config.pg_dsn)
        await client.connect()

        t = tenant or config.default_tenant
        # Soft-deleted rows (deleted_at set) are excluded; embeddings are
        # deliberately not exported — they are re-derived on import.
        rows = await client.execute(
            """
            SELECT id, content, summary, type, tenant, depth_layer, metadata, created_at, accessed_at
            FROM memories WHERE deleted_at IS NULL AND tenant = %s ORDER BY created_at;
            """,
            (t,),
        )

        # Stringify the non-JSON-native columns (UUID id, datetimes) so
        # json.dumps below doesn't choke on them.
        data = [
            {k: str(v) if k in ("id", "created_at", "accessed_at") else v for k, v in row.items()}
            for row in rows
        ]

        text = json.dumps(data, indent=2, ensure_ascii=False)

        if output == "-":
            click.echo(text)
        else:
            with open(output, "w") as f:
                f.write(text)
            # Summary goes to stdout only in file mode, keeping "-" output
            # pipe-clean JSON.
            click.echo(f"exported {len(data)} memories to {output}")

        await client.close()

    _run(_export())
|
|
458
|
+
|
|
459
|
+
|
|
460
|
+
@main.command(name="import")
@click.argument("file_path")
@click.option("--tenant", "-t", default=None, help="tenant/project scope")
@click.option("--format", "fmt", type=click.Choice(["json", "markdown"]), default="json")
def import_cmd(file_path: str, tenant: str | None, fmt: str) -> None:
    """Import memories from JSON or MEMORY.md file."""

    async def _import():
        from psycopg.types.json import Jsonb

        from synapto.config import load_config
        from synapto.db.migrations import ensure_hnsw_index, run_migrations
        from synapto.db.postgres import PostgresClient
        from synapto.embeddings.registry import get_provider

        config = load_config()
        t = tenant or config.default_tenant

        client = PostgresClient(config.pg_dsn)
        await client.connect()
        # Migrations run on every import so this command works against a
        # fresh database without a prior `synapto init`.
        await run_migrations(client)

        provider = get_provider(config.embedding_provider)
        await ensure_hnsw_index(client, provider.dimension)

        with open(file_path) as f:
            raw = f.read()

        if fmt == "json":
            data = json.loads(raw)
        else:
            # parse MEMORY.md — each section becomes a memory
            data = _parse_markdown_memories(raw, t)

        count = 0
        for item in data:
            content = item.get("content", "")
            # Skip blank/whitespace-only entries rather than storing them.
            if not content.strip():
                continue
            # Embeddings are computed one item at a time (serially); large
            # imports are bounded by embedding latency.
            emb = await provider.embed_one(content)
            await client.execute(
                """
                INSERT INTO memories (content, summary, embedding, embedding_dim, type, tenant, depth_layer, metadata)
                VALUES (%s, %s, %s, %s, %s, %s, %s, %s);
                """,
                (
                    content,
                    item.get("summary"),
                    emb,
                    provider.dimension,
                    item.get("type", "general"),
                    t,
                    item.get("depth_layer", "stable"),
                    Jsonb(item.get("metadata", {})),
                ),
            )
            count += 1

        click.echo(f"imported {count} memories into tenant '{t}'")
        await client.close()

    _run(_import())
|
|
522
|
+
|
|
523
|
+
|
|
524
|
+
def _detect_mcp_clients(home=None) -> list[dict]:
|
|
525
|
+
"""Detect installed MCP clients and their config paths."""
|
|
526
|
+
from pathlib import Path
|
|
527
|
+
|
|
528
|
+
home = home or Path.home()
|
|
529
|
+
clients = []
|
|
530
|
+
|
|
531
|
+
# claude code
|
|
532
|
+
for path in [home / ".claude" / ".mcp.json", home / ".claude" / "settings.json"]:
|
|
533
|
+
if path.parent.exists():
|
|
534
|
+
clients.append({"name": "Claude Code", "path": path, "key": "mcpServers"})
|
|
535
|
+
break
|
|
536
|
+
|
|
537
|
+
# cursor
|
|
538
|
+
cursor_path = home / ".cursor" / "mcp.json"
|
|
539
|
+
if cursor_path.parent.exists():
|
|
540
|
+
clients.append({"name": "Cursor", "path": cursor_path, "key": "mcpServers"})
|
|
541
|
+
|
|
542
|
+
return clients
|
|
543
|
+
|
|
544
|
+
|
|
545
|
+
def _write_mcp_config(config_path, tenant: str = "default") -> None:
|
|
546
|
+
"""Write synapto MCP config using uvx for auto-updates."""
|
|
547
|
+
from pathlib import Path
|
|
548
|
+
|
|
549
|
+
path = Path(config_path)
|
|
550
|
+
existing = {}
|
|
551
|
+
if path.exists():
|
|
552
|
+
with open(path) as f:
|
|
553
|
+
existing = json.loads(f.read())
|
|
554
|
+
|
|
555
|
+
servers = existing.get("mcpServers", {})
|
|
556
|
+
server_config: dict = {
|
|
557
|
+
"command": "uvx",
|
|
558
|
+
"args": ["synapto", "serve"],
|
|
559
|
+
}
|
|
560
|
+
if tenant != "default":
|
|
561
|
+
server_config["env"] = {"SYNAPTO_DEFAULT_TENANT": tenant}
|
|
562
|
+
|
|
563
|
+
servers["synapto"] = server_config
|
|
564
|
+
existing["mcpServers"] = servers
|
|
565
|
+
|
|
566
|
+
path.parent.mkdir(parents=True, exist_ok=True)
|
|
567
|
+
with open(path, "w") as f:
|
|
568
|
+
f.write(json.dumps(existing, indent=2))
|
|
569
|
+
|
|
570
|
+
|
|
571
|
+
def _offer_mcp_config(tenant: str = "default") -> None:
    """Detect MCP clients and offer to write uvx-based config."""
    detected = _detect_mcp_clients()
    if not detected:
        # Nothing installed — stay silent rather than prompting pointlessly.
        return

    click.echo("\n--- mcp client configuration ---")
    for entry in detected:
        if not click.confirm(f"configure {entry['name']} with auto-update (uvx)?", default=True):
            click.echo(f" skipped: {entry['name']}")
            continue
        _write_mcp_config(entry["path"], tenant)
        click.echo(f" written: {entry['path']}")
|
|
584
|
+
|
|
585
|
+
|
|
586
|
+
def _parse_markdown_memories(text: str, tenant: str) -> list[dict]:
|
|
587
|
+
"""Parse a MEMORY.md-style file into memory entries."""
|
|
588
|
+
memories = []
|
|
589
|
+
current_section = None
|
|
590
|
+
current_content = []
|
|
591
|
+
|
|
592
|
+
for line in text.split("\n"):
|
|
593
|
+
if line.startswith("## "):
|
|
594
|
+
if current_section and current_content:
|
|
595
|
+
memories.append({
|
|
596
|
+
"content": "\n".join(current_content).strip(),
|
|
597
|
+
"summary": current_section,
|
|
598
|
+
"type": "reference",
|
|
599
|
+
"depth_layer": "stable",
|
|
600
|
+
"metadata": {"source": "MEMORY.md"},
|
|
601
|
+
})
|
|
602
|
+
current_section = line[3:].strip()
|
|
603
|
+
current_content = []
|
|
604
|
+
elif current_section:
|
|
605
|
+
current_content.append(line)
|
|
606
|
+
|
|
607
|
+
if current_section and current_content:
|
|
608
|
+
memories.append({
|
|
609
|
+
"content": "\n".join(current_content).strip(),
|
|
610
|
+
"summary": current_section,
|
|
611
|
+
"type": "reference",
|
|
612
|
+
"depth_layer": "stable",
|
|
613
|
+
"metadata": {"source": "MEMORY.md"},
|
|
614
|
+
})
|
|
615
|
+
|
|
616
|
+
return memories
|
|
617
|
+
|
|
618
|
+
|
|
619
|
+
# Allow direct execution (python -m / python cli.py) in addition to the
# console-script entry point declared in entry_points.txt.
if __name__ == "__main__":
    main()
|