yourmemory 1.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
memory_mcp.py ADDED
@@ -0,0 +1,464 @@
1
+ """
2
+ YourMemory MCP Server
3
+
4
+ Exposes three tools to Claude:
5
+ recall_memory — retrieve relevant memories before answering
6
+ store_memory — insert a new memory after learning something new
7
+ update_memory — merge or replace an existing memory (by id from recall)
8
+ """
9
+
10
+ import asyncio
11
+ import json
12
+ import os
13
+ import sys
14
+ import threading
15
+
16
+ from dotenv import load_dotenv
17
+ from mcp.server import Server
18
+ from mcp.server.stdio import stdio_server
19
+ from mcp import types
20
+
21
# Make the project root importable so `src.*` absolute imports resolve when
# this file is launched directly by an MCP client (cwd is not guaranteed).
sys.path.insert(0, os.path.dirname(__file__))
# Load env vars (DATABASE_URL etc.) from a .env sitting next to this file.
load_dotenv(os.path.join(os.path.dirname(__file__), ".env"))

# Heavy imports (spaCy model, DB drivers) are deferred to first tool call
# so the MCP handshake completes instantly on startup.
# Maps service name -> callable; empty until _load_services() runs.
_services = {}
28
+
29
def _load_services():
    """Lazily import the heavy service layer on the first tool call.

    Populates the module-level ``_services`` registry exactly once; any
    later invocation is a no-op because the dict is already non-empty.
    """
    if _services:
        return
    from src.services.retrieve import retrieve as _retrieve
    from src.services.embed import embed
    from src.services.extract import is_question, categorize
    from src.services.api_keys import validate_api_key
    from src.services.resolve import resolve
    from src.db.connection import get_backend, get_conn, emb_to_db

    _services.update(
        retrieve=_retrieve,
        embed=embed,
        is_question=is_question,
        categorize=categorize,
        validate_api_key=validate_api_key,
        resolve=resolve,
        get_backend=get_backend,
        get_conn=get_conn,
        emb_to_db=emb_to_db,
    )
47
+
48
# Fallback identity used when a tool call omits user_id.
DEFAULT_USER = "sachit"
# Mid-scale importance ("regular facts"); NOTE(review): appears unused in this
# module — store_memory requires an explicit importance — confirm before removing.
DEFAULT_IMPORTANCE = 0.5


# ── MCP Server ────────────────────────────────────────────────────────────────

# Single MCP server instance; tool handlers below register via its decorators.
server = Server("yourmemory")
55
+
56
+
57
@server.list_tools()
async def list_tools() -> list[types.Tool]:
    """Advertise the three memory tools to the connected MCP client.

    The description strings double as model-facing prompting: they tell the
    assistant when to call each tool and how to pick `importance`/`category`.
    They are part of runtime behavior — do not edit casually.
    """
    return [
        # Read path: semantic recall, optionally scoped by an agent API key.
        types.Tool(
            name="recall_memory",
            description=(
                "Retrieve memories relevant to a query. "
                "Call this at the start of every task to get context about the user's preferences, "
                "past instructions, and known facts. Returns a list of memories with their IDs."
            ),
            inputSchema={
                "type": "object",
                "properties": {
                    "query": {
                        "type": "string",
                        "description": "Keywords or sentence describing what to look for in memory.",
                    },
                    "user_id": {
                        "type": "string",
                        "description": f"User identifier (default: '{DEFAULT_USER}').",
                    },
                    "api_key": {
                        "type": "string",
                        "description": "Agent API key (starts with 'ym_'). If provided, also returns this agent's private memories. If omitted, returns shared memories only.",
                    },
                    "top_k": {
                        "type": "integer",
                        "description": "Max memories to return (default: 5).",
                    },
                },
                "required": ["query"],
            },
        ),
        # Write path: insert a new memory; the model must supply importance.
        types.Tool(
            name="store_memory",
            description=(
                "Store a new memory about the user. "
                "Use when you learn a new fact, preference, instruction, past failure, or successful strategy. "
                "Does not conflict with any memory returned by recall_memory."
            ),
            inputSchema={
                "type": "object",
                "properties": {
                    "content": {
                        "type": "string",
                        "description": "The fact, preference, failure, or strategy to remember.",
                    },
                    "importance": {
                        "type": "number",
                        "description": (
                            "You MUST decide this. How important is this memory? (0.0–1.0)\n"
                            "0.9–1.0 — core identity, permanent preferences (e.g. 'Sachit uses Python')\n"
                            "0.7–0.8 — strong preferences, recurring patterns\n"
                            "0.5 — regular facts, project decisions\n"
                            "0.2–0.3 — transient context, one-off notes from this session"
                        ),
                    },
                    "category": {
                        "type": "string",
                        "description": (
                            "Memory category — controls decay rate:\n"
                            " 'fact' — user preferences, identity, stable knowledge (default, ~24 day survival)\n"
                            " 'assumption' — inferred beliefs, uncertain context (~19 days)\n"
                            " 'failure' — what went wrong in a past task, environment-specific errors (~11 days, decays fast)\n"
                            " 'strategy' — what worked well in a past task, approach patterns (~38 days, decays slow)\n"
                            "Use 'failure' when storing e.g. 'OAuth failed for client X due to wrong redirect URI'.\n"
                            "Use 'strategy' when storing e.g. 'Using pagination fixed the timeout on large DB queries'."
                        ),
                    },
                    "user_id": {
                        "type": "string",
                        "description": f"User identifier (default: '{DEFAULT_USER}').",
                    },
                    "api_key": {
                        "type": "string",
                        "description": "Agent API key (starts with 'ym_'). Required for agent-scoped memory. If omitted, stored as 'user' with shared visibility.",
                    },
                    "visibility": {
                        "type": "string",
                        "description": "Who can recall this memory: 'shared' (any agent, default) or 'private' (only this agent).",
                    },
                },
                "required": ["content", "importance"],
            },
        ),
        # Update path: merge/replace an existing row identified by recall's IDs.
        types.Tool(
            name="update_memory",
            description=(
                "Merge or replace an existing memory by its ID. "
                "Use when a recalled memory is outdated (replace) or when new info adds detail "
                "to an existing memory (merge — write the combined sentence as new_content)."
            ),
            inputSchema={
                "type": "object",
                "properties": {
                    "memory_id": {
                        "type": "integer",
                        "description": "ID of the memory to update (from recall_memory results).",
                    },
                    "new_content": {
                        "type": "string",
                        "description": "The updated or merged memory text.",
                    },
                    "importance": {
                        "type": "number",
                        "description": (
                            "You MUST decide this. Re-evaluate importance after the update. (0.0–1.0)\n"
                            "0.9–1.0 — core identity, permanent preferences\n"
                            "0.7–0.8 — strong preferences, recurring patterns\n"
                            "0.5 — regular facts, project decisions\n"
                            "0.2–0.3 — transient context, one-off notes"
                        ),
                    },
                },
                "required": ["memory_id", "new_content", "importance"],
            },
        ),
    ]
175
+
176
+
177
@server.call_tool()
async def call_tool(name: str, arguments: dict) -> list[types.TextContent]:
    """Dispatch a tool invocation to recall/store/update handlers.

    All responses are a single TextContent carrying a JSON payload; errors are
    reported in-band as {"error": ...} rather than raised, so the client
    always gets a well-formed tool result.
    """
    # First call pays the heavy-import cost; afterwards this is a dict lookup.
    _load_services()
    retrieve = _services["retrieve"]
    embed = _services["embed"]
    is_question = _services["is_question"]
    categorize = _services["categorize"]
    validate_api_key = _services["validate_api_key"]
    resolve = _services["resolve"]
    get_backend = _services["get_backend"]
    get_conn = _services["get_conn"]
    emb_to_db = _services["emb_to_db"]

    if name == "recall_memory":
        user_id = arguments.get("user_id", DEFAULT_USER)
        query = arguments["query"]
        top_k = arguments.get("top_k", 5)
        api_key = arguments.get("api_key")

        # Optional agent auth: an invalid key is a hard error, a missing key
        # degrades to shared-only recall (agent_id=None).
        agent = None
        if api_key:
            agent = validate_api_key(api_key)
            if not agent:
                return [types.TextContent(type="text", text=json.dumps(
                    {"error": "Invalid or revoked API key."}))]

        agent_id = agent["agent_id"] if agent else None
        result = retrieve(user_id, query, top_k=top_k, agent_id=agent_id)

        # Post-filter by the agent's read ACL. Empty can_read means "all
        # shared" (no filtering). NOTE(review): a non-empty can_read that does
        # not include this agent's own id would also drop the agent's own
        # memories — confirm that is intended.
        if agent:
            can_read = agent.get("can_read", [])
            if can_read:
                result["memories"] = [
                    m for m in result["memories"]
                    if m["agent_id"] in can_read
                ]
                result["memoriesFound"] = len(result["memories"])

        # default=str covers datetime columns coming back from the DB.
        return [types.TextContent(type="text", text=json.dumps(result, default=str))]

    elif name == "store_memory":
        user_id = arguments.get("user_id", DEFAULT_USER)
        api_key = arguments.get("api_key")

        # Resolve writer identity and write ACL. Keyless callers act as the
        # pseudo-agent 'user' with full write permissions.
        if api_key:
            agent = validate_api_key(api_key)
            if not agent:
                return [types.TextContent(type="text", text=json.dumps(
                    {"error": "Invalid or revoked API key."}))]
            agent_id = agent["agent_id"]
            can_write = agent.get("can_write", ["shared", "private"])
        else:
            agent_id = "user"
            can_write = ["shared", "private"]

        # Unknown visibility values silently normalize to 'shared'; only a
        # recognized-but-forbidden visibility is rejected.
        visibility = arguments.get("visibility", "shared")
        if visibility not in ("shared", "private"):
            visibility = "shared"
        if visibility not in can_write:
            return [types.TextContent(type="text", text=json.dumps(
                {"error": f"Agent '{agent_id}' is not permitted to write '{visibility}' memories."}))]

        content = arguments["content"]

        # Questions carry no durable information — refuse to store them.
        if is_question(content):
            return [types.TextContent(type="text", text=json.dumps(
                {"error": "Questions are not stored as memories."}))]

        if "importance" not in arguments:
            return [types.TextContent(type="text", text=json.dumps(
                {"error": "importance is required (0.0–1.0). Decide based on how permanent this memory should be."}))]
        # Clamp into [0, 1] rather than erroring on out-of-range values.
        importance = max(0.0, min(1.0, float(arguments["importance"])))
        valid_categories = {"fact", "assumption", "failure", "strategy"}
        raw_category = arguments.get("category", "").strip().lower()
        # Invalid/missing category falls back to automatic classification.
        category = raw_category if raw_category in valid_categories else categorize(content)
        embedding = embed(content)

        backend = get_backend()
        conn = get_conn()
        cur = conn.cursor()
        # NOTE(review): conn/cur are not closed in a finally block — an
        # exception below (outside the inner try) leaks the connection.

        # resolve() decides whether this content reinforces, replaces, merges
        # with, or is distinct from an existing memory of this user.
        resolution = resolve(user_id, content, embedding, conn)
        action = resolution["action"]
        final_content = resolution["content"]
        existing = resolution["existing"]

        if action == "reinforce":
            # Same memory seen again: bump recall stats, keep content as-is.
            if backend == "postgres":
                cur.execute("""
                    UPDATE memories SET recall_count = recall_count + 1, last_accessed_at = NOW()
                    WHERE id = %s RETURNING id
                """, (existing["id"],))
            else:
                cur.execute("""
                    UPDATE memories SET recall_count = recall_count + 1, last_accessed_at = datetime('now')
                    WHERE id = ?
                """, (existing["id"],))
            memory_id = existing["id"]
            category = existing["category"]

        elif action in ("replace", "merge"):
            # Overwrite the existing row with the resolved text; the merged
            # content gets a fresh embedding and a re-derived category.
            new_embedding = embed(final_content)
            new_emb_str = emb_to_db(new_embedding, backend)
            new_category = categorize(final_content)
            try:
                if backend == "postgres":
                    cur.execute("""
                        UPDATE memories
                        SET content = %s, embedding = %s::vector, category = %s,
                            recall_count = recall_count + 1, last_accessed_at = NOW()
                        WHERE id = %s RETURNING id
                    """, (final_content, new_emb_str, new_category, existing["id"]))
                else:
                    cur.execute("""
                        UPDATE memories
                        SET content = ?, embedding = ?, category = ?,
                            recall_count = recall_count + 1, last_accessed_at = datetime('now')
                        WHERE id = ?
                    """, (final_content, new_emb_str, new_category, existing["id"]))
                memory_id = existing["id"]
                category = new_category
            except Exception:
                # Presumably a UNIQUE(user_id, content) collision: another row
                # already holds final_content. Fall back to reinforcing that
                # row instead. NOTE(review): the response still reports
                # existing["id"], not the id of the row actually touched.
                conn.rollback()
                if backend == "postgres":
                    cur.execute("""
                        UPDATE memories SET recall_count = recall_count + 1, last_accessed_at = NOW()
                        WHERE user_id = %s AND content = %s RETURNING id
                    """, (user_id, final_content))
                else:
                    cur.execute("""
                        UPDATE memories SET recall_count = recall_count + 1, last_accessed_at = datetime('now')
                        WHERE user_id = ? AND content = ?
                    """, (user_id, final_content))
                memory_id = existing["id"]
                category = existing["category"]

        else:  # "new"
            # Genuinely new memory: insert, with an upsert guard against a
            # concurrent duplicate of (user_id, content).
            emb_str = emb_to_db(embedding, backend)
            if backend == "postgres":
                cur.execute("""
                    INSERT INTO memories (user_id, content, category, importance, embedding, agent_id, visibility)
                    VALUES (%s, %s, %s, %s, %s::vector, %s, %s)
                    ON CONFLICT (user_id, content) DO UPDATE
                    SET recall_count = memories.recall_count + 1, last_accessed_at = NOW()
                    RETURNING id
                """, (user_id, final_content, category, importance, emb_str, agent_id, visibility))
                memory_id = cur.fetchone()[0]
            else:
                cur.execute("""
                    INSERT INTO memories (user_id, content, category, importance, embedding, agent_id, visibility)
                    VALUES (?, ?, ?, ?, ?, ?, ?)
                    ON CONFLICT (user_id, content) DO UPDATE
                    SET recall_count = recall_count + 1, last_accessed_at = datetime('now')
                """, (user_id, final_content, category, importance, emb_str, agent_id, visibility))
                # NOTE(review): after ON CONFLICT DO UPDATE, sqlite3's
                # lastrowid is not guaranteed to be the updated row's id —
                # verify against the sqlite3 docs.
                memory_id = cur.lastrowid

        conn.commit()
        cur.close()
        conn.close()

        return [types.TextContent(type="text", text=json.dumps(
            {"stored": 1, "id": memory_id, "content": final_content, "category": category,
             "importance": importance, "agent_id": agent_id, "visibility": visibility,
             "action": action}))]

    elif name == "update_memory":
        memory_id = arguments["memory_id"]
        new_content = arguments["new_content"]
        if "importance" not in arguments:
            return [types.TextContent(type="text", text=json.dumps(
                {"error": "importance is required (0.0–1.0). Re-evaluate after the update."}))]
        importance = max(0.0, min(1.0, float(arguments["importance"])))

        category = _services["categorize"](new_content)
        embedding = embed(new_content)
        backend = get_backend()
        emb_str = emb_to_db(embedding, backend)
        conn = get_conn()
        cur = conn.cursor()

        # Fetch owner to scope the dedup query
        if backend == "postgres":
            cur.execute("SELECT user_id FROM memories WHERE id = %s", (memory_id,))
        else:
            cur.execute("SELECT user_id FROM memories WHERE id = ?", (memory_id,))
        owner = cur.fetchone()
        if owner is None:
            cur.close()
            conn.close()
            return [types.TextContent(type="text", text=json.dumps(
                {"error": f"Memory {memory_id} not found."}))]
        user_id_owner = owner[0]

        # Check if new content clashes with a *different* row
        resolution = resolve(user_id_owner, new_content, embedding, conn)
        if resolution["action"] != "new" and resolution["existing"]["id"] != memory_id:
            # The new text duplicates another memory: reinforce that one
            # instead of creating a near-duplicate via update.
            existing = resolution["existing"]
            if backend == "postgres":
                cur.execute("""
                    UPDATE memories SET recall_count = recall_count + 1, last_accessed_at = NOW()
                    WHERE id = %s RETURNING id, content, category, importance
                """, (existing["id"],))
                row = cur.fetchone()
            else:
                cur.execute("""
                    UPDATE memories SET recall_count = recall_count + 1, last_accessed_at = datetime('now')
                    WHERE id = ?
                """, (existing["id"],))
                cur.execute("SELECT id, content, category, importance FROM memories WHERE id = ?", (existing["id"],))
                row = cur.fetchone()
            conn.commit()
            cur.close()
            conn.close()
            return [types.TextContent(type="text", text=json.dumps(
                {"updated": 1, "id": row[0], "content": row[1], "category": row[2],
                 "importance": row[3], "action": "reinforce_existing"}))]

        # No clash: overwrite the target row in place.
        if backend == "postgres":
            cur.execute("""
                UPDATE memories
                SET content = %s,
                    embedding = %s::vector,
                    category = %s,
                    importance = %s,
                    recall_count = recall_count + 1,
                    last_accessed_at = NOW()
                WHERE id = %s
                RETURNING id, content, category, importance
            """, (new_content, emb_str, category, importance, memory_id))
            row = cur.fetchone()
        else:
            cur.execute("""
                UPDATE memories
                SET content = ?,
                    embedding = ?,
                    category = ?,
                    importance = ?,
                    recall_count = recall_count + 1,
                    last_accessed_at = datetime('now')
                WHERE id = ?
            """, (new_content, emb_str, category, importance, memory_id))
            cur.execute("SELECT id, content, category, importance FROM memories WHERE id = ?", (memory_id,))
            row = cur.fetchone()

        conn.commit()
        cur.close()
        conn.close()

        # Defensive: id was visible at the owner check above, but the row
        # could have vanished between the two statements.
        if row is None:
            return [types.TextContent(type="text", text=json.dumps(
                {"error": f"Memory {memory_id} not found."}))]

        return [types.TextContent(type="text", text=json.dumps(
            {"updated": 1, "id": row[0], "content": row[1], "category": row[2], "importance": row[3]}))]

    else:
        return [types.TextContent(type="text", text=json.dumps({"error": f"Unknown tool: {name}"}))]
434
+
435
+
436
def _start_decay_scheduler():
    """Run the decay job once immediately, then every 24 hours in a background thread."""
    from src.jobs.decay_job import run as run_decay

    def _worker():
        run_decay()
        stop = threading.Event()
        # The event is never set, so wait() always returns False after the
        # 24h timeout — this is just an interruptible daily sleep.
        while not stop.wait(timeout=86400):
            run_decay()

    threading.Thread(target=_worker, daemon=True, name="decay-scheduler").start()
448
+
449
+
450
async def main():
    """Start-up sequence: migrate DB, start decay thread, serve MCP on stdio."""
    # Run DB migration on startup (creates tables on first run, safe to repeat)
    from src.db.migrate import migrate
    migrate()
    _start_decay_scheduler()
    # Blocks until the stdio transport closes (client disconnects).
    async with stdio_server() as (read_stream, write_stream):
        await server.run(read_stream, write_stream, server.create_initialization_options())
457
+
458
+
459
def run():
    """Synchronous console entry point: drive the async server to completion."""
    asyncio.run(main())


if __name__ == "__main__":
    run()
src/__init__.py ADDED
File without changes
src/app.py ADDED
@@ -0,0 +1,30 @@
1
from contextlib import asynccontextmanager
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from fastapi import FastAPI
from src.routes import memories, retrieve, agents
from src.jobs.decay_job import run as run_decay
from src.db.migrate import migrate


# Process-wide scheduler driving the periodic memory-decay job.
scheduler = AsyncIOScheduler()


@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI lifespan hook: migrate the DB, then run decay every 24 hours."""
    migrate()
    scheduler.add_job(run_decay, "interval", hours=24, id="decay_job")
    scheduler.start()
    yield
    # Shut the scheduler down cleanly when the app stops.
    scheduler.shutdown()


app = FastAPI(title="YourMemory", version="0.1.0", lifespan=lifespan)

app.include_router(memories.router)
app.include_router(retrieve.router)
app.include_router(agents.router)


@app.get("/health")
def health():
    """Liveness probe endpoint."""
    return {"status": "ok"}
src/db/connection.py ADDED
@@ -0,0 +1,62 @@
1
+ """
2
+ Database connection factory.
3
+
4
+ Auto-detects backend from DATABASE_URL:
5
+ postgresql:// or postgres:// → PostgreSQL + pgvector
6
+ sqlite:///path → SQLite at given path
7
+ (unset / anything else) → SQLite at ~/.yourmemory/memories.db (default)
8
+
9
+ Usage:
10
+ from src.db.connection import get_conn, get_backend
11
+
12
+ conn = get_conn() # works for both backends
13
+ backend = get_backend() # "sqlite" or "postgres"
14
+ """
15
+
16
+ import os
17
+ import sqlite3
18
+ from pathlib import Path
19
+ from dotenv import load_dotenv
20
+
21
+ load_dotenv()
22
+
23
+
24
def get_backend() -> str:
    """Return 'sqlite' or 'postgres' based on DATABASE_URL.

    Any URL that does not use a postgres scheme — including an unset
    variable — falls back to the zero-setup SQLite backend.
    """
    url = os.getenv("DATABASE_URL", "")
    # startswith accepts a tuple of prefixes: one call covers both schemes.
    if url.startswith(("postgresql://", "postgres://")):
        return "postgres"
    return "sqlite"
30
+
31
+
32
+ def _sqlite_path() -> str:
33
+ """Resolve the SQLite file path."""
34
+ url = os.getenv("DATABASE_URL", "")
35
+ if url.startswith("sqlite:///"):
36
+ return url[10:]
37
+ path = Path.home() / ".yourmemory" / "memories.db"
38
+ path.parent.mkdir(parents=True, exist_ok=True)
39
+ return str(path)
40
+
41
+
42
def get_conn():
    """Open and return a database connection for the configured backend."""
    backend = get_backend()
    if backend == "postgres":
        # psycopg2 is only needed (and only imported) for the Postgres backend.
        import psycopg2
        return psycopg2.connect(os.getenv("DATABASE_URL"))
    db = sqlite3.connect(_sqlite_path())
    # Row factory gives dict-like access (row["col"]) to query results.
    db.row_factory = sqlite3.Row
    return db
51
+
52
+
53
def emb_to_db(embedding: list, backend: str = None) -> str:
    """
    Serialize an embedding list for storage.
    Postgres: '[0.1,0.2,...]' (pgvector wire format)
    SQLite: JSON string (stored as TEXT)
    """
    import json
    target = backend or get_backend()
    if target == "postgres":
        joined = ",".join(str(value) for value in embedding)
        return f"[{joined}]"
    return json.dumps(embedding)
src/db/migrate.py ADDED
@@ -0,0 +1,32 @@
1
+ import os
2
+ from dotenv import load_dotenv
3
+ from src.db.connection import get_backend, get_conn
4
+
5
+ load_dotenv()
6
+
7
+
8
def migrate():
    """Apply the schema for the active backend (idempotent, safe to re-run).

    Reads the backend-specific .sql file sitting next to this module and
    executes it on a fresh connection.
    """
    backend = get_backend()
    schema_file = "schema.sql" if backend == "postgres" else "sqlite_schema.sql"
    schema_path = os.path.join(os.path.dirname(__file__), schema_file)

    with open(schema_path, "r") as f:
        schema = f.read()

    conn = get_conn()
    try:
        if backend == "sqlite":
            # executescript handles multiple statements and comments correctly
            conn.executescript(schema)
        else:
            # psycopg2 accepts a multi-statement script in a single execute.
            cur = conn.cursor()
            try:
                cur.execute(schema)
                conn.commit()
            finally:
                cur.close()
    finally:
        # Close even if the schema fails to apply — the original leaked the
        # connection on error.
        conn.close()
    print(f"Migration complete ({backend}).")
29
+
30
+
31
if __name__ == "__main__":
    # Allow running the migration standalone: python -m src.db.migrate
    migrate()
src/db/schema.sql ADDED
@@ -0,0 +1,53 @@
1
-- Postgres backend schema (requires the pgvector extension).
CREATE EXTENSION IF NOT EXISTS vector;

CREATE TABLE IF NOT EXISTS memories (
    id SERIAL PRIMARY KEY,
    user_id TEXT NOT NULL,
    content TEXT NOT NULL,
    memory_type TEXT NOT NULL DEFAULT 'trivial',
    importance FLOAT NOT NULL DEFAULT 0.5,
    recall_count INTEGER NOT NULL DEFAULT 0,
    last_accessed_at TIMESTAMP DEFAULT NOW(),
    created_at TIMESTAMP DEFAULT NOW(),
    category TEXT NOT NULL DEFAULT 'fact',
    agent_id TEXT NOT NULL DEFAULT 'user',
    visibility TEXT NOT NULL DEFAULT 'shared',
    embedding vector(768),
    -- Upsert target for the application's ON CONFLICT (user_id, content).
    UNIQUE (user_id, content)
);

-- Add agent_id and visibility to existing tables (safe to run multiple times)
DO $$ BEGIN
    IF NOT EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name='memories' AND column_name='agent_id') THEN
        ALTER TABLE memories ADD COLUMN agent_id TEXT NOT NULL DEFAULT 'user';
    END IF;
    IF NOT EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name='memories' AND column_name='visibility') THEN
        ALTER TABLE memories ADD COLUMN visibility TEXT NOT NULL DEFAULT 'shared';
    END IF;
END $$;

-- ivfflat index is only useful at scale (10k+ rows).
-- For small datasets, PostgreSQL does an exact scan automatically.
-- Uncomment when you have enough data:
-- CREATE INDEX IF NOT EXISTS memories_embedding_idx
--     ON memories USING ivfflat (embedding vector_cosine_ops)
--     WITH (lists = 100);

CREATE INDEX IF NOT EXISTS memories_user_id_idx ON memories (user_id);
CREATE INDEX IF NOT EXISTS memories_agent_id_idx ON memories (agent_id);

-- Agent registrations — API key auth for multi-agent systems
CREATE TABLE IF NOT EXISTS agent_registrations (
    id SERIAL PRIMARY KEY,
    agent_id TEXT UNIQUE NOT NULL,
    user_id TEXT NOT NULL,
    api_key_hash TEXT UNIQUE NOT NULL,      -- SHA-256 hash, never store plaintext
    can_read TEXT[] DEFAULT '{}',           -- agent_ids this agent can read from (empty = all shared)
    can_write TEXT[] DEFAULT ARRAY['shared', 'private'],
    description TEXT DEFAULT '',
    created_at TIMESTAMP DEFAULT NOW(),
    revoked_at TIMESTAMP                    -- NULL = active
);

CREATE INDEX IF NOT EXISTS agent_reg_user_id_idx ON agent_registrations (user_id);
CREATE INDEX IF NOT EXISTS agent_reg_api_key_idx ON agent_registrations (api_key_hash);
@@ -0,0 +1,38 @@
1
-- YourMemory SQLite schema (zero-setup default backend)
-- Embeddings stored as JSON TEXT; cosine similarity computed in Python.

CREATE TABLE IF NOT EXISTS memories (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    user_id TEXT NOT NULL,
    content TEXT NOT NULL,
    memory_type TEXT NOT NULL DEFAULT 'trivial',
    importance REAL NOT NULL DEFAULT 0.5,
    last_accessed_at TEXT DEFAULT (datetime('now')),
    recall_count INTEGER NOT NULL DEFAULT 0,
    created_at TEXT DEFAULT (datetime('now')),
    category TEXT NOT NULL DEFAULT 'fact',
    agent_id TEXT NOT NULL DEFAULT 'user',
    visibility TEXT NOT NULL DEFAULT 'shared',
    embedding TEXT,  -- JSON array of floats (768 dims)
    -- Upsert target for the application's ON CONFLICT (user_id, content).
    UNIQUE (user_id, content)
);

CREATE INDEX IF NOT EXISTS memories_user_id_idx ON memories (user_id);
CREATE INDEX IF NOT EXISTS memories_agent_id_idx ON memories (agent_id);

-- Agent registrations — API key auth for multi-agent systems
-- can_read / can_write stored as JSON arrays (TEXT) instead of Postgres TEXT[]
CREATE TABLE IF NOT EXISTS agent_registrations (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    agent_id TEXT UNIQUE NOT NULL,
    user_id TEXT NOT NULL,
    api_key_hash TEXT UNIQUE NOT NULL,  -- SHA-256 hash, never store plaintext
    can_read TEXT DEFAULT '[]',
    can_write TEXT DEFAULT '["shared", "private"]',
    description TEXT DEFAULT '',
    created_at TEXT DEFAULT (datetime('now')),
    revoked_at TEXT  -- NULL = active
);

CREATE INDEX IF NOT EXISTS agent_reg_user_id_idx ON agent_registrations (user_id);
CREATE INDEX IF NOT EXISTS agent_reg_api_key_idx ON agent_registrations (api_key_hash);
+ CREATE INDEX IF NOT EXISTS agent_reg_api_key_idx ON agent_registrations (api_key_hash);