@aeriondyseti/vector-memory-mcp 2.2.2 → 2.2.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (31) hide show
  1. package/README.md +33 -13
  2. package/package.json +8 -7
  3. package/scripts/lancedb-extract.ts +111 -46
  4. package/scripts/migrate-from-lancedb.ts +2 -2
  5. package/scripts/smoke-test.ts +1 -1
  6. package/scripts/sync-version.ts +35 -0
  7. package/scripts/warmup.ts +2 -2
  8. package/{src → server}/config/index.ts +10 -2
  9. package/{src/db → server/core}/connection.ts +10 -2
  10. package/{src/db → server/core}/conversation.repository.ts +1 -1
  11. package/{src/services → server/core}/conversation.service.ts +2 -2
  12. package/{src/db → server/core}/memory.repository.ts +5 -1
  13. package/{src/services → server/core}/memory.service.ts +20 -4
  14. package/server/core/migration.service.ts +882 -0
  15. package/server/core/migrations.ts +115 -0
  16. package/{src/services → server/core}/parsers/claude-code.parser.ts +1 -1
  17. package/{src/services → server/core}/parsers/types.ts +1 -1
  18. package/{src → server}/index.ts +13 -10
  19. package/{src → server}/migration.ts +2 -2
  20. package/{src → server/transports}/http/mcp-transport.ts +2 -2
  21. package/{src → server/transports}/http/server.ts +34 -4
  22. package/{src → server/transports}/mcp/handlers.ts +5 -5
  23. package/server/transports/mcp/resources.ts +161 -0
  24. package/{src → server/transports}/mcp/server.ts +14 -3
  25. package/server/utils/formatting.ts +143 -0
  26. package/src/db/migrations.ts +0 -108
  27. /package/{src/types → server/core}/conversation.ts +0 -0
  28. /package/{src/services → server/core}/embeddings.service.ts +0 -0
  29. /package/{src/types → server/core}/memory.ts +0 -0
  30. /package/{src/db → server/core}/sqlite-utils.ts +0 -0
  31. /package/{src → server/transports}/mcp/tools.ts +0 -0
package/README.md CHANGED
@@ -20,14 +20,27 @@ A local-first MCP server that provides vector-based memory storage. Uses local e
20
20
 
21
21
  ---
22
22
 
23
- ## Quick Start
23
+ ## Installation
24
24
 
25
- ### Prerequisites
25
+ There are two ways to install Vector Memory, depending on how much integration you want.
26
26
 
27
- - [Bun](https://bun.sh/) 1.0+
28
- - An MCP-compatible client (Claude Code, Claude Desktop, etc.)
27
+ ### Option A: Claude Code Plugin (recommended)
28
+
29
+ Install as a plugin to get the full experience: MCP server, session lifecycle hooks, waypoint skills, and context monitoring — all managed automatically.
30
+
31
+ ```bash
32
+ # Add the marketplace
33
+ claude plugin marketplace add AerionDyseti/vector-memory-mcp
34
+
35
+ # Install the plugin
36
+ claude plugin install vector-memory@vector-memory-mcp
37
+ ```
38
+
39
+ This clones the repo and runs the MCP server directly from source. Hooks handle session start/clear/compact events, and skills provide `/waypoint:set`, `/waypoint:get`, and memory usage guidance.
40
+
41
+ ### Option B: MCP Server Only
29
42
 
30
- ### Install
43
+ Install just the MCP server via npm if you want memory storage without hooks or skills, or if you're using a non-Claude Code MCP client.
31
44
 
32
45
  ```bash
33
46
  bun install -g @aeriondyseti/vector-memory-mcp
@@ -35,9 +48,7 @@ bun install -g @aeriondyseti/vector-memory-mcp
35
48
 
36
49
  > First install downloads ML models (~90MB). This may take a minute.
37
50
 
38
- ### Configure
39
-
40
- Add to your MCP client config (e.g., `~/.claude/settings.json`):
51
+ Then add to your MCP client config (e.g., `~/.claude/settings.json`):
41
52
 
42
53
  ```json
43
54
  {
@@ -51,9 +62,16 @@ Add to your MCP client config (e.g., `~/.claude/settings.json`):
51
62
  }
52
63
  ```
53
64
 
54
- ### Use
65
+ ### Prerequisites
55
66
 
56
- Restart your MCP client. You now have access to:
67
+ - [Bun](https://bun.sh/) 1.0+
68
+ - An MCP-compatible client (Claude Code, Claude Desktop, etc.)
69
+
70
+ ---
71
+
72
+ ## Tools
73
+
74
+ Restart your MCP client after installation. You now have access to:
57
75
 
58
76
  | Tool | Description |
59
77
  |------|-------------|
@@ -116,7 +134,9 @@ CLI flags:
116
134
 
117
135
  ## Release Channels
118
136
 
119
- The stable release is what you get by default:
137
+ **Plugin users:** The plugin tracks the repo's default branch. To switch channels, reinstall from a specific branch or tag.
138
+
139
+ **npm users:** The stable release is what you get by default:
120
140
 
121
141
  ```bash
122
142
  bun install -g @aeriondyseti/vector-memory-mcp
@@ -124,8 +144,8 @@ bun install -g @aeriondyseti/vector-memory-mcp
124
144
 
125
145
  Pre-release channels are available for testing upcoming changes. **These are unstable and may break without notice — use at your own risk.**
126
146
 
127
- | Channel | Install | Description |
128
- |---------|---------|-------------|
147
+ | Channel | npm | Description |
148
+ |---------|-----|-------------|
129
149
  | `@latest` | *(default)* | Stable releases |
130
150
  | `@rc` | `@aeriondyseti/vector-memory-mcp@rc` | Release candidates — final testing before stable |
131
151
  | `@dev` | `@aeriondyseti/vector-memory-mcp@dev` | Development builds — latest features, least stable |
package/package.json CHANGED
@@ -1,14 +1,14 @@
1
1
  {
2
2
  "name": "@aeriondyseti/vector-memory-mcp",
3
- "version": "2.2.2",
3
+ "version": "2.2.6",
4
4
  "description": "A zero-configuration RAG memory server for MCP clients",
5
5
  "type": "module",
6
- "main": "src/index.ts",
6
+ "main": "server/index.ts",
7
7
  "bin": {
8
- "vector-memory-mcp": "src/index.ts"
8
+ "vector-memory-mcp": "server/index.ts"
9
9
  },
10
10
  "files": [
11
- "src",
11
+ "server",
12
12
  "scripts",
13
13
  "README.md",
14
14
  "LICENSE"
@@ -23,8 +23,8 @@
23
23
  },
24
24
  "homepage": "https://github.com/aeriondyseti/vector-memory-mcp#readme",
25
25
  "scripts": {
26
- "start": "bun run src/index.ts",
27
- "dev": "bun --watch run src/index.ts",
26
+ "start": "bun run server/index.ts",
27
+ "dev": "bun --watch run server/index.ts",
28
28
  "typecheck": "bunx tsc --noEmit",
29
29
  "test": "bun run scripts/test-runner.ts",
30
30
  "test:raw": "bun test --preload ./tests/preload.ts",
@@ -35,7 +35,8 @@
35
35
  "smoke": "bun run scripts/smoke-test.ts",
36
36
  "warmup": "bun run scripts/warmup.ts",
37
37
  "postinstall": "bun run scripts/warmup.ts",
38
- "prepublishOnly": "bunx tsc --noEmit"
38
+ "prepublishOnly": "bunx tsc --noEmit",
39
+ "postversion": "bun scripts/sync-version.ts"
39
40
  },
40
41
  "keywords": [
41
42
  "mcp",
@@ -14,24 +14,59 @@ if (!source) {
14
14
  process.exit(1);
15
15
  }
16
16
 
17
- /**
18
- * Read a value from an Arrow column at a given row index.
19
 - * Arrow timestamp columns return BigInt; we convert to epoch-ms here
20
- * without going through Arrow's bigIntToNumber safety check.
21
- */
17
+ // Arrow TimeUnit enum → divisor to convert to milliseconds.
18
+ // 0=SECOND, 1=MILLISECOND, 2=MICROSECOND, 3=NANOSECOND
19
 + // Negative divisor = multiply (seconds → ms needs ×1000).
20
+ const TIME_UNIT_TO_MS_DIVISOR: Record<number, bigint> = {
21
+ 0: -1000n, // seconds → ms (multiply by 1000)
22
+ 1: 1n, // ms → no conversion
23
+ 2: 1000n, // μs → ms
24
+ 3: 1000000n, // ns → ms
25
+ };
26
+
27
+ function buildTimestampDivisors(schema: any): Map<string, bigint> {
28
+ const map = new Map<string, bigint>();
29
+ for (const field of schema.fields) {
30
+ if (field.type.typeId === 10) {
31
+ map.set(field.name, TIME_UNIT_TO_MS_DIVISOR[field.type.unit] ?? 1n);
32
+ }
33
+ }
34
+ return map;
35
+ }
36
+
22
37
  function columnValue(batch: any, colName: string, rowIdx: number): unknown {
23
38
  const col = batch.getChild(colName);
24
39
  if (!col) return undefined;
25
- return col.get(rowIdx);
40
+ try {
41
+ return col.get(rowIdx);
42
+ } catch {
43
+ // Arrow's getter can throw on BigInt timestamps exceeding MAX_SAFE_INTEGER;
44
+ // fall back to the raw typed array.
45
+ let offset = rowIdx;
46
+ for (const data of col.data) {
47
+ if (offset < data.length) {
48
+ return (data.values instanceof BigInt64Array || data.values instanceof BigUint64Array)
49
+ ? data.values[offset]
50
+ : null;
51
+ }
52
+ offset -= data.length;
53
+ }
54
+ return null;
55
+ }
26
56
  }
27
57
 
28
- function toEpochMs(value: unknown): number {
29
- if (typeof value === "number") return value;
58
+ function toEpochMs(value: unknown, divisor: bigint = 1n): number {
59
+ if (value == null) return Date.now();
30
60
  if (value instanceof Date) return value.getTime();
31
61
  if (typeof value === "bigint") {
32
- // Arrow timestamps are microseconds; convert to milliseconds.
33
- const ms = value / 1000n;
34
- return Number(ms);
62
 + if (divisor < 0n) return Number(value * -divisor); // seconds → ms
63
+ if (divisor === 1n) return Number(value);
64
+ return Number(value / divisor);
65
+ }
66
+ if (typeof value === "number") {
67
+ if (divisor < 0n) return value * Number(-divisor);
68
+ if (divisor === 1n) return value;
69
+ return Math.floor(value / Number(divisor));
35
70
  }
36
71
  return Date.now();
37
72
  }
@@ -49,6 +84,7 @@ function toFloatArray(vec: unknown): number[] {
49
84
  return [];
50
85
  }
51
86
 
87
+ const BATCH_SIZE = 100;
52
88
  const lancedb = await import("@lancedb/lancedb");
53
89
  const db = await lancedb.connect(source);
54
90
  const tableNames = await db.tableNames();
@@ -64,27 +100,45 @@ if (tableNames.includes("memories")) {
64
100
  const total = await table.countRows();
65
101
  console.error(`Reading ${total} memories...`);
66
102
 
67
- // Use toArrow() to get raw Arrow RecordBatches, bypassing StructRow
68
- // property accessors that throw on BigInt timestamps.
69
- const arrowTable = await table.query().toArrow();
70
- for (const batch of arrowTable.batches) {
71
- for (let i = 0; i < batch.numRows; i++) {
72
- const lastAccessed = columnValue(batch, "last_accessed", i);
73
- result.memories.push({
74
- id: columnValue(batch, "id", i),
75
- content: columnValue(batch, "content", i),
76
- metadata: columnValue(batch, "metadata", i) ?? "{}",
77
- vector: toFloatArray(columnValue(batch, "vector", i)),
78
- created_at: toEpochMs(columnValue(batch, "created_at", i)),
79
- updated_at: toEpochMs(columnValue(batch, "updated_at", i)),
80
- last_accessed: lastAccessed != null ? toEpochMs(lastAccessed) : null,
81
- superseded_by: columnValue(batch, "superseded_by", i) ?? null,
82
- usefulness: columnValue(batch, "usefulness", i) ?? 0,
83
- access_count: columnValue(batch, "access_count", i) ?? 0,
84
- });
103
+ // Paginated scan — query().toArrow() without offset/limit returns
104
+ // non-deterministic results that can duplicate some rows and skip others.
105
+ const schemaSample = await table.query().limit(1).toArrow();
106
+ const tsDivisors = buildTimestampDivisors(schemaSample.schema);
107
+ const seen = new Map<string, any>();
108
+
109
+ for (let offset = 0; offset < total; offset += BATCH_SIZE) {
110
+ const arrowTable = await table.query().offset(offset).limit(BATCH_SIZE).toArrow();
111
+ for (const batch of arrowTable.batches) {
112
+ for (let i = 0; i < batch.numRows; i++) {
113
+ const id = columnValue(batch, "id", i) as string;
114
+ const content = columnValue(batch, "content", i) as string;
115
+ const lastAccessed = columnValue(batch, "last_accessed", i);
116
+ const accessedMs = lastAccessed != null ? toEpochMs(lastAccessed, tsDivisors.get("last_accessed")) : null;
117
+ // Deduplicate by ID: prefer most recently accessed, then longest content.
118
+ const existing = seen.get(id);
119
+ if (existing) {
120
+ const existingAccess = existing.last_accessed ?? 0;
121
+ const newAccess = accessedMs ?? 0;
122
+ if (newAccess < existingAccess) continue;
123
+ if (newAccess === existingAccess && content.length <= existing.content.length) continue;
124
+ }
125
+ seen.set(id, {
126
+ id,
127
+ content,
128
+ metadata: columnValue(batch, "metadata", i) ?? "{}",
129
+ vector: toFloatArray(columnValue(batch, "vector", i)),
130
+ created_at: toEpochMs(columnValue(batch, "created_at", i), tsDivisors.get("created_at")),
131
+ updated_at: toEpochMs(columnValue(batch, "updated_at", i), tsDivisors.get("updated_at")),
132
+ last_accessed: accessedMs,
133
+ superseded_by: columnValue(batch, "superseded_by", i) ?? null,
134
+ usefulness: columnValue(batch, "usefulness", i) ?? 0,
135
+ access_count: columnValue(batch, "access_count", i) ?? 0,
136
+ });
137
+ }
85
138
  }
86
139
  }
87
- console.error(` ${result.memories.length} memories read`);
140
+ result.memories = [...seen.values()];
141
+ console.error(` ${result.memories.length} unique memories read (${total} rows scanned)`);
88
142
  }
89
143
 
90
144
  if (tableNames.includes("conversation_history")) {
@@ -92,24 +146,35 @@ if (tableNames.includes("conversation_history")) {
92
146
  const total = await table.countRows();
93
147
  console.error(`Reading ${total} conversation chunks...`);
94
148
 
95
- const arrowTable = await table.query().toArrow();
96
- for (const batch of arrowTable.batches) {
97
- for (let i = 0; i < batch.numRows; i++) {
98
- result.conversations.push({
99
- id: columnValue(batch, "id", i),
100
- content: columnValue(batch, "content", i),
101
- metadata: columnValue(batch, "metadata", i) ?? "{}",
102
- vector: toFloatArray(columnValue(batch, "vector", i)),
103
- created_at: toEpochMs(columnValue(batch, "created_at", i)),
104
- session_id: columnValue(batch, "session_id", i),
105
- role: columnValue(batch, "role", i),
106
- message_index_start: columnValue(batch, "message_index_start", i) ?? 0,
107
- message_index_end: columnValue(batch, "message_index_end", i) ?? 0,
108
- project: columnValue(batch, "project", i) ?? "",
109
- });
149
+ const schemaSample = await table.query().limit(1).toArrow();
150
+ const tsDivisors = buildTimestampDivisors(schemaSample.schema);
151
+ const seen = new Map<string, any>();
152
+
153
+ for (let offset = 0; offset < total; offset += BATCH_SIZE) {
154
+ const arrowTable = await table.query().offset(offset).limit(BATCH_SIZE).toArrow();
155
+ for (const batch of arrowTable.batches) {
156
+ for (let i = 0; i < batch.numRows; i++) {
157
+ const id = columnValue(batch, "id", i) as string;
158
+ const content = columnValue(batch, "content", i) as string;
159
+ const existing = seen.get(id);
160
+ if (existing && existing.content.length >= content.length) continue;
161
+ seen.set(id, {
162
+ id,
163
+ content,
164
+ metadata: columnValue(batch, "metadata", i) ?? "{}",
165
+ vector: toFloatArray(columnValue(batch, "vector", i)),
166
+ created_at: toEpochMs(columnValue(batch, "created_at", i), tsDivisors.get("created_at")),
167
+ session_id: columnValue(batch, "session_id", i),
168
+ role: columnValue(batch, "role", i),
169
+ message_index_start: columnValue(batch, "message_index_start", i) ?? 0,
170
+ message_index_end: columnValue(batch, "message_index_end", i) ?? 0,
171
+ project: columnValue(batch, "project", i) ?? "",
172
+ });
173
+ }
110
174
  }
111
175
  }
112
- console.error(` ${result.conversations.length} conversation chunks read`);
176
+ result.conversations = [...seen.values()];
177
+ console.error(` ${result.conversations.length} unique conversation chunks read (${total} rows scanned)`);
113
178
  }
114
179
 
115
180
  await db.close?.();
@@ -2,7 +2,7 @@
2
2
  /**
3
3
  * Standalone migration script: LanceDB → SQLite (sqlite-vec)
4
4
  *
5
- * This is a thin wrapper around src/migration.ts for direct invocation.
5
+ * This is a thin wrapper around server/migration.ts for direct invocation.
6
6
  * The preferred way to migrate is `vector-memory-mcp migrate`.
7
7
  *
8
8
  * Usage:
@@ -16,7 +16,7 @@
16
16
  * removed in the next major version.
17
17
  */
18
18
 
19
- import { migrate, formatMigrationSummary } from "../src/migration.js";
19
+ import { migrate, formatMigrationSummary } from "../server/migration.js";
20
20
 
21
21
  function parseArgs(): { source: string; target: string } {
22
22
  const args = process.argv.slice(2);
@@ -30,7 +30,7 @@ import { tmpdir } from "os";
30
30
 
31
31
  // ── Helpers ─────────────────────────────────────────────────────────
32
32
 
33
- const SERVER_PATH = join(import.meta.dir, "../src/index.ts");
33
+ const SERVER_PATH = join(import.meta.dir, "../server/index.ts");
34
34
 
35
35
  let passed = 0;
36
36
  let failed = 0;
@@ -0,0 +1,35 @@
1
+ #!/usr/bin/env bun
2
+ /**
3
+ * Sync version into .claude-plugin/ manifest files.
4
+ *
5
+ * Usage:
6
+ * bun scripts/sync-version.ts # reads version from package.json
7
+ * bun scripts/sync-version.ts 2.2.3-dev.4 # uses explicit version
8
+ */
9
+
10
+ import { readFileSync, writeFileSync } from "fs";
11
+ import { join } from "path";
12
+
13
+ const ROOT = join(import.meta.dir, "..");
14
+ const PKG_PATH = join(ROOT, "package.json");
15
+ const PLUGIN_PATH = join(ROOT, ".claude-plugin", "plugin.json");
16
+ const MARKETPLACE_PATH = join(ROOT, ".claude-plugin", "marketplace.json");
17
+
18
+ const explicit = process.argv[2];
19
+ const pkg = JSON.parse(readFileSync(PKG_PATH, "utf-8"));
20
+ const version: string = explicit ?? pkg.version;
21
+
22
+ // Stamp plugin.json
23
+ const plugin = JSON.parse(readFileSync(PLUGIN_PATH, "utf-8"));
24
+ plugin.version = version;
25
+ writeFileSync(PLUGIN_PATH, JSON.stringify(plugin, null, 2) + "\n");
26
+
27
+ // Stamp marketplace.json
28
+ const marketplace = JSON.parse(readFileSync(MARKETPLACE_PATH, "utf-8"));
29
+ marketplace.metadata.version = version;
30
+ for (const p of marketplace.plugins) {
31
+ p.version = version;
32
+ }
33
+ writeFileSync(MARKETPLACE_PATH, JSON.stringify(marketplace, null, 2) + "\n");
34
+
35
+ console.error(`Synced version ${version} → plugin.json, marketplace.json`);
package/scripts/warmup.ts CHANGED
@@ -5,8 +5,8 @@
5
5
  * This runs during installation to ensure everything is ready to use
6
6
  */
7
7
 
8
- import { config } from "../src/config/index.js";
9
- import { EmbeddingsService } from "../src/services/embeddings.service.js";
8
+ import { config } from "../server/config/index.js";
9
+ import { EmbeddingsService } from "../server/core/embeddings.service.js";
10
10
 
11
11
  async function warmup(): Promise<void> {
12
12
  console.log("🔥 Warming up vector-memory-mcp...");
@@ -28,6 +28,7 @@ export interface Config {
28
28
  httpPort: number;
29
29
  httpHost: string;
30
30
  enableHttp: boolean;
31
+ pluginMode: boolean;
31
32
  transportMode: TransportMode;
32
33
  conversationHistory: ConversationHistoryConfig;
33
34
  }
@@ -36,6 +37,7 @@ export interface ConfigOverrides {
36
37
  dbPath?: string;
37
38
  httpPort?: number;
38
39
  enableHttp?: boolean;
40
+ pluginMode?: boolean;
39
41
  transportMode?: TransportMode;
40
42
  enableHistory?: boolean;
41
43
  historyPath?: string;
@@ -55,8 +57,11 @@ function resolvePath(path: string): string {
55
57
 
56
58
  export function loadConfig(overrides: ConfigOverrides = {}): Config {
57
59
  const transportMode = overrides.transportMode ?? "stdio";
58
- // HTTP enabled by default (needed for hooks), can disable with --no-http
59
- const enableHttp = overrides.enableHttp ?? true;
60
+ const pluginMode = overrides.pluginMode ?? false;
61
+ // HTTP enabled only in plugin mode (hooks need it). --no-http overrides.
62
+ // Force HTTP on if transport mode requires it, regardless of plugin mode.
63
+ const enableHttp = overrides.enableHttp
64
+ ?? (transportMode === "http" || transportMode === "both" || pluginMode);
60
65
 
61
66
  return {
62
67
  dbPath: resolvePath(
@@ -74,6 +79,7 @@ export function loadConfig(overrides: ConfigOverrides = {}): Config {
74
79
  ?? DEFAULT_HTTP_PORT,
75
80
  httpHost: DEFAULT_HTTP_HOST,
76
81
  enableHttp,
82
+ pluginMode,
77
83
  transportMode,
78
84
  conversationHistory: {
79
85
  enabled: overrides.enableHistory ?? false,
@@ -95,6 +101,7 @@ export function parseCliArgs(argv: string[]): ConfigOverrides {
95
101
  "--db-file": String,
96
102
  "--port": Number,
97
103
  "--no-http": Boolean,
104
+ "--plugin": Boolean,
98
105
  "--enable-history": Boolean,
99
106
  "--history-path": String,
100
107
  "--history-weight": Number,
@@ -110,6 +117,7 @@ export function parseCliArgs(argv: string[]): ConfigOverrides {
110
117
  dbPath: args["--db-file"],
111
118
  httpPort: args["--port"],
112
119
  enableHttp: args["--no-http"] ? false : undefined,
120
+ pluginMode: args["--plugin"] ?? undefined,
113
121
  enableHistory: args["--enable-history"] ?? undefined,
114
122
  historyPath: args["--history-path"],
115
123
  historyWeight: args["--history-weight"],
@@ -1,7 +1,7 @@
1
1
  import { Database } from "bun:sqlite";
2
- import { mkdirSync } from "fs";
2
+ import { existsSync, mkdirSync } from "fs";
3
3
  import { dirname } from "path";
4
- import { runMigrations } from "./migrations.js";
4
+ import { removeVec0Tables, runMigrations } from "./migrations.js";
5
5
 
6
6
  /**
7
7
  * Open (or create) a SQLite database at the given path
@@ -9,6 +9,14 @@ import { runMigrations } from "./migrations.js";
9
9
  */
10
10
  export function connectToDatabase(dbPath: string): Database {
11
11
  mkdirSync(dirname(dbPath), { recursive: true });
12
+
13
+ // Remove orphaned vec0 virtual table entries before bun:sqlite opens the
14
+ // database. bun:sqlite cannot modify sqlite_master, so this uses the
15
+ // sqlite3 CLI while no other connection holds a lock.
16
+ if (existsSync(dbPath)) {
17
+ removeVec0Tables(dbPath);
18
+ }
19
+
12
20
  const db = new Database(dbPath);
13
21
 
14
22
  // WAL mode for concurrent read performance
@@ -2,7 +2,7 @@ import type { Database } from "bun:sqlite";
2
2
  import type {
3
3
  ConversationHybridRow,
4
4
  HistoryFilters,
5
- } from "../types/conversation.js";
5
+ } from "./conversation.js";
6
6
  import {
7
7
  serializeVector,
8
8
  safeParseJsonObject,
@@ -1,7 +1,7 @@
1
1
  import { createHash } from "crypto";
2
2
  import { readFile, writeFile, mkdir } from "fs/promises";
3
3
  import { dirname, join } from "path";
4
- import type { ConversationRepository } from "../db/conversation.repository.js";
4
+ import type { ConversationRepository } from "./conversation.repository.js";
5
5
  import type {
6
6
  ConversationChunk,
7
7
  ConversationHybridRow,
@@ -10,7 +10,7 @@ import type {
10
10
  ParsedMessage,
11
11
  SessionFileInfo,
12
12
  SessionIndexDetail,
13
- } from "../types/conversation.js";
13
+ } from "./conversation.js";
14
14
  import type { ConversationHistoryConfig } from "../config/index.js";
15
15
  import { resolveSessionLogPath } from "../config/index.js";
16
16
  import type { EmbeddingsService } from "./embeddings.service.js";
@@ -12,11 +12,15 @@ import {
12
12
  type Memory,
13
13
  type HybridRow,
14
14
  DELETED_TOMBSTONE,
15
- } from "../types/memory.js";
15
+ } from "./memory.js";
16
16
 
17
17
  export class MemoryRepository {
18
18
  constructor(private db: Database) {}
19
19
 
20
+ getDb(): Database {
21
+ return this.db;
22
+ }
23
+
20
24
  // ---------------------------------------------------------------------------
21
25
  // Row mapping
22
26
  // ---------------------------------------------------------------------------
@@ -1,8 +1,8 @@
1
1
  import { randomUUID, createHash } from "crypto";
2
- import type { Memory, SearchIntent, IntentProfile, HybridRow } from "../types/memory.js";
3
- import { isDeleted } from "../types/memory.js";
4
- import type { SearchResult, SearchOptions } from "../types/conversation.js";
5
- import type { MemoryRepository } from "../db/memory.repository.js";
2
+ import type { Memory, SearchIntent, IntentProfile, HybridRow } from "./memory.js";
3
+ import { isDeleted } from "./memory.js";
4
+ import type { SearchResult, SearchOptions } from "./conversation.js";
5
+ import type { MemoryRepository } from "./memory.repository.js";
6
6
  import type { EmbeddingsService } from "./embeddings.service.js";
7
7
  import type { ConversationHistoryService } from "./conversation.service.js";
8
8
 
@@ -32,6 +32,14 @@ export class MemoryService {
32
32
  return this.conversationService;
33
33
  }
34
34
 
35
+ getRepository(): MemoryRepository {
36
+ return this.repository;
37
+ }
38
+
39
+ getEmbeddings(): EmbeddingsService {
40
+ return this.embeddings;
41
+ }
42
+
35
43
  async store(
36
44
  content: string,
37
45
  metadata: Record<string, unknown> = {},
@@ -366,6 +374,14 @@ ${list(args.memory_ids)}`;
366
374
  };
367
375
 
368
376
  await this.repository.upsert(memory);
377
+
378
+ // Always update the global (no-project) waypoint so the session-start
379
+ // hook can find the most recent waypoint without knowing the project name.
380
+ const globalId = MemoryService.UUID_ZERO;
381
+ if (memory.id !== globalId) {
382
+ await this.repository.upsert({ ...memory, id: globalId });
383
+ }
384
+
369
385
  return memory;
370
386
  }
371
387