@context-vault/core 3.1.6 → 3.1.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. package/dist/capture.d.ts +1 -1
  2. package/dist/capture.d.ts.map +1 -1
  3. package/dist/capture.js +34 -47
  4. package/dist/capture.js.map +1 -1
  5. package/dist/categories.js +30 -30
  6. package/dist/config.d.ts +1 -1
  7. package/dist/config.d.ts.map +1 -1
  8. package/dist/config.js +37 -43
  9. package/dist/config.js.map +1 -1
  10. package/dist/constants.d.ts +1 -1
  11. package/dist/constants.d.ts.map +1 -1
  12. package/dist/constants.js +4 -4
  13. package/dist/constants.js.map +1 -1
  14. package/dist/db.d.ts +2 -2
  15. package/dist/db.d.ts.map +1 -1
  16. package/dist/db.js +21 -20
  17. package/dist/db.js.map +1 -1
  18. package/dist/embed.d.ts.map +1 -1
  19. package/dist/embed.js +11 -11
  20. package/dist/embed.js.map +1 -1
  21. package/dist/files.d.ts.map +1 -1
  22. package/dist/files.js +12 -13
  23. package/dist/files.js.map +1 -1
  24. package/dist/formatters.js +5 -5
  25. package/dist/frontmatter.d.ts.map +1 -1
  26. package/dist/frontmatter.js +23 -23
  27. package/dist/frontmatter.js.map +1 -1
  28. package/dist/index.d.ts +1 -1
  29. package/dist/index.d.ts.map +1 -1
  30. package/dist/index.js +58 -46
  31. package/dist/index.js.map +1 -1
  32. package/dist/ingest-url.d.ts.map +1 -1
  33. package/dist/ingest-url.js +30 -33
  34. package/dist/ingest-url.js.map +1 -1
  35. package/dist/main.d.ts +13 -13
  36. package/dist/main.d.ts.map +1 -1
  37. package/dist/main.js +12 -12
  38. package/dist/main.js.map +1 -1
  39. package/dist/search.d.ts +1 -1
  40. package/dist/search.d.ts.map +1 -1
  41. package/dist/search.js +20 -22
  42. package/dist/search.js.map +1 -1
  43. package/dist/types.d.ts +1 -1
  44. package/package.json +1 -1
  45. package/src/capture.ts +44 -81
  46. package/src/categories.ts +30 -30
  47. package/src/config.ts +45 -60
  48. package/src/constants.ts +8 -10
  49. package/src/db.ts +37 -56
  50. package/src/embed.ts +15 -26
  51. package/src/files.ts +13 -16
  52. package/src/formatters.ts +5 -5
  53. package/src/frontmatter.ts +26 -30
  54. package/src/index.ts +94 -100
  55. package/src/ingest-url.ts +56 -93
  56. package/src/main.ts +13 -18
  57. package/src/search.ts +34 -56
  58. package/src/types.ts +1 -1
package/src/config.ts CHANGED
@@ -1,17 +1,17 @@
1
- import { existsSync, readFileSync } from "node:fs";
2
- import { join, resolve } from "node:path";
3
- import { homedir } from "node:os";
4
- import { DEFAULT_GROWTH_THRESHOLDS, DEFAULT_LIFECYCLE } from "./constants.js";
5
- import type { VaultConfig } from "./types.js";
1
+ import { existsSync, readFileSync } from 'node:fs';
2
+ import { join, resolve } from 'node:path';
3
+ import { homedir } from 'node:os';
4
+ import { DEFAULT_GROWTH_THRESHOLDS, DEFAULT_LIFECYCLE } from './constants.js';
5
+ import type { VaultConfig } from './types.js';
6
6
 
7
7
  export function parseArgs(argv: string[]): Record<string, string | number> {
8
8
  const args: Record<string, string | number> = {};
9
9
  for (let i = 2; i < argv.length; i++) {
10
- if (argv[i] === "--vault-dir" && argv[i + 1]) args.vaultDir = argv[++i];
11
- else if (argv[i] === "--data-dir" && argv[i + 1]) args.dataDir = argv[++i];
12
- else if (argv[i] === "--db-path" && argv[i + 1]) args.dbPath = argv[++i];
13
- else if (argv[i] === "--dev-dir" && argv[i + 1]) args.devDir = argv[++i];
14
- else if (argv[i] === "--event-decay-days" && argv[i + 1])
10
+ if (argv[i] === '--vault-dir' && argv[i + 1]) args.vaultDir = argv[++i];
11
+ else if (argv[i] === '--data-dir' && argv[i + 1]) args.dataDir = argv[++i];
12
+ else if (argv[i] === '--db-path' && argv[i + 1]) args.dbPath = argv[++i];
13
+ else if (argv[i] === '--dev-dir' && argv[i + 1]) args.devDir = argv[++i];
14
+ else if (argv[i] === '--event-decay-days' && argv[i + 1])
15
15
  args.eventDecayDays = Number(argv[++i]);
16
16
  }
17
17
  return args;
@@ -25,23 +25,23 @@ export function resolveConfig(): VaultConfig {
25
25
  (cliArgs.dataDir as string) ||
26
26
  process.env.CONTEXT_VAULT_DATA_DIR ||
27
27
  process.env.CONTEXT_MCP_DATA_DIR ||
28
- join(HOME, ".context-mcp"),
28
+ join(HOME, '.context-mcp')
29
29
  );
30
30
  const config: VaultConfig = {
31
- vaultDir: join(HOME, ".vault"),
31
+ vaultDir: join(HOME, '.vault'),
32
32
  dataDir,
33
- dbPath: join(dataDir, "vault.db"),
34
- devDir: join(HOME, "dev"),
33
+ dbPath: join(dataDir, 'vault.db'),
34
+ devDir: join(HOME, 'dev'),
35
35
  eventDecayDays: 30,
36
36
  thresholds: { ...DEFAULT_GROWTH_THRESHOLDS },
37
37
  telemetry: false,
38
- resolvedFrom: "defaults",
38
+ resolvedFrom: 'defaults',
39
39
  recall: {
40
40
  maxResults: 5,
41
41
  maxOutputBytes: 2000,
42
42
  minRelevanceScore: 0.3,
43
43
  excludeKinds: [],
44
- excludeCategories: ["event"],
44
+ excludeCategories: ['event'],
45
45
  bodyTruncateChars: 400,
46
46
  },
47
47
  consolidation: {
@@ -52,14 +52,14 @@ export function resolveConfig(): VaultConfig {
52
52
  lifecycle: structuredClone(DEFAULT_LIFECYCLE),
53
53
  };
54
54
 
55
- const configPath = join(dataDir, "config.json");
55
+ const configPath = join(dataDir, 'config.json');
56
56
  if (existsSync(configPath)) {
57
57
  try {
58
- const fc = JSON.parse(readFileSync(configPath, "utf-8"));
58
+ const fc = JSON.parse(readFileSync(configPath, 'utf-8'));
59
59
  if (fc.vaultDir) config.vaultDir = fc.vaultDir;
60
60
  if (fc.dataDir) {
61
61
  config.dataDir = fc.dataDir;
62
- config.dbPath = join(resolve(fc.dataDir), "vault.db");
62
+ config.dbPath = join(resolve(fc.dataDir), 'vault.db');
63
63
  }
64
64
  if (fc.dbPath) config.dbPath = fc.dbPath;
65
65
  if (fc.devDir) config.devDir = fc.devDir;
@@ -94,100 +94,85 @@ export function resolveConfig(): VaultConfig {
94
94
  };
95
95
  }
96
96
  if (fc.telemetry != null) config.telemetry = fc.telemetry === true;
97
- if (fc.recall && typeof fc.recall === "object") {
97
+ if (fc.recall && typeof fc.recall === 'object') {
98
98
  const r = fc.recall;
99
- if (r.maxResults != null)
100
- config.recall.maxResults = Number(r.maxResults);
101
- if (r.maxOutputBytes != null)
102
- config.recall.maxOutputBytes = Number(r.maxOutputBytes);
99
+ if (r.maxResults != null) config.recall.maxResults = Number(r.maxResults);
100
+ if (r.maxOutputBytes != null) config.recall.maxOutputBytes = Number(r.maxOutputBytes);
103
101
  if (r.minRelevanceScore != null)
104
102
  config.recall.minRelevanceScore = Number(r.minRelevanceScore);
105
- if (Array.isArray(r.excludeKinds))
106
- config.recall.excludeKinds = r.excludeKinds;
103
+ if (Array.isArray(r.excludeKinds)) config.recall.excludeKinds = r.excludeKinds;
107
104
  if (Array.isArray(r.excludeCategories))
108
105
  config.recall.excludeCategories = r.excludeCategories;
109
106
  if (r.bodyTruncateChars != null)
110
107
  config.recall.bodyTruncateChars = Number(r.bodyTruncateChars);
111
108
  }
112
- if (fc.consolidation && typeof fc.consolidation === "object") {
109
+ if (fc.consolidation && typeof fc.consolidation === 'object') {
113
110
  const c = fc.consolidation;
114
- if (c.tagThreshold != null)
115
- config.consolidation.tagThreshold = Number(c.tagThreshold);
116
- if (c.maxAgeDays != null)
117
- config.consolidation.maxAgeDays = Number(c.maxAgeDays);
111
+ if (c.tagThreshold != null) config.consolidation.tagThreshold = Number(c.tagThreshold);
112
+ if (c.maxAgeDays != null) config.consolidation.maxAgeDays = Number(c.maxAgeDays);
118
113
  if (c.autoConsolidate != null)
119
114
  config.consolidation.autoConsolidate = c.autoConsolidate === true;
120
115
  }
121
- if (fc.lifecycle && typeof fc.lifecycle === "object") {
116
+ if (fc.lifecycle && typeof fc.lifecycle === 'object') {
122
117
  for (const [tier, rules] of Object.entries(fc.lifecycle)) {
123
- if (rules && typeof rules === "object") {
118
+ if (rules && typeof rules === 'object') {
124
119
  if (!config.lifecycle[tier]) config.lifecycle[tier] = {};
125
120
  if ((rules as Record<string, unknown>).archiveAfterDays != null)
126
121
  config.lifecycle[tier].archiveAfterDays = Number(
127
- (rules as Record<string, unknown>).archiveAfterDays,
122
+ (rules as Record<string, unknown>).archiveAfterDays
128
123
  );
129
124
  }
130
125
  }
131
126
  }
132
- config.resolvedFrom = "config file";
127
+ config.resolvedFrom = 'config file';
133
128
  } catch (e) {
134
- throw new Error(
135
- `[context-vault] Invalid config at ${configPath}: ${(e as Error).message}`,
136
- );
129
+ throw new Error(`[context-vault] Invalid config at ${configPath}: ${(e as Error).message}`);
137
130
  }
138
131
  }
139
132
  config.configPath = configPath;
140
133
 
141
- if (
142
- process.env.CONTEXT_VAULT_VAULT_DIR ||
143
- process.env.CONTEXT_MCP_VAULT_DIR
144
- ) {
145
- config.vaultDir =
146
- process.env.CONTEXT_VAULT_VAULT_DIR || process.env.CONTEXT_MCP_VAULT_DIR!;
147
- config.resolvedFrom = "env";
134
+ if (process.env.CONTEXT_VAULT_VAULT_DIR || process.env.CONTEXT_MCP_VAULT_DIR) {
135
+ config.vaultDir = process.env.CONTEXT_VAULT_VAULT_DIR || process.env.CONTEXT_MCP_VAULT_DIR!;
136
+ config.resolvedFrom = 'env';
148
137
  }
149
138
  if (process.env.CONTEXT_VAULT_DB_PATH || process.env.CONTEXT_MCP_DB_PATH) {
150
- config.dbPath =
151
- process.env.CONTEXT_VAULT_DB_PATH || process.env.CONTEXT_MCP_DB_PATH!;
152
- config.resolvedFrom = "env";
139
+ config.dbPath = process.env.CONTEXT_VAULT_DB_PATH || process.env.CONTEXT_MCP_DB_PATH!;
140
+ config.resolvedFrom = 'env';
153
141
  }
154
142
  if (process.env.CONTEXT_VAULT_DEV_DIR || process.env.CONTEXT_MCP_DEV_DIR) {
155
- config.devDir =
156
- process.env.CONTEXT_VAULT_DEV_DIR || process.env.CONTEXT_MCP_DEV_DIR!;
157
- config.resolvedFrom = "env";
143
+ config.devDir = process.env.CONTEXT_VAULT_DEV_DIR || process.env.CONTEXT_MCP_DEV_DIR!;
144
+ config.resolvedFrom = 'env';
158
145
  }
159
146
  if (
160
147
  process.env.CONTEXT_VAULT_EVENT_DECAY_DAYS != null ||
161
148
  process.env.CONTEXT_MCP_EVENT_DECAY_DAYS != null
162
149
  ) {
163
150
  config.eventDecayDays = Number(
164
- process.env.CONTEXT_VAULT_EVENT_DECAY_DAYS ??
165
- process.env.CONTEXT_MCP_EVENT_DECAY_DAYS,
151
+ process.env.CONTEXT_VAULT_EVENT_DECAY_DAYS ?? process.env.CONTEXT_MCP_EVENT_DECAY_DAYS
166
152
  );
167
- config.resolvedFrom = "env";
153
+ config.resolvedFrom = 'env';
168
154
  }
169
155
 
170
156
  if (process.env.CONTEXT_VAULT_TELEMETRY !== undefined) {
171
157
  config.telemetry =
172
- process.env.CONTEXT_VAULT_TELEMETRY === "1" ||
173
- process.env.CONTEXT_VAULT_TELEMETRY === "true";
158
+ process.env.CONTEXT_VAULT_TELEMETRY === '1' || process.env.CONTEXT_VAULT_TELEMETRY === 'true';
174
159
  }
175
160
 
176
161
  if (cliArgs.vaultDir) {
177
162
  config.vaultDir = cliArgs.vaultDir as string;
178
- config.resolvedFrom = "CLI args";
163
+ config.resolvedFrom = 'CLI args';
179
164
  }
180
165
  if (cliArgs.dbPath) {
181
166
  config.dbPath = cliArgs.dbPath as string;
182
- config.resolvedFrom = "CLI args";
167
+ config.resolvedFrom = 'CLI args';
183
168
  }
184
169
  if (cliArgs.devDir) {
185
170
  config.devDir = cliArgs.devDir as string;
186
- config.resolvedFrom = "CLI args";
171
+ config.resolvedFrom = 'CLI args';
187
172
  }
188
173
  if (cliArgs.eventDecayDays != null) {
189
174
  config.eventDecayDays = cliArgs.eventDecayDays as number;
190
- config.resolvedFrom = "CLI args";
175
+ config.resolvedFrom = 'CLI args';
191
176
  }
192
177
 
193
178
  config.vaultDir = resolve(config.vaultDir);
package/src/constants.ts CHANGED
@@ -1,8 +1,7 @@
1
- export const APP_URL = "https://app.context-vault.com";
2
- export const API_URL = "https://api.context-vault.com";
3
- export const MARKETING_URL = "https://contextvault.dev";
4
- export const GITHUB_ISSUES_URL =
5
- "https://github.com/fellanH/context-vault/issues";
1
+ export const APP_URL = 'https://app.context-vault.com';
2
+ export const API_URL = 'https://api.context-vault.com';
3
+ export const MARKETING_URL = 'https://context-vault.com';
4
+ export const GITHUB_ISSUES_URL = 'https://github.com/fellanH/context-vault/issues';
6
5
 
7
6
  export const MAX_BODY_LENGTH = 100 * 1024;
8
7
  export const MAX_TITLE_LENGTH = 500;
@@ -20,8 +19,7 @@ export const DEFAULT_GROWTH_THRESHOLDS = {
20
19
  eventsWithoutTtl: { warn: 200 },
21
20
  };
22
21
 
23
- export const DEFAULT_LIFECYCLE: Record<string, { archiveAfterDays?: number }> =
24
- {
25
- event: { archiveAfterDays: 90 },
26
- ephemeral: { archiveAfterDays: 30 },
27
- };
22
+ export const DEFAULT_LIFECYCLE: Record<string, { archiveAfterDays?: number }> = {
23
+ event: { archiveAfterDays: 90 },
24
+ ephemeral: { archiveAfterDays: 30 },
25
+ };
package/src/db.ts CHANGED
@@ -1,45 +1,45 @@
1
- import { unlinkSync, copyFileSync, existsSync } from "node:fs";
2
- import { DatabaseSync } from "node:sqlite";
3
- import type { PreparedStatements } from "./types.js";
1
+ import { unlinkSync, copyFileSync, existsSync } from 'node:fs';
2
+ import { DatabaseSync } from 'node:sqlite';
3
+ import type { PreparedStatements } from './types.js';
4
4
 
5
5
  export class NativeModuleError extends Error {
6
6
  originalError: Error;
7
7
  constructor(originalError: Error) {
8
8
  const diagnostic = formatNativeModuleError(originalError);
9
9
  super(diagnostic);
10
- this.name = "NativeModuleError";
10
+ this.name = 'NativeModuleError';
11
11
  this.originalError = originalError;
12
12
  }
13
13
  }
14
14
 
15
15
  function formatNativeModuleError(err: Error): string {
16
- const msg = err.message || "";
16
+ const msg = err.message || '';
17
17
  return [
18
18
  `sqlite-vec extension failed to load: ${msg}`,
19
- "",
19
+ '',
20
20
  ` Running Node.js: ${process.version} (${process.execPath})`,
21
- "",
22
- " Fix: Reinstall context-vault:",
23
- " npx -y context-vault@latest setup",
24
- ].join("\n");
21
+ '',
22
+ ' Fix: Reinstall context-vault:',
23
+ ' npx -y context-vault@latest setup',
24
+ ].join('\n');
25
25
  }
26
26
 
27
27
  let _sqliteVec: { load: (db: DatabaseSync) => void } | null = null;
28
28
 
29
29
  async function loadSqliteVec() {
30
30
  if (_sqliteVec) return _sqliteVec;
31
- const vecMod = await import("sqlite-vec");
31
+ const vecMod = await import('sqlite-vec');
32
32
  _sqliteVec = vecMod;
33
33
  return _sqliteVec;
34
34
  }
35
35
 
36
36
  function runTransaction(db: DatabaseSync, fn: () => void): void {
37
- db.exec("BEGIN");
37
+ db.exec('BEGIN');
38
38
  try {
39
39
  fn();
40
- db.exec("COMMIT");
40
+ db.exec('COMMIT');
41
41
  } catch (e) {
42
- db.exec("ROLLBACK");
42
+ db.exec('ROLLBACK');
43
43
  throw e;
44
44
  }
45
45
  }
@@ -105,9 +105,9 @@ export async function initDatabase(dbPath: string): Promise<DatabaseSync> {
105
105
 
106
106
  function createDb(path: string): DatabaseSync {
107
107
  const db = new DatabaseSync(path, { allowExtension: true });
108
- db.exec("PRAGMA journal_mode = WAL");
109
- db.exec("PRAGMA foreign_keys = ON");
110
- db.exec("PRAGMA busy_timeout = 3000");
108
+ db.exec('PRAGMA journal_mode = WAL');
109
+ db.exec('PRAGMA foreign_keys = ON');
110
+ db.exec('PRAGMA busy_timeout = 3000');
111
111
  try {
112
112
  sqliteVec.load(db);
113
113
  } catch (e) {
@@ -117,14 +117,11 @@ export async function initDatabase(dbPath: string): Promise<DatabaseSync> {
117
117
  }
118
118
 
119
119
  const db = createDb(dbPath);
120
- const version = (
121
- db.prepare("PRAGMA user_version").get() as { user_version: number }
122
- ).user_version;
120
+ const version = (db.prepare('PRAGMA user_version').get() as { user_version: number })
121
+ .user_version;
123
122
 
124
123
  if (version > 0 && version < 15) {
125
- console.error(
126
- `[context-vault] Schema v${version} is outdated. Rebuilding database...`,
127
- );
124
+ console.error(`[context-vault] Schema v${version} is outdated. Rebuilding database...`);
128
125
 
129
126
  const backupPath = `${dbPath}.v${version}.backup`;
130
127
  let backupSucceeded = false;
@@ -132,32 +129,30 @@ export async function initDatabase(dbPath: string): Promise<DatabaseSync> {
132
129
  db.close();
133
130
  if (existsSync(dbPath)) {
134
131
  copyFileSync(dbPath, backupPath);
135
- console.error(
136
- `[context-vault] Backed up old database to: ${backupPath}`,
137
- );
132
+ console.error(`[context-vault] Backed up old database to: ${backupPath}`);
138
133
  backupSucceeded = true;
139
134
  } else {
140
135
  backupSucceeded = true;
141
136
  }
142
137
  } catch (backupErr) {
143
138
  console.error(
144
- `[context-vault] Warning: could not backup old database: ${(backupErr as Error).message}`,
139
+ `[context-vault] Warning: could not backup old database: ${(backupErr as Error).message}`
145
140
  );
146
141
  }
147
142
 
148
143
  if (!backupSucceeded) {
149
144
  throw new Error(
150
145
  `[context-vault] Aborting schema migration: backup failed for ${dbPath}. ` +
151
- `Fix the backup issue or manually back up the file before upgrading.`,
146
+ `Fix the backup issue or manually back up the file before upgrading.`
152
147
  );
153
148
  }
154
149
 
155
150
  unlinkSync(dbPath);
156
151
  try {
157
- unlinkSync(dbPath + "-wal");
152
+ unlinkSync(dbPath + '-wal');
158
153
  } catch {}
159
154
  try {
160
- unlinkSync(dbPath + "-shm");
155
+ unlinkSync(dbPath + '-shm');
161
156
  } catch {}
162
157
 
163
158
  const freshDb = createDb(dbPath);
@@ -178,52 +173,38 @@ export function prepareStatements(db: DatabaseSync): PreparedStatements {
178
173
  try {
179
174
  return {
180
175
  insertEntry: db.prepare(
181
- `INSERT INTO vault (id, kind, category, title, body, meta, tags, source, file_path, identity_key, expires_at, created_at, updated_at, source_files, tier) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
176
+ `INSERT INTO vault (id, kind, category, title, body, meta, tags, source, file_path, identity_key, expires_at, created_at, updated_at, source_files, tier) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`
182
177
  ),
183
178
  updateEntry: db.prepare(
184
- `UPDATE vault SET title = ?, body = ?, meta = ?, tags = ?, source = ?, category = ?, identity_key = ?, expires_at = ?, updated_at = datetime('now') WHERE file_path = ?`,
179
+ `UPDATE vault SET title = ?, body = ?, meta = ?, tags = ?, source = ?, category = ?, identity_key = ?, expires_at = ?, updated_at = datetime('now') WHERE file_path = ?`
185
180
  ),
186
181
  deleteEntry: db.prepare(`DELETE FROM vault WHERE id = ?`),
187
182
  getRowid: db.prepare(`SELECT rowid FROM vault WHERE id = ?`),
188
183
  getRowidByPath: db.prepare(`SELECT rowid FROM vault WHERE file_path = ?`),
189
184
  getEntryById: db.prepare(`SELECT * FROM vault WHERE id = ?`),
190
- getByIdentityKey: db.prepare(
191
- `SELECT * FROM vault WHERE kind = ? AND identity_key = ?`,
192
- ),
185
+ getByIdentityKey: db.prepare(`SELECT * FROM vault WHERE kind = ? AND identity_key = ?`),
193
186
  upsertByIdentityKey: db.prepare(
194
- `UPDATE vault SET title = ?, body = ?, meta = ?, tags = ?, source = ?, category = ?, file_path = ?, expires_at = ?, source_files = ?, updated_at = datetime('now') WHERE kind = ? AND identity_key = ?`,
195
- ),
196
- updateSourceFiles: db.prepare(
197
- `UPDATE vault SET source_files = ? WHERE id = ?`,
198
- ),
199
- updateRelatedTo: db.prepare(
200
- `UPDATE vault SET related_to = ? WHERE id = ?`,
201
- ),
202
- insertVecStmt: db.prepare(
203
- `INSERT INTO vault_vec (rowid, embedding) VALUES (?, ?)`,
187
+ `UPDATE vault SET title = ?, body = ?, meta = ?, tags = ?, source = ?, category = ?, file_path = ?, expires_at = ?, source_files = ?, updated_at = datetime('now') WHERE kind = ? AND identity_key = ?`
204
188
  ),
189
+ updateSourceFiles: db.prepare(`UPDATE vault SET source_files = ? WHERE id = ?`),
190
+ updateRelatedTo: db.prepare(`UPDATE vault SET related_to = ? WHERE id = ?`),
191
+ insertVecStmt: db.prepare(`INSERT INTO vault_vec (rowid, embedding) VALUES (?, ?)`),
205
192
  deleteVecStmt: db.prepare(`DELETE FROM vault_vec WHERE rowid = ?`),
206
- updateSupersededBy: db.prepare(
207
- `UPDATE vault SET superseded_by = ? WHERE id = ?`,
208
- ),
193
+ updateSupersededBy: db.prepare(`UPDATE vault SET superseded_by = ? WHERE id = ?`),
209
194
  clearSupersededByRef: db.prepare(
210
- `UPDATE vault SET superseded_by = NULL WHERE superseded_by = ?`,
195
+ `UPDATE vault SET superseded_by = NULL WHERE superseded_by = ?`
211
196
  ),
212
197
  };
213
198
  } catch (e) {
214
199
  throw new Error(
215
200
  `Failed to prepare database statements. The database may be corrupted.\n` +
216
201
  `Try deleting and rebuilding: context-vault reindex\n` +
217
- `Original error: ${(e as Error).message}`,
202
+ `Original error: ${(e as Error).message}`
218
203
  );
219
204
  }
220
205
  }
221
206
 
222
- export function insertVec(
223
- stmts: PreparedStatements,
224
- rowid: number,
225
- embedding: Float32Array,
226
- ): void {
207
+ export function insertVec(stmts: PreparedStatements, rowid: number, embedding: Float32Array): void {
227
208
  const safeRowid = BigInt(rowid);
228
209
  if (safeRowid < 1n) throw new Error(`Invalid rowid: ${rowid}`);
229
210
  stmts.insertVecStmt.run(safeRowid, embedding);
@@ -237,7 +218,7 @@ export function deleteVec(stmts: PreparedStatements, rowid: number): void {
237
218
 
238
219
  export function testConnection(db: DatabaseSync): boolean {
239
220
  try {
240
- db.prepare("SELECT 1").get();
221
+ db.prepare('SELECT 1').get();
241
222
  return true;
242
223
  } catch {
243
224
  return false;
package/src/embed.ts CHANGED
@@ -1,6 +1,6 @@
1
- import { join } from "node:path";
2
- import { homedir } from "node:os";
3
- import { mkdirSync } from "node:fs";
1
+ import { join } from 'node:path';
2
+ import { homedir } from 'node:os';
3
+ import { mkdirSync } from 'node:fs';
4
4
 
5
5
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
6
6
  let extractor: any = null;
@@ -14,29 +14,20 @@ async function ensurePipeline(): Promise<typeof extractor> {
14
14
 
15
15
  loadingPromise = (async () => {
16
16
  try {
17
- const { pipeline, env } = await import("@huggingface/transformers");
17
+ const { pipeline, env } = await import('@huggingface/transformers');
18
18
 
19
- const modelCacheDir = join(homedir(), ".context-mcp", "models");
19
+ const modelCacheDir = join(homedir(), '.context-mcp', 'models');
20
20
  mkdirSync(modelCacheDir, { recursive: true });
21
21
  env.cacheDir = modelCacheDir;
22
22
 
23
- console.error(
24
- "[context-vault] Loading embedding model (first run may download ~22MB)...",
25
- );
26
- extractor = await pipeline(
27
- "feature-extraction",
28
- "Xenova/all-MiniLM-L6-v2",
29
- );
23
+ console.error('[context-vault] Loading embedding model (first run may download ~22MB)...');
24
+ extractor = await pipeline('feature-extraction', 'Xenova/all-MiniLM-L6-v2');
30
25
  embedAvailable = true;
31
26
  return extractor;
32
27
  } catch (e) {
33
28
  embedAvailable = false;
34
- console.error(
35
- `[context-vault] Failed to load embedding model: ${(e as Error).message}`,
36
- );
37
- console.error(
38
- `[context-vault] Semantic search disabled. Full-text search still works.`,
39
- );
29
+ console.error(`[context-vault] Failed to load embedding model: ${(e as Error).message}`);
30
+ console.error(`[context-vault] Semantic search disabled. Full-text search still works.`);
40
31
  return null;
41
32
  } finally {
42
33
  loadingPromise = null;
@@ -50,34 +41,32 @@ export async function embed(text: string): Promise<Float32Array | null> {
50
41
  const ext = await ensurePipeline();
51
42
  if (!ext) return null;
52
43
 
53
- const result = await ext([text], { pooling: "mean", normalize: true });
44
+ const result = await ext([text], { pooling: 'mean', normalize: true });
54
45
  if (!result?.data?.length) {
55
46
  extractor = null;
56
47
  embedAvailable = null;
57
48
  loadingPromise = null;
58
- throw new Error("Embedding pipeline returned empty result");
49
+ throw new Error('Embedding pipeline returned empty result');
59
50
  }
60
51
  return new Float32Array(result.data);
61
52
  }
62
53
 
63
- export async function embedBatch(
64
- texts: string[],
65
- ): Promise<(Float32Array | null)[]> {
54
+ export async function embedBatch(texts: string[]): Promise<(Float32Array | null)[]> {
66
55
  if (!texts.length) return [];
67
56
  const ext = await ensurePipeline();
68
57
  if (!ext) return texts.map(() => null);
69
58
 
70
- const result = await ext(texts, { pooling: "mean", normalize: true });
59
+ const result = await ext(texts, { pooling: 'mean', normalize: true });
71
60
  if (!result?.data?.length) {
72
61
  extractor = null;
73
62
  embedAvailable = null;
74
63
  loadingPromise = null;
75
- throw new Error("Embedding pipeline returned empty result");
64
+ throw new Error('Embedding pipeline returned empty result');
76
65
  }
77
66
  const dim = result.data.length / texts.length;
78
67
  if (!Number.isInteger(dim) || dim <= 0) {
79
68
  throw new Error(
80
- `Unexpected embedding dimension: ${result.data.length} / ${texts.length} = ${dim}`,
69
+ `Unexpected embedding dimension: ${result.data.length} / ${texts.length} = ${dim}`
81
70
  );
82
71
  }
83
72
  return texts.map((_, i) => result.data.subarray(i * dim, (i + 1) * dim));
package/src/files.ts CHANGED
@@ -1,18 +1,18 @@
1
- import { readdirSync } from "node:fs";
2
- import { join, resolve, sep } from "node:path";
3
- import { categoryDirFor } from "./categories.js";
1
+ import { readdirSync } from 'node:fs';
2
+ import { join, resolve, sep } from 'node:path';
3
+ import { categoryDirFor } from './categories.js';
4
4
 
5
- const CROCKFORD = "0123456789ABCDEFGHJKMNPQRSTVWXYZ";
5
+ const CROCKFORD = '0123456789ABCDEFGHJKMNPQRSTVWXYZ';
6
6
 
7
7
  export function ulid(): string {
8
8
  const now = Date.now();
9
- let ts = "";
9
+ let ts = '';
10
10
  let t = now;
11
11
  for (let i = 0; i < 10; i++) {
12
12
  ts = CROCKFORD[t & 31] + ts;
13
13
  t = Math.floor(t / 32);
14
14
  }
15
- let rand = "";
15
+ let rand = '';
16
16
  for (let i = 0; i < 16; i++) {
17
17
  rand += CROCKFORD[Math.floor(Math.random() * 32)];
18
18
  }
@@ -22,11 +22,10 @@ export function ulid(): string {
22
22
  export function slugify(text: string, maxLen = 60): string {
23
23
  let slug = text
24
24
  .toLowerCase()
25
- .replace(/[^a-z0-9]+/g, "-")
26
- .replace(/^-+|-+$/g, "");
25
+ .replace(/[^a-z0-9]+/g, '-')
26
+ .replace(/^-+|-+$/g, '');
27
27
  if (slug.length > maxLen) {
28
- slug =
29
- slug.slice(0, maxLen).replace(/-[^-]*$/, "") || slug.slice(0, maxLen);
28
+ slug = slug.slice(0, maxLen).replace(/-[^-]*$/, '') || slug.slice(0, maxLen);
30
29
  }
31
30
  return slug;
32
31
  }
@@ -51,9 +50,7 @@ export function safeJoin(base: string, ...parts: string[]): string {
51
50
  const resolvedBase = resolve(base);
52
51
  const result = resolve(join(base, ...parts));
53
52
  if (!result.startsWith(resolvedBase + sep) && result !== resolvedBase) {
54
- throw new Error(
55
- `Path traversal blocked: resolved path escapes base directory`,
56
- );
53
+ throw new Error(`Path traversal blocked: resolved path escapes base directory`);
57
54
  }
58
55
  return result;
59
56
  }
@@ -68,13 +65,13 @@ export function walkDir(dir: string): WalkResult[] {
68
65
  function walk(currentDir: string, relDir: string) {
69
66
  for (const entry of readdirSync(currentDir, { withFileTypes: true })) {
70
67
  const fullPath = join(currentDir, entry.name);
71
- if (entry.isDirectory() && !entry.name.startsWith("_")) {
68
+ if (entry.isDirectory() && !entry.name.startsWith('_')) {
72
69
  walk(fullPath, relDir ? join(relDir, entry.name) : entry.name);
73
- } else if (entry.isFile() && entry.name.endsWith(".md")) {
70
+ } else if (entry.isFile() && entry.name.endsWith('.md')) {
74
71
  results.push({ filePath: fullPath, relDir });
75
72
  }
76
73
  }
77
74
  }
78
- walk(dir, "");
75
+ walk(dir, '');
79
76
  return results;
80
77
  }
package/src/formatters.ts CHANGED
@@ -5,22 +5,22 @@ interface FormatInput {
5
5
  }
6
6
 
7
7
  const FORMATTERS: Record<string, (input: FormatInput) => string> = {
8
- insight: ({ body }) => "\n" + body + "\n",
8
+ insight: ({ body }) => '\n' + body + '\n',
9
9
 
10
10
  decision: ({ title, body }) => {
11
11
  const t = title || body.slice(0, 80);
12
- return "\n## Decision\n\n" + t + "\n\n## Rationale\n\n" + body + "\n";
12
+ return '\n## Decision\n\n' + t + '\n\n## Rationale\n\n' + body + '\n';
13
13
  },
14
14
 
15
15
  pattern: ({ title, body, meta }) => {
16
16
  const t = title || body.slice(0, 80);
17
- const lang = (meta?.language as string) || "";
18
- return "\n# " + t + "\n\n```" + lang + "\n" + body + "\n```\n";
17
+ const lang = (meta?.language as string) || '';
18
+ return '\n# ' + t + '\n\n```' + lang + '\n' + body + '\n```\n';
19
19
  },
20
20
  };
21
21
 
22
22
  const DEFAULT_FORMATTER = ({ title, body }: FormatInput): string =>
23
- title ? "\n# " + title + "\n\n" + body + "\n" : "\n" + body + "\n";
23
+ title ? '\n# ' + title + '\n\n' + body + '\n' : '\n' + body + '\n';
24
24
 
25
25
  export function formatBody(kind: string, input: FormatInput): string {
26
26
  const fn = FORMATTERS[kind] || DEFAULT_FORMATTER;