@neurcode-ai/cli 0.9.7 → 0.9.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,21 +1,58 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.normalizeIntent = normalizeIntent;
4
- exports.getPlanCachePath = getPlanCachePath;
5
- exports.computePlanCacheKey = computePlanCacheKey;
4
+ exports.getRepoIdentity = getRepoIdentity;
6
5
  exports.getGitRepoFingerprint = getGitRepoFingerprint;
7
6
  exports.getFilesystemFingerprintFromTree = getFilesystemFingerprintFromTree;
7
+ exports.computePromptHash = computePromptHash;
8
+ exports.computePolicyVersionHash = computePolicyVersionHash;
9
+ exports.getNeurcodeVersion = getNeurcodeVersion;
10
+ exports.computePlanCacheKey = computePlanCacheKey;
11
+ exports.getBrainDbPath = getBrainDbPath;
12
+ exports.getBrainPointerPath = getBrainPointerPath;
13
+ exports.getBrainFallbackCachePath = getBrainFallbackCachePath;
14
+ exports.getPlanCachePath = getPlanCachePath;
15
+ exports.getBrainStorageMode = getBrainStorageMode;
16
+ exports.setNoCodeStorageMode = setNoCodeStorageMode;
17
+ exports.isNoCodeStorageMode = isNoCodeStorageMode;
8
18
  exports.readCachedPlan = readCachedPlan;
19
+ exports.peekCachedPlan = peekCachedPlan;
9
20
  exports.writeCachedPlan = writeCachedPlan;
21
+ exports.listCachedPlans = listCachedPlans;
22
+ exports.deleteCachedPlans = deleteCachedPlans;
10
23
  exports.findSimilarCachedPlans = findSimilarCachedPlans;
24
+ exports.getBrainDbSizeBytes = getBrainDbSizeBytes;
25
+ exports.getBrainStoreBackend = getBrainStoreBackend;
26
+ exports.closeBrainStore = closeBrainStore;
11
27
  const child_process_1 = require("child_process");
12
28
  const crypto_1 = require("crypto");
13
29
  const fs_1 = require("fs");
14
30
  const path_1 = require("path");
15
31
  const secret_masking_1 = require("./secret-masking");
16
- const CACHE_SCHEMA_VERSION = 1;
17
- const CACHE_FILE_NAME = 'plan-cache.json';
18
- const MAX_ENTRIES = 50;
32
+ let sqliteCtor = null;
33
+ function getSqliteCtor() {
34
+ if (sqliteCtor)
35
+ return sqliteCtor;
36
+ try {
37
+ sqliteCtor = require('better-sqlite3');
38
+ return sqliteCtor;
39
+ }
40
+ catch {
41
+ return null;
42
+ }
43
+ }
44
+ const CACHE_SCHEMA_VERSION = 2;
45
+ const BRAIN_DB_FILE_NAME = 'brain.db';
46
+ const BRAIN_POINTER_FILE_NAME = 'brain.json';
47
+ const LEGACY_CACHE_FILE_NAME = 'plan-cache.json';
48
+ const FALLBACK_CACHE_FILE_NAME = 'plan-cache.json';
49
+ const MAX_ENTRIES = 500;
50
+ const NON_SEMANTIC_GIT_PATH_MARKERS = [
51
+ '.neurcode/',
52
+ '.gitignore',
53
+ ];
54
+ const dbConnections = new Map();
55
+ let cachedCliVersion = null;
19
56
  function sha256Hex(input) {
20
57
  return (0, crypto_1.createHash)('sha256').update(input).digest('hex');
21
58
  }
@@ -25,129 +62,669 @@ function normalizeIntent(intent) {
25
62
  .replace(/\s+/g, ' ')
26
63
  .toLowerCase();
27
64
  }
28
- function getPlanCachePath(cwd) {
29
- return (0, path_1.join)(cwd, '.neurcode', CACHE_FILE_NAME);
65
+ function sanitizeGitStatusPorcelain(status) {
66
+ const lines = status.split(/\r?\n/).filter((line) => line.trim().length > 0);
67
+ const filtered = lines.filter((line) => {
68
+ for (const marker of NON_SEMANTIC_GIT_PATH_MARKERS) {
69
+ if (line.includes(marker))
70
+ return false;
71
+ }
72
+ return true;
73
+ });
74
+ return filtered.join('\n');
30
75
  }
31
- function ensureNeurcodeDir(cwd) {
32
- const dir = (0, path_1.join)(cwd, '.neurcode');
33
- if (!(0, fs_1.existsSync)(dir)) {
34
- (0, fs_1.mkdirSync)(dir, { recursive: true });
76
+ function normalizeRepoIdentity(raw) {
77
+ const trimmed = raw.trim();
78
+ if (!trimmed)
79
+ return '';
80
+ // Convert SCP-like git syntax to URI form: git@host:owner/repo -> ssh://host/owner/repo
81
+ let normalized = trimmed;
82
+ const scpLike = /^[^@]+@[^:]+:.+$/;
83
+ if (scpLike.test(trimmed)) {
84
+ const at = trimmed.indexOf('@');
85
+ const colon = trimmed.indexOf(':', at);
86
+ const host = trimmed.slice(at + 1, colon);
87
+ const repoPath = trimmed.slice(colon + 1);
88
+ normalized = `ssh://${host}/${repoPath}`;
35
89
  }
36
- }
37
- function safeReadCacheFile(cachePath) {
38
- if (!(0, fs_1.existsSync)(cachePath)) {
39
- return { schemaVersion: CACHE_SCHEMA_VERSION, entries: {} };
90
+ try {
91
+ const parsed = new URL(normalized);
92
+ parsed.username = '';
93
+ parsed.password = '';
94
+ const noAuth = `${parsed.protocol}//${parsed.host}${parsed.pathname}`.replace(/\/+$/, '');
95
+ return noAuth.replace(/\.git$/i, '').toLowerCase();
96
+ }
97
+ catch {
98
+ return normalized.replace(/\/+$/, '').replace(/\.git$/i, '').toLowerCase();
40
99
  }
100
+ }
101
+ function fallbackRepoIdentity(cwd) {
102
+ return `local:${sha256Hex((0, path_1.resolve)(cwd))}`;
103
+ }
104
+ function getRepoIdentity(cwd) {
41
105
  try {
42
- const raw = (0, fs_1.readFileSync)(cachePath, 'utf-8');
43
- const parsed = JSON.parse(raw);
44
- if (parsed.schemaVersion !== CACHE_SCHEMA_VERSION || !parsed.entries || typeof parsed.entries !== 'object') {
45
- throw new Error('Invalid cache schema');
106
+ const inside = (0, child_process_1.execSync)('git rev-parse --is-inside-work-tree', {
107
+ cwd,
108
+ encoding: 'utf-8',
109
+ stdio: ['ignore', 'pipe', 'ignore'],
110
+ }).trim().toLowerCase();
111
+ if (inside === 'true') {
112
+ const remote = (0, child_process_1.execSync)('git config --get remote.origin.url', {
113
+ cwd,
114
+ encoding: 'utf-8',
115
+ stdio: ['ignore', 'pipe', 'ignore'],
116
+ }).trim();
117
+ const normalizedRemote = normalizeRepoIdentity(remote);
118
+ if (normalizedRemote)
119
+ return normalizedRemote;
46
120
  }
47
- const cache = parsed;
48
- // Best-effort: avoid persisting secrets in cached intents (older caches may include them).
49
- // This does not affect cache keys (they remain stable), it only sanitizes the stored intent text.
50
- try {
51
- for (const entry of Object.values(cache.entries)) {
52
- if (!entry?.input?.intent)
53
- continue;
54
- entry.input.intent = (0, secret_masking_1.maskSecretsInText)(entry.input.intent).masked;
121
+ }
122
+ catch {
123
+ // ignore
124
+ }
125
+ return fallbackRepoIdentity(cwd);
126
+ }
127
+ function getGitRepoFingerprint(cwd) {
128
+ try {
129
+ const inside = (0, child_process_1.execSync)('git rev-parse --is-inside-work-tree', {
130
+ cwd,
131
+ encoding: 'utf-8',
132
+ stdio: ['ignore', 'pipe', 'ignore'],
133
+ }).trim().toLowerCase();
134
+ if (inside !== 'true')
135
+ return null;
136
+ const repoIdentity = getRepoIdentity(cwd);
137
+ const headSha = (0, child_process_1.execSync)('git rev-parse HEAD', {
138
+ cwd,
139
+ encoding: 'utf-8',
140
+ stdio: ['ignore', 'pipe', 'ignore'],
141
+ }).trim();
142
+ const headTreeSha = (0, child_process_1.execSync)('git rev-parse HEAD^{tree}', {
143
+ cwd,
144
+ encoding: 'utf-8',
145
+ stdio: ['ignore', 'pipe', 'ignore'],
146
+ }).trim();
147
+ const status = (0, child_process_1.execSync)('git status --porcelain', {
148
+ cwd,
149
+ encoding: 'utf-8',
150
+ stdio: ['ignore', 'pipe', 'ignore'],
151
+ });
152
+ const workingTreeHash = sha256Hex(sanitizeGitStatusPorcelain(status));
153
+ return { kind: 'git', repoIdentity, headSha, headTreeSha, workingTreeHash };
154
+ }
155
+ catch {
156
+ return null;
157
+ }
158
+ }
159
+ function getFilesystemFingerprintFromTree(fileTree, cwd = process.cwd()) {
160
+ const normalized = [...fileTree].sort().join('\n');
161
+ return {
162
+ kind: 'filesystem',
163
+ repoIdentity: fallbackRepoIdentity(cwd),
164
+ fileTreeHash: sha256Hex(normalized),
165
+ };
166
+ }
167
+ function computePromptHash(input) {
168
+ const normalized = normalizeIntent(input.intent);
169
+ const safeIntent = (0, secret_masking_1.maskSecretsInText)(normalized).masked;
170
+ const payload = [
171
+ `intent=${safeIntent}`,
172
+ `ticketRef=${input.ticketRef || ''}`,
173
+ `contextHash=${input.contextHash || ''}`,
174
+ ].join('\n');
175
+ return sha256Hex(payload);
176
+ }
177
+ function collectPolicyFiles(cwd) {
178
+ const out = [];
179
+ const candidates = [
180
+ (0, path_1.join)(cwd, 'neurcode.policy.json'),
181
+ (0, path_1.join)(cwd, 'neurcode.rules.json'),
182
+ (0, path_1.join)(cwd, '.neurcode', 'policy.json'),
183
+ (0, path_1.join)(cwd, '.neurcode', 'rules.json'),
184
+ ];
185
+ for (const filePath of candidates) {
186
+ if ((0, fs_1.existsSync)(filePath))
187
+ out.push(filePath);
188
+ }
189
+ const policyDir = (0, path_1.join)(cwd, '.neurcode', 'policies');
190
+ if ((0, fs_1.existsSync)(policyDir)) {
191
+ const walk = (dir) => {
192
+ const entries = (0, fs_1.readdirSync)(dir, { withFileTypes: true });
193
+ for (const entry of entries) {
194
+ const full = (0, path_1.join)(dir, entry.name);
195
+ if (entry.isDirectory()) {
196
+ walk(full);
197
+ }
198
+ else if (entry.isFile()) {
199
+ out.push(full);
200
+ }
55
201
  }
202
+ };
203
+ try {
204
+ walk(policyDir);
56
205
  }
57
206
  catch {
58
- // ignore
207
+ // ignore unreadable dirs
59
208
  }
60
- return cache;
61
209
  }
62
- catch {
63
- // Preserve the corrupted file for debugging, but do not block the command.
210
+ return [...new Set(out)].sort();
211
+ }
212
+ function computePolicyVersionHash(cwd) {
213
+ const lines = [];
214
+ const envPolicyVersion = process.env.NEURCODE_POLICY_VERSION;
215
+ if (envPolicyVersion && envPolicyVersion.trim()) {
216
+ lines.push(`env:${envPolicyVersion.trim()}`);
217
+ }
218
+ for (const policyFile of collectPolicyFiles(cwd)) {
64
219
  try {
65
- const corruptPath = cachePath.replace(/\.json$/, `.corrupt-${Date.now()}.json`);
66
- (0, fs_1.renameSync)(cachePath, corruptPath);
220
+ const content = (0, fs_1.readFileSync)(policyFile, 'utf-8');
221
+ lines.push(`${policyFile}:${sha256Hex(content)}`);
67
222
  }
68
223
  catch {
69
- // ignore
224
+ // ignore unreadable files
70
225
  }
71
- return { schemaVersion: CACHE_SCHEMA_VERSION, entries: {} };
72
226
  }
227
+ if (lines.length === 0) {
228
+ lines.push('default-policy');
229
+ }
230
+ return sha256Hex(lines.join('\n'));
73
231
  }
74
- function atomicWrite(cachePath, data) {
75
- const tmpPath = `${cachePath}.tmp`;
76
- (0, fs_1.writeFileSync)(tmpPath, data, 'utf-8');
77
- (0, fs_1.renameSync)(tmpPath, cachePath);
78
- }
79
- function prune(cache) {
80
- const keys = Object.keys(cache.entries);
81
- if (keys.length <= MAX_ENTRIES)
82
- return;
83
- const sorted = keys
84
- .map((k) => cache.entries[k])
85
- .filter(Boolean)
86
- .sort((a, b) => {
87
- const aTime = Date.parse(a.lastUsedAt) || 0;
88
- const bTime = Date.parse(b.lastUsedAt) || 0;
89
- return aTime - bTime;
90
- });
91
- const toRemove = sorted.slice(0, Math.max(0, sorted.length - MAX_ENTRIES));
92
- for (const entry of toRemove) {
93
- delete cache.entries[entry.key];
232
+ function getNeurcodeVersion() {
233
+ if (cachedCliVersion)
234
+ return cachedCliVersion;
235
+ const envVersion = process.env.npm_package_version;
236
+ if (envVersion && envVersion.trim()) {
237
+ cachedCliVersion = envVersion.trim();
238
+ return cachedCliVersion;
239
+ }
240
+ const candidates = [
241
+ (0, path_1.join)(__dirname, '../../package.json'),
242
+ (0, path_1.join)(process.cwd(), 'packages/cli/package.json'),
243
+ (0, path_1.join)(process.cwd(), 'package.json'),
244
+ ];
245
+ for (const path of candidates) {
246
+ try {
247
+ if (!(0, fs_1.existsSync)(path))
248
+ continue;
249
+ const raw = (0, fs_1.readFileSync)(path, 'utf-8');
250
+ const parsed = JSON.parse(raw);
251
+ if (parsed.version && parsed.version.trim()) {
252
+ cachedCliVersion = parsed.version.trim();
253
+ return cachedCliVersion;
254
+ }
255
+ }
256
+ catch {
257
+ // ignore parse/read errors
258
+ }
94
259
  }
260
+ cachedCliVersion = 'unknown';
261
+ return cachedCliVersion;
95
262
  }
96
263
  function computePlanCacheKey(input) {
97
- const safeIntent = (0, secret_masking_1.maskSecretsInText)(input.intent).masked;
98
- // Use an explicit, stable string rather than JSON.stringify of arbitrary objects.
99
264
  const payload = [
100
265
  `v=${input.schemaVersion}`,
101
- `apiUrl=${input.apiUrl}`,
102
266
  `orgId=${input.orgId}`,
103
267
  `projectId=${input.projectId}`,
104
- `intent=${safeIntent}`,
105
- `ticketRef=${input.ticketRef || ''}`,
106
- `contextHash=${input.contextHash || ''}`,
107
- `repo.kind=${input.repo.kind}`,
268
+ `repoIdentity=${input.repo.repoIdentity}`,
269
+ `repoKind=${input.repo.kind}`,
108
270
  input.repo.kind === 'git'
109
- ? `repo.headSha=${input.repo.headSha};repo.headTreeSha=${input.repo.headTreeSha};repo.statusHash=${input.repo.statusHash}`
110
- : `repo.fileTreeHash=${input.repo.fileTreeHash}`,
271
+ ? [
272
+ `headSha=${input.repo.headSha}`,
273
+ `headTreeSha=${input.repo.headTreeSha}`,
274
+ `workingTreeHash=${input.repo.workingTreeHash}`,
275
+ ].join(';')
276
+ : `fileTreeHash=${input.repo.fileTreeHash}`,
277
+ `promptHash=${input.promptHash}`,
278
+ `policyVersionHash=${input.policyVersionHash}`,
279
+ `neurcodeVersion=${input.neurcodeVersion}`,
111
280
  ].join('\n');
112
281
  return sha256Hex(payload);
113
282
  }
114
- function getGitRepoFingerprint(cwd) {
283
+ function getBrainDbPath(cwd) {
284
+ return (0, path_1.join)(cwd, '.neurcode', BRAIN_DB_FILE_NAME);
285
+ }
286
+ function getBrainPointerPath(cwd) {
287
+ return (0, path_1.join)(cwd, '.neurcode', BRAIN_POINTER_FILE_NAME);
288
+ }
289
+ function getFallbackCachePath(cwd) {
290
+ return (0, path_1.join)(cwd, '.neurcode', FALLBACK_CACHE_FILE_NAME);
291
+ }
292
+ function getBrainFallbackCachePath(cwd) {
293
+ return getFallbackCachePath(cwd);
294
+ }
295
+ // Backward-compatible helper name retained for callers.
296
+ function getPlanCachePath(cwd) {
297
+ return getBrainDbPath(cwd);
298
+ }
299
+ function ensureNeurcodeDir(cwd) {
300
+ const dir = (0, path_1.join)(cwd, '.neurcode');
301
+ if (!(0, fs_1.existsSync)(dir)) {
302
+ (0, fs_1.mkdirSync)(dir, { recursive: true });
303
+ }
304
+ }
305
+ function readFallbackCache(cwd) {
115
306
  try {
116
- const inside = (0, child_process_1.execSync)('git rev-parse --is-inside-work-tree', { cwd, encoding: 'utf-8', stdio: ['ignore', 'pipe', 'ignore'] })
117
- .trim()
118
- .toLowerCase();
119
- if (inside !== 'true')
307
+ const path = getFallbackCachePath(cwd);
308
+ if (!(0, fs_1.existsSync)(path)) {
309
+ return { schemaVersion: CACHE_SCHEMA_VERSION, entries: {} };
310
+ }
311
+ const raw = (0, fs_1.readFileSync)(path, 'utf-8');
312
+ const parsed = JSON.parse(raw);
313
+ if (parsed.schemaVersion !== CACHE_SCHEMA_VERSION || !parsed.entries || typeof parsed.entries !== 'object') {
314
+ throw new Error('Invalid fallback cache schema');
315
+ }
316
+ return parsed;
317
+ }
318
+ catch {
319
+ return { schemaVersion: CACHE_SCHEMA_VERSION, entries: {} };
320
+ }
321
+ }
322
+ function writeFallbackCache(cwd, cache) {
323
+ try {
324
+ ensureNeurcodeDir(cwd);
325
+ const path = getFallbackCachePath(cwd);
326
+ const tmp = `${path}.tmp`;
327
+ (0, fs_1.writeFileSync)(tmp, JSON.stringify(cache, null, 2) + '\n', 'utf-8');
328
+ (0, fs_1.renameSync)(tmp, path);
329
+ }
330
+ catch {
331
+ // ignore
332
+ }
333
+ }
334
+ function pruneFallback(cache) {
335
+ const entries = Object.values(cache.entries);
336
+ if (entries.length <= MAX_ENTRIES)
337
+ return;
338
+ entries.sort((a, b) => {
339
+ const aTime = Date.parse(a.lastUsedAt) || 0;
340
+ const bTime = Date.parse(b.lastUsedAt) || 0;
341
+ return aTime - bTime;
342
+ });
343
+ const toDelete = entries.slice(0, Math.max(0, entries.length - MAX_ENTRIES));
344
+ for (const entry of toDelete) {
345
+ delete cache.entries[entry.key];
346
+ }
347
+ }
348
+ function readJsonSafe(path) {
349
+ try {
350
+ if (!(0, fs_1.existsSync)(path))
120
351
  return null;
121
- const headSha = (0, child_process_1.execSync)('git rev-parse HEAD', { cwd, encoding: 'utf-8', stdio: ['ignore', 'pipe', 'ignore'] }).trim();
122
- const headTreeSha = (0, child_process_1.execSync)('git rev-parse HEAD^{tree}', { cwd, encoding: 'utf-8', stdio: ['ignore', 'pipe', 'ignore'] }).trim();
123
- const status = (0, child_process_1.execSync)('git status --porcelain', { cwd, encoding: 'utf-8', stdio: ['ignore', 'pipe', 'ignore'] });
124
- const statusHash = sha256Hex(status);
125
- return { kind: 'git', headSha, headTreeSha, statusHash };
352
+ const raw = (0, fs_1.readFileSync)(path, 'utf-8');
353
+ return JSON.parse(raw);
126
354
  }
127
355
  catch {
128
356
  return null;
129
357
  }
130
358
  }
131
- function getFilesystemFingerprintFromTree(fileTree) {
132
- // Normalize ordering so the hash is stable even if scanning order changes.
133
- const normalized = [...fileTree].sort().join('\n');
134
- return { kind: 'filesystem', fileTreeHash: sha256Hex(normalized) };
359
+ function writePointer(cwd, pointer) {
360
+ try {
361
+ ensureNeurcodeDir(cwd);
362
+ const pointerPath = getBrainPointerPath(cwd);
363
+ (0, fs_1.writeFileSync)(pointerPath, JSON.stringify(pointer, null, 2) + '\n', 'utf-8');
364
+ }
365
+ catch {
366
+ // ignore
367
+ }
368
+ }
369
+ function updatePointer(cwd, patch) {
370
+ const existing = readJsonSafe(getBrainPointerPath(cwd));
371
+ const next = {
372
+ schemaVersion: 1,
373
+ dbPath: '.neurcode/brain.db',
374
+ repoIdentity: patch.repoIdentity ?? existing?.repoIdentity,
375
+ settings: {
376
+ noCodeStorage: patch.settings?.noCodeStorage ?? existing?.settings?.noCodeStorage ?? false,
377
+ },
378
+ updatedAt: new Date().toISOString(),
379
+ };
380
+ writePointer(cwd, next);
381
+ }
382
+ function getBrainStorageMode(cwd) {
383
+ const env = process.env.NEURCODE_BRAIN_NO_CODE_STORAGE;
384
+ if (typeof env === 'string' && env.trim()) {
385
+ const normalized = env.trim().toLowerCase();
386
+ const enabled = normalized === '1' || normalized === 'true' || normalized === 'yes' || normalized === 'on';
387
+ return { noCodeStorage: enabled, source: 'env' };
388
+ }
389
+ const pointer = readJsonSafe(getBrainPointerPath(cwd));
390
+ if (pointer?.settings && typeof pointer.settings.noCodeStorage === 'boolean') {
391
+ return { noCodeStorage: pointer.settings.noCodeStorage, source: 'pointer' };
392
+ }
393
+ return { noCodeStorage: false, source: 'default' };
394
+ }
395
+ function setNoCodeStorageMode(cwd, enabled) {
396
+ updatePointer(cwd, { settings: { noCodeStorage: enabled } });
397
+ }
398
+ function isNoCodeStorageMode(cwd) {
399
+ return getBrainStorageMode(cwd).noCodeStorage;
400
+ }
401
+ function initDbSchema(db) {
402
+ db.exec(`
403
+ CREATE TABLE IF NOT EXISTS brain_meta (
404
+ key TEXT PRIMARY KEY,
405
+ value TEXT NOT NULL
406
+ );
407
+
408
+ CREATE TABLE IF NOT EXISTS plan_cache (
409
+ key TEXT PRIMARY KEY,
410
+ created_at TEXT NOT NULL,
411
+ last_used_at TEXT NOT NULL,
412
+ use_count INTEGER NOT NULL DEFAULT 1,
413
+ org_id TEXT NOT NULL,
414
+ project_id TEXT NOT NULL,
415
+ repo_kind TEXT NOT NULL,
416
+ repo_identity TEXT NOT NULL,
417
+ head_sha TEXT,
418
+ head_tree_sha TEXT,
419
+ working_tree_hash TEXT,
420
+ file_tree_hash TEXT,
421
+ prompt_hash TEXT NOT NULL,
422
+ policy_version_hash TEXT NOT NULL,
423
+ neurcode_version TEXT NOT NULL,
424
+ intent_norm TEXT NOT NULL,
425
+ intent_hash TEXT NOT NULL,
426
+ ticket_ref TEXT,
427
+ context_hash TEXT,
428
+ response_json TEXT NOT NULL,
429
+ no_code_storage INTEGER NOT NULL DEFAULT 0
430
+ );
431
+
432
+ CREATE INDEX IF NOT EXISTS idx_plan_cache_scope ON plan_cache(org_id, project_id);
433
+ CREATE INDEX IF NOT EXISTS idx_plan_cache_lru ON plan_cache(last_used_at);
434
+ CREATE INDEX IF NOT EXISTS idx_plan_cache_repo ON plan_cache(repo_identity);
435
+ `);
436
+ }
437
+ function getMeta(db, key) {
438
+ try {
439
+ const row = db.prepare('SELECT value FROM brain_meta WHERE key = ?').get(key);
440
+ return row?.value || null;
441
+ }
442
+ catch {
443
+ return null;
444
+ }
445
+ }
446
+ function setMeta(db, key, value) {
447
+ try {
448
+ db.prepare(`
449
+ INSERT INTO brain_meta (key, value) VALUES (?, ?)
450
+ ON CONFLICT(key) DO UPDATE SET value = excluded.value
451
+ `).run(key, value);
452
+ }
453
+ catch {
454
+ // ignore
455
+ }
456
+ }
457
+ function sanitizeCachedResponseForStorage(response, noCodeStorage) {
458
+ if (!noCodeStorage) {
459
+ return response;
460
+ }
461
+ const summaryMasked = (0, secret_masking_1.maskSecretsInText)(response.plan.summary || '').masked;
462
+ const summaryHash = sha256Hex(summaryMasked);
463
+ const files = (response.plan.files || []).slice(0, 80).map((file) => ({
464
+ path: file.path,
465
+ action: file.action,
466
+ reason: [
467
+ file.reason ? `reasonHash=${sha256Hex((0, secret_masking_1.maskSecretsInText)(file.reason).masked)}` : null,
468
+ file.suggestion ? `suggestionHash=${sha256Hex((0, secret_masking_1.maskSecretsInText)(file.suggestion).masked)}` : null,
469
+ ].filter(Boolean).join(' | ') || undefined,
470
+ suggestion: undefined,
471
+ }));
472
+ const recommendations = (response.plan.recommendations || []).slice(0, 20).map((r) => {
473
+ const masked = (0, secret_masking_1.maskSecretsInText)(r).masked;
474
+ return `recHash=${sha256Hex(masked)}`;
475
+ });
476
+ return {
477
+ ...response,
478
+ plan: {
479
+ ...response.plan,
480
+ summary: `no-code-storage summaryHash=${summaryHash} files=${response.plan.files?.length || 0} recommendations=${response.plan.recommendations?.length || 0}`,
481
+ files,
482
+ recommendations,
483
+ },
484
+ };
485
+ }
486
+ function migrateLegacyJsonCache(cwd, db) {
487
+ const migrationKey = 'legacy_plan_cache_migrated_v1';
488
+ if (getMeta(db, migrationKey) === '1')
489
+ return;
490
+ const legacyPath = (0, path_1.join)(cwd, '.neurcode', LEGACY_CACHE_FILE_NAME);
491
+ if (!(0, fs_1.existsSync)(legacyPath)) {
492
+ setMeta(db, migrationKey, '1');
493
+ return;
494
+ }
495
+ try {
496
+ const raw = (0, fs_1.readFileSync)(legacyPath, 'utf-8');
497
+ const parsed = JSON.parse(raw);
498
+ const entries = parsed?.entries && typeof parsed.entries === 'object' ? Object.values(parsed.entries) : [];
499
+ const insertStmt = db.prepare(`
500
+ INSERT OR IGNORE INTO plan_cache (
501
+ key,
502
+ created_at,
503
+ last_used_at,
504
+ use_count,
505
+ org_id,
506
+ project_id,
507
+ repo_kind,
508
+ repo_identity,
509
+ head_sha,
510
+ head_tree_sha,
511
+ working_tree_hash,
512
+ file_tree_hash,
513
+ prompt_hash,
514
+ policy_version_hash,
515
+ neurcode_version,
516
+ intent_norm,
517
+ intent_hash,
518
+ ticket_ref,
519
+ context_hash,
520
+ response_json,
521
+ no_code_storage
522
+ ) VALUES (
523
+ @key,
524
+ @created_at,
525
+ @last_used_at,
526
+ @use_count,
527
+ @org_id,
528
+ @project_id,
529
+ @repo_kind,
530
+ @repo_identity,
531
+ @head_sha,
532
+ @head_tree_sha,
533
+ @working_tree_hash,
534
+ @file_tree_hash,
535
+ @prompt_hash,
536
+ @policy_version_hash,
537
+ @neurcode_version,
538
+ @intent_norm,
539
+ @intent_hash,
540
+ @ticket_ref,
541
+ @context_hash,
542
+ @response_json,
543
+ @no_code_storage
544
+ )
545
+ `);
546
+ for (const item of entries) {
547
+ const legacy = item;
548
+ const input = legacy?.input || {};
549
+ const response = legacy?.response || null;
550
+ if (!legacy?.key || !input?.orgId || !input?.projectId || !response)
551
+ continue;
552
+ const intentNorm = normalizeIntent(String(input.intent || ''));
553
+ const intentHash = sha256Hex(intentNorm);
554
+ const repoIdentity = getRepoIdentity(cwd);
555
+ const repoKind = input.repo?.kind === 'git' ? 'git' : 'filesystem';
556
+ const promptHash = computePromptHash({
557
+ intent: intentNorm,
558
+ ticketRef: input.ticketRef,
559
+ contextHash: input.contextHash,
560
+ });
561
+ insertStmt.run({
562
+ key: String(legacy.key),
563
+ created_at: String(legacy.createdAt || new Date().toISOString()),
564
+ last_used_at: String(legacy.lastUsedAt || legacy.createdAt || new Date().toISOString()),
565
+ use_count: Number(legacy.useCount || 1),
566
+ org_id: String(input.orgId),
567
+ project_id: String(input.projectId),
568
+ repo_kind: repoKind,
569
+ repo_identity: repoIdentity,
570
+ head_sha: repoKind === 'git' ? String(input.repo?.headSha || '') || null : null,
571
+ head_tree_sha: repoKind === 'git' ? String(input.repo?.headTreeSha || '') || null : null,
572
+ working_tree_hash: repoKind === 'git' ? String(input.repo?.statusHash || '') || null : null,
573
+ file_tree_hash: repoKind === 'filesystem' ? String(input.repo?.fileTreeHash || '') || null : null,
574
+ prompt_hash: promptHash,
575
+ policy_version_hash: 'legacy',
576
+ neurcode_version: 'legacy',
577
+ intent_norm: intentNorm,
578
+ intent_hash: intentHash,
579
+ ticket_ref: input.ticketRef ? String(input.ticketRef) : null,
580
+ context_hash: input.contextHash ? String(input.contextHash) : null,
581
+ response_json: JSON.stringify(response),
582
+ no_code_storage: 0,
583
+ });
584
+ }
585
+ try {
586
+ (0, fs_1.renameSync)(legacyPath, legacyPath.replace(/\.json$/, `.migrated-${Date.now()}.json`));
587
+ }
588
+ catch {
589
+ // ignore rename failures
590
+ }
591
+ }
592
+ catch {
593
+ // ignore invalid legacy cache
594
+ }
595
+ setMeta(db, migrationKey, '1');
596
+ }
597
+ function getDb(cwd) {
598
+ const dbPath = getBrainDbPath(cwd);
599
+ const existing = dbConnections.get(dbPath);
600
+ if (existing)
601
+ return existing;
602
+ const Ctor = getSqliteCtor();
603
+ if (!Ctor)
604
+ return null;
605
+ try {
606
+ ensureNeurcodeDir(cwd);
607
+ const db = new Ctor(dbPath);
608
+ db.pragma('journal_mode = WAL');
609
+ db.pragma('synchronous = NORMAL');
610
+ initDbSchema(db);
611
+ migrateLegacyJsonCache(cwd, db);
612
+ dbConnections.set(dbPath, db);
613
+ return db;
614
+ }
615
+ catch {
616
+ return null;
617
+ }
618
+ }
619
+ function prune(db) {
620
+ try {
621
+ const row = db.prepare('SELECT COUNT(*) as count FROM plan_cache').get();
622
+ const total = Number(row?.count || 0);
623
+ if (total <= MAX_ENTRIES)
624
+ return;
625
+ const toDelete = total - MAX_ENTRIES;
626
+ db.prepare(`
627
+ DELETE FROM plan_cache
628
+ WHERE key IN (
629
+ SELECT key
630
+ FROM plan_cache
631
+ ORDER BY last_used_at ASC
632
+ LIMIT ?
633
+ )
634
+ `).run(toDelete);
635
+ }
636
+ catch {
637
+ // ignore
638
+ }
639
+ }
640
+ function toEntry(row) {
641
+ if (!row)
642
+ return null;
643
+ try {
644
+ const response = JSON.parse(row.response_json);
645
+ const repo = row.repo_kind === 'git'
646
+ ? {
647
+ kind: 'git',
648
+ repoIdentity: row.repo_identity,
649
+ headSha: row.head_sha || '',
650
+ headTreeSha: row.head_tree_sha || '',
651
+ workingTreeHash: row.working_tree_hash || '',
652
+ }
653
+ : {
654
+ kind: 'filesystem',
655
+ repoIdentity: row.repo_identity,
656
+ fileTreeHash: row.file_tree_hash || '',
657
+ };
658
+ return {
659
+ key: row.key,
660
+ createdAt: row.created_at,
661
+ lastUsedAt: row.last_used_at,
662
+ useCount: Number(row.use_count || 0),
663
+ input: {
664
+ schemaVersion: CACHE_SCHEMA_VERSION,
665
+ orgId: row.org_id,
666
+ projectId: row.project_id,
667
+ repo,
668
+ promptHash: row.prompt_hash,
669
+ policyVersionHash: row.policy_version_hash,
670
+ neurcodeVersion: row.neurcode_version,
671
+ intent: row.intent_norm || '',
672
+ intentHash: row.intent_hash,
673
+ ticketRef: row.ticket_ref || undefined,
674
+ contextHash: row.context_hash || undefined,
675
+ },
676
+ response,
677
+ };
678
+ }
679
+ catch {
680
+ return null;
681
+ }
135
682
  }
136
683
  function readCachedPlan(cwd, key) {
137
684
  try {
138
- const cachePath = getPlanCachePath(cwd);
139
- const cache = safeReadCacheFile(cachePath);
140
- const entry = cache.entries[key];
141
- if (!entry)
685
+ const db = getDb(cwd);
686
+ if (!db) {
687
+ const cache = readFallbackCache(cwd);
688
+ const existing = cache.entries[key];
689
+ if (!existing)
690
+ return null;
691
+ const now = new Date().toISOString();
692
+ const next = {
693
+ ...existing,
694
+ lastUsedAt: now,
695
+ useCount: Number(existing.useCount || 0) + 1,
696
+ };
697
+ cache.entries[key] = next;
698
+ pruneFallback(cache);
699
+ writeFallbackCache(cwd, cache);
700
+ return next;
701
+ }
702
+ const row = db.prepare('SELECT * FROM plan_cache WHERE key = ?').get(key);
703
+ if (!row)
142
704
  return null;
143
- // Update LRU metadata.
144
705
  const now = new Date().toISOString();
145
- entry.lastUsedAt = now;
146
- entry.useCount = (entry.useCount || 0) + 1;
147
- cache.entries[key] = entry;
148
- prune(cache);
149
- atomicWrite(cachePath, JSON.stringify(cache, null, 2) + '\n');
150
- return entry;
706
+ db.prepare(`
707
+ UPDATE plan_cache
708
+ SET last_used_at = ?, use_count = use_count + 1
709
+ WHERE key = ?
710
+ `).run(now, key);
711
+ row.last_used_at = now;
712
+ row.use_count = Number(row.use_count || 0) + 1;
713
+ return toEntry(row);
714
+ }
715
+ catch {
716
+ return null;
717
+ }
718
+ }
719
/**
 * Read-only lookup of a cached plan by key: unlike readCachedPlan it does
 * NOT touch LRU metadata. Returns the entry or null; never throws.
 */
function peekCachedPlan(cwd, key) {
    try {
        const db = getDb(cwd);
        if (db) {
            // toEntry handles a missing row (undefined) and yields null.
            return toEntry(db.prepare('SELECT * FROM plan_cache WHERE key = ?').get(key));
        }
        return readFallbackCache(cwd).entries[key] || null;
    }
    catch {
        return null;
    }
}
156
733
/**
 * Persist (insert-or-update) a plan cache entry.
 *
 * Privacy handling before anything hits disk:
 *  - the intent is normalized and secret-masked; only the masked form is
 *    ever stored, and it is dropped entirely in no-code-storage mode
 *    (the hash of the masked intent is kept either way);
 *  - the response is sanitized via sanitizeCachedResponseForStorage.
 *
 * Writes go to SQLite when available, otherwise to the JSON fallback file.
 * Never throws: cache failures must never block plan generation.
 */
function writeCachedPlan(cwd, entry) {
    try {
        const now = new Date().toISOString();
        const noCodeStorage = isNoCodeStorageMode(cwd);
        const normalizedIntent = normalizeIntent(entry.input.intent || '');
        const safeIntent = (0, secret_masking_1.maskSecretsInText)(normalizedIntent).masked;
        const intentForStorage = noCodeStorage ? '' : safeIntent;
        const intentHash = sha256Hex(safeIntent);
        const responseToStore = sanitizeCachedResponseForStorage(entry.response, noCodeStorage);
        const repo = entry.input.repo;
        const repoIdentity = repo.repoIdentity || fallbackRepoIdentity(cwd);
        const db = getDb(cwd);
        if (!db) {
            // JSON fallback: merge with any existing entry, preserving its
            // creation timestamp and accumulating its use count.
            const cache = readFallbackCache(cwd);
            const previous = cache.entries[entry.key];
            cache.entries[entry.key] = {
                key: entry.key,
                createdAt: previous?.createdAt || now,
                lastUsedAt: now,
                useCount: Number(previous?.useCount || 0) + 1,
                input: {
                    ...entry.input,
                    schemaVersion: CACHE_SCHEMA_VERSION,
                    repo: {
                        ...repo,
                        repoIdentity,
                    },
                    intent: intentForStorage,
                    intentHash,
                    ticketRef: entry.input.ticketRef,
                    contextHash: entry.input.contextHash,
                },
                response: responseToStore,
            };
            pruneFallback(cache);
            writeFallbackCache(cwd, cache);
            updatePointer(cwd, {
                repoIdentity,
                settings: { noCodeStorage },
            });
            return;
        }
        // SQLite upsert: a fresh key starts at use_count = 1; a conflicting
        // key keeps its created_at and increments its existing use_count.
        db.prepare(`
      INSERT INTO plan_cache (
        key,
        created_at,
        last_used_at,
        use_count,
        org_id,
        project_id,
        repo_kind,
        repo_identity,
        head_sha,
        head_tree_sha,
        working_tree_hash,
        file_tree_hash,
        prompt_hash,
        policy_version_hash,
        neurcode_version,
        intent_norm,
        intent_hash,
        ticket_ref,
        context_hash,
        response_json,
        no_code_storage
      ) VALUES (
        @key,
        @created_at,
        @last_used_at,
        1,
        @org_id,
        @project_id,
        @repo_kind,
        @repo_identity,
        @head_sha,
        @head_tree_sha,
        @working_tree_hash,
        @file_tree_hash,
        @prompt_hash,
        @policy_version_hash,
        @neurcode_version,
        @intent_norm,
        @intent_hash,
        @ticket_ref,
        @context_hash,
        @response_json,
        @no_code_storage
      )
      ON CONFLICT(key) DO UPDATE SET
        last_used_at = excluded.last_used_at,
        use_count = plan_cache.use_count + 1,
        org_id = excluded.org_id,
        project_id = excluded.project_id,
        repo_kind = excluded.repo_kind,
        repo_identity = excluded.repo_identity,
        head_sha = excluded.head_sha,
        head_tree_sha = excluded.head_tree_sha,
        working_tree_hash = excluded.working_tree_hash,
        file_tree_hash = excluded.file_tree_hash,
        prompt_hash = excluded.prompt_hash,
        policy_version_hash = excluded.policy_version_hash,
        neurcode_version = excluded.neurcode_version,
        intent_norm = excluded.intent_norm,
        intent_hash = excluded.intent_hash,
        ticket_ref = excluded.ticket_ref,
        context_hash = excluded.context_hash,
        response_json = excluded.response_json,
        no_code_storage = excluded.no_code_storage
    `).run({
            key: entry.key,
            created_at: now,
            last_used_at: now,
            org_id: entry.input.orgId,
            project_id: entry.input.projectId,
            repo_kind: repo.kind,
            repo_identity: repoIdentity,
            // Git-only columns are NULL for filesystem repos and vice versa.
            head_sha: repo.kind === 'git' ? repo.headSha : null,
            head_tree_sha: repo.kind === 'git' ? repo.headTreeSha : null,
            working_tree_hash: repo.kind === 'git' ? repo.workingTreeHash : null,
            file_tree_hash: repo.kind === 'filesystem' ? repo.fileTreeHash : null,
            prompt_hash: entry.input.promptHash,
            policy_version_hash: entry.input.policyVersionHash,
            neurcode_version: entry.input.neurcodeVersion,
            intent_norm: intentForStorage,
            intent_hash: intentHash,
            ticket_ref: entry.input.ticketRef || null,
            context_hash: entry.input.contextHash || null,
            response_json: JSON.stringify(responseToStore),
            no_code_storage: noCodeStorage ? 1 : 0,
        });
        prune(db);
        updatePointer(cwd, {
            repoIdentity,
            settings: { noCodeStorage },
        });
    }
    catch {
        // Cache failures should never block plan generation.
    }
}
874
/**
 * Return every cached plan entry, most-recently-used first.
 * Never throws: any store failure yields an empty list.
 */
function listCachedPlans(cwd) {
    try {
        const db = getDb(cwd);
        if (db) {
            // SQLite orders for us; toEntry may return null for bad rows.
            const rows = db.prepare('SELECT * FROM plan_cache ORDER BY last_used_at DESC').all();
            return rows.map((row) => toEntry(row)).filter(Boolean);
        }
        // JSON fallback: sort in memory by lastUsedAt, newest first.
        const cache = readFallbackCache(cwd);
        const byRecencyDesc = (a, b) => (Date.parse(b.lastUsedAt) || 0) - (Date.parse(a.lastUsedAt) || 0);
        return Object.values(cache.entries).sort(byRecencyDesc);
    }
    catch {
        return [];
    }
}
892
/**
 * Delete every cached plan entry for which shouldDelete(entry) is truthy.
 * Returns { deleted, remaining } counts; on any failure returns zeros
 * rather than throwing.
 */
function deleteCachedPlans(cwd, shouldDelete) {
    try {
        const db = getDb(cwd);
        if (!db) {
            // JSON fallback: remove matching keys, then persist what's left.
            const cache = readFallbackCache(cwd);
            let removed = 0;
            for (const [cacheKey, cached] of Object.entries(cache.entries)) {
                if (!cached)
                    continue;
                if (shouldDelete(cached)) {
                    delete cache.entries[cacheKey];
                    removed++;
                }
            }
            pruneFallback(cache);
            writeFallbackCache(cwd, cache);
            return { deleted: removed, remaining: Object.keys(cache.entries).length };
        }
        // SQLite: the predicate operates on decoded entries, so load all
        // rows, decide in memory, then delete the matching keys.
        const rows = db.prepare('SELECT * FROM plan_cache').all();
        const doomedKeys = rows
            .map((row) => toEntry(row))
            .filter(Boolean)
            .filter((cached) => shouldDelete(cached))
            .map((cached) => cached.key);
        const deleteStmt = db.prepare('DELETE FROM plan_cache WHERE key = ?');
        for (const cacheKey of doomedKeys) {
            deleteStmt.run(cacheKey);
        }
        const countRow = db.prepare('SELECT COUNT(*) as count FROM plan_cache').get();
        return { deleted: doomedKeys.length, remaining: Number(countRow?.count || 0) };
    }
    catch {
        return { deleted: 0, remaining: 0 };
    }
}
186
924
  function tokenize(text) {
@@ -194,8 +932,8 @@ function jaccard(a, b) {
194
932
  if (a.size === 0 || b.size === 0)
195
933
  return 0;
196
934
  let inter = 0;
197
- for (const x of a) {
198
- if (b.has(x))
935
+ for (const token of a) {
936
+ if (b.has(token))
199
937
  inter++;
200
938
  }
201
939
  const union = a.size + b.size - inter;
@@ -203,25 +941,112 @@ function jaccard(a, b) {
203
941
  }
204
942
  function findSimilarCachedPlans(cwd, filter, intent, k = 3) {
205
943
  try {
206
- const cachePath = getPlanCachePath(cwd);
207
- const cache = safeReadCacheFile(cachePath);
208
- const entries = Object.values(cache.entries);
209
- const queryTokens = new Set(tokenize(intent));
210
- const scored = entries
211
- .filter((e) => e.input.orgId === filter.orgId && e.input.projectId === filter.projectId)
212
- .map((e) => {
213
- const eTokens = new Set(tokenize(e.input.intent));
214
- const score = jaccard(queryTokens, eTokens);
215
- return { entry: e, score };
944
+ const db = getDb(cwd);
945
+ const normalizedIntent = normalizeIntent(intent);
946
+ const queryTokens = new Set(tokenize(normalizedIntent));
947
+ if (queryTokens.size === 0)
948
+ return [];
949
+ if (!db) {
950
+ const cache = readFallbackCache(cwd);
951
+ const entries = Object.values(cache.entries)
952
+ .filter((entry) => entry.input.orgId === filter.orgId && entry.input.projectId === filter.projectId)
953
+ .filter((entry) => !filter.repoIdentity || entry.input.repo.repoIdentity === filter.repoIdentity)
954
+ .filter((entry) => Boolean(entry.input.intent));
955
+ const scored = entries
956
+ .map((entry) => {
957
+ const tokens = new Set(tokenize(entry.input.intent || ''));
958
+ const score = jaccard(queryTokens, tokens);
959
+ if (score <= 0)
960
+ return null;
961
+ return { entry, score };
962
+ })
963
+ .filter(Boolean);
964
+ return scored
965
+ .sort((a, b) => b.score - a.score)
966
+ .slice(0, k)
967
+ .map((x) => x.entry);
968
+ }
969
+ const rows = filter.repoIdentity
970
+ ? db.prepare(`
971
+ SELECT *
972
+ FROM plan_cache
973
+ WHERE org_id = ? AND project_id = ? AND repo_identity = ? AND intent_norm <> ''
974
+ ORDER BY last_used_at DESC
975
+ LIMIT 120
976
+ `).all(filter.orgId, filter.projectId, filter.repoIdentity)
977
+ : db.prepare(`
978
+ SELECT *
979
+ FROM plan_cache
980
+ WHERE org_id = ? AND project_id = ? AND intent_norm <> ''
981
+ ORDER BY last_used_at DESC
982
+ LIMIT 120
983
+ `).all(filter.orgId, filter.projectId);
984
+ const scored = rows
985
+ .map((row) => {
986
+ const entry = toEntry(row);
987
+ if (!entry)
988
+ return null;
989
+ if (!entry.input.intent)
990
+ return null;
991
+ const tokens = new Set(tokenize(entry.input.intent));
992
+ const score = jaccard(queryTokens, tokens);
993
+ if (score <= 0)
994
+ return null;
995
+ return { entry, score };
216
996
  })
217
- .filter((s) => s.score > 0)
997
+ .filter(Boolean);
998
+ return scored
218
999
  .sort((a, b) => b.score - a.score)
219
1000
  .slice(0, k)
220
- .map((s) => s.entry);
221
- return scored;
1001
+ .map((x) => x.entry);
222
1002
  }
223
1003
  catch {
224
1004
  return [];
225
1005
  }
226
1006
  }
1007
/**
 * Size of the brain SQLite database file in bytes, or null when the file
 * does not exist or cannot be stat'ed.
 */
function getBrainDbSizeBytes(cwd) {
    try {
        const dbPath = getBrainDbPath(cwd);
        return (0, fs_1.existsSync)(dbPath) ? (0, fs_1.statSync)(dbPath).size : null;
    }
    catch {
        return null;
    }
}
1018
/**
 * Report which storage backend the brain store is using for this cwd:
 * 'sqlite' when a database connection is available, 'json-fallback'
 * otherwise.
 */
function getBrainStoreBackend(cwd) {
    try {
        return getDb(cwd) ? 'sqlite' : 'json-fallback';
    }
    catch {
        // Mirror the rest of this module's contract: store introspection
        // must never throw, so a failing getDb reads as the fallback.
        return 'json-fallback';
    }
}
1022
/**
 * Close cached SQLite connections. With a cwd, only that repo's
 * connection is closed; without one, every tracked connection is closed.
 * Close errors are swallowed, and closed connections are always removed
 * from the connection map. Never throws.
 */
function closeBrainStore(cwd) {
    try {
        // Close one tracked connection (if any) and drop it from the map.
        const closeAndForget = (dbPath) => {
            const db = dbConnections.get(dbPath);
            if (!db)
                return;
            try {
                db.close();
            }
            catch {
                // ignore
            }
            dbConnections.delete(dbPath);
        };
        if (cwd) {
            closeAndForget(getBrainDbPath(cwd));
            return;
        }
        // Snapshot the keys so we can delete from the map while iterating.
        for (const dbPath of [...dbConnections.keys()]) {
            closeAndForget(dbPath);
        }
    }
    catch {
        // ignore
    }
}
227
1052
  //# sourceMappingURL=plan-cache.js.map