@ophan/cli 0.0.1 → 0.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/watch.js CHANGED
@@ -33,27 +33,90 @@ var __importStar = (this && this.__importStar) || (function () {
33
33
  };
34
34
  })();
35
35
  Object.defineProperty(exports, "__esModule", { value: true });
36
+ exports.formatError = formatError;
37
+ exports.isGitIgnored = isGitIgnored;
38
+ exports.withTimeout = withTimeout;
36
39
  exports.startWatch = startWatch;
37
40
  const fs = __importStar(require("fs"));
38
41
  const path = __importStar(require("path"));
42
+ const child_process_1 = require("child_process");
39
43
  const core_1 = require("@ophan/core");
40
44
  function emit(event) {
41
45
  process.stdout.write(JSON.stringify(event) + "\n");
42
46
  }
47
+ function formatError(err) {
48
+ if (err instanceof Error)
49
+ return err.message || err.constructor.name;
50
+ if (typeof err === "string")
51
+ return err || "Empty error string";
52
+ // Handle non-Error objects (e.g. Supabase/Anthropic SDK errors with circular refs)
53
+ if (err && typeof err === "object") {
54
+ const obj = err;
55
+ if (typeof obj.message === "string" && obj.message)
56
+ return obj.message;
57
+ if (typeof obj.error === "string" && obj.error)
58
+ return obj.error;
59
+ try {
60
+ const json = JSON.stringify(err);
61
+ if (json && json !== "{}")
62
+ return json;
63
+ }
64
+ catch { /* circular — fall through */ }
65
+ // Last resort for objects: try toString, check it's not the useless default
66
+ const str = String(err);
67
+ if (str !== "[object Object]")
68
+ return str;
69
+ return "Unknown error (non-serializable object)";
70
+ }
71
+ if (err === undefined)
72
+ return "Unknown error (undefined)";
73
+ if (err === null)
74
+ return "Unknown error (null)";
75
+ return String(err);
76
+ }
77
+ /**
78
+ * Check if a file is ignored by git (respects all .gitignore files, global gitignore,
79
+ * and .git/info/exclude). Returns false if not in a git repo or git is unavailable.
80
+ */
81
+ function isGitIgnored(filePath, cwd) {
82
+ try {
83
+ (0, child_process_1.execSync)(`git check-ignore -q "${filePath}"`, {
84
+ cwd,
85
+ stdio: "pipe",
86
+ });
87
+ return true; // exit code 0 = ignored
88
+ }
89
+ catch {
90
+ return false; // exit code 1 = not ignored, or git not available
91
+ }
92
+ }
93
+ /** Wraps a promise with a timeout. Rejects with a timeout error if not resolved within `ms`. */
94
+ function withTimeout(promise, ms, label) {
95
+ return new Promise((resolve, reject) => {
96
+ const timer = setTimeout(() => reject(new Error(`Analysis timeout after ${ms / 1000}s for ${label}`)), ms);
97
+ promise.then((v) => { clearTimeout(timer); resolve(v); }, (e) => { clearTimeout(timer); reject(e); });
98
+ });
99
+ }
43
100
  async function startWatch(options) {
44
101
  const { rootPath, pullFn, syncFn, json } = options;
45
102
  const dbPath = path.join(rootPath, ".ophan", "index.db");
46
103
  const lockPath = path.join(rootPath, ".ophan", "watch.lock");
47
- // Check for existing watcher
104
+ // Check for existing watcher — kill it so only one instance runs at a time
48
105
  if (fs.existsSync(lockPath)) {
49
106
  try {
50
107
  const pid = parseInt(fs.readFileSync(lockPath, "utf-8").trim());
51
108
  process.kill(pid, 0); // Check if process is alive (signal 0 = no-op)
109
+ // Kill the existing watcher and take over
52
110
  if (json)
53
- emit({ event: "error", message: `Another watcher is running (PID ${pid})` });
111
+ emit({ event: "replacing_watcher", oldPid: pid });
54
112
  else
55
- console.error(`Another watcher is already running (PID ${pid}). Kill it or remove .ophan/watch.lock`);
56
- process.exit(1);
113
+ console.log(` Stopping previous watcher (PID ${pid})...`);
114
+ try {
115
+ process.kill(pid, "SIGTERM");
116
+ }
117
+ catch { /* already dying */ }
118
+ // Brief wait for process to exit and release resources
119
+ await new Promise((resolve) => setTimeout(resolve, 500));
57
120
  }
58
121
  catch {
59
122
  // Process is dead — stale lock file, continue
@@ -74,33 +137,78 @@ async function startWatch(options) {
74
137
  console.log("🔮 Ophan watching...\n");
75
138
  if (!json)
76
139
  console.log(" Running initial scan...");
77
- const scanResult = await (0, core_1.analyzeRepository)(rootPath, (current, total, file) => {
78
- if (!json && process.stdout.isTTY) {
79
- process.stdout.clearLine(0);
80
- process.stdout.cursorTo(0);
81
- process.stdout.write(` [${current}/${total}] ${file}`);
140
+ const INITIAL_SCAN_RETRIES = 2;
141
+ const PROGRESS_THROTTLE_MS = 500;
142
+ let scanSucceeded = false;
143
+ for (let attempt = 0; attempt <= INITIAL_SCAN_RETRIES; attempt++) {
144
+ try {
145
+ let lastProgressEmit = 0;
146
+ const scanResult = await (0, core_1.analyzeRepository)(rootPath, (current, total, file) => {
147
+ if (json) {
148
+ const now = Date.now();
149
+ if (now - lastProgressEmit > PROGRESS_THROTTLE_MS || current === total) {
150
+ emit({ event: "scan_progress", current, total, file });
151
+ lastProgressEmit = now;
152
+ }
153
+ }
154
+ else if (process.stdout.isTTY) {
155
+ process.stdout.clearLine(0);
156
+ process.stdout.cursorTo(0);
157
+ process.stdout.write(` [${current}/${total}] ${file}`);
158
+ }
159
+ }, pullFn);
160
+ if (json) {
161
+ emit({
162
+ event: "scan_complete",
163
+ files: scanResult.files,
164
+ analyzed: scanResult.analyzed,
165
+ cached: scanResult.skipped,
166
+ pulled: scanResult.pulled,
167
+ });
168
+ }
169
+ else {
170
+ console.log(`\n Initial scan: ${scanResult.analyzed} analyzed, ${scanResult.skipped} cached` +
171
+ (scanResult.pulled ? ` (${scanResult.pulled} from cloud)` : "") +
172
+ ` across ${scanResult.files} files`);
173
+ }
174
+ // Sync after initial scan if anything was analyzed
175
+ if (syncFn && scanResult.analyzed > 0) {
176
+ await runSync(syncFn, json);
177
+ }
178
+ scanSucceeded = true;
179
+ break;
180
+ }
181
+ catch (err) {
182
+ const msg = formatError(err);
183
+ if (attempt < INITIAL_SCAN_RETRIES) {
184
+ const delay = (attempt + 1) * 3000;
185
+ if (json) {
186
+ emit({ event: "retry", attempt: attempt + 1, maxAttempts: INITIAL_SCAN_RETRIES + 1, message: msg, delayMs: delay });
187
+ }
188
+ else {
189
+ console.error(`\n ❌ Initial scan failed: ${msg}. Retrying in ${delay / 1000}s...`);
190
+ }
191
+ await new Promise((resolve) => setTimeout(resolve, delay));
192
+ }
193
+ else {
194
+ if (json) {
195
+ emit({ event: "error", message: `Initial scan failed after ${INITIAL_SCAN_RETRIES + 1} attempts: ${msg}` });
196
+ }
197
+ else {
198
+ console.error(`\n ❌ Initial scan failed after ${INITIAL_SCAN_RETRIES + 1} attempts: ${msg}`);
199
+ }
200
+ }
82
201
  }
83
- }, pullFn);
84
- if (json) {
85
- emit({
86
- event: "scan_complete",
87
- files: scanResult.files,
88
- analyzed: scanResult.analyzed,
89
- cached: scanResult.skipped,
90
- pulled: scanResult.pulled,
91
- });
92
- }
93
- else {
94
- console.log(`\n Initial scan: ${scanResult.analyzed} analyzed, ${scanResult.skipped} cached` +
95
- (scanResult.pulled ? ` (${scanResult.pulled} from cloud)` : "") +
96
- ` across ${scanResult.files} files`);
97
202
  }
98
- // Sync after initial scan if anything was analyzed
99
- if (syncFn && scanResult.analyzed > 0) {
100
- await runSync(syncFn, json);
203
+ if (!scanSucceeded) {
204
+ // Continue to file watching — incremental analysis can still work.
205
+ // Individual file changes will trigger re-analysis.
206
+ if (json)
207
+ emit({ event: "error", message: "Continuing in watch-only mode. File changes will trigger analysis." });
101
208
  }
102
209
  // Phase 2: Open DB for incremental watching
103
210
  const db = (0, core_1.initDb)(dbPath);
211
+ const practices = (0, core_1.loadPracticesFromDb)(db);
104
212
  const extSet = new Set((0, core_1.getSupportedExtensions)().map((e) => e.toLowerCase()));
105
213
  const DEBOUNCE_MS = 5000;
106
214
  const SYNC_DEBOUNCE_MS = 10000;
@@ -123,12 +231,20 @@ async function startWatch(options) {
123
231
  }
124
232
  }, SYNC_DEBOUNCE_MS);
125
233
  }
126
- // FIFO analysis queue
234
+ // FIFO analysis queue with circuit breaker
127
235
  let analyzing = false;
128
236
  const queue = [];
237
+ const MAX_CONSECUTIVE_ERRORS = 3;
238
+ const CIRCUIT_BREAKER_COOLDOWN_MS = 30000;
239
+ const ANALYSIS_TIMEOUT_MS = 120000;
240
+ let consecutiveErrors = 0;
241
+ let circuitBrokenUntil = 0;
129
242
  async function processQueue() {
130
243
  if (analyzing || queue.length === 0)
131
244
  return;
245
+ // Circuit breaker: stop processing if too many consecutive errors
246
+ if (Date.now() < circuitBrokenUntil)
247
+ return;
132
248
  analyzing = true;
133
249
  while (queue.length > 0) {
134
250
  const file = queue.shift();
@@ -139,7 +255,8 @@ async function startWatch(options) {
139
255
  console.log(` Analyzing ${relPath}...`);
140
256
  const start = Date.now();
141
257
  try {
142
- const result = await (0, core_1.analyzeFiles)(db, rootPath, [file], { pullFn });
258
+ const result = await withTimeout((0, core_1.analyzeFiles)(db, rootPath, [file], { pullFn, practices }), ANALYSIS_TIMEOUT_MS, relPath);
259
+ consecutiveErrors = 0; // Reset on success
143
260
  const duration = Date.now() - start;
144
261
  if (json) {
145
262
  emit({
@@ -160,17 +277,38 @@ async function startWatch(options) {
160
277
  }
161
278
  }
162
279
  catch (err) {
280
+ consecutiveErrors++;
281
+ const msg = formatError(err);
163
282
  if (json) {
164
- emit({ event: "error", file: relPath, message: err.message });
283
+ emit({ event: "error", file: relPath, message: msg });
165
284
  }
166
285
  else {
167
- console.error(` ❌ ${relPath}: ${err.message}`);
286
+ console.error(` ❌ ${relPath}: ${msg}`);
287
+ }
288
+ // Circuit breaker: pause after repeated failures
289
+ if (consecutiveErrors >= MAX_CONSECUTIVE_ERRORS) {
290
+ circuitBrokenUntil = Date.now() + CIRCUIT_BREAKER_COOLDOWN_MS;
291
+ const remaining = queue.length;
292
+ if (json) {
293
+ emit({
294
+ event: "error",
295
+ message: `${MAX_CONSECUTIVE_ERRORS} consecutive failures — pausing analysis for ${CIRCUIT_BREAKER_COOLDOWN_MS / 1000}s. ${remaining} file(s) queued for retry.`,
296
+ });
297
+ }
298
+ else {
299
+ console.error(` ⏸ ${MAX_CONSECUTIVE_ERRORS} consecutive failures — pausing for ${CIRCUIT_BREAKER_COOLDOWN_MS / 1000}s`);
300
+ }
301
+ break; // Leave remaining files in queue for retry after cooldown
168
302
  }
169
303
  }
170
304
  }
171
305
  analyzing = false;
172
306
  }
173
- const IGNORE_SEGMENTS = ["node_modules", ".ophan", "__pycache__", ".venv", "venv", ".tox", ".eggs", "dist"];
307
+ // Fast-path segments that are always ignored (avoids git subprocess for common cases)
308
+ const ALWAYS_IGNORE = ["node_modules", ".ophan"];
309
+ // Cache of directory prefixes known to be gitignored — avoids repeated git check-ignore calls
310
+ // for files in the same ignored directory (e.g. .output/public/assets/a.js, .output/public/assets/b.js)
311
+ const ignoredDirCache = new Set();
174
312
  function onFileChange(filename) {
175
313
  const ext = path.extname(filename).toLowerCase();
176
314
  if (!extSet.has(ext))
@@ -178,9 +316,21 @@ async function startWatch(options) {
178
316
  const absPath = path.isAbsolute(filename)
179
317
  ? filename
180
318
  : path.resolve(rootPath, filename);
181
- // Skip ignored directories
182
- if (IGNORE_SEGMENTS.some((seg) => absPath.includes(`/${seg}/`) || absPath.includes(`\\${seg}\\`)))
319
+ // Fast-path: skip universally-ignored directories without shelling out to git
320
+ if (ALWAYS_IGNORE.some((seg) => absPath.includes(`/${seg}/`) || absPath.includes(`\\${seg}\\`)))
183
321
  return;
322
+ // Check directory-level cache before calling git
323
+ const dir = path.dirname(absPath);
324
+ for (const cached of ignoredDirCache) {
325
+ if (dir.startsWith(cached))
326
+ return;
327
+ }
328
+ // Ask git whether this file is ignored (respects all .gitignore files + global)
329
+ if (isGitIgnored(absPath, rootPath)) {
330
+ // Cache the directory so sibling files skip the git call
331
+ ignoredDirCache.add(dir);
332
+ return;
333
+ }
184
334
  // Skip if file was deleted
185
335
  if (!fs.existsSync(absPath))
186
336
  return;
@@ -193,7 +343,13 @@ async function startWatch(options) {
193
343
  if (!queue.includes(absPath)) {
194
344
  queue.push(absPath);
195
345
  }
196
- processQueue();
346
+ processQueue().catch((err) => {
347
+ const msg = formatError(err);
348
+ if (json)
349
+ emit({ event: "error", message: msg });
350
+ else
351
+ console.error(` ❌ Analysis error: ${msg}`);
352
+ });
197
353
  }, DEBOUNCE_MS));
198
354
  }
199
355
  // Start recursive file watcher (macOS + Windows native, Linux may need chokidar)
package/ophan_logo.png ADDED
Binary file
package/package.json CHANGED
@@ -1,7 +1,10 @@
1
1
  {
2
2
  "name": "@ophan/cli",
3
- "version": "0.0.1",
4
- "files": ["dist"],
3
+ "version": "0.0.3",
4
+ "files": [
5
+ "dist",
6
+ "ophan_logo.png"
7
+ ],
5
8
  "bin": {
6
9
  "ophan": "./dist/index.js"
7
10
  },
@@ -13,9 +16,10 @@
13
16
  "dependencies": {
14
17
  "@ophan/core": "workspace:*",
15
18
  "@supabase/supabase-js": "^2.49.4",
16
- "better-sqlite3": "^11.9.1",
19
+ "better-sqlite3": "^12.6.2",
17
20
  "commander": "^14.0.2",
18
21
  "dotenv": "^16.4.7",
22
+ "p-limit": "^6.2.0",
19
23
  "open": "^10.1.0"
20
24
  },
21
25
  "devDependencies": {
package/dist/sync.test.js DELETED
@@ -1,288 +0,0 @@
1
- "use strict";
2
- var __importDefault = (this && this.__importDefault) || function (mod) {
3
- return (mod && mod.__esModule) ? mod : { "default": mod };
4
- };
5
- Object.defineProperty(exports, "__esModule", { value: true });
6
- const vitest_1 = require("vitest");
7
- const better_sqlite3_1 = __importDefault(require("better-sqlite3"));
8
- const test_utils_1 = require("./test-utils");
9
- const sync_1 = require("./sync");
10
- const test_utils_2 = require("@ophan/core/test-utils");
11
- const USER_A = "user-aaaa-aaaa-aaaa-aaaaaaaaaaaa";
12
- const USER_B = "user-bbbb-bbbb-bbbb-bbbbbbbbbbbb";
13
- const REPO_ID = "repo-1";
14
- function defaultRepoResponse() {
15
- return { data: { id: REPO_ID }, error: null };
16
- }
17
- // ============ syncToSupabase ============
18
- (0, vitest_1.describe)("syncToSupabase", () => {
19
- const cleanups = [];
20
- function tracked(result) {
21
- cleanups.push(result.cleanup);
22
- return result;
23
- }
24
- (0, vitest_1.afterEach)(() => {
25
- cleanups.forEach((fn) => fn());
26
- cleanups.length = 0;
27
- });
28
- (0, vitest_1.describe)("account change detection", () => {
29
- (0, vitest_1.it)("first sync — stores user_id, no reset", async () => {
30
- const { rootPath, dbPath } = tracked((0, test_utils_1.createTempDb)());
31
- const mock = (0, test_utils_1.createMockSupabase)({
32
- "repos.upsert": defaultRepoResponse(),
33
- });
34
- await (0, sync_1.syncToSupabase)(rootPath, mock.client, USER_A);
35
- const db = new better_sqlite3_1.default(dbPath);
36
- const row = db
37
- .prepare("SELECT value FROM sync_meta WHERE key = 'last_synced_user_id'")
38
- .get();
39
- db.close();
40
- (0, vitest_1.expect)(row.value).toBe(USER_A);
41
- });
42
- (0, vitest_1.it)("same user re-sync — no reset of synced_at", async () => {
43
- const { rootPath, dbPath } = tracked((0, test_utils_1.createTempDb)((db) => {
44
- db.prepare("INSERT INTO sync_meta (key, value) VALUES ('last_synced_user_id', ?)").run(USER_A);
45
- (0, test_utils_2.insertAnalysisPair)(db, "hash1", { syncedAt: 1000 });
46
- }));
47
- const mock = (0, test_utils_1.createMockSupabase)({
48
- "repos.upsert": defaultRepoResponse(),
49
- });
50
- await (0, sync_1.syncToSupabase)(rootPath, mock.client, USER_A);
51
- // synced_at should still be set (not reset to NULL)
52
- const db = new better_sqlite3_1.default(dbPath);
53
- const row = db
54
- .prepare("SELECT synced_at FROM function_analysis WHERE content_hash = 'hash1' AND analysis_type = 'documentation'")
55
- .get();
56
- db.close();
57
- (0, vitest_1.expect)(row.synced_at).toBe(1000);
58
- });
59
- (0, vitest_1.it)("different user — resets synced_at to NULL and clears GC", async () => {
60
- const { rootPath, dbPath } = tracked((0, test_utils_1.createTempDb)((db) => {
61
- db.prepare("INSERT INTO sync_meta (key, value) VALUES ('last_synced_user_id', ?)").run(USER_A);
62
- (0, test_utils_2.insertAnalysisPair)(db, "hash1", { syncedAt: 1000 });
63
- db.prepare("INSERT INTO function_gc (content_hash, analysis_type, gc_at) VALUES ('old', 'documentation', 999)").run();
64
- }));
65
- const mock = (0, test_utils_1.createMockSupabase)({
66
- "repos.upsert": defaultRepoResponse(),
67
- });
68
- await (0, sync_1.syncToSupabase)(rootPath, mock.client, USER_B);
69
- const db = new better_sqlite3_1.default(dbPath);
70
- // synced_at should be NULL (reset) then re-set by the push
71
- const meta = db
72
- .prepare("SELECT value FROM sync_meta WHERE key = 'last_synced_user_id'")
73
- .get();
74
- const gcCount = db
75
- .prepare("SELECT count(*) as c FROM function_gc")
76
- .get();
77
- db.close();
78
- (0, vitest_1.expect)(meta.value).toBe(USER_B);
79
- (0, vitest_1.expect)(gcCount.c).toBe(0);
80
- });
81
- });
82
- (0, vitest_1.describe)("push batching", () => {
83
- (0, vitest_1.it)("pushes only unsynced rows (synced_at IS NULL)", async () => {
84
- const { rootPath } = tracked((0, test_utils_1.createTempDb)((db) => {
85
- (0, test_utils_2.insertAnalysisPair)(db, "hash-synced", { syncedAt: 1000 });
86
- (0, test_utils_2.insertAnalysisPair)(db, "hash-unsynced", { syncedAt: null });
87
- }));
88
- const mock = (0, test_utils_1.createMockSupabase)({
89
- "repos.upsert": defaultRepoResponse(),
90
- });
91
- const result = await (0, sync_1.syncToSupabase)(rootPath, mock.client, USER_A);
92
- // Only hash-unsynced's 2 rows (doc + sec) should be pushed
93
- (0, vitest_1.expect)(result.pushed).toBe(2);
94
- });
95
- (0, vitest_1.it)("marks synced_at after push", async () => {
96
- const { rootPath, dbPath } = tracked((0, test_utils_1.createTempDb)((db) => {
97
- (0, test_utils_2.insertAnalysisPair)(db, "hash1", { syncedAt: null });
98
- }));
99
- const mock = (0, test_utils_1.createMockSupabase)({
100
- "repos.upsert": defaultRepoResponse(),
101
- });
102
- await (0, sync_1.syncToSupabase)(rootPath, mock.client, USER_A);
103
- const db = new better_sqlite3_1.default(dbPath);
104
- const rows = db
105
- .prepare("SELECT synced_at FROM function_analysis WHERE content_hash = 'hash1'")
106
- .all();
107
- db.close();
108
- for (const row of rows) {
109
- (0, vitest_1.expect)(row.synced_at).not.toBeNull();
110
- (0, vitest_1.expect)(row.synced_at).toBeGreaterThan(0);
111
- }
112
- });
113
- });
114
- (0, vitest_1.describe)("location full-sync", () => {
115
- (0, vitest_1.it)("DELETE + INSERT pattern for file_functions", async () => {
116
- const { rootPath } = tracked((0, test_utils_1.createTempDb)((db) => {
117
- (0, test_utils_2.insertFileFunction)(db, "src/a.ts", "fn1", "hash1");
118
- (0, test_utils_2.insertFileFunction)(db, "src/b.ts", "fn2", "hash2");
119
- }));
120
- const mock = (0, test_utils_1.createMockSupabase)({
121
- "repos.upsert": defaultRepoResponse(),
122
- });
123
- const result = await (0, sync_1.syncToSupabase)(rootPath, mock.client, USER_A);
124
- (0, vitest_1.expect)(result.locations).toBe(2);
125
- // Verify DELETE was called before INSERT
126
- const locationCalls = mock.calls.filter((c) => c.table === "function_locations");
127
- const deleteIdx = locationCalls.findIndex((c) => c.method === "delete");
128
- const insertIdx = locationCalls.findIndex((c) => c.method === "insert");
129
- (0, vitest_1.expect)(deleteIdx).toBeLessThan(insertIdx);
130
- });
131
- (0, vitest_1.it)("empty file_functions — only deletes remote", async () => {
132
- const { rootPath } = tracked((0, test_utils_1.createTempDb)());
133
- const mock = (0, test_utils_1.createMockSupabase)({
134
- "repos.upsert": defaultRepoResponse(),
135
- });
136
- const result = await (0, sync_1.syncToSupabase)(rootPath, mock.client, USER_A);
137
- (0, vitest_1.expect)(result.locations).toBe(0);
138
- // Should still call delete (clear remote)
139
- const deleteCalls = mock.calls.filter((c) => c.table === "function_locations" && c.method === "delete");
140
- (0, vitest_1.expect)(deleteCalls.length).toBe(1);
141
- // Should NOT call insert
142
- const insertCalls = mock.calls.filter((c) => c.table === "function_locations" && c.method === "insert");
143
- (0, vitest_1.expect)(insertCalls.length).toBe(0);
144
- });
145
- });
146
- (0, vitest_1.describe)("GC tombstone propagation", () => {
147
- (0, vitest_1.it)("processes unsynced GC entries and cleans them up", async () => {
148
- const { rootPath, dbPath } = tracked((0, test_utils_1.createTempDb)((db) => {
149
- db.prepare("INSERT INTO function_gc (content_hash, analysis_type, gc_at) VALUES (?, ?, ?)").run("dead-hash", "documentation", Math.floor(Date.now() / 1000));
150
- }));
151
- const mock = (0, test_utils_1.createMockSupabase)({
152
- "repos.upsert": defaultRepoResponse(),
153
- });
154
- const result = await (0, sync_1.syncToSupabase)(rootPath, mock.client, USER_A);
155
- (0, vitest_1.expect)(result.gcProcessed).toBe(1);
156
- // GC rows should be cleaned up (synced then deleted)
157
- const db = new better_sqlite3_1.default(dbPath);
158
- const gcCount = db
159
- .prepare("SELECT count(*) as c FROM function_gc")
160
- .get();
161
- db.close();
162
- (0, vitest_1.expect)(gcCount.c).toBe(0);
163
- });
164
- (0, vitest_1.it)("type-specific tombstone — deletes specific analysis_type", async () => {
165
- const { rootPath } = tracked((0, test_utils_1.createTempDb)((db) => {
166
- db.prepare("INSERT INTO function_gc (content_hash, analysis_type, gc_at) VALUES (?, ?, ?)").run("dead-hash", "security", Math.floor(Date.now() / 1000));
167
- }));
168
- const mock = (0, test_utils_1.createMockSupabase)({
169
- "repos.upsert": defaultRepoResponse(),
170
- });
171
- await (0, sync_1.syncToSupabase)(rootPath, mock.client, USER_A);
172
- // Should delete with analysis_type filter
173
- const deleteCalls = mock.calls.filter((c) => c.table === "function_analysis" && c.method === "delete");
174
- (0, vitest_1.expect)(deleteCalls.length).toBe(1);
175
- // Should have eq('analysis_type', 'security') in the chain
176
- const eqCalls = mock.calls.filter((c) => c.table === "function_analysis" &&
177
- c.method === "eq" &&
178
- c.args[0] === "analysis_type");
179
- (0, vitest_1.expect)(eqCalls.length).toBe(1);
180
- (0, vitest_1.expect)(eqCalls[0].args[1]).toBe("security");
181
- });
182
- (0, vitest_1.it)("NULL analysis_type (legacy) — deletes all types for hash", async () => {
183
- const { rootPath } = tracked((0, test_utils_1.createTempDb)((db) => {
184
- db.prepare("INSERT INTO function_gc (content_hash, analysis_type, gc_at) VALUES (?, ?, ?)").run("dead-hash", null, Math.floor(Date.now() / 1000));
185
- }));
186
- const mock = (0, test_utils_1.createMockSupabase)({
187
- "repos.upsert": defaultRepoResponse(),
188
- });
189
- await (0, sync_1.syncToSupabase)(rootPath, mock.client, USER_A);
190
- // Should delete WITHOUT analysis_type filter (no eq('analysis_type', ...))
191
- const eqCalls = mock.calls.filter((c) => c.table === "function_analysis" &&
192
- c.method === "eq" &&
193
- c.args[0] === "analysis_type");
194
- (0, vitest_1.expect)(eqCalls.length).toBe(0);
195
- });
196
- });
197
- });
198
- // ============ pullFromSupabase ============
199
- (0, vitest_1.describe)("pullFromSupabase", () => {
200
- const cleanups = [];
201
- function tracked(result) {
202
- cleanups.push(result.cleanup);
203
- return result;
204
- }
205
- (0, vitest_1.afterEach)(() => {
206
- cleanups.forEach((fn) => fn());
207
- cleanups.length = 0;
208
- });
209
- (0, vitest_1.it)("empty missing hashes — returns early with 0", async () => {
210
- const { rootPath } = tracked((0, test_utils_1.createTempDb)());
211
- const mock = (0, test_utils_1.createMockSupabase)();
212
- const result = await (0, sync_1.pullFromSupabase)(rootPath, mock.client, USER_A, REPO_ID, []);
213
- (0, vitest_1.expect)(result.pulled).toBe(0);
214
- // Should NOT call supabase at all
215
- (0, vitest_1.expect)(mock.calls.length).toBe(0);
216
- });
217
- (0, vitest_1.it)("applies defensive defaults for missing fields", async () => {
218
- const { rootPath, dbPath } = tracked((0, test_utils_1.createTempDb)());
219
- const mock = (0, test_utils_1.createMockSupabase)({
220
- "function_analysis.select": {
221
- data: [
222
- {
223
- content_hash: "hash1",
224
- analysis_type: "documentation",
225
- analysis: { description: "Test" },
226
- model_version: "claude-3",
227
- schema_version: null, // missing — should default to 1
228
- language: null, // missing — should default to 'typescript'
229
- entity_type: null, // missing — should default to 'function'
230
- },
231
- ],
232
- error: null,
233
- },
234
- });
235
- const result = await (0, sync_1.pullFromSupabase)(rootPath, mock.client, USER_A, REPO_ID, ["hash1"]);
236
- (0, vitest_1.expect)(result.pulled).toBe(1);
237
- // Verify defaults were applied
238
- const db = new better_sqlite3_1.default(dbPath);
239
- const row = db
240
- .prepare("SELECT language, entity_type, schema_version FROM function_analysis WHERE content_hash = 'hash1'")
241
- .get();
242
- db.close();
243
- (0, vitest_1.expect)(row.language).toBe("typescript");
244
- (0, vitest_1.expect)(row.entity_type).toBe("function");
245
- (0, vitest_1.expect)(row.schema_version).toBe(1);
246
- });
247
- (0, vitest_1.it)("handles string vs object analysis field", async () => {
248
- const { rootPath, dbPath } = tracked((0, test_utils_1.createTempDb)());
249
- const mock = (0, test_utils_1.createMockSupabase)({
250
- "function_analysis.select": {
251
- data: [
252
- {
253
- content_hash: "hash-obj",
254
- analysis_type: "documentation",
255
- analysis: { description: "Object form" }, // object — should be stringified
256
- model_version: "claude-3",
257
- schema_version: 1,
258
- language: "typescript",
259
- entity_type: "function",
260
- },
261
- {
262
- content_hash: "hash-str",
263
- analysis_type: "documentation",
264
- analysis: '{"description":"String form"}', // already string
265
- model_version: "claude-3",
266
- schema_version: 1,
267
- language: "typescript",
268
- entity_type: "function",
269
- },
270
- ],
271
- error: null,
272
- },
273
- });
274
- const result = await (0, sync_1.pullFromSupabase)(rootPath, mock.client, USER_A, REPO_ID, ["hash-obj", "hash-str"]);
275
- (0, vitest_1.expect)(result.pulled).toBe(2);
276
- // Both should be stored as valid JSON strings
277
- const db = new better_sqlite3_1.default(dbPath);
278
- const objRow = db
279
- .prepare("SELECT analysis FROM function_analysis WHERE content_hash = 'hash-obj'")
280
- .get();
281
- const strRow = db
282
- .prepare("SELECT analysis FROM function_analysis WHERE content_hash = 'hash-str'")
283
- .get();
284
- db.close();
285
- (0, vitest_1.expect)(JSON.parse(objRow.analysis).description).toBe("Object form");
286
- (0, vitest_1.expect)(JSON.parse(strRow.analysis).description).toBe("String form");
287
- });
288
- });