@ophan/cli 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +82 -0
- package/dist/auth.js +239 -0
- package/dist/index.js +245 -0
- package/dist/sync.js +255 -0
- package/dist/sync.test.js +288 -0
- package/dist/test-utils.js +161 -0
- package/dist/watch.js +247 -0
- package/package.json +27 -0
package/dist/sync.js
ADDED
@@ -0,0 +1,255 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.syncToSupabase = syncToSupabase;
exports.pullFromSupabase = pullFromSupabase;
const better_sqlite3_1 = __importDefault(require("better-sqlite3"));
const child_process_1 = require("child_process");
const path = __importStar(require("path"));
const fs = __importStar(require("fs"));
const core_1 = require("@ophan/core");
function getGitRemoteUrl(rootPath) {
    try {
        return ((0, child_process_1.execSync)("git remote get-url origin", {
            cwd: rootPath,
            encoding: "utf8",
            stdio: ["pipe", "pipe", "pipe"],
        }).trim() || null);
    }
    catch {
        return null;
    }
}
async function syncToSupabase(rootPath, supabase, userId, onProgress) {
    const dbPath = path.join(rootPath, ".ophan", "index.db");
    if (!fs.existsSync(dbPath)) {
        throw new Error(`No analysis database found at ${dbPath}\n  Run \`ophan analyze\` first.`);
    }
    const db = new better_sqlite3_1.default(dbPath);
    // Ensure sync_meta table exists (may not if DB was created by older version)
    db.exec("CREATE TABLE IF NOT EXISTS sync_meta (key TEXT PRIMARY KEY, value TEXT NOT NULL)");
    try {
        // 0. Detect user change — if different user, reset sync state
        const lastUser = db
            .prepare("SELECT value FROM sync_meta WHERE key = 'last_synced_user_id'")
            .get();
        if (lastUser && lastUser.value !== userId) {
            onProgress?.(`Account changed (was ${lastUser.value.slice(0, 8)}...). Re-syncing all data...`);
            db.exec("UPDATE function_analysis SET synced_at = NULL");
            db.exec("DELETE FROM function_gc");
        }
        // Update last synced user
        db.prepare("INSERT OR REPLACE INTO sync_meta (key, value) VALUES ('last_synced_user_id', ?)").run(userId);
        // 1. Ensure repo exists
        const repoName = path.basename(rootPath);
        const remoteUrl = getGitRemoteUrl(rootPath);
        onProgress?.("Registering repo...");
        const { data: repo, error: repoError } = await supabase
            .from("repos")
            .upsert({
                user_id: userId,
                name: repoName,
                remote_url: remoteUrl,
            }, { onConflict: "user_id,name" })
            .select("id")
            .single();
        if (repoError) {
            throw new Error(`Failed to register repo: ${repoError.message}`);
        }
        const repoId = repo.id;
        // 2. Push unsynced analysis (now includes analysis_type + schema_version)
        onProgress?.("Pushing analysis...");
        const unsynced = db
            .prepare(`SELECT content_hash, analysis_type, analysis, model_version, schema_version, language, entity_type, created_at
                FROM function_analysis WHERE synced_at IS NULL`)
            .all();
        if (unsynced.length > 0) {
            const rows = unsynced.map((row) => ({
                content_hash: row.content_hash,
                analysis_type: row.analysis_type,
                repo_id: repoId,
                user_id: userId,
                analysis: JSON.parse(row.analysis),
                model_version: row.model_version,
                schema_version: row.schema_version,
                language: row.language,
                entity_type: row.entity_type,
            }));
            // Batch upsert in chunks of 500 (Supabase limit)
            for (let i = 0; i < rows.length; i += 500) {
                const chunk = rows.slice(i, i + 500);
                const { error } = await supabase
                    .from("function_analysis")
                    .upsert(chunk, { onConflict: "content_hash,repo_id,analysis_type" });
                if (error) {
                    throw new Error(`Failed to push analysis: ${error.message}`);
                }
            }
            // Mark as synced locally
            const now = Math.floor(Date.now() / 1000);
            const markSynced = db.prepare("UPDATE function_analysis SET synced_at = ? WHERE content_hash = ? AND analysis_type = ?");
            const markBatch = db.transaction(() => {
                for (const row of unsynced) {
                    markSynced.run(now, row.content_hash, row.analysis_type);
                }
            });
            markBatch();
        }
        // 3. Replace function_locations (full sync each time)
        onProgress?.("Syncing file locations...");
        const { error: delError } = await supabase
            .from("function_locations")
            .delete()
            .eq("repo_id", repoId);
        if (delError) {
            throw new Error(`Failed to clear locations: ${delError.message}`);
        }
        const locations = db
            .prepare(`SELECT file_path, function_name, content_hash, language, entity_type
                FROM file_functions`)
            .all();
        if (locations.length > 0) {
            const locationRows = locations.map((row) => ({
                repo_id: repoId,
                user_id: userId,
                file_path: row.file_path,
                function_name: row.function_name,
                content_hash: row.content_hash,
                language: row.language,
                entity_type: row.entity_type,
            }));
            for (let i = 0; i < locationRows.length; i += 500) {
                const chunk = locationRows.slice(i, i + 500);
                const { error } = await supabase
                    .from("function_locations")
                    .insert(chunk);
                if (error) {
                    throw new Error(`Failed to push locations: ${error.message}`);
                }
            }
        }
        // 4. Process GC tombstones
        onProgress?.("Processing garbage collection...");
        const gcRows = db
            .prepare("SELECT content_hash, analysis_type FROM function_gc WHERE synced_at IS NULL")
            .all();
        if (gcRows.length > 0) {
            for (const row of gcRows) {
                if (row.analysis_type) {
                    // Delete specific analysis type
                    await supabase
                        .from("function_analysis")
                        .delete()
                        .eq("content_hash", row.content_hash)
                        .eq("repo_id", repoId)
                        .eq("analysis_type", row.analysis_type);
                }
                else {
                    // NULL analysis_type means delete all types (legacy GC entry)
                    await supabase
                        .from("function_analysis")
                        .delete()
                        .eq("content_hash", row.content_hash)
                        .eq("repo_id", repoId);
                }
                await supabase
                    .from("function_locations")
                    .delete()
                    .eq("content_hash", row.content_hash)
                    .eq("repo_id", repoId);
            }
            // Mark GC rows as synced locally, then delete them
            const now = Math.floor(Date.now() / 1000);
            db.transaction(() => {
                db.prepare("UPDATE function_gc SET synced_at = ? WHERE synced_at IS NULL").run(now);
                db.prepare("DELETE FROM function_gc WHERE synced_at IS NOT NULL").run();
            })();
        }
        return {
            pushed: unsynced.length,
            locations: locations.length,
            gcProcessed: gcRows.length,
        };
    }
    finally {
        db.close();
    }
}
// ============ PULL SYNC ============
/**
 * Pull analysis from Supabase for content hashes missing locally.
 * Used before `analyzeRepository()` to avoid re-running expensive Claude analysis.
 */
async function pullFromSupabase(rootPath, supabase, userId, repoId, missingHashes, onProgress) {
    if (missingHashes.length === 0)
        return { pulled: 0 };
    const dbPath = path.join(rootPath, ".ophan", "index.db");
    onProgress?.(`Pulling ${missingHashes.length} cached analyses from cloud...`);
    const allRows = [];
    // Batch queries in chunks of 200 to avoid URL length limits
    for (let i = 0; i < missingHashes.length; i += 200) {
        const chunk = missingHashes.slice(i, i + 200);
        const { data, error } = await supabase
            .from("function_analysis")
            .select("content_hash, analysis_type, analysis, model_version, schema_version, language, entity_type")
            .eq("repo_id", repoId)
            .in("content_hash", chunk);
        if (error) {
            onProgress?.(`Warning: pull failed: ${error.message}`);
            continue;
        }
        if (data) {
            for (const row of data) {
                allRows.push({
                    content_hash: row.content_hash,
                    analysis_type: row.analysis_type,
                    analysis: typeof row.analysis === "string" ? row.analysis : JSON.stringify(row.analysis),
                    model_version: row.model_version,
                    schema_version: row.schema_version ?? 1,
                    language: row.language ?? "typescript",
                    entity_type: row.entity_type ?? "function",
                });
            }
        }
    }
    if (allRows.length === 0)
        return { pulled: 0 };
    const imported = (0, core_1.importAnalysis)(dbPath, allRows);
    onProgress?.(`Pulled ${imported} analyses from cloud`);
    return { pulled: imported };
}
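For orientation, here is a minimal sketch of how the two exports above might be driven. It is not part of the package: the require path, environment variable names, and the way userId, repoId, and missingHashes are obtained (presumably via dist/auth.js and the local index) are assumptions for illustration. createClient is the standard @supabase/supabase-js entry point.

// Hypothetical driver for dist/sync.js (illustration only; not shipped in the package).
const { createClient } = require("@supabase/supabase-js");
const { syncToSupabase, pullFromSupabase } = require("./sync"); // path is an assumption

async function main() {
    // Assumed configuration; the real CLI presumably wires these up via dist/auth.js.
    const supabase = createClient(process.env.SUPABASE_URL, process.env.SUPABASE_ANON_KEY);
    const rootPath = process.cwd();
    const userId = process.env.OPHAN_USER_ID; // placeholder for the authenticated user's id

    // Push local analysis rows, file locations, and GC tombstones to Supabase.
    const result = await syncToSupabase(rootPath, supabase, userId, (msg) => console.log(msg));
    console.log(`pushed=${result.pushed} locations=${result.locations} gcProcessed=${result.gcProcessed}`);

    // Before re-analyzing, one could pull analyses the cloud already has for hashes
    // missing locally; repoId and missingHashes would come from the repos table and
    // the local index, respectively:
    // const { pulled } = await pullFromSupabase(rootPath, supabase, userId, repoId, missingHashes, console.log);
}

main().catch((err) => {
    console.error(err.message);
    process.exit(1);
});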
package/dist/sync.test.js
ADDED
@@ -0,0 +1,288 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const vitest_1 = require("vitest");
const better_sqlite3_1 = __importDefault(require("better-sqlite3"));
const test_utils_1 = require("./test-utils");
const sync_1 = require("./sync");
const test_utils_2 = require("@ophan/core/test-utils");
const USER_A = "user-aaaa-aaaa-aaaa-aaaaaaaaaaaa";
const USER_B = "user-bbbb-bbbb-bbbb-bbbbbbbbbbbb";
const REPO_ID = "repo-1";
function defaultRepoResponse() {
    return { data: { id: REPO_ID }, error: null };
}
// ============ syncToSupabase ============
(0, vitest_1.describe)("syncToSupabase", () => {
    const cleanups = [];
    function tracked(result) {
        cleanups.push(result.cleanup);
        return result;
    }
    (0, vitest_1.afterEach)(() => {
        cleanups.forEach((fn) => fn());
        cleanups.length = 0;
    });
    (0, vitest_1.describe)("account change detection", () => {
        (0, vitest_1.it)("first sync — stores user_id, no reset", async () => {
            const { rootPath, dbPath } = tracked((0, test_utils_1.createTempDb)());
            const mock = (0, test_utils_1.createMockSupabase)({
                "repos.upsert": defaultRepoResponse(),
            });
            await (0, sync_1.syncToSupabase)(rootPath, mock.client, USER_A);
            const db = new better_sqlite3_1.default(dbPath);
            const row = db
                .prepare("SELECT value FROM sync_meta WHERE key = 'last_synced_user_id'")
                .get();
            db.close();
            (0, vitest_1.expect)(row.value).toBe(USER_A);
        });
        (0, vitest_1.it)("same user re-sync — no reset of synced_at", async () => {
            const { rootPath, dbPath } = tracked((0, test_utils_1.createTempDb)((db) => {
                db.prepare("INSERT INTO sync_meta (key, value) VALUES ('last_synced_user_id', ?)").run(USER_A);
                (0, test_utils_2.insertAnalysisPair)(db, "hash1", { syncedAt: 1000 });
            }));
            const mock = (0, test_utils_1.createMockSupabase)({
                "repos.upsert": defaultRepoResponse(),
            });
            await (0, sync_1.syncToSupabase)(rootPath, mock.client, USER_A);
            // synced_at should still be set (not reset to NULL)
            const db = new better_sqlite3_1.default(dbPath);
            const row = db
                .prepare("SELECT synced_at FROM function_analysis WHERE content_hash = 'hash1' AND analysis_type = 'documentation'")
                .get();
            db.close();
            (0, vitest_1.expect)(row.synced_at).toBe(1000);
        });
        (0, vitest_1.it)("different user — resets synced_at to NULL and clears GC", async () => {
            const { rootPath, dbPath } = tracked((0, test_utils_1.createTempDb)((db) => {
                db.prepare("INSERT INTO sync_meta (key, value) VALUES ('last_synced_user_id', ?)").run(USER_A);
                (0, test_utils_2.insertAnalysisPair)(db, "hash1", { syncedAt: 1000 });
                db.prepare("INSERT INTO function_gc (content_hash, analysis_type, gc_at) VALUES ('old', 'documentation', 999)").run();
            }));
            const mock = (0, test_utils_1.createMockSupabase)({
                "repos.upsert": defaultRepoResponse(),
            });
            await (0, sync_1.syncToSupabase)(rootPath, mock.client, USER_B);
            const db = new better_sqlite3_1.default(dbPath);
            // synced_at should be NULL (reset) then re-set by the push
            const meta = db
                .prepare("SELECT value FROM sync_meta WHERE key = 'last_synced_user_id'")
                .get();
            const gcCount = db
                .prepare("SELECT count(*) as c FROM function_gc")
                .get();
            db.close();
            (0, vitest_1.expect)(meta.value).toBe(USER_B);
            (0, vitest_1.expect)(gcCount.c).toBe(0);
        });
    });
    (0, vitest_1.describe)("push batching", () => {
        (0, vitest_1.it)("pushes only unsynced rows (synced_at IS NULL)", async () => {
            const { rootPath } = tracked((0, test_utils_1.createTempDb)((db) => {
                (0, test_utils_2.insertAnalysisPair)(db, "hash-synced", { syncedAt: 1000 });
                (0, test_utils_2.insertAnalysisPair)(db, "hash-unsynced", { syncedAt: null });
            }));
            const mock = (0, test_utils_1.createMockSupabase)({
                "repos.upsert": defaultRepoResponse(),
            });
            const result = await (0, sync_1.syncToSupabase)(rootPath, mock.client, USER_A);
            // Only hash-unsynced's 2 rows (doc + sec) should be pushed
            (0, vitest_1.expect)(result.pushed).toBe(2);
        });
        (0, vitest_1.it)("marks synced_at after push", async () => {
            const { rootPath, dbPath } = tracked((0, test_utils_1.createTempDb)((db) => {
                (0, test_utils_2.insertAnalysisPair)(db, "hash1", { syncedAt: null });
            }));
            const mock = (0, test_utils_1.createMockSupabase)({
                "repos.upsert": defaultRepoResponse(),
            });
            await (0, sync_1.syncToSupabase)(rootPath, mock.client, USER_A);
            const db = new better_sqlite3_1.default(dbPath);
            const rows = db
                .prepare("SELECT synced_at FROM function_analysis WHERE content_hash = 'hash1'")
                .all();
            db.close();
            for (const row of rows) {
                (0, vitest_1.expect)(row.synced_at).not.toBeNull();
                (0, vitest_1.expect)(row.synced_at).toBeGreaterThan(0);
            }
        });
    });
    (0, vitest_1.describe)("location full-sync", () => {
        (0, vitest_1.it)("DELETE + INSERT pattern for file_functions", async () => {
            const { rootPath } = tracked((0, test_utils_1.createTempDb)((db) => {
                (0, test_utils_2.insertFileFunction)(db, "src/a.ts", "fn1", "hash1");
                (0, test_utils_2.insertFileFunction)(db, "src/b.ts", "fn2", "hash2");
            }));
            const mock = (0, test_utils_1.createMockSupabase)({
                "repos.upsert": defaultRepoResponse(),
            });
            const result = await (0, sync_1.syncToSupabase)(rootPath, mock.client, USER_A);
            (0, vitest_1.expect)(result.locations).toBe(2);
            // Verify DELETE was called before INSERT
            const locationCalls = mock.calls.filter((c) => c.table === "function_locations");
            const deleteIdx = locationCalls.findIndex((c) => c.method === "delete");
            const insertIdx = locationCalls.findIndex((c) => c.method === "insert");
            (0, vitest_1.expect)(deleteIdx).toBeLessThan(insertIdx);
        });
        (0, vitest_1.it)("empty file_functions — only deletes remote", async () => {
            const { rootPath } = tracked((0, test_utils_1.createTempDb)());
            const mock = (0, test_utils_1.createMockSupabase)({
                "repos.upsert": defaultRepoResponse(),
            });
            const result = await (0, sync_1.syncToSupabase)(rootPath, mock.client, USER_A);
            (0, vitest_1.expect)(result.locations).toBe(0);
            // Should still call delete (clear remote)
            const deleteCalls = mock.calls.filter((c) => c.table === "function_locations" && c.method === "delete");
            (0, vitest_1.expect)(deleteCalls.length).toBe(1);
            // Should NOT call insert
            const insertCalls = mock.calls.filter((c) => c.table === "function_locations" && c.method === "insert");
            (0, vitest_1.expect)(insertCalls.length).toBe(0);
        });
    });
    (0, vitest_1.describe)("GC tombstone propagation", () => {
        (0, vitest_1.it)("processes unsynced GC entries and cleans them up", async () => {
            const { rootPath, dbPath } = tracked((0, test_utils_1.createTempDb)((db) => {
                db.prepare("INSERT INTO function_gc (content_hash, analysis_type, gc_at) VALUES (?, ?, ?)").run("dead-hash", "documentation", Math.floor(Date.now() / 1000));
            }));
            const mock = (0, test_utils_1.createMockSupabase)({
                "repos.upsert": defaultRepoResponse(),
            });
            const result = await (0, sync_1.syncToSupabase)(rootPath, mock.client, USER_A);
            (0, vitest_1.expect)(result.gcProcessed).toBe(1);
            // GC rows should be cleaned up (synced then deleted)
            const db = new better_sqlite3_1.default(dbPath);
            const gcCount = db
                .prepare("SELECT count(*) as c FROM function_gc")
                .get();
            db.close();
            (0, vitest_1.expect)(gcCount.c).toBe(0);
        });
        (0, vitest_1.it)("type-specific tombstone — deletes specific analysis_type", async () => {
            const { rootPath } = tracked((0, test_utils_1.createTempDb)((db) => {
                db.prepare("INSERT INTO function_gc (content_hash, analysis_type, gc_at) VALUES (?, ?, ?)").run("dead-hash", "security", Math.floor(Date.now() / 1000));
            }));
            const mock = (0, test_utils_1.createMockSupabase)({
                "repos.upsert": defaultRepoResponse(),
            });
            await (0, sync_1.syncToSupabase)(rootPath, mock.client, USER_A);
            // Should delete with analysis_type filter
            const deleteCalls = mock.calls.filter((c) => c.table === "function_analysis" && c.method === "delete");
            (0, vitest_1.expect)(deleteCalls.length).toBe(1);
            // Should have eq('analysis_type', 'security') in the chain
            const eqCalls = mock.calls.filter((c) => c.table === "function_analysis" &&
                c.method === "eq" &&
                c.args[0] === "analysis_type");
            (0, vitest_1.expect)(eqCalls.length).toBe(1);
            (0, vitest_1.expect)(eqCalls[0].args[1]).toBe("security");
        });
        (0, vitest_1.it)("NULL analysis_type (legacy) — deletes all types for hash", async () => {
            const { rootPath } = tracked((0, test_utils_1.createTempDb)((db) => {
                db.prepare("INSERT INTO function_gc (content_hash, analysis_type, gc_at) VALUES (?, ?, ?)").run("dead-hash", null, Math.floor(Date.now() / 1000));
            }));
            const mock = (0, test_utils_1.createMockSupabase)({
                "repos.upsert": defaultRepoResponse(),
            });
            await (0, sync_1.syncToSupabase)(rootPath, mock.client, USER_A);
            // Should delete WITHOUT analysis_type filter (no eq('analysis_type', ...))
            const eqCalls = mock.calls.filter((c) => c.table === "function_analysis" &&
                c.method === "eq" &&
                c.args[0] === "analysis_type");
            (0, vitest_1.expect)(eqCalls.length).toBe(0);
        });
    });
});
// ============ pullFromSupabase ============
(0, vitest_1.describe)("pullFromSupabase", () => {
    const cleanups = [];
    function tracked(result) {
        cleanups.push(result.cleanup);
        return result;
    }
    (0, vitest_1.afterEach)(() => {
        cleanups.forEach((fn) => fn());
        cleanups.length = 0;
    });
    (0, vitest_1.it)("empty missing hashes — returns early with 0", async () => {
        const { rootPath } = tracked((0, test_utils_1.createTempDb)());
        const mock = (0, test_utils_1.createMockSupabase)();
        const result = await (0, sync_1.pullFromSupabase)(rootPath, mock.client, USER_A, REPO_ID, []);
        (0, vitest_1.expect)(result.pulled).toBe(0);
        // Should NOT call supabase at all
        (0, vitest_1.expect)(mock.calls.length).toBe(0);
    });
    (0, vitest_1.it)("applies defensive defaults for missing fields", async () => {
        const { rootPath, dbPath } = tracked((0, test_utils_1.createTempDb)());
        const mock = (0, test_utils_1.createMockSupabase)({
            "function_analysis.select": {
                data: [
                    {
                        content_hash: "hash1",
                        analysis_type: "documentation",
                        analysis: { description: "Test" },
                        model_version: "claude-3",
                        schema_version: null, // missing — should default to 1
                        language: null, // missing — should default to 'typescript'
                        entity_type: null, // missing — should default to 'function'
                    },
                ],
                error: null,
            },
        });
        const result = await (0, sync_1.pullFromSupabase)(rootPath, mock.client, USER_A, REPO_ID, ["hash1"]);
        (0, vitest_1.expect)(result.pulled).toBe(1);
        // Verify defaults were applied
        const db = new better_sqlite3_1.default(dbPath);
        const row = db
            .prepare("SELECT language, entity_type, schema_version FROM function_analysis WHERE content_hash = 'hash1'")
            .get();
        db.close();
        (0, vitest_1.expect)(row.language).toBe("typescript");
        (0, vitest_1.expect)(row.entity_type).toBe("function");
        (0, vitest_1.expect)(row.schema_version).toBe(1);
    });
    (0, vitest_1.it)("handles string vs object analysis field", async () => {
        const { rootPath, dbPath } = tracked((0, test_utils_1.createTempDb)());
        const mock = (0, test_utils_1.createMockSupabase)({
            "function_analysis.select": {
                data: [
                    {
                        content_hash: "hash-obj",
                        analysis_type: "documentation",
                        analysis: { description: "Object form" }, // object — should be stringified
                        model_version: "claude-3",
                        schema_version: 1,
                        language: "typescript",
                        entity_type: "function",
                    },
                    {
                        content_hash: "hash-str",
                        analysis_type: "documentation",
                        analysis: '{"description":"String form"}', // already string
                        model_version: "claude-3",
                        schema_version: 1,
                        language: "typescript",
                        entity_type: "function",
                    },
                ],
                error: null,
            },
        });
        const result = await (0, sync_1.pullFromSupabase)(rootPath, mock.client, USER_A, REPO_ID, ["hash-obj", "hash-str"]);
        (0, vitest_1.expect)(result.pulled).toBe(2);
        // Both should be stored as valid JSON strings
        const db = new better_sqlite3_1.default(dbPath);
        const objRow = db
            .prepare("SELECT analysis FROM function_analysis WHERE content_hash = 'hash-obj'")
            .get();
        const strRow = db
            .prepare("SELECT analysis FROM function_analysis WHERE content_hash = 'hash-str'")
            .get();
        db.close();
        (0, vitest_1.expect)(JSON.parse(objRow.analysis).description).toBe("Object form");
        (0, vitest_1.expect)(JSON.parse(strRow.analysis).description).toBe("String form");
    });
});
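The helpers createTempDb and createMockSupabase come from dist/test-utils.js, which is not reproduced in this diff. The assertions above do imply the mock's contract: canned responses are keyed by "table.firstMethod" (e.g. "repos.upsert", "function_analysis.select"), every chained call is recorded as { table, method, args }, and any point in the chain can be awaited. A rough sketch of that shape, offered as an inference rather than the actual implementation:

// Assumed shape of createMockSupabase, inferred from the tests above; the real
// implementation lives in dist/test-utils.js and may differ.
function createMockSupabase(responses = {}) {
    const calls = [];
    const client = {
        from(table) {
            let key = null; // first method called selects the canned response, e.g. "repos.upsert"
            const chain = {};
            const record = (method) => (...args) => {
                calls.push({ table, method, args });
                if (key === null) key = `${table}.${method}`;
                return chain;
            };
            for (const method of ["select", "insert", "upsert", "delete", "eq", "in", "single"]) {
                chain[method] = record(method);
            }
            // Supabase query builders are thenable, so `await` resolves the chain
            // with the canned response (or a benign default) at any point.
            chain.then = (resolve) => resolve(responses[key] ?? { data: null, error: null });
            return chain;
        },
    };
    return { client, calls };
}

Under a contract like this, a filter such as mock.calls.filter((c) => c.table === "function_locations" && c.method === "delete") matches exactly one entry per .delete() call, which is what the ordering and count assertions in the tests rely on.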