@ophan/cli 0.0.2 → 0.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/dist/auth.js +3 -3
- package/dist/index.js +381 -22
- package/dist/sync.js +300 -58
- package/dist/watch.js +190 -34
- package/ophan_logo.png +0 -0
- package/package.json +5 -3
package/README.md
CHANGED
@@ -35,7 +35,7 @@ npx @ophan/cli gc # Clean up old analysis entries
 
 ```bash
 npm install --save-dev @ophan/cli
-npx ophan analyze
+npx @ophan/cli analyze
 ```
 
 Add to your team's repo so everyone gets the CLI on `npm install`. Analysis is cached by content hash — unchanged functions are never re-analyzed.
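
A rough illustration of the content-hash caching the README describes (an editorial sketch only; `contentHash` and `analysisCache` are assumed names, not the package's API):

```ts
import { createHash } from "node:crypto";

// Sketch: a function body that hasn't changed hashes to the same key,
// so its cached analysis is reused instead of being re-analyzed.
const analysisCache = new Map<string, string>(); // hash -> analysis JSON

function contentHash(source: string): string {
  return createHash("sha256").update(source).digest("hex");
}

async function analyzeIfChanged(source: string, analyze: (src: string) => Promise<string>) {
  const key = contentHash(source);
  const cached = analysisCache.get(key);
  if (cached) return cached; // unchanged function: never re-analyzed
  const result = await analyze(source);
  analysisCache.set(key, result);
  return result;
}
```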
package/dist/auth.js
CHANGED
@@ -113,7 +113,7 @@ async function login(webappUrl, timeoutMs = 120000) {
 return new Promise((resolve, reject) => {
 const timeout = setTimeout(() => {
 server.close();
-reject(new Error("Login timed out. Please try again with `ophan login`."));
+reject(new Error("Login timed out. Please try again with `npx @ophan/cli login`."));
 }, timeoutMs);
 const server = http.createServer(async (req, res) => {
 const url = new URL(req.url || "/", `http://localhost:${port}`);
@@ -196,7 +196,7 @@ async function login(webappUrl, timeoutMs = 120000) {
 async function getAuthenticatedClient() {
 const creds = readCredentials();
 if (!creds) {
-throw new Error("Not logged in. Run `ophan login` first.");
+throw new Error("Not logged in. Run `npx @ophan/cli login` first.");
 }
 const supabase = (0, supabase_js_1.createClient)(creds.api_url, creds.api_key, {
 auth: {
@@ -224,7 +224,7 @@ async function getAuthenticatedClient() {
 refresh_token: creds.refresh_token,
 });
 if (error || !data.session) {
-throw new Error(`Session expired or invalid. Run \`ophan login\` again.\n ${error?.message || "No session returned"}`);
+throw new Error(`Session expired or invalid. Run \`npx @ophan/cli login\` again.\n ${error?.message || "No session returned"}`);
 }
 // Persist the new tokens so next invocation can use the fresh access_token
 // without consuming the refresh token again
package/dist/index.js
CHANGED
@@ -46,11 +46,16 @@ var __importStar = (this && this.__importStar) || (function () {
 return result;
 };
 })();
+var __importDefault = (this && this.__importDefault) || function (mod) {
+return (mod && mod.__esModule) ? mod : { "default": mod };
+};
 Object.defineProperty(exports, "__esModule", { value: true });
 require("dotenv/config");
 const commander_1 = require("commander");
 const core_1 = require("@ophan/core");
+const better_sqlite3_1 = __importDefault(require("better-sqlite3"));
 const path = __importStar(require("path"));
+const fs = __importStar(require("fs"));
 const auth_1 = require("./auth");
 const sync_1 = require("./sync");
 const watch_1 = require("./watch");
@@ -64,7 +69,7 @@ program
 * Returns undefined if user isn't logged in or repo doesn't exist in Supabase.
 * Silent — never fails the analyze command.
 */
-async function createPullFn(rootPath) {
+async function createPullFn(rootPath, onProgress) {
 const creds = (0, auth_1.readCredentials)();
 if (!creds)
 return undefined;
@@ -79,8 +84,14 @@ async function createPullFn(rootPath) {
 .single();
 if (!repo)
 return undefined;
+const log = onProgress ?? ((step) => console.log(`   ${step}`));
 return async (hashes) => {
-
+try {
+await (0, sync_1.pullFromSupabase)(rootPath, supabase, userId, repo.id, hashes, log);
+}
+catch {
+// Pull is a non-critical optimization — silently degrade
+}
 };
 }
 catch {
@@ -96,7 +107,13 @@ async function runAnalyze(rootPath) {
 process.stdout.cursorTo(0);
 }
 process.stdout.write(` [${current}/${total}] ${file}\n`);
-}, pullFn)
+}, pullFn, (analyzed, total) => {
+if (process.stdout.isTTY) {
+process.stdout.clearLine(0);
+process.stdout.cursorTo(0);
+}
+process.stdout.write(` Analyzing: ${analyzed}/${total} functions`);
+});
 console.log(`\n✅ Done! ${result.analyzed} analyzed, ${result.skipped} cached` +
 (result.pulled ? ` (${result.pulled} from cloud)` : "") +
 ` across ${result.files} files` +
@@ -115,10 +132,29 @@ program
 .description("Remove orphaned analysis entries (manual, safe for branch switching)")
 .option("-p, --path <path>", "Path to repository", process.cwd())
 .option("-d, --days <days>", "Grace period in days", "30")
+.option("-f, --force", "Run GC even when not on default branch")
 .action(async (options) => {
 const rootPath = path.resolve(options.path);
 const maxAgeDays = parseInt(options.days, 10);
 const dbPath = path.join(rootPath, ".ophan", "index.db");
+// Branch safety check — GC on a feature branch can orphan
+// hashes that only exist on the default branch
+const currentBranch = (0, sync_1.getGitBranch)(rootPath);
+if (currentBranch !== null) {
+const defaultBranch = (0, sync_1.getDefaultBranch)(rootPath);
+if (currentBranch !== defaultBranch && !options.force) {
+console.error(`⚠️ You're on branch '${currentBranch}', not '${defaultBranch}'.\n` +
+` Running GC on a feature branch can delete analysis for functions\n` +
+` that only exist on '${defaultBranch}'. If synced, these deletions\n` +
+` propagate to the cloud.\n\n` +
+` Switch to '${defaultBranch}' first, or use --force to proceed.`);
+process.exit(1);
+}
+if (currentBranch !== defaultBranch && options.force) {
+console.warn(`⚠️ Running GC on branch '${currentBranch}' (default: '${defaultBranch}').\n` +
+` Proceeding with --force.\n`);
+}
+}
 console.log("🧹 Refreshing file index...\n");
 await (0, core_1.refreshFileIndex)(rootPath, (current, total, file) => {
 if (process.stdout.isTTY) {
@@ -144,7 +180,7 @@ program
 try {
 await (0, auth_1.getAuthenticatedClient)();
 console.log(` Already logged in as ${existing.email}`);
-console.log(` Run \`ophan logout\` first to switch accounts.`);
+console.log(` Run \`npx @ophan/cli logout\` first to switch accounts.`);
 return;
 }
 catch {
@@ -172,21 +208,51 @@ program
 // ========== SYNC ==========
 program
 .command("sync")
-.description("Sync analysis
+.description("Sync analysis with ophan.dev (pull + push)")
 .option("-p, --path <path>", "Path to repository", process.cwd())
 .action(async (options) => {
 const rootPath = path.resolve(options.path);
+const dbPath = path.join(rootPath, ".ophan", "index.db");
 try {
 const { supabase, userId } = await (0, auth_1.getAuthenticatedClient)();
-console.log("☁️ Syncing
-
-
-
-
-(
-
-
+console.log("☁️ Syncing with ophan.dev...\n");
+// Pull first: fetch missing analysis from cloud
+let pulled = 0;
+const repoName = path.basename(rootPath);
+const { data: repo } = await supabase
+.from("repos")
+.select("id")
+.eq("user_id", userId)
+.eq("name", repoName)
+.single();
+if (repo) {
+const missingHashes = (0, core_1.findMissingHashes)(dbPath);
+if (missingHashes.length > 0) {
+const pullResult = await (0, sync_1.pullFromSupabase)(rootPath, supabase, userId, repo.id, missingHashes, (step) => console.log(` ${step}`));
+pulled = pullResult.pulled;
+}
+else {
+console.log(" No missing analyses to pull.");
+}
+}
+// Then push local analysis to cloud
+const result = await (0, sync_1.syncToSupabase)(rootPath, supabase, userId, (step) => console.log(` ${step}`));
+const parts = [
+`${pulled} pulled`,
+`${result.pushed} pushed`,
+`${result.locations} file locations synced`,
+];
+if (result.practices > 0)
+parts.push(`${result.practices} practices synced`);
+if (result.communities > 0)
+parts.push(`${result.communities} community memberships`);
+if (result.communityEdges > 0)
+parts.push(`${result.communityEdges} community edges`);
+if (result.summaries > 0)
+parts.push(`${result.summaries} summaries`);
+if (result.gcProcessed > 0)
+parts.push(`${result.gcProcessed} GC deletions applied`);
+console.log(`\n✅ Sync complete: ${parts.join(", ")}`);
 }
 catch (err) {
 console.error(`\n❌ Sync failed: ${err.message}`);
@@ -202,7 +268,21 @@ program
 .option("--sync", "Auto-sync to cloud after each analysis batch")
 .action(async (options) => {
 const rootPath = path.resolve(options.path);
-
+// Watch is long-running (used by extension host) — never crash on stray rejections.
+// Common cause: Supabase server unreachable during pull/sync operations.
+process.on("unhandledRejection", (reason) => {
+const msg = reason?.message ?? String(reason);
+if (options.json) {
+process.stdout.write(JSON.stringify({ event: "error", message: `Unhandled rejection: ${msg}` }) + "\n");
+}
+else {
+console.error(` ❌ Unexpected error: ${msg}`);
+}
+});
+const pullProgress = options.json
+? (step) => process.stdout.write(JSON.stringify({ event: "pull_progress", message: step }) + "\n")
+: undefined;
+const pullFn = await createPullFn(rootPath, pullProgress);
 let syncFn;
 if (options.sync) {
 const creds = (0, auth_1.readCredentials)();
@@ -213,18 +293,18 @@ program
 }
 catch {
 if (options.json) {
-process.stdout.write(JSON.stringify({ event: "sync_warning", message: "Session expired — sync disabled. Run ophan login." }) + "\n");
+process.stdout.write(JSON.stringify({ event: "sync_warning", message: "Session expired — sync disabled. Run npx @ophan/cli login." }) + "\n");
 }
 else {
-console.warn(" ⚠️ Session expired — sync disabled. Run `ophan login` to re-authenticate.");
+console.warn(" ⚠️ Session expired — sync disabled. Run `npx @ophan/cli login` to re-authenticate.");
 }
 }
 }
 else if (options.json) {
-process.stdout.write(JSON.stringify({ event: "sync_warning", message: "Not logged in — sync disabled. Run ophan login." }) + "\n");
+process.stdout.write(JSON.stringify({ event: "sync_warning", message: "Not logged in — sync disabled. Run npx @ophan/cli login." }) + "\n");
 }
 else {
-console.warn(" ⚠️ Not logged in — sync disabled. Run `ophan login` to enable cloud sync.");
+console.warn(" ⚠️ Not logged in — sync disabled. Run `npx @ophan/cli login` to enable cloud sync.");
 }
 }
 await (0, watch_1.startWatch)({
@@ -234,12 +314,291 @@ program
 json: options.json,
 });
 });
-//
+// ========== INIT ==========
 program
 .command("init")
-.description("
+.description("Initialize Ophan for this repository (fast, no network calls)")
 .option("-p, --path <path>", "Path to repository", process.cwd())
 .action(async (options) => {
-
+const rootPath = path.resolve(options.path);
+const dbPath = path.join(rootPath, ".ophan", "index.db");
+console.log("Initializing Ophan...\n");
+// Create .ophan dir + DB tables + gitignore
+fs.mkdirSync(path.join(rootPath, ".ophan"), { recursive: true });
+(0, core_1.ensureGitignore)(rootPath);
+(0, core_1.initDb)(dbPath).close();
+// Scan files and index all functions
+let fileCount = 0;
+await (0, core_1.refreshFileIndex)(rootPath, (current, total, file) => {
+fileCount = total;
+if (process.stdout.isTTY) {
+process.stdout.clearLine(0);
+process.stdout.cursorTo(0);
+}
+process.stdout.write(` [${current}/${total}] ${file}\n`);
+});
+// Count indexed functions
+const db = new better_sqlite3_1.default(dbPath, { readonly: true });
+const row = db.prepare("SELECT COUNT(*) as count FROM file_functions").get();
+db.close();
+console.log(`\n✅ Initialized! Found ${row.count} functions across ${fileCount} files.`);
+console.log(` Database: .ophan/index.db\n`);
+console.log(` Next: run \`npx @ophan/cli watch\` to start analysis.`);
+});
+// ========== GRAPH ==========
+program
+.command("graph")
+.description("Build function relationship graph and detect communities")
+.option("-p, --path <path>", "Path to repository", process.cwd())
+.option("-a, --algorithm <algorithm>", "Community detection algorithm (louvain, leiden, label-propagation)", core_1.DEFAULT_GRAPH_CONFIG.algorithm)
+.option("-r, --resolution <number>", "Louvain resolution parameter (higher = more communities)", String(core_1.DEFAULT_GRAPH_CONFIG.resolution))
+.option("--min-size <number>", "Minimum community size (smaller clusters are dissolved)", String(core_1.DEFAULT_GRAPH_CONFIG.minCommunitySize))
+.option("--max-size <number>", "Maximum community size (informational)", String(core_1.DEFAULT_GRAPH_CONFIG.maxCommunitySize))
+.option("--json", "Output results as JSON")
+.option("--summarize", "Generate documentation for detected communities using Claude")
+.option("--raw-source", "Use raw function source code for documentation (requires --summarize)")
+.option("--directory-decay <number>", "Directory distance decay factor (0=off, higher=stronger)", String(core_1.DEFAULT_GRAPH_CONFIG.directoryDecay))
+.option("--compare", "Compare multiple algorithm configurations side by side")
+.action(async (options) => {
+const rootPath = path.resolve(options.path);
+const dbPath = path.join(rootPath, ".ophan", "index.db");
+if (!fs.existsSync(dbPath)) {
+console.error("No .ophan/index.db found. Run `npx @ophan/cli init` first.");
+process.exit(1);
+}
+if (options.rawSource && !options.summarize) {
+console.error("--raw-source requires --summarize");
+process.exit(1);
+}
+const config = {
+algorithm: options.algorithm,
+edgeWeights: core_1.DEFAULT_GRAPH_CONFIG.edgeWeights,
+resolution: parseFloat(options.resolution),
+minCommunitySize: parseInt(options.minSize, 10),
+maxCommunitySize: parseInt(options.maxSize, 10),
+directoryDecay: parseFloat(options.directoryDecay),
+};
+if (!options.json) {
+console.log(`\nBuilding function graph (${config.algorithm}, resolution=${config.resolution})...\n`);
+}
+// Extract functions with relationship data
+const functions = await (0, core_1.extractAllFunctions)(rootPath, !options.json ? (current, total, file) => {
+if (process.stdout.isTTY) {
+process.stdout.clearLine(0);
+process.stdout.cursorTo(0);
+}
+process.stdout.write(` [${current}/${total}] ${file}`);
+} : undefined);
+if (!options.json) {
+console.log(`\n\n ${functions.length} functions extracted, resolving edges...\n`);
+}
+// Populate file_functions index from extracted functions (needed for edge resolution)
+const db = (0, core_1.initDb)(dbPath);
+// Snapshot old hashes before populateFileIndex overwrites file_functions
+// This enables incremental edge resolution (only recompute changed + neighbor edges)
+const oldHashes = new Set(db.prepare("SELECT DISTINCT content_hash FROM file_functions").all()
+.map((r) => r.content_hash));
+(0, core_1.populateFileIndex)(db, functions);
+if (!options.json) {
+console.log(` Indexed ${functions.length} functions, detecting communities...\n`);
+}
+// --compare mode: resolve edges once, run multiple algorithm configs, print table
+if (options.compare) {
+const resolver = rootPath ? (0, core_1.buildModuleResolver)(rootPath, functions) : undefined;
+const edges = (0, core_1.resolveEdges)(db, functions, config, undefined, resolver);
+(0, core_1.storeEdges)(db, edges);
+const edgesWithTransitive = (0, core_1.addTransitiveEdges)(edges, config);
+// Build hash→filePath map for directory distance decay
+let hashToFilePath;
+if (rootPath && (config.directoryDecay ?? 0) > 0) {
+hashToFilePath = new Map();
+for (const fn of functions)
+hashToFilePath.set(fn.contentHash, fn.filePath);
+}
+const allHashes = new Set(functions.map((f) => f.contentHash));
+const metrics = (0, core_1.runComparison)(edgesWithTransitive, config, core_1.DEFAULT_COMPARISONS, hashToFilePath, rootPath, allHashes);
+if (options.json) {
+console.log(JSON.stringify({ nodes: allHashes.size, edges: edgesWithTransitive.length, comparisons: metrics }));
+}
+else {
+console.log(` Algorithm Comparison (${allHashes.size} nodes, ${edgesWithTransitive.length} edges)\n`);
+// Table header
+const header = [
+"Configuration".padEnd(25),
+"Communities".padStart(12),
+"Dissolved".padStart(14),
+"Coverage".padStart(9),
+"Min".padStart(5),
+"Med".padStart(5),
+"Max".padStart(5),
+"Modularity".padStart(11),
+].join("");
+console.log(` ${header}`);
+console.log(` ${"─".repeat(header.length)}`);
+for (const m of metrics) {
+const dissolved = `${m.dissolvedCount} (${m.dissolvedPct.toFixed(0)}%)`;
+const modularity = m.modularity !== null ? m.modularity.toFixed(4) : "---";
+const row = [
+m.label.padEnd(25),
+String(m.communityCount).padStart(12),
+dissolved.padStart(14),
+`${m.coverage.toFixed(0)}%`.padStart(9),
+String(m.minSize).padStart(5),
+String(m.medianSize).padStart(5),
+String(m.maxSize).padStart(5),
+modularity.padStart(11),
+].join("");
+console.log(` ${row}`);
+}
+console.log();
+}
+db.close();
+return;
+}
+// Run full graph pipeline with hierarchical detection
+const hierResult = (0, core_1.detectHierarchicalCommunities)(db, functions, config, oldHashes, rootPath);
+const result = hierResult.l0;
+// Build source map for raw-source summarization mode
+let sourceMap;
+if (options.rawSource) {
+sourceMap = new Map();
+for (const fn of functions)
+sourceMap.set(fn.contentHash, fn.sourceCode);
+}
+if (options.json) {
+const output = {
+algorithm: config.algorithm,
+resolution: config.resolution,
+nodes: result.nodesInGraph,
+edges: result.edgesInGraph,
+communities: result.communityCount,
+dissolved: result.dissolvedCount,
+modularity: result.modularity,
+assignments: result.assignments,
+communityEdges: hierResult.communityEdges,
+l1Groups: hierResult.l1GroupCount,
+l1Assignments: hierResult.l1Assignments,
+};
+// Summarize if requested
+if (options.summarize) {
+const sumResult = await (0, core_1.summarizeCommunities)(db, {
+config: { algorithm: config.algorithm },
+sourceMap,
+rootPath,
+});
+const l1 = (0, core_1.loadAllSummaries)(db, config.algorithm, 1);
+const l2 = (0, core_1.loadAllSummaries)(db, config.algorithm, 2);
+const l3 = (0, core_1.loadAllSummaries)(db, config.algorithm, 3);
+const cc = (0, core_1.loadAllSummaries)(db, config.algorithm, 10);
+output.summaries = {
+l1: l1.map((s) => ({ communityId: s.communityId, ...s.summary })),
+l2: l2.map((s) => ({ communityId: s.communityId, ...s.summary })),
+l3: l3.map((s) => ({ communityId: s.communityId, ...s.summary })),
+crossCutting: cc.map((s) => ({ communityId: s.communityId, ...s.summary })),
+stats: sumResult,
+};
+}
+console.log(JSON.stringify(output));
+}
+else {
+console.log(` Nodes: ${result.nodesInGraph}`);
+console.log(` Edges: ${result.edgesInGraph}`);
+console.log(` Communities: ${result.communityCount}`);
+if (result.dissolvedCount > 0) {
+console.log(` Dissolved: ${result.dissolvedCount} (below min size ${config.minCommunitySize})`);
+}
+if (result.modularity !== null) {
+console.log(` Modularity: ${result.modularity.toFixed(4)}`);
+}
+if (result.effectiveResolution && result.effectiveResolution !== config.resolution) {
+console.log(` Resolution: ${result.effectiveResolution} (auto-scaled from ${config.resolution})`);
+}
+if (hierResult.l1GroupCount > 0) {
+console.log(` Groups: ${hierResult.l1GroupCount} (hierarchical)`);
+}
+if (hierResult.communityEdges.length > 0) {
+console.log(` Cross-edges: ${hierResult.communityEdges.length} (between communities)`);
+}
+// Show community breakdown
+if (result.communityCount > 0) {
+const communityMap = new Map();
+for (const a of result.assignments) {
+if (a.communityId === "__dissolved")
+continue;
+const members = communityMap.get(a.communityId) || [];
+members.push(a.contentHash);
+communityMap.set(a.communityId, members);
+}
+console.log(`\n Community breakdown:`);
+// Look up function names from DB for display
+const readDb = new better_sqlite3_1.default(dbPath, { readonly: true });
+const lookupName = readDb.prepare("SELECT function_name, file_path FROM file_functions WHERE content_hash = ? LIMIT 1");
+const sorted = [...communityMap.entries()].sort((a, b) => b[1].length - a[1].length);
+for (const [communityId, hashes] of sorted) {
+const overMax = hashes.length > config.maxCommunitySize;
+const flag = overMax ? " (large)" : "";
+console.log(`\n Community ${communityId}${flag} (${hashes.length} functions):`);
+for (const hash of hashes.slice(0, 15)) {
+const row = lookupName.get(hash);
+if (row) {
+const relPath = path.relative(rootPath, row.file_path);
+console.log(` ${row.function_name} (${relPath})`);
+}
+else {
+console.log(` ${hash.slice(0, 12)}...`);
+}
+}
+if (hashes.length > 15) {
+console.log(` ... and ${hashes.length - 15} more`);
+}
+}
+readDb.close();
+}
+// Summarize if requested
+if (options.summarize) {
+// Check if any function analysis exists (for a useful warning in non-raw mode)
+const analysisCount = db.prepare("SELECT COUNT(*) as count FROM function_analysis").get().count;
+if (!options.rawSource && analysisCount === 0) {
+console.log(" ⚠️ No function analysis found. Run `ophan analyze` first for best results.\n");
+}
+else if (result.communityCount > 0) {
+console.log(options.rawSource
+? "\n📝 Generating documentation from source code...\n"
+: "\n📝 Generating documentation...\n");
+const sumResult = await (0, core_1.summarizeCommunities)(db, {
+config: { algorithm: config.algorithm },
+onProgress: (step) => console.log(` ${step}`),
+sourceMap,
+rootPath,
+});
+const generated = sumResult.l1Summarized + sumResult.l2Summarized + sumResult.l3Summarized;
+const cached = sumResult.l1Cached + sumResult.l2Cached + sumResult.l3Cached;
+const driftSkipped = sumResult.l1DriftSkipped;
+const failed = sumResult.l1Failed ?? 0;
+const parts = [`${generated} generated`, `${cached} cached`];
+if (driftSkipped > 0)
+parts.push(`${driftSkipped} drift-skipped`);
+if (failed > 0)
+parts.push(`${failed} failed`);
+console.log(`\n✅ Documentation: ${parts.join(", ")}`);
+if (sumResult.ccDetected > 0) {
+console.log(` Cross-cutting: ${sumResult.ccDetected} detected, ${sumResult.ccSummarized} documented, ${sumResult.ccCached} cached`);
+const ccSummaries = (0, core_1.loadAllSummaries)(db, config.algorithm, 10);
+if (ccSummaries.length > 0) {
+console.log(`\n Cross-Cutting Concerns:`);
+for (const s of ccSummaries) {
+const sum = s.summary;
+const badge = sum.concernType === "security" ? "SEC" : "DATA";
+const communities = (sum.affectedCommunities || [])
+.map((c) => c.communityTitle).join(", ");
+console.log(` [${badge}] ${sum.title} (${communities})`);
+}
+}
+}
+}
+}
+console.log();
+}
+db.close();
 });
 program.parse();
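
The watch command above now installs a process-level rejection guard so a transient Supabase failure cannot crash the long-running watcher. Reduced to its core, the pattern looks roughly like this (a simplified sketch; the real handler also emits a JSON event when --json is set):

```ts
// Sketch: log unhandled promise rejections and keep the watcher alive.
process.on("unhandledRejection", (reason) => {
  const msg = reason instanceof Error ? reason.message : String(reason);
  console.error(`Unexpected error: ${msg}`);
});
```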
package/dist/sync.js
CHANGED
@@ -36,13 +36,67 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
+exports.getGitBranch = getGitBranch;
+exports.getGitCommitHash = getGitCommitHash;
+exports.getDefaultBranch = getDefaultBranch;
 exports.syncToSupabase = syncToSupabase;
 exports.pullFromSupabase = pullFromSupabase;
 const better_sqlite3_1 = __importDefault(require("better-sqlite3"));
 const child_process_1 = require("child_process");
 const path = __importStar(require("path"));
 const fs = __importStar(require("fs"));
+const p_limit_1 = __importDefault(require("p-limit"));
 const core_1 = require("@ophan/core");
+const supabaseLimit = (0, p_limit_1.default)(3);
+/** Run chunked Supabase operations in parallel with bounded concurrency. */
+async function parallelChunks(rows, chunkSize, fn) {
+const chunks = [];
+for (let i = 0; i < rows.length; i += chunkSize) {
+chunks.push(rows.slice(i, i + chunkSize));
+}
+await Promise.all(chunks.map((chunk) => supabaseLimit(() => fn(chunk))));
+}
+function getGitBranch(rootPath) {
+try {
+return ((0, child_process_1.execSync)("git rev-parse --abbrev-ref HEAD", {
+cwd: rootPath,
+encoding: "utf8",
+stdio: ["pipe", "pipe", "pipe"],
+}).trim() || null);
+}
+catch {
+return null;
+}
+}
+function getGitCommitHash(rootPath) {
+try {
+return ((0, child_process_1.execSync)("git rev-parse HEAD", {
+cwd: rootPath,
+encoding: "utf8",
+stdio: ["pipe", "pipe", "pipe"],
+}).trim() || null);
+}
+catch {
+return null;
+}
+}
+function getDefaultBranch(rootPath) {
+try {
+const ref = (0, child_process_1.execSync)("git symbolic-ref refs/remotes/origin/HEAD", {
+cwd: rootPath,
+encoding: "utf8",
+stdio: ["pipe", "pipe", "pipe"],
+}).trim();
+// ref = "refs/remotes/origin/main" → extract "main"
+const match = ref.match(/refs\/remotes\/origin\/(.+)/);
+if (match)
+return match[1];
+}
+catch {
+// No remote, shallow clone, or not a git repo
+}
+return "main";
+}
 function getGitRemoteUrl(rootPath) {
 try {
 return ((0, child_process_1.execSync)("git remote get-url origin", {
@@ -58,7 +112,7 @@ function getGitRemoteUrl(rootPath) {
 async function syncToSupabase(rootPath, supabase, userId, onProgress) {
 const dbPath = path.join(rootPath, ".ophan", "index.db");
 if (!fs.existsSync(dbPath)) {
-throw new Error(`No analysis database found at ${dbPath}\n Run \`ophan
+throw new Error(`No analysis database found at ${dbPath}\n Run \`npx @ophan/cli init\` first.`);
 }
 const db = new better_sqlite3_1.default(dbPath);
 // Ensure sync_meta table exists (may not if DB was created by older version)
@@ -86,12 +140,42 @@ async function syncToSupabase(rootPath, supabase, userId, onProgress) {
 name: repoName,
 remote_url: remoteUrl,
 }, { onConflict: "user_id,name" })
-.select("id")
+.select("id, docs_branch")
 .single();
 if (repoError) {
 throw new Error(`Failed to register repo: ${repoError.message}`);
 }
 const repoId = repo.id;
+const docsBranch = repo.docs_branch ?? "main";
+// Determine whether to sync architecture data (communities, edges, summaries).
+// Gate: only sync when on the configured docs branch.
+// Non-git repos (branch = null) bypass the gate — always sync.
+const currentBranch = getGitBranch(rootPath);
+const syncArchitecture = currentBranch === null || currentBranch === docsBranch;
+if (!syncArchitecture) {
+onProgress?.(`Skipping architecture sync (on '${currentBranch}', docs branch is '${docsBranch}')`);
+}
+// 1.5. Pull practices from Supabase → local SQLite
+onProgress?.("Syncing practices...");
+const { data: practiceRows } = await supabase
+.from("repo_practices")
+.select("practice_id, rule, severity")
+.eq("repo_id", repoId);
+const practicesSynced = practiceRows?.length ?? 0;
+if (practiceRows && practiceRows.length > 0) {
+(0, core_1.importPractices)(db, practiceRows.map((r) => ({
+id: r.practice_id,
+rule: r.rule,
+severity: r.severity,
+})));
+}
+else {
+// No practices defined — clear local table
+try {
+db.prepare("DELETE FROM practices").run();
+}
+catch { }
+}
 // 2. Push unsynced analysis (now includes analysis_type + schema_version)
 onProgress?.("Pushing analysis...");
 const unsynced = db
@@ -110,16 +194,15 @@ async function syncToSupabase(rootPath, supabase, userId, onProgress) {
 language: row.language,
 entity_type: row.entity_type,
 }));
-// Batch upsert in chunks of 500
-
-const chunk = rows.slice(i, i + 500);
+// Batch upsert in chunks of 500 with bounded concurrency
+await parallelChunks(rows, 500, async (chunk) => {
 const { error } = await supabase
 .from("function_analysis")
 .upsert(chunk, { onConflict: "content_hash,repo_id,analysis_type" });
 if (error) {
 throw new Error(`Failed to push analysis: ${error.message}`);
 }
-}
+});
 // Mark as synced locally
 const now = Math.floor(Date.now() / 1000);
 const markSynced = db.prepare("UPDATE function_analysis SET synced_at = ? WHERE content_hash = ? AND analysis_type = ?");
@@ -140,28 +223,37 @@ async function syncToSupabase(rootPath, supabase, userId, onProgress) {
 throw new Error(`Failed to clear locations: ${delError.message}`);
 }
 const locations = db
-.prepare(`SELECT file_path, function_name, content_hash, language, entity_type
-FROM file_functions
+.prepare(`SELECT file_path, function_name, content_hash, language, entity_type, start_line
+FROM file_functions
+GROUP BY file_path, function_name`)
 .all();
 if (locations.length > 0) {
+// Convert absolute paths to repo-relative for cloud storage
+// (local DB stores absolute paths for file I/O, but cloud paths
+// must be portable across machines)
+const rootPrefix = rootPath.endsWith(path.sep)
+? rootPath
+: rootPath + path.sep;
 const locationRows = locations.map((row) => ({
 repo_id: repoId,
 user_id: userId,
-file_path: row.file_path
+file_path: row.file_path.startsWith(rootPrefix)
+? row.file_path.slice(rootPrefix.length)
+: row.file_path,
 function_name: row.function_name,
 content_hash: row.content_hash,
 language: row.language,
 entity_type: row.entity_type,
+start_line: row.start_line,
 }));
-
-const chunk = locationRows.slice(i, i + 500);
+await parallelChunks(locationRows, 500, async (chunk) => {
 const { error } = await supabase
 .from("function_locations")
 .insert(chunk);
 if (error) {
 throw new Error(`Failed to push locations: ${error.message}`);
 }
-}
+});
 }
 // 4. Process GC tombstones
 onProgress?.("Processing garbage collection...");
@@ -169,30 +261,36 @@ async function syncToSupabase(rootPath, supabase, userId, onProgress) {
 .prepare("SELECT content_hash, analysis_type FROM function_gc WHERE synced_at IS NULL")
 .all();
 if (gcRows.length > 0) {
-
-
-
-
-
-
-
-
-
+// Batch GC deletes using .in() for efficiency
+const withType = gcRows.filter((r) => r.analysis_type);
+const withoutType = gcRows.filter((r) => !r.analysis_type);
+// Delete analysis rows with specific type (group by type)
+if (withType.length > 0) {
+const byType = new Map();
+for (const row of withType) {
+const hashes = byType.get(row.analysis_type) || [];
+hashes.push(row.content_hash);
+byType.set(row.analysis_type, hashes);
 }
-
-
-
-
-.delete()
-.eq("content_hash", row.content_hash)
-.eq("repo_id", repoId);
-}
-await supabase
-.from("function_locations")
-.delete()
-.eq("content_hash", row.content_hash)
-.eq("repo_id", repoId);
+await Promise.all([...byType.entries()].map(([type, hashes]) => parallelChunks(hashes, 50, async (chunk) => {
+await supabase.from("function_analysis").delete()
+.eq("repo_id", repoId).eq("analysis_type", type).in("content_hash", chunk);
+})));
 }
+// Delete analysis rows without type (legacy GC entries)
+if (withoutType.length > 0) {
+const legacyHashes = withoutType.map((r) => r.content_hash);
+await parallelChunks(legacyHashes, 50, async (chunk) => {
+await supabase.from("function_analysis").delete()
+.eq("repo_id", repoId).in("content_hash", chunk);
+});
+}
+// Delete location rows for all GC'd hashes
+const allGcHashes = gcRows.map((r) => r.content_hash);
+await parallelChunks(allGcHashes, 50, async (chunk) => {
+await supabase.from("function_locations").delete()
+.eq("repo_id", repoId).in("content_hash", chunk);
+});
 // Mark GC rows as synced locally, then delete them
 const now = Math.floor(Date.now() / 1000);
 db.transaction(() => {
@@ -200,10 +298,145 @@ async function syncToSupabase(rootPath, supabase, userId, onProgress) {
 db.prepare("DELETE FROM function_gc WHERE synced_at IS NOT NULL").run();
 })();
 }
+// 5. Replace communities (full sync, like function_locations)
+// Steps 5-7 are gated by syncArchitecture (only sync on docs branch)
+let communitiesSynced = 0;
+if (!syncArchitecture) {
+return {
+pushed: unsynced.length,
+locations: locations.length,
+gcProcessed: gcRows.length,
+practices: practicesSynced,
+communities: 0,
+communityEdges: 0,
+summaries: 0,
+skippedArchitecture: true,
+commitHash: null,
+};
+}
+const hasCommunities = db.prepare("SELECT name FROM sqlite_master WHERE type='table' AND name='communities'").get();
+if (hasCommunities) {
+onProgress?.("Syncing community memberships...");
+const { error: commDelError } = await supabase
+.from("communities")
+.delete()
+.eq("repo_id", repoId);
+if (commDelError) {
+throw new Error(`Failed to clear communities: ${commDelError.message}`);
+}
+const communityRows = db.prepare("SELECT content_hash, level, community_id, algorithm FROM communities").all();
+if (communityRows.length > 0) {
+const rows = communityRows.map((row) => ({
+content_hash: row.content_hash,
+level: row.level,
+community_id: row.community_id,
+algorithm: row.algorithm,
+repo_id: repoId,
+user_id: userId,
+}));
+await parallelChunks(rows, 500, async (chunk) => {
+const { error } = await supabase.from("communities").insert(chunk);
+if (error) {
+throw new Error(`Failed to push communities: ${error.message}`);
+}
+});
+communitiesSynced = communityRows.length;
+}
+}
+// 6. Replace community_edges (full sync, like communities)
+let communityEdgesSynced = 0;
+const hasCommunityEdges = db.prepare("SELECT name FROM sqlite_master WHERE type='table' AND name='community_edges'").get();
+if (hasCommunityEdges) {
+onProgress?.("Syncing community edges...");
+const { error: ceDelError } = await supabase
+.from("community_edges")
+.delete()
+.eq("repo_id", repoId);
+if (ceDelError) {
+throw new Error(`Failed to clear community edges: ${ceDelError.message}`);
+}
+const ceRows = db.prepare("SELECT source_community, target_community, algorithm, weight, edge_count FROM community_edges").all();
+if (ceRows.length > 0) {
+const rows = ceRows.map((row) => ({
+source_community: row.source_community,
+target_community: row.target_community,
+algorithm: row.algorithm,
+repo_id: repoId,
+user_id: userId,
+weight: row.weight,
+edge_count: row.edge_count,
+}));
+await parallelChunks(rows, 500, async (chunk) => {
+const { error } = await supabase.from("community_edges").insert(chunk);
+if (error) {
+throw new Error(`Failed to push community edges: ${error.message}`);
+}
+});
+communityEdgesSynced = ceRows.length;
+}
+}
+// 7. Replace community summaries (full sync, like communities)
+let summariesSynced = 0;
+const hasSummaries = db.prepare("SELECT name FROM sqlite_master WHERE type='table' AND name='community_summaries'").get();
+if (hasSummaries) {
+onProgress?.("Syncing community summaries...");
+const { error: sumDelError } = await supabase
+.from("community_summaries")
+.delete()
+.eq("repo_id", repoId);
+if (sumDelError) {
+throw new Error(`Failed to clear summaries: ${sumDelError.message}`);
+}
+const summaryRows = db.prepare("SELECT community_id, level, algorithm, input_hash, summary, model_version, created_at FROM community_summaries").all();
+if (summaryRows.length > 0) {
+const rows = summaryRows.map((row) => ({
+community_id: row.community_id,
+level: row.level,
+algorithm: row.algorithm,
+repo_id: repoId,
+user_id: userId,
+input_hash: row.input_hash,
+summary: JSON.parse(row.summary),
+model_version: row.model_version,
+}));
+await parallelChunks(rows, 500, async (chunk) => {
+const { error } = await supabase
+.from("community_summaries")
+.insert(chunk);
+if (error) {
+throw new Error(`Failed to push summaries: ${error.message}`);
+}
+});
+summariesSynced = summaryRows.length;
+}
+}
+// 8. Record docs sync commit hash (git repos only)
+const commitHash = getGitCommitHash(rootPath);
+if (commitHash && currentBranch) {
+await supabase.from("docs_sync_history").insert({
+repo_id: repoId,
+user_id: userId,
+commit_hash: commitHash,
+branch: currentBranch,
+});
+await supabase
+.from("repos")
+.update({
+last_docs_sync_commit: commitHash,
+last_docs_sync_at: new Date().toISOString(),
+})
+.eq("id", repoId);
+}
 return {
 pushed: unsynced.length,
 locations: locations.length,
 gcProcessed: gcRows.length,
+practices: practicesSynced,
+communities: communitiesSynced,
+communityEdges: communityEdgesSynced,
+summaries: summariesSynced,
+skippedArchitecture: false,
+commitHash,
 };
 }
 finally {
@@ -221,32 +454,41 @@ async function pullFromSupabase(rootPath, supabase, userId, repoId, missingHashe
 const dbPath = path.join(rootPath, ".ophan", "index.db");
 onProgress?.(`Pulling ${missingHashes.length} cached analyses from cloud...`);
 const allRows = [];
-// Batch queries in chunks of
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+// Batch queries in chunks of 50 with bounded concurrency
+// (SHA256 hashes are 64 chars each; 50 × 64 ≈ 3.2KB fits within URI limits)
+const pullChunks = [];
+for (let i = 0; i < missingHashes.length; i += 50) {
+pullChunks.push(missingHashes.slice(i, i + 50));
+}
+await Promise.all(pullChunks.map((chunk) => supabaseLimit(async () => {
+try {
+const { data, error } = await supabase
+.from("function_analysis")
+.select("content_hash, analysis_type, analysis, model_version, schema_version, language, entity_type")
+.eq("repo_id", repoId)
+.in("content_hash", chunk);
+if (error) {
+onProgress?.(`Warning: pull failed: ${error.message}`);
+return;
+}
+if (data) {
+for (const row of data) {
+allRows.push({
+content_hash: row.content_hash,
+analysis_type: row.analysis_type,
+analysis: typeof row.analysis === "string" ? row.analysis : JSON.stringify(row.analysis),
+model_version: row.model_version,
+schema_version: row.schema_version ?? 1,
+language: row.language ?? "typescript",
+entity_type: row.entity_type ?? "function",
+});
+}
 }
 }
-
+catch (err) {
+onProgress?.(`Warning: pull request failed: ${err?.message ?? String(err)}`);
+}
+})));
 if (allRows.length === 0)
 return { pulled: 0 };
 const imported = (0, core_1.importAnalysis)(dbPath, allRows);
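
sync.js now pushes rows through a bounded-concurrency chunk helper built on p-limit instead of a sequential loop. In isolation the pattern looks roughly like this (a sketch mirroring the helper above, not a drop-in copy of the package's code):

```ts
import pLimit from "p-limit";

// At most 3 chunked requests run at the same time.
const limit = pLimit(3);

async function parallelChunks<T>(
  rows: T[],
  chunkSize: number,
  fn: (chunk: T[]) => Promise<void>,
): Promise<void> {
  const chunks: T[][] = [];
  for (let i = 0; i < rows.length; i += chunkSize) {
    chunks.push(rows.slice(i, i + chunkSize));
  }
  // Each chunk is queued through the limiter, bounding concurrency.
  await Promise.all(chunks.map((chunk) => limit(() => fn(chunk))));
}
```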
package/dist/watch.js
CHANGED
@@ -33,27 +33,90 @@ var __importStar = (this && this.__importStar) || (function () {
 };
 })();
 Object.defineProperty(exports, "__esModule", { value: true });
+exports.formatError = formatError;
+exports.isGitIgnored = isGitIgnored;
+exports.withTimeout = withTimeout;
 exports.startWatch = startWatch;
 const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
+const child_process_1 = require("child_process");
 const core_1 = require("@ophan/core");
 function emit(event) {
 process.stdout.write(JSON.stringify(event) + "\n");
 }
+function formatError(err) {
+if (err instanceof Error)
+return err.message || err.constructor.name;
+if (typeof err === "string")
+return err || "Empty error string";
+// Handle non-Error objects (e.g. Supabase/Anthropic SDK errors with circular refs)
+if (err && typeof err === "object") {
+const obj = err;
+if (typeof obj.message === "string" && obj.message)
+return obj.message;
+if (typeof obj.error === "string" && obj.error)
+return obj.error;
+try {
+const json = JSON.stringify(err);
+if (json && json !== "{}")
+return json;
+}
+catch { /* circular — fall through */ }
+// Last resort for objects: try toString, check it's not the useless default
+const str = String(err);
+if (str !== "[object Object]")
+return str;
+return "Unknown error (non-serializable object)";
+}
+if (err === undefined)
+return "Unknown error (undefined)";
+if (err === null)
+return "Unknown error (null)";
+return String(err);
+}
+/**
+ * Check if a file is ignored by git (respects all .gitignore files, global gitignore,
+ * and .git/info/exclude). Returns false if not in a git repo or git is unavailable.
+ */
+function isGitIgnored(filePath, cwd) {
+try {
+(0, child_process_1.execSync)(`git check-ignore -q "${filePath}"`, {
+cwd,
+stdio: "pipe",
+});
+return true; // exit code 0 = ignored
+}
+catch {
+return false; // exit code 1 = not ignored, or git not available
+}
+}
+/** Wraps a promise with a timeout. Rejects with a timeout error if not resolved within `ms`. */
+function withTimeout(promise, ms, label) {
+return new Promise((resolve, reject) => {
+const timer = setTimeout(() => reject(new Error(`Analysis timeout after ${ms / 1000}s for ${label}`)), ms);
+promise.then((v) => { clearTimeout(timer); resolve(v); }, (e) => { clearTimeout(timer); reject(e); });
+});
+}
 async function startWatch(options) {
 const { rootPath, pullFn, syncFn, json } = options;
 const dbPath = path.join(rootPath, ".ophan", "index.db");
 const lockPath = path.join(rootPath, ".ophan", "watch.lock");
-// Check for existing watcher
+// Check for existing watcher — kill it so only one instance runs at a time
 if (fs.existsSync(lockPath)) {
 try {
 const pid = parseInt(fs.readFileSync(lockPath, "utf-8").trim());
 process.kill(pid, 0); // Check if process is alive (signal 0 = no-op)
+// Kill the existing watcher and take over
 if (json)
-emit({ event: "
+emit({ event: "replacing_watcher", oldPid: pid });
 else
-console.
-
+console.log(` Stopping previous watcher (PID ${pid})...`);
+try {
+process.kill(pid, "SIGTERM");
+}
+catch { /* already dying */ }
+// Brief wait for process to exit and release resources
+await new Promise((resolve) => setTimeout(resolve, 500));
 }
 catch {
 // Process is dead — stale lock file, continue
@@ -74,33 +137,78 @@ async function startWatch(options) {
 console.log("🔮 Ophan watching...\n");
 if (!json)
 console.log(" Running initial scan...");
-const
-
-
-
-
+const INITIAL_SCAN_RETRIES = 2;
+const PROGRESS_THROTTLE_MS = 500;
+let scanSucceeded = false;
+for (let attempt = 0; attempt <= INITIAL_SCAN_RETRIES; attempt++) {
+try {
+let lastProgressEmit = 0;
+const scanResult = await (0, core_1.analyzeRepository)(rootPath, (current, total, file) => {
+if (json) {
+const now = Date.now();
+if (now - lastProgressEmit > PROGRESS_THROTTLE_MS || current === total) {
+emit({ event: "scan_progress", current, total, file });
+lastProgressEmit = now;
+}
+}
+else if (process.stdout.isTTY) {
+process.stdout.clearLine(0);
+process.stdout.cursorTo(0);
+process.stdout.write(` [${current}/${total}] ${file}`);
+}
+}, pullFn);
+if (json) {
+emit({
+event: "scan_complete",
+files: scanResult.files,
+analyzed: scanResult.analyzed,
+cached: scanResult.skipped,
+pulled: scanResult.pulled,
+});
+}
+else {
+console.log(`\n Initial scan: ${scanResult.analyzed} analyzed, ${scanResult.skipped} cached` +
+(scanResult.pulled ? ` (${scanResult.pulled} from cloud)` : "") +
+` across ${scanResult.files} files`);
+}
+// Sync after initial scan if anything was analyzed
+if (syncFn && scanResult.analyzed > 0) {
+await runSync(syncFn, json);
+}
+scanSucceeded = true;
+break;
+}
+catch (err) {
+const msg = formatError(err);
+if (attempt < INITIAL_SCAN_RETRIES) {
+const delay = (attempt + 1) * 3000;
+if (json) {
+emit({ event: "retry", attempt: attempt + 1, maxAttempts: INITIAL_SCAN_RETRIES + 1, message: msg, delayMs: delay });
+}
+else {
+console.error(`\n ❌ Initial scan failed: ${msg}. Retrying in ${delay / 1000}s...`);
+}
+await new Promise((resolve) => setTimeout(resolve, delay));
+}
+else {
+if (json) {
+emit({ event: "error", message: `Initial scan failed after ${INITIAL_SCAN_RETRIES + 1} attempts: ${msg}` });
+}
+else {
+console.error(`\n ❌ Initial scan failed after ${INITIAL_SCAN_RETRIES + 1} attempts: ${msg}`);
+}
+}
 }
-}, pullFn);
-if (json) {
-emit({
-event: "scan_complete",
-files: scanResult.files,
-analyzed: scanResult.analyzed,
-cached: scanResult.skipped,
-pulled: scanResult.pulled,
-});
-}
-else {
-console.log(`\n Initial scan: ${scanResult.analyzed} analyzed, ${scanResult.skipped} cached` +
-(scanResult.pulled ? ` (${scanResult.pulled} from cloud)` : "") +
-` across ${scanResult.files} files`);
 }
-
-
-
+if (!scanSucceeded) {
+// Continue to file watching — incremental analysis can still work.
+// Individual file changes will trigger re-analysis.
+if (json)
+emit({ event: "error", message: "Continuing in watch-only mode. File changes will trigger analysis." });
 }
 // Phase 2: Open DB for incremental watching
 const db = (0, core_1.initDb)(dbPath);
+const practices = (0, core_1.loadPracticesFromDb)(db);
 const extSet = new Set((0, core_1.getSupportedExtensions)().map((e) => e.toLowerCase()));
 const DEBOUNCE_MS = 5000;
 const SYNC_DEBOUNCE_MS = 10000;
@@ -123,12 +231,20 @@ async function startWatch(options) {
 }
 }, SYNC_DEBOUNCE_MS);
 }
-// FIFO analysis queue
+// FIFO analysis queue with circuit breaker
 let analyzing = false;
 const queue = [];
+const MAX_CONSECUTIVE_ERRORS = 3;
+const CIRCUIT_BREAKER_COOLDOWN_MS = 30000;
+const ANALYSIS_TIMEOUT_MS = 120000;
+let consecutiveErrors = 0;
+let circuitBrokenUntil = 0;
 async function processQueue() {
 if (analyzing || queue.length === 0)
 return;
+// Circuit breaker: stop processing if too many consecutive errors
+if (Date.now() < circuitBrokenUntil)
+return;
 analyzing = true;
 while (queue.length > 0) {
 const file = queue.shift();
@@ -139,7 +255,8 @@ async function startWatch(options) {
 console.log(` Analyzing ${relPath}...`);
 const start = Date.now();
 try {
-const result = await (0, core_1.analyzeFiles)(db, rootPath, [file], { pullFn });
+const result = await withTimeout((0, core_1.analyzeFiles)(db, rootPath, [file], { pullFn, practices }), ANALYSIS_TIMEOUT_MS, relPath);
+consecutiveErrors = 0; // Reset on success
 const duration = Date.now() - start;
 if (json) {
 emit({
@@ -160,17 +277,38 @@ async function startWatch(options) {
 }
 }
 catch (err) {
+consecutiveErrors++;
+const msg = formatError(err);
 if (json) {
-emit({ event: "error", file: relPath, message:
+emit({ event: "error", file: relPath, message: msg });
 }
 else {
-console.error(` ❌ ${relPath}: ${
+console.error(` ❌ ${relPath}: ${msg}`);
+}
+// Circuit breaker: pause after repeated failures
+if (consecutiveErrors >= MAX_CONSECUTIVE_ERRORS) {
+circuitBrokenUntil = Date.now() + CIRCUIT_BREAKER_COOLDOWN_MS;
+const remaining = queue.length;
+if (json) {
+emit({
+event: "error",
+message: `${MAX_CONSECUTIVE_ERRORS} consecutive failures — pausing analysis for ${CIRCUIT_BREAKER_COOLDOWN_MS / 1000}s. ${remaining} file(s) queued for retry.`,
+});
+}
+else {
+console.error(` ⏸ ${MAX_CONSECUTIVE_ERRORS} consecutive failures — pausing for ${CIRCUIT_BREAKER_COOLDOWN_MS / 1000}s`);
+}
+break; // Leave remaining files in queue for retry after cooldown
 }
 }
 }
 analyzing = false;
 }
-
+// Fast-path segments that are always ignored (avoids git subprocess for common cases)
+const ALWAYS_IGNORE = ["node_modules", ".ophan"];
+// Cache of directory prefixes known to be gitignored — avoids repeated git check-ignore calls
+// for files in the same ignored directory (e.g. .output/public/assets/a.js, .output/public/assets/b.js)
+const ignoredDirCache = new Set();
 function onFileChange(filename) {
 const ext = path.extname(filename).toLowerCase();
 if (!extSet.has(ext))
@@ -178,9 +316,21 @@ async function startWatch(options) {
 const absPath = path.isAbsolute(filename)
 ? filename
 : path.resolve(rootPath, filename);
-//
-if (
+// Fast-path: skip universally-ignored directories without shelling out to git
+if (ALWAYS_IGNORE.some((seg) => absPath.includes(`/${seg}/`) || absPath.includes(`\\${seg}\\`)))
 return;
+// Check directory-level cache before calling git
+const dir = path.dirname(absPath);
+for (const cached of ignoredDirCache) {
+if (dir.startsWith(cached))
+return;
+}
+// Ask git whether this file is ignored (respects all .gitignore files + global)
+if (isGitIgnored(absPath, rootPath)) {
+// Cache the directory so sibling files skip the git call
+ignoredDirCache.add(dir);
+return;
+}
 // Skip if file was deleted
 if (!fs.existsSync(absPath))
 return;
@@ -193,7 +343,13 @@ async function startWatch(options) {
 if (!queue.includes(absPath)) {
 queue.push(absPath);
 }
-processQueue()
+processQueue().catch((err) => {
+const msg = formatError(err);
+if (json)
+emit({ event: "error", message: msg });
+else
+console.error(` ❌ Analysis error: ${msg}`);
+});
 }, DEBOUNCE_MS));
 }
 // Start recursive file watcher (macOS + Windows native, Linux may need chokidar)
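
watch.js now wraps each per-file analysis in a promise timeout so one hung call cannot stall the queue. The wrapper pattern, shown on its own, is roughly (a sketch of the same idea as the `withTimeout` added above):

```ts
// Reject after `ms` milliseconds unless the wrapped promise settles first.
function withTimeout<T>(promise: Promise<T>, ms: number, label: string): Promise<T> {
  return new Promise<T>((resolve, reject) => {
    const timer = setTimeout(() => reject(new Error(`Timeout after ${ms / 1000}s for ${label}`)), ms);
    promise.then(
      (value) => { clearTimeout(timer); resolve(value); },
      (err) => { clearTimeout(timer); reject(err); },
    );
  });
}
```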
package/ophan_logo.png
ADDED
Binary file
package/package.json
CHANGED
@@ -1,8 +1,9 @@
 {
 "name": "@ophan/cli",
-"version": "0.0.2",
+"version": "0.0.4",
 "files": [
-"dist"
+"dist",
+"ophan_logo.png"
 ],
 "bin": {
 "ophan": "./dist/index.js"
@@ -15,9 +16,10 @@
 "dependencies": {
 "@ophan/core": "workspace:*",
 "@supabase/supabase-js": "^2.49.4",
-"better-sqlite3": "^
+"better-sqlite3": "^12.6.2",
 "commander": "^14.0.2",
 "dotenv": "^16.4.7",
+"p-limit": "^6.2.0",
 "open": "^10.1.0"
 },
 "devDependencies": {