structx 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/benchmark/baseline.d.ts +10 -0
- package/dist/benchmark/baseline.d.ts.map +1 -0
- package/dist/benchmark/baseline.js +84 -0
- package/dist/benchmark/baseline.js.map +1 -0
- package/dist/benchmark/questions.d.ts +2 -0
- package/dist/benchmark/questions.d.ts.map +1 -0
- package/dist/benchmark/questions.js +14 -0
- package/dist/benchmark/questions.js.map +1 -0
- package/dist/benchmark/reporter.d.ts +8 -0
- package/dist/benchmark/reporter.d.ts.map +1 -0
- package/dist/benchmark/reporter.js +120 -0
- package/dist/benchmark/reporter.js.map +1 -0
- package/dist/benchmark/runner.d.ts +24 -0
- package/dist/benchmark/runner.d.ts.map +1 -0
- package/dist/benchmark/runner.js +110 -0
- package/dist/benchmark/runner.js.map +1 -0
- package/dist/cli.d.ts +3 -0
- package/dist/cli.d.ts.map +1 -0
- package/dist/cli.js +753 -0
- package/dist/cli.js.map +1 -0
- package/dist/config.d.ts +15 -0
- package/dist/config.d.ts.map +1 -0
- package/dist/config.js +83 -0
- package/dist/config.js.map +1 -0
- package/dist/db/connection.d.ts +5 -0
- package/dist/db/connection.d.ts.map +1 -0
- package/dist/db/connection.js +89 -0
- package/dist/db/connection.js.map +1 -0
- package/dist/db/queries.d.ts +122 -0
- package/dist/db/queries.d.ts.map +1 -0
- package/dist/db/queries.js +191 -0
- package/dist/db/queries.js.map +1 -0
- package/dist/db/schema.sql +85 -0
- package/dist/ingest/differ.d.ts +13 -0
- package/dist/ingest/differ.d.ts.map +1 -0
- package/dist/ingest/differ.js +63 -0
- package/dist/ingest/differ.js.map +1 -0
- package/dist/ingest/parser.d.ts +15 -0
- package/dist/ingest/parser.d.ts.map +1 -0
- package/dist/ingest/parser.js +154 -0
- package/dist/ingest/parser.js.map +1 -0
- package/dist/ingest/relationships.d.ts +8 -0
- package/dist/ingest/relationships.d.ts.map +1 -0
- package/dist/ingest/relationships.js +93 -0
- package/dist/ingest/relationships.js.map +1 -0
- package/dist/ingest/scanner.d.ts +2 -0
- package/dist/ingest/scanner.d.ts.map +1 -0
- package/dist/ingest/scanner.js +67 -0
- package/dist/ingest/scanner.js.map +1 -0
- package/dist/instructions/claude.md +41 -0
- package/dist/instructions/copilot.md +39 -0
- package/dist/instructions/cursor.md +39 -0
- package/dist/instructions/generic.md +41 -0
- package/dist/query/answerer.d.ts +9 -0
- package/dist/query/answerer.d.ts.map +1 -0
- package/dist/query/answerer.js +46 -0
- package/dist/query/answerer.js.map +1 -0
- package/dist/query/classifier.d.ts +10 -0
- package/dist/query/classifier.d.ts.map +1 -0
- package/dist/query/classifier.js +60 -0
- package/dist/query/classifier.js.map +1 -0
- package/dist/query/context-builder.d.ts +3 -0
- package/dist/query/context-builder.d.ts.map +1 -0
- package/dist/query/context-builder.js +104 -0
- package/dist/query/context-builder.js.map +1 -0
- package/dist/query/retriever.d.ts +23 -0
- package/dist/query/retriever.d.ts.map +1 -0
- package/dist/query/retriever.js +142 -0
- package/dist/query/retriever.js.map +1 -0
- package/dist/semantic/analyzer.d.ts +15 -0
- package/dist/semantic/analyzer.d.ts.map +1 -0
- package/dist/semantic/analyzer.js +179 -0
- package/dist/semantic/analyzer.js.map +1 -0
- package/dist/semantic/cost.d.ts +11 -0
- package/dist/semantic/cost.d.ts.map +1 -0
- package/dist/semantic/cost.js +31 -0
- package/dist/semantic/cost.js.map +1 -0
- package/dist/semantic/prompt.d.ts +11 -0
- package/dist/semantic/prompt.d.ts.map +1 -0
- package/dist/semantic/prompt.js +71 -0
- package/dist/semantic/prompt.js.map +1 -0
- package/dist/semantic/validator.d.ts +15 -0
- package/dist/semantic/validator.d.ts.map +1 -0
- package/dist/semantic/validator.js +95 -0
- package/dist/semantic/validator.js.map +1 -0
- package/dist/utils/logger.d.ts +9 -0
- package/dist/utils/logger.d.ts.map +1 -0
- package/dist/utils/logger.js +48 -0
- package/dist/utils/logger.js.map +1 -0
- package/dist/utils/tokens.d.ts +3 -0
- package/dist/utils/tokens.d.ts.map +1 -0
- package/dist/utils/tokens.js +21 -0
- package/dist/utils/tokens.js.map +1 -0
- package/package.json +39 -0
package/dist/cli.js
ADDED
|
@@ -0,0 +1,753 @@
|
|
|
1
|
+
#!/usr/bin/env node
"use strict";
// ── TypeScript-compiler (tsc) CommonJS interop helpers ──
// Emitted automatically by tsc for `import * as` syntax; not hand-written.
// Do not edit: they re-bind ESM named exports onto a CommonJS namespace object.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
Object.defineProperty(exports, "__esModule", { value: true });
// Loads .env into process.env as a side effect (e.g. ANTHROPIC_API_KEY).
require("dotenv/config");
// Third-party + Node built-ins.
const commander_1 = require("commander");
const path = __importStar(require("path"));
const fs = __importStar(require("fs"));
// Compiled project modules (siblings under dist/).
const config_1 = require("./config");
const connection_1 = require("./db/connection");
const queries_1 = require("./db/queries");
const logger_1 = require("./utils/logger");
const scanner_1 = require("./ingest/scanner");
const parser_1 = require("./ingest/parser");
const relationships_1 = require("./ingest/relationships");
const differ_1 = require("./ingest/differ");
const analyzer_1 = require("./semantic/analyzer");
const cost_1 = require("./semantic/cost");
const queries_2 = require("./db/queries");
// NOTE(review): the modules below are not referenced by the commands visible
// in this chunk — presumably used by `ask`/`benchmark` further down; confirm.
const classifier_1 = require("./query/classifier");
const retriever_1 = require("./query/retriever");
const context_builder_1 = require("./query/context-builder");
const answerer_1 = require("./query/answerer");
const runner_1 = require("./benchmark/runner");
const reporter_1 = require("./benchmark/reporter");
|
|
58
|
+
const program = new commander_1.Command();
// Root command: a single global --verbose flag that switches the shared
// logger to debug level before any subcommand's action handler runs.
program
    .name('structx')
    .description('Graph-powered code intelligence CLI for TypeScript')
    .version('1.0.0')
    .option('--verbose', 'Enable verbose logging')
    .hook('preAction', (invokedCommand) => {
    const globalOpts = invokedCommand.opts();
    if (globalOpts.verbose) {
        (0, logger_1.setLogLevel)('debug');
    }
});
|
69
|
+
// ── setup (one-step bootstrap) ──
// Registers `structx setup`: runs init + ingest + analysis in a single pass.
// Mirrors the standalone `init`, `ingest`, and `analyze` commands, but
// auto-confirms the analysis cost instead of prompting.
program
    .command('setup')
    .description('One-step bootstrap: init + ingest + analyze')
    .argument('[repo-path]', 'Path to TypeScript repository', '.')
    .action(async (repoPath) => {
    const resolved = path.resolve(repoPath);
    const structxDir = (0, config_1.getStructXDir)(resolved);
    // Step 1: Init
    const dbPath = (0, connection_1.getDbPath)(structxDir);
    if (fs.existsSync(dbPath)) {
        console.log(`StructX already initialized at ${structxDir}`);
    }
    else {
        // Create the schema, then close; Step 2 reopens its own handle.
        const db = (0, connection_1.initializeDatabase)(dbPath);
        db.close();
        (0, config_1.saveConfig)(structxDir, { repoPath: resolved });
        console.log(`Initialized StructX at ${structxDir}`);
    }
    // Step 2: Ingest
    const config = (0, config_1.loadConfig)(structxDir);
    const db = (0, connection_1.openDatabase)(dbPath);
    const project = (0, parser_1.createProject)(resolved);
    console.log(`\nScanning ${resolved} for TypeScript files...`);
    const files = (0, scanner_1.scanDirectory)(resolved);
    console.log(`Found ${files.length} TypeScript files.`);
    let newFiles = 0;
    let changedFiles = 0;
    let unchangedFiles = 0;
    let totalFunctions = 0;
    let totalRelationships = 0;
    let queued = 0;
    for (const filePath of files) {
        const relativePath = path.relative(resolved, filePath);
        const content = fs.readFileSync(filePath, 'utf-8');
        const contentHash = (0, parser_1.hashFileContent)(content);
        // Skip files whose content hash is unchanged since the last run.
        const existingFile = (0, queries_1.getFileByPath)(db, relativePath);
        if (existingFile && existingFile.content_hash === contentHash) {
            unchangedFiles++;
            continue;
        }
        const isNew = !existingFile;
        if (isNew)
            newFiles++;
        else
            changedFiles++;
        const fileId = (0, queries_1.upsertFile)(db, relativePath, contentHash);
        // Snapshot the previous functions so the differ below can compare.
        const oldFunctions = isNew ? [] : (0, queries_1.getFunctionsByFileId)(db, fileId);
        const oldFunctionMap = new Map(oldFunctions.map(f => [f.name, f]));
        // Changed file: delete old relationships first, then old functions
        // (this order matters — relationships reference function ids).
        if (!isNew) {
            for (const oldFn of oldFunctions) {
                (0, queries_1.deleteRelationshipsByCallerFunctionId)(db, oldFn.id);
            }
            (0, queries_1.deleteFunctionsByFileId)(db, fileId);
        }
        let functions;
        try {
            functions = (0, parser_1.parseFile)(project, filePath);
        }
        catch (err) {
            // A parse failure skips this file only; the ingest continues.
            logger_1.logger.warn(`Failed to parse ${relativePath}: ${err.message}`);
            continue;
        }
        const functionIdMap = new Map();
        for (const fn of functions) {
            const fnId = (0, queries_1.insertFunction)(db, {
                file_id: fileId,
                name: fn.name,
                signature: fn.signature,
                body: fn.body,
                code_hash: fn.codeHash,
                start_line: fn.startLine,
                end_line: fn.endLine,
                is_exported: fn.isExported,
                is_async: fn.isAsync,
            });
            functionIdMap.set(fn.name, fnId);
            totalFunctions++;
            // New functions are queued unconditionally; changed ones only when
            // the differ deems the edit semantically significant.
            const oldFn = oldFunctionMap.get(fn.name);
            if (!oldFn) {
                (0, queries_1.enqueueForAnalysis)(db, fnId, 'new', (0, differ_1.getPriority)('new', fn.isExported));
                queued++;
            }
            else {
                const { reanalyze, reason } = (0, differ_1.shouldReanalyze)(oldFn, fn.signature, fn.codeHash, fn.body, config.diffThreshold);
                if (reanalyze) {
                    (0, queries_1.enqueueForAnalysis)(db, fnId, reason, (0, differ_1.getPriority)(reason, fn.isExported));
                    queued++;
                }
            }
        }
        try {
            const calls = (0, relationships_1.extractCallsFromFile)(project, filePath);
            for (const call of calls) {
                if (call.callerName === '__file__')
                    continue;
                const callerId = functionIdMap.get(call.callerName);
                if (!callerId)
                    continue;
                // Callee may not be ingested yet; its id stays unresolved then.
                const callee = (0, queries_1.getFunctionByName)(db, call.calleeName);
                (0, queries_1.insertRelationship)(db, callerId, call.calleeName, call.relationType, callee?.id);
                totalRelationships++;
            }
        }
        catch (err) {
            logger_1.logger.warn(`Failed to extract calls from ${relativePath}: ${err.message}`);
        }
    }
    console.log(`\nIngestion complete:`);
    console.log(` New files: ${newFiles}`);
    console.log(` Changed files: ${changedFiles}`);
    console.log(` Unchanged: ${unchangedFiles}`);
    console.log(` Functions: ${totalFunctions}`);
    console.log(` Relationships: ${totalRelationships}`);
    console.log(` Queued: ${queued} functions for semantic analysis`);
    // Step 3: Analyze (auto-confirm)
    if (queued > 0 && config.anthropicApiKey) {
        const pendingCount = (0, queries_2.getPendingAnalysisCount)(db);
        const estimate = (0, cost_1.estimateAnalysisCost)(pendingCount, config.batchSize, config.analysisModel);
        console.log('\n' + (0, cost_1.formatCostEstimate)(estimate));
        console.log('\nAnalyzing...');
        let totalAnalyzed = 0;
        let totalCached = 0;
        let totalFailed = 0;
        let totalInputTokens = 0;
        let totalOutputTokens = 0;
        let totalCost = 0;
        let batchNum = 0;
        // Drain the pending queue batch-by-batch until nothing remains.
        while (true) {
            const pending = (0, queries_2.getPendingAnalysis)(db, config.batchSize);
            if (pending.length === 0)
                break;
            batchNum++;
            const items = pending.map(p => ({ id: p.id, function_id: p.function_id }));
            console.log(` Batch ${batchNum}: ${items.length} functions...`);
            const batchResult = await (0, analyzer_1.analyzeBatch)(db, items, config.analysisModel, config.anthropicApiKey);
            totalAnalyzed += batchResult.analyzed;
            totalCached += batchResult.cached;
            totalFailed += batchResult.failed;
            totalInputTokens += batchResult.totalInputTokens;
            totalOutputTokens += batchResult.totalOutputTokens;
            totalCost += batchResult.totalCost;
        }
        // Refresh the full-text search index after the graph is enriched.
        (0, analyzer_1.rebuildSearchIndex)(db);
        console.log(`\nAnalysis complete:`);
        console.log(` Analyzed: ${totalAnalyzed}`);
        console.log(` From cache: ${totalCached}`);
        console.log(` Failed: ${totalFailed}`);
        console.log(` Input tokens: ${totalInputTokens.toLocaleString()}`);
        console.log(` Output tokens: ${totalOutputTokens.toLocaleString()}`);
        console.log(` Total cost: $${totalCost.toFixed(4)}`);
    }
    else if (queued > 0) {
        console.log('\nSkipping analysis: ANTHROPIC_API_KEY not set.');
        console.log('Set the key and run "structx analyze . --yes" to enrich functions.');
    }
    else {
        console.log('\nNo functions to analyze.');
    }
    db.close();
    console.log('\nSetup complete.');
});
|
|
231
|
+
// ── install (drop instruction files into project) ──
// Registers `structx install`: copies the bundled AI-agent instruction
// templates (CLAUDE.md, .cursorrules, .github/copilot-instructions.md) into
// the target project. Existing files are appended to unless they already
// contain a StructX section.
// Fixes vs. previous version: the unused read of generic.md is removed (it
// was dead I/O that could throw), and the thrice-duplicated
// append-or-create-or-skip logic is factored into one local helper.
program
    .command('install')
    .description('Install AI agent instruction files into a project')
    .argument('[repo-path]', 'Path to target project', '.')
    .action((repoPath) => {
    const resolved = path.resolve(repoPath);
    const instructionsDir = path.join(__dirname, 'instructions');
    if (!fs.existsSync(instructionsDir)) {
        console.log('Instruction templates not found. Package may be incorrectly installed.');
        return;
    }
    let installed = 0;
    // Append the StructX section to an existing file (skipping when already
    // present), or create the file outright. Returns 1 when a write happened
    // so the caller can tally installs.
    const installInstructionFile = (targetPath, content, label) => {
        if (fs.existsSync(targetPath)) {
            const existing = fs.readFileSync(targetPath, 'utf-8');
            if (existing.includes('StructX')) {
                console.log(` ${label} — already contains StructX section, skipping.`);
                return 0;
            }
            fs.appendFileSync(targetPath, '\n\n' + content, 'utf-8');
            console.log(` ${label} — appended StructX section.`);
            return 1;
        }
        fs.writeFileSync(targetPath, content, 'utf-8');
        console.log(` ${label} — created.`);
        return 1;
    };
    // CLAUDE.md — append if exists, create if not
    const claudeContent = fs.readFileSync(path.join(instructionsDir, 'claude.md'), 'utf-8');
    installed += installInstructionFile(path.join(resolved, 'CLAUDE.md'), claudeContent, 'CLAUDE.md');
    // .cursorrules
    const cursorContent = fs.readFileSync(path.join(instructionsDir, 'cursor.md'), 'utf-8');
    installed += installInstructionFile(path.join(resolved, '.cursorrules'), cursorContent, '.cursorrules');
    // .github/copilot-instructions.md — ensure the .github directory exists.
    const githubDir = path.join(resolved, '.github');
    if (!fs.existsSync(githubDir)) {
        fs.mkdirSync(githubDir, { recursive: true });
    }
    const copilotContent = fs.readFileSync(path.join(instructionsDir, 'copilot.md'), 'utf-8');
    installed += installInstructionFile(path.join(githubDir, 'copilot-instructions.md'), copilotContent, '.github/copilot-instructions.md');
    console.log(`\nInstalled ${installed} instruction file(s) into ${resolved}`);
});
|
|
308
|
+
// ── init ──
// Registers `structx init`: creates the .structx directory contents — the
// SQLite database (schema via initializeDatabase) and config.json — for the
// given repository. Idempotent: bails out if the database already exists.
// Fix vs. previous version: getDbPath() was computed twice; it is now hoisted
// into a single const used by both the existence check and creation.
program
    .command('init')
    .description('Initialize StructX for the current repository')
    .argument('[repo-path]', 'Path to TypeScript repository', '.')
    .action((repoPath) => {
    const resolved = path.resolve(repoPath);
    const structxDir = (0, config_1.getStructXDir)(resolved);
    const dbPath = (0, connection_1.getDbPath)(structxDir);
    if (fs.existsSync(dbPath)) {
        console.log(`StructX already initialized at ${structxDir}`);
        return;
    }
    // Create DB (schema is created by initializeDatabase; the handle is not
    // needed afterwards, so close immediately).
    const db = (0, connection_1.initializeDatabase)(dbPath);
    db.close();
    // Create config
    (0, config_1.saveConfig)(structxDir, {
        repoPath: resolved,
    });
    console.log(`Initialized StructX at ${structxDir}`);
    console.log(` Database: ${dbPath}`);
    console.log(` Config: ${path.join(structxDir, 'config.json')}`);
    console.log(`\nNext: run 'structx ingest ${repoPath}' to parse your codebase.`);
});
|
|
333
|
+
// ── status ──
// Registers `structx status`: prints the aggregate counters stored in the
// database (files, functions, relationships, analysis progress, QA runs).
program
    .command('status')
    .description('Show current StructX statistics')
    .argument('[repo-path]', 'Path to TypeScript repository', '.')
    .action((repoPath) => {
    const repoRoot = path.resolve(repoPath);
    const structxDir = (0, config_1.getStructXDir)(repoRoot);
    const dbPath = (0, connection_1.getDbPath)(structxDir);
    if (!fs.existsSync(dbPath)) {
        console.log('StructX not initialized. Run "structx init" first.');
        return;
    }
    // Open, snapshot the stats, and release the handle before printing.
    const db = (0, connection_1.openDatabase)(dbPath);
    const stats = (0, queries_1.getStats)(db);
    db.close();
    const reportLines = [
        'StructX Status',
        '──────────────────────────',
        ` Files: ${stats.totalFiles}`,
        ` Functions: ${stats.totalFunctions}`,
        ` Relationships: ${stats.totalRelationships}`,
        ` Analyzed: ${stats.analyzedFunctions} / ${stats.totalFunctions}`,
        ` Pending: ${stats.pendingAnalysis}`,
        ` QA Runs: ${stats.totalQaRuns}`,
    ];
    for (const line of reportLines) {
        console.log(line);
    }
});
|
|
358
|
+
// ── doctor ──
// Registers `structx doctor`: environment sanity checks — Node version,
// database presence, config readability, API key, and repo path validity.
program
    .command('doctor')
    .description('Validate StructX environment and configuration')
    .argument('[repo-path]', 'Path to TypeScript repository', '.')
    .action((repoPath) => {
    const repoRoot = path.resolve(repoPath);
    const structxDir = (0, config_1.getStructXDir)(repoRoot);
    let ok = true;
    // Node.js version check: require v18 or newer.
    const nodeVersion = process.version;
    const majorVersion = Number.parseInt(nodeVersion.slice(1).split('.')[0], 10);
    if (majorVersion >= 18) {
        console.log(` [OK] Node.js ${nodeVersion}`);
    }
    else {
        console.log(` [FAIL] Node.js ${nodeVersion} (requires >= 18)`);
        ok = false;
    }
    // Database presence check.
    const dbPath = (0, connection_1.getDbPath)(structxDir);
    if (fs.existsSync(dbPath)) {
        console.log(` [OK] Database exists at ${dbPath}`);
    }
    else {
        console.log(` [FAIL] Database not found. Run 'structx init' first.`);
        ok = false;
    }
    // Config checks; loadConfig throws when the config file is missing.
    try {
        const config = (0, config_1.loadConfig)(structxDir);
        if (config.anthropicApiKey) {
            console.log(' [OK] Anthropic API key is set');
        }
        else {
            // NOTE(review): labelled WARN yet still fails the overall check —
            // preserved as-is; confirm whether a missing key should be fatal.
            console.log(' [WARN] Anthropic API key not set (set ANTHROPIC_API_KEY env var or add to config)');
            ok = false;
        }
        if (fs.existsSync(config.repoPath)) {
            console.log(` [OK] Repository path: ${config.repoPath}`);
        }
        else {
            console.log(` [FAIL] Repository path not found: ${config.repoPath}`);
            ok = false;
        }
    }
    catch {
        console.log(' [FAIL] Config not found. Run "structx init" first.');
        ok = false;
    }
    console.log();
    console.log(ok ? 'All checks passed.' : 'Some checks failed. Fix the issues above and try again.');
});
|
|
418
|
+
// ── ingest ──
// Registers `structx ingest`: incrementally parses the repository into the
// function graph. Only files whose content hash changed since the last run
// are re-processed; changed functions are queued for semantic re-analysis.
program
    .command('ingest')
    .description('Parse codebase into function graph')
    .argument('[repo-path]', 'Path to TypeScript repository', '.')
    .action((repoPath) => {
    const resolved = path.resolve(repoPath);
    const structxDir = (0, config_1.getStructXDir)(resolved);
    const dbPath = (0, connection_1.getDbPath)(structxDir);
    if (!fs.existsSync(dbPath)) {
        console.log('StructX not initialized. Run "structx init" first.');
        return;
    }
    const config = (0, config_1.loadConfig)(structxDir);
    const db = (0, connection_1.openDatabase)(dbPath);
    const project = (0, parser_1.createProject)(resolved);
    console.log(`Scanning ${resolved} for TypeScript files...`);
    const files = (0, scanner_1.scanDirectory)(resolved);
    console.log(`Found ${files.length} TypeScript files.`);
    let newFiles = 0;
    let changedFiles = 0;
    let unchangedFiles = 0;
    let totalFunctions = 0;
    let totalRelationships = 0;
    let queued = 0;
    for (const filePath of files) {
        const relativePath = path.relative(resolved, filePath);
        const content = fs.readFileSync(filePath, 'utf-8');
        const contentHash = (0, parser_1.hashFileContent)(content);
        // Check if file changed
        const existingFile = (0, queries_1.getFileByPath)(db, relativePath);
        if (existingFile && existingFile.content_hash === contentHash) {
            unchangedFiles++;
            continue;
        }
        const isNew = !existingFile;
        if (isNew)
            newFiles++;
        else
            changedFiles++;
        // Upsert file record
        const fileId = (0, queries_1.upsertFile)(db, relativePath, contentHash);
        // Get old functions for diff comparison
        const oldFunctions = isNew ? [] : (0, queries_1.getFunctionsByFileId)(db, fileId);
        const oldFunctionMap = new Map(oldFunctions.map(f => [f.name, f]));
        // Clear old data for this file
        if (!isNew) {
            // Delete relationships for all old functions in this file
            // (before the functions themselves — they reference function ids).
            for (const oldFn of oldFunctions) {
                (0, queries_1.deleteRelationshipsByCallerFunctionId)(db, oldFn.id);
            }
            (0, queries_1.deleteFunctionsByFileId)(db, fileId);
        }
        // Parse functions
        let functions;
        try {
            functions = (0, parser_1.parseFile)(project, filePath);
        }
        catch (err) {
            // A parse failure skips this file only; the ingest continues.
            logger_1.logger.warn(`Failed to parse ${relativePath}: ${err.message}`);
            continue;
        }
        // Insert functions and check for re-analysis needs
        const functionIdMap = new Map();
        for (const fn of functions) {
            const fnId = (0, queries_1.insertFunction)(db, {
                file_id: fileId,
                name: fn.name,
                signature: fn.signature,
                body: fn.body,
                code_hash: fn.codeHash,
                start_line: fn.startLine,
                end_line: fn.endLine,
                is_exported: fn.isExported,
                is_async: fn.isAsync,
            });
            functionIdMap.set(fn.name, fnId);
            totalFunctions++;
            // Determine if we need semantic re-analysis
            const oldFn = oldFunctionMap.get(fn.name);
            if (!oldFn) {
                // New function — queue for analysis
                (0, queries_1.enqueueForAnalysis)(db, fnId, 'new', (0, differ_1.getPriority)('new', fn.isExported));
                queued++;
            }
            else {
                // Changed function — the differ decides whether the edit is
                // significant (signature/hash/body vs. config.diffThreshold).
                const { reanalyze, reason } = (0, differ_1.shouldReanalyze)(oldFn, fn.signature, fn.codeHash, fn.body, config.diffThreshold);
                if (reanalyze) {
                    (0, queries_1.enqueueForAnalysis)(db, fnId, reason, (0, differ_1.getPriority)(reason, fn.isExported));
                    queued++;
                }
            }
        }
        // Extract and insert relationships
        try {
            const calls = (0, relationships_1.extractCallsFromFile)(project, filePath);
            for (const call of calls) {
                if (call.callerName === '__file__')
                    continue; // Skip file-level imports for now
                const callerId = functionIdMap.get(call.callerName);
                if (!callerId)
                    continue;
                // Try to resolve callee to a function ID
                const callee = (0, queries_1.getFunctionByName)(db, call.calleeName);
                (0, queries_1.insertRelationship)(db, callerId, call.calleeName, call.relationType, callee?.id);
                totalRelationships++;
            }
        }
        catch (err) {
            logger_1.logger.warn(`Failed to extract calls from ${relativePath}: ${err.message}`);
        }
    }
    db.close();
    console.log(`\nIngestion complete:`);
    console.log(` New files: ${newFiles}`);
    console.log(` Changed files: ${changedFiles}`);
    console.log(` Unchanged: ${unchangedFiles}`);
    console.log(` Functions: ${totalFunctions}`);
    console.log(` Relationships: ${totalRelationships}`);
    console.log(` Queued: ${queued} functions for semantic analysis`);
    if (queued > 0) {
        console.log(`\nNext: run 'structx analyze' to enrich functions with semantic metadata.`);
    }
});
|
|
542
|
+
// ── analyze ──
// Registers `structx analyze`: sends queued functions to the LLM in batches.
// Prints a cost estimate first and prompts for confirmation unless --yes.
program
    .command('analyze')
    .description('Run LLM semantic analysis on extracted functions')
    .argument('[repo-path]', 'Path to TypeScript repository', '.')
    .option('--yes', 'Skip cost confirmation prompt')
    .action(async (repoPath, opts) => {
    const resolved = path.resolve(repoPath);
    const structxDir = (0, config_1.getStructXDir)(resolved);
    const dbPath = (0, connection_1.getDbPath)(structxDir);
    if (!fs.existsSync(dbPath)) {
        console.log('StructX not initialized. Run "structx init" first.');
        return;
    }
    const config = (0, config_1.loadConfig)(structxDir);
    if (!config.anthropicApiKey) {
        console.log('Anthropic API key not set. Set ANTHROPIC_API_KEY env var or add to .structx/config.json');
        return;
    }
    const db = (0, connection_1.openDatabase)(dbPath);
    const pendingCount = (0, queries_2.getPendingAnalysisCount)(db);
    if (pendingCount === 0) {
        console.log('No functions pending analysis. Run "structx ingest" first.');
        db.close();
        return;
    }
    // Show cost estimate
    const estimate = (0, cost_1.estimateAnalysisCost)(pendingCount, config.batchSize, config.analysisModel);
    console.log('\n' + (0, cost_1.formatCostEstimate)(estimate));
    if (!opts.yes) {
        // Lazy-load readline only when an interactive confirmation is needed
        // (tsc-emitted form of a dynamic `import('readline')`).
        const readline = await Promise.resolve().then(() => __importStar(require('readline')));
        const rl = readline.createInterface({ input: process.stdin, output: process.stdout });
        const answer = await new Promise(resolve => {
            rl.question('\nProceed? [y/N] ', resolve);
        });
        rl.close();
        // Anything other than 'y'/'Y' aborts (default answer is No).
        if (answer.toLowerCase() !== 'y') {
            console.log('Aborted.');
            db.close();
            return;
        }
    }
    console.log('\nAnalyzing...');
    let totalAnalyzed = 0;
    let totalCached = 0;
    let totalFailed = 0;
    let totalInputTokens = 0;
    let totalOutputTokens = 0;
    let totalCost = 0;
    let batchNum = 0;
    // Drain the pending queue batch-by-batch until it is empty.
    while (true) {
        const pending = (0, queries_2.getPendingAnalysis)(db, config.batchSize);
        if (pending.length === 0)
            break;
        batchNum++;
        const items = pending.map(p => ({ id: p.id, function_id: p.function_id }));
        console.log(` Batch ${batchNum}: ${items.length} functions...`);
        const batchResult = await (0, analyzer_1.analyzeBatch)(db, items, config.analysisModel, config.anthropicApiKey);
        totalAnalyzed += batchResult.analyzed;
        totalCached += batchResult.cached;
        totalFailed += batchResult.failed;
        totalInputTokens += batchResult.totalInputTokens;
        totalOutputTokens += batchResult.totalOutputTokens;
        totalCost += batchResult.totalCost;
    }
    // Rebuild FTS index
    (0, analyzer_1.rebuildSearchIndex)(db);
    db.close();
    console.log(`\nAnalysis complete:`);
    console.log(` Analyzed: ${totalAnalyzed}`);
    console.log(` From cache: ${totalCached}`);
    console.log(` Failed: ${totalFailed}`);
    console.log(` Input tokens: ${totalInputTokens.toLocaleString()}`);
    console.log(` Output tokens: ${totalOutputTokens.toLocaleString()}`);
    console.log(` Total cost: $${totalCost.toFixed(4)}`);
});
|
|
618
|
+
// ── ask ──
// Answer a natural-language question about the indexed codebase:
// classify → retrieve graph context → build prompt → generate answer → log run.
program
    .command('ask')
    .description('Ask a question about the codebase')
    .argument('<question>', 'The question to ask')
    .option('--repo <path>', 'Path to TypeScript repository', '.')
    .action(async (question, opts) => {
        const resolved = path.resolve(opts.repo);
        const structxDir = (0, config_1.getStructXDir)(resolved);
        const dbPath = (0, connection_1.getDbPath)(structxDir);
        if (!fs.existsSync(dbPath)) {
            console.log('StructX not initialized. Run "structx init" first.');
            return;
        }
        const config = (0, config_1.loadConfig)(structxDir);
        if (!config.anthropicApiKey) {
            console.log('Anthropic API key not set. Set ANTHROPIC_API_KEY env var or add to .structx/config.json');
            return;
        }
        const db = (0, connection_1.openDatabase)(dbPath);
        // Fix: close the DB even when a model call or retriever throws, so a
        // failed API request does not leak the SQLite handle. (Also removed the
        // dead `startTime` local — timing comes from answerResult/graphQueryStart.)
        try {
            // Step 1: Classify the question to choose a retrieval strategy
            console.log('Classifying question...');
            const classification = await (0, classifier_1.classifyQuestion)(question, config.classifierModel, config.anthropicApiKey);
            logger_1.logger.debug('Classification', classification);
            // Step 2: Retrieve context from the code graph
            console.log(`Retrieving context (strategy: ${classification.strategy})...`);
            const graphQueryStart = Date.now();
            let retrieved;
            switch (classification.strategy) {
                case 'direct':
                    retrieved = (0, retriever_1.directLookup)(db, classification.functionName || '');
                    break;
                case 'relationship':
                    retrieved = (0, retriever_1.relationshipQuery)(db, classification.functionName || '', classification.direction || 'callers');
                    break;
                case 'semantic':
                    retrieved = (0, retriever_1.semanticSearch)(db, classification.keywords);
                    break;
                case 'domain':
                    retrieved = (0, retriever_1.domainQuery)(db, classification.domain || 'other');
                    break;
                case 'impact':
                    retrieved = (0, retriever_1.impactAnalysis)(db, classification.functionName || '');
                    break;
                default:
                    // Unknown strategy: fall back to keyword search
                    retrieved = (0, retriever_1.semanticSearch)(db, classification.keywords);
            }
            const graphQueryTimeMs = Date.now() - graphQueryStart;
            // Step 3: Build context
            const context = (0, context_builder_1.buildContext)(retrieved, question);
            // Step 4: Generate answer
            console.log('Generating answer...\n');
            const answerResult = await (0, answerer_1.generateAnswer)(question, context, config.answerModel, config.anthropicApiKey);
            // Display answer
            console.log('─'.repeat(60));
            console.log(answerResult.answer);
            console.log('─'.repeat(60));
            console.log(`\nStrategy: ${classification.strategy} | Functions: ${retrieved.functions.length} | Graph query: ${graphQueryTimeMs}ms`);
            console.log(`Tokens: ${answerResult.inputTokens} in / ${answerResult.outputTokens} out | Cost: $${answerResult.cost.toFixed(4)} | Time: ${answerResult.responseTimeMs}ms`);
            // Save run to DB so benchmark reporting can compare modes later
            (0, queries_2.insertQaRun)(db, {
                mode: 'structx',
                question,
                input_tokens: answerResult.inputTokens,
                output_tokens: answerResult.outputTokens,
                total_tokens: answerResult.inputTokens + answerResult.outputTokens,
                cost_usd: answerResult.cost,
                response_time_ms: answerResult.responseTimeMs,
                files_accessed: null,
                functions_retrieved: retrieved.functions.length,
                graph_query_time_ms: graphQueryTimeMs,
                answer_text: answerResult.answer,
            });
        }
        finally {
            db.close();
        }
    });
|
|
694
|
+
// ── benchmark ──
// Parent command grouping the benchmark subcommands (run / report).
const benchmarkCmd = program
    .command('benchmark')
    .description('Run and view benchmark comparisons');
benchmarkCmd
    .command('run')
    .description('Run comparison benchmark')
    .option('--repo <path>', 'Path to TypeScript repository', '.')
    .action(async (opts) => {
        const resolved = path.resolve(opts.repo);
        const structxDir = (0, config_1.getStructXDir)(resolved);
        const dbPath = (0, connection_1.getDbPath)(structxDir);
        if (!fs.existsSync(dbPath)) {
            console.log('StructX not initialized. Run "structx init" first.');
            return;
        }
        const config = (0, config_1.loadConfig)(structxDir);
        if (!config.anthropicApiKey) {
            console.log('Anthropic API key not set.');
            return;
        }
        const db = (0, connection_1.openDatabase)(dbPath);
        // Fix: close the DB even when the benchmark or report writing throws,
        // so an aborted run does not leak the SQLite handle.
        try {
            console.log('Starting benchmark...');
            console.log('Running 8 questions in both StructX and Traditional modes.\n');
            const results = await (0, runner_1.runBenchmark)(db, config);
            // Generate and save reports (markdown + CSV under .structx/reports)
            const markdown = (0, reporter_1.generateMarkdownReport)(results);
            const csv = (0, reporter_1.generateCsvReport)(results);
            const { markdownPath, csvPath } = (0, reporter_1.saveReport)(structxDir, markdown, csv);
            console.log(`\nReports saved:`);
            console.log(`  Markdown: ${markdownPath}`);
            console.log(`  CSV: ${csvPath}`);
        }
        finally {
            db.close();
        }
    });
|
|
728
|
+
// Show the most recent benchmark report (markdown) from .structx/reports.
benchmarkCmd
    .command('report')
    .description('Show latest benchmark report')
    .option('--repo <path>', 'Path to TypeScript repository', '.')
    .action((opts) => {
        const repoRoot = path.resolve(opts.repo);
        const structxDir = (0, config_1.getStructXDir)(repoRoot);
        const reportsDir = path.join(structxDir, 'reports');
        if (!fs.existsSync(reportsDir)) {
            console.log('No benchmark reports found. Run "structx benchmark run" first.');
            return;
        }
        // Single pass over the directory: keep the lexicographically greatest
        // *.md name. NOTE(review): assumes report filenames sort chronologically
        // (e.g. timestamp-prefixed) — same assumption as a sort+reverse pick.
        let newest = null;
        for (const entry of fs.readdirSync(reportsDir)) {
            if (entry.endsWith('.md') && (newest === null || entry > newest)) {
                newest = entry;
            }
        }
        if (newest === null) {
            console.log('No benchmark reports found. Run "structx benchmark run" first.');
            return;
        }
        console.log(fs.readFileSync(path.join(reportsDir, newest), 'utf-8'));
    });
|
|
752
|
+
// Parse process.argv and dispatch to the matching command's action handler.
program.parse();
//# sourceMappingURL=cli.js.map
|