@probelabs/visor 0.1.19 → 0.1.21
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/action-cli-bridge.d.ts +0 -1
- package/dist/action-cli-bridge.d.ts.map +1 -1
- package/dist/ai-review-service.d.ts +0 -1
- package/dist/ai-review-service.d.ts.map +1 -1
- package/dist/check-execution-engine.d.ts +0 -1
- package/dist/check-execution-engine.d.ts.map +1 -1
- package/dist/cli-main.d.ts +0 -2
- package/dist/cli-main.d.ts.map +1 -1
- package/dist/cli.d.ts +0 -1
- package/dist/cli.d.ts.map +1 -1
- package/dist/commands.d.ts +0 -1
- package/dist/commands.d.ts.map +1 -1
- package/dist/config.d.ts +0 -1
- package/dist/config.d.ts.map +1 -1
- package/dist/dependency-resolver.d.ts +0 -1
- package/dist/dependency-resolver.d.ts.map +1 -1
- package/dist/event-mapper.d.ts +0 -1
- package/dist/event-mapper.d.ts.map +1 -1
- package/dist/failure-condition-evaluator.d.ts +0 -1
- package/dist/failure-condition-evaluator.d.ts.map +1 -1
- package/dist/git-repository-analyzer.d.ts +0 -1
- package/dist/git-repository-analyzer.d.ts.map +1 -1
- package/dist/github-check-service.d.ts +0 -1
- package/dist/github-check-service.d.ts.map +1 -1
- package/dist/github-comments.d.ts +0 -1
- package/dist/github-comments.d.ts.map +1 -1
- package/dist/index.d.ts +0 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +127704 -805
- package/dist/output-formatters.d.ts +0 -1
- package/dist/output-formatters.d.ts.map +1 -1
- package/dist/pr-analyzer.d.ts +0 -1
- package/dist/pr-analyzer.d.ts.map +1 -1
- package/dist/pr-detector.d.ts +0 -1
- package/dist/pr-detector.d.ts.map +1 -1
- package/dist/providers/ai-check-provider.d.ts.map +1 -1
- package/dist/providers/check-provider-registry.d.ts.map +1 -1
- package/dist/providers/check-provider.interface.d.ts.map +1 -1
- package/dist/providers/index.d.ts.map +1 -1
- package/dist/providers/noop-check-provider.d.ts.map +1 -1
- package/dist/providers/tool-check-provider.d.ts.map +1 -1
- package/dist/providers/webhook-check-provider.d.ts.map +1 -1
- package/dist/reviewer.d.ts +0 -1
- package/dist/reviewer.d.ts.map +1 -1
- package/dist/session-registry.d.ts +0 -1
- package/dist/session-registry.d.ts.map +1 -1
- package/dist/types/cli.d.ts.map +1 -1
- package/dist/types/config.d.ts.map +1 -1
- package/dist/utils/env-resolver.d.ts.map +1 -1
- package/package.json +3 -3
- package/dist/action-cli-bridge.js +0 -380
- package/dist/action-cli-bridge.js.map +0 -1
- package/dist/ai-review-service.js +0 -854
- package/dist/ai-review-service.js.map +0 -1
- package/dist/check-execution-engine.js +0 -1720
- package/dist/check-execution-engine.js.map +0 -1
- package/dist/cli-main.js +0 -249
- package/dist/cli-main.js.map +0 -1
- package/dist/cli.js +0 -241
- package/dist/cli.js.map +0 -1
- package/dist/commands.js +0 -53
- package/dist/commands.js.map +0 -1
- package/dist/config.js +0 -437
- package/dist/config.js.map +0 -1
- package/dist/dependency-resolver.js +0 -163
- package/dist/dependency-resolver.js.map +0 -1
- package/dist/event-mapper.js +0 -316
- package/dist/event-mapper.js.map +0 -1
- package/dist/failure-condition-evaluator.js +0 -481
- package/dist/failure-condition-evaluator.js.map +0 -1
- package/dist/git-repository-analyzer.js +0 -285
- package/dist/git-repository-analyzer.js.map +0 -1
- package/dist/github-check-service.js +0 -369
- package/dist/github-check-service.js.map +0 -1
- package/dist/github-comments.js +0 -289
- package/dist/github-comments.js.map +0 -1
- package/dist/index.js.map +0 -1
- package/dist/output-formatters.js +0 -624
- package/dist/output-formatters.js.map +0 -1
- package/dist/pr-analyzer.js +0 -195
- package/dist/pr-analyzer.js.map +0 -1
- package/dist/pr-detector.js +0 -357
- package/dist/pr-detector.js.map +0 -1
- package/dist/providers/ai-check-provider.js +0 -437
- package/dist/providers/ai-check-provider.js.map +0 -1
- package/dist/providers/check-provider-registry.js +0 -138
- package/dist/providers/check-provider-registry.js.map +0 -1
- package/dist/providers/check-provider.interface.js +0 -11
- package/dist/providers/check-provider.interface.js.map +0 -1
- package/dist/providers/index.js +0 -19
- package/dist/providers/index.js.map +0 -1
- package/dist/providers/noop-check-provider.js +0 -55
- package/dist/providers/noop-check-provider.js.map +0 -1
- package/dist/providers/tool-check-provider.js +0 -174
- package/dist/providers/tool-check-provider.js.map +0 -1
- package/dist/providers/webhook-check-provider.js +0 -173
- package/dist/providers/webhook-check-provider.js.map +0 -1
- package/dist/reviewer.js +0 -260
- package/dist/reviewer.js.map +0 -1
- package/dist/session-registry.js +0 -67
- package/dist/session-registry.js.map +0 -1
- package/dist/types/cli.js +0 -3
- package/dist/types/cli.js.map +0 -1
- package/dist/types/config.js +0 -6
- package/dist/types/config.js.map +0 -1
- package/dist/utils/env-resolver.js +0 -130
- package/dist/utils/env-resolver.js.map +0 -1
|
@@ -1,1720 +0,0 @@
|
|
|
1
|
-
"use strict";
|
|
2
|
-
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
-
if (k2 === undefined) k2 = k;
|
|
4
|
-
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
-
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
-
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
-
}
|
|
8
|
-
Object.defineProperty(o, k2, desc);
|
|
9
|
-
}) : (function(o, m, k, k2) {
|
|
10
|
-
if (k2 === undefined) k2 = k;
|
|
11
|
-
o[k2] = m[k];
|
|
12
|
-
}));
|
|
13
|
-
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
-
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
-
}) : function(o, v) {
|
|
16
|
-
o["default"] = v;
|
|
17
|
-
});
|
|
18
|
-
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
-
var ownKeys = function(o) {
|
|
20
|
-
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
-
var ar = [];
|
|
22
|
-
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
-
return ar;
|
|
24
|
-
};
|
|
25
|
-
return ownKeys(o);
|
|
26
|
-
};
|
|
27
|
-
return function (mod) {
|
|
28
|
-
if (mod && mod.__esModule) return mod;
|
|
29
|
-
var result = {};
|
|
30
|
-
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
-
__setModuleDefault(result, mod);
|
|
32
|
-
return result;
|
|
33
|
-
};
|
|
34
|
-
})();
|
|
35
|
-
Object.defineProperty(exports, "__esModule", { value: true });
|
|
36
|
-
exports.CheckExecutionEngine = void 0;
|
|
37
|
-
const reviewer_1 = require("./reviewer");
|
|
38
|
-
const git_repository_analyzer_1 = require("./git-repository-analyzer");
|
|
39
|
-
const check_provider_registry_1 = require("./providers/check-provider-registry");
|
|
40
|
-
const dependency_resolver_1 = require("./dependency-resolver");
|
|
41
|
-
const failure_condition_evaluator_1 = require("./failure-condition-evaluator");
|
|
42
|
-
const github_check_service_1 = require("./github-check-service");
|
|
43
|
-
/**
|
|
44
|
-
* Filter environment variables to only include safe ones for sandbox evaluation
|
|
45
|
-
*/
|
|
46
|
-
function getSafeEnvironmentVariables() {
|
|
47
|
-
const safeEnvVars = [
|
|
48
|
-
'CI',
|
|
49
|
-
'GITHUB_EVENT_NAME',
|
|
50
|
-
'GITHUB_REPOSITORY',
|
|
51
|
-
'GITHUB_REF',
|
|
52
|
-
'GITHUB_SHA',
|
|
53
|
-
'GITHUB_HEAD_REF',
|
|
54
|
-
'GITHUB_BASE_REF',
|
|
55
|
-
'GITHUB_ACTOR',
|
|
56
|
-
'GITHUB_WORKFLOW',
|
|
57
|
-
'GITHUB_RUN_ID',
|
|
58
|
-
'GITHUB_RUN_NUMBER',
|
|
59
|
-
'NODE_ENV',
|
|
60
|
-
];
|
|
61
|
-
const safeEnv = {};
|
|
62
|
-
for (const key of safeEnvVars) {
|
|
63
|
-
if (process.env[key]) {
|
|
64
|
-
safeEnv[key] = process.env[key];
|
|
65
|
-
}
|
|
66
|
-
}
|
|
67
|
-
return safeEnv;
|
|
68
|
-
}
|
|
69
|
-
class CheckExecutionEngine {
|
|
70
|
-
gitAnalyzer;
|
|
71
|
-
mockOctokit;
|
|
72
|
-
reviewer;
|
|
73
|
-
providerRegistry;
|
|
74
|
-
failureEvaluator;
|
|
75
|
-
githubCheckService;
|
|
76
|
-
checkRunMap;
|
|
77
|
-
githubContext;
|
|
78
|
-
constructor(workingDirectory) {
|
|
79
|
-
this.gitAnalyzer = new git_repository_analyzer_1.GitRepositoryAnalyzer(workingDirectory);
|
|
80
|
-
this.providerRegistry = check_provider_registry_1.CheckProviderRegistry.getInstance();
|
|
81
|
-
this.failureEvaluator = new failure_condition_evaluator_1.FailureConditionEvaluator();
|
|
82
|
-
// Create a mock Octokit instance for local analysis
|
|
83
|
-
// This allows us to reuse the existing PRReviewer logic without network calls
|
|
84
|
-
this.mockOctokit = this.createMockOctokit();
|
|
85
|
-
this.reviewer = new reviewer_1.PRReviewer(this.mockOctokit);
|
|
86
|
-
}
|
|
87
|
-
/**
|
|
88
|
-
* Execute checks on the local repository
|
|
89
|
-
*/
|
|
90
|
-
async executeChecks(options) {
|
|
91
|
-
const startTime = Date.now();
|
|
92
|
-
const timestamp = new Date().toISOString();
|
|
93
|
-
try {
|
|
94
|
-
// Determine where to send log messages based on output format
|
|
95
|
-
const logFn = options.outputFormat === 'json' || options.outputFormat === 'sarif'
|
|
96
|
-
? console.error
|
|
97
|
-
: console.log;
|
|
98
|
-
// Initialize GitHub checks if enabled
|
|
99
|
-
if (options.githubChecks?.enabled && options.githubChecks.octokit) {
|
|
100
|
-
await this.initializeGitHubChecks(options, logFn);
|
|
101
|
-
}
|
|
102
|
-
// Analyze the repository
|
|
103
|
-
logFn('🔍 Analyzing local git repository...');
|
|
104
|
-
const repositoryInfo = await this.gitAnalyzer.analyzeRepository();
|
|
105
|
-
if (!repositoryInfo.isGitRepository) {
|
|
106
|
-
// Complete GitHub checks with error if they were initialized
|
|
107
|
-
if (this.checkRunMap) {
|
|
108
|
-
await this.completeGitHubChecksWithError('Not a git repository or no changes found');
|
|
109
|
-
}
|
|
110
|
-
return this.createErrorResult(repositoryInfo, 'Not a git repository or no changes found', startTime, timestamp, options.checks);
|
|
111
|
-
}
|
|
112
|
-
// Convert to PRInfo format for compatibility with existing reviewer
|
|
113
|
-
const prInfo = this.gitAnalyzer.toPRInfo(repositoryInfo);
|
|
114
|
-
// Update GitHub checks to in-progress status
|
|
115
|
-
if (this.checkRunMap) {
|
|
116
|
-
await this.updateGitHubChecksInProgress(options);
|
|
117
|
-
}
|
|
118
|
-
// Execute checks using the existing PRReviewer
|
|
119
|
-
logFn(`🤖 Executing checks: ${options.checks.join(', ')}`);
|
|
120
|
-
const reviewSummary = await this.executeReviewChecks(prInfo, options.checks, options.timeout, options.config, options.outputFormat, options.debug, options.maxParallelism, options.failFast);
|
|
121
|
-
// Complete GitHub checks with results
|
|
122
|
-
if (this.checkRunMap) {
|
|
123
|
-
await this.completeGitHubChecksWithResults(reviewSummary, options);
|
|
124
|
-
}
|
|
125
|
-
const executionTime = Date.now() - startTime;
|
|
126
|
-
// Collect debug information when debug mode is enabled
|
|
127
|
-
let debugInfo;
|
|
128
|
-
if (options.debug && reviewSummary.debug) {
|
|
129
|
-
debugInfo = {
|
|
130
|
-
provider: reviewSummary.debug.provider,
|
|
131
|
-
model: reviewSummary.debug.model,
|
|
132
|
-
processingTime: reviewSummary.debug.processingTime,
|
|
133
|
-
parallelExecution: options.checks.length > 1,
|
|
134
|
-
checksExecuted: options.checks,
|
|
135
|
-
totalApiCalls: reviewSummary.debug.totalApiCalls || options.checks.length,
|
|
136
|
-
apiCallDetails: reviewSummary.debug.apiCallDetails,
|
|
137
|
-
};
|
|
138
|
-
}
|
|
139
|
-
return {
|
|
140
|
-
repositoryInfo,
|
|
141
|
-
reviewSummary,
|
|
142
|
-
executionTime,
|
|
143
|
-
timestamp,
|
|
144
|
-
checksExecuted: options.checks,
|
|
145
|
-
debug: debugInfo,
|
|
146
|
-
};
|
|
147
|
-
}
|
|
148
|
-
catch (error) {
|
|
149
|
-
console.error('Error executing checks:', error);
|
|
150
|
-
// Complete GitHub checks with error if they were initialized
|
|
151
|
-
if (this.checkRunMap) {
|
|
152
|
-
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred';
|
|
153
|
-
await this.completeGitHubChecksWithError(errorMessage);
|
|
154
|
-
}
|
|
155
|
-
const fallbackRepositoryInfo = {
|
|
156
|
-
title: 'Error during analysis',
|
|
157
|
-
body: `Error: ${error instanceof Error ? error.message : 'Unknown error'}`,
|
|
158
|
-
author: 'system',
|
|
159
|
-
base: 'main',
|
|
160
|
-
head: 'HEAD',
|
|
161
|
-
files: [],
|
|
162
|
-
totalAdditions: 0,
|
|
163
|
-
totalDeletions: 0,
|
|
164
|
-
isGitRepository: false,
|
|
165
|
-
workingDirectory: options.workingDirectory || process.cwd(),
|
|
166
|
-
};
|
|
167
|
-
return this.createErrorResult(fallbackRepositoryInfo, error instanceof Error ? error.message : 'Unknown error occurred', startTime, timestamp, options.checks);
|
|
168
|
-
}
|
|
169
|
-
}
|
|
170
|
-
/**
|
|
171
|
-
* Execute tasks with controlled parallelism using a pool pattern
|
|
172
|
-
*/
|
|
173
|
-
async executeWithLimitedParallelism(tasks, maxParallelism, failFast) {
|
|
174
|
-
if (maxParallelism <= 0) {
|
|
175
|
-
throw new Error('Max parallelism must be greater than 0');
|
|
176
|
-
}
|
|
177
|
-
if (tasks.length === 0) {
|
|
178
|
-
return [];
|
|
179
|
-
}
|
|
180
|
-
const results = new Array(tasks.length);
|
|
181
|
-
let currentIndex = 0;
|
|
182
|
-
let shouldStop = false;
|
|
183
|
-
// Worker function that processes tasks
|
|
184
|
-
const worker = async () => {
|
|
185
|
-
while (currentIndex < tasks.length && !shouldStop) {
|
|
186
|
-
const taskIndex = currentIndex++;
|
|
187
|
-
if (taskIndex >= tasks.length)
|
|
188
|
-
break;
|
|
189
|
-
try {
|
|
190
|
-
const result = await tasks[taskIndex]();
|
|
191
|
-
results[taskIndex] = { status: 'fulfilled', value: result };
|
|
192
|
-
// Check if we should stop due to fail-fast
|
|
193
|
-
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
194
|
-
if (failFast && this.shouldFailFast(result)) {
|
|
195
|
-
shouldStop = true;
|
|
196
|
-
break;
|
|
197
|
-
}
|
|
198
|
-
}
|
|
199
|
-
catch (error) {
|
|
200
|
-
results[taskIndex] = { status: 'rejected', reason: error };
|
|
201
|
-
// If fail-fast is enabled and we have an error, stop execution
|
|
202
|
-
if (failFast) {
|
|
203
|
-
shouldStop = true;
|
|
204
|
-
break;
|
|
205
|
-
}
|
|
206
|
-
}
|
|
207
|
-
}
|
|
208
|
-
};
|
|
209
|
-
// Create workers up to the parallelism limit
|
|
210
|
-
const workers = [];
|
|
211
|
-
const workerCount = Math.min(maxParallelism, tasks.length);
|
|
212
|
-
for (let i = 0; i < workerCount; i++) {
|
|
213
|
-
workers.push(worker());
|
|
214
|
-
}
|
|
215
|
-
// Wait for all workers to complete
|
|
216
|
-
await Promise.all(workers);
|
|
217
|
-
return results;
|
|
218
|
-
}
|
|
219
|
-
/**
|
|
220
|
-
* Execute review checks using parallel execution for multiple AI checks
|
|
221
|
-
*/
|
|
222
|
-
async executeReviewChecks(prInfo, checks, timeout, config, outputFormat, debug, maxParallelism, failFast) {
|
|
223
|
-
// Determine where to send log messages based on output format
|
|
224
|
-
const logFn = outputFormat === 'json' || outputFormat === 'sarif' ? console.error : console.log;
|
|
225
|
-
logFn(`🔧 Debug: executeReviewChecks called with checks: ${JSON.stringify(checks)}`);
|
|
226
|
-
logFn(`🔧 Debug: Config available: ${!!config}, Config has checks: ${!!config?.checks}`);
|
|
227
|
-
// Filter checks based on current event type to prevent execution of checks that shouldn't run
|
|
228
|
-
const filteredChecks = this.filterChecksByEvent(checks, config, prInfo, logFn);
|
|
229
|
-
if (filteredChecks.length !== checks.length) {
|
|
230
|
-
logFn(`🔧 Debug: Event filtering reduced checks from ${checks.length} to ${filteredChecks.length}: ${JSON.stringify(filteredChecks)}`);
|
|
231
|
-
}
|
|
232
|
-
// Use filtered checks for execution
|
|
233
|
-
checks = filteredChecks;
|
|
234
|
-
// If we have a config with individual check definitions, use dependency-aware execution
|
|
235
|
-
// Check if any of the checks have dependencies or if there are multiple checks
|
|
236
|
-
const hasDependencies = config?.checks &&
|
|
237
|
-
checks.some(checkName => {
|
|
238
|
-
const checkConfig = config.checks[checkName];
|
|
239
|
-
return checkConfig?.depends_on && checkConfig.depends_on.length > 0;
|
|
240
|
-
});
|
|
241
|
-
if (config?.checks && (checks.length > 1 || hasDependencies)) {
|
|
242
|
-
logFn(`🔧 Debug: Using dependency-aware execution for ${checks.length} checks (has dependencies: ${hasDependencies})`);
|
|
243
|
-
return await this.executeDependencyAwareChecks(prInfo, checks, timeout, config, logFn, debug, maxParallelism, failFast);
|
|
244
|
-
}
|
|
245
|
-
// Single check execution (existing logic)
|
|
246
|
-
if (checks.length === 1) {
|
|
247
|
-
logFn(`🔧 Debug: Using single check execution for: ${checks[0]}`);
|
|
248
|
-
// If we have a config definition for this check, use it
|
|
249
|
-
if (config?.checks?.[checks[0]]) {
|
|
250
|
-
return await this.executeSingleConfiguredCheck(prInfo, checks[0], timeout, config, logFn);
|
|
251
|
-
}
|
|
252
|
-
// Try provider system for single checks
|
|
253
|
-
if (this.providerRegistry.hasProvider(checks[0])) {
|
|
254
|
-
const provider = this.providerRegistry.getProviderOrThrow(checks[0]);
|
|
255
|
-
const providerConfig = {
|
|
256
|
-
type: checks[0],
|
|
257
|
-
prompt: 'all',
|
|
258
|
-
ai: timeout ? { timeout } : undefined,
|
|
259
|
-
};
|
|
260
|
-
const result = await provider.execute(prInfo, providerConfig);
|
|
261
|
-
// Prefix issues with check name for consistent grouping
|
|
262
|
-
const prefixedIssues = (result.issues || []).map(issue => ({
|
|
263
|
-
...issue,
|
|
264
|
-
ruleId: `${checks[0]}/${issue.ruleId}`,
|
|
265
|
-
}));
|
|
266
|
-
return {
|
|
267
|
-
...result,
|
|
268
|
-
issues: prefixedIssues,
|
|
269
|
-
};
|
|
270
|
-
}
|
|
271
|
-
}
|
|
272
|
-
// Check if 'ai' provider is available for focus-based checks (legacy support)
|
|
273
|
-
if (this.providerRegistry.hasProvider('ai')) {
|
|
274
|
-
logFn(`🔧 Debug: Using AI provider with focus mapping`);
|
|
275
|
-
const provider = this.providerRegistry.getProviderOrThrow('ai');
|
|
276
|
-
let focus = 'all';
|
|
277
|
-
let checkName = 'all';
|
|
278
|
-
if (checks.length === 1) {
|
|
279
|
-
checkName = checks[0];
|
|
280
|
-
if (checks[0] === 'security' || checks[0] === 'performance' || checks[0] === 'style') {
|
|
281
|
-
focus = checks[0];
|
|
282
|
-
}
|
|
283
|
-
}
|
|
284
|
-
else {
|
|
285
|
-
// For multiple checks, combine them into 'all' focus
|
|
286
|
-
focus = 'all';
|
|
287
|
-
}
|
|
288
|
-
const providerConfig = {
|
|
289
|
-
type: 'ai',
|
|
290
|
-
prompt: focus,
|
|
291
|
-
focus: focus,
|
|
292
|
-
ai: timeout ? { timeout } : undefined,
|
|
293
|
-
// Inherit global AI provider and model settings if config is available
|
|
294
|
-
ai_provider: config?.ai_provider,
|
|
295
|
-
ai_model: config?.ai_model,
|
|
296
|
-
};
|
|
297
|
-
const result = await provider.execute(prInfo, providerConfig);
|
|
298
|
-
// Prefix issues with check name for consistent grouping
|
|
299
|
-
const prefixedIssues = (result.issues || []).map(issue => ({
|
|
300
|
-
...issue,
|
|
301
|
-
ruleId: `${checkName}/${issue.ruleId}`,
|
|
302
|
-
}));
|
|
303
|
-
return {
|
|
304
|
-
...result,
|
|
305
|
-
issues: prefixedIssues,
|
|
306
|
-
};
|
|
307
|
-
}
|
|
308
|
-
// Fallback to existing PRReviewer for backward compatibility
|
|
309
|
-
logFn(`🔧 Debug: Using legacy PRReviewer fallback`);
|
|
310
|
-
const focusMap = {
|
|
311
|
-
security: 'security',
|
|
312
|
-
performance: 'performance',
|
|
313
|
-
style: 'style',
|
|
314
|
-
all: 'all',
|
|
315
|
-
architecture: 'all',
|
|
316
|
-
};
|
|
317
|
-
let focus = 'all';
|
|
318
|
-
if (checks.length === 1 && focusMap[checks[0]]) {
|
|
319
|
-
focus = focusMap[checks[0]];
|
|
320
|
-
}
|
|
321
|
-
return await this.reviewer.reviewPR('local', 'repository', 0, prInfo, {
|
|
322
|
-
focus,
|
|
323
|
-
format: 'table',
|
|
324
|
-
});
|
|
325
|
-
}
|
|
326
|
-
/**
|
|
327
|
-
* Execute review checks and return grouped results for new architecture
|
|
328
|
-
*/
|
|
329
|
-
async executeGroupedChecks(prInfo, checks, timeout, config, outputFormat, debug, maxParallelism, failFast) {
|
|
330
|
-
// Determine where to send log messages based on output format
|
|
331
|
-
const logFn = outputFormat === 'json' || outputFormat === 'sarif' ? console.error : console.log;
|
|
332
|
-
logFn(`🔧 Debug: executeGroupedChecks called with checks: ${JSON.stringify(checks)}`);
|
|
333
|
-
logFn(`🔧 Debug: Config available: ${!!config}, Config has checks: ${!!config?.checks}`);
|
|
334
|
-
// Filter checks based on current event type to prevent execution of checks that shouldn't run
|
|
335
|
-
const filteredChecks = this.filterChecksByEvent(checks, config, prInfo, logFn);
|
|
336
|
-
if (filteredChecks.length !== checks.length) {
|
|
337
|
-
logFn(`🔧 Debug: Event filtering reduced checks from ${checks.length} to ${filteredChecks.length}: ${JSON.stringify(filteredChecks)}`);
|
|
338
|
-
}
|
|
339
|
-
// Use filtered checks for execution
|
|
340
|
-
checks = filteredChecks;
|
|
341
|
-
if (!config?.checks) {
|
|
342
|
-
throw new Error('Config with check definitions required for grouped execution');
|
|
343
|
-
}
|
|
344
|
-
// If we have a config with individual check definitions, use dependency-aware execution
|
|
345
|
-
const hasDependencies = checks.some(checkName => {
|
|
346
|
-
const checkConfig = config.checks[checkName];
|
|
347
|
-
return checkConfig?.depends_on && checkConfig.depends_on.length > 0;
|
|
348
|
-
});
|
|
349
|
-
if (checks.length > 1 || hasDependencies) {
|
|
350
|
-
logFn(`🔧 Debug: Using grouped dependency-aware execution for ${checks.length} checks (has dependencies: ${hasDependencies})`);
|
|
351
|
-
return await this.executeGroupedDependencyAwareChecks(prInfo, checks, timeout, config, logFn, debug, maxParallelism, failFast);
|
|
352
|
-
}
|
|
353
|
-
// Single check execution
|
|
354
|
-
if (checks.length === 1) {
|
|
355
|
-
logFn(`🔧 Debug: Using grouped single check execution for: ${checks[0]}`);
|
|
356
|
-
const checkResult = await this.executeSingleGroupedCheck(prInfo, checks[0], timeout, config, logFn, debug);
|
|
357
|
-
const groupedResults = {};
|
|
358
|
-
groupedResults[checkResult.group] = [checkResult];
|
|
359
|
-
return groupedResults;
|
|
360
|
-
}
|
|
361
|
-
// No checks to execute
|
|
362
|
-
return {};
|
|
363
|
-
}
|
|
364
|
-
/**
|
|
365
|
-
* Execute single check and return grouped result
|
|
366
|
-
*/
|
|
367
|
-
async executeSingleGroupedCheck(prInfo, checkName, timeout, config, logFn, debug) {
|
|
368
|
-
if (!config?.checks?.[checkName]) {
|
|
369
|
-
throw new Error(`No configuration found for check: ${checkName}`);
|
|
370
|
-
}
|
|
371
|
-
const checkConfig = config.checks[checkName];
|
|
372
|
-
const provider = this.providerRegistry.getProviderOrThrow('ai');
|
|
373
|
-
const providerConfig = {
|
|
374
|
-
type: 'ai',
|
|
375
|
-
prompt: checkConfig.prompt,
|
|
376
|
-
focus: checkConfig.focus || this.mapCheckNameToFocus(checkName),
|
|
377
|
-
schema: checkConfig.schema,
|
|
378
|
-
group: checkConfig.group,
|
|
379
|
-
ai: {
|
|
380
|
-
timeout: timeout || 600000,
|
|
381
|
-
debug: debug,
|
|
382
|
-
...(checkConfig.ai || {}),
|
|
383
|
-
},
|
|
384
|
-
ai_provider: checkConfig.ai_provider || config.ai_provider,
|
|
385
|
-
ai_model: checkConfig.ai_model || config.ai_model,
|
|
386
|
-
};
|
|
387
|
-
const result = await provider.execute(prInfo, providerConfig);
|
|
388
|
-
// Render the check content using the appropriate template
|
|
389
|
-
const content = await this.renderCheckContent(checkName, result, checkConfig, prInfo);
|
|
390
|
-
return {
|
|
391
|
-
checkName,
|
|
392
|
-
content,
|
|
393
|
-
group: checkConfig.group || 'default',
|
|
394
|
-
debug: result.debug,
|
|
395
|
-
issues: result.issues, // Include structured issues
|
|
396
|
-
};
|
|
397
|
-
}
|
|
398
|
-
/**
|
|
399
|
-
* Execute multiple checks with dependency awareness - return grouped results
|
|
400
|
-
*/
|
|
401
|
-
async executeGroupedDependencyAwareChecks(prInfo, checks, timeout, config, logFn, debug, maxParallelism, failFast) {
|
|
402
|
-
// Use the existing dependency-aware execution logic
|
|
403
|
-
const reviewSummary = await this.executeDependencyAwareChecks(prInfo, checks, timeout, config, logFn, debug, maxParallelism, failFast);
|
|
404
|
-
// Convert the flat ReviewSummary to grouped CheckResults
|
|
405
|
-
return await this.convertReviewSummaryToGroupedResults(reviewSummary, checks, config, prInfo);
|
|
406
|
-
}
|
|
407
|
-
/**
|
|
408
|
-
* Convert ReviewSummary to GroupedCheckResults
|
|
409
|
-
*/
|
|
410
|
-
async convertReviewSummaryToGroupedResults(reviewSummary, checks, config, prInfo) {
|
|
411
|
-
const groupedResults = {};
|
|
412
|
-
// Process each check individually
|
|
413
|
-
for (const checkName of checks) {
|
|
414
|
-
const checkConfig = config?.checks?.[checkName];
|
|
415
|
-
if (!checkConfig)
|
|
416
|
-
continue;
|
|
417
|
-
// Extract issues for this check
|
|
418
|
-
const checkIssues = (reviewSummary.issues || []).filter(issue => issue.ruleId?.startsWith(`${checkName}/`));
|
|
419
|
-
// Extract suggestions for this check
|
|
420
|
-
const checkSuggestions = (reviewSummary.suggestions || []).filter(suggestion => suggestion.startsWith(`[${checkName}]`));
|
|
421
|
-
// Create a mini ReviewSummary for this check
|
|
422
|
-
const checkSummary = {
|
|
423
|
-
issues: checkIssues,
|
|
424
|
-
suggestions: checkSuggestions,
|
|
425
|
-
debug: reviewSummary.debug,
|
|
426
|
-
};
|
|
427
|
-
// Render content for this check
|
|
428
|
-
const content = await this.renderCheckContent(checkName, checkSummary, checkConfig, prInfo);
|
|
429
|
-
const checkResult = {
|
|
430
|
-
checkName,
|
|
431
|
-
content,
|
|
432
|
-
group: checkConfig.group || 'default',
|
|
433
|
-
debug: reviewSummary.debug,
|
|
434
|
-
issues: checkIssues, // Include structured issues
|
|
435
|
-
};
|
|
436
|
-
// Add to appropriate group
|
|
437
|
-
const group = checkResult.group;
|
|
438
|
-
if (!groupedResults[group]) {
|
|
439
|
-
groupedResults[group] = [];
|
|
440
|
-
}
|
|
441
|
-
groupedResults[group].push(checkResult);
|
|
442
|
-
}
|
|
443
|
-
return groupedResults;
|
|
444
|
-
}
|
|
445
|
-
/**
|
|
446
|
-
* Validates that a file path is safe and within the project directory
|
|
447
|
-
* Prevents path traversal attacks by:
|
|
448
|
-
* - Blocking absolute paths
|
|
449
|
-
* - Blocking paths with ".." segments
|
|
450
|
-
* - Ensuring resolved path is within project directory
|
|
451
|
-
* - Blocking special characters and null bytes
|
|
452
|
-
* - Enforcing .liquid file extension
|
|
453
|
-
*/
|
|
454
|
-
async validateTemplatePath(templatePath) {
|
|
455
|
-
const path = await Promise.resolve().then(() => __importStar(require('path')));
|
|
456
|
-
// Validate input
|
|
457
|
-
if (!templatePath || typeof templatePath !== 'string' || templatePath.trim() === '') {
|
|
458
|
-
throw new Error('Template path must be a non-empty string');
|
|
459
|
-
}
|
|
460
|
-
// Block null bytes and other dangerous characters
|
|
461
|
-
if (templatePath.includes('\0') || templatePath.includes('\x00')) {
|
|
462
|
-
throw new Error('Template path contains invalid characters');
|
|
463
|
-
}
|
|
464
|
-
// Enforce .liquid file extension
|
|
465
|
-
if (!templatePath.endsWith('.liquid')) {
|
|
466
|
-
throw new Error('Template file must have .liquid extension');
|
|
467
|
-
}
|
|
468
|
-
// Block absolute paths
|
|
469
|
-
if (path.isAbsolute(templatePath)) {
|
|
470
|
-
throw new Error('Template path must be relative to project directory');
|
|
471
|
-
}
|
|
472
|
-
// Block paths with ".." segments
|
|
473
|
-
if (templatePath.includes('..')) {
|
|
474
|
-
throw new Error('Template path cannot contain ".." segments');
|
|
475
|
-
}
|
|
476
|
-
// Block paths starting with ~ (home directory)
|
|
477
|
-
if (templatePath.startsWith('~')) {
|
|
478
|
-
throw new Error('Template path cannot reference home directory');
|
|
479
|
-
}
|
|
480
|
-
// Get the project root directory from git analyzer
|
|
481
|
-
const repositoryInfo = await this.gitAnalyzer.analyzeRepository();
|
|
482
|
-
const projectRoot = repositoryInfo.workingDirectory;
|
|
483
|
-
// Validate project root
|
|
484
|
-
if (!projectRoot || typeof projectRoot !== 'string') {
|
|
485
|
-
throw new Error('Unable to determine project root directory');
|
|
486
|
-
}
|
|
487
|
-
// Resolve the template path relative to project root
|
|
488
|
-
const resolvedPath = path.resolve(projectRoot, templatePath);
|
|
489
|
-
const resolvedProjectRoot = path.resolve(projectRoot);
|
|
490
|
-
// Validate resolved paths
|
|
491
|
-
if (!resolvedPath ||
|
|
492
|
-
!resolvedProjectRoot ||
|
|
493
|
-
resolvedPath === '' ||
|
|
494
|
-
resolvedProjectRoot === '') {
|
|
495
|
-
throw new Error(`Unable to resolve template path: projectRoot="${projectRoot}", templatePath="${templatePath}", resolvedPath="${resolvedPath}", resolvedProjectRoot="${resolvedProjectRoot}"`);
|
|
496
|
-
}
|
|
497
|
-
// Ensure the resolved path is still within the project directory
|
|
498
|
-
if (!resolvedPath.startsWith(resolvedProjectRoot + path.sep) &&
|
|
499
|
-
resolvedPath !== resolvedProjectRoot) {
|
|
500
|
-
throw new Error('Template path escapes project directory');
|
|
501
|
-
}
|
|
502
|
-
return resolvedPath;
|
|
503
|
-
}
|
|
504
|
-
/**
|
|
505
|
-
* Render check content using the appropriate template
|
|
506
|
-
*/
|
|
507
|
-
async renderCheckContent(checkName, reviewSummary, checkConfig, _prInfo) {
|
|
508
|
-
// Import the liquid template system
|
|
509
|
-
const { Liquid } = await Promise.resolve().then(() => __importStar(require('liquidjs')));
|
|
510
|
-
const fs = await Promise.resolve().then(() => __importStar(require('fs/promises')));
|
|
511
|
-
const path = await Promise.resolve().then(() => __importStar(require('path')));
|
|
512
|
-
const liquid = new Liquid({
|
|
513
|
-
trimTagLeft: false,
|
|
514
|
-
trimTagRight: false,
|
|
515
|
-
trimOutputLeft: false,
|
|
516
|
-
trimOutputRight: false,
|
|
517
|
-
greedy: false,
|
|
518
|
-
});
|
|
519
|
-
// Determine template to use
|
|
520
|
-
const schema = checkConfig.schema || 'plain';
|
|
521
|
-
let templateContent;
|
|
522
|
-
if (checkConfig.template) {
|
|
523
|
-
// Custom template
|
|
524
|
-
if (checkConfig.template.content) {
|
|
525
|
-
templateContent = checkConfig.template.content;
|
|
526
|
-
}
|
|
527
|
-
else if (checkConfig.template.file) {
|
|
528
|
-
// Validate the template file path to prevent path traversal attacks
|
|
529
|
-
const validatedPath = await this.validateTemplatePath(checkConfig.template.file);
|
|
530
|
-
templateContent = await fs.readFile(validatedPath, 'utf-8');
|
|
531
|
-
}
|
|
532
|
-
else {
|
|
533
|
-
throw new Error('Custom template must specify either "file" or "content"');
|
|
534
|
-
}
|
|
535
|
-
}
|
|
536
|
-
else if (schema === 'plain') {
|
|
537
|
-
// Plain schema - return raw content directly
|
|
538
|
-
// Strip [checkName] prefixes from suggestions before joining
|
|
539
|
-
const cleanedSuggestions = (reviewSummary.suggestions || []).map(suggestion => {
|
|
540
|
-
// Remove [checkName] prefix if present
|
|
541
|
-
return suggestion.replace(/^\[[^\]]+\]\s*/, '');
|
|
542
|
-
});
|
|
543
|
-
return (reviewSummary.issues?.[0]?.message || '') + (cleanedSuggestions.join('\n\n') || '');
|
|
544
|
-
}
|
|
545
|
-
else {
|
|
546
|
-
// Use built-in schema template
|
|
547
|
-
const sanitizedSchema = schema.replace(/[^a-zA-Z0-9-]/g, '');
|
|
548
|
-
if (!sanitizedSchema) {
|
|
549
|
-
throw new Error('Invalid schema name');
|
|
550
|
-
}
|
|
551
|
-
const templatePath = path.join(__dirname, `../output/${sanitizedSchema}/template.liquid`);
|
|
552
|
-
templateContent = await fs.readFile(templatePath, 'utf-8');
|
|
553
|
-
}
|
|
554
|
-
// Prepare template data
|
|
555
|
-
const templateData = {
|
|
556
|
-
issues: reviewSummary.issues || [],
|
|
557
|
-
checkName: checkName,
|
|
558
|
-
suggestions: reviewSummary.suggestions || [],
|
|
559
|
-
};
|
|
560
|
-
const rendered = await liquid.parseAndRender(templateContent, templateData);
|
|
561
|
-
return rendered.trim();
|
|
562
|
-
}
|
|
563
|
-
/**
|
|
564
|
-
* Execute multiple checks with dependency awareness - intelligently parallel and sequential
|
|
565
|
-
*/
|
|
566
|
-
/**
 * Run `checks` level-by-level according to their `depends_on` graph.
 *
 * Flow (all visible below):
 *   1. Build a dependency map and record which checks set `reuse_ai_session`.
 *   2. Validate the graph; validation failures and cycles are returned as a
 *      single synthetic error issue rather than thrown.
 *   3. Execute each level with bounded parallelism; levels containing
 *      session-reusing checks are forced to parallelism 1 so the parent
 *      session exists before the child runs.
 *   4. Unregister every created AI session, then aggregate all results.
 *
 * @param prInfo  PR context; this method reads `.head`, `.base` and
 *                `.files[].filename` (fed to `if`-condition evaluation).
 * @param checks  Names of checks to run; each should exist in `config.checks`.
 * @param timeout Per-check AI timeout in ms (defaults to 600000 below).
 * @param config  Visor config; must contain `checks` or this throws.
 * @param logFn   Optional logger; defaults to `console.error`.
 * @param debug   When true, the debug flag is forwarded to the AI provider.
 * @param maxParallelism CLI override; falls back to `config.max_parallelism`, then 3.
 * @param failFast       CLI override; falls back to `config.fail_fast`, then false.
 * @returns Aggregated review summary (issues + suggestions, optional debug).
 * @throws Error when `config.checks` is missing.
 */
async executeDependencyAwareChecks(prInfo, checks, timeout, config, logFn, debug, maxParallelism, failFast) {
    const log = logFn || console.error;
    log(`🔧 Debug: Starting dependency-aware execution of ${checks.length} checks`);
    if (!config?.checks) {
        throw new Error('Config with check definitions required for dependency-aware execution');
    }
    // Determine effective max parallelism (CLI > config > default)
    const effectiveMaxParallelism = maxParallelism ?? config.max_parallelism ?? 3;
    // Determine effective fail-fast setting (CLI > config > default)
    const effectiveFailFast = failFast ?? config.fail_fast ?? false;
    log(`🔧 Debug: Using max parallelism: ${effectiveMaxParallelism}`);
    log(`🔧 Debug: Using fail-fast: ${effectiveFailFast}`);
    // Build dependency graph and check for session reuse requirements
    const dependencies = {};
    const sessionReuseChecks = new Set();
    const sessionProviders = new Map(); // checkName -> parent session provider
    for (const checkName of checks) {
        const checkConfig = config.checks[checkName];
        if (checkConfig) {
            dependencies[checkName] = checkConfig.depends_on || [];
            // Track checks that need session reuse
            if (checkConfig.reuse_ai_session === true) {
                sessionReuseChecks.add(checkName);
                // Find the parent check that will provide the session.
                // For now, use the first dependency as the session provider.
                if (checkConfig.depends_on && checkConfig.depends_on.length > 0) {
                    sessionProviders.set(checkName, checkConfig.depends_on[0]);
                }
            }
        }
        else {
            // Unknown check names get an empty dependency list so graph
            // validation below can report them uniformly.
            dependencies[checkName] = [];
        }
    }
    if (sessionReuseChecks.size > 0) {
        log(`🔄 Debug: Found ${sessionReuseChecks.size} checks requiring session reuse: ${Array.from(sessionReuseChecks).join(', ')}`);
    }
    // Validate dependencies (missing targets etc.) before building the graph.
    const validation = dependency_resolver_1.DependencyResolver.validateDependencies(checks, dependencies);
    if (!validation.valid) {
        // Surface the failure as a synthetic issue instead of throwing so the
        // caller still receives a well-formed review summary.
        return {
            issues: [
                {
                    severity: 'error',
                    message: `Dependency validation failed: ${validation.errors.join(', ')}`,
                    file: '',
                    line: 0,
                    ruleId: 'dependency-validation-error',
                    category: 'logic',
                },
            ],
            suggestions: [],
        };
    }
    // Build dependency graph
    const dependencyGraph = dependency_resolver_1.DependencyResolver.buildDependencyGraph(dependencies);
    if (dependencyGraph.hasCycles) {
        // Same synthetic-issue strategy for circular dependencies.
        return {
            issues: [
                {
                    severity: 'error',
                    message: `Circular dependencies detected: ${dependencyGraph.cycleNodes?.join(' -> ')}`,
                    file: '',
                    line: 0,
                    ruleId: 'circular-dependency-error',
                    category: 'logic',
                },
            ],
            suggestions: [],
        };
    }
    // Log execution plan
    const stats = dependency_resolver_1.DependencyResolver.getExecutionStats(dependencyGraph);
    log(`🔧 Debug: Execution plan - ${stats.totalChecks} checks in ${stats.parallelLevels} levels, max parallelism: ${stats.maxParallelism}`);
    // Execute checks level by level. `results` is shared with the per-check
    // closures below so later levels can read their dependencies' output.
    const results = new Map();
    const sessionRegistry = require('./session-registry').SessionRegistry.getInstance();
    const provider = this.providerRegistry.getProviderOrThrow('ai');
    const sessionIds = new Map(); // checkName -> sessionId
    let shouldStopExecution = false;
    for (let levelIndex = 0; levelIndex < dependencyGraph.executionOrder.length && !shouldStopExecution; levelIndex++) {
        const executionGroup = dependencyGraph.executionOrder[levelIndex];
        // Check if any checks in this level require session reuse - if so, force sequential execution
        const checksInLevel = executionGroup.parallel;
        const hasSessionReuseInLevel = checksInLevel.some(checkName => sessionReuseChecks.has(checkName));
        let actualParallelism = Math.min(effectiveMaxParallelism, executionGroup.parallel.length);
        if (hasSessionReuseInLevel) {
            // Force sequential execution when session reuse is involved
            actualParallelism = 1;
            log(`🔄 Debug: Level ${executionGroup.level} contains session reuse checks - forcing sequential execution (parallelism: 1)`);
        }
        log(`🔧 Debug: Executing level ${executionGroup.level} with ${executionGroup.parallel.length} checks (parallelism: ${actualParallelism})`);
        // Create task functions for checks in this level. Each task resolves to
        // a { checkName, error, result } record; errors are captured, not thrown.
        const levelTaskFunctions = executionGroup.parallel.map(checkName => async () => {
            const checkConfig = config.checks[checkName];
            if (!checkConfig) {
                return {
                    checkName,
                    error: `No configuration found for check: ${checkName}`,
                    result: null,
                };
            }
            try {
                log(`🔧 Debug: Starting check: ${checkName} at level ${executionGroup.level}`);
                // Evaluate if condition to determine whether to run this check
                if (checkConfig.if) {
                    const shouldRun = await this.failureEvaluator.evaluateIfCondition(checkName, checkConfig.if, {
                        branch: prInfo.head,
                        baseBranch: prInfo.base,
                        filesChanged: prInfo.files.map(f => f.filename),
                        event: 'issue_comment', // Command triggered from comment
                        environment: getSafeEnvironmentVariables(),
                        previousResults: results,
                    });
                    if (!shouldRun) {
                        log(`🔧 Debug: Skipping check '${checkName}' - if condition evaluated to false`);
                        // A skip is reported as a successful result with a note,
                        // not as an error, so it does not trip fail-fast.
                        return {
                            checkName,
                            error: null,
                            result: {
                                issues: [],
                                suggestions: [`Check '${checkName}' was skipped - condition not met`],
                            },
                        };
                    }
                }
                // Create provider config for this specific check
                const providerConfig = {
                    type: 'ai',
                    prompt: checkConfig.prompt,
                    focus: checkConfig.focus || this.mapCheckNameToFocus(checkName),
                    schema: checkConfig.schema,
                    group: checkConfig.group,
                    checkName: checkName, // Add checkName for sessionID
                    ai: {
                        timeout: timeout || 600000,
                        debug: debug,
                        ...(checkConfig.ai || {}),
                    },
                };
                // Pass results from dependencies if needed
                const dependencyResults = new Map();
                for (const depId of checkConfig.depends_on || []) {
                    if (results.has(depId)) {
                        dependencyResults.set(depId, results.get(depId));
                    }
                }
                // Determine if we should use session reuse
                let sessionInfo = undefined;
                if (sessionReuseChecks.has(checkName)) {
                    const parentCheckName = sessionProviders.get(checkName);
                    if (parentCheckName && sessionIds.has(parentCheckName)) {
                        const parentSessionId = sessionIds.get(parentCheckName);
                        sessionInfo = {
                            parentSessionId: parentSessionId,
                            reuseSession: true,
                        };
                        log(`🔄 Debug: Check ${checkName} will reuse session from parent ${parentCheckName}: ${parentSessionId}`);
                    }
                    else {
                        // Parent session missing: warn and fall through to creating
                        // a fresh session below rather than failing the check.
                        log(`⚠️ Warning: Check ${checkName} requires session reuse but parent ${parentCheckName} session not found`);
                    }
                }
                // For checks that create new sessions, generate a session ID
                let currentSessionId = undefined;
                if (!sessionInfo?.reuseSession) {
                    const timestamp = new Date().toISOString();
                    // ':' and '.' are stripped so the ID is filesystem/URL friendly.
                    currentSessionId = `visor-${timestamp.replace(/[:.]/g, '-')}-${checkName}`;
                    sessionIds.set(checkName, currentSessionId);
                    log(`🆕 Debug: Check ${checkName} will create new session: ${currentSessionId}`);
                    // Add session ID to provider config
                    providerConfig.sessionId = currentSessionId;
                }
                const result = await provider.execute(prInfo, providerConfig, dependencyResults, sessionInfo);
                log(`🔧 Debug: Completed check: ${checkName}, issues found: ${(result.issues || []).length}`);
                // Add group, schema, template info and timestamp to issues from config
                const enrichedIssues = (result.issues || []).map(issue => ({
                    ...issue,
                    ruleId: `${checkName}/${issue.ruleId}`,
                    group: checkConfig.group,
                    schema: checkConfig.schema,
                    template: checkConfig.template,
                    timestamp: Date.now(),
                }));
                const enrichedResult = {
                    ...result,
                    issues: enrichedIssues,
                };
                return {
                    checkName,
                    error: null,
                    result: enrichedResult,
                };
            }
            catch (error) {
                const errorMessage = error instanceof Error ? error.message : String(error);
                log(`🔧 Debug: Error in check ${checkName}: ${errorMessage}`);
                return {
                    checkName,
                    error: errorMessage,
                    result: null,
                };
            }
        });
        // Execute checks in this level with controlled parallelism
        const levelResults = await this.executeWithLimitedParallelism(levelTaskFunctions, actualParallelism, effectiveFailFast);
        // Process results and store them for next level. `levelResults[i]` is a
        // settled-promise record (status/value/reason) aligned with
        // `executionGroup.parallel[i]`.
        for (let i = 0; i < levelResults.length; i++) {
            const checkName = executionGroup.parallel[i];
            const result = levelResults[i];
            if (result.status === 'fulfilled' && result.value.result && !result.value.error) {
                results.set(checkName, result.value.result);
            }
            else {
                // Store error result for dependency tracking
                const errorSummary = {
                    issues: [
                        {
                            file: 'system',
                            line: 0,
                            endLine: undefined,
                            ruleId: `${checkName}/error`,
                            message: result.status === 'fulfilled'
                                ? result.value.error || 'Unknown error'
                                : result.reason instanceof Error
                                    ? result.reason.message
                                    : String(result.reason),
                            severity: 'error',
                            category: 'logic',
                            suggestion: undefined,
                            replacement: undefined,
                        },
                    ],
                    suggestions: [],
                };
                results.set(checkName, errorSummary);
                // Check if we should stop execution due to fail-fast
                if (effectiveFailFast) {
                    log(`🛑 Check "${checkName}" failed and fail-fast is enabled - stopping execution`);
                    shouldStopExecution = true;
                    break;
                }
            }
        }
        // If fail-fast is enabled, check if any successful checks have failure conditions
        if (effectiveFailFast && !shouldStopExecution) {
            for (let i = 0; i < levelResults.length; i++) {
                const checkName = executionGroup.parallel[i];
                const result = levelResults[i];
                if (result.status === 'fulfilled' && result.value.result && !result.value.error) {
                    // Check for issues that should trigger fail-fast
                    const hasFailuresToReport = (result.value.result.issues || []).some(issue => issue.severity === 'error' || issue.severity === 'critical');
                    if (hasFailuresToReport) {
                        log(`🛑 Check "${checkName}" found critical/error issues and fail-fast is enabled - stopping execution`);
                        shouldStopExecution = true;
                        break;
                    }
                }
            }
        }
    }
    // Log final execution status
    if (shouldStopExecution) {
        log(`🛑 Execution stopped early due to fail-fast after processing ${results.size} of ${checks.length} checks`);
    }
    else {
        log(`✅ Dependency-aware execution completed successfully for all ${results.size} checks`);
    }
    // Cleanup sessions after execution (best-effort: failures are logged,
    // never rethrown, so one bad unregister cannot lose the review results).
    if (sessionIds.size > 0) {
        log(`🧹 Cleaning up ${sessionIds.size} AI sessions...`);
        for (const [checkName, sessionId] of sessionIds) {
            try {
                sessionRegistry.unregisterSession(sessionId);
                log(`🗑️ Cleaned up session for check ${checkName}: ${sessionId}`);
            }
            catch (error) {
                log(`⚠️ Failed to cleanup session for check ${checkName}: ${error}`);
            }
        }
    }
    // Aggregate all results
    return this.aggregateDependencyAwareResults(results, dependencyGraph, debug, shouldStopExecution);
}
|
|
850
|
-
/**
|
|
851
|
-
* Execute multiple checks in parallel using controlled parallelism (legacy method)
|
|
852
|
-
*/
|
|
853
|
-
async executeParallelChecks(prInfo, checks, timeout, config, logFn, debug, maxParallelism, failFast) {
|
|
854
|
-
const log = logFn || console.error;
|
|
855
|
-
log(`🔧 Debug: Starting parallel execution of ${checks.length} checks`);
|
|
856
|
-
if (!config?.checks) {
|
|
857
|
-
throw new Error('Config with check definitions required for parallel execution');
|
|
858
|
-
}
|
|
859
|
-
// Determine effective max parallelism (CLI > config > default)
|
|
860
|
-
const effectiveMaxParallelism = maxParallelism ?? config.max_parallelism ?? 3;
|
|
861
|
-
// Determine effective fail-fast setting (CLI > config > default)
|
|
862
|
-
const effectiveFailFast = failFast ?? config.fail_fast ?? false;
|
|
863
|
-
log(`🔧 Debug: Using max parallelism: ${effectiveMaxParallelism}`);
|
|
864
|
-
log(`🔧 Debug: Using fail-fast: ${effectiveFailFast}`);
|
|
865
|
-
const provider = this.providerRegistry.getProviderOrThrow('ai');
|
|
866
|
-
// Create individual check task functions
|
|
867
|
-
const checkTaskFunctions = checks.map(checkName => async () => {
|
|
868
|
-
const checkConfig = config.checks[checkName];
|
|
869
|
-
if (!checkConfig) {
|
|
870
|
-
log(`🔧 Debug: No config found for check: ${checkName}`);
|
|
871
|
-
return {
|
|
872
|
-
checkName,
|
|
873
|
-
error: `No configuration found for check: ${checkName}`,
|
|
874
|
-
result: null,
|
|
875
|
-
};
|
|
876
|
-
}
|
|
877
|
-
try {
|
|
878
|
-
console.error(`🔧 Debug: Starting check: ${checkName} with prompt type: ${typeof checkConfig.prompt}`);
|
|
879
|
-
// Evaluate if condition to determine whether to run this check
|
|
880
|
-
if (checkConfig.if) {
|
|
881
|
-
const shouldRun = await this.failureEvaluator.evaluateIfCondition(checkName, checkConfig.if, {
|
|
882
|
-
branch: prInfo.head,
|
|
883
|
-
baseBranch: prInfo.base,
|
|
884
|
-
filesChanged: prInfo.files.map(f => f.filename),
|
|
885
|
-
event: 'issue_comment', // Command triggered from comment
|
|
886
|
-
environment: getSafeEnvironmentVariables(),
|
|
887
|
-
previousResults: new Map(), // No previous results in parallel execution
|
|
888
|
-
});
|
|
889
|
-
if (!shouldRun) {
|
|
890
|
-
console.error(`🔧 Debug: Skipping check '${checkName}' - if condition evaluated to false`);
|
|
891
|
-
return {
|
|
892
|
-
checkName,
|
|
893
|
-
error: null,
|
|
894
|
-
result: {
|
|
895
|
-
issues: [],
|
|
896
|
-
suggestions: [`Check '${checkName}' was skipped - condition not met`],
|
|
897
|
-
},
|
|
898
|
-
};
|
|
899
|
-
}
|
|
900
|
-
}
|
|
901
|
-
// Create provider config for this specific check
|
|
902
|
-
const providerConfig = {
|
|
903
|
-
type: 'ai',
|
|
904
|
-
prompt: checkConfig.prompt,
|
|
905
|
-
focus: checkConfig.focus || this.mapCheckNameToFocus(checkName),
|
|
906
|
-
schema: checkConfig.schema,
|
|
907
|
-
group: checkConfig.group,
|
|
908
|
-
ai: {
|
|
909
|
-
timeout: timeout || 600000,
|
|
910
|
-
debug: debug, // Pass debug flag to AI provider
|
|
911
|
-
...(checkConfig.ai || {}),
|
|
912
|
-
},
|
|
913
|
-
};
|
|
914
|
-
const result = await provider.execute(prInfo, providerConfig);
|
|
915
|
-
console.error(`🔧 Debug: Completed check: ${checkName}, issues found: ${(result.issues || []).length}`);
|
|
916
|
-
// Add group, schema info and timestamp to issues from config
|
|
917
|
-
const enrichedIssues = (result.issues || []).map(issue => ({
|
|
918
|
-
...issue,
|
|
919
|
-
ruleId: `${checkName}/${issue.ruleId}`,
|
|
920
|
-
group: checkConfig.group,
|
|
921
|
-
schema: checkConfig.schema,
|
|
922
|
-
template: checkConfig.template,
|
|
923
|
-
timestamp: Date.now(),
|
|
924
|
-
}));
|
|
925
|
-
const enrichedResult = {
|
|
926
|
-
...result,
|
|
927
|
-
issues: enrichedIssues,
|
|
928
|
-
};
|
|
929
|
-
return {
|
|
930
|
-
checkName,
|
|
931
|
-
error: null,
|
|
932
|
-
result: enrichedResult,
|
|
933
|
-
};
|
|
934
|
-
}
|
|
935
|
-
catch (error) {
|
|
936
|
-
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
937
|
-
log(`🔧 Debug: Error in check ${checkName}: ${errorMessage}`);
|
|
938
|
-
return {
|
|
939
|
-
checkName,
|
|
940
|
-
error: errorMessage,
|
|
941
|
-
result: null,
|
|
942
|
-
};
|
|
943
|
-
}
|
|
944
|
-
});
|
|
945
|
-
// Execute all checks with controlled parallelism
|
|
946
|
-
log(`🔧 Debug: Executing ${checkTaskFunctions.length} checks with max parallelism: ${effectiveMaxParallelism}`);
|
|
947
|
-
const results = await this.executeWithLimitedParallelism(checkTaskFunctions, effectiveMaxParallelism, effectiveFailFast);
|
|
948
|
-
// Check if execution was stopped early
|
|
949
|
-
const completedChecks = results.filter(r => r.status === 'fulfilled' || r.status === 'rejected').length;
|
|
950
|
-
const stoppedEarly = completedChecks < checks.length;
|
|
951
|
-
if (stoppedEarly && effectiveFailFast) {
|
|
952
|
-
log(`🛑 Parallel execution stopped early due to fail-fast after processing ${completedChecks} of ${checks.length} checks`);
|
|
953
|
-
}
|
|
954
|
-
else {
|
|
955
|
-
log(`✅ Parallel execution completed for all ${completedChecks} checks`);
|
|
956
|
-
}
|
|
957
|
-
// Aggregate results from all checks
|
|
958
|
-
return this.aggregateParallelResults(results, checks, debug, stoppedEarly);
|
|
959
|
-
}
|
|
960
|
-
/**
|
|
961
|
-
* Execute a single configured check
|
|
962
|
-
*/
|
|
963
|
-
async executeSingleConfiguredCheck(prInfo, checkName, timeout, config, _logFn) {
|
|
964
|
-
if (!config?.checks?.[checkName]) {
|
|
965
|
-
throw new Error(`No configuration found for check: ${checkName}`);
|
|
966
|
-
}
|
|
967
|
-
const checkConfig = config.checks[checkName];
|
|
968
|
-
const provider = this.providerRegistry.getProviderOrThrow('ai');
|
|
969
|
-
const providerConfig = {
|
|
970
|
-
type: 'ai',
|
|
971
|
-
prompt: checkConfig.prompt,
|
|
972
|
-
focus: checkConfig.focus || this.mapCheckNameToFocus(checkName),
|
|
973
|
-
schema: checkConfig.schema,
|
|
974
|
-
group: checkConfig.group,
|
|
975
|
-
ai: {
|
|
976
|
-
timeout: timeout || 600000,
|
|
977
|
-
...(checkConfig.ai || {}),
|
|
978
|
-
},
|
|
979
|
-
// Inherit global AI provider and model settings
|
|
980
|
-
ai_provider: checkConfig.ai_provider || config.ai_provider,
|
|
981
|
-
ai_model: checkConfig.ai_model || config.ai_model,
|
|
982
|
-
};
|
|
983
|
-
const result = await provider.execute(prInfo, providerConfig);
|
|
984
|
-
// Prefix issues with check name and add group/schema info and timestamp from config
|
|
985
|
-
const prefixedIssues = (result.issues || []).map(issue => ({
|
|
986
|
-
...issue,
|
|
987
|
-
ruleId: `${checkName}/${issue.ruleId}`,
|
|
988
|
-
group: checkConfig.group,
|
|
989
|
-
schema: checkConfig.schema,
|
|
990
|
-
timestamp: Date.now(),
|
|
991
|
-
}));
|
|
992
|
-
return {
|
|
993
|
-
...result,
|
|
994
|
-
issues: prefixedIssues,
|
|
995
|
-
};
|
|
996
|
-
}
|
|
997
|
-
/**
|
|
998
|
-
* Map check name to focus for AI provider
|
|
999
|
-
* This is a fallback when focus is not explicitly configured
|
|
1000
|
-
*/
|
|
1001
|
-
mapCheckNameToFocus(checkName) {
|
|
1002
|
-
const focusMap = {
|
|
1003
|
-
security: 'security',
|
|
1004
|
-
performance: 'performance',
|
|
1005
|
-
style: 'style',
|
|
1006
|
-
architecture: 'architecture',
|
|
1007
|
-
};
|
|
1008
|
-
return focusMap[checkName] || 'all';
|
|
1009
|
-
}
|
|
1010
|
-
/**
|
|
1011
|
-
* Aggregate results from dependency-aware check execution
|
|
1012
|
-
*/
|
|
1013
|
-
/**
 * Merge per-check results from a dependency-aware run into one review summary.
 *
 * Issues are concatenated as-is (they were already prefixed/enriched at
 * execution time); suggestions get a `[checkName]` prefix; a human-readable
 * execution report is prepended to the suggestions list. When `debug` is
 * set, per-check provider debug payloads are folded into one aggregate.
 *
 * @param results Map of checkName -> check result (may contain synthetic
 *                error summaries for failed checks).
 * @param dependencyGraph Graph whose `executionOrder` drives output ordering.
 * @param debug   When true, build the aggregated debug payload.
 * @param stoppedEarly True when the run was cut short by fail-fast; only
 *                changes the wording of the report header.
 * @returns { issues, suggestions, debug } review summary.
 */
aggregateDependencyAwareResults(results, dependencyGraph, debug, stoppedEarly) {
    const aggregatedIssues = [];
    const aggregatedSuggestions = [];
    const debugInfo = [];
    // Add execution plan info
    const stats = dependency_resolver_1.DependencyResolver.getExecutionStats(dependencyGraph);
    const executionInfo = [
        stoppedEarly
            ? `🛑 Dependency-aware execution stopped early (fail-fast):`
            : `🔍 Dependency-aware execution completed:`,
        ` - ${results.size} of ${stats.totalChecks} checks processed`,
        ` - Execution levels: ${stats.parallelLevels}`,
        ` - Maximum parallelism: ${stats.maxParallelism}`,
        ` - Average parallelism: ${stats.averageParallelism.toFixed(1)}`,
        ` - Checks with dependencies: ${stats.checksWithDependencies}`,
        stoppedEarly ? ` - Stopped early due to fail-fast behavior` : ``,
    ].filter(Boolean); // filter(Boolean) drops the empty string when not stopped early
    debugInfo.push(...executionInfo);
    // Process results in dependency order for better output organization
    for (const executionGroup of dependencyGraph.executionOrder) {
        for (const checkName of executionGroup.parallel) {
            const result = results.get(checkName);
            if (!result) {
                // Checks skipped by an early fail-fast stop land here.
                debugInfo.push(`❌ Check "${checkName}" had no result`);
                continue;
            }
            // Check if this was a successful result: synthetic error issues carry
            // ruleIds of the form "<check>/error" or "<check>/promise-error".
            const hasErrors = (result.issues || []).some(issue => issue.ruleId?.includes('/error') || issue.ruleId?.includes('/promise-error'));
            if (hasErrors) {
                debugInfo.push(`❌ Check "${checkName}" failed with errors`);
            }
            else {
                debugInfo.push(`✅ Check "${checkName}" completed: ${(result.issues || []).length} issues found (level ${executionGroup.level})`);
            }
            // Issues are already prefixed and enriched with group/schema info
            aggregatedIssues.push(...(result.issues || []));
            // Add suggestions with check name prefix
            const prefixedSuggestions = (result.suggestions || []).map(suggestion => `[${checkName}] ${suggestion}`);
            aggregatedSuggestions.push(...prefixedSuggestions);
        }
    }
    // Add summary information ahead of the per-check suggestions
    aggregatedSuggestions.unshift(...debugInfo);
    console.error(`🔧 Debug: Aggregated ${aggregatedIssues.length} issues from ${results.size} dependency-aware checks`);
    // Collect debug information when debug mode is enabled
    let aggregatedDebug;
    if (debug) {
        // Only checks whose result carries a provider debug payload participate.
        const debugResults = Array.from(results.entries()).filter(([_, result]) => result.debug);
        if (debugResults.length > 0) {
            // First result serves as the template for provider/model/key info;
            // NOTE(review): this assumes all checks used the same provider —
            // mixed-provider runs would be reported under the first one.
            const [, firstResult] = debugResults[0];
            const firstDebug = firstResult.debug;
            const totalProcessingTime = debugResults.reduce((sum, [_, result]) => {
                return sum + (result.debug.processingTime || 0);
            }, 0);
            aggregatedDebug = {
                provider: firstDebug.provider,
                model: firstDebug.model,
                apiKeySource: firstDebug.apiKeySource,
                // Summed across all checks
                processingTime: totalProcessingTime,
                // Prompts/responses are concatenated, each tagged with its check name
                prompt: debugResults
                    .map(([checkName, result]) => `[${checkName}]\n${result.debug.prompt}`)
                    .join('\n\n'),
                rawResponse: debugResults
                    .map(([checkName, result]) => `[${checkName}]\n${result.debug.rawResponse}`)
                    .join('\n\n'),
                promptLength: debugResults.reduce((sum, [_, result]) => sum + (result.debug.promptLength || 0), 0),
                responseLength: debugResults.reduce((sum, [_, result]) => sum + (result.debug.responseLength || 0), 0),
                // True only if every check parsed its JSON response successfully
                jsonParseSuccess: debugResults.every(([_, result]) => result.debug.jsonParseSuccess),
                errors: debugResults.flatMap(([checkName, result]) => (result.debug.errors || []).map((error) => `[${checkName}] ${error}`)),
                timestamp: new Date().toISOString(),
                totalApiCalls: debugResults.length,
                // Per-check breakdown for detailed reporting
                apiCallDetails: debugResults.map(([checkName, result]) => ({
                    checkName,
                    provider: result.debug.provider,
                    model: result.debug.model,
                    processingTime: result.debug.processingTime,
                    success: result.debug.jsonParseSuccess,
                })),
            };
        }
    }
    return {
        issues: aggregatedIssues,
        suggestions: aggregatedSuggestions,
        debug: aggregatedDebug,
    };
}
|
|
1100
|
-
/**
|
|
1101
|
-
* Aggregate results from parallel check execution (legacy method)
|
|
1102
|
-
*/
|
|
1103
|
-
aggregateParallelResults(results, checkNames, debug, stoppedEarly) {
|
|
1104
|
-
const aggregatedIssues = [];
|
|
1105
|
-
const aggregatedSuggestions = [];
|
|
1106
|
-
const debugInfo = [];
|
|
1107
|
-
let successfulChecks = 0;
|
|
1108
|
-
let failedChecks = 0;
|
|
1109
|
-
results.forEach((result, index) => {
|
|
1110
|
-
const checkName = checkNames[index];
|
|
1111
|
-
if (result.status === 'fulfilled') {
|
|
1112
|
-
const checkResult = result.value;
|
|
1113
|
-
if (checkResult.error) {
|
|
1114
|
-
failedChecks++;
|
|
1115
|
-
const log = console.error;
|
|
1116
|
-
log(`🔧 Debug: Check ${checkName} failed: ${checkResult.error}`);
|
|
1117
|
-
debugInfo.push(`❌ Check "${checkName}" failed: ${checkResult.error}`);
|
|
1118
|
-
// Check if this is a critical error
|
|
1119
|
-
const isCriticalError = checkResult.error.includes('API rate limit') ||
|
|
1120
|
-
checkResult.error.includes('403') ||
|
|
1121
|
-
checkResult.error.includes('401') ||
|
|
1122
|
-
checkResult.error.includes('authentication') ||
|
|
1123
|
-
checkResult.error.includes('API key');
|
|
1124
|
-
// Add error as an issue with appropriate severity
|
|
1125
|
-
aggregatedIssues.push({
|
|
1126
|
-
file: 'system',
|
|
1127
|
-
line: 0,
|
|
1128
|
-
endLine: undefined,
|
|
1129
|
-
ruleId: `${checkName}/error`,
|
|
1130
|
-
message: `Check "${checkName}" failed: ${checkResult.error}`,
|
|
1131
|
-
severity: isCriticalError ? 'critical' : 'error',
|
|
1132
|
-
category: 'logic',
|
|
1133
|
-
suggestion: isCriticalError
|
|
1134
|
-
? 'Please check your API credentials and rate limits'
|
|
1135
|
-
: undefined,
|
|
1136
|
-
replacement: undefined,
|
|
1137
|
-
});
|
|
1138
|
-
}
|
|
1139
|
-
else if (checkResult.result) {
|
|
1140
|
-
successfulChecks++;
|
|
1141
|
-
console.error(`🔧 Debug: Check ${checkName} succeeded with ${(checkResult.result.issues || []).length} issues`);
|
|
1142
|
-
debugInfo.push(`✅ Check "${checkName}" completed: ${(checkResult.result.issues || []).length} issues found`);
|
|
1143
|
-
// Issues are already prefixed and enriched with group/schema info
|
|
1144
|
-
aggregatedIssues.push(...(checkResult.result.issues || []));
|
|
1145
|
-
// Add suggestions with check name prefix
|
|
1146
|
-
const prefixedSuggestions = (checkResult.result.suggestions || []).map(suggestion => `[${checkName}] ${suggestion}`);
|
|
1147
|
-
aggregatedSuggestions.push(...prefixedSuggestions);
|
|
1148
|
-
}
|
|
1149
|
-
}
|
|
1150
|
-
else {
|
|
1151
|
-
failedChecks++;
|
|
1152
|
-
const errorMessage = result.reason instanceof Error ? result.reason.message : String(result.reason);
|
|
1153
|
-
const log = console.error;
|
|
1154
|
-
log(`🔧 Debug: Check ${checkName} promise rejected: ${errorMessage}`);
|
|
1155
|
-
debugInfo.push(`❌ Check "${checkName}" promise rejected: ${errorMessage}`);
|
|
1156
|
-
// Check if this is a critical error
|
|
1157
|
-
const isCriticalError = errorMessage.includes('API rate limit') ||
|
|
1158
|
-
errorMessage.includes('403') ||
|
|
1159
|
-
errorMessage.includes('401') ||
|
|
1160
|
-
errorMessage.includes('authentication') ||
|
|
1161
|
-
errorMessage.includes('API key');
|
|
1162
|
-
aggregatedIssues.push({
|
|
1163
|
-
file: 'system',
|
|
1164
|
-
line: 0,
|
|
1165
|
-
endLine: undefined,
|
|
1166
|
-
ruleId: `${checkName}/promise-error`,
|
|
1167
|
-
message: `Check "${checkName}" execution failed: ${errorMessage}`,
|
|
1168
|
-
severity: isCriticalError ? 'critical' : 'error',
|
|
1169
|
-
category: 'logic',
|
|
1170
|
-
suggestion: isCriticalError
|
|
1171
|
-
? 'Please check your API credentials and rate limits'
|
|
1172
|
-
: undefined,
|
|
1173
|
-
replacement: undefined,
|
|
1174
|
-
});
|
|
1175
|
-
}
|
|
1176
|
-
});
|
|
1177
|
-
// Add summary information
|
|
1178
|
-
debugInfo.unshift(stoppedEarly
|
|
1179
|
-
? `🛑 Parallel execution stopped early (fail-fast): ${successfulChecks} successful, ${failedChecks} failed`
|
|
1180
|
-
: `🔍 Parallel execution completed: ${successfulChecks} successful, ${failedChecks} failed`);
|
|
1181
|
-
aggregatedSuggestions.unshift(...debugInfo);
|
|
1182
|
-
console.error(`🔧 Debug: Aggregated ${aggregatedIssues.length} issues from ${results.length} checks`);
|
|
1183
|
-
// Collect debug information when debug mode is enabled
|
|
1184
|
-
let aggregatedDebug;
|
|
1185
|
-
if (debug) {
|
|
1186
|
-
// Find the first successful result with debug information to use as template
|
|
1187
|
-
const debugResults = results
|
|
1188
|
-
.map((result, index) => ({
|
|
1189
|
-
result,
|
|
1190
|
-
checkName: checkNames[index],
|
|
1191
|
-
}))
|
|
1192
|
-
.filter(({ result }) => result.status === 'fulfilled' && result.value?.result?.debug);
|
|
1193
|
-
if (debugResults.length > 0) {
|
|
1194
|
-
const firstResult = debugResults[0].result;
|
|
1195
|
-
if (firstResult.status === 'fulfilled') {
|
|
1196
|
-
const firstDebug = firstResult.value.result.debug;
|
|
1197
|
-
const totalProcessingTime = debugResults.reduce((sum, { result }) => {
|
|
1198
|
-
if (result.status === 'fulfilled') {
|
|
1199
|
-
return sum + (result.value.result.debug.processingTime || 0);
|
|
1200
|
-
}
|
|
1201
|
-
return sum;
|
|
1202
|
-
}, 0);
|
|
1203
|
-
aggregatedDebug = {
|
|
1204
|
-
// Use first result as template for provider/model info
|
|
1205
|
-
provider: firstDebug.provider,
|
|
1206
|
-
model: firstDebug.model,
|
|
1207
|
-
apiKeySource: firstDebug.apiKeySource,
|
|
1208
|
-
// Aggregate processing time from all checks
|
|
1209
|
-
processingTime: totalProcessingTime,
|
|
1210
|
-
// Combine prompts with check names
|
|
1211
|
-
prompt: debugResults
|
|
1212
|
-
.map(({ checkName, result }) => {
|
|
1213
|
-
if (result.status === 'fulfilled') {
|
|
1214
|
-
return `[${checkName}]\n${result.value.result.debug.prompt}`;
|
|
1215
|
-
}
|
|
1216
|
-
return `[${checkName}] Error: Promise was rejected`;
|
|
1217
|
-
})
|
|
1218
|
-
.join('\n\n'),
|
|
1219
|
-
// Combine responses
|
|
1220
|
-
rawResponse: debugResults
|
|
1221
|
-
.map(({ checkName, result }) => {
|
|
1222
|
-
if (result.status === 'fulfilled') {
|
|
1223
|
-
return `[${checkName}]\n${result.value.result.debug.rawResponse}`;
|
|
1224
|
-
}
|
|
1225
|
-
return `[${checkName}] Error: Promise was rejected`;
|
|
1226
|
-
})
|
|
1227
|
-
.join('\n\n'),
|
|
1228
|
-
promptLength: debugResults.reduce((sum, { result }) => {
|
|
1229
|
-
if (result.status === 'fulfilled') {
|
|
1230
|
-
return sum + (result.value.result.debug.promptLength || 0);
|
|
1231
|
-
}
|
|
1232
|
-
return sum;
|
|
1233
|
-
}, 0),
|
|
1234
|
-
responseLength: debugResults.reduce((sum, { result }) => {
|
|
1235
|
-
if (result.status === 'fulfilled') {
|
|
1236
|
-
return sum + (result.value.result.debug.responseLength || 0);
|
|
1237
|
-
}
|
|
1238
|
-
return sum;
|
|
1239
|
-
}, 0),
|
|
1240
|
-
jsonParseSuccess: debugResults.every(({ result }) => {
|
|
1241
|
-
if (result.status === 'fulfilled') {
|
|
1242
|
-
return result.value.result.debug.jsonParseSuccess;
|
|
1243
|
-
}
|
|
1244
|
-
return false;
|
|
1245
|
-
}),
|
|
1246
|
-
errors: debugResults.flatMap(({ result, checkName }) => {
|
|
1247
|
-
if (result.status === 'fulfilled') {
|
|
1248
|
-
return (result.value.result.debug.errors || []).map((error) => `[${checkName}] ${error}`);
|
|
1249
|
-
}
|
|
1250
|
-
return [`[${checkName}] Promise was rejected`];
|
|
1251
|
-
}),
|
|
1252
|
-
timestamp: new Date().toISOString(),
|
|
1253
|
-
// Add additional debug information for parallel execution
|
|
1254
|
-
totalApiCalls: debugResults.length,
|
|
1255
|
-
apiCallDetails: debugResults.map(({ checkName, result }) => {
|
|
1256
|
-
if (result.status === 'fulfilled') {
|
|
1257
|
-
return {
|
|
1258
|
-
checkName,
|
|
1259
|
-
provider: result.value.result.debug.provider,
|
|
1260
|
-
model: result.value.result.debug.model,
|
|
1261
|
-
processingTime: result.value.result.debug.processingTime,
|
|
1262
|
-
success: result.value.result.debug.jsonParseSuccess,
|
|
1263
|
-
};
|
|
1264
|
-
}
|
|
1265
|
-
return {
|
|
1266
|
-
checkName,
|
|
1267
|
-
provider: 'unknown',
|
|
1268
|
-
model: 'unknown',
|
|
1269
|
-
processingTime: 0,
|
|
1270
|
-
success: false,
|
|
1271
|
-
};
|
|
1272
|
-
}),
|
|
1273
|
-
};
|
|
1274
|
-
}
|
|
1275
|
-
}
|
|
1276
|
-
}
|
|
1277
|
-
return {
|
|
1278
|
-
issues: aggregatedIssues,
|
|
1279
|
-
suggestions: aggregatedSuggestions,
|
|
1280
|
-
debug: aggregatedDebug,
|
|
1281
|
-
};
|
|
1282
|
-
}
|
|
1283
|
-
/**
|
|
1284
|
-
* Get available check types
|
|
1285
|
-
*/
|
|
1286
|
-
static getAvailableCheckTypes() {
|
|
1287
|
-
const registry = check_provider_registry_1.CheckProviderRegistry.getInstance();
|
|
1288
|
-
const providerTypes = registry.getAvailableProviders();
|
|
1289
|
-
// Add standard focus-based checks
|
|
1290
|
-
const standardTypes = ['security', 'performance', 'style', 'architecture', 'all'];
|
|
1291
|
-
// Combine provider types with standard types (remove duplicates)
|
|
1292
|
-
return [...new Set([...providerTypes, ...standardTypes])];
|
|
1293
|
-
}
|
|
1294
|
-
/**
|
|
1295
|
-
* Validate check types
|
|
1296
|
-
*/
|
|
1297
|
-
static validateCheckTypes(checks) {
|
|
1298
|
-
const availableChecks = CheckExecutionEngine.getAvailableCheckTypes();
|
|
1299
|
-
const valid = [];
|
|
1300
|
-
const invalid = [];
|
|
1301
|
-
for (const check of checks) {
|
|
1302
|
-
if (availableChecks.includes(check)) {
|
|
1303
|
-
valid.push(check);
|
|
1304
|
-
}
|
|
1305
|
-
else {
|
|
1306
|
-
invalid.push(check);
|
|
1307
|
-
}
|
|
1308
|
-
}
|
|
1309
|
-
return { valid, invalid };
|
|
1310
|
-
}
|
|
1311
|
-
/**
|
|
1312
|
-
* List available providers with their status
|
|
1313
|
-
*/
|
|
1314
|
-
async listProviders() {
|
|
1315
|
-
return await this.providerRegistry.listProviders();
|
|
1316
|
-
}
|
|
1317
|
-
/**
|
|
1318
|
-
* Create a mock Octokit instance for local analysis
|
|
1319
|
-
*/
|
|
1320
|
-
createMockOctokit() {
|
|
1321
|
-
// Create simple mock functions that return promises
|
|
1322
|
-
const mockGet = async () => ({
|
|
1323
|
-
data: {
|
|
1324
|
-
number: 0,
|
|
1325
|
-
title: 'Local Analysis',
|
|
1326
|
-
body: 'Local repository analysis',
|
|
1327
|
-
user: { login: 'local-user' },
|
|
1328
|
-
base: { ref: 'main' },
|
|
1329
|
-
head: { ref: 'HEAD' },
|
|
1330
|
-
},
|
|
1331
|
-
});
|
|
1332
|
-
const mockListFiles = async () => ({
|
|
1333
|
-
data: [],
|
|
1334
|
-
});
|
|
1335
|
-
const mockListComments = async () => ({
|
|
1336
|
-
data: [],
|
|
1337
|
-
});
|
|
1338
|
-
const mockCreateComment = async () => ({
|
|
1339
|
-
data: { id: 1 },
|
|
1340
|
-
});
|
|
1341
|
-
return {
|
|
1342
|
-
rest: {
|
|
1343
|
-
pulls: {
|
|
1344
|
-
get: mockGet,
|
|
1345
|
-
listFiles: mockListFiles,
|
|
1346
|
-
},
|
|
1347
|
-
issues: {
|
|
1348
|
-
listComments: mockListComments,
|
|
1349
|
-
createComment: mockCreateComment,
|
|
1350
|
-
},
|
|
1351
|
-
},
|
|
1352
|
-
request: async () => ({ data: {} }),
|
|
1353
|
-
graphql: async () => ({}),
|
|
1354
|
-
log: {
|
|
1355
|
-
debug: () => { },
|
|
1356
|
-
info: () => { },
|
|
1357
|
-
warn: () => { },
|
|
1358
|
-
error: () => { },
|
|
1359
|
-
},
|
|
1360
|
-
hook: {
|
|
1361
|
-
before: () => { },
|
|
1362
|
-
after: () => { },
|
|
1363
|
-
error: () => { },
|
|
1364
|
-
wrap: () => { },
|
|
1365
|
-
},
|
|
1366
|
-
auth: async () => ({ token: 'mock-token' }),
|
|
1367
|
-
};
|
|
1368
|
-
}
|
|
1369
|
-
/**
|
|
1370
|
-
* Create an error result
|
|
1371
|
-
*/
|
|
1372
|
-
createErrorResult(repositoryInfo, errorMessage, startTime, timestamp, checksExecuted) {
|
|
1373
|
-
const executionTime = Date.now() - startTime;
|
|
1374
|
-
return {
|
|
1375
|
-
repositoryInfo,
|
|
1376
|
-
reviewSummary: {
|
|
1377
|
-
issues: [
|
|
1378
|
-
{
|
|
1379
|
-
file: 'system',
|
|
1380
|
-
line: 0,
|
|
1381
|
-
endLine: undefined,
|
|
1382
|
-
ruleId: 'system/error',
|
|
1383
|
-
message: errorMessage,
|
|
1384
|
-
severity: 'error',
|
|
1385
|
-
category: 'logic',
|
|
1386
|
-
suggestion: undefined,
|
|
1387
|
-
replacement: undefined,
|
|
1388
|
-
},
|
|
1389
|
-
],
|
|
1390
|
-
suggestions: [`Error: ${errorMessage}`],
|
|
1391
|
-
},
|
|
1392
|
-
executionTime,
|
|
1393
|
-
timestamp,
|
|
1394
|
-
checksExecuted,
|
|
1395
|
-
};
|
|
1396
|
-
}
|
|
1397
|
-
/**
|
|
1398
|
-
* Check if a task result should trigger fail-fast behavior
|
|
1399
|
-
*/
|
|
1400
|
-
shouldFailFast(result) {
|
|
1401
|
-
// If the result has an error property, it's a failed check
|
|
1402
|
-
if (result?.error) {
|
|
1403
|
-
return true;
|
|
1404
|
-
}
|
|
1405
|
-
// If the result has a result with critical or error issues, it should fail fast
|
|
1406
|
-
if (result?.result?.issues) {
|
|
1407
|
-
return (result.result.issues || []).some((issue) => issue.severity === 'error' || issue.severity === 'critical');
|
|
1408
|
-
}
|
|
1409
|
-
return false;
|
|
1410
|
-
}
|
|
1411
|
-
/**
|
|
1412
|
-
* Check if the working directory is a valid git repository
|
|
1413
|
-
*/
|
|
1414
|
-
async isGitRepository() {
|
|
1415
|
-
try {
|
|
1416
|
-
const repositoryInfo = await this.gitAnalyzer.analyzeRepository();
|
|
1417
|
-
return repositoryInfo.isGitRepository;
|
|
1418
|
-
}
|
|
1419
|
-
catch {
|
|
1420
|
-
return false;
|
|
1421
|
-
}
|
|
1422
|
-
}
|
|
1423
|
-
/**
|
|
1424
|
-
* Evaluate failure conditions for a check result
|
|
1425
|
-
*/
|
|
1426
|
-
async evaluateFailureConditions(checkName, reviewSummary, config) {
|
|
1427
|
-
if (!config) {
|
|
1428
|
-
return [];
|
|
1429
|
-
}
|
|
1430
|
-
const checkConfig = config.checks[checkName];
|
|
1431
|
-
const checkSchema = checkConfig?.schema || '';
|
|
1432
|
-
const checkGroup = checkConfig?.group || '';
|
|
1433
|
-
// Handle new simple fail_if syntax
|
|
1434
|
-
const globalFailIf = config.fail_if;
|
|
1435
|
-
const checkFailIf = checkConfig?.fail_if;
|
|
1436
|
-
// If using new fail_if syntax
|
|
1437
|
-
if (globalFailIf || checkFailIf) {
|
|
1438
|
-
const results = [];
|
|
1439
|
-
// Evaluate global fail_if
|
|
1440
|
-
if (globalFailIf) {
|
|
1441
|
-
const failed = await this.failureEvaluator.evaluateSimpleCondition(checkName, checkSchema, checkGroup, reviewSummary, globalFailIf);
|
|
1442
|
-
if (failed) {
|
|
1443
|
-
results.push({
|
|
1444
|
-
conditionName: 'global_fail_if',
|
|
1445
|
-
expression: globalFailIf,
|
|
1446
|
-
failed: true,
|
|
1447
|
-
severity: 'error',
|
|
1448
|
-
message: 'Global failure condition met',
|
|
1449
|
-
haltExecution: false,
|
|
1450
|
-
});
|
|
1451
|
-
}
|
|
1452
|
-
}
|
|
1453
|
-
// Evaluate check-specific fail_if (overrides global if present)
|
|
1454
|
-
if (checkFailIf) {
|
|
1455
|
-
const failed = await this.failureEvaluator.evaluateSimpleCondition(checkName, checkSchema, checkGroup, reviewSummary, checkFailIf);
|
|
1456
|
-
if (failed) {
|
|
1457
|
-
results.push({
|
|
1458
|
-
conditionName: `${checkName}_fail_if`,
|
|
1459
|
-
expression: checkFailIf,
|
|
1460
|
-
failed: true,
|
|
1461
|
-
severity: 'error',
|
|
1462
|
-
message: `Check ${checkName} failure condition met`,
|
|
1463
|
-
haltExecution: false,
|
|
1464
|
-
});
|
|
1465
|
-
}
|
|
1466
|
-
}
|
|
1467
|
-
return results;
|
|
1468
|
-
}
|
|
1469
|
-
// Fall back to old failure_conditions syntax
|
|
1470
|
-
const globalConditions = config.failure_conditions;
|
|
1471
|
-
const checkConditions = checkConfig?.failure_conditions;
|
|
1472
|
-
return await this.failureEvaluator.evaluateConditions(checkName, checkSchema, checkGroup, reviewSummary, globalConditions, checkConditions);
|
|
1473
|
-
}
|
|
1474
|
-
/**
|
|
1475
|
-
* Get repository status summary
|
|
1476
|
-
*/
|
|
1477
|
-
async getRepositoryStatus() {
|
|
1478
|
-
try {
|
|
1479
|
-
const repositoryInfo = await this.gitAnalyzer.analyzeRepository();
|
|
1480
|
-
return {
|
|
1481
|
-
isGitRepository: repositoryInfo.isGitRepository,
|
|
1482
|
-
hasChanges: repositoryInfo.files.length > 0,
|
|
1483
|
-
branch: repositoryInfo.head,
|
|
1484
|
-
filesChanged: repositoryInfo.files.length,
|
|
1485
|
-
};
|
|
1486
|
-
}
|
|
1487
|
-
catch {
|
|
1488
|
-
return {
|
|
1489
|
-
isGitRepository: false,
|
|
1490
|
-
hasChanges: false,
|
|
1491
|
-
branch: 'unknown',
|
|
1492
|
-
filesChanged: 0,
|
|
1493
|
-
};
|
|
1494
|
-
}
|
|
1495
|
-
}
|
|
1496
|
-
/**
 * Initialize GitHub check runs for each configured check.
 *
 * Creates the GitHubCheckService, then one check run per entry in
 * options.checks, recording each created run in this.checkRunMap. Missing
 * parameters or a permissions failure disable check-run reporting (the
 * service and map are cleared) rather than aborting execution.
 *
 * @param options - Must carry githubChecks {octokit, owner, repo, headSha}
 *   and the list of check names in options.checks.
 * @param logFn - Sink for progress/diagnostic messages.
 */
async initializeGitHubChecks(options, logFn) {
    // All four parameters are required to talk to the Checks API.
    if (!options.githubChecks?.octokit ||
        !options.githubChecks.owner ||
        !options.githubChecks.repo ||
        !options.githubChecks.headSha) {
        logFn('⚠️ GitHub checks enabled but missing required parameters');
        return;
    }
    try {
        this.githubCheckService = new github_check_service_1.GitHubCheckService(options.githubChecks.octokit);
        this.checkRunMap = new Map();
        // Stash owner/repo so later completion calls don't need options again.
        this.githubContext = {
            owner: options.githubChecks.owner,
            repo: options.githubChecks.repo,
        };
        logFn(`🔍 Creating GitHub check runs for ${options.checks.length} checks...`);
        for (const checkName of options.checks) {
            try {
                const checkRunOptions = {
                    owner: options.githubChecks.owner,
                    repo: options.githubChecks.repo,
                    head_sha: options.githubChecks.headSha,
                    name: `Visor: ${checkName}`,
                    // Short-SHA suffix keeps the external_id unique per commit.
                    external_id: `visor-${checkName}-${options.githubChecks.headSha.substring(0, 7)}`,
                };
                const checkRun = await this.githubCheckService.createCheckRun(checkRunOptions, {
                    title: `${checkName} Analysis`,
                    summary: `Running ${checkName} check using AI-powered analysis...`,
                });
                this.checkRunMap.set(checkName, checkRun);
                logFn(`✅ Created check run for ${checkName}: ${checkRun.url}`);
            }
            catch (error) {
                // A single failed creation doesn't stop the remaining checks.
                logFn(`❌ Failed to create check run for ${checkName}: ${error}`);
            }
        }
    }
    catch (error) {
        // Check if this is a permissions error (403 / missing checks:write scope).
        if (error instanceof Error &&
            (error.message.includes('403') || error.message.includes('checks:write'))) {
            logFn('⚠️ GitHub checks API not available - insufficient permissions. Check runs will be skipped.');
            logFn('💡 To enable check runs, ensure your GitHub token has "checks:write" permission.');
            // Disable check-run reporting for the rest of this execution.
            this.githubCheckService = undefined;
            this.checkRunMap = undefined;
        }
        else {
            logFn(`❌ Failed to initialize GitHub check runs: ${error}`);
            this.githubCheckService = undefined;
            this.checkRunMap = undefined;
        }
    }
}
|
|
1552
|
-
/**
|
|
1553
|
-
* Update GitHub check runs to in-progress status
|
|
1554
|
-
*/
|
|
1555
|
-
async updateGitHubChecksInProgress(options) {
|
|
1556
|
-
if (!this.githubCheckService ||
|
|
1557
|
-
!this.checkRunMap ||
|
|
1558
|
-
!options.githubChecks?.owner ||
|
|
1559
|
-
!options.githubChecks.repo) {
|
|
1560
|
-
return;
|
|
1561
|
-
}
|
|
1562
|
-
for (const [checkName, checkRun] of this.checkRunMap) {
|
|
1563
|
-
try {
|
|
1564
|
-
await this.githubCheckService.updateCheckRunInProgress(options.githubChecks.owner, options.githubChecks.repo, checkRun.id, {
|
|
1565
|
-
title: `Analyzing with ${checkName}...`,
|
|
1566
|
-
summary: `AI-powered analysis is in progress for ${checkName} check.`,
|
|
1567
|
-
});
|
|
1568
|
-
console.log(`🔄 Updated ${checkName} check to in-progress status`);
|
|
1569
|
-
}
|
|
1570
|
-
catch (error) {
|
|
1571
|
-
console.error(`❌ Failed to update ${checkName} check to in-progress: ${error}`);
|
|
1572
|
-
}
|
|
1573
|
-
}
|
|
1574
|
-
}
|
|
1575
|
-
/**
|
|
1576
|
-
* Complete GitHub check runs with results
|
|
1577
|
-
*/
|
|
1578
|
-
async completeGitHubChecksWithResults(reviewSummary, options) {
|
|
1579
|
-
if (!this.githubCheckService ||
|
|
1580
|
-
!this.checkRunMap ||
|
|
1581
|
-
!options.githubChecks?.owner ||
|
|
1582
|
-
!options.githubChecks.repo) {
|
|
1583
|
-
return;
|
|
1584
|
-
}
|
|
1585
|
-
// Group issues by check name
|
|
1586
|
-
const issuesByCheck = new Map();
|
|
1587
|
-
// Initialize empty arrays for all checks
|
|
1588
|
-
for (const checkName of this.checkRunMap.keys()) {
|
|
1589
|
-
issuesByCheck.set(checkName, []);
|
|
1590
|
-
}
|
|
1591
|
-
// Group issues by their check name (extracted from ruleId prefix)
|
|
1592
|
-
for (const issue of reviewSummary.issues || []) {
|
|
1593
|
-
if (issue.ruleId && issue.ruleId.includes('/')) {
|
|
1594
|
-
const checkName = issue.ruleId.split('/')[0];
|
|
1595
|
-
if (issuesByCheck.has(checkName)) {
|
|
1596
|
-
issuesByCheck.get(checkName).push(issue);
|
|
1597
|
-
}
|
|
1598
|
-
}
|
|
1599
|
-
}
|
|
1600
|
-
console.log(`🏁 Completing ${this.checkRunMap.size} GitHub check runs...`);
|
|
1601
|
-
for (const [checkName, checkRun] of this.checkRunMap) {
|
|
1602
|
-
try {
|
|
1603
|
-
const checkIssues = issuesByCheck.get(checkName) || [];
|
|
1604
|
-
// Evaluate failure conditions for this specific check
|
|
1605
|
-
const failureResults = await this.evaluateFailureConditions(checkName, { issues: checkIssues, suggestions: [] }, options.config);
|
|
1606
|
-
await this.githubCheckService.completeCheckRun(options.githubChecks.owner, options.githubChecks.repo, checkRun.id, checkName, failureResults, checkIssues);
|
|
1607
|
-
console.log(`✅ Completed ${checkName} check with ${checkIssues.length} issues`);
|
|
1608
|
-
}
|
|
1609
|
-
catch (error) {
|
|
1610
|
-
console.error(`❌ Failed to complete ${checkName} check: ${error}`);
|
|
1611
|
-
// Try to mark the check as failed due to execution error
|
|
1612
|
-
try {
|
|
1613
|
-
await this.githubCheckService.completeCheckRun(options.githubChecks.owner, options.githubChecks.repo, checkRun.id, checkName, [], [], error instanceof Error ? error.message : 'Unknown error occurred');
|
|
1614
|
-
}
|
|
1615
|
-
catch (finalError) {
|
|
1616
|
-
console.error(`❌ Failed to mark ${checkName} check as failed: ${finalError}`);
|
|
1617
|
-
}
|
|
1618
|
-
}
|
|
1619
|
-
}
|
|
1620
|
-
}
|
|
1621
|
-
/**
|
|
1622
|
-
* Complete GitHub check runs with error status
|
|
1623
|
-
*/
|
|
1624
|
-
async completeGitHubChecksWithError(errorMessage) {
|
|
1625
|
-
if (!this.githubCheckService || !this.checkRunMap || !this.githubContext) {
|
|
1626
|
-
return;
|
|
1627
|
-
}
|
|
1628
|
-
console.log(`❌ Completing ${this.checkRunMap.size} GitHub check runs with error...`);
|
|
1629
|
-
for (const [checkName, checkRun] of this.checkRunMap) {
|
|
1630
|
-
try {
|
|
1631
|
-
await this.githubCheckService.completeCheckRun(this.githubContext.owner, this.githubContext.repo, checkRun.id, checkName, [], [], errorMessage);
|
|
1632
|
-
console.log(`❌ Completed ${checkName} check with error: ${errorMessage}`);
|
|
1633
|
-
}
|
|
1634
|
-
catch (error) {
|
|
1635
|
-
console.error(`❌ Failed to complete ${checkName} check with error: ${error}`);
|
|
1636
|
-
}
|
|
1637
|
-
}
|
|
1638
|
-
}
|
|
1639
|
-
/**
|
|
1640
|
-
* Filter checks based on their event triggers to prevent execution of checks
|
|
1641
|
-
* that shouldn't run for the current event type
|
|
1642
|
-
*/
|
|
1643
|
-
filterChecksByEvent(checks, config, prInfo, logFn) {
|
|
1644
|
-
if (!config?.checks) {
|
|
1645
|
-
// No config available, return all checks (fallback behavior)
|
|
1646
|
-
return checks;
|
|
1647
|
-
}
|
|
1648
|
-
// If we have event context from GitHub (prInfo with eventType), apply strict filtering
|
|
1649
|
-
// Otherwise (CLI, tests), use conservative filtering
|
|
1650
|
-
const prInfoWithEvent = prInfo;
|
|
1651
|
-
const hasEventContext = prInfoWithEvent && 'eventType' in prInfoWithEvent && prInfoWithEvent.eventType;
|
|
1652
|
-
if (hasEventContext) {
|
|
1653
|
-
// GitHub Action context - apply strict event filtering
|
|
1654
|
-
const currentEvent = prInfoWithEvent.eventType;
|
|
1655
|
-
logFn?.(`🔧 Debug: GitHub Action context, current event: ${currentEvent}`);
|
|
1656
|
-
const filteredChecks = [];
|
|
1657
|
-
for (const checkName of checks) {
|
|
1658
|
-
const checkConfig = config.checks[checkName];
|
|
1659
|
-
if (!checkConfig) {
|
|
1660
|
-
filteredChecks.push(checkName);
|
|
1661
|
-
continue;
|
|
1662
|
-
}
|
|
1663
|
-
const eventTriggers = checkConfig.on || [];
|
|
1664
|
-
if (eventTriggers.length === 0) {
|
|
1665
|
-
// No triggers specified, include it
|
|
1666
|
-
filteredChecks.push(checkName);
|
|
1667
|
-
logFn?.(`🔧 Debug: Check '${checkName}' has no event triggers, including`);
|
|
1668
|
-
}
|
|
1669
|
-
else if (eventTriggers.includes(currentEvent)) {
|
|
1670
|
-
// Check matches current event
|
|
1671
|
-
filteredChecks.push(checkName);
|
|
1672
|
-
logFn?.(`🔧 Debug: Check '${checkName}' matches event '${currentEvent}', including`);
|
|
1673
|
-
}
|
|
1674
|
-
else {
|
|
1675
|
-
// Check doesn't match current event
|
|
1676
|
-
logFn?.(`🔧 Debug: Check '${checkName}' does not match event '${currentEvent}' (triggers: ${JSON.stringify(eventTriggers)}), skipping`);
|
|
1677
|
-
}
|
|
1678
|
-
}
|
|
1679
|
-
return filteredChecks;
|
|
1680
|
-
}
|
|
1681
|
-
else {
|
|
1682
|
-
// CLI/Test context - conservative filtering (only exclude manual-only checks)
|
|
1683
|
-
logFn?.(`🔧 Debug: CLI/Test context, using conservative filtering`);
|
|
1684
|
-
const filteredChecks = [];
|
|
1685
|
-
for (const checkName of checks) {
|
|
1686
|
-
const checkConfig = config.checks[checkName];
|
|
1687
|
-
if (!checkConfig) {
|
|
1688
|
-
filteredChecks.push(checkName);
|
|
1689
|
-
continue;
|
|
1690
|
-
}
|
|
1691
|
-
const eventTriggers = checkConfig.on || [];
|
|
1692
|
-
// Only exclude checks that are explicitly manual-only
|
|
1693
|
-
if (eventTriggers.length === 1 && eventTriggers[0] === 'manual') {
|
|
1694
|
-
logFn?.(`🔧 Debug: Check '${checkName}' is manual-only, skipping`);
|
|
1695
|
-
}
|
|
1696
|
-
else {
|
|
1697
|
-
filteredChecks.push(checkName);
|
|
1698
|
-
logFn?.(`🔧 Debug: Check '${checkName}' included (triggers: ${JSON.stringify(eventTriggers)})`);
|
|
1699
|
-
}
|
|
1700
|
-
}
|
|
1701
|
-
return filteredChecks;
|
|
1702
|
-
}
|
|
1703
|
-
}
|
|
1704
|
-
/**
|
|
1705
|
-
* Determine the current event type from PR info
|
|
1706
|
-
*/
|
|
1707
|
-
getCurrentEventType(prInfo) {
|
|
1708
|
-
if (!prInfo) {
|
|
1709
|
-
return 'pr_opened'; // Default fallback
|
|
1710
|
-
}
|
|
1711
|
-
// For now, assume all PR-related operations are 'pr_updated' since we don't have
|
|
1712
|
-
// direct access to the original GitHub event here. This is a simplification.
|
|
1713
|
-
// In the future, we could pass the actual event type through the call chain.
|
|
1714
|
-
// The key insight is that issue-assistant should only run on issue_opened/issue_comment
|
|
1715
|
-
// events, which don't generate PRInfo objects in the first place.
|
|
1716
|
-
return 'pr_updated';
|
|
1717
|
-
}
|
|
1718
|
-
}
|
|
1719
|
-
exports.CheckExecutionEngine = CheckExecutionEngine;
|
|
1720
|
-
//# sourceMappingURL=check-execution-engine.js.map
|