@sun-asterisk/sunlint 1.3.19 → 1.3.21
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/core/cli-program.js +8 -1
- package/core/file-targeting-service.js +66 -15
- package/core/git-utils.js +121 -11
- package/core/github-annotate-service.js +1017 -67
- package/core/output-service.js +292 -29
- package/docs/GITHUB_ACTIONS_INTEGRATION.md +421 -0
- package/package.json +2 -2
|
@@ -7,83 +7,1033 @@
|
|
|
7
7
|
const fs = require('fs');
|
|
8
8
|
let Octokit;
|
|
9
9
|
|
|
10
|
+
// GitHub API limits
|
|
11
|
+
const MAX_COMMENTS_PER_REVIEW = 30;
|
|
12
|
+
const MAX_COMMENT_LENGTH = 65536;
|
|
13
|
+
const MAX_RETRIES = 3;
|
|
14
|
+
const RETRY_DELAY_MS = 1000;
|
|
15
|
+
|
|
10
16
|
/**
|
|
11
|
-
*
|
|
17
|
+
* Custom error classes
|
|
18
|
+
*/
|
|
19
|
+
class ValidationError extends Error {
|
|
20
|
+
constructor(message) {
|
|
21
|
+
super(message);
|
|
22
|
+
this.name = 'ValidationError';
|
|
23
|
+
}
|
|
24
|
+
}
|
|
25
|
+
|
|
26
|
+
class GitHubAPIError extends Error {
|
|
27
|
+
constructor(message, statusCode, originalError) {
|
|
28
|
+
super(message);
|
|
29
|
+
this.name = 'GitHubAPIError';
|
|
30
|
+
this.statusCode = statusCode;
|
|
31
|
+
this.originalError = originalError;
|
|
32
|
+
}
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
/**
|
|
36
|
+
* Logger với các levels khác nhau
|
|
37
|
+
*/
|
|
38
|
+
const logger = {
|
|
39
|
+
info: (message, data) => {
|
|
40
|
+
console.log(`[INFO] ${message}`, data ? JSON.stringify(data, null, 2) : '');
|
|
41
|
+
},
|
|
42
|
+
warn: (message, data) => {
|
|
43
|
+
console.warn(`[WARN] ${message}`, data ? JSON.stringify(data, null, 2) : '');
|
|
44
|
+
},
|
|
45
|
+
error: (message, error) => {
|
|
46
|
+
console.error(`[ERROR] ${message}`, error?.message || error);
|
|
47
|
+
if (error?.stack) {
|
|
48
|
+
console.error(error.stack);
|
|
49
|
+
}
|
|
50
|
+
},
|
|
51
|
+
debug: (message, data) => {
|
|
52
|
+
if (process.env.DEBUG === 'true') {
|
|
53
|
+
console.log(`[DEBUG] ${message}`, data ? JSON.stringify(data, null, 2) : '');
|
|
54
|
+
}
|
|
55
|
+
}
|
|
56
|
+
};
|
|
57
|
+
|
|
58
|
+
/**
|
|
59
|
+
* Sleep utility for retry mechanism
|
|
60
|
+
* @param {number} ms - Milliseconds to sleep
|
|
61
|
+
* @returns {Promise<void>}
|
|
62
|
+
*/
|
|
63
|
+
function sleep(ms) {
|
|
64
|
+
return new Promise(resolve => setTimeout(resolve, ms));
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
/**
|
|
68
|
+
* Retry wrapper cho async functions
|
|
69
|
+
* @param {Function} fn - Async function to retry
|
|
70
|
+
* @param {number} maxRetries - Max number of retries
|
|
71
|
+
* @param {number} delayMs - Delay between retries
|
|
72
|
+
* @returns {Promise<any>}
|
|
73
|
+
*/
|
|
74
|
+
async function withRetry(fn, maxRetries = MAX_RETRIES, delayMs = RETRY_DELAY_MS) {
|
|
75
|
+
let lastError;
|
|
76
|
+
for (let attempt = 1; attempt <= maxRetries; attempt++) {
|
|
77
|
+
try {
|
|
78
|
+
return await fn();
|
|
79
|
+
} catch (error) {
|
|
80
|
+
lastError = error;
|
|
81
|
+
|
|
82
|
+
// Không retry cho validation errors hoặc 404
|
|
83
|
+
if (error instanceof ValidationError || error.status === 404) {
|
|
84
|
+
throw error;
|
|
85
|
+
}
|
|
86
|
+
|
|
87
|
+
// Retry cho network errors và rate limits
|
|
88
|
+
const isRetryable =
|
|
89
|
+
error.status === 429 || // Rate limit
|
|
90
|
+
error.status >= 500 || // Server errors
|
|
91
|
+
error.code === 'ECONNRESET' ||
|
|
92
|
+
error.code === 'ETIMEDOUT' ||
|
|
93
|
+
error.code === 'ENOTFOUND';
|
|
94
|
+
|
|
95
|
+
if (!isRetryable || attempt === maxRetries) {
|
|
96
|
+
throw error;
|
|
97
|
+
}
|
|
98
|
+
|
|
99
|
+
const waitTime = error.status === 429 ? delayMs * attempt * 2 : delayMs * attempt;
|
|
100
|
+
logger.warn(`Attempt ${attempt}/${maxRetries} failed, retrying in ${waitTime}ms...`, {
|
|
101
|
+
error: error.message,
|
|
102
|
+
status: error.status
|
|
103
|
+
});
|
|
104
|
+
await sleep(waitTime);
|
|
105
|
+
}
|
|
106
|
+
}
|
|
107
|
+
throw lastError;
|
|
108
|
+
}
|
|
109
|
+
|
|
110
|
+
/**
|
|
111
|
+
* Validate input parameters
|
|
12
112
|
* @param {Object} options
|
|
13
|
-
* @
|
|
14
|
-
* @param {string} options.githubToken - GitHub token (with repo:write)
|
|
15
|
-
* @param {string} options.repo - GitHub repo in format owner/repo
|
|
16
|
-
* @param {number} options.prNumber - Pull request number
|
|
113
|
+
* @throws {ValidationError}
|
|
17
114
|
*/
|
|
18
|
-
|
|
115
|
+
function validateInput({ jsonFile, githubToken, repo, prNumber }) {
|
|
116
|
+
if (!jsonFile || typeof jsonFile !== 'string') {
|
|
117
|
+
throw new ValidationError('jsonFile is required and must be a string');
|
|
118
|
+
}
|
|
119
|
+
|
|
120
|
+
if (!githubToken && !process.env.GITHUB_TOKEN) {
|
|
121
|
+
throw new ValidationError('githubToken is required or GITHUB_TOKEN env var must be set');
|
|
122
|
+
}
|
|
123
|
+
|
|
124
|
+
if (!repo || typeof repo !== 'string') {
|
|
125
|
+
throw new ValidationError('repo is required and must be a string');
|
|
126
|
+
}
|
|
127
|
+
|
|
128
|
+
const repoParts = repo.split('/');
|
|
129
|
+
if (repoParts.length !== 2 || !repoParts[0] || !repoParts[1]) {
|
|
130
|
+
throw new ValidationError('repo must be in format "owner/repo"');
|
|
131
|
+
}
|
|
132
|
+
|
|
133
|
+
if (!prNumber || typeof prNumber !== 'number' || prNumber <= 0 || !Number.isInteger(prNumber)) {
|
|
134
|
+
throw new ValidationError('prNumber must be a positive integer');
|
|
135
|
+
}
|
|
136
|
+
}
|
|
137
|
+
|
|
138
|
+
/**
|
|
139
|
+
* Read and parse JSON file
|
|
140
|
+
* @param {string} jsonFile - Path to JSON file
|
|
141
|
+
* @returns {Object} Parsed JSON
|
|
142
|
+
* @throws {Error}
|
|
143
|
+
*/
|
|
144
|
+
function readJsonFile(jsonFile) {
|
|
19
145
|
if (!fs.existsSync(jsonFile)) {
|
|
20
146
|
throw new Error(`Result file not found: ${jsonFile}`);
|
|
21
147
|
}
|
|
22
|
-
|
|
23
|
-
let
|
|
148
|
+
|
|
149
|
+
let stats;
|
|
150
|
+
try {
|
|
151
|
+
stats = fs.statSync(jsonFile);
|
|
152
|
+
} catch (error) {
|
|
153
|
+
throw new Error(`Cannot access file ${jsonFile}: ${error.message}`);
|
|
154
|
+
}
|
|
155
|
+
|
|
156
|
+
if (!stats.isFile()) {
|
|
157
|
+
throw new Error(`Path is not a file: ${jsonFile}`);
|
|
158
|
+
}
|
|
159
|
+
|
|
160
|
+
if (stats.size === 0) {
|
|
161
|
+
logger.warn('Result file is empty', { jsonFile });
|
|
162
|
+
return [];
|
|
163
|
+
}
|
|
164
|
+
|
|
165
|
+
// Check file size (warn if > 10MB)
|
|
166
|
+
const maxSize = 10 * 1024 * 1024;
|
|
167
|
+
if (stats.size > maxSize) {
|
|
168
|
+
logger.warn(`Result file is very large (${(stats.size / 1024 / 1024).toFixed(2)}MB)`, { jsonFile });
|
|
169
|
+
}
|
|
170
|
+
|
|
171
|
+
let content;
|
|
172
|
+
try {
|
|
173
|
+
content = fs.readFileSync(jsonFile, 'utf8');
|
|
174
|
+
} catch (error) {
|
|
175
|
+
throw new Error(`Cannot read file ${jsonFile}: ${error.message}`);
|
|
176
|
+
}
|
|
177
|
+
|
|
178
|
+
try {
|
|
179
|
+
return JSON.parse(content);
|
|
180
|
+
} catch (error) {
|
|
181
|
+
throw new Error(`Invalid JSON in file ${jsonFile}: ${error.message}`);
|
|
182
|
+
}
|
|
183
|
+
}
|
|
184
|
+
|
|
185
|
+
/**
|
|
186
|
+
* Get git root directory
|
|
187
|
+
* @param {string} cwd - Current working directory
|
|
188
|
+
* @returns {string} Git root path
|
|
189
|
+
*/
|
|
190
|
+
function getGitRoot(cwd = process.cwd()) {
|
|
191
|
+
try {
|
|
192
|
+
const { execSync } = require('child_process');
|
|
193
|
+
const gitRoot = execSync('git rev-parse --show-toplevel', {
|
|
194
|
+
cwd,
|
|
195
|
+
encoding: 'utf8'
|
|
196
|
+
}).trim();
|
|
197
|
+
return gitRoot;
|
|
198
|
+
} catch (error) {
|
|
199
|
+
logger.warn('Not a git repository, using cwd as root');
|
|
200
|
+
return cwd;
|
|
201
|
+
}
|
|
202
|
+
}
|
|
203
|
+
|
|
204
|
+
/**
|
|
205
|
+
* Normalize path to be relative from git root
|
|
206
|
+
* @param {string} filePath - File path (absolute or relative)
|
|
207
|
+
* @param {string} gitRoot - Git root directory
|
|
208
|
+
* @returns {string} Normalized relative path
|
|
209
|
+
*/
|
|
210
|
+
function normalizePathFromGitRoot(filePath, gitRoot) {
|
|
211
|
+
let normalized = filePath;
|
|
212
|
+
|
|
213
|
+
// Convert absolute path to relative from git root
|
|
214
|
+
if (filePath.startsWith(gitRoot)) {
|
|
215
|
+
normalized = filePath.slice(gitRoot.length);
|
|
216
|
+
if (normalized.startsWith('/') || normalized.startsWith('\\')) {
|
|
217
|
+
normalized = normalized.slice(1);
|
|
218
|
+
}
|
|
219
|
+
}
|
|
220
|
+
|
|
221
|
+
// Normalize path separators to forward slash
|
|
222
|
+
normalized = normalized.replace(/\\/g, '/');
|
|
223
|
+
|
|
224
|
+
return normalized;
|
|
225
|
+
}
|
|
226
|
+
|
|
227
|
+
/**
|
|
228
|
+
* Parse violations from JSON data
|
|
229
|
+
* @param {Array|Object} raw - Raw JSON data
|
|
230
|
+
* @param {string} gitRoot - Git root directory for path normalization
|
|
231
|
+
* @returns {Array} Array of violation objects
|
|
232
|
+
*/
|
|
233
|
+
function parseViolations(raw, gitRoot) {
|
|
234
|
+
const violations = [];
|
|
235
|
+
|
|
24
236
|
if (Array.isArray(raw)) {
|
|
25
|
-
const cwd = process.env.GITHUB_WORKSPACE || process.cwd();
|
|
26
237
|
for (const fileObj of raw) {
|
|
27
|
-
if (fileObj
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
238
|
+
if (!fileObj || typeof fileObj !== 'object') {
|
|
239
|
+
logger.warn('Skipping invalid file object', { fileObj });
|
|
240
|
+
continue;
|
|
241
|
+
}
|
|
242
|
+
|
|
243
|
+
if (!fileObj.filePath || !Array.isArray(fileObj.messages)) {
|
|
244
|
+
logger.debug('Skipping file object without filePath or messages', { fileObj });
|
|
245
|
+
continue;
|
|
246
|
+
}
|
|
247
|
+
|
|
248
|
+
// Normalize path relative to git root (same as GitHub API)
|
|
249
|
+
const relPath = normalizePathFromGitRoot(fileObj.filePath, gitRoot);
|
|
250
|
+
|
|
251
|
+
for (const msg of fileObj.messages) {
|
|
252
|
+
if (!msg || typeof msg !== 'object') {
|
|
253
|
+
logger.warn('Skipping invalid message object', { msg });
|
|
254
|
+
continue;
|
|
32
255
|
}
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
message: msg.message
|
|
40
|
-
});
|
|
256
|
+
|
|
257
|
+
// Validate line number
|
|
258
|
+
const line = parseInt(msg.line, 10);
|
|
259
|
+
if (!line || line <= 0) {
|
|
260
|
+
logger.warn('Skipping message with invalid line number', { msg, file: relPath });
|
|
261
|
+
continue;
|
|
41
262
|
}
|
|
263
|
+
|
|
264
|
+
violations.push({
|
|
265
|
+
file: relPath,
|
|
266
|
+
line: line,
|
|
267
|
+
rule: msg.ruleId || 'unknown',
|
|
268
|
+
severity: msg.severity === 2 ? 'error' : 'warning',
|
|
269
|
+
message: msg.message || 'No message provided'
|
|
270
|
+
});
|
|
42
271
|
}
|
|
43
272
|
}
|
|
273
|
+
} else if (raw && typeof raw === 'object') {
|
|
274
|
+
const rawViolations = raw.violations || [];
|
|
275
|
+
if (!Array.isArray(rawViolations)) {
|
|
276
|
+
throw new Error('violations property must be an array');
|
|
277
|
+
}
|
|
278
|
+
|
|
279
|
+
// Normalize paths for raw violations too
|
|
280
|
+
violations.push(...rawViolations.map(v => ({
|
|
281
|
+
...v,
|
|
282
|
+
file: normalizePathFromGitRoot(v.file, gitRoot)
|
|
283
|
+
})));
|
|
44
284
|
} else {
|
|
45
|
-
|
|
46
|
-
}
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
}
|
|
88
|
-
|
|
89
|
-
|
|
285
|
+
throw new Error('JSON data must be an array or object with violations property');
|
|
286
|
+
}
|
|
287
|
+
|
|
288
|
+
return violations;
|
|
289
|
+
}
|
|
290
|
+
|
|
291
|
+
/**
|
|
292
|
+
* Get existing review comments to avoid duplicates
|
|
293
|
+
* @param {Object} octokit - Octokit instance
|
|
294
|
+
* @param {string} owner - Repo owner
|
|
295
|
+
* @param {string} repoName - Repo name
|
|
296
|
+
* @param {number} prNumber - PR number
|
|
297
|
+
* @returns {Promise<Array>} Array of existing comments
|
|
298
|
+
*/
|
|
299
|
+
async function getExistingComments(octokit, owner, repoName, prNumber) {
|
|
300
|
+
try {
|
|
301
|
+
const comments = [];
|
|
302
|
+
let page = 1;
|
|
303
|
+
let hasMore = true;
|
|
304
|
+
|
|
305
|
+
while (hasMore) {
|
|
306
|
+
const response = await octokit.pulls.listReviewComments({
|
|
307
|
+
owner,
|
|
308
|
+
repo: repoName,
|
|
309
|
+
pull_number: prNumber,
|
|
310
|
+
per_page: 100,
|
|
311
|
+
page
|
|
312
|
+
});
|
|
313
|
+
|
|
314
|
+
comments.push(...response.data);
|
|
315
|
+
hasMore = response.data.length === 100;
|
|
316
|
+
page++;
|
|
317
|
+
}
|
|
318
|
+
|
|
319
|
+
return comments;
|
|
320
|
+
} catch (error) {
|
|
321
|
+
// Non-critical error, log and continue
|
|
322
|
+
logger.warn('Failed to fetch existing comments, duplicate detection disabled', {
|
|
323
|
+
error: error.message
|
|
324
|
+
});
|
|
325
|
+
return [];
|
|
326
|
+
}
|
|
327
|
+
}
|
|
328
|
+
|
|
329
|
+
/**
|
|
330
|
+
* Check if comment already exists
|
|
331
|
+
* @param {Array} existingComments - Existing PR comments
|
|
332
|
+
* @param {Object} newComment - New comment to check
|
|
333
|
+
* @returns {boolean}
|
|
334
|
+
*/
|
|
335
|
+
function isCommentDuplicate(existingComments, newComment) {
|
|
336
|
+
return existingComments.some(existing =>
|
|
337
|
+
existing.path === newComment.path &&
|
|
338
|
+
existing.line === newComment.line &&
|
|
339
|
+
existing.body === newComment.body
|
|
340
|
+
);
|
|
341
|
+
}
|
|
342
|
+
|
|
343
|
+
/**
|
|
344
|
+
* Truncate comment body if too long
|
|
345
|
+
* @param {string} body - Comment body
|
|
346
|
+
* @returns {string}
|
|
347
|
+
*/
|
|
348
|
+
function truncateComment(body) {
|
|
349
|
+
if (body.length <= MAX_COMMENT_LENGTH) {
|
|
350
|
+
return body;
|
|
351
|
+
}
|
|
352
|
+
|
|
353
|
+
const truncated = body.substring(0, MAX_COMMENT_LENGTH - 100);
|
|
354
|
+
return `${truncated}\n\n... (comment truncated, too long)`;
|
|
355
|
+
}
|
|
356
|
+
|
|
357
|
+
/**
|
|
358
|
+
* Create review comments in batches
|
|
359
|
+
* @param {Object} octokit - Octokit instance
|
|
360
|
+
* @param {string} owner - Repo owner
|
|
361
|
+
* @param {string} repoName - Repo name
|
|
362
|
+
* @param {number} prNumber - PR number
|
|
363
|
+
* @param {string} headSha - Commit SHA
|
|
364
|
+
* @param {Array} comments - Comments to post
|
|
365
|
+
* @param {boolean} hasError - Whether there are errors
|
|
366
|
+
* @returns {Promise<Array>} Array of review responses
|
|
367
|
+
*/
|
|
368
|
+
async function createReviewsInBatches(octokit, owner, repoName, prNumber, headSha, comments, hasError) {
|
|
369
|
+
const reviews = [];
|
|
370
|
+
const batches = [];
|
|
371
|
+
|
|
372
|
+
// Split comments into batches
|
|
373
|
+
for (let i = 0; i < comments.length; i += MAX_COMMENTS_PER_REVIEW) {
|
|
374
|
+
batches.push(comments.slice(i, i + MAX_COMMENTS_PER_REVIEW));
|
|
375
|
+
}
|
|
376
|
+
|
|
377
|
+
logger.info(`Creating ${batches.length} review(s) with ${comments.length} comment(s)`);
|
|
378
|
+
|
|
379
|
+
for (let i = 0; i < batches.length; i++) {
|
|
380
|
+
const batch = batches[i];
|
|
381
|
+
const isLastBatch = i === batches.length - 1;
|
|
382
|
+
|
|
383
|
+
// Only REQUEST_CHANGES on last batch if there are errors
|
|
384
|
+
const eventType = isLastBatch && hasError ? 'REQUEST_CHANGES' : 'COMMENT';
|
|
385
|
+
|
|
386
|
+
try {
|
|
387
|
+
const reviewRes = await withRetry(async () => {
|
|
388
|
+
return await octokit.pulls.createReview({
|
|
389
|
+
owner,
|
|
390
|
+
repo: repoName,
|
|
391
|
+
pull_number: prNumber,
|
|
392
|
+
commit_id: headSha,
|
|
393
|
+
event: eventType,
|
|
394
|
+
body: isLastBatch && batches.length > 1
|
|
395
|
+
? `SunLint found ${comments.length} issue(s) across multiple reviews.`
|
|
396
|
+
: undefined,
|
|
397
|
+
comments: batch
|
|
398
|
+
});
|
|
399
|
+
});
|
|
400
|
+
|
|
401
|
+
reviews.push(reviewRes.data);
|
|
402
|
+
logger.info(`Review ${i + 1}/${batches.length} created with ${batch.length} comment(s)`, {
|
|
403
|
+
reviewId: reviewRes.data.id
|
|
404
|
+
});
|
|
405
|
+
|
|
406
|
+
// Add delay between batches to avoid rate limiting
|
|
407
|
+
if (i < batches.length - 1) {
|
|
408
|
+
await sleep(500);
|
|
409
|
+
}
|
|
410
|
+
} catch (error) {
|
|
411
|
+
logger.error(`Failed to create review ${i + 1}/${batches.length}`, error);
|
|
412
|
+
throw new GitHubAPIError(
|
|
413
|
+
`Failed to create review: ${error.message}`,
|
|
414
|
+
error.status,
|
|
415
|
+
error
|
|
416
|
+
);
|
|
417
|
+
}
|
|
418
|
+
}
|
|
419
|
+
|
|
420
|
+
return reviews;
|
|
421
|
+
}
|
|
422
|
+
|
|
423
|
+
/**
|
|
424
|
+
* Parse patch to get valid line numbers
|
|
425
|
+
* @param {string} patch - GitHub patch string
|
|
426
|
+
* @returns {Set<number>} Set of valid line numbers
|
|
427
|
+
*/
|
|
428
|
+
function parseValidLineNumbers(patch) {
|
|
429
|
+
const validLines = new Set();
|
|
430
|
+
|
|
431
|
+
if (!patch) {
|
|
432
|
+
return validLines;
|
|
433
|
+
}
|
|
434
|
+
|
|
435
|
+
const lines = patch.split('\n');
|
|
436
|
+
let currentLine = 0;
|
|
437
|
+
|
|
438
|
+
for (const line of lines) {
|
|
439
|
+
// Parse hunk header: @@ -old_start,old_count +new_start,new_count @@
|
|
440
|
+
const hunkMatch = line.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,\d+)? @@/);
|
|
441
|
+
if (hunkMatch) {
|
|
442
|
+
currentLine = parseInt(hunkMatch[1], 10);
|
|
443
|
+
continue;
|
|
444
|
+
}
|
|
445
|
+
|
|
446
|
+
// Skip lines that don't start with +, -, or space
|
|
447
|
+
if (!line.startsWith('+') && !line.startsWith('-') && !line.startsWith(' ')) {
|
|
448
|
+
continue;
|
|
449
|
+
}
|
|
450
|
+
|
|
451
|
+
// Lines starting with + or space are valid (added or context)
|
|
452
|
+
if (line.startsWith('+') || line.startsWith(' ')) {
|
|
453
|
+
validLines.add(currentLine);
|
|
454
|
+
currentLine++;
|
|
455
|
+
} else if (line.startsWith('-')) {
|
|
456
|
+
// Deleted lines don't increment the line counter
|
|
457
|
+
continue;
|
|
458
|
+
}
|
|
459
|
+
}
|
|
460
|
+
|
|
461
|
+
return validLines;
|
|
462
|
+
}
|
|
463
|
+
|
|
464
|
+
/**
|
|
465
|
+
* Get PR files with detailed information for validation
|
|
466
|
+
* @param {Object} octokit - Octokit instance
|
|
467
|
+
* @param {string} owner - Repo owner
|
|
468
|
+
* @param {string} repoName - Repo name
|
|
469
|
+
* @param {number} prNumber - PR number
|
|
470
|
+
* @returns {Promise<Map>} Map of filename to file info
|
|
471
|
+
*/
|
|
472
|
+
async function getPRFilesInfo(octokit, owner, repoName, prNumber) {
|
|
473
|
+
const filesMap = new Map();
|
|
474
|
+
|
|
475
|
+
try {
|
|
476
|
+
let page = 1;
|
|
477
|
+
let hasMore = true;
|
|
478
|
+
|
|
479
|
+
while (hasMore) {
|
|
480
|
+
const response = await octokit.pulls.listFiles({
|
|
481
|
+
owner,
|
|
482
|
+
repo: repoName,
|
|
483
|
+
pull_number: prNumber,
|
|
484
|
+
per_page: 100,
|
|
485
|
+
page
|
|
486
|
+
});
|
|
487
|
+
|
|
488
|
+
for (const file of response.data) {
|
|
489
|
+
// Parse valid line numbers from patch
|
|
490
|
+
const validLines = parseValidLineNumbers(file.patch);
|
|
491
|
+
|
|
492
|
+
const fileInfo = {
|
|
493
|
+
filename: file.filename,
|
|
494
|
+
previous_filename: file.previous_filename, // For renamed files
|
|
495
|
+
additions: file.additions,
|
|
496
|
+
deletions: file.deletions,
|
|
497
|
+
changes: file.changes,
|
|
498
|
+
status: file.status, // 'added', 'removed', 'modified', 'renamed'
|
|
499
|
+
validLines: validLines, // Set of valid line numbers
|
|
500
|
+
patch: file.patch // Keep patch for debugging
|
|
501
|
+
};
|
|
502
|
+
|
|
503
|
+
// Store by current filename
|
|
504
|
+
filesMap.set(file.filename, fileInfo);
|
|
505
|
+
|
|
506
|
+
// For renamed files, also map old name to new name
|
|
507
|
+
if (file.status === 'renamed' && file.previous_filename) {
|
|
508
|
+
filesMap.set(file.previous_filename, {
|
|
509
|
+
...fileInfo,
|
|
510
|
+
isOldName: true, // Flag to indicate this is old name
|
|
511
|
+
newFilename: file.filename // Reference to new name
|
|
512
|
+
});
|
|
513
|
+
}
|
|
514
|
+
}
|
|
515
|
+
|
|
516
|
+
hasMore = response.data.length === 100;
|
|
517
|
+
page++;
|
|
518
|
+
}
|
|
519
|
+
|
|
520
|
+
return filesMap;
|
|
521
|
+
} catch (error) {
|
|
522
|
+
throw new GitHubAPIError(
|
|
523
|
+
`Failed to fetch PR files: ${error.message}`,
|
|
524
|
+
error.status,
|
|
525
|
+
error
|
|
526
|
+
);
|
|
527
|
+
}
|
|
528
|
+
}
|
|
529
|
+
|
|
530
|
+
/**
|
|
531
|
+
* Annotate GitHub PR with SunLint results
|
|
532
|
+
* @param {Object} options
|
|
533
|
+
* @param {string} options.jsonFile - Path to JSON result file
|
|
534
|
+
* @param {string} [options.githubToken] - GitHub token (with repo:write), falls back to GITHUB_TOKEN env
|
|
535
|
+
* @param {string} options.repo - GitHub repo in format owner/repo
|
|
536
|
+
* @param {number} options.prNumber - Pull request number
|
|
537
|
+
* @param {boolean} [options.skipDuplicates=true] - Skip duplicate comments
|
|
538
|
+
* @returns {Promise<Object>} Result object with summary
|
|
539
|
+
* @throws {ValidationError} When input validation fails
|
|
540
|
+
* @throws {GitHubAPIError} When GitHub API calls fail
|
|
541
|
+
* @throws {Error} For other errors
|
|
542
|
+
*/
|
|
543
|
+
async function annotate({
|
|
544
|
+
jsonFile,
|
|
545
|
+
githubToken,
|
|
546
|
+
repo,
|
|
547
|
+
prNumber,
|
|
548
|
+
skipDuplicates = true
|
|
549
|
+
}) {
|
|
550
|
+
const startTime = Date.now();
|
|
551
|
+
|
|
552
|
+
try {
|
|
553
|
+
// Step 1: Validate input
|
|
554
|
+
logger.info('Starting GitHub annotation process', { jsonFile, repo, prNumber });
|
|
555
|
+
validateInput({ jsonFile, githubToken, repo, prNumber });
|
|
556
|
+
|
|
557
|
+
// Step 2: Read and parse JSON file
|
|
558
|
+
logger.info('Reading result file', { jsonFile });
|
|
559
|
+
const raw = readJsonFile(jsonFile);
|
|
560
|
+
|
|
561
|
+
// Step 2.5: Get git root for path normalization
|
|
562
|
+
const gitRoot = getGitRoot();
|
|
563
|
+
logger.debug('Git root directory', { gitRoot });
|
|
564
|
+
|
|
565
|
+
// Step 3: Parse violations with git root normalization
|
|
566
|
+
logger.info('Parsing violations');
|
|
567
|
+
const violations = parseViolations(raw, gitRoot);
|
|
568
|
+
|
|
569
|
+
if (violations.length === 0) {
|
|
570
|
+
logger.info('No violations found');
|
|
571
|
+
return {
|
|
572
|
+
success: true,
|
|
573
|
+
message: 'No violations to comment',
|
|
574
|
+
stats: {
|
|
575
|
+
totalViolations: 0,
|
|
576
|
+
commentsCreated: 0,
|
|
577
|
+
duplicatesSkipped: 0,
|
|
578
|
+
duration: Date.now() - startTime
|
|
579
|
+
}
|
|
580
|
+
};
|
|
581
|
+
}
|
|
582
|
+
|
|
583
|
+
logger.info(`Found ${violations.length} violation(s)`, {
|
|
584
|
+
errors: violations.filter(v => v.severity === 'error').length,
|
|
585
|
+
warnings: violations.filter(v => v.severity === 'warning').length
|
|
586
|
+
});
|
|
587
|
+
|
|
588
|
+
// Step 4: Initialize Octokit
|
|
589
|
+
const token = githubToken || process.env.GITHUB_TOKEN;
|
|
590
|
+
const [owner, repoName] = repo.split('/');
|
|
591
|
+
|
|
592
|
+
if (!Octokit) {
|
|
593
|
+
logger.debug('Loading @octokit/rest');
|
|
594
|
+
Octokit = (await import('@octokit/rest')).Octokit;
|
|
595
|
+
}
|
|
596
|
+
|
|
597
|
+
const octokit = new Octokit({ auth: token });
|
|
598
|
+
|
|
599
|
+
// Step 5: Get PR info
|
|
600
|
+
logger.info('Fetching PR information');
|
|
601
|
+
let prData;
|
|
602
|
+
try {
|
|
603
|
+
const response = await withRetry(async () => {
|
|
604
|
+
return await octokit.pulls.get({
|
|
605
|
+
owner,
|
|
606
|
+
repo: repoName,
|
|
607
|
+
pull_number: prNumber
|
|
608
|
+
});
|
|
609
|
+
});
|
|
610
|
+
prData = response.data;
|
|
611
|
+
} catch (error) {
|
|
612
|
+
if (error.status === 404) {
|
|
613
|
+
throw new GitHubAPIError(
|
|
614
|
+
`PR #${prNumber} not found in ${repo}`,
|
|
615
|
+
404,
|
|
616
|
+
error
|
|
617
|
+
);
|
|
618
|
+
}
|
|
619
|
+
throw new GitHubAPIError(
|
|
620
|
+
`Failed to fetch PR: ${error.message}`,
|
|
621
|
+
error.status,
|
|
622
|
+
error
|
|
623
|
+
);
|
|
624
|
+
}
|
|
625
|
+
|
|
626
|
+
const headSha = prData.head.sha;
|
|
627
|
+
logger.info('PR information retrieved', {
|
|
628
|
+
state: prData.state,
|
|
629
|
+
sha: headSha,
|
|
630
|
+
title: prData.title
|
|
631
|
+
});
|
|
632
|
+
|
|
633
|
+
// Check if PR is open
|
|
634
|
+
if (prData.state !== 'open') {
|
|
635
|
+
logger.warn('PR is not open', { state: prData.state });
|
|
636
|
+
}
|
|
637
|
+
|
|
638
|
+
// Step 6: Get PR files
|
|
639
|
+
logger.info('Fetching PR files');
|
|
640
|
+
const prFilesInfo = await getPRFilesInfo(octokit, owner, repoName, prNumber);
|
|
641
|
+
const prFiles = Array.from(prFilesInfo.keys());
|
|
642
|
+
|
|
643
|
+
logger.info(`PR has ${prFiles.length} file(s) changed`);
|
|
644
|
+
|
|
645
|
+
// Step 7: Filter and validate violations
|
|
646
|
+
const matchingViolations = [];
|
|
647
|
+
let filesSkipped = 0;
|
|
648
|
+
let linesSkipped = 0;
|
|
649
|
+
let renamedFilesHandled = 0;
|
|
650
|
+
|
|
651
|
+
// Debug: Log sample paths for comparison
|
|
652
|
+
logger.debug('Path comparison debug:', {
|
|
653
|
+
sampleViolationFiles: violations.slice(0, 3).map(v => v.file),
|
|
654
|
+
samplePRFiles: Array.from(prFilesInfo.keys()).slice(0, 3),
|
|
655
|
+
totalViolations: violations.length,
|
|
656
|
+
totalPRFiles: prFilesInfo.size
|
|
657
|
+
});
|
|
658
|
+
|
|
659
|
+
for (const v of violations) {
|
|
660
|
+
let targetFile = v.file;
|
|
661
|
+
let fileInfo = prFilesInfo.get(targetFile);
|
|
662
|
+
|
|
663
|
+
// If file not found, check if it's a renamed file (old name)
|
|
664
|
+
if (!fileInfo && prFilesInfo.has(targetFile)) {
|
|
665
|
+
const oldFileInfo = prFilesInfo.get(targetFile);
|
|
666
|
+
if (oldFileInfo.isOldName && oldFileInfo.newFilename) {
|
|
667
|
+
logger.debug(`Mapping renamed file: ${targetFile} -> ${oldFileInfo.newFilename}`);
|
|
668
|
+
targetFile = oldFileInfo.newFilename;
|
|
669
|
+
fileInfo = prFilesInfo.get(targetFile);
|
|
670
|
+
renamedFilesHandled++;
|
|
671
|
+
}
|
|
672
|
+
}
|
|
673
|
+
|
|
674
|
+
// Skip if file not in PR
|
|
675
|
+
if (!fileInfo) {
|
|
676
|
+
logger.debug(`Skipping violation - file not in PR: ${v.file}`);
|
|
677
|
+
filesSkipped++;
|
|
678
|
+
continue;
|
|
679
|
+
}
|
|
680
|
+
|
|
681
|
+
// Skip if file is deleted/removed
|
|
682
|
+
if (fileInfo.status === 'removed') {
|
|
683
|
+
logger.debug(`Skipping violation - file removed: ${targetFile}`);
|
|
684
|
+
filesSkipped++;
|
|
685
|
+
continue;
|
|
686
|
+
}
|
|
687
|
+
|
|
688
|
+
// Validate line number against patch
|
|
689
|
+
if (fileInfo.validLines && fileInfo.validLines.size > 0) {
|
|
690
|
+
if (!fileInfo.validLines.has(v.line)) {
|
|
691
|
+
logger.debug(`Skipping violation - line ${v.line} not in PR diff: ${targetFile}`);
|
|
692
|
+
linesSkipped++;
|
|
693
|
+
continue;
|
|
694
|
+
}
|
|
695
|
+
}
|
|
696
|
+
|
|
697
|
+
// Add to matching violations with updated filename
|
|
698
|
+
matchingViolations.push({
|
|
699
|
+
...v,
|
|
700
|
+
file: targetFile // Use potentially renamed filename
|
|
701
|
+
});
|
|
702
|
+
}
|
|
703
|
+
|
|
704
|
+
if (matchingViolations.length === 0) {
|
|
705
|
+
logger.info('No violations match PR files or valid lines', {
|
|
706
|
+
totalViolations: violations.length,
|
|
707
|
+
filesSkipped,
|
|
708
|
+
linesSkipped
|
|
709
|
+
});
|
|
710
|
+
return {
|
|
711
|
+
success: true,
|
|
712
|
+
message: 'No matching PR file violations to comment',
|
|
713
|
+
stats: {
|
|
714
|
+
totalViolations: violations.length,
|
|
715
|
+
matchingViolations: 0,
|
|
716
|
+
filesSkipped,
|
|
717
|
+
linesSkipped,
|
|
718
|
+
renamedFilesHandled,
|
|
719
|
+
commentsCreated: 0,
|
|
720
|
+
duplicatesSkipped: 0,
|
|
721
|
+
duration: Date.now() - startTime
|
|
722
|
+
}
|
|
723
|
+
};
|
|
724
|
+
}
|
|
725
|
+
|
|
726
|
+
logger.info(`${matchingViolations.length} violation(s) match PR files and valid lines`, {
|
|
727
|
+
filesSkipped,
|
|
728
|
+
linesSkipped,
|
|
729
|
+
renamedFilesHandled
|
|
730
|
+
});
|
|
731
|
+
|
|
732
|
+
// Step 8: Get existing comments to avoid duplicates
|
|
733
|
+
let existingComments = [];
|
|
734
|
+
if (skipDuplicates) {
|
|
735
|
+
logger.info('Fetching existing comments for duplicate detection');
|
|
736
|
+
existingComments = await getExistingComments(octokit, owner, repoName, prNumber);
|
|
737
|
+
logger.info(`Found ${existingComments.length} existing comment(s)`);
|
|
738
|
+
}
|
|
739
|
+
|
|
740
|
+
// Step 9: Prepare review comments
|
|
741
|
+
const reviewComments = [];
|
|
742
|
+
let duplicatesSkipped = 0;
|
|
743
|
+
|
|
744
|
+
for (const v of matchingViolations) {
|
|
745
|
+
const commentBody = truncateComment(`[${v.rule}] ${v.message}`);
|
|
746
|
+
const comment = {
|
|
747
|
+
path: v.file,
|
|
748
|
+
line: v.line,
|
|
749
|
+
side: 'RIGHT',
|
|
750
|
+
body: commentBody
|
|
751
|
+
};
|
|
752
|
+
|
|
753
|
+
if (skipDuplicates && isCommentDuplicate(existingComments, comment)) {
|
|
754
|
+
duplicatesSkipped++;
|
|
755
|
+
logger.debug('Skipping duplicate comment', { file: v.file, line: v.line });
|
|
756
|
+
continue;
|
|
757
|
+
}
|
|
758
|
+
|
|
759
|
+
reviewComments.push(comment);
|
|
760
|
+
}
|
|
761
|
+
|
|
762
|
+
if (reviewComments.length === 0) {
|
|
763
|
+
logger.info('All comments are duplicates, nothing to post');
|
|
764
|
+
return {
|
|
765
|
+
success: true,
|
|
766
|
+
message: 'All comments already exist on PR',
|
|
767
|
+
stats: {
|
|
768
|
+
totalViolations: violations.length,
|
|
769
|
+
matchingViolations: matchingViolations.length,
|
|
770
|
+
filesSkipped,
|
|
771
|
+
linesSkipped,
|
|
772
|
+
renamedFilesHandled,
|
|
773
|
+
commentsCreated: 0,
|
|
774
|
+
duplicatesSkipped,
|
|
775
|
+
duration: Date.now() - startTime
|
|
776
|
+
}
|
|
777
|
+
};
|
|
778
|
+
}
|
|
779
|
+
|
|
780
|
+
logger.info(`Preparing to create ${reviewComments.length} comment(s)`, {
|
|
781
|
+
duplicatesSkipped
|
|
782
|
+
});
|
|
783
|
+
|
|
784
|
+
// Step 10: Create reviews
|
|
785
|
+
const hasError = matchingViolations.some(v => v.severity === 'error');
|
|
786
|
+
const reviews = await createReviewsInBatches(
|
|
787
|
+
octokit,
|
|
788
|
+
owner,
|
|
789
|
+
repoName,
|
|
790
|
+
prNumber,
|
|
791
|
+
headSha,
|
|
792
|
+
reviewComments,
|
|
793
|
+
hasError
|
|
794
|
+
);
|
|
795
|
+
|
|
796
|
+
const duration = Date.now() - startTime;
|
|
797
|
+
logger.info('Annotation completed successfully', {
|
|
798
|
+
reviewsCreated: reviews.length,
|
|
799
|
+
commentsCreated: reviewComments.length,
|
|
800
|
+
filesSkipped,
|
|
801
|
+
linesSkipped,
|
|
802
|
+
renamedFilesHandled,
|
|
803
|
+
duration: `${duration}ms`
|
|
804
|
+
});
|
|
805
|
+
|
|
806
|
+
return {
|
|
807
|
+
success: true,
|
|
808
|
+
message: `Created ${reviews.length} review(s) with ${reviewComments.length} comment(s)`,
|
|
809
|
+
reviews: reviews.map(r => ({
|
|
810
|
+
id: r.id,
|
|
811
|
+
html_url: r.html_url
|
|
812
|
+
})),
|
|
813
|
+
stats: {
|
|
814
|
+
totalViolations: violations.length,
|
|
815
|
+
matchingViolations: matchingViolations.length,
|
|
816
|
+
filesSkipped,
|
|
817
|
+
linesSkipped,
|
|
818
|
+
renamedFilesHandled,
|
|
819
|
+
commentsCreated: reviewComments.length,
|
|
820
|
+
duplicatesSkipped,
|
|
821
|
+
reviewsCreated: reviews.length,
|
|
822
|
+
hasErrors: hasError,
|
|
823
|
+
duration
|
|
824
|
+
}
|
|
825
|
+
};
|
|
826
|
+
|
|
827
|
+
} catch (error) {
|
|
828
|
+
logger.error('Annotation failed', error);
|
|
829
|
+
|
|
830
|
+
// Re-throw with more context
|
|
831
|
+
if (error instanceof ValidationError || error instanceof GitHubAPIError) {
|
|
832
|
+
throw error;
|
|
833
|
+
}
|
|
834
|
+
|
|
835
|
+
throw new Error(`GitHub annotation failed: ${error.message}`);
|
|
836
|
+
}
|
|
837
|
+
}
|
|
838
|
+
|
|
839
|
+
/**
 * Post (or update) a SunLint summary comment on a GitHub pull request.
 *
 * Reads the lint result JSON, computes per-file statistics, renders a
 * markdown summary, and then either updates a previously-posted SunLint
 * summary comment or creates a new one. Idempotent across CI re-runs as
 * long as the existing comment can be found.
 *
 * @param {Object} options
 * @param {string} options.jsonFile - Path to JSON result file
 * @param {string} [options.githubToken] - GitHub token, falls back to GITHUB_TOKEN env
 * @param {string} options.repo - GitHub repo in format owner/repo
 * @param {number} options.prNumber - Pull request number
 * @returns {Promise<Object>} Result object with { success, action, commentId, commentUrl, stats }
 * @throws {ValidationError} When the input options fail validation
 * @throws {GitHubAPIError} When posting/updating the comment fails
 */
async function postSummaryComment({
  jsonFile,
  githubToken,
  repo,
  prNumber
}) {
  const startTime = Date.now();

  try {
    // Step 1: Validate input
    logger.info('Starting GitHub summary comment process', { jsonFile, repo, prNumber });
    validateInput({ jsonFile, githubToken, repo, prNumber });

    // Step 2: Read and parse JSON file
    logger.info('Reading result file', { jsonFile });
    const raw = readJsonFile(jsonFile);

    // Step 2.5: Get git root for path normalization
    const gitRoot = getGitRoot();
    // NOTE(review): logger.debug is not visible in the logger definition at the
    // top of this file (only info/warn/error are) — confirm it exists.
    logger.debug('Git root directory', { gitRoot });

    // Step 3: Parse violations with git root normalization
    logger.info('Parsing violations for summary');
    const violations = parseViolations(raw, gitRoot);

    // Step 4: Initialize Octokit (lazy ESM import, cached at module level)
    const token = githubToken || process.env.GITHUB_TOKEN;
    const [owner, repoName] = repo.split('/');

    if (!Octokit) {
      logger.debug('Loading @octokit/rest');
      Octokit = (await import('@octokit/rest')).Octokit;
    }

    const octokit = new Octokit({ auth: token });

    // Step 5: Calculate statistics
    const totalViolations = violations.length;
    const errorCount = violations.filter(v => v.severity === 'error').length;
    const warningCount = violations.filter(v => v.severity === 'warning').length;

    // Group by file
    const fileGroups = {};
    for (const v of violations) {
      if (!fileGroups[v.file]) {
        fileGroups[v.file] = [];
      }
      fileGroups[v.file].push(v);
    }

    const filesWithIssues = Object.keys(fileGroups).length;
    // `raw` is the per-file result array when the reporter emits one; for any
    // other shape, fall back to the number of files that had issues.
    const totalFiles = Array.isArray(raw) && raw.length > 0 ? raw.length : filesWithIssues;

    // Step 6: Generate summary markdown
    const emoji = errorCount > 0 ? '❌' : warningCount > 0 ? '⚠️' : '✅';
    const status = errorCount > 0 ? 'Failed' : warningCount > 0 ? 'Passed with warnings' : 'Passed';

    let summary = `## ${emoji} SunLint Report\n\n`;
    summary += `**Status:** ${status}\n\n`;
    summary += `📊 **Summary:**\n`;
    summary += `- Total files analyzed: ${totalFiles}\n`;
    summary += `- Files with issues: ${filesWithIssues}\n`;
    summary += `- Total violations: ${totalViolations}\n`;
    summary += `- Errors: ${errorCount}\n`;
    summary += `- Warnings: ${warningCount}\n\n`;

    if (totalViolations === 0) {
      summary += '✅ Great job! No coding standard violations found.\n';
    } else {
      summary += '### Top Issues:\n\n';

      // List top 10 files with most issues
      const sortedFiles = Object.entries(fileGroups)
        .sort((a, b) => b[1].length - a[1].length)
        .slice(0, 10);

      for (const [file, fileViolations] of sortedFiles) {
        const fileErrors = fileViolations.filter(v => v.severity === 'error').length;
        const fileWarnings = fileViolations.filter(v => v.severity === 'warning').length;
        summary += `- \`${file}\`: ${fileErrors} error(s), ${fileWarnings} warning(s)\n`;
      }

      if (Object.keys(fileGroups).length > 10) {
        summary += `\n_... and ${Object.keys(fileGroups).length - 10} more file(s)_\n`;
      }

      summary += '\n⚠️ Please check the inline comments on your code for details.\n';
    }

    summary += '\n---\n';
    summary += '<sub>Generated by [SunLint](https://github.com/sun-asterisk/engineer-excellence) • ';

    // Add link to full report if available
    if (process.env.GITHUB_RUN_ID) {
      const runUrl = `https://github.com/${repo}/actions/runs/${process.env.GITHUB_RUN_ID}`;
      summary += `[View full report](${runUrl})`;
    }

    summary += '</sub>\n';

    // GitHub rejects issue-comment bodies longer than MAX_COMMENT_LENGTH
    // (65536 chars); truncate defensively so the API call cannot fail on size.
    if (summary.length > MAX_COMMENT_LENGTH) {
      const truncationNotice = '\n\n_... (truncated: comment exceeded GitHub size limit)_\n';
      summary = summary.slice(0, MAX_COMMENT_LENGTH - truncationNotice.length) + truncationNotice;
      logger.warn('Summary comment truncated to fit GitHub limit', { limit: MAX_COMMENT_LENGTH });
    }

    // Step 7: Try to find existing SunLint comment
    logger.info('Checking for existing summary comment');
    let existingComment = null;

    try {
      const { data: comments } = await octokit.issues.listComments({
        owner,
        repo: repoName,
        issue_number: prNumber,
        per_page: 100
      });

      // `user` is null for comments whose author account was deleted ("ghost"
      // users); use optional chaining so one such comment does not abort the
      // lookup and cause a duplicate summary comment to be posted.
      // NOTE(review): this only matches Bot-authored comments — when run with
      // a personal access token the commenter is type 'User' and the update
      // path is never taken; confirm intended.
      existingComment = comments.find(comment =>
        comment.user?.type === 'Bot' &&
        comment.body.includes('SunLint Report')
      );
    } catch (error) {
      // Best-effort: failure to list comments degrades to posting a new one.
      logger.warn('Failed to fetch existing comments', { error: error.message });
    }

    // Step 8: Post or update comment
    let commentResult;

    try {
      if (existingComment) {
        logger.info('Updating existing summary comment', { commentId: existingComment.id });
        commentResult = await withRetry(async () => {
          return await octokit.issues.updateComment({
            owner,
            repo: repoName,
            comment_id: existingComment.id,
            body: summary
          });
        });
        logger.info('Summary comment updated successfully');
      } else {
        logger.info('Creating new summary comment');
        commentResult = await withRetry(async () => {
          return await octokit.issues.createComment({
            owner,
            repo: repoName,
            issue_number: prNumber,
            body: summary
          });
        });
        logger.info('Summary comment created successfully');
      }
    } catch (error) {
      throw new GitHubAPIError(
        `Failed to post summary comment: ${error.message}`,
        error.status,
        error
      );
    }

    const duration = Date.now() - startTime;
    logger.info('Summary comment completed', {
      action: existingComment ? 'updated' : 'created',
      duration: `${duration}ms`
    });

    return {
      success: true,
      action: existingComment ? 'updated' : 'created',
      commentId: commentResult.data.id,
      commentUrl: commentResult.data.html_url,
      stats: {
        totalViolations,
        errorCount,
        warningCount,
        filesWithIssues,
        duration
      }
    };

  } catch (error) {
    logger.error('Summary comment failed', error);

    // Known error types carry their own context — rethrow untouched so
    // callers can distinguish validation from API failures.
    if (error instanceof ValidationError || error instanceof GitHubAPIError) {
      throw error;
    }

    throw new Error(`GitHub summary comment failed: ${error.message}`);
  }
}
|
|
1033
|
+
|
|
1034
|
+
// Public API: the two annotation entry points, plus the error classes so
// callers can use `instanceof` to distinguish input-validation failures
// from GitHub API failures.
module.exports = {
  annotate,
  postSummaryComment,
  ValidationError,
  GitHubAPIError
};
|