@sun-asterisk/sunlint 1.3.18 → 1.3.20

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35) hide show
  1. package/config/rules/enhanced-rules-registry.json +77 -18
  2. package/core/cli-program.js +9 -1
  3. package/core/github-annotate-service.js +986 -0
  4. package/core/output-service.js +294 -6
  5. package/core/summary-report-service.js +30 -30
  6. package/docs/GITHUB_ACTIONS_INTEGRATION.md +421 -0
  7. package/package.json +2 -1
  8. package/rules/common/C014_dependency_injection/symbol-based-analyzer.js +392 -280
  9. package/rules/common/C017_constructor_logic/analyzer.js +137 -503
  10. package/rules/common/C017_constructor_logic/config.json +50 -0
  11. package/rules/common/C017_constructor_logic/symbol-based-analyzer.js +463 -0
  12. package/rules/security/S006_no_plaintext_recovery_codes/symbol-based-analyzer.js +463 -21
  13. package/rules/security/S011_secure_guid_generation/README.md +255 -0
  14. package/rules/security/S011_secure_guid_generation/analyzer.js +135 -0
  15. package/rules/security/S011_secure_guid_generation/config.json +56 -0
  16. package/rules/security/S011_secure_guid_generation/symbol-based-analyzer.js +609 -0
  17. package/rules/security/S028_file_upload_size_limits/README.md +537 -0
  18. package/rules/security/S028_file_upload_size_limits/analyzer.js +202 -0
  19. package/rules/security/S028_file_upload_size_limits/config.json +186 -0
  20. package/rules/security/S028_file_upload_size_limits/symbol-based-analyzer.js +530 -0
  21. package/rules/security/S041_session_token_invalidation/README.md +303 -0
  22. package/rules/security/S041_session_token_invalidation/analyzer.js +242 -0
  23. package/rules/security/S041_session_token_invalidation/config.json +175 -0
  24. package/rules/security/S041_session_token_invalidation/regex-based-analyzer.js +411 -0
  25. package/rules/security/S041_session_token_invalidation/symbol-based-analyzer.js +674 -0
  26. package/rules/security/S044_re_authentication_required/README.md +136 -0
  27. package/rules/security/S044_re_authentication_required/analyzer.js +242 -0
  28. package/rules/security/S044_re_authentication_required/config.json +161 -0
  29. package/rules/security/S044_re_authentication_required/regex-based-analyzer.js +329 -0
  30. package/rules/security/S044_re_authentication_required/symbol-based-analyzer.js +537 -0
  31. package/rules/security/S045_brute_force_protection/README.md +345 -0
  32. package/rules/security/S045_brute_force_protection/analyzer.js +336 -0
  33. package/rules/security/S045_brute_force_protection/config.json +139 -0
  34. package/rules/security/S045_brute_force_protection/symbol-based-analyzer.js +646 -0
  35. package/rules/common/C017_constructor_logic/semantic-analyzer.js +0 -340
@@ -0,0 +1,986 @@
1
+ /**
+ * GitHub Annotate Service
+ * Reads a JSON result file and posts inline annotation comments on the
+ * corresponding GitHub pull request.
+ * Usage: githubAnnotateService.annotate({ jsonFile, githubToken, repo, prNumber })
+ */
6
+
7
const fs = require('fs');
// Octokit is an ES module; it is imported lazily on first use and cached here.
let Octokit;

// GitHub API limits
const MAX_COMMENTS_PER_REVIEW = 30; // max inline comments attached to a single review
const MAX_COMMENT_LENGTH = 65536; // maximum size GitHub accepts for a comment body
const MAX_RETRIES = 3; // default attempt count for withRetry()
const RETRY_DELAY_MS = 1000; // base backoff delay (ms) for withRetry()
15
+
16
/**
 * Custom error classes
 * ValidationError: thrown when caller-supplied options fail validation.
 * It is never retried by withRetry().
 */
class ValidationError extends Error {
  constructor(message) {
    super(message);
    this.name = 'ValidationError';
  }
}
25
+
26
/**
 * Error wrapping a failed GitHub API call.
 * Keeps the HTTP status code and the underlying error so callers can
 * inspect them (e.g. to distinguish 404 from rate limiting).
 */
class GitHubAPIError extends Error {
  constructor(message, statusCode, originalError) {
    super(message);
    this.name = 'GitHubAPIError';
    this.statusCode = statusCode;
    this.originalError = originalError;
  }
}
34
+
35
/**
 * Minimal leveled console logger. Structured data is pretty-printed as
 * JSON; `debug` output only appears when the DEBUG env var equals 'true'.
 */
const logger = {
  info(message, data) {
    console.log(`[INFO] ${message}`, data ? JSON.stringify(data, null, 2) : '');
  },
  warn(message, data) {
    console.warn(`[WARN] ${message}`, data ? JSON.stringify(data, null, 2) : '');
  },
  error(message, error) {
    console.error(`[ERROR] ${message}`, error?.message || error);
    if (error?.stack) {
      console.error(error.stack);
    }
  },
  debug(message, data) {
    if (process.env.DEBUG === 'true') {
      console.log(`[DEBUG] ${message}`, data ? JSON.stringify(data, null, 2) : '');
    }
  }
};
57
+
58
/**
 * Sleep utility used for retry backoff.
 * @param {number} ms - Milliseconds to sleep
 * @returns {Promise<void>} resolves after roughly `ms` milliseconds
 */
function sleep(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}
66
+
67
/**
 * Retry wrapper for async functions.
 * Validation errors and 404s fail immediately; rate limits (429),
 * server errors (5xx) and transient network failures are retried with
 * linear backoff (doubled while rate-limited).
 * @param {Function} fn - Async function to retry
 * @param {number} maxRetries - Max number of attempts
 * @param {number} delayMs - Base delay between retries (ms)
 * @returns {Promise<any>} whatever `fn` resolves to
 */
async function withRetry(fn, maxRetries = MAX_RETRIES, delayMs = RETRY_DELAY_MS) {
  const transientCodes = ['ECONNRESET', 'ETIMEDOUT', 'ENOTFOUND'];
  let lastError;

  for (let attempt = 1; attempt <= maxRetries; attempt++) {
    try {
      return await fn();
    } catch (error) {
      lastError = error;

      // Never retry validation failures or missing resources.
      if (error instanceof ValidationError || error.status === 404) {
        throw error;
      }

      // Retry only rate limits, server errors and transient network failures.
      const retryable =
        error.status === 429 ||
        error.status >= 500 ||
        transientCodes.includes(error.code);

      if (!retryable || attempt === maxRetries) {
        throw error;
      }

      // Back off harder when rate-limited.
      const waitTime = (error.status === 429 ? 2 : 1) * delayMs * attempt;
      logger.warn(`Attempt ${attempt}/${maxRetries} failed, retrying in ${waitTime}ms...`, {
        error: error.message,
        status: error.status
      });
      await sleep(waitTime);
    }
  }
  throw lastError;
}
109
+
110
/**
 * Validate input parameters shared by annotate() and postSummaryComment().
 * @param {Object} options
 * @param {string} options.jsonFile - path to the JSON result file
 * @param {string} [options.githubToken] - token; GITHUB_TOKEN env is an accepted fallback
 * @param {string} options.repo - repository in "owner/repo" form
 * @param {number} options.prNumber - positive integer PR number
 * @throws {ValidationError} when any option is missing or malformed
 */
function validateInput({ jsonFile, githubToken, repo, prNumber }) {
  const fail = (msg) => {
    throw new ValidationError(msg);
  };

  if (!jsonFile || typeof jsonFile !== 'string') {
    fail('jsonFile is required and must be a string');
  }

  if (!githubToken && !process.env.GITHUB_TOKEN) {
    fail('githubToken is required or GITHUB_TOKEN env var must be set');
  }

  if (!repo || typeof repo !== 'string') {
    fail('repo is required and must be a string');
  }

  const [owner, name, ...extra] = repo.split('/');
  if (!owner || !name || extra.length > 0) {
    fail('repo must be in format "owner/repo"');
  }

  if (!Number.isInteger(prNumber) || prNumber <= 0) {
    fail('prNumber must be a positive integer');
  }
}
137
+
138
/**
 * Read and parse a JSON result file.
 * An empty file is treated as "no results" and yields [] with a warning;
 * files over 10MB only trigger a size warning.
 * @param {string} jsonFile - Path to JSON file
 * @returns {Object|Array} Parsed JSON content
 * @throws {Error} when the file is missing, unreadable, not a regular file,
 *                 or contains invalid JSON
 */
function readJsonFile(jsonFile) {
  let stats;
  try {
    stats = fs.statSync(jsonFile);
  } catch (error) {
    // FIX: the original did existsSync() then statSync(), a TOCTOU race.
    // Derive "not found" from the stat error itself instead.
    if (error.code === 'ENOENT') {
      throw new Error(`Result file not found: ${jsonFile}`);
    }
    throw new Error(`Cannot access file ${jsonFile}: ${error.message}`);
  }

  if (!stats.isFile()) {
    throw new Error(`Path is not a file: ${jsonFile}`);
  }

  if (stats.size === 0) {
    logger.warn('Result file is empty', { jsonFile });
    return [];
  }

  // Check file size (warn if > 10MB)
  const maxSize = 10 * 1024 * 1024;
  if (stats.size > maxSize) {
    logger.warn(`Result file is very large (${(stats.size / 1024 / 1024).toFixed(2)}MB)`, { jsonFile });
  }

  let content;
  try {
    content = fs.readFileSync(jsonFile, 'utf8');
  } catch (error) {
    throw new Error(`Cannot read file ${jsonFile}: ${error.message}`);
  }

  try {
    return JSON.parse(content);
  } catch (error) {
    throw new Error(`Invalid JSON in file ${jsonFile}: ${error.message}`);
  }
}
184
+
185
/**
 * Parse violations from JSON data.
 * Supports two shapes:
 *  - ESLint-style array of { filePath, messages[] } objects (normalized)
 *  - object with a `violations` array (passed through unmodified)
 * @param {Array|Object} raw - Raw JSON data
 * @returns {Array} Array of violation objects
 * @throws {Error} when the data is neither shape
 */
function parseViolations(raw) {
  const cwd = process.env.GITHUB_WORKSPACE || process.cwd();

  if (Array.isArray(raw)) {
    const violations = [];

    for (const fileObj of raw) {
      if (!fileObj || typeof fileObj !== 'object') {
        logger.warn('Skipping invalid file object', { fileObj });
        continue;
      }
      if (!fileObj.filePath || !Array.isArray(fileObj.messages)) {
        logger.debug('Skipping file object without filePath or messages', { fileObj });
        continue;
      }

      // Make the path workspace-relative and POSIX-style.
      let relPath = fileObj.filePath;
      if (relPath.startsWith(cwd)) {
        relPath = relPath.slice(cwd.length).replace(/^[/\\]/, '');
      }
      relPath = relPath.replace(/\\/g, '/');

      for (const msg of fileObj.messages) {
        if (!msg || typeof msg !== 'object') {
          logger.warn('Skipping invalid message object', { msg });
          continue;
        }

        const line = parseInt(msg.line, 10);
        if (!line || line <= 0) {
          logger.warn('Skipping message with invalid line number', { msg, file: relPath });
          continue;
        }

        violations.push({
          file: relPath,
          line,
          rule: msg.ruleId || 'unknown',
          severity: msg.severity === 2 ? 'error' : 'warning',
          message: msg.message || 'No message provided'
        });
      }
    }

    return violations;
  }

  if (raw && typeof raw === 'object') {
    const rawViolations = raw.violations || [];
    if (!Array.isArray(rawViolations)) {
      throw new Error('violations property must be an array');
    }
    return [...rawViolations];
  }

  throw new Error('JSON data must be an array or object with violations property');
}
253
+
254
/**
 * Fetch all existing review comments on a PR (paginated, 100 per page).
 * Failures are non-fatal: an empty array is returned and duplicate
 * detection is simply disabled for this run.
 * @param {Object} octokit - Octokit instance
 * @param {string} owner - Repo owner
 * @param {string} repoName - Repo name
 * @param {number} prNumber - PR number
 * @returns {Promise<Array>} Array of existing comments
 */
async function getExistingComments(octokit, owner, repoName, prNumber) {
  try {
    const PER_PAGE = 100;
    const comments = [];

    for (let page = 1; ; page++) {
      const { data } = await octokit.pulls.listReviewComments({
        owner,
        repo: repoName,
        pull_number: prNumber,
        per_page: PER_PAGE,
        page
      });

      comments.push(...data);
      if (data.length !== PER_PAGE) {
        break;
      }
    }

    return comments;
  } catch (error) {
    // Non-critical error, log and continue without dedup data.
    logger.warn('Failed to fetch existing comments, duplicate detection disabled', {
      error: error.message
    });
    return [];
  }
}
291
+
292
/**
 * Check whether an identical review comment (same path, line and body)
 * already exists on the PR.
 * @param {Array} existingComments - Existing PR comments
 * @param {Object} newComment - New comment to check
 * @returns {boolean} true when an exact match is found
 */
function isCommentDuplicate(existingComments, newComment) {
  const { path, line, body } = newComment;
  for (const existing of existingComments) {
    if (existing.path === path && existing.line === line && existing.body === body) {
      return true;
    }
  }
  return false;
}
305
+
306
/**
 * Truncate a comment body that exceeds GitHub's maximum comment length,
 * appending a notice so readers know content was cut.
 * @param {string} body - Comment body
 * @returns {string} the original body, or a truncated copy with a notice
 */
function truncateComment(body) {
  if (body.length > MAX_COMMENT_LENGTH) {
    // Leave headroom for the truncation notice.
    const kept = body.substring(0, MAX_COMMENT_LENGTH - 100);
    return `${kept}\n\n... (comment truncated, too long)`;
  }
  return body;
}
319
+
320
/**
 * Create review comments in batches of MAX_COMMENTS_PER_REVIEW.
 * The last batch carries the REQUEST_CHANGES event when errors exist;
 * earlier batches are plain COMMENT reviews.
 * @param {Object} octokit - Octokit instance
 * @param {string} owner - Repo owner
 * @param {string} repoName - Repo name
 * @param {number} prNumber - PR number
 * @param {string} headSha - Commit SHA the comments anchor to
 * @param {Array} comments - Comments to post
 * @param {boolean} hasError - Whether there are error-severity violations
 * @returns {Promise<Array>} Array of created review objects
 * @throws {GitHubAPIError} when any review creation fails
 */
async function createReviewsInBatches(octokit, owner, repoName, prNumber, headSha, comments, hasError) {
  const reviews = [];
  const batches = [];

  // Split comments into batches
  for (let i = 0; i < comments.length; i += MAX_COMMENTS_PER_REVIEW) {
    batches.push(comments.slice(i, i + MAX_COMMENTS_PER_REVIEW));
  }

  logger.info(`Creating ${batches.length} review(s) with ${comments.length} comment(s)`);

  for (let i = 0; i < batches.length; i++) {
    const batch = batches[i];
    const isLastBatch = i === batches.length - 1;

    // Only REQUEST_CHANGES on last batch if there are errors
    const eventType = isLastBatch && hasError ? 'REQUEST_CHANGES' : 'COMMENT';

    // FIX: GitHub requires a non-empty review body for REQUEST_CHANGES events.
    // The original only set a body for multi-batch runs, so a single-batch
    // error review was sent with body undefined and rejected by the API.
    let reviewBody;
    if (isLastBatch && batches.length > 1) {
      reviewBody = `SunLint found ${comments.length} issue(s) across multiple reviews.`;
    } else if (eventType === 'REQUEST_CHANGES') {
      reviewBody = `SunLint found ${comments.length} issue(s).`;
    }

    try {
      const reviewRes = await withRetry(async () => {
        return await octokit.pulls.createReview({
          owner,
          repo: repoName,
          pull_number: prNumber,
          commit_id: headSha,
          event: eventType,
          body: reviewBody,
          comments: batch
        });
      });

      reviews.push(reviewRes.data);
      logger.info(`Review ${i + 1}/${batches.length} created with ${batch.length} comment(s)`, {
        reviewId: reviewRes.data.id
      });

      // Add delay between batches to avoid rate limiting
      if (i < batches.length - 1) {
        await sleep(500);
      }
    } catch (error) {
      logger.error(`Failed to create review ${i + 1}/${batches.length}`, error);
      throw new GitHubAPIError(
        `Failed to create review: ${error.message}`,
        error.status,
        error
      );
    }
  }

  return reviews;
}
385
+
386
/**
 * Parse a unified-diff patch and collect the new-file ("RIGHT" side) line
 * numbers that can receive review comments: added and context lines.
 * Deleted lines and metadata lines do not advance the new-file counter.
 * @param {string} patch - GitHub patch string (may be undefined for binary files)
 * @returns {Set<number>} Set of valid line numbers
 */
function parseValidLineNumbers(patch) {
  const validLines = new Set();
  if (!patch) {
    return validLines;
  }

  // Hunk header: @@ -old_start,old_count +new_start,new_count @@
  const HUNK_HEADER = /^@@ -\d+(?:,\d+)? \+(\d+)(?:,\d+)? @@/;
  let currentLine = 0;

  for (const rawLine of patch.split('\n')) {
    const hunk = rawLine.match(HUNK_HEADER);
    if (hunk) {
      // New hunk: reset the counter to the new-file start line.
      currentLine = Number.parseInt(hunk[1], 10);
      continue;
    }

    const marker = rawLine[0];
    if (marker === '+' || marker === ' ') {
      // Added or context line: exists in the new file.
      validLines.add(currentLine);
      currentLine += 1;
    }
    // '-' (deleted) and anything else (e.g. "\ No newline at end of file")
    // are skipped without advancing the counter.
  }

  return validLines;
}
426
+
427
/**
 * Fetch the list of files changed in a PR (paginated, 100 per page) and
 * index them by filename, including the set of commentable line numbers
 * parsed from each file's patch. Renamed files are indexed under both the
 * old and the new name.
 * @param {Object} octokit - Octokit instance
 * @param {string} owner - Repo owner
 * @param {string} repoName - Repo name
 * @param {number} prNumber - PR number
 * @returns {Promise<Map>} Map of filename to file info
 * @throws {GitHubAPIError} when listing the files fails
 */
async function getPRFilesInfo(octokit, owner, repoName, prNumber) {
  const filesMap = new Map();
  const PER_PAGE = 100;

  try {
    for (let page = 1; ; page++) {
      const { data } = await octokit.pulls.listFiles({
        owner,
        repo: repoName,
        pull_number: prNumber,
        per_page: PER_PAGE,
        page
      });

      for (const file of data) {
        const fileInfo = {
          filename: file.filename,
          previous_filename: file.previous_filename, // set for renamed files
          additions: file.additions,
          deletions: file.deletions,
          changes: file.changes,
          status: file.status, // 'added' | 'removed' | 'modified' | 'renamed'
          validLines: parseValidLineNumbers(file.patch), // commentable new-file lines
          patch: file.patch // kept for debugging
        };

        // Store by current filename
        filesMap.set(file.filename, fileInfo);

        // Index renamed files under their old name too, pointing at the new one.
        if (file.status === 'renamed' && file.previous_filename) {
          filesMap.set(file.previous_filename, {
            ...fileInfo,
            isOldName: true, // entry reached via the pre-rename path
            newFilename: file.filename // reference to the new name
          });
        }
      }

      if (data.length !== PER_PAGE) {
        return filesMap;
      }
    }
  } catch (error) {
    throw new GitHubAPIError(
      `Failed to fetch PR files: ${error.message}`,
      error.status,
      error
    );
  }
}
492
+
493
/**
 * Annotate a GitHub PR with SunLint results as inline review comments.
 * @param {Object} options
 * @param {string} options.jsonFile - Path to JSON result file
 * @param {string} [options.githubToken] - GitHub token (with repo:write), falls back to GITHUB_TOKEN env
 * @param {string} options.repo - GitHub repo in format owner/repo
 * @param {number} options.prNumber - Pull request number
 * @param {boolean} [options.skipDuplicates=true] - Skip duplicate comments
 * @returns {Promise<Object>} Result object with summary stats
 * @throws {ValidationError} When input validation fails
 * @throws {GitHubAPIError} When GitHub API calls fail
 * @throws {Error} For other errors
 */
async function annotate({
  jsonFile,
  githubToken,
  repo,
  prNumber,
  skipDuplicates = true
}) {
  const startTime = Date.now();

  try {
    // Step 1: Validate input
    logger.info('Starting GitHub annotation process', { jsonFile, repo, prNumber });
    validateInput({ jsonFile, githubToken, repo, prNumber });

    // Step 2: Read and parse JSON file
    logger.info('Reading result file', { jsonFile });
    const raw = readJsonFile(jsonFile);

    // Step 3: Parse violations
    logger.info('Parsing violations');
    const violations = parseViolations(raw);

    if (violations.length === 0) {
      logger.info('No violations found');
      return {
        success: true,
        message: 'No violations to comment',
        stats: {
          totalViolations: 0,
          commentsCreated: 0,
          duplicatesSkipped: 0,
          duration: Date.now() - startTime
        }
      };
    }

    logger.info(`Found ${violations.length} violation(s)`, {
      errors: violations.filter(v => v.severity === 'error').length,
      warnings: violations.filter(v => v.severity === 'warning').length
    });

    // Step 4: Initialize Octokit (lazy ESM import, cached at module level)
    const token = githubToken || process.env.GITHUB_TOKEN;
    const [owner, repoName] = repo.split('/');

    if (!Octokit) {
      logger.debug('Loading @octokit/rest');
      Octokit = (await import('@octokit/rest')).Octokit;
    }

    const octokit = new Octokit({ auth: token });

    // Step 5: Get PR info (the head SHA anchors the review comments)
    logger.info('Fetching PR information');
    let prData;
    try {
      const response = await withRetry(async () => {
        return await octokit.pulls.get({
          owner,
          repo: repoName,
          pull_number: prNumber
        });
      });
      prData = response.data;
    } catch (error) {
      if (error.status === 404) {
        throw new GitHubAPIError(
          `PR #${prNumber} not found in ${repo}`,
          404,
          error
        );
      }
      throw new GitHubAPIError(
        `Failed to fetch PR: ${error.message}`,
        error.status,
        error
      );
    }

    const headSha = prData.head.sha;
    logger.info('PR information retrieved', {
      state: prData.state,
      sha: headSha,
      title: prData.title
    });

    // Annotating a closed/merged PR still works; just warn about it.
    if (prData.state !== 'open') {
      logger.warn('PR is not open', { state: prData.state });
    }

    // Step 6: Get PR files
    logger.info('Fetching PR files');
    const prFilesInfo = await getPRFilesInfo(octokit, owner, repoName, prNumber);
    const prFiles = Array.from(prFilesInfo.keys());

    logger.info(`PR has ${prFiles.length} file(s) changed`);

    // Step 7: Keep only violations that land on commentable lines of PR files.
    const matchingViolations = [];
    let filesSkipped = 0;
    let linesSkipped = 0;
    let renamedFilesHandled = 0;

    for (const v of violations) {
      let targetFile = v.file;
      let fileInfo = prFilesInfo.get(targetFile);

      // BUGFIX: the previous check `!fileInfo && prFilesInfo.has(targetFile)`
      // could never be true (Map.get() returning undefined implies Map.has()
      // is false), so renamed files were never remapped and comments were
      // posted against the old path. Old names ARE stored in the map with an
      // isOldName flag, so detect that on the fetched entry and remap.
      if (fileInfo && fileInfo.isOldName && fileInfo.newFilename) {
        logger.debug(`Mapping renamed file: ${targetFile} -> ${fileInfo.newFilename}`);
        targetFile = fileInfo.newFilename;
        fileInfo = prFilesInfo.get(targetFile);
        renamedFilesHandled++;
      }

      // Skip if file not in PR
      if (!fileInfo) {
        logger.debug(`Skipping violation - file not in PR: ${v.file}`);
        filesSkipped++;
        continue;
      }

      // Skip if file is deleted/removed
      if (fileInfo.status === 'removed') {
        logger.debug(`Skipping violation - file removed: ${targetFile}`);
        filesSkipped++;
        continue;
      }

      // Validate line number against the patch; an empty validLines set
      // (e.g. binary file without a patch) lets the violation through.
      if (fileInfo.validLines && fileInfo.validLines.size > 0) {
        if (!fileInfo.validLines.has(v.line)) {
          logger.debug(`Skipping violation - line ${v.line} not in PR diff: ${targetFile}`);
          linesSkipped++;
          continue;
        }
      }

      // Add to matching violations using the post-rename filename.
      matchingViolations.push({
        ...v,
        file: targetFile
      });
    }

    if (matchingViolations.length === 0) {
      logger.info('No violations match PR files or valid lines', {
        totalViolations: violations.length,
        filesSkipped,
        linesSkipped
      });
      return {
        success: true,
        message: 'No matching PR file violations to comment',
        stats: {
          totalViolations: violations.length,
          matchingViolations: 0,
          filesSkipped,
          linesSkipped,
          renamedFilesHandled,
          commentsCreated: 0,
          duplicatesSkipped: 0,
          duration: Date.now() - startTime
        }
      };
    }

    logger.info(`${matchingViolations.length} violation(s) match PR files and valid lines`, {
      filesSkipped,
      linesSkipped,
      renamedFilesHandled
    });

    // Step 8: Get existing comments to avoid duplicates
    let existingComments = [];
    if (skipDuplicates) {
      logger.info('Fetching existing comments for duplicate detection');
      existingComments = await getExistingComments(octokit, owner, repoName, prNumber);
      logger.info(`Found ${existingComments.length} existing comment(s)`);
    }

    // Step 9: Prepare review comments
    const reviewComments = [];
    let duplicatesSkipped = 0;

    for (const v of matchingViolations) {
      const commentBody = truncateComment(`[${v.rule}] ${v.message}`);
      const comment = {
        path: v.file,
        line: v.line,
        side: 'RIGHT',
        body: commentBody
      };

      if (skipDuplicates && isCommentDuplicate(existingComments, comment)) {
        duplicatesSkipped++;
        logger.debug('Skipping duplicate comment', { file: v.file, line: v.line });
        continue;
      }

      reviewComments.push(comment);
    }

    if (reviewComments.length === 0) {
      logger.info('All comments are duplicates, nothing to post');
      return {
        success: true,
        message: 'All comments already exist on PR',
        stats: {
          totalViolations: violations.length,
          matchingViolations: matchingViolations.length,
          filesSkipped,
          linesSkipped,
          renamedFilesHandled,
          commentsCreated: 0,
          duplicatesSkipped,
          duration: Date.now() - startTime
        }
      };
    }

    logger.info(`Preparing to create ${reviewComments.length} comment(s)`, {
      duplicatesSkipped
    });

    // Step 10: Create reviews (REQUEST_CHANGES when any error-severity hit matched)
    const hasError = matchingViolations.some(v => v.severity === 'error');
    const reviews = await createReviewsInBatches(
      octokit,
      owner,
      repoName,
      prNumber,
      headSha,
      reviewComments,
      hasError
    );

    const duration = Date.now() - startTime;
    logger.info('Annotation completed successfully', {
      reviewsCreated: reviews.length,
      commentsCreated: reviewComments.length,
      filesSkipped,
      linesSkipped,
      renamedFilesHandled,
      duration: `${duration}ms`
    });

    return {
      success: true,
      message: `Created ${reviews.length} review(s) with ${reviewComments.length} comment(s)`,
      reviews: reviews.map(r => ({
        id: r.id,
        html_url: r.html_url
      })),
      stats: {
        totalViolations: violations.length,
        matchingViolations: matchingViolations.length,
        filesSkipped,
        linesSkipped,
        renamedFilesHandled,
        commentsCreated: reviewComments.length,
        duplicatesSkipped,
        reviewsCreated: reviews.length,
        hasErrors: hasError,
        duration
      }
    };

  } catch (error) {
    logger.error('Annotation failed', error);

    // Re-throw with more context
    if (error instanceof ValidationError || error instanceof GitHubAPIError) {
      throw error;
    }

    throw new Error(`GitHub annotation failed: ${error.message}`);
  }
}
789
+
790
/**
 * Post (or update) a single summary comment on a GitHub PR describing the
 * overall SunLint result: status, counts, and the top offending files.
 * An existing bot-authored "SunLint Report" comment is updated in place so
 * repeated runs don't spam the PR.
 * @param {Object} options
 * @param {string} options.jsonFile - Path to JSON result file
 * @param {string} [options.githubToken] - GitHub token, falls back to GITHUB_TOKEN env
 * @param {string} options.repo - GitHub repo in format owner/repo
 * @param {number} options.prNumber - Pull request number
 * @returns {Promise<Object>} Result object with action taken and stats
 * @throws {ValidationError} When input validation fails
 * @throws {GitHubAPIError} When posting/updating the comment fails
 */
async function postSummaryComment({
  jsonFile,
  githubToken,
  repo,
  prNumber
}) {
  const startTime = Date.now();

  try {
    // Step 1: Validate input
    logger.info('Starting GitHub summary comment process', { jsonFile, repo, prNumber });
    validateInput({ jsonFile, githubToken, repo, prNumber });

    // Step 2: Read and parse JSON file
    logger.info('Reading result file', { jsonFile });
    const raw = readJsonFile(jsonFile);

    // Step 3: Parse violations
    logger.info('Parsing violations for summary');
    const violations = parseViolations(raw);

    // Step 4: Initialize Octokit (lazily imported ESM, cached at module level)
    const token = githubToken || process.env.GITHUB_TOKEN;
    const [owner, repoName] = repo.split('/');

    if (!Octokit) {
      logger.debug('Loading @octokit/rest');
      Octokit = (await import('@octokit/rest')).Octokit;
    }

    const octokit = new Octokit({ auth: token });

    // Step 5: Calculate statistics
    const totalViolations = violations.length;
    const errorCount = violations.filter(v => v.severity === 'error').length;
    const warningCount = violations.filter(v => v.severity === 'warning').length;

    // Group by file
    const fileGroups = {};
    for (const v of violations) {
      if (!fileGroups[v.file]) {
        fileGroups[v.file] = [];
      }
      fileGroups[v.file].push(v);
    }

    const filesWithIssues = Object.keys(fileGroups).length;
    // NOTE(review): raw.length is only meaningful for the ESLint-style array
    // input; for object input it is undefined and falls back to filesWithIssues.
    const totalFiles = raw.length || filesWithIssues;

    // Step 6: Generate summary markdown
    const emoji = errorCount > 0 ? '❌' : warningCount > 0 ? '⚠️' : '✅';
    const status = errorCount > 0 ? 'Failed' : warningCount > 0 ? 'Passed with warnings' : 'Passed';

    let summary = `## ${emoji} SunLint Report\n\n`;
    summary += `**Status:** ${status}\n\n`;
    summary += `📊 **Summary:**\n`;
    summary += `- Total files analyzed: ${totalFiles}\n`;
    summary += `- Files with issues: ${filesWithIssues}\n`;
    summary += `- Total violations: ${totalViolations}\n`;
    summary += `- Errors: ${errorCount}\n`;
    summary += `- Warnings: ${warningCount}\n\n`;

    if (totalViolations === 0) {
      summary += '✅ Great job! No coding standard violations found.\n';
    } else {
      summary += '### Top Issues:\n\n';

      // List top 10 files with most issues
      const sortedFiles = Object.entries(fileGroups)
        .sort((a, b) => b[1].length - a[1].length)
        .slice(0, 10);

      for (const [file, fileViolations] of sortedFiles) {
        const fileErrors = fileViolations.filter(v => v.severity === 'error').length;
        const fileWarnings = fileViolations.filter(v => v.severity === 'warning').length;
        summary += `- \`${file}\`: ${fileErrors} error(s), ${fileWarnings} warning(s)\n`;
      }

      if (Object.keys(fileGroups).length > 10) {
        summary += `\n_... and ${Object.keys(fileGroups).length - 10} more file(s)_\n`;
      }

      summary += '\n⚠️ Please check the inline comments on your code for details.\n';
    }

    summary += '\n---\n';
    summary += '<sub>Generated by [SunLint](https://github.com/sun-asterisk/engineer-excellence) • ';

    // Add link to full report if available (set by GitHub Actions)
    if (process.env.GITHUB_RUN_ID) {
      const runUrl = `https://github.com/${repo}/actions/runs/${process.env.GITHUB_RUN_ID}`;
      summary += `[View full report](${runUrl})`;
    }

    summary += '</sub>\n';

    // Step 7: Try to find existing SunLint comment so we update instead of
    // duplicating. NOTE(review): only the first 100 issue comments are
    // scanned (no pagination), and `comment.user.type` would throw if a
    // comment has a null user — both cases fall into the catch below and
    // merely disable the update path; confirm this is acceptable.
    logger.info('Checking for existing summary comment');
    let existingComment = null;

    try {
      const { data: comments } = await octokit.issues.listComments({
        owner,
        repo: repoName,
        issue_number: prNumber,
        per_page: 100
      });

      existingComment = comments.find(comment =>
        comment.user.type === 'Bot' &&
        comment.body.includes('SunLint Report')
      );
    } catch (error) {
      logger.warn('Failed to fetch existing comments', { error: error.message });
    }

    // Step 8: Post or update comment
    let commentResult;

    try {
      if (existingComment) {
        logger.info('Updating existing summary comment', { commentId: existingComment.id });
        commentResult = await withRetry(async () => {
          return await octokit.issues.updateComment({
            owner,
            repo: repoName,
            comment_id: existingComment.id,
            body: summary
          });
        });
        logger.info('Summary comment updated successfully');
      } else {
        logger.info('Creating new summary comment');
        commentResult = await withRetry(async () => {
          return await octokit.issues.createComment({
            owner,
            repo: repoName,
            issue_number: prNumber,
            body: summary
          });
        });
        logger.info('Summary comment created successfully');
      }
    } catch (error) {
      throw new GitHubAPIError(
        `Failed to post summary comment: ${error.message}`,
        error.status,
        error
      );
    }

    const duration = Date.now() - startTime;
    logger.info('Summary comment completed', {
      action: existingComment ? 'updated' : 'created',
      duration: `${duration}ms`
    });

    return {
      success: true,
      action: existingComment ? 'updated' : 'created',
      commentId: commentResult.data.id,
      commentUrl: commentResult.data.html_url,
      stats: {
        totalViolations,
        errorCount,
        warningCount,
        filesWithIssues,
        duration
      }
    };

  } catch (error) {
    logger.error('Summary comment failed', error);

    // Preserve typed errors for callers; wrap everything else with context.
    if (error instanceof ValidationError || error instanceof GitHubAPIError) {
      throw error;
    }

    throw new Error(`GitHub summary comment failed: ${error.message}`);
  }
}
980
+
981
// Public API: PR annotation entry points plus the error classes so callers
// can distinguish validation failures from GitHub API failures via instanceof.
module.exports = {
  annotate,
  postSummaryComment,
  ValidationError,
  GitHubAPIError
};