@salesforce-ux/slds-linter 0.2.0 → 0.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/build/commands/emit.d.ts +2 -0
- package/build/commands/emit.js +48 -0
- package/build/commands/lint.d.ts +2 -0
- package/build/commands/lint.js +55 -0
- package/build/commands/report.d.ts +2 -0
- package/build/commands/report.js +66 -0
- package/build/executor/__tests__/executor.test.js +216 -0
- package/build/executor/index.d.ts +20 -0
- package/build/executor/index.js +105 -0
- package/build/index.d.ts +2 -0
- package/build/index.js +33 -0
- package/build/services/__tests__/file-scanner.test.js +47 -0
- package/build/services/artifact-processor.d.ts +6 -0
- package/build/services/artifact-processor.js +37 -0
- package/build/services/batch-processor.d.ts +29 -0
- package/build/services/batch-processor.js +84 -0
- package/build/services/config.resolver.d.ts +6 -0
- package/build/services/config.resolver.js +20 -0
- package/build/services/file-patterns.d.ts +3 -0
- package/build/services/file-patterns.js +21 -0
- package/build/services/file-scanner.d.ts +26 -0
- package/build/services/file-scanner.js +71 -0
- package/build/services/lint-runner.d.ts +17 -0
- package/build/services/lint-runner.js +69 -0
- package/build/services/report-generator.d.ts +43 -0
- package/build/services/report-generator.js +186 -0
- package/build/types/index.d.ts +75 -0
- package/build/types/index.js +0 -0
- package/build/utils/config-utils.d.ts +33 -0
- package/build/utils/config-utils.js +68 -0
- package/build/utils/editorLinkUtil.d.ts +21 -0
- package/build/utils/editorLinkUtil.js +21 -0
- package/build/utils/index.d.ts +2 -0
- package/build/utils/index.js +7 -0
- package/build/utils/lintResultsUtil.d.ts +21 -0
- package/build/utils/lintResultsUtil.js +70 -0
- package/build/utils/logger.d.ts +8 -0
- package/build/utils/logger.js +28 -0
- package/build/utils/nodeVersionUtil.d.ts +19 -0
- package/build/utils/nodeVersionUtil.js +42 -0
- package/build/workers/base.worker.d.ts +15 -0
- package/build/workers/base.worker.js +44 -0
- package/build/workers/eslint.worker.d.ts +1 -0
- package/build/workers/eslint.worker.js +50 -0
- package/build/workers/stylelint.worker.d.ts +1 -0
- package/build/workers/stylelint.worker.js +40 -0
- package/package.json +4 -4
package/build/services/batch-processor.d.ts
@@ -0,0 +1,29 @@
+export interface BatchProcessorOptions {
+    maxWorkers?: number;
+    timeoutMs?: number;
+}
+export interface BatchTask<T> {
+    files: string[];
+    config: T;
+}
+export interface BatchResult {
+    success: boolean;
+    error?: string;
+    results: any[];
+}
+export declare class BatchProcessor {
+    private static DEFAULT_MAX_WORKERS;
+    private static DEFAULT_TIMEOUT_MS;
+    /**
+     * Process batches of files in parallel using worker threads
+     * @param batches Array of file batches to process
+     * @param workerScript Path to the worker script
+     * @param taskConfig Configuration to pass to each worker
+     * @param options Processing options
+     */
+    static processBatches<T>(batches: string[][], workerScript: string, taskConfig: T, options?: BatchProcessorOptions): Promise<BatchResult[]>;
+    /**
+     * Creates a new worker with timeout handling
+     */
+    private static createWorker;
+}
package/build/services/batch-processor.js
@@ -0,0 +1,84 @@
+// src/services/batch-processor.ts
+import { Worker } from "worker_threads";
+import os from "os";
+import { Logger } from "../utils/logger.js";
+var AVAILABLE_CPUS = os.cpus().length - 1;
+var BatchProcessor = class {
+  static DEFAULT_MAX_WORKERS = Math.max(1, Math.min(4, AVAILABLE_CPUS));
+  static DEFAULT_TIMEOUT_MS = 3e5;
+  // 5 minutes
+  /**
+   * Process batches of files in parallel using worker threads
+   * @param batches Array of file batches to process
+   * @param workerScript Path to the worker script
+   * @param taskConfig Configuration to pass to each worker
+   * @param options Processing options
+   */
+  static async processBatches(batches, workerScript, taskConfig, options = {}) {
+    const maxWorkers = options.maxWorkers || this.DEFAULT_MAX_WORKERS;
+    const timeoutMs = options.timeoutMs || this.DEFAULT_TIMEOUT_MS;
+    Logger.debug(`Starting batch processing with ${maxWorkers} workers`);
+    Logger.debug(`Processing ${batches.length} batches`);
+    const results = [];
+    const activeWorkers = /* @__PURE__ */ new Set();
+    let currentBatchIndex = 0;
+    try {
+      while (currentBatchIndex < batches.length || activeWorkers.size > 0) {
+        while (activeWorkers.size < maxWorkers && currentBatchIndex < batches.length) {
+          const batchIndex = currentBatchIndex++;
+          const batch = batches[batchIndex];
+          const worker = this.createWorker(
+            workerScript,
+            { files: batch, config: taskConfig },
+            timeoutMs
+          );
+          activeWorkers.add(worker);
+          worker.on("message", (result) => {
+            results.push(result);
+            activeWorkers.delete(worker);
+            Logger.debug(`Completed batch ${batchIndex} of ${batches.length}`);
+          }).on("error", (error) => {
+            results.push({
+              success: false,
+              error: error.message,
+              results: []
+            });
+            activeWorkers.delete(worker);
+            Logger.error(`Worker error in batch ${batchIndex}: ${error.message}`);
+          }).on("exit", (code) => {
+            if (code !== 0) {
+              Logger.warning(`Worker exited with code ${code}`);
+            }
+            activeWorkers.delete(worker);
+          });
+        }
+        await new Promise((resolve) => setTimeout(resolve, 100));
+      }
+      return results;
+    } catch (error) {
+      Logger.error(`Batch processing failed: ${error.message}`);
+      throw error;
+    } finally {
+      for (const worker of activeWorkers) {
+        worker.terminate();
+      }
+    }
+  }
+  /**
+   * Creates a new worker with timeout handling
+   */
+  static createWorker(scriptPath, task, timeoutMs) {
+    const worker = new Worker(scriptPath, {
+      workerData: task
+    });
+    const timeoutId = setTimeout(() => {
+      Logger.warning(`Worker timeout after ${timeoutMs}ms`);
+      worker.terminate();
+    }, timeoutMs);
+    worker.once("exit", () => clearTimeout(timeoutId));
+    return worker;
+  }
+};
+export {
+  BatchProcessor
+};
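For orientation, a minimal usage sketch of the BatchProcessor API added above. Only processBatches, BatchProcessorOptions, and BatchResult come from this diff; the deep import path, the worker script location, and the task config shape are illustrative assumptions (the package's public entry point is not shown in this hunk).

// Sketch only: drives BatchProcessor.processBatches as declared above.
// The import path and worker script are assumptions for illustration.
import { BatchProcessor, type BatchResult } from "@salesforce-ux/slds-linter/build/services/batch-processor.js";

async function runBatches(): Promise<void> {
  const batches: string[][] = [["src/a.css", "src/b.css"], ["src/c.css"]];
  const results: BatchResult[] = await BatchProcessor.processBatches(
    batches,
    "./build/workers/stylelint.worker.js", // hypothetical path to a shipped worker
    { configPath: undefined, fix: false }, // mirrors the WorkerConfig shape in types/index.d.ts below
    { maxWorkers: 2, timeoutMs: 60000 }
  );
  for (const batch of results) {
    if (!batch.success) console.error(batch.error);
  }
}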
package/build/services/config.resolver.d.ts
@@ -0,0 +1,6 @@
+export declare const DEFAULT_ESLINT_CONFIG_PATH: string;
+export declare const DEFAULT_STYLELINT_CONFIG_PATH: string;
+export declare const STYLELINT_VERSION: string;
+export declare const ESLINT_VERSION: string;
+export declare const LINTER_CLI_VERSION: string;
+export declare const getRuleDescription: (ruleId: string) => string;
package/build/services/config.resolver.js
@@ -0,0 +1,20 @@
+// src/services/config.resolver.ts
+import { ruleMetadata } from "@salesforce-ux/stylelint-plugin-slds";
+import { resolvePath } from "../utils/nodeVersionUtil.js";
+var DEFAULT_ESLINT_CONFIG_PATH = resolvePath("@salesforce-ux/eslint-plugin-slds/.eslintrc.yml", import.meta);
+var DEFAULT_STYLELINT_CONFIG_PATH = resolvePath("@salesforce-ux/stylelint-plugin-slds/.stylelintrc.yml", import.meta);
+var STYLELINT_VERSION = "16.14.1";
+var ESLINT_VERSION = "8.57.1";
+var LINTER_CLI_VERSION = "0.2.1";
+var getRuleDescription = (ruleId) => {
+  const ruleIdWithoutNameSpace = `${ruleId}`.replace(/\@salesforce-ux\//, "");
+  return ruleMetadata(ruleIdWithoutNameSpace)?.ruleDesc || "--";
+};
+export {
+  DEFAULT_ESLINT_CONFIG_PATH,
+  DEFAULT_STYLELINT_CONFIG_PATH,
+  ESLINT_VERSION,
+  LINTER_CLI_VERSION,
+  STYLELINT_VERSION,
+  getRuleDescription
+};
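A short, hedged sketch of how the resolver exports above might be consumed; the import path and the rule id are illustrative only (actual rule names live in @salesforce-ux/stylelint-plugin-slds and are not shown in this diff).

// Sketch: the resolver strips the "@salesforce-ux/" namespace before the
// metadata lookup, so namespaced and bare rule ids should resolve to the same
// description, falling back to "--" for unknown rules.
import { getRuleDescription, LINTER_CLI_VERSION } from "./build/services/config.resolver.js";

console.log(`slds-linter CLI ${LINTER_CLI_VERSION}`);
console.log(getRuleDescription("@salesforce-ux/example-rule")); // hypothetical rule id
console.log(getRuleDescription("example-rule"));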
package/build/services/file-patterns.js
@@ -0,0 +1,21 @@
+// src/services/file-patterns.ts
+var StyleFilePatterns = {
+  extensions: ["css", "scss", "less", "sass"],
+  exclude: [
+    "**/node_modules/**",
+    "**/dist/**",
+    "**/build/**"
+  ]
+};
+var ComponentFilePatterns = {
+  extensions: ["html", "cmp", "component", "app", "page", "interface"],
+  exclude: [
+    "**/node_modules/**",
+    "**/dist/**",
+    "**/build/**"
+  ]
+};
+export {
+  ComponentFilePatterns,
+  StyleFilePatterns
+};
package/build/services/file-scanner.d.ts
@@ -0,0 +1,26 @@
+export interface FilePattern {
+    extensions: string[];
+    exclude?: string[];
+}
+export interface ScanOptions {
+    patterns: FilePattern;
+    batchSize?: number;
+}
+export declare class FileScanner {
+    private static DEFAULT_BATCH_SIZE;
+    /**
+     * Scans directory for files matching the given patterns
+     * @param directory Base directory to scan
+     * @param options Scanning options including patterns and batch size
+     * @returns Array of file paths in batches
+     */
+    static scanFiles(directory: string, options: ScanOptions): Promise<string[][]>;
+    /**
+     * Validates that files exist and are readable
+     */
+    private static validateFiles;
+    /**
+     * Splits array of files into batches
+     */
+    private static createBatches;
+}
package/build/services/file-scanner.js
@@ -0,0 +1,71 @@
+// src/services/file-scanner.ts
+import { promises as fs } from "fs";
+import { Logger } from "../utils/logger.js";
+import { globby } from "globby";
+import { extname } from "path";
+var FileScanner = class {
+  static DEFAULT_BATCH_SIZE = 100;
+  /**
+   * Scans directory for files matching the given patterns
+   * @param directory Base directory to scan
+   * @param options Scanning options including patterns and batch size
+   * @returns Array of file paths in batches
+   */
+  static async scanFiles(directory, options) {
+    try {
+      Logger.debug(`Scanning directory: ${directory}`);
+      const allFiles = await globby(directory, {
+        cwd: process.cwd(),
+        expandDirectories: true,
+        unique: true,
+        ignore: options.patterns.exclude,
+        onlyFiles: true,
+        dot: true,
+        // Include.dot files
+        absolute: true,
+        gitignore: true
+      }).then((matches) => matches.filter((match) => {
+        const fileExt = extname(match).substring(1);
+        return options.patterns.extensions.includes(fileExt);
+      }));
+      const validFiles = await this.validateFiles(allFiles);
+      const batchSize = options.batchSize || this.DEFAULT_BATCH_SIZE;
+      const batches = this.createBatches(validFiles, batchSize);
+      Logger.debug(
+        `Found ${validFiles.length} files, split into ${batches.length} batches`
+      );
+      return batches;
+    } catch (error) {
+      Logger.error(`Failed to scan files: ${error.message}`);
+      throw error;
+    }
+  }
+  /**
+   * Validates that files exist and are readable
+   */
+  static async validateFiles(files) {
+    const validFiles = [];
+    for (const file of files) {
+      try {
+        await fs.access(file, fs.constants.R_OK);
+        validFiles.push(file);
+      } catch (error) {
+        Logger.warning(`Skipping inaccessible file: ${file}`);
+      }
+    }
+    return validFiles;
+  }
+  /**
+   * Splits array of files into batches
+   */
+  static createBatches(files, batchSize) {
+    const batches = [];
+    for (let i = 0; i < files.length; i += batchSize) {
+      batches.push(files.slice(i, i + batchSize));
+    }
+    return batches;
+  }
+};
+export {
+  FileScanner
+};
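A minimal sketch of scanning for style files with the classes above, assuming the internal build modules are imported directly; the target directory and batch size are illustrative.

// Sketch: scan a directory for style files and get back absolute paths,
// already filtered to readable files and split into batches.
import { FileScanner } from "./build/services/file-scanner.js";
import { StyleFilePatterns } from "./build/services/file-patterns.js";

async function scanStyles(directory: string): Promise<string[][]> {
  return FileScanner.scanFiles(directory, {
    patterns: StyleFilePatterns, // css/scss/less/sass, excluding node_modules, dist, build
    batchSize: 50
  });
}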
package/build/services/lint-runner.d.ts
@@ -0,0 +1,17 @@
+import { LintResult } from '../types';
+export interface LintOptions {
+    fix?: boolean;
+    configPath?: string;
+    maxWorkers?: number;
+    timeoutMs?: number;
+}
+export declare class LintRunner {
+    /**
+     * Run linting on batches of files
+     */
+    static runLinting(fileBatches: string[][], workerType: 'style' | 'component', options?: LintOptions): Promise<LintResult[]>;
+    /**
+     * Process and normalize worker results
+     */
+    private static processResults;
+}
package/build/services/lint-runner.js
@@ -0,0 +1,69 @@
+// src/services/lint-runner.ts
+import path from "path";
+import { BatchProcessor } from "./batch-processor.js";
+import { Logger } from "../utils/logger.js";
+import { resolveDirName } from "../utils/nodeVersionUtil.js";
+var LintRunner = class {
+  /**
+   * Run linting on batches of files
+   */
+  static async runLinting(fileBatches, workerType, options = {}) {
+    try {
+      const workerScript = path.resolve(
+        resolveDirName(import.meta),
+        "../workers",
+        workerType === "style" ? "stylelint.worker.js" : "eslint.worker.js"
+      );
+      const workerConfig = {
+        configPath: options.configPath,
+        fix: options.fix
+      };
+      const results = await BatchProcessor.processBatches(
+        fileBatches,
+        workerScript,
+        workerConfig,
+        {
+          maxWorkers: options.maxWorkers,
+          timeoutMs: options.timeoutMs
+        }
+      );
+      return this.processResults(results);
+    } catch (error) {
+      Logger.error(`Linting failed: ${error.message}`);
+      throw error;
+    }
+  }
+  /**
+   * Process and normalize worker results
+   */
+  static processResults(batchResults) {
+    const results = [];
+    for (const batch of batchResults) {
+      if (!batch.success || !batch.results) {
+        Logger.warning(`Batch failed: ${batch.error}`);
+        continue;
+      }
+      for (const result of batch.results) {
+        if (result.error) {
+          Logger.warning(`File processing failed: ${result.file} - ${result.error}`);
+          continue;
+        }
+        results.push({
+          filePath: result.file,
+          errors: result.errors?.map((e) => ({
+            ...e,
+            severity: 2
+          })) || [],
+          warnings: result.warnings?.map((w) => ({
+            ...w,
+            severity: 1
+          })) || []
+        });
+      }
+    }
+    return results;
+  }
+};
+export {
+  LintRunner
+};
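A sketch tying FileScanner and LintRunner together, under the same assumption about direct imports from the build output; the directory argument and options are illustrative.

// Sketch: "style" routes batches to stylelint.worker.js, "component" to eslint.worker.js.
import { FileScanner } from "./build/services/file-scanner.js";
import { StyleFilePatterns } from "./build/services/file-patterns.js";
import { LintRunner } from "./build/services/lint-runner.js";

async function lintStyles(directory: string) {
  const batches = await FileScanner.scanFiles(directory, { patterns: StyleFilePatterns });
  const results = await LintRunner.runLinting(batches, "style", { fix: false });
  const errorCount = results.reduce((total, file) => total + file.errors.length, 0);
  console.log(`${results.length} files linted, ${errorCount} errors`);
  return results;
}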
package/build/services/report-generator.d.ts
@@ -0,0 +1,43 @@
+import { LintResult } from '../types';
+import { Readable } from 'stream';
+export interface ReportOptions {
+    outputPath?: string;
+    toolName: string;
+    toolVersion: string;
+}
+export declare class ReportGenerator {
+    /**
+     * Generate SARIF report from lint results with file output
+     */
+    static generateSarifReport(results: LintResult[], options: ReportOptions): Promise<void>;
+    /**
+     * Generate SARIF report as a stream without creating a file
+     */
+    static generateSarifReportStream(results: LintResult[], options: ReportOptions): Promise<Readable>;
+    /**
+     * Build SARIF report data in memory
+     */
+    static buildSarifReport(results: LintResult[], options: ReportOptions): Promise<any>;
+    /**
+     * Extract unique rules from results
+     */
+    private static extractRules;
+    /**
+     * Add lint results to SARIF report
+     */
+    private static addResultsToSarif;
+}
+export declare class CsvReportGenerator {
+    /**
+     * Generate CSV report and write to file
+     */
+    static generate(results: any[]): Promise<string>;
+    /**
+     * Generate CSV string from lint results
+     */
+    static generateCsvString(results: any[]): string;
+    /**
+     * Convert lint results to CSV-compatible data format
+     */
+    private static convertResultsToCsvData;
+}
package/build/services/report-generator.js
@@ -0,0 +1,186 @@
+// src/services/report-generator.ts
+import path from "path";
+import fs, { writeFile } from "fs/promises";
+import { mkConfig, generateCsv, asString } from "export-to-csv";
+import { SarifBuilder, SarifRunBuilder, SarifResultBuilder, SarifRuleBuilder } from "node-sarif-builder";
+import { createWriteStream } from "fs";
+import { JsonStreamStringify } from "json-stream-stringify";
+import { getRuleDescription } from "./config.resolver.js";
+import { parseText, replaceNamespaceinRules, transformedResults } from "../utils/lintResultsUtil.js";
+import { processArtifacts } from "./artifact-processor.js";
+var ReportGenerator = class {
+  /**
+   * Generate SARIF report from lint results with file output
+   */
+  static async generateSarifReport(results, options) {
+    if (!options.outputPath) {
+      return;
+    }
+    const sarifReport = await this.buildSarifReport(results, options);
+    for (const run of sarifReport.runs) {
+      await processArtifacts(run.artifacts);
+    }
+    const outputDir = path.dirname(options.outputPath);
+    await fs.mkdir(outputDir, { recursive: true });
+    const writeStream = createWriteStream(options.outputPath);
+    const jsonStream = new JsonStreamStringify(sarifReport, null, 2);
+    await new Promise((resolve, reject) => {
+      jsonStream.pipe(writeStream).on("finish", resolve).on("error", reject);
+    });
+  }
+  /**
+   * Generate SARIF report as a stream without creating a file
+   */
+  static async generateSarifReportStream(results, options) {
+    const sarifReport = await this.buildSarifReport(results, options);
+    return new JsonStreamStringify(sarifReport, null, 2);
+  }
+  /**
+   * Build SARIF report data in memory
+   */
+  static async buildSarifReport(results, options) {
+    const builder = new SarifBuilder();
+    const runBuilder = new SarifRunBuilder({
+      defaultSourceLanguage: "html"
+    }).initSimple({
+      toolDriverName: options.toolName,
+      toolDriverVersion: options.toolVersion,
+      url: "https://github.com/salesforce-ux/slds-linter"
+    });
+    runBuilder.run.properties = {
+      id: Number(Math.random().toString(10).substring(2, 10)),
+      version: options.toolVersion,
+      submissionDate: (/* @__PURE__ */ new Date()).toISOString().split("T")[0],
+      language: "html",
+      status: "accepted",
+      type: "source code"
+    };
+    runBuilder.run.tool.driver.organization = "Salesforce";
+    const rules = this.extractRules(results);
+    for (const rule of rules) {
+      const ruleBuilder = new SarifRuleBuilder().initSimple({
+        ruleId: replaceNamespaceinRules(rule.id),
+        shortDescriptionText: rule.shortDescription?.text
+      });
+      runBuilder.addRule(ruleBuilder);
+    }
+    for (const result of results) {
+      this.addResultsToSarif(runBuilder, result);
+    }
+    builder.addRun(runBuilder);
+    return builder.buildSarifOutput();
+  }
+  /**
+   * Extract unique rules from results
+   */
+  static extractRules(results) {
+    const rules = /* @__PURE__ */ new Map();
+    for (const result of results) {
+      for (const error of result.errors) {
+        if (!rules.has(error.ruleId)) {
+          rules.set(error.ruleId, {
+            id: replaceNamespaceinRules(error.ruleId),
+            shortDescription: {
+              text: getRuleDescription(replaceNamespaceinRules(error.ruleId))
+            },
+            properties: {
+              category: "Style"
+            }
+          });
+        }
+      }
+      for (const warning of result.warnings) {
+        if (!rules.has(warning.ruleId)) {
+          rules.set(warning.ruleId, {
+            id: replaceNamespaceinRules(warning.ruleId),
+            shortDescription: {
+              text: getRuleDescription(replaceNamespaceinRules(warning.ruleId))
+            },
+            properties: {
+              category: "Style"
+            }
+          });
+        }
+      }
+    }
+    return Array.from(rules.values());
+  }
+  /**
+   * Add lint results to SARIF report
+   */
+  static addResultsToSarif(runBuilder, lintResult) {
+    lintResult.errors.forEach((error) => {
+      const resultBuilder = new SarifResultBuilder().initSimple(transformedResults(lintResult, error, "error"));
+      runBuilder.addResult(resultBuilder);
+    });
+    lintResult.warnings.forEach((warning) => {
+      const resultBuilder = new SarifResultBuilder().initSimple(transformedResults(lintResult, warning, "warning"));
+      runBuilder.addResult(resultBuilder);
+    });
+  }
+};
+var CsvReportGenerator = class {
+  /**
+   * Generate CSV report and write to file
+   */
+  static async generate(results) {
+    const csvString = this.generateCsvString(results);
+    const csvReportPath = path.join(process.cwd(), "slds-linter-report.csv");
+    await writeFile(csvReportPath, csvString);
+    return csvReportPath;
+  }
+  /**
+   * Generate CSV string from lint results
+   */
+  static generateCsvString(results) {
+    const csvData = this.convertResultsToCsvData(results);
+    return asString(csvData);
+  }
+  /**
+   * Convert lint results to CSV-compatible data format
+   */
+  static convertResultsToCsvData(results) {
+    const cwd = process.cwd();
+    const csvConfig = mkConfig({
+      fieldSeparator: ",",
+      quoteStrings: true,
+      decimalSeparator: ".",
+      useTextFile: false,
+      useBom: true,
+      useKeysAsHeaders: true
+    });
+    const transformedResults2 = results.flatMap(
+      (result) => [
+        ...result.errors.map((error) => ({
+          "File Path": path.relative(cwd, result.filePath),
+          "Message": parseText(error.message),
+          "Severity": "error",
+          "Rule ID": replaceNamespaceinRules(error.ruleId || "N/A"),
+          "Start Line": error.line,
+          "Start Column": error.column,
+          "End Line": error.endLine || error.line,
+          // Default to start line if missing
+          "End Column": error.endColumn || error.column
+          // Default to start column if missing
+        })),
+        ...result.warnings.map((warning) => ({
+          "File Path": path.relative(cwd, result.filePath),
+          "Message": parseText(warning.message),
+          "Severity": "warning",
+          "Rule ID": replaceNamespaceinRules(warning.ruleId || "N/A"),
+          "Start Line": warning.line,
+          "Start Column": warning.column,
+          "End Line": warning.endLine || warning.line,
+          // Default to start line if missing
+          "End Column": warning.endColumn || warning.column
+          // Default to start column if missing
+        }))
+      ]
+    );
+    return generateCsv(csvConfig)(transformedResults2);
+  }
+};
+export {
+  CsvReportGenerator,
+  ReportGenerator
+};
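A sketch of producing SARIF and CSV output from LintResult[] with the generators above; the import paths, output location, and tool metadata values are illustrative (toolVersion here mirrors LINTER_CLI_VERSION from config.resolver.js).

// Sketch: SARIF goes to the given outputPath; the CSV generator writes
// slds-linter-report.csv into process.cwd() and returns that path.
import { ReportGenerator, CsvReportGenerator } from "./build/services/report-generator.js";
import type { LintResult } from "./build/types/index.js";

async function writeReports(results: LintResult[]): Promise<void> {
  await ReportGenerator.generateSarifReport(results, {
    outputPath: "reports/slds-linter.sarif",
    toolName: "slds-linter",
    toolVersion: "0.2.1"
  });
  const csvPath = await CsvReportGenerator.generate(results);
  console.log(`CSV report written to ${csvPath}`);
}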
package/build/types/index.d.ts
@@ -0,0 +1,75 @@
+export interface BaseConfig {
+    directory?: string;
+    files?: string[];
+    configStylelint?: string;
+    configEslint?: string;
+}
+/**
+ * CLI options interface extends BaseConfig for shared properties
+ */
+export interface CliOptions extends BaseConfig {
+    output?: string;
+    fix?: boolean;
+    editor?: string;
+    format?: string;
+}
+/**
+ * Configuration for linting operation in the Node API
+ * Extends the common base configuration
+ */
+export interface LintConfig extends BaseConfig {
+    fix?: boolean;
+}
+/**
+ * Configuration for report generation in the Node API
+ * Extends the common base configuration
+ */
+export interface ReportConfig extends BaseConfig {
+    format?: 'sarif' | 'csv';
+}
+export interface LintResultEntry {
+    line: number;
+    column: number;
+    endColumn: number;
+    message: string;
+    ruleId: string;
+    severity: number;
+}
+export interface LintResult {
+    filePath: string;
+    errors: Array<LintResultEntry>;
+    warnings: Array<LintResultEntry>;
+}
+export type ExitCode = 0 | 1 | 2;
+export interface WorkerConfig {
+    configPath?: string;
+    fix?: boolean;
+}
+export interface WorkerResult {
+    file: string;
+    error?: string;
+    warnings?: Array<{
+        line: number;
+        column: number;
+        endColumn: number;
+        message: string;
+        ruleId: string;
+    }>;
+    errors?: Array<{
+        line: number;
+        column: number;
+        endColumn: number;
+        message: string;
+        ruleId: string;
+    }>;
+}
+export interface SarifResultEntry {
+    level: any;
+    messageText: string;
+    ruleId: string;
+    fileUri?: string;
+    startLine?: number;
+    startColumn?: number;
+    endLine?: number;
+    endColumn?: number;
+}
package/build/types/index.js
File without changes