qualink 0.4.0 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -101,18 +101,52 @@ For pipelines with distinct stages, call qualink once per stage with `--stage-na
101
101
 
102
102
  ## CLI usage
103
103
 
104
+ ### Single collector
105
+
104
106
  ```bash
105
107
  qualink collect <collector> --input <path> --sink elastic [flags]
106
108
  ```
107
109
 
108
- Examples:
110
+ ```bash
111
+ qualink collect eslint --input eslint-report.json --sink elastic --repo frontend-mono --category frontend --tags frontend,web
112
+ qualink collect sarif --input analyzers.sarif --sink elastic --repo backend-api --category backend --tags backend,api
113
+ qualink collect coverage-dotnet --input coverage.cobertura.xml --sink elastic --repo backend-api
114
+ ```
115
+
116
+ ### Multi-collect
117
+
118
+ Auto-discover report files in a directory tree:
119
+
120
+ ```bash
121
+ qualink collect --dir=./output --repo myapp --sink elastic
122
+ ```
123
+
124
+ Or use a config file for explicit control:
109
125
 
110
126
  ```bash
111
- qualink collect eslint --input eslint-report.json --sink elastic --repo frontend-mono --category frontend --tags frontend,web --branch main --commit-sha abc123 --pipeline-run-id 987
112
- qualink collect sarif --input analyzers.sarif --sink elastic --repo backend-api --category backend --tags backend,api --branch main --commit-sha def456 --pipeline-run-id 654
113
- qualink collect coverage-dotnet --input coverage.cobertura.xml --sink elastic --repo backend-api --category backend --tags backend,api --branch main --commit-sha def456 --pipeline-run-id 654
127
+ qualink collect --config=qualink.json --repo myapp --sink elastic
128
+ ```
114
129
 
115
- # Pipeline tracking (top-level command, not under collect)
130
+ Config file example (`qualink.json`):
131
+
132
+ ```json
133
+ [
134
+ { "type": "eslint", "input": "packages/*/eslint-report.json" },
135
+ { "type": "coverage-js", "input": "packages/*/coverage-summary.json" },
136
+ { "type": "sarif", "input": "**/*.sarif" }
137
+ ]
138
+ ```
139
+
140
+ Each entry supports optional overrides: `tags`, `category`, `project`, `solution`, `url`.
141
+ See [qualink-config.schema.json](qualink-config.schema.json) for the full schema.
142
+
143
+ Auto-discovery recognizes: `eslint-report.json`, `biome-report.json`, `coverage-summary.json`, `coverage.cobertura.xml`, `cobertura-coverage.xml`, `*.sarif`/`*.sarif.json`, `lhr-*.json` inside `.lighthouseci/`, `junit.xml`, and `TEST-*.xml`.
144
+
145
+ ### Pipeline tracking
146
+
147
+ Top-level command, not under `collect`:
148
+
149
+ ```bash
116
150
  qualink pipeline --status succeeded --sink elastic
117
151
  qualink pipeline --status succeeded --duration 125000 --pipeline-name "Build and Deploy"
118
152
  qualink pipeline --status succeeded --stage-name build --duration 45000
@@ -127,6 +161,7 @@ Collectors:
127
161
  - `coverage-js` (Istanbul/Vitest JSON)
128
162
  - `sarif` (Roslyn or generic SARIF JSON)
129
163
  - `coverage-dotnet` (Cobertura/OpenCover XML)
164
+ - `junit` (JUnit XML)
130
165
 
131
166
  ESLint file-level options (optional):
132
167
 
@@ -138,12 +173,19 @@ Classification metadata (optional):
138
173
  - `--category` for a single broad bucket
139
174
  - `--tags` for flexible multi-label filtering (`comma,separated`)
140
175
 
176
+ Project hierarchy (auto-detected or explicit):
177
+
178
+ - `--solution` groups related projects (auto-detected from `.sln` or workspace root `package.json`)
179
+ - `--project` identifies the individual project (auto-detected from nearest `.csproj` or `package.json`)
180
+
141
181
  Metadata auto-detection:
142
182
 
143
183
  - `repo`: from flag/env, then git origin, then current folder name
144
184
  - `branch`: from flag/env, then git branch
145
185
  - `commit_sha`: from flag/env, then git commit
146
186
  - `pipeline_run_id`: from flag/env, fallback `local-<timestamp>`
187
+ - `project`: from flag/env, then nearest `.csproj`/`package.json`
188
+ - `solution`: from flag/env, then nearest `.sln`/workspace root `package.json`
147
189
 
148
190
 If needed, you can still pass explicit values with `--repo`, `--branch`, `--commit-sha`, `--pipeline-run-id`, `--project`, and `--solution`.
149
191
 
@@ -160,8 +202,8 @@ Dry run mode:
160
202
  Useful env fallbacks:
161
203
 
162
204
  - `QUALINK_REPO`, `QUALINK_CATEGORY`, `QUALINK_TAGS`, `QUALINK_BRANCH`, `QUALINK_COMMIT_SHA`, `QUALINK_PIPELINE_RUN_ID`
163
- - `QUALINK_PACKAGE` (monorepo package name, auto-detected from `PNPM_PACKAGE_NAME`)
164
- - `QUALINK_PROJECT` (backend project identity)
205
+ - `QUALINK_PROJECT` (auto-detected from nearest `.csproj`/`package.json` or `PNPM_PACKAGE_NAME`)
206
+ - `QUALINK_SOLUTION` (auto-detected from `.sln` or workspace root `package.json`)
165
207
  - `QUALINK_PIPELINE_PROVIDER` (auto-detected, fallback: `local`)
166
208
  - `QUALINK_ENVIRONMENT` (default: `ci`)
167
209
  - `QUALINK_SINK` (default: `elastic`)
@@ -2,6 +2,7 @@ export { biomeCommand } from "./biome.js";
2
2
  export { coverageDotnetCommand } from "./coverage-dotnet.js";
3
3
  export { coverageJsCommand } from "./coverage-js.js";
4
4
  export { eslintCommand } from "./eslint.js";
5
+ export { junitCommand } from "./junit.js";
5
6
  export { lighthouseCommand } from "./lighthouse.js";
6
7
  export { metaCommand } from "./meta.js";
7
8
  export { pipelineCommand } from "./pipeline.js";
@@ -2,6 +2,7 @@ export { biomeCommand } from "./biome.js";
2
2
  export { coverageDotnetCommand } from "./coverage-dotnet.js";
3
3
  export { coverageJsCommand } from "./coverage-js.js";
4
4
  export { eslintCommand } from "./eslint.js";
5
+ export { junitCommand } from "./junit.js";
5
6
  export { lighthouseCommand } from "./lighthouse.js";
6
7
  export { metaCommand } from "./meta.js";
7
8
  export { pipelineCommand } from "./pipeline.js";
@@ -0,0 +1,76 @@
1
+ export declare const junitCommand: import("citty").CommandDef<{
2
+ readonly input: {
3
+ readonly type: "string";
4
+ readonly required: true;
5
+ };
6
+ readonly sink: {
7
+ readonly type: "string";
8
+ readonly default: "elastic";
9
+ };
10
+ readonly repo: {
11
+ readonly type: "string";
12
+ };
13
+ readonly category: {
14
+ readonly type: "string";
15
+ };
16
+ readonly tags: {
17
+ readonly type: "string";
18
+ };
19
+ readonly branch: {
20
+ readonly type: "string";
21
+ };
22
+ readonly "commit-sha": {
23
+ readonly type: "string";
24
+ };
25
+ readonly "pipeline-run-id": {
26
+ readonly type: "string";
27
+ };
28
+ readonly "pipeline-provider": {
29
+ readonly type: "string";
30
+ };
31
+ readonly environment: {
32
+ readonly type: "string";
33
+ readonly default: "ci";
34
+ };
35
+ readonly solution: {
36
+ readonly type: "string";
37
+ };
38
+ readonly project: {
39
+ readonly type: "string";
40
+ };
41
+ readonly "collector-version": {
42
+ readonly type: "string";
43
+ };
44
+ readonly "elastic-url": {
45
+ readonly type: "string";
46
+ };
47
+ readonly "elastic-api-key": {
48
+ readonly type: "string";
49
+ };
50
+ readonly "loki-url": {
51
+ readonly type: "string";
52
+ };
53
+ readonly "loki-username": {
54
+ readonly type: "string";
55
+ };
56
+ readonly "loki-password": {
57
+ readonly type: "string";
58
+ };
59
+ readonly "loki-tenant-id": {
60
+ readonly type: "string";
61
+ };
62
+ readonly "retry-max": {
63
+ readonly type: "string";
64
+ };
65
+ readonly "retry-backoff-ms": {
66
+ readonly type: "string";
67
+ };
68
+ readonly "allow-empty": {
69
+ readonly type: "boolean";
70
+ readonly default: false;
71
+ };
72
+ readonly "dry-run": {
73
+ readonly type: "boolean";
74
+ readonly default: false;
75
+ };
76
+ }>;
@@ -0,0 +1,12 @@
1
+ import { collectJunit } from "../../collectors/junit.js";
2
+ import { createCollectorCommand } from "../command-factory.js";
3
+ import { loadTextInput } from "../load-input.js";
4
+ export const junitCommand = createCollectorCommand({
5
+ name: "junit",
6
+ description: "Collect JUnit XML test results and relay them",
7
+ async collect(args, metadata) {
8
+ const input = await loadTextInput(args);
9
+ const documents = collectJunit(input, metadata);
10
+ return { metricType: "junit", documents };
11
+ },
12
+ });
package/dist/cli/index.js CHANGED
@@ -1,6 +1,6 @@
1
1
  import { defineCommand, runMain } from "citty";
2
2
  import { CliError } from "./cli-error.js";
3
- import { biomeCommand, coverageDotnetCommand, coverageJsCommand, eslintCommand, lighthouseCommand, metaCommand, pipelineCommand, sarifCommand, } from "./commands/index.js";
3
+ import { biomeCommand, coverageDotnetCommand, coverageJsCommand, eslintCommand, junitCommand, lighthouseCommand, metaCommand, pipelineCommand, sarifCommand, } from "./commands/index.js";
4
4
  import { commonArgs, isDryRun } from "./common-args.js";
5
5
  import { parseConfig, resolveConfig } from "./multi-collect/config.js";
6
6
  import { discoverFiles } from "./multi-collect/discover.js";
@@ -33,6 +33,7 @@ const collectCommand = defineCommand({
33
33
  "coverage-js": coverageJsCommand,
34
34
  sarif: sarifCommand,
35
35
  "coverage-dotnet": coverageDotnetCommand,
36
+ junit: junitCommand,
36
37
  },
37
38
  async run({ args }) {
38
39
  const parsedArgs = args;
@@ -1,5 +1,5 @@
1
1
  import type { MetricType } from "../../types.js";
2
- export type CollectorKey = Extract<MetricType, "eslint" | "biome" | "coverage-js" | "coverage-dotnet" | "sarif" | "lighthouse">;
2
+ export type CollectorKey = Extract<MetricType, "eslint" | "biome" | "coverage-js" | "coverage-dotnet" | "sarif" | "lighthouse" | "junit">;
3
3
  export declare const COLLECTOR_KEYS: readonly CollectorKey[];
4
4
  export interface FilePattern {
5
5
  /** Match against basename only */
@@ -5,14 +5,19 @@ export const COLLECTOR_KEYS = [
5
5
  "coverage-dotnet",
6
6
  "sarif",
7
7
  "lighthouse",
8
+ "junit",
8
9
  ];
9
10
  export const COLLECTOR_PATTERNS = {
10
11
  eslint: [{ basename: "eslint-report.json" }],
11
12
  biome: [{ basename: "biome-report.json" }],
12
13
  "coverage-js": [{ basename: "coverage-summary.json" }],
13
- "coverage-dotnet": [{ basename: "coverage.cobertura.xml" }],
14
+ "coverage-dotnet": [
15
+ { basename: "coverage.cobertura.xml" },
16
+ { basename: "cobertura-coverage.xml" },
17
+ ],
14
18
  sarif: [{ extensions: [".sarif", ".sarif.json"] }],
15
19
  lighthouse: [{ prefix: "lhr-", extensions: [".json"], parentDir: ".lighthouseci" }],
20
+ junit: [{ basename: "junit.xml" }, { prefix: "TEST-", extensions: [".xml"] }],
16
21
  };
17
22
  export const IGNORED_DIRS = new Set(["node_modules", ".git"]);
18
23
  export function isCollectorKey(value) {
@@ -2,6 +2,7 @@ import { collectBiome } from "../../collectors/biome.js";
2
2
  import { collectCoverageDotnet } from "../../collectors/coverage-dotnet.js";
3
3
  import { collectCoverageJs } from "../../collectors/coverage-js.js";
4
4
  import { collectEslint } from "../../collectors/eslint.js";
5
+ import { collectJunit } from "../../collectors/junit.js";
5
6
  import { collectLighthouse } from "../../collectors/lighthouse.js";
6
7
  import { collectSarif } from "../../collectors/sarif.js";
7
8
  import { readJsonFile, readTextFile } from "../../utils/file.js";
@@ -71,5 +72,10 @@ export async function runCollector(key, filePath, metadata, urlOverride) {
71
72
  const documents = collectLighthouse(input, metadata, url);
72
73
  return { metricType: "lighthouse", documents };
73
74
  }
75
+ case "junit": {
76
+ const input = await readTextFile(filePath);
77
+ const documents = collectJunit(input, metadata);
78
+ return { metricType: "junit", documents };
79
+ }
74
80
  }
75
81
  }
@@ -0,0 +1,2 @@
1
+ import type { CommonMetadata, JunitMetricDocument } from "../types.js";
2
+ export declare function collectJunit(xmlInput: string, metadata: CommonMetadata): JunitMetricDocument[];
@@ -0,0 +1,106 @@
1
+ import { XMLParser } from "fast-xml-parser";
2
+ import { baseDocument } from "../normalize.js";
3
+ import { isRecord } from "../utils/guards.js";
4
+ function readNumber(record, key) {
5
+ const value = record[key];
6
+ if (typeof value === "number") {
7
+ return value;
8
+ }
9
+ if (typeof value === "string") {
10
+ const parsed = Number(value);
11
+ if (!Number.isNaN(parsed)) {
12
+ return parsed;
13
+ }
14
+ }
15
+ return 0;
16
+ }
17
+ function parseSuite(suite) {
18
+ const tests = readNumber(suite, "@_tests");
19
+ const failures = readNumber(suite, "@_failures");
20
+ const errors = readNumber(suite, "@_errors");
21
+ const skipped = readNumber(suite, "@_skipped");
22
+ const rawTime = suite["@_time"];
23
+ const time = typeof rawTime === "number"
24
+ ? rawTime
25
+ : typeof rawTime === "string" && rawTime.length > 0
26
+ ? Number(rawTime)
27
+ : null;
28
+ return {
29
+ tests,
30
+ failures,
31
+ errors,
32
+ skipped,
33
+ time: time !== null && !Number.isNaN(time) ? time : null,
34
+ };
35
+ }
36
+ function toArray(value) {
37
+ if (Array.isArray(value))
38
+ return value;
39
+ if (value !== undefined && value !== null)
40
+ return [value];
41
+ return [];
42
+ }
43
+ export function collectJunit(xmlInput, metadata) {
44
+ const parser = new XMLParser({
45
+ ignoreAttributes: false,
46
+ attributeNamePrefix: "@_",
47
+ parseAttributeValue: true,
48
+ });
49
+ const parsed = parser.parse(xmlInput);
50
+ if (!isRecord(parsed)) {
51
+ throw new Error("JUnit XML could not be parsed");
52
+ }
53
+ let suites;
54
+ if (parsed.testsuites !== undefined) {
55
+ if (isRecord(parsed.testsuites)) {
56
+ const raw = toArray(parsed.testsuites.testsuite);
57
+ suites = raw.filter(isRecord);
58
+ }
59
+ else {
60
+ // Empty <testsuites/> — parsed as empty string
61
+ suites = [];
62
+ }
63
+ }
64
+ else if (isRecord(parsed.testsuite)) {
65
+ suites = [parsed.testsuite];
66
+ }
67
+ else {
68
+ throw new Error("Unrecognized JUnit XML format. Expected <testsuites> or <testsuite> root");
69
+ }
70
+ let totalTests = 0;
71
+ let totalFailures = 0;
72
+ let totalErrors = 0;
73
+ let totalSkipped = 0;
74
+ let totalTime = 0;
75
+ for (const suite of suites) {
76
+ const result = parseSuite(suite);
77
+ totalTests += result.tests;
78
+ totalFailures += result.failures;
79
+ totalErrors += result.errors;
80
+ totalSkipped += result.skipped;
81
+ if (result.time !== null && totalTime !== null) {
82
+ totalTime += result.time;
83
+ }
84
+ else {
85
+ totalTime = null;
86
+ }
87
+ }
88
+ const passed = Math.max(0, totalTests - totalFailures - totalErrors - totalSkipped);
89
+ const durationMs = totalTime !== null ? Math.round(totalTime * 1000) : null;
90
+ const doc = {
91
+ ...baseDocument({
92
+ metricType: "junit",
93
+ tool: "junit",
94
+ languages: [],
95
+ metadata,
96
+ }),
97
+ tests: totalTests,
98
+ passed,
99
+ failures: totalFailures,
100
+ errors: totalErrors,
101
+ skipped: totalSkipped,
102
+ duration_ms: durationMs,
103
+ suites: suites.length,
104
+ };
105
+ return [doc];
106
+ }
@@ -6,6 +6,7 @@ const INDEX_BY_TYPE = {
6
6
  "coverage-js": "codequality-coverage-js",
7
7
  sarif: "codequality-sarif",
8
8
  "coverage-dotnet": "codequality-coverage-dotnet",
9
+ junit: "codequality-junit",
9
10
  meta: "codequality-meta",
10
11
  pipeline: "codequality-pipeline",
11
12
  };
package/dist/types.d.ts CHANGED
@@ -1,6 +1,6 @@
1
1
  export type Language = "js" | "ts" | "csharp" | (string & {});
2
2
  export type Environment = "dev" | "test" | "prod" | "ci";
3
- export type MetricType = "biome" | "eslint" | "lighthouse" | "coverage-js" | "sarif" | "coverage-dotnet" | "meta" | "pipeline";
3
+ export type MetricType = "biome" | "eslint" | "lighthouse" | "coverage-js" | "sarif" | "coverage-dotnet" | "junit" | "meta" | "pipeline";
4
4
  export interface BaseMetricDocument {
5
5
  "@timestamp": string;
6
6
  metric_type: MetricType;
@@ -106,6 +106,16 @@ export interface DotnetCoverageMetricDocument extends CoverageMetricDocument {
106
106
  metric_type: "coverage-dotnet";
107
107
  coverage_format: "cobertura" | "opencover" | (string & {});
108
108
  }
109
+ export interface JunitMetricDocument extends BaseMetricDocument {
110
+ metric_type: "junit";
111
+ tests: number;
112
+ passed: number;
113
+ failures: number;
114
+ errors: number;
115
+ skipped: number;
116
+ duration_ms: number | null;
117
+ suites: number;
118
+ }
109
119
  export interface MetaMetricDocument extends BaseMetricDocument {
110
120
  metric_type: "meta";
111
121
  }
@@ -118,7 +128,7 @@ export interface PipelineMetricDocument extends BaseMetricDocument {
118
128
  start_time: string | null;
119
129
  stage_name: string | null;
120
130
  }
121
- export type NormalizedDocument = BiomeMetricDocument | EslintMetricDocument | LighthouseMetricDocument | CoverageJsMetricDocument | SarifMetricDocument | DotnetCoverageMetricDocument | MetaMetricDocument | PipelineMetricDocument;
131
+ export type NormalizedDocument = BiomeMetricDocument | EslintMetricDocument | LighthouseMetricDocument | CoverageJsMetricDocument | SarifMetricDocument | DotnetCoverageMetricDocument | JunitMetricDocument | MetaMetricDocument | PipelineMetricDocument;
122
132
  export interface CommonMetadata {
123
133
  repo: string;
124
134
  category: string | null;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "qualink",
3
- "version": "0.4.0",
3
+ "version": "0.5.0",
4
4
  "description": "Collect, normalize, and relay code quality metrics from CI",
5
5
  "license": "MIT",
6
6
  "type": "module",