qualink 0.3.0 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. package/README.md +79 -0
  2. package/dist/cli/command-factory.d.ts +1 -1
  3. package/dist/cli/commands/biome.d.ts +1 -1
  4. package/dist/cli/commands/coverage-dotnet.d.ts +1 -1
  5. package/dist/cli/commands/coverage-js.d.ts +1 -1
  6. package/dist/cli/commands/eslint.d.ts +1 -1
  7. package/dist/cli/commands/index.d.ts +1 -0
  8. package/dist/cli/commands/index.js +1 -0
  9. package/dist/cli/commands/lighthouse.d.ts +1 -1
  10. package/dist/cli/commands/meta.d.ts +1 -1
  11. package/dist/cli/commands/pipeline.d.ts +76 -0
  12. package/dist/cli/commands/pipeline.js +35 -0
  13. package/dist/cli/commands/sarif.d.ts +1 -1
  14. package/dist/cli/common-args.d.ts +2 -2
  15. package/dist/cli/common-args.js +1 -1
  16. package/dist/cli/detect-ci.d.ts +2 -0
  17. package/dist/cli/detect-ci.js +47 -0
  18. package/dist/cli/detect-project.js +31 -5
  19. package/dist/cli/detect-solution.d.ts +2 -0
  20. package/dist/cli/detect-solution.js +70 -0
  21. package/dist/cli/index.js +123 -1
  22. package/dist/cli/multi-collect/config.d.ts +23 -0
  23. package/dist/cli/multi-collect/config.js +105 -0
  24. package/dist/cli/multi-collect/discover.d.ts +11 -0
  25. package/dist/cli/multi-collect/discover.js +84 -0
  26. package/dist/cli/multi-collect/patterns.d.ts +16 -0
  27. package/dist/cli/multi-collect/patterns.js +20 -0
  28. package/dist/cli/multi-collect/resolve-metadata.d.ts +17 -0
  29. package/dist/cli/multi-collect/resolve-metadata.js +147 -0
  30. package/dist/cli/multi-collect/run-collector.d.ts +11 -0
  31. package/dist/cli/multi-collect/run-collector.js +75 -0
  32. package/dist/cli/parse-metadata.d.ts +1 -0
  33. package/dist/cli/parse-metadata.js +3 -3
  34. package/dist/collectors/index.d.ts +1 -0
  35. package/dist/collectors/index.js +1 -0
  36. package/dist/collectors/pipeline.d.ts +13 -0
  37. package/dist/collectors/pipeline.js +31 -0
  38. package/dist/normalize.js +1 -1
  39. package/dist/sinks/elastic.js +1 -0
  40. package/dist/types.d.ts +13 -4
  41. package/package.json +1 -1
  42. package/dist/cli/detect-package.d.ts +0 -3
  43. package/dist/cli/detect-package.js +0 -42
package/README.md CHANGED
@@ -26,6 +26,79 @@ Repo, branch, commit SHA, pipeline run ID, and provider are auto-detected from C
26
26
 
27
27
  See the [examples/](examples/) folder for copy-paste snippets for Azure DevOps and GitHub Actions.
28
28
 
29
+ ## Pipeline Tracking
30
+
31
+ Track pipeline execution metrics — which pipelines run, when, for how long, and their outcome.
32
+ Pipelines self-report by calling `qualink pipeline --status <status>` at the end of a run.
33
+
34
+ ### Azure DevOps
35
+
36
+ ```yaml
37
+ steps:
38
+ - script: echo "##vso[task.setvariable variable=PIPELINE_START]$(date +%s%3N)"
39
+ displayName: Record start time
40
+
41
+ # ... existing build/test steps ...
42
+
43
+ - script: |
44
+ END_TIME=$(date +%s%3N)
45
+ DURATION=$(( END_TIME - $(PIPELINE_START) ))
46
+ npx qualink pipeline \
47
+ --status "$(Agent.JobStatus)" \
48
+ --duration "$DURATION" \
49
+ --sink elastic
50
+ displayName: Report pipeline metrics
51
+ condition: always()
52
+ env:
53
+ ELASTIC_URL: $(ELASTIC_URL)
54
+ ELASTIC_API_KEY: $(ELASTIC_API_KEY)
55
+ ```
56
+
57
+ Auto-detected from Azure DevOps env: pipeline name (`BUILD_DEFINITIONNAME`), trigger (`BUILD_REASON`), repo, branch, commit, run ID, provider.
58
+
59
+ ### GitHub Actions
60
+
61
+ ```yaml
62
+ jobs:
63
+ build:
64
+ runs-on: ubuntu-latest
65
+ steps:
66
+ - name: Record start time
67
+ run: echo "PIPELINE_START=$(date +%s%3N)" >> "$GITHUB_ENV"
68
+
69
+ # ... existing build/test steps ...
70
+
71
+ - name: Report pipeline metrics
72
+ if: always()
73
+ run: |
74
+ END_TIME=$(date +%s%3N)
75
+ DURATION=$(( END_TIME - PIPELINE_START ))
76
+ npx qualink pipeline \
77
+ --status "${{ job.status }}" \
78
+ --duration "$DURATION" \
79
+ --sink elastic
80
+ env:
81
+ ELASTIC_URL: ${{ secrets.ELASTIC_URL }}
82
+ ELASTIC_API_KEY: ${{ secrets.ELASTIC_API_KEY }}
83
+ ```
84
+
85
+ Auto-detected from GitHub env: pipeline name (`GITHUB_WORKFLOW`), trigger (`GITHUB_EVENT_NAME`), repo, branch, commit, run ID, provider.
86
+
87
+ ### Per-stage reporting
88
+
89
+ For pipelines with distinct stages, call qualink once per stage with `--stage-name`:
90
+
91
+ ```yaml
92
+ # Azure DevOps example
93
+ - script: |
94
+ npx qualink pipeline --status "$(Agent.JobStatus)" --stage-name build --duration "$BUILD_DURATION"
95
+ condition: always()
96
+
97
+ - script: |
98
+ npx qualink pipeline --status "$(Agent.JobStatus)" --stage-name deploy --duration "$DEPLOY_DURATION"
99
+ condition: always()
100
+ ```
101
+
29
102
  ## CLI usage
30
103
 
31
104
  ```bash
@@ -38,6 +111,12 @@ Examples:
38
111
  qualink collect eslint --input eslint-report.json --sink elastic --repo frontend-mono --category frontend --tags frontend,web --branch main --commit-sha abc123 --pipeline-run-id 987
39
112
  qualink collect sarif --input analyzers.sarif --sink elastic --repo backend-api --category backend --tags backend,api --branch main --commit-sha def456 --pipeline-run-id 654
40
113
  qualink collect coverage-dotnet --input coverage.cobertura.xml --sink elastic --repo backend-api --category backend --tags backend,api --branch main --commit-sha def456 --pipeline-run-id 654
114
+
115
+ # Pipeline tracking (top-level command, not under collect)
116
+ qualink pipeline --status succeeded --sink elastic
117
+ qualink pipeline --status succeeded --duration 125000 --pipeline-name "Build and Deploy"
118
+ qualink pipeline --status succeeded --stage-name build --duration 45000
119
+ qualink pipeline --status failed --dry-run
41
120
  ```
42
121
 
43
122
  Collectors:
@@ -44,7 +44,7 @@ export declare function createCollectorCommand<TExtra extends Record<string, unk
44
44
  readonly type: "string";
45
45
  readonly default: "ci";
46
46
  };
47
- readonly package: {
47
+ readonly solution: {
48
48
  readonly type: "string";
49
49
  };
50
50
  readonly project: {
@@ -32,7 +32,7 @@ export declare const biomeCommand: import("citty").CommandDef<{
32
32
  readonly type: "string";
33
33
  readonly default: "ci";
34
34
  };
35
- readonly package: {
35
+ readonly solution: {
36
36
  readonly type: "string";
37
37
  };
38
38
  readonly project: {
@@ -32,7 +32,7 @@ export declare const coverageDotnetCommand: import("citty").CommandDef<{
32
32
  readonly type: "string";
33
33
  readonly default: "ci";
34
34
  };
35
- readonly package: {
35
+ readonly solution: {
36
36
  readonly type: "string";
37
37
  };
38
38
  readonly project: {
@@ -32,7 +32,7 @@ export declare const coverageJsCommand: import("citty").CommandDef<{
32
32
  readonly type: "string";
33
33
  readonly default: "ci";
34
34
  };
35
- readonly package: {
35
+ readonly solution: {
36
36
  readonly type: "string";
37
37
  };
38
38
  readonly project: {
@@ -32,7 +32,7 @@ export declare const eslintCommand: import("citty").CommandDef<{
32
32
  readonly type: "string";
33
33
  readonly default: "ci";
34
34
  };
35
- readonly package: {
35
+ readonly solution: {
36
36
  readonly type: "string";
37
37
  };
38
38
  readonly project: {
@@ -4,4 +4,5 @@ export { coverageJsCommand } from "./coverage-js.js";
4
4
  export { eslintCommand } from "./eslint.js";
5
5
  export { lighthouseCommand } from "./lighthouse.js";
6
6
  export { metaCommand } from "./meta.js";
7
+ export { pipelineCommand } from "./pipeline.js";
7
8
  export { sarifCommand } from "./sarif.js";
@@ -4,4 +4,5 @@ export { coverageJsCommand } from "./coverage-js.js";
4
4
  export { eslintCommand } from "./eslint.js";
5
5
  export { lighthouseCommand } from "./lighthouse.js";
6
6
  export { metaCommand } from "./meta.js";
7
+ export { pipelineCommand } from "./pipeline.js";
7
8
  export { sarifCommand } from "./sarif.js";
@@ -32,7 +32,7 @@ export declare const lighthouseCommand: import("citty").CommandDef<{
32
32
  readonly type: "string";
33
33
  readonly default: "ci";
34
34
  };
35
- readonly package: {
35
+ readonly solution: {
36
36
  readonly type: "string";
37
37
  };
38
38
  readonly project: {
@@ -32,7 +32,7 @@ export declare const metaCommand: import("citty").CommandDef<{
32
32
  readonly type: "string";
33
33
  readonly default: "ci";
34
34
  };
35
- readonly package: {
35
+ readonly solution: {
36
36
  readonly type: "string";
37
37
  };
38
38
  readonly project: {
@@ -0,0 +1,76 @@
1
+ export declare const pipelineCommand: import("citty").CommandDef<{
2
+ readonly input: {
3
+ readonly type: "string";
4
+ readonly required: true;
5
+ };
6
+ readonly sink: {
7
+ readonly type: "string";
8
+ readonly default: "elastic";
9
+ };
10
+ readonly repo: {
11
+ readonly type: "string";
12
+ };
13
+ readonly category: {
14
+ readonly type: "string";
15
+ };
16
+ readonly tags: {
17
+ readonly type: "string";
18
+ };
19
+ readonly branch: {
20
+ readonly type: "string";
21
+ };
22
+ readonly "commit-sha": {
23
+ readonly type: "string";
24
+ };
25
+ readonly "pipeline-run-id": {
26
+ readonly type: "string";
27
+ };
28
+ readonly "pipeline-provider": {
29
+ readonly type: "string";
30
+ };
31
+ readonly environment: {
32
+ readonly type: "string";
33
+ readonly default: "ci";
34
+ };
35
+ readonly solution: {
36
+ readonly type: "string";
37
+ };
38
+ readonly project: {
39
+ readonly type: "string";
40
+ };
41
+ readonly "collector-version": {
42
+ readonly type: "string";
43
+ };
44
+ readonly "elastic-url": {
45
+ readonly type: "string";
46
+ };
47
+ readonly "elastic-api-key": {
48
+ readonly type: "string";
49
+ };
50
+ readonly "loki-url": {
51
+ readonly type: "string";
52
+ };
53
+ readonly "loki-username": {
54
+ readonly type: "string";
55
+ };
56
+ readonly "loki-password": {
57
+ readonly type: "string";
58
+ };
59
+ readonly "loki-tenant-id": {
60
+ readonly type: "string";
61
+ };
62
+ readonly "retry-max": {
63
+ readonly type: "string";
64
+ };
65
+ readonly "retry-backoff-ms": {
66
+ readonly type: "string";
67
+ };
68
+ readonly "allow-empty": {
69
+ readonly type: "boolean";
70
+ readonly default: false;
71
+ };
72
+ readonly "dry-run": {
73
+ readonly type: "boolean";
74
+ readonly default: false;
75
+ };
76
+ }>;
@@ -0,0 +1,35 @@
1
+ import { collectPipeline } from "../../collectors/pipeline.js";
2
+ import { createCollectorCommand } from "../command-factory.js";
3
+ import { detectPipelineName, detectPipelineTrigger } from "../detect-ci.js";
4
+ export const pipelineCommand = createCollectorCommand({
5
+ name: "pipeline",
6
+ description: "Report pipeline execution metrics",
7
+ extraArgs: {
8
+ input: { type: "string" },
9
+ sink: { type: "string", default: "elastic" },
10
+ status: { type: "string", required: true },
11
+ "pipeline-name": { type: "string" },
12
+ trigger: { type: "string" },
13
+ duration: { type: "string" },
14
+ "start-time": { type: "string" },
15
+ "stage-name": { type: "string" },
16
+ },
17
+ async collect(args, metadata) {
18
+ const status = args.status;
19
+ const pipelineName = (args.pipelineName ?? args["pipeline-name"]);
20
+ const trigger = args.trigger;
21
+ const rawDuration = args.duration;
22
+ const startTime = (args.startTime ?? args["start-time"]);
23
+ const stageName = (args.stageName ?? args["stage-name"]);
24
+ const durationMs = rawDuration ? Number(rawDuration) : null;
25
+ const documents = collectPipeline({
26
+ status,
27
+ pipelineName: pipelineName ?? detectPipelineName(args),
28
+ trigger: trigger ?? detectPipelineTrigger(args),
29
+ durationMs: durationMs !== null && Number.isFinite(durationMs) ? durationMs : null,
30
+ startTime: startTime ?? null,
31
+ stageName: stageName ?? null,
32
+ }, metadata);
33
+ return { metricType: "pipeline", documents };
34
+ },
35
+ });
@@ -32,7 +32,7 @@ export declare const sarifCommand: import("citty").CommandDef<{
32
32
  readonly type: "string";
33
33
  readonly default: "ci";
34
34
  };
35
- readonly package: {
35
+ readonly solution: {
36
36
  readonly type: "string";
37
37
  };
38
38
  readonly project: {
@@ -10,7 +10,7 @@ export interface CommonArgs {
10
10
  pipelineRunId?: unknown;
11
11
  pipelineProvider?: unknown;
12
12
  environment?: unknown;
13
- package?: unknown;
13
+ solution?: unknown;
14
14
  project?: unknown;
15
15
  collectorVersion?: unknown;
16
16
  elasticUrl?: unknown;
@@ -60,7 +60,7 @@ export declare const commonArgs: {
60
60
  readonly type: "string";
61
61
  readonly default: "ci";
62
62
  };
63
- readonly package: {
63
+ readonly solution: {
64
64
  readonly type: "string";
65
65
  };
66
66
  readonly project: {
@@ -32,7 +32,7 @@ export const commonArgs = {
32
32
  "pipeline-run-id": { type: "string" },
33
33
  "pipeline-provider": { type: "string" },
34
34
  environment: { type: "string", default: "ci" },
35
- package: { type: "string" },
35
+ solution: { type: "string" },
36
36
  project: { type: "string" },
37
37
  "collector-version": { type: "string" },
38
38
  "elastic-url": { type: "string" },
@@ -2,4 +2,6 @@ import { type CommonArgs } from "./common-args.js";
2
2
  export declare function detectBranch(args: CommonArgs): string;
3
3
  export declare function detectCommitSha(args: CommonArgs): string;
4
4
  export declare function detectPipelineRunId(args: CommonArgs): string;
5
+ export declare function detectPipelineName(args: CommonArgs): string;
6
+ export declare function detectPipelineTrigger(args: CommonArgs): string;
5
7
  export declare function detectPipelineProvider(args: CommonArgs): string;
@@ -23,6 +23,53 @@ export function detectPipelineRunId(args) {
23
23
  process.env.CI_PIPELINE_ID ??
24
24
  `local-${Date.now().toString()}`);
25
25
  }
26
+ export function detectPipelineName(args) {
27
+ return (envOrArg(argValue(args, "pipelineName", "pipeline-name"), "QUALINK_PIPELINE_NAME") ??
28
+ process.env.BUILD_DEFINITIONNAME ??
29
+ process.env.GITHUB_WORKFLOW ??
30
+ process.env.CI_PIPELINE_NAME ??
31
+ process.env.CI_PROJECT_NAME ??
32
+ "unknown");
33
+ }
34
+ const AZURE_TRIGGER_MAP = {
35
+ IndividualCI: "push",
36
+ BatchedCI: "push",
37
+ PullRequest: "pr",
38
+ Manual: "manual",
39
+ Schedule: "schedule",
40
+ };
41
+ const GITHUB_TRIGGER_MAP = {
42
+ push: "push",
43
+ pull_request: "pr",
44
+ workflow_dispatch: "manual",
45
+ schedule: "schedule",
46
+ };
47
+ const GITLAB_TRIGGER_MAP = {
48
+ push: "push",
49
+ merge_request_event: "pr",
50
+ web: "manual",
51
+ schedule: "schedule",
52
+ api: "api",
53
+ };
54
+ export function detectPipelineTrigger(args) {
55
+ const explicit = envOrArg(argValue(args, "trigger"), "QUALINK_PIPELINE_TRIGGER");
56
+ if (explicit) {
57
+ return explicit;
58
+ }
59
+ const azureReason = process.env.BUILD_REASON;
60
+ if (azureReason) {
61
+ return AZURE_TRIGGER_MAP[azureReason] ?? azureReason;
62
+ }
63
+ const githubEvent = process.env.GITHUB_EVENT_NAME;
64
+ if (githubEvent) {
65
+ return GITHUB_TRIGGER_MAP[githubEvent] ?? githubEvent;
66
+ }
67
+ const gitlabSource = process.env.CI_PIPELINE_SOURCE;
68
+ if (gitlabSource) {
69
+ return GITLAB_TRIGGER_MAP[gitlabSource] ?? gitlabSource;
70
+ }
71
+ return "unknown";
72
+ }
26
73
  export function detectPipelineProvider(args) {
27
74
  const explicit = envOrArg(argValue(args, "pipelineProvider", "pipeline-provider"), "QUALINK_PIPELINE_PROVIDER");
28
75
  if (explicit) {
@@ -1,7 +1,14 @@
1
- import { readdirSync } from "node:fs";
2
- import { basename } from "node:path";
1
+ import { existsSync, readdirSync, readFileSync } from "node:fs";
2
+ import { basename, resolve } from "node:path";
3
3
  import { argValue, envOrArg } from "./common-args.js";
4
- import { isInsideWorkspacePackage } from "./detect-package.js";
4
+ import { runGit } from "./git.js";
5
+ function isInsideWorkspacePackage() {
6
+ const gitRoot = runGit(["rev-parse", "--show-toplevel"]);
7
+ if (!gitRoot) {
8
+ return false;
9
+ }
10
+ return resolve(".") !== resolve(gitRoot);
11
+ }
5
12
  function readCsprojName() {
6
13
  try {
7
14
  const files = readdirSync(".");
@@ -15,14 +22,33 @@ function readCsprojName() {
15
22
  }
16
23
  return undefined;
17
24
  }
25
+ function readPackageJsonName() {
26
+ try {
27
+ const pkgPath = resolve("package.json");
28
+ if (!existsSync(pkgPath)) {
29
+ return undefined;
30
+ }
31
+ const raw = JSON.parse(readFileSync(pkgPath, "utf-8"));
32
+ if (typeof raw === "object" && raw !== null && "name" in raw && typeof raw.name === "string") {
33
+ return raw.name;
34
+ }
35
+ }
36
+ catch {
37
+ // ignore
38
+ }
39
+ return undefined;
40
+ }
18
41
  export function detectProjectName(args) {
19
42
  const explicit = envOrArg(argValue(args, "project"), "QUALINK_PROJECT");
20
43
  if (explicit) {
21
44
  return explicit;
22
45
  }
23
- // Auto-detect from .csproj when running inside a project subdirectory
46
+ const pnpmName = process.env.PNPM_PACKAGE_NAME;
47
+ if (pnpmName && pnpmName.trim().length > 0) {
48
+ return pnpmName;
49
+ }
24
50
  if (isInsideWorkspacePackage()) {
25
- return readCsprojName();
51
+ return readCsprojName() ?? readPackageJsonName();
26
52
  }
27
53
  return undefined;
28
54
  }
@@ -0,0 +1,2 @@
1
+ import { type CommonArgs } from "./common-args.js";
2
+ export declare function detectSolution(args: CommonArgs): string | undefined;
@@ -0,0 +1,70 @@
1
+ import { existsSync, readdirSync, readFileSync } from "node:fs";
2
+ import { basename, dirname, join, resolve } from "node:path";
3
+ import { argValue, envOrArg } from "./common-args.js";
4
+ import { runGit } from "./git.js";
5
+ function findGitRoot() {
6
+ return runGit(["rev-parse", "--show-toplevel"]);
7
+ }
8
+ /**
9
+ * Walk up from CWD to git root looking for a .sln file.
10
+ */
11
+ function findSlnName() {
12
+ const gitRoot = findGitRoot();
13
+ if (!gitRoot) {
14
+ return undefined;
15
+ }
16
+ let current = resolve(".");
17
+ const boundary = resolve(gitRoot);
18
+ while (current.startsWith(boundary)) {
19
+ try {
20
+ const files = readdirSync(current);
21
+ const sln = files.find((f) => f.endsWith(".sln"));
22
+ if (sln) {
23
+ return basename(sln, ".sln");
24
+ }
25
+ }
26
+ catch {
27
+ // permission error, keep walking
28
+ }
29
+ const parent = dirname(current);
30
+ if (parent === current)
31
+ break;
32
+ current = parent;
33
+ }
34
+ return undefined;
35
+ }
36
+ /**
37
+ * If CWD is inside a workspace subdirectory, read the git root's
38
+ * package.json name as the workspace root (solution).
39
+ */
40
+ function findWorkspaceRootName() {
41
+ const gitRoot = findGitRoot();
42
+ if (!gitRoot) {
43
+ return undefined;
44
+ }
45
+ // Only counts as workspace if CWD != git root
46
+ if (resolve(".") === resolve(gitRoot)) {
47
+ return undefined;
48
+ }
49
+ try {
50
+ const pkgPath = join(gitRoot, "package.json");
51
+ if (!existsSync(pkgPath)) {
52
+ return undefined;
53
+ }
54
+ const raw = JSON.parse(readFileSync(pkgPath, "utf-8"));
55
+ if (typeof raw === "object" && raw !== null && "name" in raw && typeof raw.name === "string") {
56
+ return raw.name;
57
+ }
58
+ }
59
+ catch {
60
+ // ignore
61
+ }
62
+ return undefined;
63
+ }
64
+ export function detectSolution(args) {
65
+ const explicit = envOrArg(argValue(args, "solution"), "QUALINK_SOLUTION");
66
+ if (explicit) {
67
+ return explicit;
68
+ }
69
+ return findSlnName() ?? findWorkspaceRootName();
70
+ }
package/dist/cli/index.js CHANGED
@@ -1,11 +1,31 @@
1
1
  import { defineCommand, runMain } from "citty";
2
2
  import { CliError } from "./cli-error.js";
3
- import { biomeCommand, coverageDotnetCommand, coverageJsCommand, eslintCommand, lighthouseCommand, metaCommand, sarifCommand, } from "./commands/index.js";
3
+ import { biomeCommand, coverageDotnetCommand, coverageJsCommand, eslintCommand, lighthouseCommand, metaCommand, pipelineCommand, sarifCommand, } from "./commands/index.js";
4
+ import { commonArgs, isDryRun } from "./common-args.js";
5
+ import { parseConfig, resolveConfig } from "./multi-collect/config.js";
6
+ import { discoverFiles } from "./multi-collect/discover.js";
7
+ import { mergeMetadata, resolveFileMetadata, } from "./multi-collect/resolve-metadata.js";
8
+ import { runCollector } from "./multi-collect/run-collector.js";
9
+ import { parseCommonMetadata } from "./parse-metadata.js";
10
+ import { sendToSink } from "./send-to-sink.js";
11
+ // Common args without `input` (required by single-collector subcommands but not multi-collect)
12
+ const { input: _input, ...multiCollectBaseArgs } = commonArgs;
4
13
  const collectCommand = defineCommand({
5
14
  meta: {
6
15
  name: "collect",
7
16
  description: "Collect quality metrics from a specific collector",
8
17
  },
18
+ args: {
19
+ ...multiCollectBaseArgs,
20
+ dir: {
21
+ type: "string",
22
+ description: "Auto-discover report files under a directory",
23
+ },
24
+ config: {
25
+ type: "string",
26
+ description: "Config file path or inline JSON for multi-collection",
27
+ },
28
+ },
9
29
  subCommands: {
10
30
  biome: biomeCommand,
11
31
  eslint: eslintCommand,
@@ -14,7 +34,108 @@ const collectCommand = defineCommand({
14
34
  sarif: sarifCommand,
15
35
  "coverage-dotnet": coverageDotnetCommand,
16
36
  },
37
+ async run({ args }) {
38
+ const parsedArgs = args;
39
+ const dirValue = args.dir;
40
+ const configValue = args.config;
41
+ if (typeof dirValue === "string" && typeof configValue === "string") {
42
+ throw new CliError("--dir and --config are mutually exclusive", 2);
43
+ }
44
+ if (typeof dirValue === "string") {
45
+ await runDirMode(dirValue, parsedArgs);
46
+ }
47
+ else if (typeof configValue === "string") {
48
+ await runConfigMode(configValue, parsedArgs);
49
+ }
50
+ // If neither --dir nor --config, citty will show help for subcommands
51
+ },
17
52
  });
53
+ async function runDirMode(dir, args) {
54
+ const metadata = parseCommonMetadata(args);
55
+ const discovered = await discoverFiles(dir);
56
+ const accumulated = new Map();
57
+ const counts = new Map();
58
+ for (const [collectorKey, files] of discovered) {
59
+ for (const filePath of files) {
60
+ try {
61
+ const fileOverrides = resolveFileMetadata(filePath, collectorKey);
62
+ const merged = mergeMetadata(metadata, fileOverrides);
63
+ const output = await runCollector(collectorKey, filePath, merged);
64
+ accumulate(accumulated, output);
65
+ counts.set(collectorKey, (counts.get(collectorKey) ?? 0) + output.documents.length);
66
+ }
67
+ catch (error) {
68
+ const msg = error instanceof Error ? error.message : String(error);
69
+ process.stderr.write(`warning: skipping ${filePath} (${collectorKey}): ${msg}\n`);
70
+ }
71
+ }
72
+ }
73
+ await sendAll(accumulated, args);
74
+ printSummary(counts, args);
75
+ }
76
+ async function runConfigMode(configValue, args) {
77
+ const metadata = parseCommonMetadata(args);
78
+ const entries = await parseConfig(configValue);
79
+ const resolved = await resolveConfig(entries, ".");
80
+ const accumulated = new Map();
81
+ const counts = new Map();
82
+ for (const entry of resolved) {
83
+ const configOverrides = {};
84
+ if (entry.tags !== undefined)
85
+ configOverrides.tags = entry.tags;
86
+ if (entry.category !== undefined)
87
+ configOverrides.category = entry.category;
88
+ if (entry.project !== undefined)
89
+ configOverrides.projectName = entry.project;
90
+ if (entry.solution !== undefined)
91
+ configOverrides.solution = entry.solution;
92
+ for (const filePath of entry.files) {
93
+ try {
94
+ const fileOverrides = resolveFileMetadata(filePath, entry.type);
95
+ const merged = mergeMetadata(metadata, fileOverrides, configOverrides);
96
+ const output = await runCollector(entry.type, filePath, merged, entry.url);
97
+ accumulate(accumulated, output);
98
+ counts.set(entry.type, (counts.get(entry.type) ?? 0) + output.documents.length);
99
+ }
100
+ catch (error) {
101
+ const msg = error instanceof Error ? error.message : String(error);
102
+ process.stderr.write(`warning: skipping ${filePath} (${entry.type}): ${msg}\n`);
103
+ }
104
+ }
105
+ }
106
+ await sendAll(accumulated, args);
107
+ printSummary(counts, args);
108
+ }
109
+ function accumulate(map, output) {
110
+ const existing = map.get(output.metricType);
111
+ if (existing) {
112
+ existing.push(...output.documents);
113
+ }
114
+ else {
115
+ map.set(output.metricType, [...output.documents]);
116
+ }
117
+ }
118
+ async function sendAll(accumulated, args) {
119
+ for (const [metricType, documents] of accumulated) {
120
+ if (documents.length > 0) {
121
+ await sendToSink(metricType, { ...args, "allow-empty": true }, documents);
122
+ }
123
+ }
124
+ }
125
+ function printSummary(counts, args) {
126
+ const verb = isDryRun(args) ? "dry-run" : "collected";
127
+ const parts = [];
128
+ let total = 0;
129
+ for (const [key, count] of counts) {
130
+ parts.push(`${count} ${key}`);
131
+ total += count;
132
+ }
133
+ if (total === 0) {
134
+ process.stderr.write("warning: no report files found\n");
135
+ return;
136
+ }
137
+ process.stdout.write(`${verb}: ${parts.join(", ")}\n`);
138
+ }
18
139
  const main = defineCommand({
19
140
  meta: {
20
141
  name: "qualink",
@@ -24,6 +145,7 @@ const main = defineCommand({
24
145
  subCommands: {
25
146
  collect: collectCommand,
26
147
  meta: metaCommand,
148
+ pipeline: pipelineCommand,
27
149
  },
28
150
  });
29
151
  runMain(main).catch((error) => {