qualink 0.3.0 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. package/README.md +127 -6
  2. package/dist/cli/command-factory.d.ts +1 -1
  3. package/dist/cli/commands/biome.d.ts +1 -1
  4. package/dist/cli/commands/coverage-dotnet.d.ts +1 -1
  5. package/dist/cli/commands/coverage-js.d.ts +1 -1
  6. package/dist/cli/commands/eslint.d.ts +1 -1
  7. package/dist/cli/commands/index.d.ts +2 -0
  8. package/dist/cli/commands/index.js +2 -0
  9. package/dist/cli/commands/junit.d.ts +76 -0
  10. package/dist/cli/commands/junit.js +12 -0
  11. package/dist/cli/commands/lighthouse.d.ts +1 -1
  12. package/dist/cli/commands/meta.d.ts +1 -1
  13. package/dist/cli/commands/pipeline.d.ts +76 -0
  14. package/dist/cli/commands/pipeline.js +35 -0
  15. package/dist/cli/commands/sarif.d.ts +1 -1
  16. package/dist/cli/common-args.d.ts +2 -2
  17. package/dist/cli/common-args.js +1 -1
  18. package/dist/cli/detect-ci.d.ts +2 -0
  19. package/dist/cli/detect-ci.js +47 -0
  20. package/dist/cli/detect-project.js +31 -5
  21. package/dist/cli/detect-solution.d.ts +2 -0
  22. package/dist/cli/detect-solution.js +70 -0
  23. package/dist/cli/index.js +124 -1
  24. package/dist/cli/multi-collect/config.d.ts +23 -0
  25. package/dist/cli/multi-collect/config.js +105 -0
  26. package/dist/cli/multi-collect/discover.d.ts +11 -0
  27. package/dist/cli/multi-collect/discover.js +84 -0
  28. package/dist/cli/multi-collect/patterns.d.ts +16 -0
  29. package/dist/cli/multi-collect/patterns.js +25 -0
  30. package/dist/cli/multi-collect/resolve-metadata.d.ts +17 -0
  31. package/dist/cli/multi-collect/resolve-metadata.js +147 -0
  32. package/dist/cli/multi-collect/run-collector.d.ts +11 -0
  33. package/dist/cli/multi-collect/run-collector.js +81 -0
  34. package/dist/cli/parse-metadata.d.ts +1 -0
  35. package/dist/cli/parse-metadata.js +3 -3
  36. package/dist/collectors/index.d.ts +1 -0
  37. package/dist/collectors/index.js +1 -0
  38. package/dist/collectors/junit.d.ts +2 -0
  39. package/dist/collectors/junit.js +106 -0
  40. package/dist/collectors/pipeline.d.ts +13 -0
  41. package/dist/collectors/pipeline.js +31 -0
  42. package/dist/normalize.js +1 -1
  43. package/dist/sinks/elastic.js +2 -0
  44. package/dist/types.d.ts +23 -4
  45. package/package.json +1 -1
  46. package/dist/cli/detect-package.d.ts +0 -3
  47. package/dist/cli/detect-package.js +0 -42
package/README.md CHANGED
@@ -26,18 +26,131 @@ Repo, branch, commit SHA, pipeline run ID, and provider are auto-detected from C
26
26
 
27
27
  See the [examples/](examples/) folder for copy-paste snippets for Azure DevOps and GitHub Actions.
28
28
 
29
+ ## Pipeline Tracking
30
+
31
+ Track pipeline execution metrics — which pipelines run, when, for how long, and their outcome.
32
+ Pipelines self-report by calling `qualink pipeline --status <status>` at the end of a run.
33
+
34
+ ### Azure DevOps
35
+
36
+ ```yaml
37
+ steps:
38
+ - script: echo "##vso[task.setvariable variable=PIPELINE_START]$(date +%s%3N)"
39
+ displayName: Record start time
40
+
41
+ # ... existing build/test steps ...
42
+
43
+ - script: |
44
+ END_TIME=$(date +%s%3N)
45
+ DURATION=$(( END_TIME - $(PIPELINE_START) ))
46
+ npx qualink pipeline \
47
+ --status "$(Agent.JobStatus)" \
48
+ --duration "$DURATION" \
49
+ --sink elastic
50
+ displayName: Report pipeline metrics
51
+ condition: always()
52
+ env:
53
+ ELASTIC_URL: $(ELASTIC_URL)
54
+ ELASTIC_API_KEY: $(ELASTIC_API_KEY)
55
+ ```
56
+
57
+ Auto-detected from Azure DevOps env: pipeline name (`BUILD_DEFINITIONNAME`), trigger (`BUILD_REASON`), repo, branch, commit, run ID, provider.
58
+
59
+ ### GitHub Actions
60
+
61
+ ```yaml
62
+ jobs:
63
+ build:
64
+ runs-on: ubuntu-latest
65
+ steps:
66
+ - name: Record start time
67
+ run: echo "PIPELINE_START=$(date +%s%3N)" >> "$GITHUB_ENV"
68
+
69
+ # ... existing build/test steps ...
70
+
71
+ - name: Report pipeline metrics
72
+ if: always()
73
+ run: |
74
+ END_TIME=$(date +%s%3N)
75
+ DURATION=$(( END_TIME - PIPELINE_START ))
76
+ npx qualink pipeline \
77
+ --status "${{ job.status }}" \
78
+ --duration "$DURATION" \
79
+ --sink elastic
80
+ env:
81
+ ELASTIC_URL: ${{ secrets.ELASTIC_URL }}
82
+ ELASTIC_API_KEY: ${{ secrets.ELASTIC_API_KEY }}
83
+ ```
84
+
85
+ Auto-detected from GitHub env: pipeline name (`GITHUB_WORKFLOW`), trigger (`GITHUB_EVENT_NAME`), repo, branch, commit, run ID, provider.
86
+
87
+ ### Per-stage reporting
88
+
89
+ For pipelines with distinct stages, call qualink once per stage with `--stage-name`:
90
+
91
+ ```yaml
92
+ # Azure DevOps example
93
+ - script: |
94
+ npx qualink pipeline --status "$(Agent.JobStatus)" --stage-name build --duration "$BUILD_DURATION"
95
+ condition: always()
96
+
97
+ - script: |
98
+ npx qualink pipeline --status "$(Agent.JobStatus)" --stage-name deploy --duration "$DEPLOY_DURATION"
99
+ condition: always()
100
+ ```
101
+
29
102
  ## CLI usage
30
103
 
104
+ ### Single collector
105
+
31
106
  ```bash
32
107
  qualink collect <collector> --input <path> --sink elastic [flags]
33
108
  ```
34
109
 
35
- Examples:
110
+ ```bash
111
+ qualink collect eslint --input eslint-report.json --sink elastic --repo frontend-mono --category frontend --tags frontend,web
112
+ qualink collect sarif --input analyzers.sarif --sink elastic --repo backend-api --category backend --tags backend,api
113
+ qualink collect coverage-dotnet --input coverage.cobertura.xml --sink elastic --repo backend-api
114
+ ```
115
+
116
+ ### Multi-collect
117
+
118
+ Auto-discover report files in a directory tree:
36
119
 
37
120
  ```bash
38
- qualink collect eslint --input eslint-report.json --sink elastic --repo frontend-mono --category frontend --tags frontend,web --branch main --commit-sha abc123 --pipeline-run-id 987
39
- qualink collect sarif --input analyzers.sarif --sink elastic --repo backend-api --category backend --tags backend,api --branch main --commit-sha def456 --pipeline-run-id 654
40
- qualink collect coverage-dotnet --input coverage.cobertura.xml --sink elastic --repo backend-api --category backend --tags backend,api --branch main --commit-sha def456 --pipeline-run-id 654
121
+ qualink collect --dir=./output --repo myapp --sink elastic
122
+ ```
123
+
124
+ Or use a config file for explicit control:
125
+
126
+ ```bash
127
+ qualink collect --config=qualink.json --repo myapp --sink elastic
128
+ ```
129
+
130
+ Config file example (`qualink.json`):
131
+
132
+ ```json
133
+ [
134
+ { "type": "eslint", "input": "packages/*/eslint-report.json" },
135
+ { "type": "coverage-js", "input": "packages/*/coverage-summary.json" },
136
+ { "type": "sarif", "input": "**/*.sarif" }
137
+ ]
138
+ ```
139
+
140
+ Each entry supports optional overrides: `tags`, `category`, `project`, `solution`, `url`.
141
+ See [qualink-config.schema.json](qualink-config.schema.json) for the full schema.
142
+
143
+ Auto-discovery recognizes: `eslint-report.json`, `biome-report.json`, `coverage-summary.json`, `coverage.cobertura.xml`, `*.sarif`/`*.sarif.json`, `lhr-*.json` inside `.lighthouseci/`, `junit.xml`, and `TEST-*.xml`.
144
+
145
+ ### Pipeline tracking
146
+
147
+ Top-level command, not under `collect`:
148
+
149
+ ```bash
150
+ qualink pipeline --status succeeded --sink elastic
151
+ qualink pipeline --status succeeded --duration 125000 --pipeline-name "Build and Deploy"
152
+ qualink pipeline --status succeeded --stage-name build --duration 45000
153
+ qualink pipeline --status failed --dry-run
41
154
  ```
42
155
 
43
156
  Collectors:
@@ -48,6 +161,7 @@ Collectors:
48
161
  - `coverage-js` (Istanbul/Vitest JSON)
49
162
  - `sarif` (Roslyn or generic SARIF JSON)
50
163
  - `coverage-dotnet` (Cobertura/OpenCover XML)
164
+ - `junit` (JUnit XML)
51
165
 
52
166
  ESLint file-level options (optional):
53
167
 
@@ -59,12 +173,19 @@ Classification metadata (optional):
59
173
  - `--category` for a single broad bucket
60
174
  - `--tags` for flexible multi-label filtering (`comma,separated`)
61
175
 
176
+ Project hierarchy (auto-detected or explicit):
177
+
178
+ - `--solution` groups related projects (auto-detected from `.sln` or workspace root `package.json`)
179
+ - `--project` identifies the individual project (auto-detected from nearest `.csproj` or `package.json`)
180
+
62
181
  Metadata auto-detection:
63
182
 
64
183
  - `repo`: from flag/env, then git origin, then current folder name
65
184
  - `branch`: from flag/env, then git branch
66
185
  - `commit_sha`: from flag/env, then git commit
67
186
  - `pipeline_run_id`: from flag/env, fallback `local-<timestamp>`
187
+ - `project`: from flag/env, then nearest `.csproj`/`package.json`
188
+ - `solution`: from flag/env, then nearest `.sln`/workspace root `package.json`
68
189
 
69
190
  If needed, you can still pass explicit values with `--repo`, `--branch`, `--commit-sha`, and `--pipeline-run-id`.
70
191
 
@@ -81,8 +202,8 @@ Dry run mode:
81
202
  Useful env fallbacks:
82
203
 
83
204
  - `QUALINK_REPO`, `QUALINK_CATEGORY`, `QUALINK_TAGS`, `QUALINK_BRANCH`, `QUALINK_COMMIT_SHA`, `QUALINK_PIPELINE_RUN_ID`
84
- - `QUALINK_PACKAGE` (monorepo package name, auto-detected from `PNPM_PACKAGE_NAME`)
85
- - `QUALINK_PROJECT` (backend project identity)
205
+ - `QUALINK_PROJECT` (auto-detected from nearest `.csproj`/`package.json` or `PNPM_PACKAGE_NAME`)
206
+ - `QUALINK_SOLUTION` (auto-detected from `.sln` or workspace root `package.json`)
86
207
  - `QUALINK_PIPELINE_PROVIDER` (auto-detected, fallback: `local`)
87
208
  - `QUALINK_ENVIRONMENT` (default: `ci`)
88
209
  - `QUALINK_SINK` (default: `elastic`)
@@ -44,7 +44,7 @@ export declare function createCollectorCommand<TExtra extends Record<string, unk
44
44
  readonly type: "string";
45
45
  readonly default: "ci";
46
46
  };
47
- readonly package: {
47
+ readonly solution: {
48
48
  readonly type: "string";
49
49
  };
50
50
  readonly project: {
@@ -32,7 +32,7 @@ export declare const biomeCommand: import("citty").CommandDef<{
32
32
  readonly type: "string";
33
33
  readonly default: "ci";
34
34
  };
35
- readonly package: {
35
+ readonly solution: {
36
36
  readonly type: "string";
37
37
  };
38
38
  readonly project: {
@@ -32,7 +32,7 @@ export declare const coverageDotnetCommand: import("citty").CommandDef<{
32
32
  readonly type: "string";
33
33
  readonly default: "ci";
34
34
  };
35
- readonly package: {
35
+ readonly solution: {
36
36
  readonly type: "string";
37
37
  };
38
38
  readonly project: {
@@ -32,7 +32,7 @@ export declare const coverageJsCommand: import("citty").CommandDef<{
32
32
  readonly type: "string";
33
33
  readonly default: "ci";
34
34
  };
35
- readonly package: {
35
+ readonly solution: {
36
36
  readonly type: "string";
37
37
  };
38
38
  readonly project: {
@@ -32,7 +32,7 @@ export declare const eslintCommand: import("citty").CommandDef<{
32
32
  readonly type: "string";
33
33
  readonly default: "ci";
34
34
  };
35
- readonly package: {
35
+ readonly solution: {
36
36
  readonly type: "string";
37
37
  };
38
38
  readonly project: {
@@ -2,6 +2,8 @@ export { biomeCommand } from "./biome.js";
2
2
  export { coverageDotnetCommand } from "./coverage-dotnet.js";
3
3
  export { coverageJsCommand } from "./coverage-js.js";
4
4
  export { eslintCommand } from "./eslint.js";
5
+ export { junitCommand } from "./junit.js";
5
6
  export { lighthouseCommand } from "./lighthouse.js";
6
7
  export { metaCommand } from "./meta.js";
8
+ export { pipelineCommand } from "./pipeline.js";
7
9
  export { sarifCommand } from "./sarif.js";
@@ -2,6 +2,8 @@ export { biomeCommand } from "./biome.js";
2
2
  export { coverageDotnetCommand } from "./coverage-dotnet.js";
3
3
  export { coverageJsCommand } from "./coverage-js.js";
4
4
  export { eslintCommand } from "./eslint.js";
5
+ export { junitCommand } from "./junit.js";
5
6
  export { lighthouseCommand } from "./lighthouse.js";
6
7
  export { metaCommand } from "./meta.js";
8
+ export { pipelineCommand } from "./pipeline.js";
7
9
  export { sarifCommand } from "./sarif.js";
@@ -0,0 +1,76 @@
1
+ export declare const junitCommand: import("citty").CommandDef<{
2
+ readonly input: {
3
+ readonly type: "string";
4
+ readonly required: true;
5
+ };
6
+ readonly sink: {
7
+ readonly type: "string";
8
+ readonly default: "elastic";
9
+ };
10
+ readonly repo: {
11
+ readonly type: "string";
12
+ };
13
+ readonly category: {
14
+ readonly type: "string";
15
+ };
16
+ readonly tags: {
17
+ readonly type: "string";
18
+ };
19
+ readonly branch: {
20
+ readonly type: "string";
21
+ };
22
+ readonly "commit-sha": {
23
+ readonly type: "string";
24
+ };
25
+ readonly "pipeline-run-id": {
26
+ readonly type: "string";
27
+ };
28
+ readonly "pipeline-provider": {
29
+ readonly type: "string";
30
+ };
31
+ readonly environment: {
32
+ readonly type: "string";
33
+ readonly default: "ci";
34
+ };
35
+ readonly solution: {
36
+ readonly type: "string";
37
+ };
38
+ readonly project: {
39
+ readonly type: "string";
40
+ };
41
+ readonly "collector-version": {
42
+ readonly type: "string";
43
+ };
44
+ readonly "elastic-url": {
45
+ readonly type: "string";
46
+ };
47
+ readonly "elastic-api-key": {
48
+ readonly type: "string";
49
+ };
50
+ readonly "loki-url": {
51
+ readonly type: "string";
52
+ };
53
+ readonly "loki-username": {
54
+ readonly type: "string";
55
+ };
56
+ readonly "loki-password": {
57
+ readonly type: "string";
58
+ };
59
+ readonly "loki-tenant-id": {
60
+ readonly type: "string";
61
+ };
62
+ readonly "retry-max": {
63
+ readonly type: "string";
64
+ };
65
+ readonly "retry-backoff-ms": {
66
+ readonly type: "string";
67
+ };
68
+ readonly "allow-empty": {
69
+ readonly type: "boolean";
70
+ readonly default: false;
71
+ };
72
+ readonly "dry-run": {
73
+ readonly type: "boolean";
74
+ readonly default: false;
75
+ };
76
+ }>;
@@ -0,0 +1,12 @@
1
+ import { collectJunit } from "../../collectors/junit.js";
2
+ import { createCollectorCommand } from "../command-factory.js";
3
+ import { loadTextInput } from "../load-input.js";
4
+ export const junitCommand = createCollectorCommand({
5
+ name: "junit",
6
+ description: "Collect JUnit XML test results and relay them",
7
+ async collect(args, metadata) {
8
+ const input = await loadTextInput(args);
9
+ const documents = collectJunit(input, metadata);
10
+ return { metricType: "junit", documents };
11
+ },
12
+ });
@@ -32,7 +32,7 @@ export declare const lighthouseCommand: import("citty").CommandDef<{
32
32
  readonly type: "string";
33
33
  readonly default: "ci";
34
34
  };
35
- readonly package: {
35
+ readonly solution: {
36
36
  readonly type: "string";
37
37
  };
38
38
  readonly project: {
@@ -32,7 +32,7 @@ export declare const metaCommand: import("citty").CommandDef<{
32
32
  readonly type: "string";
33
33
  readonly default: "ci";
34
34
  };
35
- readonly package: {
35
+ readonly solution: {
36
36
  readonly type: "string";
37
37
  };
38
38
  readonly project: {
@@ -0,0 +1,76 @@
1
+ export declare const pipelineCommand: import("citty").CommandDef<{
2
+ readonly input: {
3
+ readonly type: "string";
4
+ readonly required: true;
5
+ };
6
+ readonly sink: {
7
+ readonly type: "string";
8
+ readonly default: "elastic";
9
+ };
10
+ readonly repo: {
11
+ readonly type: "string";
12
+ };
13
+ readonly category: {
14
+ readonly type: "string";
15
+ };
16
+ readonly tags: {
17
+ readonly type: "string";
18
+ };
19
+ readonly branch: {
20
+ readonly type: "string";
21
+ };
22
+ readonly "commit-sha": {
23
+ readonly type: "string";
24
+ };
25
+ readonly "pipeline-run-id": {
26
+ readonly type: "string";
27
+ };
28
+ readonly "pipeline-provider": {
29
+ readonly type: "string";
30
+ };
31
+ readonly environment: {
32
+ readonly type: "string";
33
+ readonly default: "ci";
34
+ };
35
+ readonly solution: {
36
+ readonly type: "string";
37
+ };
38
+ readonly project: {
39
+ readonly type: "string";
40
+ };
41
+ readonly "collector-version": {
42
+ readonly type: "string";
43
+ };
44
+ readonly "elastic-url": {
45
+ readonly type: "string";
46
+ };
47
+ readonly "elastic-api-key": {
48
+ readonly type: "string";
49
+ };
50
+ readonly "loki-url": {
51
+ readonly type: "string";
52
+ };
53
+ readonly "loki-username": {
54
+ readonly type: "string";
55
+ };
56
+ readonly "loki-password": {
57
+ readonly type: "string";
58
+ };
59
+ readonly "loki-tenant-id": {
60
+ readonly type: "string";
61
+ };
62
+ readonly "retry-max": {
63
+ readonly type: "string";
64
+ };
65
+ readonly "retry-backoff-ms": {
66
+ readonly type: "string";
67
+ };
68
+ readonly "allow-empty": {
69
+ readonly type: "boolean";
70
+ readonly default: false;
71
+ };
72
+ readonly "dry-run": {
73
+ readonly type: "boolean";
74
+ readonly default: false;
75
+ };
76
+ }>;
@@ -0,0 +1,35 @@
1
+ import { collectPipeline } from "../../collectors/pipeline.js";
2
+ import { createCollectorCommand } from "../command-factory.js";
3
+ import { detectPipelineName, detectPipelineTrigger } from "../detect-ci.js";
4
+ export const pipelineCommand = createCollectorCommand({
5
+ name: "pipeline",
6
+ description: "Report pipeline execution metrics",
7
+ extraArgs: {
8
+ input: { type: "string" },
9
+ sink: { type: "string", default: "elastic" },
10
+ status: { type: "string", required: true },
11
+ "pipeline-name": { type: "string" },
12
+ trigger: { type: "string" },
13
+ duration: { type: "string" },
14
+ "start-time": { type: "string" },
15
+ "stage-name": { type: "string" },
16
+ },
17
+ async collect(args, metadata) {
18
+ const status = args.status;
19
+ const pipelineName = (args.pipelineName ?? args["pipeline-name"]);
20
+ const trigger = args.trigger;
21
+ const rawDuration = args.duration;
22
+ const startTime = (args.startTime ?? args["start-time"]);
23
+ const stageName = (args.stageName ?? args["stage-name"]);
24
+ const durationMs = rawDuration ? Number(rawDuration) : null;
25
+ const documents = collectPipeline({
26
+ status,
27
+ pipelineName: pipelineName ?? detectPipelineName(args),
28
+ trigger: trigger ?? detectPipelineTrigger(args),
29
+ durationMs: durationMs !== null && Number.isFinite(durationMs) ? durationMs : null,
30
+ startTime: startTime ?? null,
31
+ stageName: stageName ?? null,
32
+ }, metadata);
33
+ return { metricType: "pipeline", documents };
34
+ },
35
+ });
@@ -32,7 +32,7 @@ export declare const sarifCommand: import("citty").CommandDef<{
32
32
  readonly type: "string";
33
33
  readonly default: "ci";
34
34
  };
35
- readonly package: {
35
+ readonly solution: {
36
36
  readonly type: "string";
37
37
  };
38
38
  readonly project: {
@@ -10,7 +10,7 @@ export interface CommonArgs {
10
10
  pipelineRunId?: unknown;
11
11
  pipelineProvider?: unknown;
12
12
  environment?: unknown;
13
- package?: unknown;
13
+ solution?: unknown;
14
14
  project?: unknown;
15
15
  collectorVersion?: unknown;
16
16
  elasticUrl?: unknown;
@@ -60,7 +60,7 @@ export declare const commonArgs: {
60
60
  readonly type: "string";
61
61
  readonly default: "ci";
62
62
  };
63
- readonly package: {
63
+ readonly solution: {
64
64
  readonly type: "string";
65
65
  };
66
66
  readonly project: {
@@ -32,7 +32,7 @@ export const commonArgs = {
32
32
  "pipeline-run-id": { type: "string" },
33
33
  "pipeline-provider": { type: "string" },
34
34
  environment: { type: "string", default: "ci" },
35
- package: { type: "string" },
35
+ solution: { type: "string" },
36
36
  project: { type: "string" },
37
37
  "collector-version": { type: "string" },
38
38
  "elastic-url": { type: "string" },
@@ -2,4 +2,6 @@ import { type CommonArgs } from "./common-args.js";
2
2
  export declare function detectBranch(args: CommonArgs): string;
3
3
  export declare function detectCommitSha(args: CommonArgs): string;
4
4
  export declare function detectPipelineRunId(args: CommonArgs): string;
5
+ export declare function detectPipelineName(args: CommonArgs): string;
6
+ export declare function detectPipelineTrigger(args: CommonArgs): string;
5
7
  export declare function detectPipelineProvider(args: CommonArgs): string;
@@ -23,6 +23,53 @@ export function detectPipelineRunId(args) {
23
23
  process.env.CI_PIPELINE_ID ??
24
24
  `local-${Date.now().toString()}`);
25
25
  }
26
+ export function detectPipelineName(args) {
27
+ return (envOrArg(argValue(args, "pipelineName", "pipeline-name"), "QUALINK_PIPELINE_NAME") ??
28
+ process.env.BUILD_DEFINITIONNAME ??
29
+ process.env.GITHUB_WORKFLOW ??
30
+ process.env.CI_PIPELINE_NAME ??
31
+ process.env.CI_PROJECT_NAME ??
32
+ "unknown");
33
+ }
34
+ const AZURE_TRIGGER_MAP = {
35
+ IndividualCI: "push",
36
+ BatchedCI: "push",
37
+ PullRequest: "pr",
38
+ Manual: "manual",
39
+ Schedule: "schedule",
40
+ };
41
+ const GITHUB_TRIGGER_MAP = {
42
+ push: "push",
43
+ pull_request: "pr",
44
+ workflow_dispatch: "manual",
45
+ schedule: "schedule",
46
+ };
47
+ const GITLAB_TRIGGER_MAP = {
48
+ push: "push",
49
+ merge_request_event: "pr",
50
+ web: "manual",
51
+ schedule: "schedule",
52
+ api: "api",
53
+ };
54
+ export function detectPipelineTrigger(args) {
55
+ const explicit = envOrArg(argValue(args, "trigger"), "QUALINK_PIPELINE_TRIGGER");
56
+ if (explicit) {
57
+ return explicit;
58
+ }
59
+ const azureReason = process.env.BUILD_REASON;
60
+ if (azureReason) {
61
+ return AZURE_TRIGGER_MAP[azureReason] ?? azureReason;
62
+ }
63
+ const githubEvent = process.env.GITHUB_EVENT_NAME;
64
+ if (githubEvent) {
65
+ return GITHUB_TRIGGER_MAP[githubEvent] ?? githubEvent;
66
+ }
67
+ const gitlabSource = process.env.CI_PIPELINE_SOURCE;
68
+ if (gitlabSource) {
69
+ return GITLAB_TRIGGER_MAP[gitlabSource] ?? gitlabSource;
70
+ }
71
+ return "unknown";
72
+ }
26
73
  export function detectPipelineProvider(args) {
27
74
  const explicit = envOrArg(argValue(args, "pipelineProvider", "pipeline-provider"), "QUALINK_PIPELINE_PROVIDER");
28
75
  if (explicit) {
@@ -1,7 +1,14 @@
1
- import { readdirSync } from "node:fs";
2
- import { basename } from "node:path";
1
+ import { existsSync, readdirSync, readFileSync } from "node:fs";
2
+ import { basename, resolve } from "node:path";
3
3
  import { argValue, envOrArg } from "./common-args.js";
4
- import { isInsideWorkspacePackage } from "./detect-package.js";
4
+ import { runGit } from "./git.js";
5
+ function isInsideWorkspacePackage() {
6
+ const gitRoot = runGit(["rev-parse", "--show-toplevel"]);
7
+ if (!gitRoot) {
8
+ return false;
9
+ }
10
+ return resolve(".") !== resolve(gitRoot);
11
+ }
5
12
  function readCsprojName() {
6
13
  try {
7
14
  const files = readdirSync(".");
@@ -15,14 +22,33 @@ function readCsprojName() {
15
22
  }
16
23
  return undefined;
17
24
  }
25
+ function readPackageJsonName() {
26
+ try {
27
+ const pkgPath = resolve("package.json");
28
+ if (!existsSync(pkgPath)) {
29
+ return undefined;
30
+ }
31
+ const raw = JSON.parse(readFileSync(pkgPath, "utf-8"));
32
+ if (typeof raw === "object" && raw !== null && "name" in raw && typeof raw.name === "string") {
33
+ return raw.name;
34
+ }
35
+ }
36
+ catch {
37
+ // ignore
38
+ }
39
+ return undefined;
40
+ }
18
41
  export function detectProjectName(args) {
19
42
  const explicit = envOrArg(argValue(args, "project"), "QUALINK_PROJECT");
20
43
  if (explicit) {
21
44
  return explicit;
22
45
  }
23
- // Auto-detect from .csproj when running inside a project subdirectory
46
+ const pnpmName = process.env.PNPM_PACKAGE_NAME;
47
+ if (pnpmName && pnpmName.trim().length > 0) {
48
+ return pnpmName;
49
+ }
24
50
  if (isInsideWorkspacePackage()) {
25
- return readCsprojName();
51
+ return readCsprojName() ?? readPackageJsonName();
26
52
  }
27
53
  return undefined;
28
54
  }
@@ -0,0 +1,2 @@
1
+ import { type CommonArgs } from "./common-args.js";
2
+ export declare function detectSolution(args: CommonArgs): string | undefined;