@cyclonedx/cdxgen 12.1.4 → 12.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (184)
  1. package/README.md +47 -39
  2. package/bin/cdxgen.js +181 -90
  3. package/bin/evinse.js +4 -4
  4. package/bin/repl.js +3 -3
  5. package/bin/sign.js +102 -0
  6. package/bin/validate.js +233 -0
  7. package/bin/verify.js +69 -28
  8. package/data/queries.json +1 -1
  9. package/data/rules/ci-permissions.yaml +186 -0
  10. package/data/rules/dependency-sources.yaml +123 -0
  11. package/data/rules/package-integrity.yaml +135 -0
  12. package/data/rules/vscode-extensions.yaml +228 -0
  13. package/lib/cli/index.js +484 -440
  14. package/lib/evinser/db.js +137 -0
  15. package/lib/{helpers → evinser}/db.poku.js +2 -6
  16. package/lib/evinser/evinser.js +5 -18
  17. package/lib/evinser/swiftsem.js +1 -1
  18. package/lib/helpers/bomSigner.js +312 -0
  19. package/lib/helpers/bomSigner.poku.js +156 -0
  20. package/lib/helpers/caxa.js +1 -1
  21. package/lib/helpers/ciParsers/azurePipelines.js +295 -0
  22. package/lib/helpers/ciParsers/azurePipelines.poku.js +253 -0
  23. package/lib/helpers/ciParsers/circleCi.js +286 -0
  24. package/lib/helpers/ciParsers/circleCi.poku.js +230 -0
  25. package/lib/helpers/ciParsers/common.js +24 -0
  26. package/lib/helpers/ciParsers/githubActions.js +636 -0
  27. package/lib/helpers/ciParsers/githubActions.poku.js +802 -0
  28. package/lib/helpers/ciParsers/gitlabCi.js +213 -0
  29. package/lib/helpers/ciParsers/gitlabCi.poku.js +247 -0
  30. package/lib/helpers/ciParsers/jenkins.js +181 -0
  31. package/lib/helpers/ciParsers/jenkins.poku.js +197 -0
  32. package/lib/helpers/depsUtils.js +203 -0
  33. package/lib/helpers/depsUtils.poku.js +150 -0
  34. package/lib/helpers/display.js +429 -14
  35. package/lib/helpers/envcontext.js +23 -8
  36. package/lib/helpers/formulationParsers.js +351 -0
  37. package/lib/helpers/logger.js +14 -0
  38. package/lib/helpers/protobom.js +9 -9
  39. package/lib/helpers/pythonutils.js +305 -0
  40. package/lib/helpers/pythonutils.poku.js +469 -0
  41. package/lib/helpers/utils.js +970 -528
  42. package/lib/helpers/utils.poku.js +139 -256
  43. package/lib/helpers/versutils.js +202 -0
  44. package/lib/helpers/versutils.poku.js +315 -0
  45. package/lib/helpers/vsixutils.js +1061 -0
  46. package/lib/helpers/vsixutils.poku.js +2247 -0
  47. package/lib/managers/binary.js +19 -19
  48. package/lib/managers/docker.js +108 -1
  49. package/lib/managers/oci.js +10 -0
  50. package/lib/managers/piptree.js +4 -10
  51. package/lib/parsers/npmrc.js +92 -0
  52. package/lib/parsers/npmrc.poku.js +528 -0
  53. package/lib/server/openapi.yaml +1 -10
  54. package/lib/server/server.js +58 -16
  55. package/lib/server/server.poku.js +123 -144
  56. package/lib/stages/postgen/annotator.js +1 -1
  57. package/lib/stages/postgen/auditBom.js +197 -0
  58. package/lib/stages/postgen/auditBom.poku.js +378 -0
  59. package/lib/stages/postgen/postgen.js +54 -1
  60. package/lib/stages/postgen/postgen.poku.js +90 -1
  61. package/lib/stages/postgen/ruleEngine.js +369 -0
  62. package/lib/stages/pregen/envAudit.js +299 -0
  63. package/lib/stages/pregen/envAudit.poku.js +572 -0
  64. package/lib/stages/pregen/pregen.js +12 -8
  65. package/lib/third-party/arborist/lib/deepest-nesting-target.js +1 -1
  66. package/lib/third-party/arborist/lib/node.js +3 -3
  67. package/lib/third-party/arborist/lib/shrinkwrap.js +1 -1
  68. package/lib/third-party/arborist/lib/tree-check.js +1 -1
  69. package/lib/{helpers/validator.js → validator/bomValidator.js} +107 -47
  70. package/lib/validator/complianceEngine.js +241 -0
  71. package/lib/validator/complianceEngine.poku.js +168 -0
  72. package/lib/validator/complianceRules.js +1610 -0
  73. package/lib/validator/complianceRules.poku.js +328 -0
  74. package/lib/validator/index.js +222 -0
  75. package/lib/validator/index.poku.js +144 -0
  76. package/lib/validator/reporters/annotations.js +121 -0
  77. package/lib/validator/reporters/console.js +149 -0
  78. package/lib/validator/reporters/index.js +41 -0
  79. package/lib/validator/reporters/json.js +37 -0
  80. package/lib/validator/reporters/sarif.js +184 -0
  81. package/lib/validator/reporters.poku.js +150 -0
  82. package/package.json +8 -8
  83. package/types/bin/sign.d.ts +3 -0
  84. package/types/bin/sign.d.ts.map +1 -0
  85. package/types/bin/validate.d.ts +3 -0
  86. package/types/bin/validate.d.ts.map +1 -0
  87. package/types/helpers/utils.d.ts +0 -1
  88. package/types/lib/cli/index.d.ts +49 -52
  89. package/types/lib/cli/index.d.ts.map +1 -1
  90. package/types/lib/evinser/db.d.ts +34 -0
  91. package/types/lib/evinser/db.d.ts.map +1 -0
  92. package/types/lib/evinser/evinser.d.ts +63 -16
  93. package/types/lib/evinser/evinser.d.ts.map +1 -1
  94. package/types/lib/helpers/bomSigner.d.ts +27 -0
  95. package/types/lib/helpers/bomSigner.d.ts.map +1 -0
  96. package/types/lib/helpers/ciParsers/azurePipelines.d.ts +17 -0
  97. package/types/lib/helpers/ciParsers/azurePipelines.d.ts.map +1 -0
  98. package/types/lib/helpers/ciParsers/circleCi.d.ts +17 -0
  99. package/types/lib/helpers/ciParsers/circleCi.d.ts.map +1 -0
  100. package/types/lib/helpers/ciParsers/common.d.ts +11 -0
  101. package/types/lib/helpers/ciParsers/common.d.ts.map +1 -0
  102. package/types/lib/helpers/ciParsers/githubActions.d.ts +34 -0
  103. package/types/lib/helpers/ciParsers/githubActions.d.ts.map +1 -0
  104. package/types/lib/helpers/ciParsers/gitlabCi.d.ts +17 -0
  105. package/types/lib/helpers/ciParsers/gitlabCi.d.ts.map +1 -0
  106. package/types/lib/helpers/ciParsers/jenkins.d.ts +17 -0
  107. package/types/lib/helpers/ciParsers/jenkins.d.ts.map +1 -0
  108. package/types/lib/helpers/depsUtils.d.ts +21 -0
  109. package/types/lib/helpers/depsUtils.d.ts.map +1 -0
  110. package/types/lib/helpers/display.d.ts +111 -11
  111. package/types/lib/helpers/display.d.ts.map +1 -1
  112. package/types/lib/helpers/envcontext.d.ts +19 -7
  113. package/types/lib/helpers/envcontext.d.ts.map +1 -1
  114. package/types/lib/helpers/formulationParsers.d.ts +50 -0
  115. package/types/lib/helpers/formulationParsers.d.ts.map +1 -0
  116. package/types/lib/helpers/logger.d.ts +15 -1
  117. package/types/lib/helpers/logger.d.ts.map +1 -1
  118. package/types/lib/helpers/protobom.d.ts +2 -2
  119. package/types/lib/helpers/pythonutils.d.ts +18 -0
  120. package/types/lib/helpers/pythonutils.d.ts.map +1 -0
  121. package/types/lib/helpers/utils.d.ts +532 -128
  122. package/types/lib/helpers/utils.d.ts.map +1 -1
  123. package/types/lib/helpers/versutils.d.ts +8 -0
  124. package/types/lib/helpers/versutils.d.ts.map +1 -0
  125. package/types/lib/helpers/vsixutils.d.ts +130 -0
  126. package/types/lib/helpers/vsixutils.d.ts.map +1 -0
  127. package/types/lib/managers/docker.d.ts +12 -31
  128. package/types/lib/managers/docker.d.ts.map +1 -1
  129. package/types/lib/managers/oci.d.ts +11 -1
  130. package/types/lib/managers/oci.d.ts.map +1 -1
  131. package/types/lib/managers/piptree.d.ts.map +1 -1
  132. package/types/lib/parsers/npmrc.d.ts +26 -0
  133. package/types/lib/parsers/npmrc.d.ts.map +1 -0
  134. package/types/lib/server/server.d.ts +21 -2
  135. package/types/lib/server/server.d.ts.map +1 -1
  136. package/types/lib/stages/postgen/auditBom.d.ts +20 -0
  137. package/types/lib/stages/postgen/auditBom.d.ts.map +1 -0
  138. package/types/lib/stages/postgen/postgen.d.ts +8 -1
  139. package/types/lib/stages/postgen/postgen.d.ts.map +1 -1
  140. package/types/lib/stages/postgen/ruleEngine.d.ts +18 -0
  141. package/types/lib/stages/postgen/ruleEngine.d.ts.map +1 -0
  142. package/types/lib/stages/pregen/envAudit.d.ts +8 -0
  143. package/types/lib/stages/pregen/envAudit.d.ts.map +1 -0
  144. package/types/lib/stages/pregen/pregen.d.ts.map +1 -1
  145. package/types/lib/{helpers/validator.d.ts → validator/bomValidator.d.ts} +1 -1
  146. package/types/lib/validator/bomValidator.d.ts.map +1 -0
  147. package/types/lib/validator/complianceEngine.d.ts +66 -0
  148. package/types/lib/validator/complianceEngine.d.ts.map +1 -0
  149. package/types/lib/validator/complianceRules.d.ts +70 -0
  150. package/types/lib/validator/complianceRules.d.ts.map +1 -0
  151. package/types/lib/validator/index.d.ts +70 -0
  152. package/types/lib/validator/index.d.ts.map +1 -0
  153. package/types/lib/validator/reporters/annotations.d.ts +31 -0
  154. package/types/lib/validator/reporters/annotations.d.ts.map +1 -0
  155. package/types/lib/validator/reporters/console.d.ts +30 -0
  156. package/types/lib/validator/reporters/console.d.ts.map +1 -0
  157. package/types/lib/validator/reporters/index.d.ts +21 -0
  158. package/types/lib/validator/reporters/index.d.ts.map +1 -0
  159. package/types/lib/validator/reporters/json.d.ts +11 -0
  160. package/types/lib/validator/reporters/json.d.ts.map +1 -0
  161. package/types/lib/validator/reporters/sarif.d.ts +16 -0
  162. package/types/lib/validator/reporters/sarif.d.ts.map +1 -0
  163. package/lib/helpers/db.js +0 -162
  164. package/types/helpers/db.d.ts +0 -35
  165. package/types/helpers/db.d.ts.map +0 -1
  166. package/types/lib/helpers/db.d.ts +0 -35
  167. package/types/lib/helpers/db.d.ts.map +0 -1
  168. package/types/lib/helpers/validator.d.ts.map +0 -1
  169. package/types/managers/binary.d.ts +0 -37
  170. package/types/managers/binary.d.ts.map +0 -1
  171. package/types/managers/docker.d.ts +0 -56
  172. package/types/managers/docker.d.ts.map +0 -1
  173. package/types/managers/oci.d.ts +0 -2
  174. package/types/managers/oci.d.ts.map +0 -1
  175. package/types/managers/piptree.d.ts +0 -2
  176. package/types/managers/piptree.d.ts.map +0 -1
  177. package/types/server/server.d.ts +0 -34
  178. package/types/server/server.d.ts.map +0 -1
  179. package/types/stages/postgen/annotator.d.ts +0 -27
  180. package/types/stages/postgen/annotator.d.ts.map +0 -1
  181. package/types/stages/postgen/postgen.d.ts +0 -51
  182. package/types/stages/postgen/postgen.d.ts.map +0 -1
  183. package/types/stages/pregen/pregen.d.ts +0 -59
  184. package/types/stages/pregen/pregen.d.ts.map +0 -1
@@ -9,7 +9,7 @@ export async function parseCaxaMetadata(mfile) {
9
9
  } catch (_e) {
10
10
  return {};
11
11
  }
12
- if (!mdata || !mdata.components) {
12
+ if (!mdata?.components) {
13
13
  return {};
14
14
  }
15
15
  const { parentComponent } = mdata;
@@ -0,0 +1,295 @@
1
+ import { readFileSync } from "node:fs";
2
+
3
+ import { v4 as uuidv4 } from "uuid";
4
+ import { parse as _load } from "yaml";
5
+
6
+ import { disambiguateSteps } from "./common.js";
7
+
8
+ /**
9
+ * Parse a single Azure Pipelines YAML file and return formulation-shaped data.
10
+ *
11
+ * @param {string} f Absolute path to the YAML file
12
+ * @param {Object} _options CLI options
13
+ * @returns {{ workflows: Object[], components: Object[], services: Object[], properties: Object[], dependencies: Object[] }}
14
+ */
15
+ function parseAzurePipelinesFile(f, _options) {
16
+ const workflows = [];
17
+ const components = [];
18
+ const dependencies = [];
19
+
20
+ let raw;
21
+ try {
22
+ raw = readFileSync(f, { encoding: "utf-8" });
23
+ } catch (_e) {
24
+ return {
25
+ workflows,
26
+ components,
27
+ services: [],
28
+ properties: [],
29
+ dependencies,
30
+ };
31
+ }
32
+
33
+ let yamlObj;
34
+ try {
35
+ yamlObj = _load(raw);
36
+ } catch (_e) {
37
+ return {
38
+ workflows,
39
+ components,
40
+ services: [],
41
+ properties: [],
42
+ dependencies,
43
+ };
44
+ }
45
+
46
+ if (!yamlObj || typeof yamlObj !== "object") {
47
+ return {
48
+ workflows,
49
+ components,
50
+ services: [],
51
+ properties: [],
52
+ dependencies,
53
+ };
54
+ }
55
+
56
+ // Not an Azure Pipelines file (heuristic: must have at least one of pool, stages, jobs, steps
57
+ // and must not look like a GitLab CI file which uses a top-level `image` key)
58
+ const looksLikeAzure =
59
+ !yamlObj.image &&
60
+ (yamlObj.pool ||
61
+ yamlObj.stages ||
62
+ yamlObj.jobs ||
63
+ yamlObj.steps ||
64
+ yamlObj.trigger);
65
+ if (!looksLikeAzure) {
66
+ return {
67
+ workflows,
68
+ components,
69
+ services: [],
70
+ properties: [],
71
+ dependencies,
72
+ };
73
+ }
74
+
75
+ const workflowRef = uuidv4();
76
+ const tasks = [];
77
+ const workflowDependsOn = [];
78
+ const workflowProperties = [{ name: "cdx:azure:config", value: f }];
79
+
80
+ // Collect pool image as component
81
+ const poolImage = yamlObj.pool?.vmImage || "";
82
+ if (poolImage) {
83
+ components.push({ type: "platform", name: poolImage });
84
+ workflowProperties.push({
85
+ name: "cdx:azure:pool:vmImage",
86
+ value: poolImage,
87
+ });
88
+ }
89
+
90
+ // Collect trigger branches
91
+ const triggerBranches = [];
92
+ if (Array.isArray(yamlObj.trigger?.branches?.include)) {
93
+ triggerBranches.push(...yamlObj.trigger.branches.include);
94
+ } else if (typeof yamlObj.trigger === "string") {
95
+ triggerBranches.push(yamlObj.trigger);
96
+ } else if (Array.isArray(yamlObj.trigger)) {
97
+ triggerBranches.push(...yamlObj.trigger);
98
+ }
99
+ if (triggerBranches.length) {
100
+ workflowProperties.push({
101
+ name: "cdx:azure:trigger:branches",
102
+ value: triggerBranches.join(","),
103
+ });
104
+ }
105
+
106
+ // Stage-based pipelines.
107
+ // CycloneDX Task schema has additionalProperties: false and does NOT allow a
108
+ // nested `tasks` property — only Workflow does. We therefore flatten
109
+ // stage → job into individual tasks and record stage context via properties.
110
+ const stages = Array.isArray(yamlObj.stages) ? yamlObj.stages : [];
111
+ for (const stage of stages) {
112
+ const stageName = stage.stage || stage.displayName || "unnamed-stage";
113
+
114
+ const stageDepOn = stage.dependsOn
115
+ ? Array.isArray(stage.dependsOn)
116
+ ? stage.dependsOn
117
+ : [stage.dependsOn]
118
+ : [];
119
+
120
+ const jobs = Array.isArray(stage.jobs) ? stage.jobs : [];
121
+ for (const jobDef of jobs) {
122
+ const jobName =
123
+ jobDef.job || jobDef.deployment || jobDef.displayName || "unnamed-job";
124
+ const jobRef = uuidv4();
125
+ const steps = [];
126
+ // Combine stage- and job-level context into the task properties.
127
+ const jobProperties = [
128
+ { name: "cdx:azure:stage:name", value: stageName },
129
+ { name: "cdx:azure:job:name", value: jobName },
130
+ ];
131
+
132
+ if (stageDepOn.length) {
133
+ jobProperties.push({
134
+ name: "cdx:azure:stage:dependsOn",
135
+ value: stageDepOn.join(","),
136
+ });
137
+ }
138
+
139
+ if (stage.condition) {
140
+ jobProperties.push({
141
+ name: "cdx:azure:stage:condition",
142
+ value: stage.condition,
143
+ });
144
+ }
145
+
146
+ if (jobDef.pool?.vmImage) {
147
+ jobProperties.push({
148
+ name: "cdx:azure:job:pool:vmImage",
149
+ value: jobDef.pool.vmImage,
150
+ });
151
+ components.push({ type: "platform", name: jobDef.pool.vmImage });
152
+ }
153
+
154
+ if (jobDef.environment) {
155
+ const envName =
156
+ typeof jobDef.environment === "string"
157
+ ? jobDef.environment
158
+ : jobDef.environment?.name || "";
159
+ if (envName) {
160
+ jobProperties.push({
161
+ name: "cdx:azure:job:environment",
162
+ value: envName,
163
+ });
164
+ }
165
+ }
166
+
167
+ // Collect deployment strategy steps
168
+ const strategySteps =
169
+ jobDef.strategy?.runOnce?.deploy?.steps ||
170
+ jobDef.strategy?.rolling?.deploy?.steps ||
171
+ jobDef.strategy?.canary?.deploy?.steps ||
172
+ jobDef.steps ||
173
+ [];
174
+
175
+ for (const step of Array.isArray(strategySteps) ? strategySteps : []) {
176
+ if (typeof step !== "object") {
177
+ continue;
178
+ }
179
+ const stepName =
180
+ step.displayName ||
181
+ step.task ||
182
+ (step.script ? "script" : undefined) ||
183
+ "step";
184
+ const command = step.script || step.bash || step.powershell;
185
+ steps.push({
186
+ name: stepName,
187
+ commands: command
188
+ ? [{ executed: command.trim().split("\n")[0] }]
189
+ : undefined,
190
+ });
191
+ }
192
+
193
+ tasks.push({
194
+ "bom-ref": jobRef,
195
+ uid: jobRef,
196
+ name: `${stageName}/${jobName}`,
197
+ taskTypes: ["build"],
198
+ steps: disambiguateSteps(steps),
199
+ properties: jobProperties,
200
+ });
201
+ workflowDependsOn.push(jobRef);
202
+ }
203
+ }
204
+
205
+ // Flat (non-stage) jobs list
206
+ if (stages.length === 0 && Array.isArray(yamlObj.jobs)) {
207
+ for (const jobDef of yamlObj.jobs) {
208
+ const jobName = jobDef.job || jobDef.displayName || "unnamed-job";
209
+ const taskRef = uuidv4();
210
+ const steps = [];
211
+ const taskProperties = [{ name: "cdx:azure:job:name", value: jobName }];
212
+
213
+ for (const step of Array.isArray(jobDef.steps) ? jobDef.steps : []) {
214
+ if (typeof step !== "object") {
215
+ continue;
216
+ }
217
+ const stepName = step.displayName || step.task || "step";
218
+ const command = step.script || step.bash;
219
+ steps.push({
220
+ name: stepName,
221
+ commands: command
222
+ ? [{ executed: command.trim().split("\n")[0] }]
223
+ : undefined,
224
+ });
225
+ }
226
+
227
+ tasks.push({
228
+ "bom-ref": taskRef,
229
+ uid: taskRef,
230
+ name: jobName,
231
+ taskTypes: ["build"],
232
+ steps: disambiguateSteps(steps),
233
+ properties: taskProperties,
234
+ });
235
+ workflowDependsOn.push(taskRef);
236
+ }
237
+ }
238
+
239
+ const workflow = {
240
+ "bom-ref": workflowRef,
241
+ uid: workflowRef,
242
+ name: "Azure Pipelines",
243
+ taskTypes: ["build"],
244
+ tasks: tasks.length ? tasks : undefined,
245
+ properties: workflowProperties,
246
+ };
247
+
248
+ workflows.push(workflow);
249
+ if (workflowDependsOn.length) {
250
+ dependencies.push({ ref: workflowRef, dependsOn: workflowDependsOn });
251
+ }
252
+
253
+ return { workflows, components, services: [], properties: [], dependencies };
254
+ }
255
+
256
/**
 * Azure Pipelines formulation parser.
 *
 * Matches `azure-pipelines.yml`, `azure-pipelines.yaml`, and
 * `.azure-pipelines/*.yml` files and converts them into CycloneDX formulation
 * workflow objects.
 *
 * Parser contract: `parse(files, options)` returns
 * `{ workflows, components, services, properties, dependencies }`.
 */
export const azurePipelinesParser = {
  id: "azure-pipelines",
  patterns: ["**/azure-pipelines.{yml,yaml}", ".azure-pipelines/*.{yml,yaml}"],

  /**
   * Parse every matched pipeline file and merge the per-file results.
   *
   * @param {string[]} files Matched pipeline file paths
   * @param {Object} options CLI options
   * @returns {{ workflows: Object[], components: Object[], services: Object[], properties: Object[], dependencies: Object[] }}
   */
  parse(files, options) {
    // Accumulate per-file results into a single merged payload.
    const collected = {
      workflows: [],
      components: [],
      services: [],
      properties: [],
      dependencies: [],
    };

    for (const f of files) {
      const parsed = parseAzurePipelinesFile(f, options);
      collected.workflows.push(...parsed.workflows);
      collected.components.push(...parsed.components);
      collected.dependencies.push(...parsed.dependencies);
    }

    return collected;
  },
};
@@ -0,0 +1,253 @@
1
+ import path from "node:path";
2
+ import { fileURLToPath } from "node:url";
3
+
4
+ import { assert, describe, it } from "poku";
5
+
6
+ import { azurePipelinesParser } from "./azurePipelines.js";
7
+
8
+ const __dirname = path.dirname(fileURLToPath(import.meta.url));
9
+ const repoRoot = path.resolve(__dirname, "../../..");
10
+
11
+ describe("azurePipelinesParser", () => {
12
+ it("has correct metadata", () => {
13
+ assert.strictEqual(azurePipelinesParser.id, "azure-pipelines");
14
+ assert.ok(Array.isArray(azurePipelinesParser.patterns));
15
+ assert.ok(azurePipelinesParser.patterns.length > 0);
16
+ assert.strictEqual(typeof azurePipelinesParser.parse, "function");
17
+ });
18
+
19
+ it("returns empty arrays for no files", () => {
20
+ const result = azurePipelinesParser.parse([], {});
21
+ assert.deepStrictEqual(result.workflows, []);
22
+ assert.deepStrictEqual(result.components, []);
23
+ assert.deepStrictEqual(result.services, []);
24
+ assert.deepStrictEqual(result.properties, []);
25
+ assert.deepStrictEqual(result.dependencies, []);
26
+ });
27
+
28
+ it("parses the Azure Pipelines fixture", () => {
29
+ const f = path.join(repoRoot, "test", "data", "azure-pipelines.yml");
30
+ const result = azurePipelinesParser.parse([f], {});
31
+
32
+ assert.ok(Array.isArray(result.workflows));
33
+ assert.strictEqual(result.workflows.length, 1);
34
+
35
+ const wf = result.workflows[0];
36
+ assert.ok(wf["bom-ref"]);
37
+ assert.strictEqual(wf.name, "Azure Pipelines");
38
+ assert.ok(Array.isArray(wf.tasks));
39
+ assert.ok(wf.tasks.length > 0, "expected at least one task");
40
+
41
+ // Stages are flattened: each stage+job becomes a task named
42
+ // "StageName/JobName". Tasks must NOT have a nested `tasks` property
43
+ // (CycloneDX Task schema has additionalProperties: false).
44
+ for (const task of wf.tasks) {
45
+ assert.ok(!task.tasks, "Task must not have a nested tasks property");
46
+ }
47
+
48
+ const taskNames = wf.tasks.map((t) => t.name);
49
+ assert.ok(
50
+ taskNames.some((n) => n.startsWith("Build/")),
51
+ "expected a Build/* task",
52
+ );
53
+ assert.ok(
54
+ taskNames.some((n) => n.startsWith("DeployStaging/")),
55
+ "expected a DeployStaging/* task",
56
+ );
57
+ assert.ok(
58
+ taskNames.some((n) => n.startsWith("DeployProduction/")),
59
+ "expected a DeployProduction/* task",
60
+ );
61
+ });
62
+
63
+ it("captures pool vmImage as a component", () => {
64
+ const f = path.join(repoRoot, "test", "data", "azure-pipelines.yml");
65
+ const result = azurePipelinesParser.parse([f], {});
66
+
67
+ const compNames = result.components.map((c) => c.name);
68
+ assert.ok(
69
+ compNames.includes("ubuntu-latest"),
70
+ "expected ubuntu-latest component",
71
+ );
72
+ });
73
+
74
+ it("records trigger branches in workflow properties", () => {
75
+ const f = path.join(repoRoot, "test", "data", "azure-pipelines.yml");
76
+ const result = azurePipelinesParser.parse([f], {});
77
+
78
+ const props = result.workflows[0].properties || [];
79
+ const triggerProp = props.find(
80
+ (p) => p.name === "cdx:azure:trigger:branches",
81
+ );
82
+ assert.ok(triggerProp, "expected trigger branches property");
83
+ assert.ok(triggerProp.value.includes("main"));
84
+ });
85
+
86
+ it("produces workflow dependency links", () => {
87
+ const f = path.join(repoRoot, "test", "data", "azure-pipelines.yml");
88
+ const result = azurePipelinesParser.parse([f], {});
89
+
90
+ assert.ok(result.dependencies.length > 0);
91
+ const wfDep = result.dependencies.find(
92
+ (d) => d.ref === result.workflows[0]["bom-ref"],
93
+ );
94
+ assert.ok(wfDep);
95
+ assert.ok(wfDep.dependsOn.length > 0);
96
+ });
97
+
98
+ it("gracefully handles missing file", () => {
99
+ const result = azurePipelinesParser.parse(
100
+ ["/no/such/azure-pipelines.yml"],
101
+ {},
102
+ );
103
+ assert.deepStrictEqual(result.workflows, []);
104
+ assert.deepStrictEqual(result.components, []);
105
+ });
106
+
107
+ it("skips files that do not look like Azure Pipelines", () => {
108
+ // GitLab CI config has no `pool`, `stages` (in Azure sense), etc.
109
+ // But it does have `stages`, so let's use the CircleCI config which has `version` but no pool
110
+ const f = path.join(repoRoot, "test", "data", "circleci-config.yml");
111
+ const result = azurePipelinesParser.parse([f], {});
112
+ // CircleCI config triggers (orbs/executors) don't match Azure heuristic robustly,
113
+ // so we just verify no exception is thrown and a result is returned
114
+ assert.ok(Array.isArray(result.workflows));
115
+ assert.ok(Array.isArray(result.components));
116
+ });
117
+
118
+ it("parses azure-pipelines-flat.yml: flat jobs (no stages) extracted as tasks", () => {
119
+ const f = path.join(repoRoot, "test", "data", "azure-pipelines-flat.yml");
120
+ const result = azurePipelinesParser.parse([f], {});
121
+
122
+ assert.strictEqual(result.workflows.length, 1);
123
+ const taskNames = result.workflows[0].tasks.map((t) => t.name);
124
+ assert.ok(taskNames.includes("Lint"), "expected Lint job");
125
+ assert.ok(taskNames.includes("UnitTests"), "expected UnitTests job");
126
+ assert.ok(
127
+ taskNames.includes("IntegrationTests"),
128
+ "expected IntegrationTests job",
129
+ );
130
+ assert.ok(taskNames.includes("SecurityScan"), "expected SecurityScan job");
131
+ });
132
+
133
+ it("parses azure-pipelines-flat.yml: trigger branches recorded in workflow properties", () => {
134
+ const f = path.join(repoRoot, "test", "data", "azure-pipelines-flat.yml");
135
+ const result = azurePipelinesParser.parse([f], {});
136
+
137
+ const triggerProp = result.workflows[0].properties.find(
138
+ (p) => p.name === "cdx:azure:trigger:branches",
139
+ );
140
+ assert.ok(triggerProp, "expected cdx:azure:trigger:branches property");
141
+ assert.ok(
142
+ triggerProp.value.includes("main"),
143
+ "trigger branches must include main",
144
+ );
145
+ assert.ok(
146
+ triggerProp.value.includes("develop"),
147
+ "trigger branches must include develop",
148
+ );
149
+ });
150
+
151
+ it("parses azure-pipelines-flat.yml: job-level properties recorded", () => {
152
+ const f = path.join(repoRoot, "test", "data", "azure-pipelines-flat.yml");
153
+ const result = azurePipelinesParser.parse([f], {});
154
+
155
+ const lintTask = result.workflows[0].tasks.find((t) => t.name === "Lint");
156
+ assert.ok(lintTask, "Lint task must exist");
157
+ const jobNameProp = lintTask.properties.find(
158
+ (p) => p.name === "cdx:azure:job:name",
159
+ );
160
+ assert.ok(jobNameProp, "expected cdx:azure:job:name property on Lint task");
161
+ assert.strictEqual(jobNameProp.value, "Lint");
162
+ });
163
+
164
+ it("parses azure-pipelines-matrix.yml: multi-stage pipeline extracted", () => {
165
+ const f = path.join(repoRoot, "test", "data", "azure-pipelines-matrix.yml");
166
+ const result = azurePipelinesParser.parse([f], {});
167
+
168
+ // Each stage+job becomes a flattened task named "StageName/JobName".
169
+ const taskNames = result.workflows[0].tasks.map((t) => t.name);
170
+ assert.ok(
171
+ taskNames.some((n) => n.startsWith("Validate/")),
172
+ "expected Validate/* task",
173
+ );
174
+ assert.ok(
175
+ taskNames.some((n) => n.startsWith("Test/")),
176
+ "expected Test/* task",
177
+ );
178
+ assert.ok(
179
+ taskNames.some((n) => n.startsWith("Build/")),
180
+ "expected Build/* task",
181
+ );
182
+ assert.ok(
183
+ taskNames.some((n) => n.startsWith("DeployStaging/")),
184
+ "expected DeployStaging/* task",
185
+ );
186
+ assert.ok(
187
+ taskNames.some((n) => n.startsWith("DeployProduction/")),
188
+ "expected DeployProduction/* task",
189
+ );
190
+ });
191
+
192
+ it("parses azure-pipelines-matrix.yml: stage dependsOn recorded in properties", () => {
193
+ const f = path.join(repoRoot, "test", "data", "azure-pipelines-matrix.yml");
194
+ const result = azurePipelinesParser.parse([f], {});
195
+
196
+ // The Test stage depends on Validate; all Test/* tasks carry that property.
197
+ const testTask = result.workflows[0].tasks.find((t) =>
198
+ t.name.startsWith("Test/"),
199
+ );
200
+ assert.ok(testTask, "Test/* task must exist");
201
+ const depProp = testTask.properties.find(
202
+ (p) => p.name === "cdx:azure:stage:dependsOn",
203
+ );
204
+ assert.ok(depProp, "expected cdx:azure:stage:dependsOn property");
205
+ assert.ok(
206
+ depProp.value.includes("Validate"),
207
+ "dependsOn must reference Validate",
208
+ );
209
+ });
210
+
211
+ it("parses azure-pipelines-matrix.yml: trigger branches include release/* pattern", () => {
212
+ const f = path.join(repoRoot, "test", "data", "azure-pipelines-matrix.yml");
213
+ const result = azurePipelinesParser.parse([f], {});
214
+
215
+ const triggerProp = result.workflows[0].properties.find(
216
+ (p) => p.name === "cdx:azure:trigger:branches",
217
+ );
218
+ assert.ok(triggerProp, "expected cdx:azure:trigger:branches property");
219
+ assert.ok(triggerProp.value.includes("main"), "must include main");
220
+ assert.ok(
221
+ triggerProp.value.includes("release/*"),
222
+ "must include release/* branch pattern",
223
+ );
224
+ });
225
+
226
+ it("parses azure-pipelines-matrix.yml: ubuntu-latest pool images captured as components", () => {
227
+ const f = path.join(repoRoot, "test", "data", "azure-pipelines-matrix.yml");
228
+ const result = azurePipelinesParser.parse([f], {});
229
+
230
+ const platformComps = result.components.filter(
231
+ (c) => c.type === "platform",
232
+ );
233
+ assert.ok(
234
+ platformComps.length > 0,
235
+ "expected at least one ubuntu-latest platform component",
236
+ );
237
+ assert.ok(
238
+ platformComps.some((c) => c.name === "ubuntu-latest"),
239
+ "expected ubuntu-latest component",
240
+ );
241
+ });
242
+
243
+ it("parses multiple Azure Pipelines files: two files produce two workflows", () => {
244
+ const f1 = path.join(repoRoot, "test", "data", "azure-pipelines.yml");
245
+ const f2 = path.join(repoRoot, "test", "data", "azure-pipelines-flat.yml");
246
+ const result = azurePipelinesParser.parse([f1, f2], {});
247
+ assert.strictEqual(
248
+ result.workflows.length,
249
+ 2,
250
+ "expected two workflows for two files",
251
+ );
252
+ });
253
+ });