@cyclonedx/cdxgen 12.1.5 → 12.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +51 -40
- package/bin/cdxgen.js +194 -97
- package/bin/evinse.js +4 -4
- package/bin/repl.js +1 -1
- package/bin/sign.js +102 -0
- package/bin/validate.js +233 -0
- package/bin/verify.js +69 -28
- package/data/queries.json +1 -1
- package/data/rules/ci-permissions.yaml +186 -0
- package/data/rules/dependency-sources.yaml +123 -0
- package/data/rules/package-integrity.yaml +135 -0
- package/data/rules/vscode-extensions.yaml +228 -0
- package/lib/cli/index.js +449 -429
- package/lib/cli/index.poku.js +117 -0
- package/lib/evinser/db.js +137 -0
- package/lib/{helpers → evinser}/db.poku.js +2 -6
- package/lib/evinser/evinser.js +2 -14
- package/lib/helpers/analyzer.js +606 -3
- package/lib/helpers/analyzer.poku.js +230 -0
- package/lib/helpers/bomSigner.js +312 -0
- package/lib/helpers/bomSigner.poku.js +156 -0
- package/lib/helpers/ciParsers/azurePipelines.js +295 -0
- package/lib/helpers/ciParsers/azurePipelines.poku.js +253 -0
- package/lib/helpers/ciParsers/circleCi.js +286 -0
- package/lib/helpers/ciParsers/circleCi.poku.js +230 -0
- package/lib/helpers/ciParsers/common.js +24 -0
- package/lib/helpers/ciParsers/githubActions.js +636 -0
- package/lib/helpers/ciParsers/githubActions.poku.js +802 -0
- package/lib/helpers/ciParsers/gitlabCi.js +213 -0
- package/lib/helpers/ciParsers/gitlabCi.poku.js +247 -0
- package/lib/helpers/ciParsers/jenkins.js +181 -0
- package/lib/helpers/ciParsers/jenkins.poku.js +197 -0
- package/lib/helpers/depsUtils.js +219 -0
- package/lib/helpers/depsUtils.poku.js +207 -0
- package/lib/helpers/display.js +426 -5
- package/lib/helpers/envcontext.js +18 -3
- package/lib/helpers/formulationParsers.js +351 -0
- package/lib/helpers/logger.js +14 -0
- package/lib/helpers/protobom.js +9 -9
- package/lib/helpers/pythonutils.js +9 -0
- package/lib/helpers/remote/dependency-track.js +84 -0
- package/lib/helpers/remote/dependency-track.poku.js +119 -0
- package/lib/helpers/table.js +384 -0
- package/lib/helpers/table.poku.js +186 -0
- package/lib/helpers/utils.js +865 -416
- package/lib/helpers/utils.poku.js +172 -265
- package/lib/helpers/versutils.js +202 -0
- package/lib/helpers/versutils.poku.js +315 -0
- package/lib/helpers/vsixutils.js +1061 -0
- package/lib/helpers/vsixutils.poku.js +2247 -0
- package/lib/managers/binary.js +19 -19
- package/lib/managers/docker.js +108 -1
- package/lib/managers/oci.js +10 -0
- package/lib/managers/piptree.js +3 -9
- package/lib/parsers/npmrc.js +17 -13
- package/lib/parsers/npmrc.poku.js +41 -5
- package/lib/server/openapi.yaml +34 -1
- package/lib/server/server.js +50 -13
- package/lib/server/server.poku.js +332 -144
- package/lib/stages/postgen/annotator.js +1 -1
- package/lib/stages/postgen/auditBom.js +196 -0
- package/lib/stages/postgen/auditBom.poku.js +378 -0
- package/lib/stages/postgen/postgen.js +54 -1
- package/lib/stages/postgen/postgen.poku.js +90 -1
- package/lib/stages/postgen/ruleEngine.js +369 -0
- package/lib/stages/pregen/envAudit.js +299 -0
- package/lib/stages/pregen/envAudit.poku.js +572 -0
- package/lib/stages/pregen/pregen.js +12 -8
- package/lib/{helpers/validator.js → validator/bomValidator.js} +107 -47
- package/lib/validator/complianceEngine.js +241 -0
- package/lib/validator/complianceEngine.poku.js +168 -0
- package/lib/validator/complianceRules.js +1610 -0
- package/lib/validator/complianceRules.poku.js +328 -0
- package/lib/validator/index.js +222 -0
- package/lib/validator/index.poku.js +144 -0
- package/lib/validator/reporters/annotations.js +121 -0
- package/lib/validator/reporters/console.js +149 -0
- package/lib/validator/reporters/index.js +41 -0
- package/lib/validator/reporters/json.js +37 -0
- package/lib/validator/reporters/sarif.js +184 -0
- package/lib/validator/reporters.poku.js +150 -0
- package/package.json +8 -9
- package/types/bin/sign.d.ts +3 -0
- package/types/bin/sign.d.ts.map +1 -0
- package/types/bin/validate.d.ts +3 -0
- package/types/bin/validate.d.ts.map +1 -0
- package/types/helpers/utils.d.ts +0 -1
- package/types/lib/cli/index.d.ts +49 -52
- package/types/lib/cli/index.d.ts.map +1 -1
- package/types/lib/evinser/db.d.ts +34 -0
- package/types/lib/evinser/db.d.ts.map +1 -0
- package/types/lib/evinser/evinser.d.ts +63 -16
- package/types/lib/evinser/evinser.d.ts.map +1 -1
- package/types/lib/helpers/analyzer.d.ts.map +1 -1
- package/types/lib/helpers/bomSigner.d.ts +27 -0
- package/types/lib/helpers/bomSigner.d.ts.map +1 -0
- package/types/lib/helpers/ciParsers/azurePipelines.d.ts +17 -0
- package/types/lib/helpers/ciParsers/azurePipelines.d.ts.map +1 -0
- package/types/lib/helpers/ciParsers/circleCi.d.ts +17 -0
- package/types/lib/helpers/ciParsers/circleCi.d.ts.map +1 -0
- package/types/lib/helpers/ciParsers/common.d.ts +11 -0
- package/types/lib/helpers/ciParsers/common.d.ts.map +1 -0
- package/types/lib/helpers/ciParsers/githubActions.d.ts +34 -0
- package/types/lib/helpers/ciParsers/githubActions.d.ts.map +1 -0
- package/types/lib/helpers/ciParsers/gitlabCi.d.ts +17 -0
- package/types/lib/helpers/ciParsers/gitlabCi.d.ts.map +1 -0
- package/types/lib/helpers/ciParsers/jenkins.d.ts +17 -0
- package/types/lib/helpers/ciParsers/jenkins.d.ts.map +1 -0
- package/types/lib/helpers/depsUtils.d.ts +21 -0
- package/types/lib/helpers/depsUtils.d.ts.map +1 -0
- package/types/lib/helpers/display.d.ts +111 -11
- package/types/lib/helpers/display.d.ts.map +1 -1
- package/types/lib/helpers/envcontext.d.ts +19 -7
- package/types/lib/helpers/envcontext.d.ts.map +1 -1
- package/types/lib/helpers/formulationParsers.d.ts +50 -0
- package/types/lib/helpers/formulationParsers.d.ts.map +1 -0
- package/types/lib/helpers/logger.d.ts +15 -1
- package/types/lib/helpers/logger.d.ts.map +1 -1
- package/types/lib/helpers/protobom.d.ts +2 -2
- package/types/lib/helpers/pythonutils.d.ts +10 -1
- package/types/lib/helpers/pythonutils.d.ts.map +1 -1
- package/types/lib/helpers/remote/dependency-track.d.ts +16 -0
- package/types/lib/helpers/remote/dependency-track.d.ts.map +1 -0
- package/types/lib/helpers/table.d.ts +6 -0
- package/types/lib/helpers/table.d.ts.map +1 -0
- package/types/lib/helpers/utils.d.ts +533 -128
- package/types/lib/helpers/utils.d.ts.map +1 -1
- package/types/lib/helpers/versutils.d.ts +8 -0
- package/types/lib/helpers/versutils.d.ts.map +1 -0
- package/types/lib/helpers/vsixutils.d.ts +130 -0
- package/types/lib/helpers/vsixutils.d.ts.map +1 -0
- package/types/lib/managers/docker.d.ts +12 -31
- package/types/lib/managers/docker.d.ts.map +1 -1
- package/types/lib/managers/oci.d.ts +11 -1
- package/types/lib/managers/oci.d.ts.map +1 -1
- package/types/lib/managers/piptree.d.ts.map +1 -1
- package/types/lib/parsers/npmrc.d.ts +4 -1
- package/types/lib/parsers/npmrc.d.ts.map +1 -1
- package/types/lib/server/server.d.ts +22 -2
- package/types/lib/server/server.d.ts.map +1 -1
- package/types/lib/stages/postgen/auditBom.d.ts +20 -0
- package/types/lib/stages/postgen/auditBom.d.ts.map +1 -0
- package/types/lib/stages/postgen/postgen.d.ts +8 -1
- package/types/lib/stages/postgen/postgen.d.ts.map +1 -1
- package/types/lib/stages/postgen/ruleEngine.d.ts +18 -0
- package/types/lib/stages/postgen/ruleEngine.d.ts.map +1 -0
- package/types/lib/stages/pregen/envAudit.d.ts +8 -0
- package/types/lib/stages/pregen/envAudit.d.ts.map +1 -0
- package/types/lib/stages/pregen/pregen.d.ts.map +1 -1
- package/types/lib/{helpers/validator.d.ts → validator/bomValidator.d.ts} +1 -1
- package/types/lib/validator/bomValidator.d.ts.map +1 -0
- package/types/lib/validator/complianceEngine.d.ts +66 -0
- package/types/lib/validator/complianceEngine.d.ts.map +1 -0
- package/types/lib/validator/complianceRules.d.ts +70 -0
- package/types/lib/validator/complianceRules.d.ts.map +1 -0
- package/types/lib/validator/index.d.ts +70 -0
- package/types/lib/validator/index.d.ts.map +1 -0
- package/types/lib/validator/reporters/annotations.d.ts +31 -0
- package/types/lib/validator/reporters/annotations.d.ts.map +1 -0
- package/types/lib/validator/reporters/console.d.ts +30 -0
- package/types/lib/validator/reporters/console.d.ts.map +1 -0
- package/types/lib/validator/reporters/index.d.ts +21 -0
- package/types/lib/validator/reporters/index.d.ts.map +1 -0
- package/types/lib/validator/reporters/json.d.ts +11 -0
- package/types/lib/validator/reporters/json.d.ts.map +1 -0
- package/types/lib/validator/reporters/sarif.d.ts +16 -0
- package/types/lib/validator/reporters/sarif.d.ts.map +1 -0
- package/lib/helpers/db.js +0 -162
- package/lib/stages/pregen/env-audit.js +0 -34
- package/lib/stages/pregen/env-audit.poku.js +0 -290
- package/types/helpers/db.d.ts +0 -35
- package/types/helpers/db.d.ts.map +0 -1
- package/types/lib/helpers/db.d.ts +0 -35
- package/types/lib/helpers/db.d.ts.map +0 -1
- package/types/lib/helpers/validator.d.ts.map +0 -1
- package/types/lib/stages/pregen/env-audit.d.ts +0 -2
- package/types/lib/stages/pregen/env-audit.d.ts.map +0 -1
- package/types/managers/binary.d.ts +0 -37
- package/types/managers/binary.d.ts.map +0 -1
- package/types/managers/docker.d.ts +0 -56
- package/types/managers/docker.d.ts.map +0 -1
- package/types/managers/oci.d.ts +0 -2
- package/types/managers/oci.d.ts.map +0 -1
- package/types/managers/piptree.d.ts +0 -2
- package/types/managers/piptree.d.ts.map +0 -1
- package/types/server/server.d.ts +0 -34
- package/types/server/server.d.ts.map +0 -1
- package/types/stages/postgen/annotator.d.ts +0 -27
- package/types/stages/postgen/annotator.d.ts.map +0 -1
- package/types/stages/postgen/postgen.d.ts +0 -51
- package/types/stages/postgen/postgen.d.ts.map +0 -1
- package/types/stages/pregen/pregen.d.ts +0 -59
- package/types/stages/pregen/pregen.d.ts.map +0 -1
|
@@ -0,0 +1,197 @@
|
|
|
1
|
+
// Poku test suite for the Jenkins declarative-pipeline CI parser.
// Exercises jenkinsParser against Jenkinsfile fixtures under test/data:
// a docker-agent pipeline (Jenkinsfile), an `agent any` pipeline
// (Jenkinsfile.agentany), and a multi-platform pipeline
// (Jenkinsfile.multiplatform).
import path from "node:path";
import { fileURLToPath } from "node:url";

import { assert, describe, it } from "poku";

import { jenkinsParser } from "./jenkins.js";

// Resolve the repository root (three levels up from lib/helpers/ciParsers)
// so fixture paths work regardless of the current working directory.
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const repoRoot = path.resolve(__dirname, "../../..");

describe("jenkinsParser", () => {
  it("has correct metadata", () => {
    // Parser contract: an id, glob patterns, and a parse(files, options) fn.
    assert.strictEqual(jenkinsParser.id, "jenkins");
    assert.ok(Array.isArray(jenkinsParser.patterns));
    assert.ok(jenkinsParser.patterns.length > 0);
    assert.strictEqual(typeof jenkinsParser.parse, "function");
  });

  it("returns empty arrays for no files", () => {
    // Empty input must yield the full (empty) result shape, not undefined.
    const result = jenkinsParser.parse([], {});
    assert.deepStrictEqual(result.workflows, []);
    assert.deepStrictEqual(result.components, []);
    assert.deepStrictEqual(result.services, []);
    assert.deepStrictEqual(result.properties, []);
    assert.deepStrictEqual(result.dependencies, []);
  });

  it("parses the Jenkinsfile fixture", () => {
    const f = path.join(repoRoot, "test", "data", "Jenkinsfile");
    const result = jenkinsParser.parse([f], {});

    assert.ok(Array.isArray(result.workflows));
    assert.strictEqual(result.workflows.length, 1, "expected one workflow");

    const wf = result.workflows[0];
    assert.ok(wf["bom-ref"]);
    assert.strictEqual(wf.name, "Jenkinsfile Pipeline");
    assert.ok(Array.isArray(wf.tasks));
    assert.ok(wf.tasks.length > 0, "expected at least one task (stage)");

    // Each pipeline stage is surfaced as one task on the workflow.
    const stageNames = wf.tasks.map((t) => t.name);
    assert.ok(stageNames.includes("Install"), "expected Install stage");
    assert.ok(stageNames.includes("Build"), "expected Build stage");
    assert.ok(stageNames.includes("Test"), "expected Test stage");
  });

  it("captures docker agent image as a component", () => {
    const f = path.join(repoRoot, "test", "data", "Jenkinsfile");
    const result = jenkinsParser.parse([f], {});
    // The fixture's docker agent image should surface as a component.
    const compNames = result.components.map((c) => c.name);
    assert.ok(
      compNames.some((n) => n.includes("node")),
      "expected node docker image component",
    );
  });

  it("produces workflow dependency links", () => {
    const f = path.join(repoRoot, "test", "data", "Jenkinsfile");
    const result = jenkinsParser.parse([f], {});

    assert.ok(result.dependencies.length > 0);
    // The workflow's bom-ref should appear as a dependency ref pointing
    // at its stages/components.
    const wfDep = result.dependencies.find(
      (d) => d.ref === result.workflows[0]["bom-ref"],
    );
    assert.ok(wfDep);
    assert.ok(wfDep.dependsOn.length > 0);
  });

  it("gracefully handles non-declarative content", () => {
    const f = path.join(repoRoot, "test", "data", "gitlab-ci.yml");
    const result = jenkinsParser.parse([f], {});
    // .gitlab-ci.yml is not a Jenkinsfile → empty result
    assert.deepStrictEqual(result.workflows, []);
  });

  it("gracefully handles missing file", () => {
    // A nonexistent path must not throw; it simply produces no workflows.
    const result = jenkinsParser.parse(["/no/such/Jenkinsfile"], {});
    assert.deepStrictEqual(result.workflows, []);
  });

  it("parses Jenkinsfile.agentany: agent any with no Docker image", () => {
    const f = path.join(repoRoot, "test", "data", "Jenkinsfile.agentany");
    const result = jenkinsParser.parse([f], {});

    assert.strictEqual(result.workflows.length, 1);
    // agent any → no container component
    assert.strictEqual(
      result.components.length,
      0,
      "no Docker component expected for agent any",
    );

    // agent property should record 'any'
    const agentProp = result.workflows[0].properties.find(
      (p) => p.name === "cdx:jenkins:agent",
    );
    assert.ok(agentProp, "expected cdx:jenkins:agent property");
    assert.strictEqual(agentProp.value, "any", "agent value should be 'any'");
  });

  it("parses Jenkinsfile.agentany: all expected stages present", () => {
    const f = path.join(repoRoot, "test", "data", "Jenkinsfile.agentany");
    const result = jenkinsParser.parse([f], {});

    const stageNames = result.workflows[0].tasks.map((t) => t.name);
    assert.ok(stageNames.includes("Checkout"), "expected Checkout stage");
    assert.ok(stageNames.includes("Compile"), "expected Compile stage");
    assert.ok(stageNames.includes("Unit Tests"), "expected Unit Tests stage");
    assert.ok(
      stageNames.includes("Integration Tests"),
      "expected Integration Tests stage",
    );
    assert.ok(stageNames.includes("Package"), "expected Package stage");
    assert.ok(stageNames.includes("Deploy"), "expected Deploy stage");
  });

  it("parses Jenkinsfile.agentany: `when` condition captured", () => {
    const f = path.join(repoRoot, "test", "data", "Jenkinsfile.agentany");
    const result = jenkinsParser.parse([f], {});

    // A stage's `when { ... }` clause should be recorded as a task property.
    const integTask = result.workflows[0].tasks.find(
      (t) => t.name === "Integration Tests",
    );
    assert.ok(integTask, "Integration Tests task must exist");
    const whenProp = integTask.properties.find(
      (p) => p.name === "cdx:jenkins:stage:when",
    );
    assert.ok(whenProp, "expected cdx:jenkins:stage:when property");
    assert.ok(
      whenProp.value.includes("RUN_INTEGRATION_TESTS"),
      "when must include param check",
    );
  });

  it("parses Jenkinsfile.agentany: parallel stage detected", () => {
    const f = path.join(repoRoot, "test", "data", "Jenkinsfile.agentany");
    const result = jenkinsParser.parse([f], {});

    // Stages nested under `parallel { ... }` are flagged with a property.
    const parallelTask = result.workflows[0].tasks.find(
      (t) => t.name === "Code Analysis",
    );
    assert.ok(parallelTask, "Code Analysis task must exist");
    const parallelProp = parallelTask.properties.find(
      (p) => p.name === "cdx:jenkins:stage:parallel",
    );
    assert.ok(parallelProp, "expected cdx:jenkins:stage:parallel property");
    assert.strictEqual(parallelProp.value, "true");
  });

  it("parses Jenkinsfile.multiplatform: per-stage Docker agents extracted", () => {
    const f = path.join(repoRoot, "test", "data", "Jenkinsfile.multiplatform");
    const result = jenkinsParser.parse([f], {});

    assert.strictEqual(result.workflows.length, 1);
    const stageNames = result.workflows[0].tasks.map((t) => t.name);
    assert.ok(stageNames.includes("Build Linux"), "expected Build Linux stage");
    assert.ok(
      stageNames.includes("Build Windows"),
      "expected Build Windows stage",
    );
    assert.ok(stageNames.includes("Build macOS"), "expected Build macOS stage");
    assert.ok(stageNames.includes("Package"), "expected Package stage");
    assert.ok(stageNames.includes("Release"), "expected Release stage");

    // Build Linux uses golang:1.22-bookworm docker image
    const compNames = result.components.map((c) => c.name);
    assert.ok(
      compNames.some((n) => n.includes("golang")),
      "expected golang Docker image component from Build Linux stage",
    );
  });

  it("parses Jenkinsfile.multiplatform: bat step in Windows stage captured", () => {
    const f = path.join(repoRoot, "test", "data", "Jenkinsfile.multiplatform");
    const result = jenkinsParser.parse([f], {});

    const winTask = result.workflows[0].tasks.find(
      (t) => t.name === "Build Windows",
    );
    assert.ok(winTask, "Build Windows task must exist");

    // bat steps should be captured as steps
    // NOTE(review): conditional because step extraction appears optional for
    // bat — confirm against jenkins.js whether steps are always populated.
    if (winTask.steps && winTask.steps.length > 0) {
      const batStep = winTask.steps.find((s) =>
        s.commands?.[0]?.executed?.includes("go"),
      );
      assert.ok(batStep, "expected a step with go command from bat");
    }
  });

  it("parses multiple Jenkinsfiles: two files produce two workflows", () => {
    const f1 = path.join(repoRoot, "test", "data", "Jenkinsfile");
    const f2 = path.join(repoRoot, "test", "data", "Jenkinsfile.agentany");
    const result = jenkinsParser.parse([f1, f2], {});
    assert.strictEqual(result.workflows.length, 2, "expected two workflows");
  });
});
|
|
@@ -0,0 +1,219 @@
|
|
|
1
|
+
import { DEBUG_MODE } from "./utils.js";
|
|
2
|
+
|
|
3
|
+
/**
 * Merges two CycloneDX dependency arrays into a single deduplicated list.
 * For each unique ref, the dependsOn and provides sets from both arrays are
 * combined (sorted, with falsy entries dropped). Self-referential entries
 * pointing to the parent component's bom-ref are removed from all dependsOn
 * and provides lists. The `provides` attribute is emitted only when at least
 * one input entry carried it.
 *
 * @param {Object[]} dependencies First array of dependency objects
 * @param {Object[]} newDependencies Second array of dependency objects to merge
 * @param {Object} parentComponent Parent component whose bom-ref is used to filter self-references
 * @returns {Object[]} Merged and deduplicated array of dependency objects
 */
export function mergeDependencies(
  dependencies,
  newDependencies,
  parentComponent = {},
) {
  if (!parentComponent && DEBUG_MODE) {
    console.log(
      "Unable to determine parent component. Dependencies will be flattened.",
    );
  }
  // Accumulate per-ref dependsOn / provides entries as Sets for dedup.
  const dependsSets = {};
  const providesSets = {};
  let hasProvides = false;
  const parentRef = parentComponent?.["bom-ref"] || undefined;
  // Lowercased once so the self-reference comparison stays case-insensitive.
  const lowerParent = parentRef ? parentRef.toLowerCase() : undefined;
  const allDeps = dependencies.concat(newDependencies || []);
  for (const entry of allDeps) {
    const dependsSet =
      dependsSets[entry.ref] || (dependsSets[entry.ref] = new Set());
    const providesSet =
      providesSets[entry.ref] || (providesSets[entry.ref] = new Set());
    if (entry.dependsOn) {
      for (const target of entry.dependsOn) {
        // Drop falsy entries and self-references to the parent.
        if (!target) {
          continue;
        }
        if (lowerParent && target.toLowerCase() === lowerParent) {
          continue;
        }
        dependsSet.add(target);
      }
    }
    if (entry.provides) {
      hasProvides = true;
      for (const target of entry.provides) {
        // Keep everything when there is no parent ref; otherwise keep only
        // entries that differ (case-insensitively) from the parent.
        if (!lowerParent || target?.toLowerCase() !== lowerParent) {
          providesSet.add(target);
        }
      }
    }
  }
  // Flatten back into sorted arrays; include `provides` only if seen anywhere.
  return Object.keys(dependsSets).map((ref) =>
    hasProvides
      ? {
          ref,
          dependsOn: Array.from(dependsSets[ref]).sort(),
          provides: Array.from(providesSets[ref]).sort(),
        }
      : {
          ref,
          dependsOn: Array.from(dependsSets[ref]).sort(),
        },
  );
}
|
|
84
|
+
|
|
85
|
+
/**
 * Trim duplicate components by retaining all the properties.
 *
 * Components are duplicates when they share the same case-insensitive key:
 * purl, falling back to bom-ref, then name+version. For duplicates, the
 * properties, hashes, and evidence.identity entries are merged into the
 * first occurrence, and scope is promoted to "required" when any duplicate
 * requires it.
 *
 * @param {Array} components Components
 *
 * @returns {Array} Filtered components
 */
export function trimComponents(components) {
  const keyCache = {};
  const filteredComponents = [];
  for (const comp of components) {
    // Dedupe key: purl preferred, then bom-ref, then name+version.
    const key = (
      comp.purl ||
      comp["bom-ref"] ||
      comp.name + comp.version
    ).toLowerCase();
    if (!keyCache[key]) {
      keyCache[key] = comp;
    } else {
      const existingComponent = keyCache[key];
      // We need to retain any properties that differ
      if (comp.properties) {
        if (existingComponent.properties) {
          for (const newprop of comp.properties) {
            if (
              !existingComponent.properties.find(
                (prop) =>
                  prop.name === newprop.name && prop.value === newprop.value,
              )
            ) {
              existingComponent.properties.push(newprop);
            }
          }
        } else {
          existingComponent.properties = comp.properties;
        }
      }
      // Merge hashes, skipping exact alg+content duplicates
      if (comp.hashes) {
        if (existingComponent.hashes) {
          for (const newhash of comp.hashes) {
            if (
              !existingComponent.hashes.find(
                (hash) =>
                  hash.alg === newhash.alg && hash.content === newhash.content,
              )
            ) {
              existingComponent.hashes.push(newhash);
            }
          }
        } else {
          existingComponent.hashes = comp.hashes;
        }
      }
      // Retain all component.evidence.identity
      if (comp?.evidence?.identity) {
        // Normalize the existing evidence.identity into array form first.
        if (!existingComponent.evidence) {
          existingComponent.evidence = { identity: [] };
        } else if (!existingComponent?.evidence?.identity) {
          existingComponent.evidence.identity = [];
        } else if (
          existingComponent?.evidence?.identity &&
          !Array.isArray(existingComponent.evidence.identity)
        ) {
          existingComponent.evidence.identity = [
            existingComponent.evidence.identity,
          ];
        }
        // comp.evidence.identity can be an array or object
        // Merge the evidence.identity based on methods or objects
        const isIdentityArray = Array.isArray(comp.evidence.identity);
        const identities = isIdentityArray
          ? comp.evidence.identity
          : [comp.evidence.identity];
        for (const aident of identities) {
          let methodBasedMerge = false;
          if (aident?.methods?.length) {
            for (const amethod of aident.methods) {
              for (const existIdent of existingComponent.evidence.identity) {
                // Same field → merge method lists, skipping duplicate values.
                if (existIdent.field === aident.field) {
                  if (!existIdent.methods) {
                    existIdent.methods = [];
                  }
                  let isDup = false;
                  for (const emethod of existIdent.methods) {
                    if (emethod?.value === amethod?.value) {
                      isDup = true;
                      break;
                    }
                  }
                  if (!isDup) {
                    existIdent.methods.push(amethod);
                  }
                  methodBasedMerge = true;
                }
              }
            }
          }
          // Only well-formed identities (field + confidence) get appended.
          if (!methodBasedMerge && aident.field && aident.confidence) {
            existingComponent.evidence.identity.push(aident);
          }
        }
        if (!isIdentityArray) {
          const firstIdentity = existingComponent.evidence.identity[0];
          // BUGFIX: the identity list can be empty here when the incoming
          // object-form identity lacked field/confidence; previously
          // `firstIdentity.confidence = ...` dereferenced undefined and threw.
          if (firstIdentity) {
            let identConfidence = firstIdentity?.confidence;
            // We need to set the confidence to the max of all confidences
            if (firstIdentity?.methods?.length > 1) {
              for (const aidentMethod of firstIdentity.methods) {
                if (
                  aidentMethod?.confidence &&
                  aidentMethod.confidence > identConfidence
                ) {
                  identConfidence = aidentMethod.confidence;
                }
              }
            }
            firstIdentity.confidence = identConfidence;
            // Collapse back to object form to match the incoming shape.
            existingComponent.evidence = {
              identity: firstIdentity,
            };
          }
        }
      }
      // If the component is required in any of the child projects, then make it required
      if (
        existingComponent?.scope !== "required" &&
        comp?.scope === "required"
      ) {
        existingComponent.scope = "required";
      }
    }
  }
  for (const akey of Object.keys(keyCache)) {
    filteredComponents.push(keyCache[akey]);
  }
  return filteredComponents;
}
|
|
@@ -0,0 +1,207 @@
|
|
|
1
|
+
import { assert, describe, it } from "poku";
|
|
2
|
+
|
|
3
|
+
import { mergeDependencies, trimComponents } from "./depsUtils.js";
|
|
4
|
+
|
|
5
|
+
describe("mergeDependencies()", () => {
|
|
6
|
+
it("merges two non-overlapping dependency arrays", () => {
|
|
7
|
+
const a = [{ ref: "pkg:npm/a@1", dependsOn: ["pkg:npm/b@1"] }];
|
|
8
|
+
const b = [{ ref: "pkg:npm/c@1", dependsOn: ["pkg:npm/d@1"] }];
|
|
9
|
+
const result = mergeDependencies(a, b);
|
|
10
|
+
assert.strictEqual(result.length, 2);
|
|
11
|
+
const aEntry = result.find((d) => d.ref === "pkg:npm/a@1");
|
|
12
|
+
assert.ok(aEntry);
|
|
13
|
+
assert.deepStrictEqual(aEntry.dependsOn, ["pkg:npm/b@1"]);
|
|
14
|
+
});
|
|
15
|
+
|
|
16
|
+
it("merges dependsOn sets for the same ref", () => {
|
|
17
|
+
const a = [{ ref: "pkg:npm/a@1", dependsOn: ["pkg:npm/b@1"] }];
|
|
18
|
+
const b = [{ ref: "pkg:npm/a@1", dependsOn: ["pkg:npm/c@1"] }];
|
|
19
|
+
const result = mergeDependencies(a, b);
|
|
20
|
+
assert.strictEqual(result.length, 1);
|
|
21
|
+
const entry = result[0];
|
|
22
|
+
assert.ok(entry.dependsOn.includes("pkg:npm/b@1"));
|
|
23
|
+
assert.ok(entry.dependsOn.includes("pkg:npm/c@1"));
|
|
24
|
+
});
|
|
25
|
+
|
|
26
|
+
it("deduplicates identical dependsOn entries", () => {
|
|
27
|
+
const a = [{ ref: "pkg:npm/a@1", dependsOn: ["pkg:npm/b@1"] }];
|
|
28
|
+
const b = [
|
|
29
|
+
{ ref: "pkg:npm/a@1", dependsOn: ["pkg:npm/b@1", "pkg:npm/c@1"] },
|
|
30
|
+
];
|
|
31
|
+
const result = mergeDependencies(a, b);
|
|
32
|
+
assert.strictEqual(result.length, 1);
|
|
33
|
+
assert.strictEqual(
|
|
34
|
+
result[0].dependsOn.filter((x) => x === "pkg:npm/b@1").length,
|
|
35
|
+
1,
|
|
36
|
+
);
|
|
37
|
+
});
|
|
38
|
+
|
|
39
|
+
it("handles undefined newDependencies gracefully", () => {
|
|
40
|
+
const a = [{ ref: "pkg:npm/a@1", dependsOn: ["pkg:npm/b@1"] }];
|
|
41
|
+
const result = mergeDependencies(a, undefined);
|
|
42
|
+
assert.strictEqual(result.length, 1);
|
|
43
|
+
assert.strictEqual(result[0].ref, "pkg:npm/a@1");
|
|
44
|
+
});
|
|
45
|
+
|
|
46
|
+
it("handles empty arrays", () => {
|
|
47
|
+
assert.deepStrictEqual(mergeDependencies([], []), []);
|
|
48
|
+
assert.deepStrictEqual(mergeDependencies([], undefined), []);
|
|
49
|
+
});
|
|
50
|
+
|
|
51
|
+
it("merges a single dependency object (non-array)", () => {
|
|
52
|
+
const a = [{ ref: "pkg:npm/a@1", dependsOn: ["pkg:npm/b@1"] }];
|
|
53
|
+
const single = { ref: "pkg:npm/c@1", dependsOn: ["pkg:npm/d@1"] };
|
|
54
|
+
const result = mergeDependencies(a, single);
|
|
55
|
+
assert.strictEqual(result.length, 2);
|
|
56
|
+
});
|
|
57
|
+
|
|
58
|
+
it("handles the provides field for OmniBOR / ADG links", () => {
|
|
59
|
+
const a = [
|
|
60
|
+
{
|
|
61
|
+
ref: "gitoid:commit:sha1:abc",
|
|
62
|
+
dependsOn: [],
|
|
63
|
+
provides: ["gitoid:commit:sha1:def"],
|
|
64
|
+
},
|
|
65
|
+
];
|
|
66
|
+
const b = [
|
|
67
|
+
{
|
|
68
|
+
ref: "gitoid:commit:sha1:def",
|
|
69
|
+
provides: ["gitoid:blob:sha1:001", "gitoid:blob:sha1:002"],
|
|
70
|
+
},
|
|
71
|
+
];
|
|
72
|
+
const result = mergeDependencies(a, b);
|
|
73
|
+
assert.ok(
|
|
74
|
+
result.every((d) => Array.isArray(d.provides)),
|
|
75
|
+
"all entries should have provides",
|
|
76
|
+
);
|
|
77
|
+
const defEntry = result.find((d) => d.ref === "gitoid:commit:sha1:def");
|
|
78
|
+
assert.ok(defEntry);
|
|
79
|
+
assert.ok(defEntry.provides.includes("gitoid:blob:sha1:001"));
|
|
80
|
+
assert.ok(defEntry.provides.includes("gitoid:blob:sha1:002"));
|
|
81
|
+
});
|
|
82
|
+
|
|
83
|
+
it("excludes parent component from dependsOn", () => {
|
|
84
|
+
const parentComponent = { "bom-ref": "pkg:npm/myapp@1.0.0" };
|
|
85
|
+
const a = [
|
|
86
|
+
{
|
|
87
|
+
ref: "pkg:npm/a@1",
|
|
88
|
+
dependsOn: ["pkg:npm/myapp@1.0.0", "pkg:npm/b@1"],
|
|
89
|
+
},
|
|
90
|
+
];
|
|
91
|
+
const result = mergeDependencies(a, [], parentComponent);
|
|
92
|
+
const entry = result.find((d) => d.ref === "pkg:npm/a@1");
|
|
93
|
+
assert.ok(
|
|
94
|
+
!entry.dependsOn.includes("pkg:npm/myapp@1.0.0"),
|
|
95
|
+
"parent should be excluded",
|
|
96
|
+
);
|
|
97
|
+
assert.ok(entry.dependsOn.includes("pkg:npm/b@1"));
|
|
98
|
+
});
|
|
99
|
+
|
|
100
|
+
it("merges parser-returned dependencies into BOM dependencies", () => {
  // Dependencies already present in the BOM.
  const existingDeps = [
    { ref: "pkg:npm/app@1", dependsOn: ["pkg:npm/lib@2"] },
  ];
  // Workflow/task refs contributed by a CI parser.
  const ciDeps = [
    {
      ref: "workflow-bom-ref-1",
      dependsOn: ["task-bom-ref-1", "task-bom-ref-2"],
    },
    { ref: "task-bom-ref-1", dependsOn: ["pkg:github/actions/checkout@v4"] },
  ];
  const merged = mergeDependencies(existingDeps, ciDeps);
  // One BOM entry + two distinct parser entries = three refs.
  assert.strictEqual(merged.length, 3);
  const workflowEntry = merged.find((d) => d.ref === "workflow-bom-ref-1");
  assert.ok(workflowEntry);
  for (const taskRef of ["task-bom-ref-1", "task-bom-ref-2"]) {
    assert.ok(workflowEntry.dependsOn.includes(taskRef));
  }
});
it("filters out null and undefined entries from dependsOn", () => {
  // dependsOn deliberately seeded with nullish junk around a valid purl.
  const deps = [
    {
      ref: "pkg:composer/foo/bar",
      dependsOn: [null, undefined, "pkg:composer/vendor/lib@1.0"],
    },
  ];
  const merged = mergeDependencies(deps, []);
  assert.strictEqual(merged.length, 1);
  const [onlyEntry] = merged;
  // Nullish values are dropped, leaving exactly the valid ref.
  assert.ok(!onlyEntry.dependsOn.includes(null), "null must be filtered");
  assert.ok(
    !onlyEntry.dependsOn.includes(undefined),
    "undefined must be filtered",
  );
  assert.deepStrictEqual(onlyEntry.dependsOn, [
    "pkg:composer/vendor/lib@1.0",
  ]);
});
it("filters out null and undefined from dependsOn even with a parentComponent", () => {
  // The ref is itself the parent; nullish filtering must still apply.
  const parent = { "bom-ref": "pkg:composer/foo/bar" };
  const deps = [
    {
      ref: "pkg:composer/foo/bar",
      dependsOn: [null, "pkg:composer/vendor/lib@1.0"],
    },
  ];
  const merged = mergeDependencies(deps, [], parent);
  const entry = merged.find((d) => d.ref === "pkg:composer/foo/bar");
  assert.ok(entry);
  assert.ok(!entry.dependsOn.includes(null), "null must be filtered");
  assert.deepStrictEqual(entry.dependsOn, ["pkg:composer/vendor/lib@1.0"]);
});
});
|
|
151
|
+
|
|
152
|
+
describe("trimComponents()", () => {
  it("retains hashes from duplicate components", () => {
    // Two views of the same jquery purl: only the second carries hashes.
    const bareDuplicate = {
      name: "jquery",
      version: "3.5.1",
      purl: "pkg:npm/jquery@3.5.1",
      type: "library",
      properties: [{ name: "SrcFile", value: "Scripts/jquery.min.js" }],
    };
    const hashedDuplicate = {
      name: "jquery",
      version: "3.5.1",
      purl: "pkg:npm/jquery@3.5.1",
      type: "framework",
      hashes: [{ alg: "SHA-512", content: "abc123" }],
      properties: [{ name: "SrcFile", value: "package-lock.json" }],
    };
    const trimmed = trimComponents([bareDuplicate, hashedDuplicate]);
    // Deduplicated down to one component that keeps the known hash.
    assert.strictEqual(trimmed.length, 1);
    assert.deepStrictEqual(trimmed[0].hashes, [
      { alg: "SHA-512", content: "abc123" },
    ]);
  });

  it("merges and deduplicates hashes from duplicate components", () => {
    // Both duplicates share one SHA-512 hash; the second adds a SHA-256.
    const firstDuplicate = {
      name: "jquery",
      version: "3.5.1",
      purl: "pkg:npm/jquery@3.5.1",
      type: "library",
      hashes: [{ alg: "SHA-512", content: "abc123" }],
      properties: [{ name: "SrcFile", value: "Scripts/jquery.min.js" }],
    };
    const secondDuplicate = {
      name: "jquery",
      version: "3.5.1",
      purl: "pkg:npm/jquery@3.5.1",
      type: "framework",
      hashes: [
        { alg: "SHA-512", content: "abc123" },
        { alg: "SHA-256", content: "def456" },
      ],
      properties: [{ name: "SrcFile", value: "package-lock.json" }],
    };
    const trimmed = trimComponents([firstDuplicate, secondDuplicate]);
    assert.strictEqual(trimmed.length, 1);
    // The union of hashes survives, with the shared SHA-512 kept once.
    assert.deepStrictEqual(trimmed[0].hashes, [
      { alg: "SHA-512", content: "abc123" },
      { alg: "SHA-256", content: "def456" },
    ]);
  });
});
|