@cyclonedx/cdxgen 12.2.1 → 12.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (170)
  1. package/README.md +239 -90
  2. package/bin/audit.js +191 -0
  3. package/bin/cdxgen.js +513 -167
  4. package/bin/convert.js +99 -0
  5. package/bin/evinse.js +23 -0
  6. package/bin/repl.js +339 -8
  7. package/bin/sign.js +8 -0
  8. package/bin/validate.js +8 -0
  9. package/bin/verify.js +8 -0
  10. package/data/container-knowledge-index.json +125 -0
  11. package/data/gtfobins-index.json +6296 -0
  12. package/data/lolbas-index.json +150 -0
  13. package/data/queries-darwin.json +63 -3
  14. package/data/queries-win.json +45 -3
  15. package/data/queries.json +74 -2
  16. package/data/rules/chrome-extensions.yaml +240 -0
  17. package/data/rules/ci-permissions.yaml +478 -18
  18. package/data/rules/container-risk.yaml +270 -0
  19. package/data/rules/obom-runtime.yaml +891 -0
  20. package/data/rules/package-integrity.yaml +49 -0
  21. package/data/spdx-export.schema.json +6794 -0
  22. package/data/spdx-model-v3.0.1.jsonld +15999 -0
  23. package/lib/audit/index.js +1924 -0
  24. package/lib/audit/index.poku.js +1488 -0
  25. package/lib/audit/progress.js +137 -0
  26. package/lib/audit/progress.poku.js +188 -0
  27. package/lib/audit/reporters.js +618 -0
  28. package/lib/audit/scoring.js +310 -0
  29. package/lib/audit/scoring.poku.js +341 -0
  30. package/lib/audit/targets.js +260 -0
  31. package/lib/audit/targets.poku.js +331 -0
  32. package/lib/cli/index.js +154 -11
  33. package/lib/cli/index.poku.js +251 -0
  34. package/lib/helpers/analyzer.js +446 -2
  35. package/lib/helpers/analyzer.poku.js +72 -1
  36. package/lib/helpers/annotationFormatter.js +49 -0
  37. package/lib/helpers/annotationFormatter.poku.js +44 -0
  38. package/lib/helpers/bomUtils.js +36 -0
  39. package/lib/helpers/bomUtils.poku.js +51 -0
  40. package/lib/helpers/caxa.js +2 -2
  41. package/lib/helpers/chromextutils.js +1153 -0
  42. package/lib/helpers/chromextutils.poku.js +493 -0
  43. package/lib/helpers/ciParsers/githubActions.js +1632 -45
  44. package/lib/helpers/ciParsers/githubActions.poku.js +853 -1
  45. package/lib/helpers/containerRisk.js +186 -0
  46. package/lib/helpers/containerRisk.poku.js +52 -0
  47. package/lib/helpers/display.js +241 -59
  48. package/lib/helpers/display.poku.js +162 -2
  49. package/lib/helpers/exportUtils.js +123 -0
  50. package/lib/helpers/exportUtils.poku.js +60 -0
  51. package/lib/helpers/formulationParsers.js +69 -0
  52. package/lib/helpers/formulationParsers.poku.js +44 -0
  53. package/lib/helpers/gtfobins.js +189 -0
  54. package/lib/helpers/gtfobins.poku.js +49 -0
  55. package/lib/helpers/lolbas.js +267 -0
  56. package/lib/helpers/lolbas.poku.js +39 -0
  57. package/lib/helpers/osqueryTransform.js +84 -0
  58. package/lib/helpers/osqueryTransform.poku.js +49 -0
  59. package/lib/helpers/provenanceUtils.js +193 -0
  60. package/lib/helpers/provenanceUtils.poku.js +145 -0
  61. package/lib/helpers/pylockutils.js +281 -0
  62. package/lib/helpers/pylockutils.poku.js +48 -0
  63. package/lib/helpers/registryProvenance.js +793 -0
  64. package/lib/helpers/registryProvenance.poku.js +452 -0
  65. package/lib/helpers/source.js +1267 -0
  66. package/lib/helpers/source.poku.js +771 -0
  67. package/lib/helpers/spdxUtils.js +97 -0
  68. package/lib/helpers/spdxUtils.poku.js +70 -0
  69. package/lib/helpers/unicodeScan.js +147 -0
  70. package/lib/helpers/unicodeScan.poku.js +45 -0
  71. package/lib/helpers/utils.js +700 -128
  72. package/lib/helpers/utils.poku.js +877 -80
  73. package/lib/managers/binary.js +29 -5
  74. package/lib/managers/docker.js +179 -52
  75. package/lib/managers/docker.poku.js +327 -28
  76. package/lib/managers/oci.js +107 -23
  77. package/lib/managers/oci.poku.js +132 -0
  78. package/lib/server/openapi.yaml +17 -0
  79. package/lib/server/server.js +225 -336
  80. package/lib/server/server.poku.js +16 -10
  81. package/lib/stages/postgen/annotator.js +7 -0
  82. package/lib/stages/postgen/annotator.poku.js +40 -0
  83. package/lib/stages/postgen/auditBom.js +19 -3
  84. package/lib/stages/postgen/auditBom.poku.js +1729 -67
  85. package/lib/stages/postgen/postgen.js +40 -0
  86. package/lib/stages/postgen/postgen.poku.js +47 -0
  87. package/lib/stages/postgen/ruleEngine.js +80 -2
  88. package/lib/stages/postgen/spdxConverter.js +796 -0
  89. package/lib/stages/postgen/spdxConverter.poku.js +341 -0
  90. package/lib/validator/bomValidator.js +232 -0
  91. package/lib/validator/bomValidator.poku.js +70 -0
  92. package/lib/validator/complianceRules.js +70 -7
  93. package/lib/validator/complianceRules.poku.js +30 -0
  94. package/lib/validator/reporters/annotations.js +2 -2
  95. package/lib/validator/reporters/console.js +11 -0
  96. package/lib/validator/reporters.poku.js +13 -0
  97. package/package.json +10 -7
  98. package/types/bin/audit.d.ts +3 -0
  99. package/types/bin/audit.d.ts.map +1 -0
  100. package/types/bin/convert.d.ts +3 -0
  101. package/types/bin/convert.d.ts.map +1 -0
  102. package/types/bin/repl.d.ts.map +1 -1
  103. package/types/lib/audit/index.d.ts +115 -0
  104. package/types/lib/audit/index.d.ts.map +1 -0
  105. package/types/lib/audit/progress.d.ts +27 -0
  106. package/types/lib/audit/progress.d.ts.map +1 -0
  107. package/types/lib/audit/reporters.d.ts +35 -0
  108. package/types/lib/audit/reporters.d.ts.map +1 -0
  109. package/types/lib/audit/scoring.d.ts +35 -0
  110. package/types/lib/audit/scoring.d.ts.map +1 -0
  111. package/types/lib/audit/targets.d.ts +63 -0
  112. package/types/lib/audit/targets.d.ts.map +1 -0
  113. package/types/lib/cli/index.d.ts +8 -0
  114. package/types/lib/cli/index.d.ts.map +1 -1
  115. package/types/lib/helpers/analyzer.d.ts +13 -0
  116. package/types/lib/helpers/analyzer.d.ts.map +1 -1
  117. package/types/lib/helpers/annotationFormatter.d.ts +23 -0
  118. package/types/lib/helpers/annotationFormatter.d.ts.map +1 -0
  119. package/types/lib/helpers/bomUtils.d.ts +5 -0
  120. package/types/lib/helpers/bomUtils.d.ts.map +1 -0
  121. package/types/lib/helpers/chromextutils.d.ts +97 -0
  122. package/types/lib/helpers/chromextutils.d.ts.map +1 -0
  123. package/types/lib/helpers/ciParsers/githubActions.d.ts +3 -8
  124. package/types/lib/helpers/ciParsers/githubActions.d.ts.map +1 -1
  125. package/types/lib/helpers/containerRisk.d.ts +17 -0
  126. package/types/lib/helpers/containerRisk.d.ts.map +1 -0
  127. package/types/lib/helpers/display.d.ts +4 -1
  128. package/types/lib/helpers/display.d.ts.map +1 -1
  129. package/types/lib/helpers/exportUtils.d.ts +40 -0
  130. package/types/lib/helpers/exportUtils.d.ts.map +1 -0
  131. package/types/lib/helpers/formulationParsers.d.ts.map +1 -1
  132. package/types/lib/helpers/gtfobins.d.ts +17 -0
  133. package/types/lib/helpers/gtfobins.d.ts.map +1 -0
  134. package/types/lib/helpers/lolbas.d.ts +16 -0
  135. package/types/lib/helpers/lolbas.d.ts.map +1 -0
  136. package/types/lib/helpers/osqueryTransform.d.ts +7 -0
  137. package/types/lib/helpers/osqueryTransform.d.ts.map +1 -0
  138. package/types/lib/helpers/provenanceUtils.d.ts +90 -0
  139. package/types/lib/helpers/provenanceUtils.d.ts.map +1 -0
  140. package/types/lib/helpers/pylockutils.d.ts +51 -0
  141. package/types/lib/helpers/pylockutils.d.ts.map +1 -0
  142. package/types/lib/helpers/registryProvenance.d.ts +17 -0
  143. package/types/lib/helpers/registryProvenance.d.ts.map +1 -0
  144. package/types/lib/helpers/source.d.ts +141 -0
  145. package/types/lib/helpers/source.d.ts.map +1 -0
  146. package/types/lib/helpers/spdxUtils.d.ts +2 -0
  147. package/types/lib/helpers/spdxUtils.d.ts.map +1 -0
  148. package/types/lib/helpers/unicodeScan.d.ts +46 -0
  149. package/types/lib/helpers/unicodeScan.d.ts.map +1 -0
  150. package/types/lib/helpers/utils.d.ts +29 -11
  151. package/types/lib/helpers/utils.d.ts.map +1 -1
  152. package/types/lib/managers/binary.d.ts.map +1 -1
  153. package/types/lib/managers/docker.d.ts.map +1 -1
  154. package/types/lib/managers/oci.d.ts.map +1 -1
  155. package/types/lib/server/server.d.ts +0 -36
  156. package/types/lib/server/server.d.ts.map +1 -1
  157. package/types/lib/stages/postgen/annotator.d.ts.map +1 -1
  158. package/types/lib/stages/postgen/auditBom.d.ts.map +1 -1
  159. package/types/lib/stages/postgen/postgen.d.ts.map +1 -1
  160. package/types/lib/stages/postgen/ruleEngine.d.ts.map +1 -1
  161. package/types/lib/stages/postgen/spdxConverter.d.ts +11 -0
  162. package/types/lib/stages/postgen/spdxConverter.d.ts.map +1 -0
  163. package/types/lib/validator/bomValidator.d.ts +1 -0
  164. package/types/lib/validator/bomValidator.d.ts.map +1 -1
  165. package/types/lib/validator/complianceRules.d.ts.map +1 -1
  166. package/types/lib/validator/reporters/console.d.ts.map +1 -1
  167. package/types/bin/dependencies.d.ts +0 -3
  168. package/types/bin/dependencies.d.ts.map +0 -1
  169. package/types/bin/licenses.d.ts +0 -3
  170. package/types/bin/licenses.d.ts.map +0 -1
@@ -1,8 +1,15 @@
1
1
  import { readFileSync } from "node:fs";
2
2
 
3
- import { it } from "poku";
3
+ import esmock from "esmock";
4
+ import { assert, it } from "poku";
5
+ import sinon from "sinon";
4
6
 
5
- import { printDependencyTree } from "./display.js";
7
+ import {
8
+ buildDependencyTreeLegendLines,
9
+ buildDependencyTreeLines,
10
+ printDependencyTree,
11
+ } from "./display.js";
12
+ import { REGISTRY_PROVENANCE_ICON } from "./provenanceUtils.js";
6
13
 
7
14
  it("print tree test", () => {
8
15
  const bomJson = JSON.parse(
@@ -10,3 +17,156 @@ it("print tree test", () => {
10
17
  );
11
18
  printDependencyTree(bomJson);
12
19
  });
20
+
21
it("prints a provenance icon for registry-backed components", async () => {
  const capturedRows = [];
  const logStub = sinon.stub(console, "log");
  try {
    // Load display.js with stubbed table/utils modules so rendered rows are
    // captured in memory instead of being written to the terminal.
    const { printTable } = await esmock("./display.js", {
      "./table.js": {
        createStream: () => ({
          end() {
            // no-op: the stream stub only records rows
          },
          write(row) {
            capturedRows.push(row);
          },
        }),
        table: sinon.stub().returns(""),
      },
      "./utils.js": {
        isSecureMode: false,
        safeExistsSync: sinon.stub(),
        toCamel: sinon.stub(),
      },
    });

    const attestedComponent = {
      group: "",
      name: "left-pad",
      properties: [
        {
          name: "cdx:npm:provenanceUrl",
          value:
            "https://registry.npmjs.org/-/npm/v1/attestations/left-pad",
        },
      ],
      type: "library",
      version: "1.3.0",
    };
    const plainComponent = {
      group: "",
      name: "lodash",
      properties: [],
      type: "library",
      version: "4.17.21",
    };
    printTable(
      {
        components: [attestedComponent, plainComponent],
        dependencies: [],
      },
      undefined,
      undefined,
      "Found 1 trusted component.",
    );

    // capturedRows[0] is presumably the header row — the two component rows
    // follow it. Only the attested component should carry the icon.
    assert.strictEqual(
      capturedRows[1][1],
      `${REGISTRY_PROVENANCE_ICON} left-pad`,
    );
    assert.strictEqual(capturedRows[2][1], "lodash");
    sinon.assert.calledWithExactly(logStub, "Found 1 trusted component.");
    sinon.assert.calledWithExactly(
      logStub,
      `Legend: ${REGISTRY_PROVENANCE_ICON} = registry provenance or trusted publishing evidence`,
    );
    sinon.assert.calledWithExactly(
      logStub,
      `${REGISTRY_PROVENANCE_ICON} 1 component(s) include registry provenance or trusted publishing metadata.`,
    );
  } finally {
    logStub.restore();
  }
});
93
+
94
it("renders shared dependencies once while including dangling trees", () => {
  const dependencyList = [
    { dependsOn: ["pkg:shared/c@1.0.0"], ref: "pkg:root/a@1.0.0" },
    { dependsOn: ["pkg:shared/c@1.0.0"], ref: "pkg:root/b@1.0.0" },
    { dependsOn: ["pkg:leaf/d@1.0.0"], ref: "pkg:shared/c@1.0.0" },
    { dependsOn: ["pkg:cycle/f@1.0.0"], ref: "pkg:cycle/e@1.0.0" },
    { dependsOn: ["pkg:cycle/e@1.0.0"], ref: "pkg:cycle/f@1.0.0" },
  ];

  const lines = buildDependencyTreeLines(dependencyList);

  // A shared subtree renders in full exactly once; later references use the
  // ⤴ marker, and cycles are cut with the ↺ marker.
  assert.deepStrictEqual(lines, [
    "pkg:root/a@1.0.0",
    "└── pkg:shared/c@1.0.0",
    "    └── pkg:leaf/d@1.0.0",
    "pkg:root/b@1.0.0",
    "└── ⤴ pkg:shared/c@1.0.0",
    "pkg:cycle/e@1.0.0",
    "└── pkg:cycle/f@1.0.0",
    "    └── ↺ pkg:cycle/e@1.0.0",
  ]);
  assert.deepStrictEqual(buildDependencyTreeLegendLines(lines), [
    "Legend: ⤴ = already shown; ↺ = cycle",
  ]);
});
132
+
133
it("omits empty providers while marking shared provides with an icon", () => {
  const lines = buildDependencyTreeLines(
    [
      { provides: ["crypto/aes", "crypto/sha256"], ref: "pkg:npm/app@1.0.0" },
      { provides: ["crypto/sha256"], ref: "pkg:npm/helper@1.0.0" },
      { ref: "pkg:npm/unused@1.0.0" },
    ],
    "provides",
  );

  // Components without any provides entries are dropped entirely; a provide
  // already rendered elsewhere is repeated with the ⤴ marker instead.
  assert.deepStrictEqual(lines, [
    "pkg:npm/app@1.0.0",
    "├── crypto/aes",
    "└── crypto/sha256",
    "pkg:npm/helper@1.0.0",
    "└── ⤴ crypto/sha256",
  ]);
  assert.deepStrictEqual(buildDependencyTreeLegendLines(lines), [
    "Legend: ⤴ = already shown",
  ]);
});
162
+
163
it("returns no legend lines when the dependency tree has no markers", () => {
  // A plain tree (no ⤴ or ↺ markers) needs no legend at all.
  const plainTree = [
    "pkg:root/a@1.0.0",
    "└── pkg:shared/c@1.0.0",
    "    └── pkg:leaf/d@1.0.0",
  ];
  assert.deepStrictEqual(buildDependencyTreeLegendLines(plainTree), []);
});
@@ -0,0 +1,123 @@
1
import path from "node:path";

const SUPPORTED_EXPORT_FORMATS = new Set(["cyclonedx", "spdx"]);
const EXPORT_FORMAT_ALIASES = {
  cdx: "cyclonedx",
  cyclonedx: "cyclonedx",
  spdx: "spdx",
  "spdx-json": "spdx",
  spdx3: "spdx",
  "spdx3-json": "spdx",
};

/**
 * Normalize the requested export formats. Accepts a single value, an array of
 * values, or comma-separated lists; unknown tokens are silently dropped.
 *
 * @param {string|string[]|undefined|null} format Raw format value
 * @returns {string[]} Normalized export formats, deduplicated in first-seen order
 */
export function normalizeOutputFormats(format) {
  if (format === undefined || format === null || format === "") {
    return [];
  }
  const rawValues = Array.isArray(format) ? format : [format];
  const aliases = rawValues
    .filter(Boolean)
    .flatMap((value) => `${value}`.split(","))
    .map((token) => EXPORT_FORMAT_ALIASES[token.trim().toLowerCase()])
    .filter((alias) => alias && SUPPORTED_EXPORT_FORMATS.has(alias));
  return [...new Set(aliases)];
}

/**
 * Derive the SPDX output path from a base output path.
 *
 * @param {string} outputPath Output path
 * @returns {string} SPDX output path
 */
export function deriveSpdxOutputPath(outputPath) {
  if (!outputPath) {
    return "bom.spdx.json";
  }
  if (outputPath.endsWith(".spdx.json")) {
    return outputPath;
  }
  // Longest suffix first so ".cdx.json" wins over plain ".json".
  for (const suffix of [".cdx.json", ".json"]) {
    if (outputPath.endsWith(suffix)) {
      return `${outputPath.slice(0, -suffix.length)}.spdx.json`;
    }
  }
  return `${outputPath}.spdx.json`;
}

/**
 * Derive the CycloneDX output path from a base output path.
 *
 * @param {string} outputPath Output path
 * @returns {string} CycloneDX output path
 */
export function deriveCycloneDxOutputPath(outputPath) {
  if (!outputPath) {
    return "bom.json";
  }
  if (!outputPath.endsWith(".spdx.json")) {
    return outputPath;
  }
  return `${outputPath.slice(0, -".spdx.json".length)}.cdx.json`;
}

/**
 * Determine the final output plan for the requested export formats.
 *
 * @param {object} options CLI options
 * @returns {{ formats: Set<string>, outputs: Record<string, string>, explicitFormat: boolean }} Output plan
 */
export function createOutputPlan(options) {
  const rawFormat = options?.format;
  const explicitFormat =
    rawFormat !== undefined && rawFormat !== null && rawFormat !== "";
  const requestedFormats = normalizeOutputFormats(rawFormat);
  const outputPath = options?.output || "bom.json";
  const isSpdxPath = outputPath.endsWith(".spdx.json");
  // With no usable format request, infer the format from the output extension.
  const formats = new Set(
    requestedFormats.length
      ? requestedFormats
      : [isSpdxPath ? "spdx" : "cyclonedx"],
  );
  const outputs = {};
  if (formats.has("cyclonedx")) {
    // Dual export onto an SPDX-style path places CycloneDX alongside it.
    outputs.cyclonedx =
      isSpdxPath && formats.size > 1
        ? deriveCycloneDxOutputPath(outputPath)
        : outputPath;
  }
  if (formats.has("spdx")) {
    if (formats.has("cyclonedx") && !isSpdxPath) {
      // Dual export onto a CycloneDX-style path: derive a sibling SPDX path.
      outputs.spdx = deriveSpdxOutputPath(outputPath);
    } else {
      outputs.spdx =
        outputPath === "bom.json"
          ? deriveSpdxOutputPath(outputPath)
          : outputPath;
    }
  }
  return { formats, outputs, explicitFormat };
}

/**
 * Return the output directory for a planned export path.
 *
 * @param {string} outputPath Output path
 * @returns {string} Output directory
 */
export function getOutputDirectory(outputPath) {
  return path.dirname(outputPath);
}
@@ -0,0 +1,60 @@
1
import { assert, describe, it } from "poku";

import {
  createOutputPlan,
  deriveCycloneDxOutputPath,
  deriveSpdxOutputPath,
  normalizeOutputFormats,
} from "./exportUtils.js";

describe("exportUtils", () => {
  it("normalizes comma-separated export formats", () => {
    const normalized = normalizeOutputFormats("cyclonedx,spdx-json");
    assert.deepStrictEqual(normalized, ["cyclonedx", "spdx"]);
  });

  it("normalizes repeated format flags", () => {
    const normalized = normalizeOutputFormats(["cyclonedx", "spdx"]);
    assert.deepStrictEqual(normalized, ["cyclonedx", "spdx"]);
  });

  it("derives SPDX and CycloneDX sibling paths", () => {
    assert.strictEqual(
      deriveSpdxOutputPath("/tmp/bom.cdx.json"),
      "/tmp/bom.spdx.json",
    );
    assert.strictEqual(
      deriveCycloneDxOutputPath("/tmp/bom.spdx.json"),
      "/tmp/bom.cdx.json",
    );
  });

  it("chooses SPDX automatically for .spdx.json outputs", () => {
    const { formats, outputs } = createOutputPlan({
      output: "/tmp/app.spdx.json",
    });
    assert.strictEqual(formats.has("spdx"), true);
    assert.strictEqual(formats.has("cyclonedx"), false);
    assert.strictEqual(outputs.spdx, "/tmp/app.spdx.json");
  });

  it("creates sibling outputs for dual exports", () => {
    const { outputs } = createOutputPlan({
      format: "cyclonedx,spdx",
      output: "/tmp/app.cdx.json",
    });
    assert.strictEqual(outputs.cyclonedx, "/tmp/app.cdx.json");
    assert.strictEqual(outputs.spdx, "/tmp/app.spdx.json");
  });

  it("creates sibling outputs for repeated format flags", () => {
    const { outputs } = createOutputPlan({
      format: ["cyclonedx", "spdx"],
      output: "/tmp/app.cdx.json",
    });
    assert.strictEqual(outputs.cyclonedx, "/tmp/app.cdx.json");
    assert.strictEqual(outputs.spdx, "/tmp/app.spdx.json");
  });
});
@@ -1,3 +1,5 @@
1
+ import { readFileSync } from "node:fs";
2
+ import { basename } from "node:path";
1
3
  import process from "node:process";
2
4
 
3
5
  import { v4 as uuidv4 } from "uuid";
@@ -16,8 +18,67 @@ import {
16
18
  gitTreeHashes,
17
19
  listFiles,
18
20
  } from "./envcontext.js";
21
+ import { scanTextForHiddenUnicode } from "./unicodeScan.js";
19
22
  import { getAllFiles } from "./utils.js";
20
23
 
24
const README_PATTERNS = [
  "**/README*.{adoc,asciidoc,markdown,md,mdx,rst,txt}",
  "**/readme*.{adoc,asciidoc,markdown,md,mdx,rst,txt}",
];

/**
 * Scan README-style files under discoveryPath for hidden Unicode and return a
 * CycloneDX file component for every file where the scan reports a hit.
 *
 * @param {string} discoveryPath Root directory to search
 * @param {object} options Options forwarded to getAllFiles
 * @returns {object[]} File components (empty when nothing suspicious is found)
 */
function buildReadmeSecurityComponents(discoveryPath, options) {
  const candidateFiles = README_PATTERNS.flatMap(
    (pattern) => getAllFiles(discoveryPath, pattern, options) || [],
  );
  const fileComponents = [];
  // Set() dedupes files matched by both the upper- and lower-case patterns.
  for (const readmePath of new Set(candidateFiles)) {
    let contents;
    try {
      contents = readFileSync(readmePath, { encoding: "utf-8" });
    } catch {
      // Unreadable file: skip it — this is a best-effort scan.
      continue;
    }
    const scan = scanTextForHiddenUnicode(contents, { syntax: "markdown" });
    if (!scan.hasHiddenUnicode) {
      continue;
    }
    const properties = [
      { name: "SrcFile", value: readmePath },
      { name: "cdx:file:kind", value: "readme" },
      { name: "cdx:file:hasHiddenUnicode", value: "true" },
      {
        name: "cdx:file:hiddenUnicodeCodePoints",
        value: scan.codePoints.join(","),
      },
      {
        name: "cdx:file:hiddenUnicodeLineNumbers",
        value: scan.lineNumbers.join(","),
      },
    ];
    if (scan.inComments) {
      properties.push(
        {
          name: "cdx:file:hiddenUnicodeInComments",
          value: "true",
        },
        {
          name: "cdx:file:hiddenUnicodeCommentCodePoints",
          value: scan.commentCodePoints.join(","),
        },
      );
    }
    fileComponents.push({
      "bom-ref": `file:${readmePath}`,
      name: basename(readmePath),
      properties,
      type: "file",
    });
  }
  return fileComponents;
}
81
+
21
82
  /**
22
83
  * The parser registry. Pre-populated with the five built-in CI system parsers.
23
84
  *
@@ -291,6 +352,14 @@ export function addFormulationSection(filePath, options, context = {}) {
291
352
  components = components.concat(ciComponents);
292
353
  }
293
354
 
355
+ const readmeSecurityComponents = buildReadmeSecurityComponents(
356
+ discoveryPath,
357
+ options,
358
+ );
359
+ if (readmeSecurityComponents.length) {
360
+ components = components.concat(readmeSecurityComponents);
361
+ }
362
+
294
363
  // ── Environment variables ─────────────────────────────────────────────────
295
364
  let environmentVars = gitBranch?.length
296
365
  ? [{ name: "GIT_BRANCH", value: gitBranch }]
@@ -0,0 +1,44 @@
1
import { mkdtempSync, rmSync, writeFileSync } from "node:fs";
import os from "node:os";
import path from "node:path";

import { assert, describe, it } from "poku";

import { addFormulationSection } from "./formulationParsers.js";

// Lookup helper over a component's CycloneDX properties array.
function getProp(obj, name) {
  const match = obj?.properties?.find((property) => property.name === name);
  return match?.value;
}

describe("addFormulationSection()", () => {
  it("adds README file components when hidden Unicode is detected", () => {
    // README fixture containing a zero-width space inside an HTML comment.
    const tmpDir = mkdtempSync(path.join(os.tmpdir(), "cdxgen-formulation-"));
    writeFileSync(
      path.join(tmpDir, "README.md"),
      "# Demo\n<!-- hidden \u200B comment -->\nContent",
    );

    try {
      const result = addFormulationSection(tmpDir, { specVersion: 1.7 });
      const [formulation] = result.formulation;
      const readmeComponent = formulation.components.find(
        (component) => getProp(component, "cdx:file:kind") === "readme",
      );
      assert.ok(readmeComponent, "expected README formulation component");
      assert.strictEqual(
        getProp(readmeComponent, "cdx:file:hasHiddenUnicode"),
        "true",
      );
      assert.strictEqual(
        getProp(readmeComponent, "cdx:file:hiddenUnicodeInComments"),
        "true",
      );
      assert.match(
        getProp(readmeComponent, "cdx:file:hiddenUnicodeCodePoints"),
        /U\+200B/,
      );
    } finally {
      rmSync(tmpDir, { force: true, recursive: true });
    }
  });
});
@@ -0,0 +1,189 @@
1
import { readFileSync } from "node:fs";
import { basename, join } from "node:path";

import { dirNameStr, safeExistsSync } from "./utils.js";

const GTFOBINS_INDEX_FILE = join(dirNameStr, "data", "gtfobins-index.json");
const GTFOBINS_REFERENCE_PREFIX = "https://gtfobins.github.io/gtfobins/";
// Execution contexts that imply elevated privileges when abused.
const PRIVILEGED_CONTEXTS = ["sudo", "suid", "capabilities"];
// Binaries frequently abused to break out of container sandboxes.
const CONTAINER_ESCAPE_HELPERS = new Set([
  "chroot",
  "ctr",
  "docker",
  "kubectl",
  "mount",
  "nsenter",
  "tar",
  "unshare",
]);
const DIRECT_ALIASES = new Map([["nodejs", "node"]]);
// Interpreters commonly installed with a version suffix, e.g. python3.12.
const VERSIONED_ALIASES = [
  { pattern: /^python(?:\d+(?:\.\d+)*)?$/i, target: "python" },
  { pattern: /^perl(?:\d+(?:\.\d+)*)?$/i, target: "perl" },
  { pattern: /^ruby(?:\d+(?:\.\d+)*)?$/i, target: "ruby" },
  { pattern: /^php(?:\d+(?:\.\d+)*)?$/i, target: "php" },
  { pattern: /^lua(?:\d+(?:\.\d+)*)?$/i, target: "lua" },
  { pattern: /^node(?:\d+(?:\.\d+)*)?$/i, target: "node" },
];

// Loaded once at module init (function declarations hoist).
const GTFOBINS_INDEX = loadGtfoBinsIndex();

/**
 * Load the bundled GTFOBins index, falling back to an empty index when the
 * data file is missing or unparsable.
 *
 * @returns {{entries: Object, source: string, sourceRef: string}} Parsed index
 */
function loadGtfoBinsIndex() {
  const emptyIndex = {
    entries: {},
    source: GTFOBINS_REFERENCE_PREFIX,
    sourceRef: "",
  };
  if (!safeExistsSync(GTFOBINS_INDEX_FILE)) {
    return emptyIndex;
  }
  try {
    return JSON.parse(readFileSync(GTFOBINS_INDEX_FILE, "utf8"));
  } catch {
    // Corrupt data file: behave as if no index was bundled.
    return emptyIndex;
  }
}

/**
 * Resolve a binary name (or path) to a canonical GTFOBins entry name.
 *
 * @param {string} candidate Binary name or path
 * @returns {{canonicalName: string, matchSource: string}|undefined} Match info,
 *   or undefined when no entry matches
 */
function resolveCandidateName(candidate) {
  if (!candidate || typeof candidate !== "string") {
    return undefined;
  }
  const trimmed = basename(candidate.trim());
  if (!trimmed) {
    return undefined;
  }
  const normalized = trimmed.toLowerCase();
  // Exact basename lookup first, then case-insensitive.
  if (GTFOBINS_INDEX.entries?.[trimmed]) {
    return { canonicalName: trimmed, matchSource: "basename" };
  }
  if (GTFOBINS_INDEX.entries?.[normalized]) {
    return { canonicalName: normalized, matchSource: "basename" };
  }
  const directAlias = DIRECT_ALIASES.get(normalized);
  if (directAlias && GTFOBINS_INDEX.entries?.[directAlias]) {
    return { canonicalName: directAlias, matchSource: "alias" };
  }
  for (const aliasRule of VERSIONED_ALIASES) {
    if (
      aliasRule.pattern.test(normalized) &&
      GTFOBINS_INDEX.entries?.[aliasRule.target]
    ) {
      return { canonicalName: aliasRule.target, matchSource: "alias" };
    }
  }
  return undefined;
}

/**
 * Derive coarse risk tags from a GTFOBins entry's functions and contexts.
 *
 * @param {object} entry GTFOBins index entry
 * @param {string} canonicalName Canonical binary name
 * @returns {string[]} Sorted risk tags
 */
function deriveRiskTags(entry, canonicalName) {
  const functions = new Set(entry?.functions || []);
  const contexts = new Set(entry?.contexts || []);
  const riskTags = new Set();
  const hasExecPrimitive =
    functions.has("shell") ||
    functions.has("command") ||
    functions.has("reverse-shell") ||
    functions.has("bind-shell");
  const hasNetworkPrimitive =
    functions.has("upload") ||
    functions.has("download") ||
    functions.has("reverse-shell") ||
    functions.has("bind-shell");
  // Reuse PRIVILEGED_CONTEXTS instead of re-listing sudo/suid/capabilities.
  if (
    functions.has("privilege-escalation") ||
    PRIVILEGED_CONTEXTS.some((context) => contexts.has(context))
  ) {
    riskTags.add("privilege-escalation");
  }
  if (hasExecPrimitive && hasNetworkPrimitive) {
    riskTags.add("lateral-movement");
  }
  if (functions.has("upload") || functions.has("file-read")) {
    riskTags.add("data-exfiltration");
  }
  if (functions.has("file-write") || functions.has("library-load")) {
    riskTags.add("persistence");
  }
  if (
    CONTAINER_ESCAPE_HELPERS.has(canonicalName) &&
    (hasExecPrimitive ||
      functions.has("privilege-escalation") ||
      functions.has("library-load"))
  ) {
    riskTags.add("container-escape");
  }
  return Array.from(riskTags).sort();
}

/**
 * Build the metadata payload for a resolved GTFOBins entry. Shared by both
 * match branches of getGtfoBinsMetadata (previously duplicated inline).
 *
 * @param {string} canonicalName Canonical GTFOBins entry name
 * @param {string} matchSource How the binary matched ("basename", "alias" or "symlink")
 * @returns {object} GTFOBins metadata
 */
function buildGtfoBinsMetadata(canonicalName, matchSource) {
  const entry = GTFOBINS_INDEX.entries[canonicalName];
  return {
    canonicalName,
    contexts: entry.contexts,
    functions: entry.functions,
    matchSource,
    mitreTechniques: entry.mitreTechniques,
    privilegedContexts: entry.contexts.filter((context) =>
      PRIVILEGED_CONTEXTS.includes(context),
    ),
    reference: `${GTFOBINS_REFERENCE_PREFIX}${encodeURIComponent(canonicalName)}/`,
    riskTags: deriveRiskTags(entry, canonicalName),
    source: GTFOBINS_INDEX.source,
    sourceRef: GTFOBINS_INDEX.sourceRef,
  };
}

/**
 * Look up GTFOBins metadata for a binary, preferring the binary's own name and
 * falling back to its symlink target.
 *
 * @param {string} name Binary name or path
 * @param {string} [linkedName] Symlink target name or path
 * @returns {object|undefined} GTFOBins metadata, or undefined when unmatched
 */
export function getGtfoBinsMetadata(name, linkedName) {
  const directMatch = resolveCandidateName(name);
  if (directMatch) {
    return buildGtfoBinsMetadata(
      directMatch.canonicalName,
      directMatch.matchSource,
    );
  }
  const linkedMatch = resolveCandidateName(linkedName);
  if (!linkedMatch) {
    return undefined;
  }
  // Matches via the symlink target report "symlink" regardless of how the
  // target name itself resolved.
  return buildGtfoBinsMetadata(linkedMatch.canonicalName, "symlink");
}

/**
 * Create CycloneDX properties describing a GTFOBins match for a binary.
 *
 * @param {string} name Binary name or path
 * @param {string} [linkedName] Symlink target name or path
 * @returns {Array<{name: string, value: string}>} Properties (empty when unmatched)
 */
export function createGtfoBinsProperties(name, linkedName) {
  const metadata = getGtfoBinsMetadata(name, linkedName);
  if (!metadata) {
    return [];
  }
  const properties = [
    { name: "cdx:gtfobins:matched", value: "true" },
    { name: "cdx:gtfobins:name", value: metadata.canonicalName },
    { name: "cdx:gtfobins:matchSource", value: metadata.matchSource },
    { name: "cdx:gtfobins:functions", value: metadata.functions.join(",") },
    { name: "cdx:gtfobins:contexts", value: metadata.contexts.join(",") },
    { name: "cdx:gtfobins:reference", value: metadata.reference },
    { name: "cdx:gtfobins:sourceRef", value: metadata.sourceRef || "" },
  ];
  if (metadata.mitreTechniques.length) {
    properties.push({
      name: "cdx:gtfobins:mitreTechniques",
      value: metadata.mitreTechniques.join(","),
    });
  }
  if (metadata.privilegedContexts.length) {
    properties.push({
      name: "cdx:gtfobins:privilegedContexts",
      value: metadata.privilegedContexts.join(","),
    });
  }
  if (metadata.riskTags.length) {
    properties.push({
      name: "cdx:gtfobins:riskTags",
      value: metadata.riskTags.join(","),
    });
  }
  return properties;
}