@cyclonedx/cdxgen 10.3.5 → 10.4.0
This diff compares the contents of two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only and reflects the changes between the versions exactly as they appear in that registry.
- package/README.md +2 -4
- package/analyzer.js +18 -18
- package/bin/cdxgen.js +78 -77
- package/bin/evinse.js +26 -26
- package/bin/repl.js +56 -62
- package/bin/verify.js +9 -9
- package/binary.js +55 -54
- package/cbomutils.js +6 -6
- package/db.js +17 -17
- package/display.js +30 -30
- package/display.test.js +2 -2
- package/docker.js +92 -89
- package/docker.test.js +30 -30
- package/envcontext.js +15 -15
- package/envcontext.test.js +1 -1
- package/evinser.js +94 -93
- package/evinser.test.js +24 -24
- package/index.js +522 -482
- package/package.json +8 -16
- package/piptree.js +6 -6
- package/postgen.js +2 -2
- package/postgen.test.js +5 -5
- package/protobom.js +37 -7
- package/protobom.test.js +6 -6
- package/server.js +16 -16
- package/types/analyzer.d.ts +7 -4
- package/types/binary.d.ts +12 -8
- package/types/cbomutils.d.ts +1 -1
- package/types/db.d.ts +23 -11
- package/types/display.d.ts +1 -1
- package/types/docker.d.ts +52 -32
- package/types/envcontext.d.ts +40 -40
- package/types/evinser.d.ts +3436 -717
- package/types/index.d.ts +66 -40
- package/types/jest.config.d.ts +2 -2
- package/types/piptree.d.ts +6 -2
- package/types/postgen.d.ts +1 -1
- package/types/protobom.d.ts +7 -3
- package/types/protobom.d.ts.map +1 -1
- package/types/server.d.ts +1 -1
- package/types/utils.d.ts +496 -302
- package/types/validator.d.ts +1 -1
- package/utils.js +742 -675
- package/utils.test.js +716 -674
- package/validator.js +20 -17
package/README.md
CHANGED
````diff
@@ -1,4 +1,4 @@
-[](https://jsr.io/@cyclonedx/cdxgen) [](https://www.npmjs.com/package/@cyclonedx/cdxgen) [](https://github.com/CycloneDX/cdxgen/releases)
+[](https://jsr.io/@cyclonedx/cdxgen) [](https://www.npmjs.com/package/@cyclonedx/cdxgen) [](https://github.com/CycloneDX/cdxgen/releases) [](<(https://www.npmjs.com/package/@cyclonedx/cdxgen)>) [](./LICENSE.md) [](https://github.com/CycloneDX/cdxgen/graphs/contributors)
 
 # CycloneDX Generator
 
@@ -568,10 +568,8 @@ Follow the usual PR process, but before raising a PR, run the following commands
 ```bash
 # Generate types using jsdoc syntax
 npm run gen-types
-# Run
+# Run biomejs formatter and linter with auto fix
 npm run lint
-# Run prettier
-npm run pretty
 # Run jest tests
 npm test
 ```
````
package/analyzer.js
CHANGED
````diff
@@ -1,8 +1,8 @@
-import { parse } from "@babel/parser";
-import traverse from "@babel/traverse";
-import process from "node:process";
 import { lstatSync, readFileSync, readdirSync } from "node:fs";
 import { basename, isAbsolute, join, relative, resolve } from "node:path";
+import process from "node:process";
+import { parse } from "@babel/parser";
+import traverse from "@babel/traverse";
 
 const IGNORE_DIRS = process.env.ASTGEN_IGNORE_DIRS
   ? process.env.ASTGEN_IGNORE_DIRS.split(",")
@@ -26,13 +26,13 @@ const IGNORE_DIRS = process.env.ASTGEN_IGNORE_DIRS
   "codemods",
   "flow-typed",
   "i18n",
-  "__tests__"
+  "__tests__",
 ];
 
 const IGNORE_FILE_PATTERN = new RegExp(
   process.env.ASTGEN_IGNORE_FILE_PATTERN ||
     "(conf|config|test|spec|mock|\\.d)\\.(js|ts|tsx)$",
-  "i"
+  "i",
 );
 
 const getAllFiles = (deep, dir, extn, files, result, regex) => {
@@ -70,7 +70,7 @@ const getAllFiles = (deep, dir, extn, files, result, regex) => {
           extn,
           readdirSync(file),
           result,
-          regex
+          regex,
         );
       } catch (error) {
         continue;
@@ -105,8 +105,8 @@ const babelParserOptions = {
     "numericSeparator",
     "dynamicImport",
     "jsx",
-    "typescript"
-  ]
+    "typescript",
+  ],
 };
 
 /**
@@ -119,7 +119,7 @@ const setFileRef = (
   src,
   file,
   pathnode,
-  specifiers = []
+  specifiers = [],
 ) => {
   const pathway = pathnode.value || pathnode.name;
   const sourceLoc = pathnode.loc?.start;
@@ -144,7 +144,7 @@ const setFileRef = (
     isExternal: true,
     fileName: fileRelativeLoc,
     lineNumber: sourceLoc && sourceLoc.line ? sourceLoc.line : undefined,
-    columnNumber: sourceLoc && sourceLoc.column ? sourceLoc.column : undefined
+    columnNumber: sourceLoc && sourceLoc.column ? sourceLoc.column : undefined,
   };
   // replace relative imports with full path
   let moduleFullPath = pathway;
@@ -194,21 +194,21 @@ const fileToParseableCode = (file) => {
       .replace(vueCommentRegex, (match) => match.replaceAll(/\S/g, " "))
       .replace(
         vueCleaningRegex,
-        (match) => match.replaceAll(/\S/g, " ").substring(1) + ";"
+        (match) => match.replaceAll(/\S/g, " ").substring(1) + ";",
       )
       .replace(
         vueBindRegex,
         (match, grA, grB, grC) =>
-          grA.replaceAll(/\S/g, " ") + grB + grC.replaceAll(/\S/g, " ")
+          grA.replaceAll(/\S/g, " ") + grB + grC.replaceAll(/\S/g, " "),
       )
       .replace(
         vuePropRegex,
-        (match, grA, grB) => " " + grA.replace(/[.:@]/g, " ") + grB
+        (match, grA, grB) => " " + grA.replace(/[.:@]/g, " ") + grB,
       )
       .replace(
         vueTemplateRegex,
         (match, grA, grB, grC) =>
-          grA + grB.replaceAll("{{", "{ ").replaceAll("}}", " }") + grC
+          grA + grB.replaceAll("{{", "{ ").replaceAll("}}", " }") + grC,
       );
   }
   return code;
@@ -229,7 +229,7 @@ const parseFileASTTree = (src, file, allImports, allExports) => {
           src,
           file,
           path.node.source,
-          path.node.specifiers
+          path.node.specifiers,
         );
       }
     },
@@ -263,10 +263,10 @@ const parseFileASTTree = (src, file, allImports, allExports) => {
           src,
           file,
           path.node.source,
-          path.node.specifiers
+          path.node.specifiers,
         );
       }
-    }
+    },
   });
 };
 
@@ -282,7 +282,7 @@ const getAllSrcJSAndTSFiles = (src, deep) =>
     getAllFiles(deep, src, ".ts"),
     getAllFiles(deep, src, ".tsx"),
     getAllFiles(deep, src, ".vue"),
-    getAllFiles(deep, src, ".svelte")
+    getAllFiles(deep, src, ".svelte"),
   ]);
 
 /**
````
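The analyzer.js hunks above appear to come from the same formatter change described in the README (Biome replacing prettier): trailing commas are added, while the underlying flow is unchanged. The file collects .js/.ts/.tsx/.vue/.svelte sources, parses each with @babel/parser using the plugin list from babelParserOptions, and walks the AST with @babel/traverse to record imports and exports. A minimal sketch of that flow for context; the sample path, the sourceType value, and the visitor body are illustrative assumptions rather than cdxgen's exact code:

```javascript
import { readFileSync } from "node:fs";
import { parse } from "@babel/parser";
import _traverse from "@babel/traverse";

// CJS/ESM interop: @babel/traverse exposes the walker as its default export.
const traverse = _traverse.default || _traverse;

// Hypothetical input file; analyzer.js discovers real ones via getAllSrcJSAndTSFiles.
const file = "src/index.ts";
const code = readFileSync(file, "utf-8");

// Plugin names mirror the babelParserOptions fragment shown in the diff above.
const ast = parse(code, {
  sourceType: "unambiguous", // assumption; analyzer.js sets its own parser options
  plugins: ["dynamicImport", "jsx", "typescript"],
});

// Record every import declaration with its source module and location,
// roughly the information setFileRef captures for each file.
const allImports = [];
traverse(ast, {
  ImportDeclaration(path) {
    allImports.push({
      module: path.node.source.value,
      lineNumber: path.node.loc?.start?.line,
    });
  },
});
console.log(allImports);
```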
package/bin/cdxgen.js
CHANGED
````diff
@@ -1,34 +1,34 @@
 #!/usr/bin/env node
 
-import
-import { validateBom } from "../validator.js";
+import crypto from "node:crypto";
 import fs from "node:fs";
 import { tmpdir } from "node:os";
 import { basename, dirname, join, resolve } from "node:path";
-import
-import crypto from "node:crypto";
+import process from "node:process";
 import { URL, fileURLToPath } from "node:url";
+import { findUpSync } from "find-up";
 import globalAgent from "global-agent";
-import
+import { load as _load } from "js-yaml";
+import jws from "jws";
 import {
   printCallStack,
   printDependencyTree,
   printOccurrences,
   printReachables,
   printServices,
-  printTable
+  printTable,
 } from "../display.js";
-import {
-import { load as _load } from "js-yaml";
+import { createBom, submitBom } from "../index.js";
 import { postProcess } from "../postgen.js";
 import { ATOM_DB } from "../utils.js";
+import { validateBom } from "../validator.js";
 
 // Support for config files
 const configPath = findUpSync([
   ".cdxgenrc",
   ".cdxgen.json",
   ".cdxgen.yml",
-  ".cdxgen.yaml"
+  ".cdxgen.yaml",
 ]);
 let config = {};
 if (configPath) {
````
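The reordered imports above (notably find-up and js-yaml) back the config-file lookup at the end of this hunk: the nearest .cdxgenrc/.cdxgen.json/.cdxgen.yml/.cdxgen.yaml is located and its contents are merged into the CLI arguments through yargs' .config(config) further below. A hedged sketch of how such a lookup can work; the JSON-versus-YAML branch is an assumption based on the file extensions, not cdxgen's verbatim logic. The remaining hunks of bin/cdxgen.js follow after the sketch.

```javascript
import fs from "node:fs";
import { findUpSync } from "find-up";
import { load as _load } from "js-yaml";

// Walk up from the current directory to the nearest config file, if any.
const configPath = findUpSync([
  ".cdxgenrc",
  ".cdxgen.json",
  ".cdxgen.yml",
  ".cdxgen.yaml",
]);

let config = {};
if (configPath) {
  const raw = fs.readFileSync(configPath, "utf-8");
  // Assumption: treat the .yml/.yaml variants as YAML and the rest as JSON.
  config = /\.ya?ml$/.test(configPath) ? _load(raw) : JSON.parse(raw);
}

// Keys correspond to CLI option names, e.g. { "type": "java", "output": "bom.json" },
// and are later merged into yargs via .config(config).
console.log(configPath, config);
```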
````diff
@@ -57,150 +57,151 @@ const args = yargs(hideBin(process.argv))
   .option("output", {
     alias: "o",
     description: "Output file. Default bom.json",
-    default: "bom.json"
+    default: "bom.json",
   })
   .option("evinse-output", {
     description:
       "Create bom with evidence as a separate file. Default bom.json",
     default: "bom.json",
-    hidden: true
+    hidden: true,
   })
   .option("type", {
     alias: "t",
-    description: "Project type"
+    description: "Project type",
   })
   .option("recurse", {
     alias: "r",
     type: "boolean",
     default: true,
     description:
-      "Recurse mode suitable for mono-repos. Defaults to true. Pass --no-recurse to disable."
+      "Recurse mode suitable for mono-repos. Defaults to true. Pass --no-recurse to disable.",
   })
   .option("print", {
     alias: "p",
     type: "boolean",
-    description: "Print the SBOM as a table with tree."
+    description: "Print the SBOM as a table with tree.",
   })
   .option("resolve-class", {
     alias: "c",
     type: "boolean",
-    description: "Resolve class names for packages. jars only for now."
+    description: "Resolve class names for packages. jars only for now.",
   })
   .option("deep", {
     type: "boolean",
     description:
-      "Perform deep searches for components. Useful while scanning C/C++ apps, live OS and oci images."
+      "Perform deep searches for components. Useful while scanning C/C++ apps, live OS and oci images.",
   })
   .option("server-url", {
-    description: "Dependency track url. Eg: https://deptrack.cyclonedx.io"
+    description: "Dependency track url. Eg: https://deptrack.cyclonedx.io",
   })
   .option("api-key", {
-    description: "Dependency track api key"
+    description: "Dependency track api key",
   })
   .option("project-group", {
-    description: "Dependency track project group"
+    description: "Dependency track project group",
   })
   .option("project-name", {
-    description:
+    description:
+      "Dependency track project name. Default use the directory name",
   })
   .option("project-version", {
     description: "Dependency track project version",
     default: "",
-    type: "string"
+    type: "string",
   })
   .option("project-id", {
     description:
       "Dependency track project id. Either provide the id or the project name and version together",
-    type: "string"
+    type: "string",
   })
   .option("parent-project-id", {
     description: "Dependency track parent project id",
-    type: "string"
+    type: "string",
   })
   .option("required-only", {
     type: "boolean",
     description:
-      "Include only the packages with required scope on the SBOM. Would set compositions.aggregate to incomplete unless --no-auto-compositions is passed."
+      "Include only the packages with required scope on the SBOM. Would set compositions.aggregate to incomplete unless --no-auto-compositions is passed.",
   })
   .option("fail-on-error", {
     type: "boolean",
-    description: "Fail if any dependency extractor fails."
+    description: "Fail if any dependency extractor fails.",
   })
   .option("no-babel", {
     type: "boolean",
     description:
-      "Do not use babel to perform usage analysis for JavaScript/TypeScript projects."
+      "Do not use babel to perform usage analysis for JavaScript/TypeScript projects.",
   })
   .option("generate-key-and-sign", {
     type: "boolean",
     description:
-      "Generate an RSA public/private key pair and then sign the generated SBOM using JSON Web Signatures."
+      "Generate an RSA public/private key pair and then sign the generated SBOM using JSON Web Signatures.",
   })
   .option("server", {
     type: "boolean",
-    description: "Run cdxgen as a server"
+    description: "Run cdxgen as a server",
   })
   .option("server-host", {
     description: "Listen address",
-    default: "127.0.0.1"
+    default: "127.0.0.1",
   })
   .option("server-port", {
     description: "Listen port",
-    default: "9090"
+    default: "9090",
   })
   .option("install-deps", {
     type: "boolean",
     description:
-      "Install dependencies automatically for some projects. Defaults to true but disabled for containers and oci scans. Use --no-install-deps to disable this feature."
+      "Install dependencies automatically for some projects. Defaults to true but disabled for containers and oci scans. Use --no-install-deps to disable this feature.",
   })
   .option("validate", {
     type: "boolean",
     default: true,
     description:
-      "Validate the generated SBOM using json schema. Defaults to true. Pass --no-validate to disable."
+      "Validate the generated SBOM using json schema. Defaults to true. Pass --no-validate to disable.",
   })
   .option("evidence", {
     type: "boolean",
     default: false,
-    description: "Generate SBOM with evidence for supported languages."
+    description: "Generate SBOM with evidence for supported languages.",
   })
   .option("deps-slices-file", {
     description: "Path for the parsedeps slice file created by atom.",
     default: "deps.slices.json",
-    hidden: true
+    hidden: true,
   })
   .option("usages-slices-file", {
     description: "Path for the usages slices file created by atom.",
     default: "usages.slices.json",
-    hidden: true
+    hidden: true,
   })
   .option("data-flow-slices-file", {
     description: "Path for the data-flow slices file created by atom.",
     default: "data-flow.slices.json",
-    hidden: true
+    hidden: true,
   })
   .option("reachables-slices-file", {
     description: "Path for the reachables slices file created by atom.",
     default: "reachables.slices.json",
-    hidden: true
+    hidden: true,
   })
   .option("spec-version", {
     description: "CycloneDX Specification version to use. Defaults to 1.5",
     default: 1.5,
-    type: "number"
+    type: "number",
   })
   .option("filter", {
     description:
-      "Filter components containing this word in purl or component.properties.value. Multiple values allowed."
+      "Filter components containing this word in purl or component.properties.value. Multiple values allowed.",
   })
   .option("only", {
     description:
-      "Include components only containing this word in purl. Useful to generate BOM with first party components alone. Multiple values allowed."
+      "Include components only containing this word in purl. Useful to generate BOM with first party components alone. Multiple values allowed.",
   })
   .option("author", {
     description:
       "The person(s) who created the BOM. Set this value if you're intending the modify the BOM and claim authorship.",
-    default: "OWASP Foundation"
+    default: "OWASP Foundation",
   })
   .option("profile", {
     description: "BOM profile to use for generation. Default generic.",
@@ -211,37 +212,37 @@ const args = yargs(hideBin(process.argv))
       "operational",
       "threat-modeling",
       "license-compliance",
-      "generic"
-    ]
+      "generic",
+    ],
   })
   .option("lifecycle", {
     description: "Product lifecycle for the generated BOM.",
     hidden: true,
-    choices: ["pre-build", "build", "post-build"]
+    choices: ["pre-build", "build", "post-build"],
   })
   .option("exclude", {
-    description: "Additional glob pattern(s) to ignore"
+    description: "Additional glob pattern(s) to ignore",
   })
   .option("export-proto", {
     type: "boolean",
     default: false,
     description: "Serialize and export BOM as protobuf binary.",
-    hidden: true
+    hidden: true,
   })
   .option("proto-bin-file", {
     description: "Path for the serialized protobuf binary.",
     default: "bom.cdx",
-    hidden: true
+    hidden: true,
   })
   .option("include-formulation", {
     type: "boolean",
     default: false,
-    description: "Generate formulation section using git metadata."
+    description: "Generate formulation section using git metadata.",
   })
   .option("include-crypto", {
     type: "boolean",
     default: false,
-    description: "Include crypto libraries found under formulation."
+    description: "Include crypto libraries found under formulation.",
   })
   .completion("completion", "Generate bash/zsh completion")
   .array("filter")
@@ -252,11 +253,11 @@ const args = yargs(hideBin(process.argv))
     type: "boolean",
     default: true,
     description:
-      "Automatically set compositions when the BOM was filtered. Defaults to true"
+      "Automatically set compositions when the BOM was filtered. Defaults to true",
   })
   .example([
     ["$0 -t java .", "Generate a Java SBOM for the current directory"],
-    ["$0 --server", "Run cdxgen as a server"]
+    ["$0 --server", "Run cdxgen as a server"],
   ])
   .epilogue("for documentation, visit https://cyclonedx.github.io/cdxgen")
   .config(config)
@@ -269,7 +270,7 @@ const args = yargs(hideBin(process.argv))
 if (args.version) {
   const packageJsonAsString = fs.readFileSync(
     join(dirName, "..", "package.json"),
-    "utf-8"
+    "utf-8",
   );
   const packageJson = JSON.parse(packageJsonAsString);
 
@@ -307,7 +308,7 @@ const options = Object.assign({}, args, {
   multiProject: args.recurse,
   noBabel: args.noBabel || args.babel === false,
   project: args.projectId,
-  deep: args.deep || args.evidence
+  deep: args.deep || args.evidence,
 });
 
 if (process.argv[1].includes("cbom")) {
@@ -369,11 +370,11 @@ const applyAdvancedOptions = (options) => {
       "apk",
       "aab",
       "go",
-      "golang"
+      "golang",
     ].includes(options.projectType)
   ) {
     console.log(
-      "PREVIEW: post-build lifecycle SBOM generation is supported only for android, dotnet, and go projects. Please specify the type using the -t argument."
+      "PREVIEW: post-build lifecycle SBOM generation is supported only for android, dotnet, and go projects. Please specify the type using the -t argument.",
    );
     process.exit(1);
   }
@@ -401,20 +402,20 @@ const checkPermissions = (filePath) => {
   if (!process.permission.has("fs.read", filePath)) {
     console.log(
       `FileSystemRead permission required. Please invoke with the argument --allow-fs-read="${resolve(
-        filePath
-      )}"`
+        filePath,
+      )}"`,
     );
     return false;
   }
   if (!process.permission.has("fs.write", tmpdir())) {
     console.log(
-      `FileSystemWrite permission required. Please invoke with the argument --allow-fs-write="${tmpdir()}"`
+      `FileSystemWrite permission required. Please invoke with the argument --allow-fs-write="${tmpdir()}"`,
     );
     return false;
   }
   if (!process.permission.has("child")) {
     console.log(
-      "ChildProcess permission is missing. This is required to spawn commands for some languages. Please invoke with the argument --allow-child-process"
+      "ChildProcess permission is missing. This is required to spawn commands for some languages. Please invoke with the argument --allow-child-process",
     );
   }
   return true;
@@ -463,7 +464,7 @@ const checkPermissions = (filePath) => {
       ["os", "docker", "universal"].includes(options.projectType) ||
       process.env.CI
         ? null
-        : 2
+        : 2,
     );
     fs.writeFileSync(jsonFile, jsonPayload);
   }
@@ -490,19 +491,19 @@ const checkPermissions = (filePath) => {
       modulusLength: 4096,
       publicKeyEncoding: {
         type: "spki",
-        format: "pem"
+        format: "pem",
       },
       privateKeyEncoding: {
         type: "pkcs8",
-        format: "pem"
-      }
+        format: "pem",
+      },
     });
     fs.writeFileSync(publicKeyFile, publicKey);
     fs.writeFileSync(privateKeyFile, privateKey);
     console.log(
       "Created public/private key pairs for testing purposes",
       publicKeyFile,
-      privateKeyFile
+      privateKeyFile,
     );
     privateKeyToUse = privateKey;
     jwkPublicKey = crypto
@@ -511,7 +512,7 @@ const checkPermissions = (filePath) => {
   } else {
     privateKeyToUse = fs.readFileSync(
       process.env.SBOM_SIGN_PRIVATE_KEY,
-      "utf8"
+      "utf8",
     );
     if (
       process.env.SBOM_SIGN_PUBLIC_KEY &&
@@ -519,7 +520,7 @@ const checkPermissions = (filePath) => {
     ) {
       jwkPublicKey = crypto
         .createPublicKey(
-          fs.readFileSync(process.env.SBOM_SIGN_PUBLIC_KEY, "utf8")
+          fs.readFileSync(process.env.SBOM_SIGN_PUBLIC_KEY, "utf8"),
        )
         .export({ format: "jwk" });
     }
@@ -532,11 +533,11 @@ const checkPermissions = (filePath) => {
       const compSignature = jws.sign({
         header: { alg },
         payload: comp,
-        privateKey: privateKeyToUse
+        privateKey: privateKeyToUse,
       });
       const compSignatureBlock = {
         algorithm: alg,
-        value: compSignature
+        value: compSignature,
       };
       if (jwkPublicKey) {
         compSignatureBlock.publicKey = jwkPublicKey;
@@ -546,12 +547,12 @@ const checkPermissions = (filePath) => {
     const signature = jws.sign({
       header: { alg },
       payload: JSON.stringify(bomJsonUnsignedObj, null, 2),
-      privateKey: privateKeyToUse
+      privateKey: privateKeyToUse,
     });
     if (signature) {
       const signatureBlock = {
         algorithm: alg,
-        value: signature
+        value: signature,
       };
       if (jwkPublicKey) {
         signatureBlock.publicKey = jwkPublicKey;
@@ -559,25 +560,25 @@ const checkPermissions = (filePath) => {
       bomJsonUnsignedObj.signature = signatureBlock;
       fs.writeFileSync(
         jsonFile,
-        JSON.stringify(bomJsonUnsignedObj, null, null)
+        JSON.stringify(bomJsonUnsignedObj, null, null),
       );
       if (publicKeyFile) {
         // Verifying this signature
         const signatureVerification = jws.verify(
           signature,
           alg,
-          fs.readFileSync(publicKeyFile, "utf8")
+          fs.readFileSync(publicKeyFile, "utf8"),
         );
         if (signatureVerification) {
           console.log(
             "SBOM signature is verifiable with the public key and the algorithm",
             publicKeyFile,
-            alg
+            alg,
           );
         } else {
           console.log("SBOM signature verification was unsuccessful");
           console.log(
-            "Check if the public key was exported in PEM format"
+            "Check if the public key was exported in PEM format",
          );
        }
      }
@@ -617,17 +618,17 @@ const checkPermissions = (filePath) => {
     dataFlowSlicesFile: options.dataFlowSlicesFile,
     reachablesSlicesFile: options.reachablesSlicesFile,
     includeCrypto: options.includeCrypto,
-    specVersion: options.specVersion
+    specVersion: options.specVersion,
   };
   const dbObjMap = await evinserModule.prepareDB(evinseOptions);
   if (dbObjMap) {
     const sliceArtefacts = await evinserModule.analyzeProject(
       dbObjMap,
-      evinseOptions
+      evinseOptions,
     );
     const evinseJson = evinserModule.createEvinseFile(
       sliceArtefacts,
-      evinseOptions
+      evinseOptions,
     );
     bomNSData.bomJson = evinseJson;
     if (options.print && evinseJson) {
````