@cyclonedx/cdxgen 9.9.3 → 9.9.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -49,6 +49,8 @@ Most SBOM tools are like barcode scanners. They can scan a few package manifest
  | Gradle Cache | $HOME/caches/modules-2/files-2.1/\*\*/\*.jar | N/A | |
  | Helm Index | $HOME/.cache/helm/repository/\*\*/\*.yaml | N/A | |
  | Docker compose | docker-compose\*.yml. Images would also be scanned. | N/A | |
+ | Dockerfile | `*Dockerfile*` Images would also be scanned. | N/A | |
+ | Containerfile | `*Containerfile*`. Images would also be scanned. | N/A | |
  | Google CloudBuild configuration | cloudbuild.yaml | N/A | |
  | OpenAPI | openapi\*.json, openapi\*.yaml | N/A | |

@@ -173,9 +175,9 @@ Options:
  es. [boolean] [default: false]
  --spec-version CycloneDX Specification version to use. Defaults
  to 1.5 [default: 1.5]
- --filter Filter components containining this word in purl.
+ --filter Filter components containing this word in purl.
  Multiple values allowed. [array]
- --only Include components only containining this word in
+ --only Include components only containing this word in
  purl. Useful to generate BOM with first party co
  mponents alone. Multiple values allowed. [array]
  --author The person(s) who created the BOM. Set this value
@@ -443,7 +445,7 @@ This feature is powered by osquery, which is [installed](https://github.com/cycl

  See [evinse mode](./ADVANCED.md) in the advanced documentation.

- ## BoM signing
+ ## BOM signing

  cdxgen can sign the generated BOM json file to increase authenticity and non-repudiation capabilities. To enable this, set the following environment variables.

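The two new table rows document the Dockerfile/Containerfile scanning added in this release (wired up in index.js and utils.js below). A minimal programmatic sketch, assuming the package's documented `createBom(path, options)` export and that the new `dockerfile` project type behaves like the CLI's `-t dockerfile`; the project path is a placeholder:

```js
import { createBom } from "@cyclonedx/cdxgen";

// Scan a directory whose *Dockerfile* / *Containerfile* files should contribute
// their FROM images to the BOM (see parseContainerFile in utils.js below).
const bomNSData = await createBom("/path/to/project", {
  projectType: "dockerfile",
  multiProject: true
});
// bomNSData carries the generated BOM, as consumed by bin/cdxgen.js.
```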
package/analyzer.js CHANGED
@@ -108,7 +108,7 @@ const setFileRef = (allImports, src, file, pathnode, specifiers = []) => {
  }
  const fileRelativeLoc = relative(src, file);
  // remove unexpected extension imports
- if (/\.(svg|png|jpg|d\.ts)/.test(pathway)) {
+ if (/\.(svg|png|jpg|json|d\.ts)/.test(pathway)) {
  return;
  }
  const importedModules = specifiers
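The widened pattern now also drops JSON imports when collecting file references. A small illustration of the effect (the sample paths are invented):

```js
const skipPattern = /\.(svg|png|jpg|json|d\.ts)/;

skipPattern.test("./config/settings.json"); // true  -> the import is ignored
skipPattern.test("./lib/helper.js");        // false -> the import is kept
```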
package/bin/cdxgen.js CHANGED
@@ -193,11 +193,11 @@ const args = yargs(hideBin(process.argv))
  })
  .option("filter", {
  description:
- "Filter components containining this word in purl. Multiple values allowed."
+ "Filter components containing this word in purl. Multiple values allowed."
  })
  .option("only", {
  description:
- "Include components only containining this word in purl. Useful to generate BOM with first party components alone. Multiple values allowed."
+ "Include components only containing this word in purl. Useful to generate BOM with first party components alone. Multiple values allowed."
  })
  .option("author", {
  description:
@@ -520,7 +520,6 @@ const checkPermissions = (filePath) => {
  if (bomNSData.nsMapping && Object.keys(bomNSData.nsMapping).length) {
  const nsFile = jsonFile + ".map";
  fs.writeFileSync(nsFile, JSON.stringify(bomNSData.nsMapping));
- console.log("Namespace mapping file written to", nsFile);
  }
  }
  } else if (!options.print) {
package/docker.js CHANGED
@@ -30,9 +30,25 @@ let dockerConn = undefined;
  let isPodman = false;
  let isPodmanRootless = true;
  let isDockerRootless = false;
+ // https://github.com/containerd/containerd
+ let isContainerd = !!process.env.CONTAINERD_ADDRESS;
  const WIN_LOCAL_TLS = "http://localhost:2375";
  let isWinLocalTLS = false;

+ if (
+ !process.env.DOCKER_HOST &&
+ (process.env.CONTAINERD_ADDRESS ||
+ (process.env.XDG_RUNTIME_DIR &&
+ existsSync(
+ join(process.env.XDG_RUNTIME_DIR, "containerd-rootless", "api.sock")
+ )))
+ ) {
+ isContainerd = true;
+ }
+
+ // Cache the registry auth keys
+ const registry_auth_keys = {};
+
  /**
  * Method to get all dirs matching a name
  *
@@ -94,7 +110,17 @@ export const getOnlyDirs = (srcpath, dirName) => {
  ].filter((d) => d.endsWith(dirName));
  };

- const getDefaultOptions = () => {
+ const getDefaultOptions = (forRegistry) => {
+ let authTokenSet = false;
+ if (!forRegistry && process.env.DOCKER_SERVER_ADDRESS) {
+ forRegistry = process.env.DOCKER_SERVER_ADDRESS;
+ }
+ if (forRegistry) {
+ forRegistry = forRegistry.replace("http://", "").replace("https://", "");
+ if (forRegistry.includes("/")) {
+ forRegistry = forRegistry.split("/")[0];
+ }
+ }
  const opts = {
  enableUnixSockets: true,
  throwHttpErrors: true,
@@ -102,6 +128,97 @@ const getDefaultOptions = () => {
  hooks: { beforeError: [] },
  mutableDefaults: true
  };
+ const DOCKER_CONFIG = process.env.DOCKER_CONFIG || join(homedir(), ".docker");
+ // Support for private registry
+ if (process.env.DOCKER_AUTH_CONFIG) {
+ opts.headers = {
+ "X-Registry-Auth": process.env.DOCKER_AUTH_CONFIG
+ };
+ authTokenSet = true;
+ }
+ if (
+ !authTokenSet &&
+ process.env.DOCKER_USER &&
+ process.env.DOCKER_PASSWORD &&
+ process.env.DOCKER_EMAIL &&
+ forRegistry
+ ) {
+ const authPayload = {
+ username: process.env.DOCKER_USER,
+ email: process.env.DOCKER_EMAIL,
+ serveraddress: forRegistry
+ };
+ if (process.env.DOCKER_USER === "<token>") {
+ authPayload.IdentityToken = process.env.DOCKER_PASSWORD;
+ } else {
+ authPayload.password = process.env.DOCKER_PASSWORD;
+ }
+ opts.headers = {
+ "X-Registry-Auth": Buffer.from(JSON.stringify(authPayload)).toString(
+ "base64"
+ )
+ };
+ }
+ if (!authTokenSet && existsSync(join(DOCKER_CONFIG, "config.json"))) {
+ const configData = readFileSync(
+ join(DOCKER_CONFIG, "config.json"),
+ "utf-8"
+ );
+ if (configData) {
+ try {
+ const configJson = JSON.parse(configData);
+ if (configJson.auths) {
+ // Check if there are hardcoded tokens
+ for (const serverAddress of Object.keys(configJson.auths)) {
+ if (forRegistry && !serverAddress.includes(forRegistry)) {
+ continue;
+ }
+ if (configJson.auths[serverAddress].auth) {
+ opts.headers = {
+ "X-Registry-Auth": configJson.auths[serverAddress].auth
+ };
+ authTokenSet = true;
+ break;
+ } else if (configJson.credsStore) {
+ const helperAuthToken = getCredsFromHelper(
+ configJson.credsStore,
+ serverAddress
+ );
+ if (helperAuthToken) {
+ opts.headers = {
+ "X-Registry-Auth": helperAuthToken
+ };
+ authTokenSet = true;
+ break;
+ }
+ }
+ }
+ } else if (configJson.credHelpers) {
+ // Support for credential helpers
+ for (const serverAddress of Object.keys(configJson.credHelpers)) {
+ if (forRegistry && !serverAddress.includes(forRegistry)) {
+ continue;
+ }
+ if (configJson.credHelpers[serverAddress]) {
+ const helperAuthToken = getCredsFromHelper(
+ configJson.credHelpers[serverAddress],
+ serverAddress
+ );
+ if (helperAuthToken) {
+ opts.headers = {
+ "X-Registry-Auth": helperAuthToken
+ };
+ authTokenSet = true;
+ break;
+ }
+ }
+ }
+ }
+ } catch (err) {
+ // pass
+ }
+ }
+ }
  const userInfo = _userInfo();
  opts.podmanPrefixUrl = isWin ? "" : `http://unix:/run/podman/podman.sock:`;
  opts.podmanRootlessPrefixUrl = isWin
@@ -126,8 +243,8 @@ const getDefaultOptions = () => {
  opts.prefixUrl = isWin
  ? WIN_LOCAL_TLS
  : isDockerRootless
- ? `http://unix:${homedir()}/.docker/run/docker.sock:`
- : "http://unix:/var/run/docker.sock:";
+ ? `http://unix:${homedir()}/.docker/run/docker.sock:`
+ : "http://unix:/var/run/docker.sock:";
  }
  }
  } else {
@@ -148,22 +265,34 @@ const getDefaultOptions = () => {
  ),
  key: readFileSync(join(process.env.DOCKER_CERT_PATH, "key.pem"), "utf8")
  };
+ // Disable tls on empty values
+ // From the docker docs: Setting the DOCKER_TLS_VERIFY environment variable to any value other than the empty string is equivalent to setting the --tlsverify flag
+ if (
+ process.env.DOCKER_TLS_VERIFY &&
+ process.env.DOCKER_TLS_VERIFY === ""
+ ) {
+ opts.https.rejectUnauthorized = false;
+ console.log("TLS Verification disabled for", hostStr);
+ }
  }
  }

  return opts;
  };

- export const getConnection = async (options) => {
- if (!dockerConn) {
- const defaultOptions = getDefaultOptions();
+ export const getConnection = async (options, forRegistry) => {
+ if (isContainerd) {
+ return undefined;
+ } else if (!dockerConn) {
+ const defaultOptions = getDefaultOptions(forRegistry);
  const opts = Object.assign(
  {},
  {
  enableUnixSockets: defaultOptions.enableUnixSockets,
  throwHttpErrors: defaultOptions.throwHttpErrors,
  method: defaultOptions.method,
- prefixUrl: defaultOptions.prefixUrl
+ prefixUrl: defaultOptions.prefixUrl,
+ headers: defaultOptions.headers
  },
  options
  );
@@ -247,8 +376,8 @@ export const getConnection = async (options) => {
  return dockerConn;
  };

- export const makeRequest = async (path, method = "GET") => {
- const client = await getConnection();
+ export const makeRequest = async (path, method = "GET", forRegistry) => {
+ const client = await getConnection({}, forRegistry);
  if (!client) {
  return undefined;
  }
@@ -258,14 +387,15 @@ export const makeRequest = async (path, method = "GET") => {
  enableUnixSockets: true,
  method
  };
- const defaultOptions = getDefaultOptions();
+ const defaultOptions = getDefaultOptions(forRegistry);
  const opts = Object.assign(
  {},
  {
  enableUnixSockets: defaultOptions.enableUnixSockets,
  throwHttpErrors: defaultOptions.throwHttpErrors,
  method: defaultOptions.method,
- prefixUrl: defaultOptions.prefixUrl
+ prefixUrl: defaultOptions.prefixUrl,
+ headers: defaultOptions.headers
  },
  extraOptions
  );
@@ -331,20 +461,44 @@ export const parseImageName = (fullImageName) => {
  return nameObj;
  };

+ /**
+ * Prefer cli on windows or when using tcp/ssh based host.
+ *
+ * @returns boolean true if we should use the cli. false otherwise
+ */
+ const needsCliFallback = () => {
+ return (
+ isWin ||
+ (process.env.DOCKER_HOST &&
+ (process.env.DOCKER_HOST.startsWith("tcp://") ||
+ process.env.DOCKER_HOST.startsWith("ssh://")))
+ );
+ };
+
  /**
  * Method to get image to the local registry by pulling from the remote if required
  */
  export const getImage = async (fullImageName) => {
  let localData = undefined;
  let pullData = undefined;
- const { repo, tag, digest } = parseImageName(fullImageName);
- let repoWithTag = `${repo}:${tag !== "" ? tag : ":latest"}`;
+ const { registry, repo, tag, digest } = parseImageName(fullImageName);
+ let repoWithTag =
+ registry && registry !== "docker.io"
+ ? fullImageName
+ : `${repo}:${tag !== "" ? tag : ":latest"}`;
  // Fetch only the latest tag if none is specified
  if (tag === "" && digest === "") {
  fullImageName = fullImageName + ":latest";
  }
- if (isWin) {
- let result = spawnSync("docker", ["pull", fullImageName], {
+ if (isContainerd) {
+ console.log(
+ "containerd/nerdctl is currently unsupported. Export the image manually and run cdxgen against the tar image."
+ );
+ return undefined;
+ }
+ if (needsCliFallback()) {
+ const dockerCmd = process.env.DOCKER_CMD || "docker";
+ let result = spawnSync(dockerCmd, ["pull", fullImageName], {
  encoding: "utf-8"
  });
  if (result.status !== 0 || result.error) {
@@ -355,12 +509,16 @@ export const getImage = async (fullImageName) => {
  console.log(
  "Ensure Docker for Desktop is running as an administrator with 'Exposing daemon on TCP without TLS' setting turned on."
  );
+ } else if (result.stderr && result.stderr.includes("not found")) {
+ console.log(
+ "Set the environment variable DOCKER_CMD to use an alternative command such as nerdctl or podman."
+ );
  } else {
  console.log(result.stderr);
  }
  return localData;
  } else {
- result = spawnSync("docker", ["inspect", fullImageName], {
+ result = spawnSync(dockerCmd, ["inspect", fullImageName], {
  encoding: "utf-8"
  });
  if (result.status !== 0 || result.error) {
@@ -385,7 +543,11 @@ export const getImage = async (fullImageName) => {
  }
  }
  try {
- localData = await makeRequest(`images/${repoWithTag}/json`);
+ localData = await makeRequest(
+ `images/${repoWithTag}/json`,
+ "GET",
+ registry
+ );
  if (localData) {
  return localData;
  }
@@ -393,10 +555,14 @@ export const getImage = async (fullImageName) => {
  // ignore
  }
  try {
- localData = await makeRequest(`images/${repo}/json`);
+ localData = await makeRequest(`images/${repo}/json`, "GET", registry);
  } catch (err) {
  try {
- localData = await makeRequest(`images/${fullImageName}/json`);
+ localData = await makeRequest(
+ `images/${fullImageName}/json`,
+ "GET",
+ registry
+ );
  if (localData) {
  return localData;
  }
@@ -412,7 +578,8 @@ export const getImage = async (fullImageName) => {
  try {
  pullData = await makeRequest(
  `images/create?fromImage=${fullImageName}`,
- "POST"
+ "POST",
+ registry
  );
  if (
  pullData &&
@@ -434,7 +601,8 @@ export const getImage = async (fullImageName) => {
  }
  pullData = await makeRequest(
  `images/create?fromImage=${repoWithTag}`,
- "POST"
+ "POST",
+ registry
  );
  } catch (err) {
  // continue regardless of error
@@ -444,7 +612,11 @@ export const getImage = async (fullImageName) => {
  if (DEBUG_MODE) {
  console.log(`Trying with ${repoWithTag}`);
  }
- localData = await makeRequest(`images/${repoWithTag}/json`);
+ localData = await makeRequest(
+ `images/${repoWithTag}/json`,
+ "GET",
+ registry
+ );
  if (localData) {
  return localData;
  }
@@ -453,7 +625,7 @@ export const getImage = async (fullImageName) => {
  if (DEBUG_MODE) {
  console.log(`Trying with ${repo}`);
  }
- localData = await makeRequest(`images/${repo}/json`);
+ localData = await makeRequest(`images/${repo}/json`, "GET", registry);
  if (localData) {
  return localData;
  }
@@ -464,7 +636,11 @@ export const getImage = async (fullImageName) => {
  if (DEBUG_MODE) {
  console.log(`Trying with ${fullImageName}`);
  }
- localData = await makeRequest(`images/${fullImageName}/json`);
+ localData = await makeRequest(
+ `images/${fullImageName}/json`,
+ "GET",
+ registry
+ );
  } catch (err) {
  // continue regardless of error
  }
@@ -684,7 +860,7 @@ export const exportImage = async (fullImageName) => {
  if (!localData) {
  return undefined;
  }
- const { tag, digest } = parseImageName(fullImageName);
+ const { registry, tag, digest } = parseImageName(fullImageName);
  // Fetch only the latest tag if none is specified
  if (tag === "" && digest === "") {
  fullImageName = fullImageName + ":latest";
@@ -695,7 +871,7 @@ export const exportImage = async (fullImageName) => {
  // Windows containers use index.json
  const manifestIndexFile = join(tempDir, "index.json");
  // On Windows, fallback to invoking cli
- if (isWin) {
+ if (needsCliFallback()) {
  const imageTarFile = join(tempDir, "image.tar");
  console.log(
  `About to export image ${fullImageName} to ${imageTarFile} using docker cli`
@@ -722,10 +898,16 @@ export const exportImage = async (fullImageName) => {
  }
  }
  } else {
- const client = await getConnection();
+ const client = await getConnection({}, registry);
  try {
  if (DEBUG_MODE) {
- console.log(`About to export image ${fullImageName} to ${tempDir}`);
+ if (registry && registry.trim().length) {
+ console.log(
+ `About to export image ${fullImageName} from ${registry} to ${tempDir}`
+ );
+ } else {
+ console.log(`About to export image ${fullImageName} to ${tempDir}`);
+ }
  }
  await stream.pipeline(
  client.stream(`images/${fullImageName}/get`),
@@ -896,3 +1078,46 @@ export const removeImage = async (fullImageName, force = false) => {
  );
  return removeData;
  };
+
+ export const getCredsFromHelper = (exeSuffix, serverAddress) => {
+ if (registry_auth_keys[serverAddress]) {
+ return registry_auth_keys[serverAddress];
+ }
+ let credHelperExe = `docker-credential-${exeSuffix}`;
+ if (isWin) {
+ credHelperExe = credHelperExe + ".exe";
+ }
+ const result = spawnSync(credHelperExe, ["get"], {
+ input: serverAddress,
+ encoding: "utf-8"
+ });
+ if (result.status !== 0 || result.error) {
+ console.log(result.stdout, result.stderr);
+ } else if (result.stdout) {
+ const cmdOutput = Buffer.from(result.stdout).toString();
+ try {
+ const authPayload = JSON.parse(cmdOutput);
+ const fixedAuthPayload = {
+ username:
+ authPayload.username ||
+ authPayload.Username ||
+ process.env.DOCKER_USER,
+ password:
+ authPayload.password ||
+ authPayload.Secret ||
+ process.env.DOCKER_PASSWORD,
+ email:
+ authPayload.email || authPayload.username || process.env.DOCKER_USER,
+ serveraddress: serverAddress
+ };
+ const authKey = Buffer.from(JSON.stringify(fixedAuthPayload)).toString(
+ "base64"
+ );
+ registry_auth_keys[serverAddress] = authKey;
+ return authKey;
+ } catch (err) {
+ return undefined;
+ }
+ }
+ return undefined;
+ };
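The registry support above sources credentials from `DOCKER_AUTH_CONFIG`, from `DOCKER_USER`/`DOCKER_PASSWORD`/`DOCKER_EMAIL`, or from `$DOCKER_CONFIG/config.json` (including `credsStore`/`credHelpers`), and forwards them to the Docker Engine API as an `X-Registry-Auth` header. A sketch of the header value this code builds for a private registry; the registry host below is a placeholder:

```js
// Mirrors the authPayload handling in getDefaultOptions / getCredsFromHelper above.
const authPayload = {
  username: process.env.DOCKER_USER,
  password: process.env.DOCKER_PASSWORD, // or IdentityToken when DOCKER_USER === "<token>"
  email: process.env.DOCKER_EMAIL,
  serveraddress: "registry.example.com"  // placeholder private registry host
};
const xRegistryAuth = Buffer.from(JSON.stringify(authPayload)).toString("base64");
// cdxgen sends this value as the X-Registry-Auth header on image pull/inspect calls.
```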
package/docker.test.js CHANGED
@@ -54,6 +54,15 @@ test("parseImageName tests", () => {
  digest: "",
  platform: ""
  });
+ expect(
+ parseImageName("foocorp.jfrog.io/docker/library/eclipse-temurin:latest")
+ ).toEqual({
+ registry: "foocorp.jfrog.io",
+ repo: "docker/library/eclipse-temurin",
+ tag: "latest",
+ digest: "",
+ platform: ""
+ });
  expect(
  parseImageName(
  "quay.io/shiftleft/scan-java@sha256:5d008306a7c5d09ba0161a3408fa3839dc2c9dd991ffb68adecc1040399fe9e1"
package/evinser.js CHANGED
@@ -498,14 +498,16 @@ export const parseSliceUsages = async (
  !isFilterableType(language, userDefinedTypesMap, atype[1])
  ) {
  if (!atype[1].includes("(") && !atype[1].includes(".py")) {
- typesToLookup.add(atype[1]);
+ typesToLookup.add(simplifyType(atype[1]));
  // Javascript calls can be resolved to a precise line number only from the call nodes
  if (
  ["javascript", "js", "ts", "typescript"].includes(language) &&
  ausageLine
  ) {
  if (atype[1].includes(":")) {
- typesToLookup.add(atype[1].split("::")[0].replace(/:/g, "/"));
+ typesToLookup.add(
+ simplifyType(atype[1].split("::")[0].replace(/:/g, "/"))
+ );
  }
  addToOverrides(lKeyOverrides, atype[1], fileName, ausageLine);
  }
@@ -532,7 +534,7 @@ export const parseSliceUsages = async (
  !acall?.resolvedMethod.includes("(") &&
  !acall?.resolvedMethod.includes(".py")
  ) {
- typesToLookup.add(acall?.resolvedMethod);
+ typesToLookup.add(simplifyType(acall?.resolvedMethod));
  // Javascript calls can be resolved to a precise line number only from the call nodes
  if (acall.lineNumber) {
  addToOverrides(
@@ -560,10 +562,12 @@ export const parseSliceUsages = async (
  for (const aparamType of acall?.paramTypes || []) {
  if (!isFilterableType(language, userDefinedTypesMap, aparamType)) {
  if (!aparamType.includes("(") && !aparamType.includes(".py")) {
- typesToLookup.add(aparamType);
+ typesToLookup.add(simplifyType(aparamType));
  if (acall.lineNumber) {
  if (aparamType.includes(":")) {
- typesToLookup.add(aparamType.split("::")[0].replace(/:/g, "/"));
+ typesToLookup.add(
+ simplifyType(aparamType.split("::")[0].replace(/:/g, "/"))
+ );
  }
  addToOverrides(
  lKeyOverrides,
@@ -609,7 +613,7 @@ export const parseSliceUsages = async (
  } else {
  // Check the namespaces db
  let nsHits = typePurlsCache[atype];
- if (["java", "jar"].includes(language)) {
+ if (!nsHits && ["java", "jar"].includes(language)) {
  nsHits = await dbObjMap.Namespaces.findAll({
  attributes: ["purl"],
  where: {
@@ -629,6 +633,9 @@ export const parseSliceUsages = async (
  }
  }
  typePurlsCache[atype] = nsHits;
+ } else {
+ // Avoid persistent lookups
+ typePurlsCache[atype] = [];
  }
  }
  }
@@ -834,7 +841,7 @@ export const extractEndpoints = (language, code) => {
  case "jar":
  if (
  code.startsWith("@") &&
- code.includes("Mapping") &&
+ (code.includes("Mapping") || code.includes("Path")) &&
  code.includes("(")
  ) {
  const matches = code.match(/['"](.*?)['"]/gi) || [];
@@ -1190,6 +1197,16 @@ export const framePicker = (dfFrames) => {
  return aframe;
  };

+ /**
+ * Method to simplify types. For example, arrays ending with [] could be simplified.
+ *
+ * @param {string} typeFullName Full name of the type to simplify
+ * @returns Simplified type string
+ */
+ export const simplifyType = (typeFullName) => {
+ return typeFullName.replace("[]", "");
+ };
+
  export const getClassTypeFromSignature = (language, typeFullName) => {
  if (["java", "jar"].includes(language) && typeFullName.includes(":")) {
  typeFullName = typeFullName.split(":")[0];
@@ -1225,7 +1242,7 @@ export const getClassTypeFromSignature = (language, typeFullName) => {
  if (typeFullName.includes("$")) {
  typeFullName = typeFullName.split("$")[0];
  }
- return typeFullName;
+ return simplifyType(typeFullName);
  };

  const addToOverrides = (lKeyOverrides, atype, fileName, ausageLineNumber) => {
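`simplifyType` only strips an array suffix, so `java.lang.String[]` and `java.lang.String` now resolve to the same namespace/purl entry during slice analysis. A quick usage sketch (note that `String.prototype.replace` with a string argument removes only the first `[]`):

```js
import { simplifyType } from "./evinser.js";

simplifyType("java.lang.String[]"); // -> "java.lang.String"
simplifyType("com.example.Order");  // -> "com.example.Order" (unchanged)
```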
package/index.js CHANGED
@@ -104,7 +104,8 @@ import {
  TIMEOUT_MS,
  MAX_BUFFER,
  getNugetMetadata,
- frameworksList
+ frameworksList,
+ parseContainerFile
  } from "./utils.js";
  import { spawnSync } from "node:child_process";
  import { fileURLToPath } from "node:url";
@@ -1859,7 +1860,7 @@ export const createNodejsBom = async (path, options) => {
  const parentSubComponents = [];
  let ppurl = "";
  // Docker mode requires special handling
- if (["docker", "oci", "os"].includes(options.projectType)) {
+ if (["docker", "oci", "container", "os"].includes(options.projectType)) {
  const pkgJsonFiles = getAllFiles(path, "**/package.json", options);
  // Are there any package.json files in the container?
  if (pkgJsonFiles.length) {
@@ -1879,7 +1880,7 @@ export const createNodejsBom = async (path, options) => {
  }
  let allImports = {};
  if (
- !["docker", "oci", "os"].includes(options.projectType) &&
+ !["docker", "oci", "container", "os"].includes(options.projectType) &&
  !options.noBabel
  ) {
  if (DEBUG_MODE) {
@@ -2752,7 +2753,7 @@ export const createGoBom = async (path, options) => {
  if (gomodFiles.length) {
  let shouldManuallyParse = false;
  // Use the go list -deps and go mod why commands to generate a good quality BOM for non-docker invocations
- if (!["docker", "oci", "os"].includes(options.projectType)) {
+ if (!["docker", "oci", "container", "os"].includes(options.projectType)) {
  for (const f of gomodFiles) {
  const basePath = dirname(f);
  // Ignore vendor packages
@@ -2864,7 +2865,7 @@ export const createGoBom = async (path, options) => {
  }
  }
  // Parse the gomod files manually. The resultant BOM would be incomplete
- if (!["docker", "oci", "os"].includes(options.projectType)) {
+ if (!["docker", "oci", "container", "os"].includes(options.projectType)) {
  console.log(
  "Manually parsing go.mod files. The resultant BOM would be incomplete."
  );
@@ -3153,7 +3154,7 @@ export const createCppBom = (path, options) => {
  // inside of other project types. So we currently limit this analyis only when -t argument
  // is used.
  if (
- !["docker", "oci", "os"].includes(options.projectType) &&
+ !["docker", "oci", "container", "os"].includes(options.projectType) &&
  (!options.createMultiXBom || options.deep)
  ) {
  let osPkgsList = [];
@@ -3713,6 +3714,16 @@ export const createContainerSpecLikeBom = async (path, options) => {
  (options.multiProject ? "**/" : "") + "*.yml",
  options
  );
+ const dfFiles = getAllFiles(
+ path,
+ (options.multiProject ? "**/" : "") + "*Dockerfile*",
+ options
+ );
+ const cfFiles = getAllFiles(
+ path,
+ (options.multiProject ? "**/" : "") + "*Containerfile*",
+ options
+ );
  const yamlFiles = getAllFiles(
  path,
  (options.multiProject ? "**/" : "") + "*.yaml",
@@ -3736,14 +3747,22 @@ export const createContainerSpecLikeBom = async (path, options) => {
  }
  // Privado.ai json files
  const privadoFiles = getAllFiles(path, ".privado/" + "*.json", options);
- // parse yaml manifest files
- if (dcFiles.length) {
- for (const f of dcFiles) {
+ // Parse yaml manifest files, dockerfiles or containerfiles
+ if (dcFiles.length || dfFiles.length || cfFiles.length) {
+ for (const f of [...dcFiles, ...dfFiles, ...cfFiles]) {
  if (DEBUG_MODE) {
  console.log(`Parsing ${f}`);
  }
- const dcData = readFileSync(f, { encoding: "utf-8" });
- const imglist = parseContainerSpecData(dcData);
+
+ const dData = readFileSync(f, { encoding: "utf-8" });
+ let imglist = [];
+ // parse yaml manifest files
+ if (f.endsWith(".yml") || f.endsWith(".yaml")) {
+ imglist = parseContainerSpecData(dData);
+ } else {
+ imglist = parseContainerFile(dData);
+ }
+
  if (imglist && imglist.length) {
  if (DEBUG_MODE) {
  console.log("Images identified in", f, "are", imglist);
@@ -5186,12 +5205,22 @@ export const createXBom = async (path, options) => {
  return createHelmBom(path, options);
  }

- // Docker compose, kubernetes and skaffold
+ // Docker compose, dockerfile, containerfile, kubernetes and skaffold
  const dcFiles = getAllFiles(
  path,
  (options.multiProject ? "**/" : "") + "docker-compose*.yml",
  options
  );
+ const dfFiles = getAllFiles(
+ path,
+ (options.multiProject ? "**/" : "") + "*Dockerfile*",
+ options
+ );
+ const cfFiles = getAllFiles(
+ path,
+ (options.multiProject ? "**/" : "") + "*Containerfile*",
+ options
+ );
  const skFiles = getAllFiles(
  path,
  (options.multiProject ? "**/" : "") + "skaffold.yaml",
@@ -5202,7 +5231,13 @@ export const createXBom = async (path, options) => {
  (options.multiProject ? "**/" : "") + "deployment.yaml",
  options
  );
- if (dcFiles.length || skFiles.length || deplFiles.length) {
+ if (
+ dcFiles.length ||
+ dfFiles.length ||
+ cfFiles.length ||
+ skFiles.length ||
+ deplFiles.length
+ ) {
  return await createContainerSpecLikeBom(path, options);
  }

@@ -5261,6 +5296,7 @@ export const createBom = async (path, options) => {
  projectType === "docker" ||
  projectType === "podman" ||
  projectType === "oci" ||
+ projectType === "container" ||
  path.startsWith("docker.io") ||
  path.startsWith("quay.io") ||
  path.startsWith("ghcr.io") ||
@@ -5468,7 +5504,9 @@ export const createBom = async (path, options) => {
  options
  );
  case "universal":
+ case "containerfile":
  case "docker-compose":
+ case "dockerfile":
  case "swarm":
  case "tekton":
  case "kustomize":
@@ -5507,19 +5545,36 @@ export async function submitBom(args, bomContents) {
  if (encodedBomContents.startsWith("77u/")) {
  encodedBomContents = encodedBomContents.substring(4);
  }
- let projectVersion = args.projectVersion || "master";
- if (projectVersion == true) {
- projectVersion = "master";
- }
  const bomPayload = {
- project: args.projectId,
- projectName: args.projectName,
- projectVersion: projectVersion,
  autoCreate: "true",
  bom: encodedBomContents
  };
- if (typeof args.parentProjectId !== "undefined") {
- bomPayload.parentUUID = args.parentProjectId;
+ let projectVersion = args.projectVersion || "master";
+ if (
+ typeof args.projectId !== "undefined" ||
+ (typeof args.projectName !== "undefined" &&
+ typeof projectVersion !== "undefined")
+ ) {
+ if (typeof args.projectId !== "undefined") {
+ bomPayload.project = args.projectId;
+ }
+ if (typeof args.projectName !== "undefined") {
+ bomPayload.projectName = args.projectName;
+ }
+ if (typeof projectVersion !== "undefined") {
+ bomPayload.projectVersion = projectVersion;
+ }
+ } else {
+ console.log(
+ "projectId, projectName and projectVersion, or all three must be provided."
+ );
+ return;
+ }
+ if (
+ typeof args.parentProjectId !== "undefined" ||
+ typeof args.parentUUID !== "undefined"
+ ) {
+ bomPayload.parentUUID = args.parentProjectId || args.parentUUID;
  }
  if (DEBUG_MODE) {
  console.log(
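With the reworked `submitBom`, the payload carries either a project UUID or a `projectName`/`projectVersion` pair (plus an optional `parentUUID`), instead of always sending all three fields. A sketch of the two payload shapes implied by the diff; the identifiers and names are placeholders:

```js
// Existing project addressed by UUID
const byId = {
  autoCreate: "true",
  bom: "<base64-encoded CycloneDX BOM>",
  project: "<project-uuid>"
};

// Project addressed by name and version (auto-created when missing)
const byName = {
  autoCreate: "true",
  bom: "<base64-encoded CycloneDX BOM>",
  projectName: "my-app",
  projectVersion: "master",            // default when no project version is given
  parentUUID: "<parent-project-uuid>"  // optional
};
```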
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@cyclonedx/cdxgen",
- "version": "9.9.3",
+ "version": "9.9.5",
  "description": "Creates CycloneDX Software Bill of Materials (SBOM) from source or container image",
  "homepage": "http://github.com/cyclonedx/cdxgen",
  "author": "Prabhu Subramanian <prabhu@appthreat.com>",
@@ -55,8 +55,8 @@
  "url": "https://github.com/cyclonedx/cdxgen/issues"
  },
  "dependencies": {
- "@babel/parser": "^7.23.0",
- "@babel/traverse": "^7.23.2",
+ "@babel/parser": "^7.23.3",
+ "@babel/traverse": "^7.23.3",
  "@npmcli/arborist": "7.2.0",
  "ajv": "^8.12.0",
  "ajv-formats": "^2.1.1",
@@ -83,7 +83,7 @@
  "yargs": "^17.7.2"
  },
  "optionalDependencies": {
- "@appthreat/atom": "1.5.6",
+ "@appthreat/atom": "1.6.4",
  "@cyclonedx/cdxgen-plugins-bin": "^1.4.0",
  "@cyclonedx/cdxgen-plugins-bin-arm64": "^1.4.0",
  "@cyclonedx/cdxgen-plugins-bin-ppc64": "^1.4.0",
@@ -91,7 +91,7 @@
  "compression": "^1.7.4",
  "connect": "^3.7.0",
  "jsonata": "^2.0.3",
- "sequelize": "^6.33.0",
+ "sequelize": "^6.35.0",
  "sqlite3": "^5.1.6"
  },
  "files": [
@@ -102,10 +102,10 @@
  "devDependencies": {
  "caxa": "^3.0.1",
  "docsify-cli": "^4.4.4",
- "eslint": "^8.52.0",
+ "eslint": "^8.53.0",
  "eslint-config-prettier": "^9.0.0",
  "eslint-plugin-prettier": "^5.0.1",
  "jest": "^29.7.0",
- "prettier": "3.0.3"
+ "prettier": "3.1.0"
  }
  }
package/server.js CHANGED
@@ -72,7 +72,7 @@ const parseQueryString = (q, body, options = {}) => {
  "requiredOnly",
  "noBabel",
  "installDeps",
- "project",
+ "projectId",
  "projectName",
  "projectGroup",
  "projectVersion",
package/utils.js CHANGED
@@ -19,7 +19,8 @@ import {
  readFileSync,
  rmSync,
  unlinkSync,
- writeFileSync
+ writeFileSync,
+ readdirSync
  } from "node:fs";
  import got from "got";
  import Arborist from "@npmcli/arborist";
@@ -4388,6 +4389,43 @@ export const recurseImageNameLookup = (keyValueObj, pkgList, imgList) => {
  return imgList;
  };

+ export const parseContainerFile = function (fileContents) {
+ const imgList = [];
+
+ let buildStageNames = [];
+ for (const line of fileContents.split("\n")) {
+ if (line.trim().startsWith("#")) {
+ continue; // skip commented out lines
+ }
+
+ if (line.includes("FROM")) {
+ const fromStatement = line.split("FROM")[1].split("AS");
+
+ const imageStatement = fromStatement[0].trim();
+ const buildStageName = fromStatement[1]?.trim();
+
+ if (buildStageNames.includes(imageStatement)) {
+ if (DEBUG_MODE) {
+ console.log(
+ `Skipping image ${imageStatement} which uses previously seen build stage name.`
+ );
+ }
+ continue;
+ }
+
+ imgList.push({
+ image: imageStatement
+ });
+
+ if (buildStageName) {
+ buildStageNames.push(buildStageName);
+ }
+ }
+ }
+
+ return imgList;
+ };
+
  export const parseContainerSpecData = function (dcData) {
  const pkgList = [];
  const imgList = [];
@@ -6477,12 +6515,67 @@ export const parseJarManifest = function (jarMetadata) {
  return metadata;
  };

+ export const parsePomProperties = function (pomProperties) {
+ const properties = {};
+ if (!pomProperties) {
+ return properties;
+ }
+ pomProperties.split("\n").forEach((l) => {
+ l = l.replace("\r", "");
+ if (l.includes("=")) {
+ const tmpA = l.split("=");
+ if (tmpA && tmpA.length === 2) {
+ properties[tmpA[0]] = tmpA[1].replace("\r", "");
+ }
+ }
+ });
+ return properties;
+ };
+
  export const encodeForPurl = (s) => {
  return s && !s.includes("%40")
  ? encodeURIComponent(s).replace(/%3A/g, ":").replace(/%2F/g, "/")
  : s;
  };

+ /**
+ * Method to get pom properties from maven directory
+ *
+ * @param {string} mavenDir Path to maven directory
+ *
+ * @return array with pom properties
+ */
+ export const getPomPropertiesFromMavenDir = function (mavenDir) {
+ let pomProperties = {};
+ if (existsSync(mavenDir) && lstatSync(mavenDir).isDirectory()) {
+ let mavenDirEntries = readdirSync(mavenDir, { withFileTypes: true });
+ mavenDirEntries.forEach((mavenDirEntry) => {
+ if (mavenDirEntry.isDirectory()) {
+ let groupDirEntries = readdirSync(
+ join(mavenDirEntry.path, mavenDirEntry.name),
+ { withFileTypes: true }
+ );
+ groupDirEntries.forEach((groupDirEntry) => {
+ if (groupDirEntry.isDirectory()) {
+ let pomPropertiesFile = join(
+ groupDirEntry.path,
+ groupDirEntry.name,
+ "pom.properties"
+ );
+ if (existsSync(pomPropertiesFile)) {
+ const pomPropertiesString = readFileSync(pomPropertiesFile, {
+ encoding: "utf-8"
+ });
+ pomProperties = parsePomProperties(pomPropertiesString);
+ }
+ }
+ });
+ }
+ });
+ }
+ return pomProperties;
+ };
+
  /**
  * Method to extract a war or ear file
  *
@@ -6564,13 +6657,14 @@ export const extractJarArchive = function (
  }
  const manifestDir = join(tempDir, "META-INF");
  const manifestFile = join(manifestDir, "MANIFEST.MF");
+ const mavenDir = join(manifestDir, "maven");
  let jarResult = {
  status: 1
  };
  if (existsSync(pomname)) {
  jarResult = { status: 0 };
  } else {
- jarResult = spawnSync("jar", ["-xf", jf], {
+ jarResult = spawnSync("jar", ["-xf", jf, "META-INF"], {
  encoding: "utf-8",
  cwd: tempDir,
  shell: isWin,
@@ -6580,29 +6674,42 @@ export const extractJarArchive = function (
  if (jarResult.status !== 0) {
  console.error(jarResult.stdout, jarResult.stderr);
  } else {
- if (existsSync(manifestFile)) {
+ // When maven descriptor is available take group, name and version from pom.properties
+ // META-INF/maven/${groupId}/${artifactId}/pom.properties
+ // see https://maven.apache.org/shared/maven-archiver/index.html
+ const pomProperties = getPomPropertiesFromMavenDir(mavenDir);
+ let group = pomProperties["groupId"],
+ name = pomProperties["artifactId"],
+ version = pomProperties["version"],
+ confidence = 1,
+ technique = "manifest-analysis";
+ if ((!group || !name || !version) && existsSync(manifestFile)) {
+ confidence = 0.8;
  const jarMetadata = parseJarManifest(
  readFileSync(manifestFile, {
  encoding: "utf-8"
  })
  );
- let group =
+ group =
+ group ||
  jarMetadata["Extension-Name"] ||
  jarMetadata["Implementation-Vendor-Id"] ||
  jarMetadata["Bundle-SymbolicName"] ||
  jarMetadata["Bundle-Vendor"] ||
  jarMetadata["Automatic-Module-Name"] ||
  "";
- let version =
+ version =
+ version ||
  jarMetadata["Bundle-Version"] ||
  jarMetadata["Implementation-Version"] ||
  jarMetadata["Specification-Version"];
  if (version && version.includes(" ")) {
  version = version.split(" ")[0];
  }
- let name = "";
  // Prefer jar filename to construct name and version
  if (!name || !version || name === "" || version === "") {
+ confidence = 0.5;
+ technique = "filename";
  const tmpA = jarname.split("-");
  if (tmpA && tmpA.length > 1) {
  const lastPart = tmpA[tmpA.length - 1];
@@ -6651,56 +6758,56 @@ export const extractJarArchive = function (
  break;
  }
  }
- if (name && version) {
- // if group is empty use name as group
- group = encodeForPurl(group === "." ? name : group || name) || "";
- let apkg = {
+ // if group is empty use name as group
+ group = group === "." ? name : group || name;
+ }
+ if (name && version) {
+ let apkg = {
+ group: group ? encodeForPurl(group) : "",
+ name: name ? encodeForPurl(name) : "",
+ version,
+ purl: new PackageURL(
+ "maven",
  group,
- name: name ? encodeForPurl(name) : "",
+ name,
  version,
- purl: new PackageURL(
- "maven",
- group,
- name,
- version,
- { type: "jar" },
- null
- ).toString(),
- evidence: {
- identity: {
- field: "purl",
- confidence: 0.5,
- methods: [
- {
- technique: "filename",
- confidence: 0.5,
- value: jarname
- }
- ]
- }
- },
- properties: [
- {
- name: "SrcFile",
- value: jarname
- }
- ]
- };
- if (
- jarNSMapping &&
- jarNSMapping[apkg.purl] &&
- jarNSMapping[apkg.purl].namespaces
- ) {
- apkg.properties.push({
- name: "Namespaces",
- value: jarNSMapping[apkg.purl].namespaces.join("\n")
- });
- }
- pkgList.push(apkg);
- } else {
- if (DEBUG_MODE) {
- console.log(`Ignored jar ${jarname}`, jarMetadata, name, version);
- }
+ { type: "jar" },
+ null
+ ).toString(),
+ evidence: {
+ identity: {
+ field: "purl",
+ confidence: confidence,
+ methods: [
+ {
+ technique: technique,
+ confidence: confidence,
+ value: jarname
+ }
+ ]
+ }
+ },
+ properties: [
+ {
+ name: "SrcFile",
+ value: jarname
+ }
+ ]
+ };
+ if (
+ jarNSMapping &&
+ jarNSMapping[apkg.purl] &&
+ jarNSMapping[apkg.purl].namespaces
+ ) {
+ apkg.properties.push({
+ name: "Namespaces",
+ value: jarNSMapping[apkg.purl].namespaces.join("\n")
+ });
+ }
+ pkgList.push(apkg);
+ } else {
+ if (DEBUG_MODE) {
+ console.log(`Ignored jar ${jarname}`, name, version);
+ }
  }
  }
  try {
@@ -6892,6 +6999,7 @@ export const getMavenCommand = (srcPath, rootPath) => {
  let isWrapperReady = false;
  let isWrapperFound = false;
  let findMavenFile = "mvnw";
+ let mavenWrapperCmd = null;
  if (platform() == "win32") {
  findMavenFile = "mvnw.bat";
  if (
@@ -6910,7 +7018,7 @@ export const getMavenCommand = (srcPath, rootPath) => {
  } catch (e) {
  // continue regardless of error
  }
- mavenCmd = resolve(join(srcPath, findMavenFile));
+ mavenWrapperCmd = resolve(join(srcPath, findMavenFile));
  isWrapperFound = true;
  } else if (rootPath && existsSync(join(rootPath, findMavenFile))) {
  // Check if the root directory has a wrapper script
@@ -6919,7 +7027,7 @@ export const getMavenCommand = (srcPath, rootPath) => {
  } catch (e) {
  // continue regardless of error
  }
- mavenCmd = resolve(join(rootPath, findMavenFile));
+ mavenWrapperCmd = resolve(join(rootPath, findMavenFile));
  isWrapperFound = true;
  }
  if (isWrapperFound) {
@@ -6928,14 +7036,15 @@ export const getMavenCommand = (srcPath, rootPath) => {
  "Testing the wrapper script by invoking wrapper:wrapper task"
  );
  }
- const result = spawnSync(mavenCmd, ["wrapper:wrapper"], {
+ const result = spawnSync(mavenWrapperCmd, ["wrapper:wrapper"], {
  encoding: "utf-8",
  cwd: rootPath,
  timeout: TIMEOUT_MS,
  shell: isWin
  });
- if (!result.error) {
+ if (!result.error && !result.status) {
  isWrapperReady = true;
+ mavenCmd = mavenWrapperCmd;
  } else {
  if (DEBUG_MODE) {
  console.log(
@@ -7690,7 +7799,7 @@ export const parseCmakeLikeFile = (cmakeListFile, pkgType, options = {}) => {
  .split(")")[0]
  .split(",")
  .filter((v) => v.length > 1);
- const parentName = tmpB[0];
+ const parentName = tmpB[0].replace(":", "");
  let parentVersion = undefined;
  // In case of meson.build we can find the version number after the word version
  // thanks to our replaces and splits
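A quick sketch of what the two new helpers return, following the parsing logic shown above; the sample inputs are invented:

```js
import { parseContainerFile, parsePomProperties } from "./utils.js";

const images = parseContainerFile(
  ["FROM node:20 AS build", "RUN npm ci", "FROM build", "FROM nginx:1.25"].join("\n")
);
// -> [ { image: "node:20" }, { image: "nginx:1.25" } ]
// "FROM build" is skipped because "build" was recorded as a build stage name.

const props = parsePomProperties("groupId=org.example\nartifactId=demo\nversion=1.0.0");
// -> { groupId: "org.example", artifactId: "demo", version: "1.0.0" }
```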
package/utils.test.js CHANGED
@@ -73,7 +73,8 @@ import {
  parsePyProjectToml,
  parseSbtTree,
  parseCmakeDotFile,
- parseCmakeLikeFile
+ parseCmakeLikeFile,
+ parseContainerFile
  } from "./utils.js";
  import { readFileSync } from "node:fs";
  import { parse } from "ssri";
@@ -2727,6 +2728,31 @@ test("parse container spec like files", async () => {
  });
  });

+ test("parse containerfiles / dockerfiles", async () => {
+ let dep_list = parseContainerFile(
+ readFileSync("./test/data/Dockerfile", { encoding: "utf-8" })
+ );
+ expect(dep_list.length).toEqual(5);
+ expect(dep_list[0]).toEqual({
+ image: "hello-world"
+ });
+ expect(dep_list[0]).toEqual({
+ image: "hello-world"
+ });
+ expect(dep_list[1]).toEqual({
+ image: "hello-world"
+ });
+ expect(dep_list[2]).toEqual({
+ image: "hello-world:latest"
+ });
+ expect(dep_list[3]).toEqual({
+ image: "hello-world@sha256:1234567890abcdef"
+ });
+ expect(dep_list[4]).toEqual({
+ image: "hello-world:latest@sha256:1234567890abcdef"
+ });
+ });
+
  test("parse cloudbuild data", async () => {
  expect(parseCloudBuildData(null)).toEqual([]);
  const dep_list = parseCloudBuildData(