@cyclonedx/cdxgen 9.9.5 → 9.9.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -86,10 +86,10 @@ For go, `go mod why` command is used to identify required packages. For php, com
86
86
  ## Installing
87
87
 
88
88
  ```shell
89
- sudo npm install -g @cyclonedx/cdxgen
89
+ npm install -g @cyclonedx/cdxgen
90
90
 
91
91
  # For CycloneDX 1.4 compatibility use version 8.6.0 or pass the argument `--spec-version 1.4`
92
- sudo npm install -g @cyclonedx/cdxgen@8.6.0
92
+ npm install -g @cyclonedx/cdxgen@8.6.0
93
93
  ```
94
94
 
95
95
  If you are a [Homebrew](https://brew.sh/) user, you can also install [cdxgen](https://formulae.brew.sh/formula/cdxgen) via:
@@ -327,7 +327,7 @@ This would create a bom.json.map file with the jar - class name mapping. Refer t
327
327
 
328
328
  ## Resolving licenses
329
329
 
330
- cdxgen can automatically query public registries such as maven, npm, or nuget to resolve the package licenses. This is a time-consuming operation and is disabled by default. To enable, set the environment variable `FETCH_LICENSE` to `true`, as shown.
330
+ cdxgen can automatically query public registries such as maven, npm, or nuget to resolve the package licenses. This is a time-consuming operation and is disabled by default. To enable, set the environment variable `FETCH_LICENSE` to `true`, as shown. Ensure that `GITHUB_TOKEN` is set or provided by the [built-in GITHUB_TOKEN in GitHub Actions](https://docs.github.com/en/rest/overview/rate-limits-for-the-rest-api#primary-rate-limit-for-github_token-in-github-actions); otherwise, rate limiting might prevent license resolution.
331
331
 
332
332
  ```bash
333
333
  export FETCH_LICENSE=true
@@ -141,6 +141,25 @@
141
141
  "pkg:cargo/nickel",
142
142
  "pkg:cargo/yew",
143
143
  "pkg:cargo/azul",
144
- "pkg:cargo/conrod"
144
+ "pkg:cargo/conrod",
145
+ "pkg:generic/Aws",
146
+ "pkg:generic/Azure",
147
+ "pkg:generic/google",
148
+ "pkg:generic/CivetServer",
149
+ "pkg:generic/civetweb",
150
+ "pkg:generic/cpprest",
151
+ "pkg:generic/QCoreApplication",
152
+ "pkg:generic/drogon",
153
+ "pkg:generic/wfrest",
154
+ "pkg:generic/http",
155
+ "pkg:generic/fio",
156
+ "pkg:generic/onion",
157
+ "pkg:generic/lwan",
158
+ "pkg:generic/oatpp",
159
+ "pkg:generic/QDjango",
160
+ "pkg:generic/userver",
161
+ "pkg:generic/Wt/",
162
+ "pkg:generic/klone",
163
+ "pkg:generic/kcgi"
145
164
  ]
146
165
  }
@@ -1,31 +1,82 @@
1
1
  [
2
- { "license": "Apache-2.0", "group": "cloud.google.com", "name": "go" },
3
- { "license": "Apache-2.0", "group": "cloud.google.com/go", "name": "*" },
4
- { "license": "Apache-2.0", "group": "cuelang.org", "name": "go" },
5
- { "license": "MIT", "group": "pack.ag", "name": "amqp" },
6
- { "license": "Apache-2.0", "group": "google.golang.org", "name": "*" },
7
- { "license": "BSD-3-Clause", "group": "golang.org/x", "name": "*" },
8
2
  {
9
- "license": "BSD-3-Clause",
10
- "group": "dmitri.shuralyov.com/gpu",
11
- "name": "*"
3
+ "packageNamespace": "*",
4
+ "knownLicenses": [{ "license": "MIT", "urlIncludes": "mit-license" }]
12
5
  },
13
- { "license": "Apache-2.0", "group": "contrib.go.opencensus.io", "name": "*" },
14
- { "license": "Apache-2.0", "group": "git.apache.org", "name": "*" },
15
- { "license": "Apache-2.0", "group": ".", "name": "go.opencensus.io" },
16
- { "license": "MIT", "group": "sigs.k8s.io", "name": "*" },
17
- { "license": "BSD-3-Clause", "group": "rsc.io", "name": "*" },
18
- { "license": "Apache-2.0", "group": "openpitrix.io", "name": "*" },
19
- { "license": "BSD-3-Clause", "group": "modernc.org", "name": "*" },
20
- { "license": "Apache-2.0", "group": "kubesphere.io", "name": "*" },
21
- { "license": "Apache-2.0", "group": "k8s.io", "name": "*" },
22
- { "license": "Apache-2.0", "group": "istio.io", "name": "*" },
23
- { "license": "MIT", "group": "honnef.co/go", "name": "*" },
24
- { "license": "Apache-2.0", "group": ".", "name": "gotest.tools" },
25
- { "license": "Apache-2.0", "group": "gopkg.in", "name": "*" },
26
- { "license": "Apache-2.0", "group": "code.cloudfoundry.org", "name": "*" },
27
- { "license": "BSD-3-Clause", "group": "gonum.org/v1", "name": "*" },
28
- { "license": "Apache-2.0", "group": "gomodules.xyz/jsonpatch", "name": "*" },
29
- { "license": "MIT", "group": "go.uber.org", "name": "*" },
30
- { "license": "MIT", "group": "go.etcd.io", "name": "*" }
6
+ {
7
+ "packageNamespace": "pkg:golang/",
8
+ "knownLicenses": [
9
+ { "license": "Apache-2.0", "group": "cloud.google.com", "name": "go" },
10
+ { "license": "Apache-2.0", "group": "cloud.google.com/go", "name": "*" },
11
+ { "license": "Apache-2.0", "group": "cuelang.org", "name": "go" },
12
+ { "license": "MIT", "group": "pack.ag", "name": "amqp" },
13
+ { "license": "Apache-2.0", "group": "google.golang.org", "name": "*" },
14
+ { "license": "BSD-3-Clause", "group": "golang.org/x", "name": "*" },
15
+ {
16
+ "license": "BSD-3-Clause",
17
+ "group": "dmitri.shuralyov.com/gpu",
18
+ "name": "*"
19
+ },
20
+ {
21
+ "license": "Apache-2.0",
22
+ "group": "contrib.go.opencensus.io",
23
+ "name": "*"
24
+ },
25
+ { "license": "Apache-2.0", "group": "git.apache.org", "name": "*" },
26
+ { "license": "Apache-2.0", "group": ".", "name": "go.opencensus.io" },
27
+ { "license": "MIT", "group": "sigs.k8s.io", "name": "*" },
28
+ { "license": "BSD-3-Clause", "group": "rsc.io", "name": "*" },
29
+ { "license": "Apache-2.0", "group": "openpitrix.io", "name": "*" },
30
+ { "license": "BSD-3-Clause", "group": "modernc.org", "name": "*" },
31
+ { "license": "Apache-2.0", "group": "kubesphere.io", "name": "*" },
32
+ { "license": "Apache-2.0", "group": "k8s.io", "name": "*" },
33
+ { "license": "Apache-2.0", "group": "istio.io", "name": "*" },
34
+ { "license": "MIT", "group": "honnef.co/go", "name": "*" },
35
+ { "license": "Apache-2.0", "group": ".", "name": "gotest.tools" },
36
+ { "license": "Apache-2.0", "group": "gopkg.in", "name": "*" },
37
+ {
38
+ "license": "Apache-2.0",
39
+ "group": "code.cloudfoundry.org",
40
+ "name": "*"
41
+ },
42
+ { "license": "BSD-3-Clause", "group": "gonum.org/v1", "name": "*" },
43
+ {
44
+ "license": "Apache-2.0",
45
+ "group": "gomodules.xyz/jsonpatch",
46
+ "name": "*"
47
+ },
48
+ { "license": "MIT", "group": "go.uber.org", "name": "*" },
49
+ { "license": "MIT", "group": "go.etcd.io", "name": "*" }
50
+ ]
51
+ },
52
+ {
53
+ "packageNamespace": "pkg:nuget/",
54
+ "knownLicenses": [
55
+ {
56
+ "license": "MIT",
57
+ "urlIncludes": "//github.com/dotnet/standard/",
58
+ "licenseEvidence": "https://github.com/dotnet/standard/blob/release/3.0/LICENSE.TXT"
59
+ },
60
+ {
61
+ "license": "MIT",
62
+ "urlIncludes": "//github.com/dotnet/corefx/",
63
+ "licenseEvidence": "https://github.com/dotnet/corefx/blob/release/2.0.0/LICENSE.TXT"
64
+ },
65
+ {
66
+ "license": "MIT",
67
+ "urlIncludes": "//github.com/dotnet/core-setup/",
68
+ "licenseEvidence": "https://github.com/dotnet/core-setup/blob/release/2.0.0/LICENSE.TXT"
69
+ },
70
+ {
71
+ "licenseName": ".NET Library License",
72
+ "urlEndswith": "?LinkId=329770",
73
+ "licenseEvidence": "https://go.microsoft.com/fwlink/?LinkId=329770"
74
+ },
75
+ {
76
+ "licenseName": ".NET Library License",
77
+ "urlEndswith": "dotnet_library_license.htm",
78
+ "licenseEvidence": "https://dotnet.microsoft.com/en-us/dotnet_library_license.htm"
79
+ }
80
+ ]
81
+ }
31
82
  ]
package/docker.js CHANGED
@@ -416,11 +416,16 @@ export const parseImageName = (fullImageName) => {
416
416
  repo: "",
417
417
  tag: "",
418
418
  digest: "",
419
- platform: ""
419
+ platform: "",
420
+ group: "",
421
+ name: ""
420
422
  };
421
423
  if (!fullImageName) {
422
424
  return nameObj;
423
425
  }
426
+ // ensure it's lowercased
427
+ fullImageName = fullImageName.toLowerCase();
428
+
424
429
  // Extract registry name
425
430
  if (
426
431
  fullImageName.includes("/") &&
@@ -437,6 +442,7 @@ export const parseImageName = (fullImageName) => {
437
442
  fullImageName = fullImageName.replace(tmpA[0] + "/", "");
438
443
  }
439
444
  }
445
+
440
446
  // Extract digest name
441
447
  if (fullImageName.includes("@sha256:")) {
442
448
  const tmpA = fullImageName.split("@sha256:");
@@ -445,6 +451,7 @@ export const parseImageName = (fullImageName) => {
445
451
  fullImageName = fullImageName.replace("@sha256:" + nameObj.digest, "");
446
452
  }
447
453
  }
454
+
448
455
  // Extract tag name
449
456
  if (fullImageName.includes(":")) {
450
457
  const tmpA = fullImageName.split(":");
@@ -453,11 +460,20 @@ export const parseImageName = (fullImageName) => {
453
460
  fullImageName = fullImageName.replace(":" + nameObj.tag, "");
454
461
  }
455
462
  }
456
- if (fullImageName && fullImageName.startsWith("library/")) {
457
- fullImageName = fullImageName.replace("library/", "");
458
- }
463
+
459
464
  // The left over string is the repo name
460
465
  nameObj.repo = fullImageName;
466
+ nameObj.name = fullImageName;
467
+
468
+ // extract group name
469
+ if (fullImageName.includes("/")) {
470
+ const tmpA = fullImageName.split("/");
471
+ if (tmpA.length > 1) {
472
+ nameObj.name = tmpA[tmpA.length - 1];
473
+ nameObj.group = fullImageName.replace("/" + tmpA[tmpA.length - 1], "");
474
+ }
475
+ }
476
+
461
477
  return nameObj;
462
478
  };
463
479
 
package/docker.test.js CHANGED
@@ -24,35 +24,54 @@ test("parseImageName tests", () => {
24
24
  repo: "debian",
25
25
  tag: "",
26
26
  digest: "",
27
- platform: ""
27
+ platform: "",
28
+ group: "",
29
+ name: "debian"
28
30
  });
29
31
  expect(parseImageName("debian:latest")).toEqual({
30
32
  registry: "",
31
33
  repo: "debian",
32
34
  tag: "latest",
33
35
  digest: "",
34
- platform: ""
36
+ platform: "",
37
+ group: "",
38
+ name: "debian"
39
+ });
40
+ expect(parseImageName("library/debian:latest")).toEqual({
41
+ registry: "",
42
+ repo: "library/debian",
43
+ tag: "latest",
44
+ digest: "",
45
+ platform: "",
46
+ group: "library",
47
+ name: "debian"
35
48
  });
36
49
  expect(parseImageName("shiftleft/scan:v1.15.6")).toEqual({
37
50
  registry: "",
38
51
  repo: "shiftleft/scan",
39
52
  tag: "v1.15.6",
40
53
  digest: "",
41
- platform: ""
54
+ platform: "",
55
+ group: "shiftleft",
56
+ name: "scan"
42
57
  });
43
58
  expect(parseImageName("localhost:5000/shiftleft/scan:v1.15.6")).toEqual({
44
59
  registry: "localhost:5000",
45
60
  repo: "shiftleft/scan",
46
61
  tag: "v1.15.6",
47
62
  digest: "",
48
- platform: ""
63
+ platform: "",
64
+ group: "shiftleft",
65
+ name: "scan"
49
66
  });
50
67
  expect(parseImageName("localhost:5000/shiftleft/scan")).toEqual({
51
68
  registry: "localhost:5000",
52
69
  repo: "shiftleft/scan",
53
70
  tag: "",
54
71
  digest: "",
55
- platform: ""
72
+ platform: "",
73
+ group: "shiftleft",
74
+ name: "scan"
56
75
  });
57
76
  expect(
58
77
  parseImageName("foocorp.jfrog.io/docker/library/eclipse-temurin:latest")
@@ -61,7 +80,9 @@ test("parseImageName tests", () => {
61
80
  repo: "docker/library/eclipse-temurin",
62
81
  tag: "latest",
63
82
  digest: "",
64
- platform: ""
83
+ platform: "",
84
+ group: "docker/library",
85
+ name: "eclipse-temurin"
65
86
  });
66
87
  expect(
67
88
  parseImageName(
@@ -72,7 +93,9 @@ test("parseImageName tests", () => {
72
93
  repo: "shiftleft/scan-java",
73
94
  tag: "",
74
95
  digest: "5d008306a7c5d09ba0161a3408fa3839dc2c9dd991ffb68adecc1040399fe9e1",
75
- platform: ""
96
+ platform: "",
97
+ group: "shiftleft",
98
+ name: "scan-java"
76
99
  });
77
100
  }, 120000);
78
101
 
package/evinser.js CHANGED
@@ -322,6 +322,29 @@ export const analyzeProject = async (dbObjMap, options) => {
322
322
  // Load any existing purl-location information from the sbom.
323
323
  // For eg: cdxgen populates this information for javascript projects
324
324
  let { purlLocationMap, purlImportsMap } = initFromSbom(components);
325
+ // Do reachables first so that usages slicing can reuse the atom file
326
+ if (options.withReachables) {
327
+ if (
328
+ options.reachablesSlicesFile &&
329
+ fs.existsSync(options.reachablesSlicesFile)
330
+ ) {
331
+ reachablesSlicesFile = options.reachablesSlicesFile;
332
+ reachablesSlice = JSON.parse(
333
+ fs.readFileSync(options.reachablesSlicesFile, "utf-8")
334
+ );
335
+ } else {
336
+ retMap = createSlice(language, dirPath, "reachables", options);
337
+ if (retMap && retMap.slicesFile && fs.existsSync(retMap.slicesFile)) {
338
+ reachablesSlicesFile = retMap.slicesFile;
339
+ reachablesSlice = JSON.parse(
340
+ fs.readFileSync(retMap.slicesFile, "utf-8")
341
+ );
342
+ }
343
+ }
344
+ }
345
+ if (reachablesSlice && Object.keys(reachablesSlice).length) {
346
+ dataFlowFrames = await collectReachableFrames(language, reachablesSlice);
347
+ }
325
348
  // Reuse existing usages slices
326
349
  if (options.usagesSlicesFile && fs.existsSync(options.usagesSlicesFile)) {
327
350
  usageSlice = JSON.parse(fs.readFileSync(options.usagesSlicesFile, "utf-8"));
@@ -374,28 +397,6 @@ export const analyzeProject = async (dbObjMap, options) => {
374
397
  purlImportsMap
375
398
  );
376
399
  }
377
- if (options.withReachables) {
378
- if (
379
- options.reachablesSlicesFile &&
380
- fs.existsSync(options.reachablesSlicesFile)
381
- ) {
382
- reachablesSlicesFile = options.reachablesSlicesFile;
383
- reachablesSlice = JSON.parse(
384
- fs.readFileSync(options.reachablesSlicesFile, "utf-8")
385
- );
386
- } else {
387
- retMap = createSlice(language, dirPath, "reachables", options);
388
- if (retMap && retMap.slicesFile && fs.existsSync(retMap.slicesFile)) {
389
- reachablesSlicesFile = retMap.slicesFile;
390
- reachablesSlice = JSON.parse(
391
- fs.readFileSync(retMap.slicesFile, "utf-8")
392
- );
393
- }
394
- }
395
- }
396
- if (reachablesSlice && Object.keys(reachablesSlice).length) {
397
- dataFlowFrames = await collectReachableFrames(language, reachablesSlice);
398
- }
399
400
  return {
400
401
  atomFile: retMap.atomFile,
401
402
  usagesSlicesFile,
@@ -776,15 +777,19 @@ export const detectServicesFromUDT = (
776
777
  servicesMap
777
778
  ) => {
778
779
  if (
779
- ["python", "py"].includes(language) &&
780
+ ["python", "py", "c", "cpp", "c++"].includes(language) &&
780
781
  userDefinedTypes &&
781
782
  userDefinedTypes.length
782
783
  ) {
783
784
  for (const audt of userDefinedTypes) {
784
785
  if (
785
- audt.name.includes("route") ||
786
- audt.name.includes("path") ||
787
- audt.name.includes("url")
786
+ audt.name.toLowerCase().includes("route") ||
787
+ audt.name.toLowerCase().includes("path") ||
788
+ audt.name.toLowerCase().includes("url") ||
789
+ audt.name.toLowerCase().includes("registerhandler") ||
790
+ audt.name.toLowerCase().includes("endpoint") ||
791
+ audt.name.toLowerCase().includes("api") ||
792
+ audt.name.toLowerCase().includes("add_method")
788
793
  ) {
789
794
  const fields = audt.fields || [];
790
795
  if (
@@ -875,14 +880,11 @@ export const extractEndpoints = (language, code) => {
875
880
  );
876
881
  }
877
882
  break;
878
- case "py":
879
- case "python":
883
+ default:
880
884
  endpoints = (code.match(/['"](.*?)['"]/gi) || [])
881
885
  .map((v) => v.replace(/["']/g, "").replace("\n", ""))
882
886
  .filter((v) => v.length > 2);
883
887
  break;
884
- default:
885
- break;
886
888
  }
887
889
  return endpoints;
888
890
  };
@@ -910,6 +912,7 @@ export const createEvinseFile = (sliceArtefacts, options) => {
910
912
  const components = bomJson.components || [];
911
913
  let occEvidencePresent = false;
912
914
  let csEvidencePresent = false;
915
+ let servicesPresent = false;
913
916
  for (const comp of components) {
914
917
  if (!comp.purl) {
915
918
  continue;
@@ -957,6 +960,7 @@ export const createEvinseFile = (sliceArtefacts, options) => {
957
960
  }
958
961
  // Add to existing services
959
962
  bomJson.services = (bomJson.services || []).concat(services);
963
+ servicesPresent = true;
960
964
  }
961
965
  if (options.annotate) {
962
966
  if (!bomJson.annotations) {
@@ -993,7 +997,7 @@ export const createEvinseFile = (sliceArtefacts, options) => {
993
997
  bomJson.metadata.timestamp = new Date().toISOString();
994
998
  delete bomJson.signature;
995
999
  fs.writeFileSync(evinseOutFile, JSON.stringify(bomJson, null, 2));
996
- if (occEvidencePresent || csEvidencePresent) {
1000
+ if (occEvidencePresent || csEvidencePresent || servicesPresent) {
997
1001
  console.log(evinseOutFile, "created successfully.");
998
1002
  } else {
999
1003
  console.log(
package/index.js CHANGED
@@ -105,7 +105,8 @@ import {
105
105
  MAX_BUFFER,
106
106
  getNugetMetadata,
107
107
  frameworksList,
108
- parseContainerFile
108
+ parseContainerFile,
109
+ parseBitbucketPipelinesFile
109
110
  } from "./utils.js";
110
111
  import { spawnSync } from "node:child_process";
111
112
  import { fileURLToPath } from "node:url";
@@ -3148,6 +3149,18 @@ export const createCppBom = (path, options) => {
3148
3149
  retMap.parentComponent.type = "library";
3149
3150
  pkgList.push(retMap.parentComponent);
3150
3151
  }
3152
+ // Retain the dependency tree from cmake
3153
+ if (retMap.dependenciesList) {
3154
+ if (dependencies.length) {
3155
+ dependencies = mergeDependencies(
3156
+ dependencies,
3157
+ retMap.dependenciesList,
3158
+ parentComponent
3159
+ );
3160
+ } else {
3161
+ dependencies = retMap.dependenciesList;
3162
+ }
3163
+ }
3151
3164
  }
3152
3165
  }
3153
3166
  // The need for java >= 17 with atom is causing confusions since there could be C projects
@@ -3719,6 +3732,11 @@ export const createContainerSpecLikeBom = async (path, options) => {
3719
3732
  (options.multiProject ? "**/" : "") + "*Dockerfile*",
3720
3733
  options
3721
3734
  );
3735
+ const bbPipelineFiles = getAllFiles(
3736
+ path,
3737
+ (options.multiProject ? "**/" : "") + "bitbucket-pipelines.yml",
3738
+ options
3739
+ );
3722
3740
  const cfFiles = getAllFiles(
3723
3741
  path,
3724
3742
  (options.multiProject ? "**/" : "") + "*Containerfile*",
@@ -3747,27 +3765,35 @@ export const createContainerSpecLikeBom = async (path, options) => {
3747
3765
  }
3748
3766
  // Privado.ai json files
3749
3767
  const privadoFiles = getAllFiles(path, ".privado/" + "*.json", options);
3750
- // Parse yaml manifest files, dockerfiles or containerfiles
3751
- if (dcFiles.length || dfFiles.length || cfFiles.length) {
3752
- for (const f of [...dcFiles, ...dfFiles, ...cfFiles]) {
3768
+
3769
+ // Parse yaml manifest files, dockerfiles, containerfiles or bitbucket pipeline files
3770
+ if (
3771
+ dcFiles.length ||
3772
+ dfFiles.length ||
3773
+ cfFiles.length ||
3774
+ bbPipelineFiles.length
3775
+ ) {
3776
+ for (const f of [...dcFiles, ...dfFiles, ...cfFiles, ...bbPipelineFiles]) {
3753
3777
  if (DEBUG_MODE) {
3754
3778
  console.log(`Parsing ${f}`);
3755
3779
  }
3756
3780
 
3757
3781
  const dData = readFileSync(f, { encoding: "utf-8" });
3758
- let imglist = [];
3782
+ let imgList = [];
3759
3783
  // parse yaml manifest files
3760
- if (f.endsWith(".yml") || f.endsWith(".yaml")) {
3761
- imglist = parseContainerSpecData(dData);
3784
+ if (f.endsWith("bitbucket-pipelines.yml")) {
3785
+ imgList = parseBitbucketPipelinesFile(dData);
3786
+ } else if (f.endsWith(".yml") || f.endsWith(".yaml")) {
3787
+ imgList = parseContainerSpecData(dData);
3762
3788
  } else {
3763
- imglist = parseContainerFile(dData);
3789
+ imgList = parseContainerFile(dData);
3764
3790
  }
3765
3791
 
3766
- if (imglist && imglist.length) {
3792
+ if (imgList && imgList.length) {
3767
3793
  if (DEBUG_MODE) {
3768
- console.log("Images identified in", f, "are", imglist);
3794
+ console.log("Images identified in", f, "are", imgList);
3769
3795
  }
3770
- for (const img of imglist) {
3796
+ for (const img of imgList) {
3771
3797
  const commonProperties = [
3772
3798
  {
3773
3799
  name: "SrcFile",
@@ -3832,20 +3858,26 @@ export const createContainerSpecLikeBom = async (path, options) => {
3832
3858
  console.log(`Parsing image ${img.image}`);
3833
3859
  }
3834
3860
  const imageObj = parseImageName(img.image);
3861
+
3835
3862
  const pkg = {
3836
- name: imageObj.repo,
3863
+ name: imageObj.name,
3864
+ group: imageObj.group,
3837
3865
  version:
3838
3866
  imageObj.tag ||
3839
3867
  (imageObj.digest ? "sha256:" + imageObj.digest : "latest"),
3840
3868
  qualifiers: {},
3841
- properties: commonProperties
3869
+ properties: commonProperties,
3870
+ type: "container"
3842
3871
  };
3843
3872
  if (imageObj.registry) {
3844
- pkg["qualifiers"]["repository_url"] = imageObj.registry;
3873
+ pkg["qualifiers"]["repository_url"] = img.image;
3845
3874
  }
3846
3875
  if (imageObj.platform) {
3847
3876
  pkg["qualifiers"]["platform"] = imageObj.platform;
3848
3877
  }
3878
+ if (imageObj.tag) {
3879
+ pkg["qualifiers"]["tag"] = imageObj.tag;
3880
+ }
3849
3881
  // Create an entry for the oci image
3850
3882
  const imageBomData = buildBomNSData(options, [pkg], "oci", {
3851
3883
  src: img.image,
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@cyclonedx/cdxgen",
3
- "version": "9.9.5",
3
+ "version": "9.9.6",
4
4
  "description": "Creates CycloneDX Software Bill of Materials (SBOM) from source or container image",
5
5
  "homepage": "http://github.com/cyclonedx/cdxgen",
6
6
  "author": "Prabhu Subramanian <prabhu@appthreat.com>",
@@ -55,13 +55,13 @@
55
55
  "url": "https://github.com/cyclonedx/cdxgen/issues"
56
56
  },
57
57
  "dependencies": {
58
- "@babel/parser": "^7.23.3",
59
- "@babel/traverse": "^7.23.3",
58
+ "@babel/parser": "^7.23.5",
59
+ "@babel/traverse": "^7.23.5",
60
60
  "@npmcli/arborist": "7.2.0",
61
61
  "ajv": "^8.12.0",
62
62
  "ajv-formats": "^2.1.1",
63
63
  "cheerio": "^1.0.0-rc.12",
64
- "edn-data": "^1.0.0",
64
+ "edn-data": "1.1.1",
65
65
  "find-up": "^6.3.0",
66
66
  "glob": "^10.3.10",
67
67
  "global-agent": "^3.0.0",
@@ -83,7 +83,7 @@
83
83
  "yargs": "^17.7.2"
84
84
  },
85
85
  "optionalDependencies": {
86
- "@appthreat/atom": "1.6.4",
86
+ "@appthreat/atom": "1.7.2",
87
87
  "@cyclonedx/cdxgen-plugins-bin": "^1.4.0",
88
88
  "@cyclonedx/cdxgen-plugins-bin-arm64": "^1.4.0",
89
89
  "@cyclonedx/cdxgen-plugins-bin-ppc64": "^1.4.0",
@@ -91,7 +91,7 @@
91
91
  "compression": "^1.7.4",
92
92
  "connect": "^3.7.0",
93
93
  "jsonata": "^2.0.3",
94
- "sequelize": "^6.35.0",
94
+ "sequelize": "^6.35.1",
95
95
  "sqlite3": "^5.1.6"
96
96
  },
97
97
  "files": [
@@ -102,7 +102,7 @@
102
102
  "devDependencies": {
103
103
  "caxa": "^3.0.1",
104
104
  "docsify-cli": "^4.4.4",
105
- "eslint": "^8.53.0",
105
+ "eslint": "^8.54.0",
106
106
  "eslint-config-prettier": "^9.0.0",
107
107
  "eslint-plugin-prettier": "^5.0.1",
108
108
  "jest": "^29.7.0",
package/server.js CHANGED
@@ -83,7 +83,8 @@ const parseQueryString = (q, body, options = {}) => {
83
83
  "filter",
84
84
  "only",
85
85
  "autoCompositions",
86
- "gitBranch"
86
+ "gitBranch",
87
+ "active"
87
88
  ];
88
89
 
89
90
  for (const param of queryParams) {
package/utils.js CHANGED
@@ -217,20 +217,10 @@ export function getLicenses(pkg, format = "xml") {
217
217
  licenseContent.id = l;
218
218
  licenseContent.url = "https://opensource.org/licenses/" + l;
219
219
  } else if (l.startsWith("http")) {
220
- if (!l.includes("opensource.org")) {
221
- licenseContent.name = "CUSTOM";
222
- } else {
223
- const possibleId = l
224
- .replace("http://www.opensource.org/licenses/", "")
225
- .toUpperCase();
226
- spdxLicenses.forEach((v) => {
227
- if (v.toUpperCase() === possibleId) {
228
- licenseContent.id = v;
229
- }
230
- });
231
- }
232
- if (l.includes("mit-license")) {
233
- licenseContent.id = "MIT";
220
+ let knownLicense = getKnownLicense(l, pkg);
221
+ if (knownLicense) {
222
+ licenseContent.id = knownLicense.id;
223
+ licenseContent.name = knownLicense.name;
234
224
  }
235
225
  // We always need a name to avoid validation errors
236
226
  // Issue: #469
@@ -252,10 +242,82 @@ export function getLicenses(pkg, format = "xml") {
252
242
  return licenseContent;
253
243
  })
254
244
  .map((l) => ({ license: l }));
245
+ } else {
246
+ let knownLicense = getKnownLicense(undefined, pkg);
247
+ if (knownLicense) {
248
+ return [{ license: knownLicense }];
249
+ }
255
250
  }
256
251
  return undefined;
257
252
  }
258
253
 
254
+ /**
255
+ * Method to retrieve a known license from known-licenses.json
256
+ *
257
+ * @param {String} licenseUrl License URL
258
+ * @param {Object} pkg Package object
259
+ * @return {Object} Object with SPDX license id or license name
260
+ */
261
+ export const getKnownLicense = function (licenseUrl, pkg) {
262
+ if (licenseUrl && licenseUrl.includes("opensource.org")) {
263
+ const possibleId = licenseUrl
264
+ .toLowerCase()
265
+ .replace("https://", "http://")
266
+ .replace("http://www.opensource.org/licenses/", "");
267
+ for (const spdxLicense of spdxLicenses) {
268
+ if (spdxLicense.toLowerCase() === possibleId) {
269
+ return { id: spdxLicense };
270
+ }
271
+ }
272
+ } else if (licenseUrl && licenseUrl.includes("apache.org")) {
273
+ const possibleId = licenseUrl
274
+ .toLowerCase()
275
+ .replace("https://", "http://")
276
+ .replace("http://www.apache.org/licenses/license-", "apache-")
277
+ .replace(".txt", "");
278
+ for (const spdxLicense of spdxLicenses) {
279
+ if (spdxLicense.toLowerCase() === possibleId) {
280
+ return { id: spdxLicense };
281
+ }
282
+ }
283
+ }
284
+ for (const akLicGroup of knownLicenses) {
285
+ if (
286
+ akLicGroup.packageNamespace === "*" ||
287
+ (pkg.purl && pkg.purl.startsWith(akLicGroup.packageNamespace))
288
+ ) {
289
+ for (const akLic of akLicGroup.knownLicenses) {
290
+ if (akLic.group && akLic.name) {
291
+ if (akLic.group === "." && akLic.name === pkg.name) {
292
+ return { id: akLic.license, name: akLic.licenseName };
293
+ } else if (
294
+ pkg.group &&
295
+ pkg.group.includes(akLic.group) &&
296
+ (akLic.name === pkg.name || akLic.name === "*")
297
+ ) {
298
+ return { id: akLic.license, name: akLic.licenseName };
299
+ }
300
+ }
301
+ if (
302
+ akLic.urlIncludes &&
303
+ licenseUrl &&
304
+ licenseUrl.includes(akLic.urlIncludes)
305
+ ) {
306
+ return { id: akLic.license, name: akLic.licenseName };
307
+ }
308
+ if (
309
+ akLic.urlEndswith &&
310
+ licenseUrl &&
311
+ licenseUrl.endsWith(akLic.urlEndswith)
312
+ ) {
313
+ return { id: akLic.license, name: akLic.licenseName };
314
+ }
315
+ }
316
+ }
317
+ }
318
+ return undefined;
319
+ };
320
+
259
321
  /**
260
322
  * Tries to find a file containing the license text based on commonly
261
323
  * used naming and content types. If a candidate file is found, add
@@ -2430,7 +2492,7 @@ export const fetchPomXmlAsJson = async function ({
2430
2492
  * @param {String} name
2431
2493
  * @param {String} version
2432
2494
  *
2433
- * @return {String}
2495
+ * @return {Promise<String>}
2434
2496
  */
2435
2497
  export const fetchPomXml = async function ({
2436
2498
  urlPrefix,
@@ -2467,7 +2529,7 @@ export const parseLicenseEntryOrArrayFromPomXml = function (license) {
2467
2529
  * @param {String} name
2468
2530
  * @param {String} version
2469
2531
  *
2470
- * @return {String} License ID
2532
+ * @return {Promise<String>} License ID
2471
2533
  */
2472
2534
  export const extractLicenseCommentFromPomXml = async function ({
2473
2535
  urlPrefix,
@@ -3287,7 +3349,7 @@ export const toGitHubApiUrl = function (repoUrl, repoMetadata) {
3287
3349
  *
3288
3350
  * @param {String} repoUrl Repository url
3289
3351
  * @param {Object} repoMetadata Object containing group and package name strings
3290
- * @return {String} SPDX license id
3352
+ * @return {Promise<String>} SPDX license id
3291
3353
  */
3292
3354
  export const getRepoLicense = async function (repoUrl, repoMetadata) {
3293
3355
  let apiUrl = toGitHubApiUrl(repoUrl, repoMetadata);
@@ -3323,23 +3385,23 @@ export const getRepoLicense = async function (repoUrl, repoMetadata) {
3323
3385
  }
3324
3386
  }
3325
3387
  licObj["id"] = licenseId;
3326
- return licObj;
3388
+ if (licObj["id"] || licObj["name"]) {
3389
+ return licObj;
3390
+ }
3327
3391
  }
3328
3392
  } catch (err) {
3329
- return undefined;
3330
- }
3331
- } else if (repoMetadata) {
3332
- const group = repoMetadata.group;
3333
- const name = repoMetadata.name;
3334
- if (group && name) {
3335
- for (const akLic of knownLicenses) {
3336
- if (akLic.group === "." && akLic.name === name) {
3337
- return akLic.license;
3338
- } else if (
3339
- group.includes(akLic.group) &&
3340
- (akLic.name === name || akLic.name === "*")
3393
+ if (err && err.message) {
3394
+ if (
3395
+ err.message.includes("rate limit exceeded") &&
3396
+ !process.env.GITHUB_TOKEN
3341
3397
  ) {
3342
- return akLic.license;
3398
+ console.log(
3399
+ "Rate limit exceeded for REST API of github.com. " +
3400
+ "Please ensure GITHUB_TOKEN is set as environment variable. " +
3401
+ "See: https://docs.github.com/en/rest/overview/rate-limits-for-the-rest-api"
3402
+ );
3403
+ } else if (!err.message.includes("404")) {
3404
+ console.log(err);
3343
3405
  }
3344
3406
  }
3345
3407
  }
@@ -4393,12 +4455,14 @@ export const parseContainerFile = function (fileContents) {
4393
4455
  const imgList = [];
4394
4456
 
4395
4457
  let buildStageNames = [];
4396
- for (const line of fileContents.split("\n")) {
4397
- if (line.trim().startsWith("#")) {
4458
+ for (let line of fileContents.split("\n")) {
4459
+ line = line.trim();
4460
+
4461
+ if (line.startsWith("#")) {
4398
4462
  continue; // skip commented out lines
4399
4463
  }
4400
4464
 
4401
- if (line.includes("FROM")) {
4465
+ if (line.startsWith("FROM")) {
4402
4466
  const fromStatement = line.split("FROM")[1].split("AS");
4403
4467
 
4404
4468
  const imageStatement = fromStatement[0].trim();
@@ -4426,6 +4490,68 @@ export const parseContainerFile = function (fileContents) {
4426
4490
  return imgList;
4427
4491
  };
4428
4492
 
4493
+ export const parseBitbucketPipelinesFile = function (fileContents) {
4494
+ const imgList = [];
4495
+
4496
+ let privateImageBlockFound = false;
4497
+
4498
+ for (let line of fileContents.split("\n")) {
4499
+ line = line.trim();
4500
+ if (line.startsWith("#")) {
4501
+ continue; // skip commented out lines
4502
+ }
4503
+
4504
+ // Assume this is a private build image object
4505
+ if (line.startsWith("name:") && privateImageBlockFound) {
4506
+ const imageName = line.split("name:").pop().trim();
4507
+
4508
+ imgList.push({
4509
+ image: imageName
4510
+ });
4511
+
4512
+ privateImageBlockFound = false;
4513
+ }
4514
+
4515
+ // Docker image usage
4516
+ if (line.startsWith("image:")) {
4517
+ const imageName = line.split("image:").pop().trim();
4518
+
4519
+ /**
4520
+ * Assume this is a private build image object
4521
+ * See: https://support.atlassian.com/bitbucket-cloud/docs/use-docker-images-as-build-environments/#Using-private-build-images
4522
+ */
4523
+ if (imageName === "") {
4524
+ privateImageBlockFound = true;
4525
+ continue;
4526
+ } else {
4527
+ /**
4528
+ * Assume this is a public build image
4529
+ * See: https://support.atlassian.com/bitbucket-cloud/docs/use-docker-images-as-build-environments/#Using-public-build-images
4530
+ */
4531
+
4532
+ imgList.push({
4533
+ image: imageName
4534
+ });
4535
+ }
4536
+ }
4537
+
4538
+ // Pipe usage
4539
+ if (line.startsWith("- pipe:")) {
4540
+ let pipeName = line.split("- pipe:").pop().trim();
4541
+
4542
+ if (pipeName.startsWith("docker://")) {
4543
+ pipeName = pipeName.replace("docker://", "");
4544
+ }
4545
+
4546
+ imgList.push({
4547
+ image: pipeName
4548
+ });
4549
+ }
4550
+ }
4551
+
4552
+ return imgList;
4553
+ };
4554
+
4429
4555
  export const parseContainerSpecData = function (dcData) {
4430
4556
  const pkgList = [];
4431
4557
  const imgList = [];
@@ -4618,8 +4744,13 @@ export const parseOpenapiSpecData = function (oaData) {
4618
4744
  } catch (e) {
4619
4745
  return servlist;
4620
4746
  }
4621
- const name = oaData.info.title.replace(/ /g, "-");
4622
- const version = oaData.info.version || "latest";
4747
+
4748
+ const name =
4749
+ oaData.info && oaData.info.title
4750
+ ? oaData.info.title.replace(/ /g, "-")
4751
+ : "default-name";
4752
+ const version =
4753
+ oaData.info && oaData.info.version ? oaData.info.version : "latest";
4623
4754
  const aservice = {
4624
4755
  "bom-ref": `urn:service:${name}:${version}`,
4625
4756
  name,
@@ -5405,7 +5536,7 @@ export const parseComposerLock = function (pkgLockFile) {
5405
5536
  if (existsSync(pkgLockFile)) {
5406
5537
  let lockData = {};
5407
5538
  try {
5408
- lockData = JSON.parse(readFileSync(pkgLockFile, "utf8"));
5539
+ lockData = JSON.parse(readFileSync(pkgLockFile, { encoding: "utf-8" }));
5409
5540
  } catch (e) {
5410
5541
  console.error("Invalid composer.lock file:", pkgLockFile);
5411
5542
  return [];
@@ -5474,7 +5605,7 @@ export const parseSbtTree = (sbtTreeFile) => {
5474
5605
  const dependenciesList = [];
5475
5606
  const keys_cache = {};
5476
5607
  const level_trees = {};
5477
- const tmpA = readFileSync(sbtTreeFile, "utf-8").split("\n");
5608
+ const tmpA = readFileSync(sbtTreeFile, { encoding: "utf-8" }).split("\n");
5478
5609
  let last_level = 0;
5479
5610
  let last_purl = "";
5480
5611
  let stack = [];
@@ -5606,7 +5737,9 @@ export const parseSbtTree = (sbtTreeFile) => {
5606
5737
  export const parseSbtLock = function (pkgLockFile) {
5607
5738
  const pkgList = [];
5608
5739
  if (existsSync(pkgLockFile)) {
5609
- const lockData = JSON.parse(readFileSync(pkgLockFile, "utf8"));
5740
+ const lockData = JSON.parse(
5741
+ readFileSync(pkgLockFile, { encoding: "utf-8" })
5742
+ );
5610
5743
  if (lockData && lockData.dependencies) {
5611
5744
  for (const pkg of lockData.dependencies) {
5612
5745
  const artifacts = pkg.artifacts || undefined;
@@ -6063,7 +6196,9 @@ export const parseSwiftResolved = (resolvedFile) => {
6063
6196
  const pkgList = [];
6064
6197
  if (existsSync(resolvedFile)) {
6065
6198
  try {
6066
- const pkgData = JSON.parse(readFileSync(resolvedFile, "utf8"));
6199
+ const pkgData = JSON.parse(
6200
+ readFileSync(resolvedFile, { encoding: "utf-8" })
6201
+ );
6067
6202
  let resolvedList = [];
6068
6203
  if (pkgData.pins) {
6069
6204
  resolvedList = pkgData.pins;
@@ -6256,7 +6391,7 @@ export const collectJarNS = function (jarPath, pomPathMap = {}) {
6256
6391
  }
6257
6392
  }
6258
6393
  if (existsSync(pomname)) {
6259
- pomData = parsePomXml(readFileSync(pomname, "utf-8"));
6394
+ pomData = parsePomXml(readFileSync(pomname, { encoding: "utf-8" }));
6260
6395
  if (pomData) {
6261
6396
  const purlObj = new PackageURL(
6262
6397
  "maven",
@@ -7187,7 +7322,9 @@ export const findAppModules = function (
7187
7322
  ];
7188
7323
  executeAtom(src, args);
7189
7324
  if (existsSync(slicesFile)) {
7190
- const slicesData = JSON.parse(readFileSync(slicesFile), "utf8");
7325
+ const slicesData = JSON.parse(readFileSync(slicesFile), {
7326
+ encoding: "utf-8"
7327
+ });
7191
7328
  if (slicesData && Object.keys(slicesData) && slicesData.modules) {
7192
7329
  retList = slicesData.modules;
7193
7330
  } else {
@@ -7660,7 +7797,7 @@ export const componentSorter = (a, b) => {
7660
7797
  };
7661
7798
 
7662
7799
  export const parseCmakeDotFile = (dotFile, pkgType, options = {}) => {
7663
- const dotGraphData = readFileSync(dotFile, "utf-8");
7800
+ const dotGraphData = readFileSync(dotFile, { encoding: "utf-8" });
7664
7801
  const pkgList = [];
7665
7802
  const dependenciesMap = {};
7666
7803
  const pkgBomRefMap = {};
@@ -7769,12 +7906,13 @@ export const parseCmakeDotFile = (dotFile, pkgType, options = {}) => {
7769
7906
  };
7770
7907
 
7771
7908
  export const parseCmakeLikeFile = (cmakeListFile, pkgType, options = {}) => {
7772
- let cmakeListData = readFileSync(cmakeListFile, "utf-8");
7909
+ let cmakeListData = readFileSync(cmakeListFile, { encoding: "utf-8" });
7773
7910
  const pkgList = [];
7774
7911
  const pkgAddedMap = {};
7775
7912
  const versionSpecifiersMap = {};
7776
7913
  const versionsMap = {};
7777
7914
  let parentComponent = {};
7915
+ const templateValues = {};
7778
7916
  cmakeListData = cmakeListData
7779
7917
  .replace(/^ {2}/g, "")
7780
7918
  .replace(/\(\r\n/g, "(")
@@ -7789,7 +7927,20 @@ export const parseCmakeLikeFile = (cmakeListFile, pkgType, options = {}) => {
7789
7927
  let group = "";
7790
7928
  let path = undefined;
7791
7929
  let name_list = [];
7792
- if (l.startsWith("project") && !Object.keys(parentComponent).length) {
7930
+ if (l.startsWith("set")) {
7931
+ const tmpA = l.replace("set(", "").replace(")", "").trim().split(" ");
7932
+ if (tmpA && tmpA.length === 2) {
7933
+ templateValues[tmpA[0]] = tmpA[1];
7934
+ }
7935
+ } else if (
7936
+ l.startsWith("project") &&
7937
+ !Object.keys(parentComponent).length
7938
+ ) {
7939
+ if (l.includes("${")) {
7940
+ for (const tmplKey of Object.keys(templateValues)) {
7941
+ l = l.replace("${" + tmplKey + "}", templateValues[tmplKey] || "");
7942
+ }
7943
+ }
7793
7944
  const tmpA = l.replace("project (", "project(").split("project(");
7794
7945
  if (tmpA && tmpA.length > 1) {
7795
7946
  const tmpB = (tmpA[1] || "")
@@ -7807,7 +7958,7 @@ export const parseCmakeLikeFile = (cmakeListFile, pkgType, options = {}) => {
7807
7958
  if (versionIndex > -1 && tmpB.length > versionIndex) {
7808
7959
  parentVersion = tmpB[versionIndex + 1];
7809
7960
  }
7810
- if (parentName && parentName.length) {
7961
+ if (parentName && parentName.length && !parentName.includes("$")) {
7811
7962
  parentComponent = {
7812
7963
  group: options.projectGroup || "",
7813
7964
  name: parentName,
@@ -7989,7 +8140,7 @@ export const parseCmakeLikeFile = (cmakeListFile, pkgType, options = {}) => {
7989
8140
  methods: [
7990
8141
  {
7991
8142
  technique: "source-code-analysis",
7992
- confidence: 0,
8143
+ confidence: 0.5,
7993
8144
  value: `Filename ${cmakeListFile}`
7994
8145
  }
7995
8146
  ]
@@ -8023,7 +8174,7 @@ export const getOSPackageForFile = (afile, osPkgsList) => {
8023
8174
  ospkg.evidence = {
8024
8175
  identity: {
8025
8176
  field: "purl",
8026
- confidence: 0,
8177
+ confidence: 0.8,
8027
8178
  methods: [
8028
8179
  {
8029
8180
  technique: "filename",
@@ -8057,47 +8208,122 @@ export const getCppModules = (src, options, osPkgsList, epkgList) => {
8057
8208
  const epkgMap = {};
8058
8209
  let parentComponent = undefined;
8059
8210
  const dependsOn = [];
8211
+ (epkgList || []).forEach((p) => {
8212
+ epkgMap[p.group + "/" + p.name] = p;
8213
+ });
8060
8214
  // Let's look for any vcpkg.json file to tell us about the directory we're scanning
8061
8215
  // users can use this file to give us a clue even if they do not use vcpkg library manager
8062
8216
  if (existsSync(join(src, "vcpkg.json"))) {
8063
- const vcPkgData = JSON.parse(join(src, "vcpkg.json"));
8064
- if (
8065
- vcPkgData &&
8066
- Object.keys(vcPkgData).length &&
8067
- vcPkgData.name &&
8068
- vcPkgData.version
8069
- ) {
8217
+ const vcPkgData = JSON.parse(
8218
+ readFileSync(join(src, "vcpkg.json"), { encoding: "utf-8" })
8219
+ );
8220
+ if (vcPkgData && Object.keys(vcPkgData).length && vcPkgData.name) {
8070
8221
  const parentPurl = new PackageURL(
8071
8222
  pkgType,
8072
8223
  "",
8073
8224
  vcPkgData.name,
8074
- vcPkgData.version,
8225
+ vcPkgData.version || "",
8075
8226
  null,
8076
8227
  null
8077
8228
  ).toString();
8078
8229
  parentComponent = {
8079
8230
  name: vcPkgData.name,
8080
- version: vcPkgData.version,
8231
+ version: vcPkgData.version || "",
8081
8232
  description: vcPkgData.description,
8082
8233
  license: vcPkgData.license,
8083
8234
  purl: parentPurl,
8235
+ type: "application",
8084
8236
  "bom-ref": decodeURIComponent(parentPurl)
8085
8237
  };
8086
8238
  if (vcPkgData.homepage) {
8087
8239
  parentComponent.homepage = { url: vcPkgData.homepage };
8088
8240
  }
8089
- }
8241
+ // Are there any dependencies declared in vcpkg.json
8242
+ if (vcPkgData.dependencies && Array.isArray(vcPkgData.dependencies)) {
8243
+ for (const avcdep of vcPkgData.dependencies) {
8244
+ let avcpkgName = undefined;
8245
+ let scope = undefined;
8246
+ if (typeof avcdep === "string" || avcdep instanceof String) {
8247
+ avcpkgName = avcdep;
8248
+ } else if (Object.keys(avcdep).length && avcdep.name) {
8249
+ avcpkgName = avcdep.name;
8250
+ if (avcdep.host) {
8251
+ scope = "optional";
8252
+ }
8253
+ }
8254
+ // Is this a dependency we haven't seen before including the all lower and upper case version?
8255
+ if (
8256
+ avcpkgName &&
8257
+ !epkgMap["/" + avcpkgName] &&
8258
+ !epkgMap["/" + avcpkgName.toLowerCase()] &&
8259
+ !epkgMap["/" + avcpkgName.toUpperCase()]
8260
+ ) {
8261
+ const pkgPurl = new PackageURL(
8262
+ pkgType,
8263
+ "",
8264
+ avcpkgName,
8265
+ "",
8266
+ null,
8267
+ null
8268
+ ).toString();
8269
+ const apkg = {
8270
+ group: "",
8271
+ name: avcpkgName,
8272
+ type: pkgType,
8273
+ version: "",
8274
+ purl: pkgPurl,
8275
+ scope,
8276
+ "bom-ref": decodeURIComponent(pkgPurl),
8277
+ evidence: {
8278
+ identity: {
8279
+ field: "purl",
8280
+ confidence: 0.5,
8281
+ methods: [
8282
+ {
8283
+ technique: "source-code-analysis",
8284
+ confidence: 0.5,
8285
+ value: `Filename ${join(src, "vcpkg.json")}`
8286
+ }
8287
+ ]
8288
+ }
8289
+ }
8290
+ };
8291
+ if (!pkgAddedMap[avcpkgName]) {
8292
+ pkgList.push(apkg);
8293
+ dependsOn.push(apkg["bom-ref"]);
8294
+ pkgAddedMap[avcpkgName] = true;
8295
+ }
8296
+ }
8297
+ }
8298
+ }
8299
+ } // if
8090
8300
  } else if (existsSync(join(src, "CMakeLists.txt"))) {
8091
8301
  const retMap = parseCmakeLikeFile(join(src, "CMakeLists.txt"), pkgType);
8092
8302
  if (retMap.parentComponent && Object.keys(retMap.parentComponent).length) {
8093
8303
  parentComponent = retMap.parentComponent;
8094
8304
  }
8305
+ } else if (options.projectName && options.projectVersion) {
8306
+ parentComponent = {
8307
+ group: options.projectGroup || "",
8308
+ name: options.projectName || "",
8309
+ version: "" + options.projectVersion || "latest",
8310
+ type: "application"
8311
+ };
8312
+ const parentPurl = new PackageURL(
8313
+ pkgType,
8314
+ parentComponent.group,
8315
+ parentComponent.name,
8316
+ parentComponent.version,
8317
+ null,
8318
+ null
8319
+ ).toString();
8320
+ parentComponent.purl = parentPurl;
8321
+ parentComponent["bom-ref"] = decodeURIComponent(parentPurl);
8095
8322
  }
8096
- (epkgList || []).forEach((p) => {
8097
- epkgMap[p.name] = p;
8098
- });
8099
8323
  if (options.usagesSlicesFile && existsSync(options.usagesSlicesFile)) {
8100
- sliceData = JSON.parse(readFileSync(options.usagesSlicesFile));
8324
+ sliceData = JSON.parse(
8325
+ readFileSync(options.usagesSlicesFile, { encoding: "utf-8" })
8326
+ );
8101
8327
  if (DEBUG_MODE) {
8102
8328
  console.log("Re-using existing slices file", options.usagesSlicesFile);
8103
8329
  }
@@ -8110,7 +8336,9 @@ export const getCppModules = (src, options, osPkgsList, epkgList) => {
8110
8336
  );
8111
8337
  }
8112
8338
  const usageData = parseCUsageSlice(sliceData);
8113
- for (const afile of Object.keys(usageData)) {
8339
+ for (let afile of Object.keys(usageData)) {
8340
+ // Normalize windows separator
8341
+ afile = afile.replace("..\\", "").replace(/\\/g, "/");
8114
8342
  let fileName = basename(afile);
8115
8343
  if (!fileName || !fileName.length) {
8116
8344
  continue;
@@ -8129,14 +8357,14 @@ export const getCppModules = (src, options, osPkgsList, epkgList) => {
8129
8357
  // We need to resolve the name to an os package here
8130
8358
  let name = fileName.replace(extn, "");
8131
8359
  let apkg = getOSPackageForFile(afile, osPkgsList) ||
8132
- epkgMap[name] || {
8360
+ epkgMap[group + "/" + name] || {
8133
8361
  name,
8134
8362
  group,
8135
8363
  version: "",
8136
8364
  type: pkgType
8137
8365
  };
8138
8366
  // If this is a relative file, there is a good chance we can reuse the project group
8139
- if (!afile.startsWith(_sep)) {
8367
+ if (!afile.startsWith(_sep) && !group.length) {
8140
8368
  group = options.projectGroup || "";
8141
8369
  }
8142
8370
  if (!apkg.purl) {
@@ -8164,9 +8392,16 @@ export const getCppModules = (src, options, osPkgsList, epkgList) => {
8164
8392
  apkg["bom-ref"] = decodeURIComponent(apkg["purl"]);
8165
8393
  }
8166
8394
  if (usageData[afile]) {
8167
- const usymbols = Array.from(usageData[afile]).filter(
8168
- (v) => !v.startsWith("<") && !v.startsWith("__")
8169
- );
8395
+ const usymbols = Array.from(usageData[afile])
8396
+ .filter(
8397
+ (v) =>
8398
+ !v.startsWith("<") &&
8399
+ !v.startsWith("__") &&
8400
+ v !== "main" &&
8401
+ !v.includes("anonymous_") &&
8402
+ !v.includes(afile)
8403
+ )
8404
+ .sort();
8170
8405
  if (!apkg["properties"] && usymbols.length) {
8171
8406
  apkg["properties"] = [
8172
8407
  { name: "ImportedSymbols", value: usymbols.join(", ") }
@@ -8213,7 +8448,9 @@ export const getCppModules = (src, options, osPkgsList, epkgList) => {
8213
8448
  : [];
8214
8449
  return {
8215
8450
  parentComponent,
8216
- pkgList,
8451
+ pkgList: pkgList.sort(function (a, b) {
8452
+ return a.purl.localeCompare(b.purl);
8453
+ }),
8217
8454
  dependenciesList
8218
8455
  };
8219
8456
  };
@@ -8246,34 +8483,21 @@ export const parseCUsageSlice = (sliceData) => {
8246
8483
  continue;
8247
8484
  }
8248
8485
  const slFileName = slice.fileName;
8249
- const slLineNumber = slice.lineNumber || 0;
8250
8486
  const allLines = usageData[slFileName] || new Set();
8251
8487
  if (slice.fullName && slice.fullName.length > 3) {
8252
- allLines.add(slice.fullName + "|" + slLineNumber);
8253
8488
  if (slice.code && slice.code.startsWith("#include")) {
8254
8489
  usageData[slice.fullName] = new Set();
8490
+ } else {
8491
+ allLines.add(slice.fullName);
8255
8492
  }
8256
8493
  }
8257
8494
  for (const ausage of slice.usages) {
8258
- if (ausage.targetObj.resolvedMethod) {
8259
- allLines.add(ausage.targetObj.resolvedMethod + "|" + slLineNumber);
8260
- } else {
8261
- const targetObjName = ausage.targetObj.name.replace(/\n/g, " ");
8262
- // We need to still filter out <global>, <clinit> style targets
8263
- if (
8264
- ausage.targetObj.lineNumber === slLineNumber ||
8265
- targetObjName.startsWith("<") ||
8266
- (slice.fullName.length > 3 &&
8267
- targetObjName.includes(slice.fullName))
8268
- ) {
8269
- continue;
8270
- }
8271
- allLines.add(targetObjName + "|" + slLineNumber);
8272
- }
8273
8495
  let calls = ausage?.invokedCalls || [];
8274
8496
  calls = calls.concat(ausage?.argToCalls || []);
8275
8497
  for (const acall of calls) {
8276
- allLines.add(acall.resolvedMethod + "|" + slLineNumber);
8498
+ if (!acall.resolvedMethod.includes("->")) {
8499
+ allLines.add(acall.resolvedMethod);
8500
+ }
8277
8501
  }
8278
8502
  }
8279
8503
  if (Array.from(allLines).length) {
@@ -8465,6 +8689,13 @@ export const getNugetMetadata = async function (
8465
8689
  p.license = findLicenseId(body.catalogEntry.licenseExpression);
8466
8690
  } else if (body.catalogEntry.licenseUrl) {
8467
8691
  p.license = findLicenseId(body.catalogEntry.licenseUrl);
8692
+ if (
8693
+ typeof p.license === "string" &&
8694
+ p.license.includes("://github.com/")
8695
+ ) {
8696
+ p.license =
8697
+ (await getRepoLicense(p.license, undefined)) || p.license;
8698
+ }
8468
8699
  }
8469
8700
  if (body.catalogEntry.projectUrl) {
8470
8701
  p.repository = { url: body.catalogEntry.projectUrl };
@@ -8476,6 +8707,17 @@ export const getNugetMetadata = async function (
8476
8707
  p.version +
8477
8708
  "/"
8478
8709
  };
8710
+ if (
8711
+ (!p.license || typeof p.license === "string") &&
8712
+ typeof p.repository.url === "string" &&
8713
+ p.repository.url.includes("://github.com/")
8714
+ ) {
8715
+ // license couldn't be properly identified and is still a url,
8716
+ // therefore trying to resolve license via repository
8717
+ p.license =
8718
+ (await getRepoLicense(p.repository.url, undefined)) ||
8719
+ p.license;
8720
+ }
8479
8721
  }
8480
8722
  cdepList.push(p);
8481
8723
  }
package/utils.test.js CHANGED
@@ -74,7 +74,8 @@ import {
74
74
  parseSbtTree,
75
75
  parseCmakeDotFile,
76
76
  parseCmakeLikeFile,
77
- parseContainerFile
77
+ parseContainerFile,
78
+ parseBitbucketPipelinesFile
78
79
  } from "./utils.js";
79
80
  import { readFileSync } from "node:fs";
80
81
  import { parse } from "ssri";
@@ -2753,6 +2754,28 @@ test("parse containerfiles / dockerfiles", async () => {
2753
2754
  });
2754
2755
  });
2755
2756
 
2757
+ test("parse bitbucket-pipelines", async () => {
2758
+ let dep_list = parseBitbucketPipelinesFile(
2759
+ readFileSync("./test/data/bitbucket-pipelines.yml", { encoding: "utf-8" })
2760
+ );
2761
+ expect(dep_list.length).toEqual(5);
2762
+ expect(dep_list[0]).toEqual({
2763
+ image: "node:16"
2764
+ });
2765
+ expect(dep_list[1]).toEqual({
2766
+ image: "node:18"
2767
+ });
2768
+ expect(dep_list[2]).toEqual({
2769
+ image: "some.private.org/docker/library/node:20"
2770
+ });
2771
+ expect(dep_list[3]).toEqual({
2772
+ image: "atlassian/aws/s3-deploy:0.2.2"
2773
+ });
2774
+ expect(dep_list[4]).toEqual({
2775
+ image: "some.private.org/docker/library/some-pipe:1.0.0"
2776
+ });
2777
+ });
2778
+
2756
2779
  test("parse cloudbuild data", async () => {
2757
2780
  expect(parseCloudBuildData(null)).toEqual([]);
2758
2781
  const dep_list = parseCloudBuildData(
@@ -3083,6 +3106,18 @@ test("parseCmakeLikeFile tests", () => {
3083
3106
  type: "application",
3084
3107
  version: ""
3085
3108
  });
3109
+ retMap = parseCmakeLikeFile(
3110
+ "./test/data/cmakes/CMakeLists-tpl.txt",
3111
+ "generic"
3112
+ );
3113
+ expect(retMap.parentComponent).toEqual({
3114
+ "bom-ref": "pkg:generic/aurora-examples",
3115
+ group: "",
3116
+ name: "aurora-examples",
3117
+ purl: "pkg:generic/aurora-examples",
3118
+ type: "application",
3119
+ version: ""
3120
+ });
3086
3121
  retMap = parseCmakeLikeFile(
3087
3122
  "./test/data/cmakes/mongoc-config.cmake",
3088
3123
  "conan"