@cyclonedx/cdxgen 9.3.2 → 9.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/evinser.js ADDED
@@ -0,0 +1,827 @@
1
+ import {
2
+ executeAtom,
3
+ getAllFiles,
4
+ getGradleCommand,
5
+ getMavenCommand,
6
+ collectGradleDependencies,
7
+ collectMvnDependencies
8
+ } from "./utils.js";
9
+ import { tmpdir } from "node:os";
10
+ import path from "node:path";
11
+ import fs from "node:fs";
12
+ import * as db from "./db.js";
13
+ import { PackageURL } from "packageurl-js";
14
+ import { Op } from "sequelize";
15
// Filename for the sqlite database used to cache namespaces, usages and data flows.
const DB_NAME = "evinser.db";
// Module-level cache: type full name -> Namespaces rows (purl hits) already
// returned by the db, to avoid repeating the same LIKE query.
const typePurlsCache = {};
17
+
18
/**
 * Function to create the db for the libraries referred in the sbom.
 *
 * @param {object} options Command line options (positional dir in options._,
 *   input bom file, dbPath, force, skipMavenCollector, withDeepJarCollector)
 * @returns {object|undefined} The db handle and models
 *   { sequelize, Namespaces, Usages, DataFlows }, or undefined when the
 *   input bom file does not exist
 */
export const prepareDB = async (options) => {
  const dirPath = options._[0] || ".";
  const bomJsonFile = options.input;
  if (!fs.existsSync(bomJsonFile)) {
    console.log("Bom file doesn't exist");
    return;
  }
  const bomJson = JSON.parse(fs.readFileSync(bomJsonFile, "utf8"));
  const components = bomJson.components || [];
  const { sequelize, Namespaces, Usages, DataFlows } = await db.createOrLoad(
    DB_NAME,
    options.dbPath
  );
  let hasMavenPkgs = false;
  // We need to slice only non-maven packages
  const purlsToSlice = {};
  const purlsJars = {};
  let usagesSlice = undefined;
  for (const comp of components) {
    if (!comp.purl) {
      continue;
    }
    // Skip components whose slices/namespaces are already cached in the db,
    // unless --force was passed
    usagesSlice = await Usages.findByPk(comp.purl);
    const namespaceSlice = await Namespaces.findByPk(comp.purl);
    if ((!usagesSlice && !namespaceSlice) || options.force) {
      if (comp.purl.startsWith("pkg:maven")) {
        hasMavenPkgs = true;
      } else if (isSlicingRequired(comp.purl)) {
        purlsToSlice[comp.purl] = true;
      }
    }
  }
  // If there are maven packages we collect and store the namespaces
  if (!options.skipMavenCollector && hasMavenPkgs) {
    const pomXmlFiles = getAllFiles(dirPath, "**/" + "pom.xml");
    const gradleFiles = getAllFiles(dirPath, "**/" + "build.gradle*");
    if (pomXmlFiles && pomXmlFiles.length) {
      await catalogMavenDeps(dirPath, purlsJars, Namespaces, options);
    }
    if (gradleFiles && gradleFiles.length) {
      await catalogGradleDeps(dirPath, purlsJars, Namespaces);
    }
  }
  // Non-maven dependencies (javascript/python) get an individual usages slice
  for (const purl of Object.keys(purlsToSlice)) {
    await createAndStoreSlice(purl, purlsJars, Usages);
  }
  return { sequelize, Namespaces, Usages, DataFlows };
};
71
+
72
/**
 * Collects jar dependencies for a maven project and stores each jar's
 * pom and namespaces in the Namespaces model, keyed by purl.
 *
 * @param {string} dirPath Project directory
 * @param {object} purlsJars Map mutated in place: purl -> jar file path
 * @param {object} Namespaces Namespaces db model
 * @param {object} options Command line options (withDeepJarCollector)
 */
export const catalogMavenDeps = async (
  dirPath,
  purlsJars,
  Namespaces,
  options = {}
) => {
  console.log("About to collect jar dependencies for the path", dirPath);
  const mavenCmd = getMavenCommand(dirPath, dirPath);
  // collect all jars including from the cache if data-flow mode is enabled
  const jarNSMapping = collectMvnDependencies(
    mavenCmd,
    dirPath,
    false,
    options.withDeepJarCollector
  );
  if (!jarNSMapping) {
    return;
  }
  for (const [purl, nsInfo] of Object.entries(jarNSMapping)) {
    purlsJars[purl] = nsInfo.jarFile;
    const data = JSON.stringify(
      {
        pom: nsInfo.pom,
        namespaces: nsInfo.namespaces
      },
      null,
      2
    );
    await Namespaces.findOrCreate({
      where: { purl },
      defaults: { purl, data }
    });
  }
};
107
+
108
/**
 * Collects jar dependencies from the gradle cache and stores each jar's
 * pom and namespaces in the Namespaces model, keyed by purl.
 *
 * @param {string} dirPath Project directory
 * @param {object} purlsJars Map mutated in place: purl -> jar file path
 * @param {object} Namespaces Namespaces db model
 */
export const catalogGradleDeps = async (dirPath, purlsJars, Namespaces) => {
  console.log(
    "About to collect jar dependencies from the gradle cache. This would take a while ..."
  );
  const gradleCmd = getGradleCommand(dirPath, dirPath);
  // collect all jars including from the cache if data-flow mode is enabled
  const jarNSMapping = collectGradleDependencies(
    gradleCmd,
    dirPath,
    false,
    true
  );
  if (jarNSMapping) {
    for (const [purl, nsInfo] of Object.entries(jarNSMapping)) {
      purlsJars[purl] = nsInfo.jarFile;
      const data = JSON.stringify(
        {
          pom: nsInfo.pom,
          namespaces: nsInfo.namespaces
        },
        null,
        2
      );
      await Namespaces.findOrCreate({
        where: { purl },
        defaults: { purl, data }
      });
    }
  }
  console.log(
    "To speed up successive re-runs, pass the argument --skip-maven-collector to evinse command."
  );
};
143
+
144
/**
 * Generates a usages slice for the given purl and persists it in the
 * Usages model.
 *
 * @param {string} purl Package url
 * @param {object} purlsJars Map of purl -> jar file path
 * @param {object} Usages Usages db model
 * @returns {object|undefined} Result of Usages.findOrCreate, or undefined
 *   when no slice file was produced
 */
export const createAndStoreSlice = async (purl, purlsJars, Usages) => {
  const retMap = createSlice(purl, purlsJars[purl], "usages");
  let sliceData;
  if (retMap?.slicesFile && fs.existsSync(retMap.slicesFile)) {
    sliceData = await Usages.findOrCreate({
      where: { purl },
      defaults: {
        purl,
        data: fs.readFileSync(retMap.slicesFile, "utf-8")
      }
    });
  }
  // Clean up the scratch directory, but only when it lives under the os tmpdir
  if (retMap?.tempDir?.startsWith(tmpdir())) {
    fs.rmSync(retMap.tempDir, { recursive: true, force: true });
  }
  return sliceData;
};
161
+
162
/**
 * Invokes atom to produce a slice of the given type for a project or package.
 *
 * @param {string} purlOrLanguage Either a package url (pkg:...) or a plain language name
 * @param {string} filePath Path to the project directory or jar to slice
 * @param {string} sliceType Slice type: "usages" or "data-flow"
 * @returns {object|undefined} { tempDir, slicesFile, atomFile } on success
 */
export const createSlice = (purlOrLanguage, filePath, sliceType = "usages") => {
  if (!filePath) {
    return;
  }
  console.log(`Create ${sliceType} slice for ${purlOrLanguage} ${filePath}`);
  // A purl is first mapped to its language; plain language strings pass through
  let language = purlOrLanguage;
  if (purlOrLanguage.startsWith("pkg:")) {
    language = purlToLanguage(purlOrLanguage, filePath);
  }
  if (!language) {
    return undefined;
  }
  const tempDir = fs.mkdtempSync(path.join(tmpdir(), `atom-${sliceType}-`));
  const atomFile = path.join(tempDir, "app.atom");
  const slicesFile = path.join(tempDir, `${sliceType}.slices.json`);
  const args = [
    sliceType,
    "-l",
    language,
    "-o",
    path.resolve(atomFile),
    "--slice-outfile",
    path.resolve(slicesFile)
  ];
  // For projects with several layers, slice depth needs to be increased from the default 7 to 15 or 20
  // This would increase the time but would yield more deeper paths
  if (sliceType === "data-flow" && process.env.ATOM_SLICE_DEPTH) {
    args.push("--slice-depth", process.env.ATOM_SLICE_DEPTH);
  }
  args.push(path.resolve(filePath));
  executeAtom(filePath, args);
  return { tempDir, slicesFile, atomFile };
};
199
+
200
/**
 * Maps a package url to the language name understood by atom.
 *
 * @param {string} purl Package url string
 * @param {string} filePath Optional file path; a .jar suffix selects "jar"
 * @returns {string|undefined} "jar", "java", "javascript", "python" or
 *   undefined for unsupported purl types
 */
export const purlToLanguage = (purl, filePath) => {
  const { type } = PackageURL.fromString(purl);
  if (type === "maven") {
    // Jar files are sliced as "jar"; other maven inputs as java sources
    return filePath && filePath.endsWith(".jar") ? "jar" : "java";
  }
  if (type === "npm") {
    return "javascript";
  }
  if (type === "pypi") {
    return "python";
  }
  return undefined;
};
216
+
217
/**
 * Function to analyze the project
 *
 * @param {object} dbObjMap DB and model instances
 * @param {object} options Command line options (positional dir, language,
 *   usagesSlicesFile, withDataFlow, dataFlowSlicesFile)
 * @returns {object} Slice artefacts: atomFile, slices files, purl location map,
 *   services map, data-flow frames, tempDir and user defined types map
 */
export const analyzeProject = async (dbObjMap, options) => {
  const dirPath = options._[0] || ".";
  const language = options.language;
  let usageSlice = undefined;
  let dataFlowSlice = undefined;
  let usagesSlicesFile = undefined;
  let dataFlowSlicesFile = undefined;
  let purlLocationMap = {};
  let dataFlowFrames = {};
  let servicesMap = {};
  let retMap = {};
  let userDefinedTypesMap = {};
  // Reuse existing usages slices
  if (options.usagesSlicesFile && fs.existsSync(options.usagesSlicesFile)) {
    usageSlice = JSON.parse(fs.readFileSync(options.usagesSlicesFile, "utf-8"));
    usagesSlicesFile = options.usagesSlicesFile;
  } else {
    // Generate our own slices
    retMap = createSlice(language, dirPath, "usages");
    if (retMap && retMap.slicesFile && fs.existsSync(retMap.slicesFile)) {
      usageSlice = JSON.parse(fs.readFileSync(retMap.slicesFile, "utf-8"));
      usagesSlicesFile = retMap.slicesFile;
      console.log(
        `To speed up this step, cache ${usagesSlicesFile} and invoke evinse with the --usages-slices-file argument.`
      );
    }
  }
  if (usageSlice && Object.keys(usageSlice).length) {
    // NOTE: this const deliberately shadows the outer retMap; the outer one
    // keeps the atomFile/tempDir from slice creation for the return value below
    const retMap = await parseObjectSlices(
      language,
      usageSlice,
      dbObjMap,
      servicesMap
    );
    purlLocationMap = retMap.purlLocationMap;
    servicesMap = retMap.servicesMap;
    userDefinedTypesMap = retMap.userDefinedTypesMap;
  }
  if (options.withDataFlow) {
    // Reuse an existing data-flow slice when one is supplied
    if (
      options.dataFlowSlicesFile &&
      fs.existsSync(options.dataFlowSlicesFile)
    ) {
      dataFlowSlicesFile = options.dataFlowSlicesFile;
      dataFlowSlice = JSON.parse(
        fs.readFileSync(options.dataFlowSlicesFile, "utf-8")
      );
    } else {
      retMap = createSlice(language, dirPath, "data-flow");
      if (retMap && retMap.slicesFile && fs.existsSync(retMap.slicesFile)) {
        dataFlowSlicesFile = retMap.slicesFile;
        dataFlowSlice = JSON.parse(fs.readFileSync(retMap.slicesFile, "utf-8"));
        console.log(
          `To speed up this step, cache ${dataFlowSlicesFile} and invoke evinse with the --data-flow-slices-file argument.`
        );
      }
    }
  }
  if (dataFlowSlice && Object.keys(dataFlowSlice).length) {
    dataFlowFrames = await collectDataFlowFrames(
      language,
      userDefinedTypesMap,
      dataFlowSlice,
      dbObjMap
    );
  }
  return {
    atomFile: retMap.atomFile,
    usagesSlicesFile,
    dataFlowSlicesFile,
    purlLocationMap,
    servicesMap,
    dataFlowFrames,
    tempDir: retMap.tempDir,
    userDefinedTypesMap
  };
};
300
+
301
/**
 * Parses the object slices from a usages slice, populating the purl
 * location map and the services map.
 *
 * @param {string} language Application language
 * @param {object} usageSlice Usages slice object produced by atom
 * @param {object} dbObjMap DB Models
 * @param {object} servicesMap Existing service map, mutated in place
 * @returns {object} { purlLocationMap, servicesMap, userDefinedTypesMap }
 */
export const parseObjectSlices = async (
  language,
  usageSlice,
  dbObjMap,
  servicesMap = {}
) => {
  const purlLocationMap = {};
  const userDefinedTypesMap = {};
  // Fix: always return the same object shape. Previously the empty-slice
  // path returned the bare purlLocationMap ({}), so callers reading
  // `.purlLocationMap` from the result would get undefined.
  if (!usageSlice || !Object.keys(usageSlice).length) {
    return { purlLocationMap, servicesMap, userDefinedTypesMap };
  }
  (usageSlice.userDefinedTypes || []).forEach((ut) => {
    userDefinedTypesMap[ut.name] = true;
  });
  for (const slice of [
    ...(usageSlice.objectSlices || []),
    ...(usageSlice.userDefinedTypes || [])
  ]) {
    // Skip the library code typically without filename
    if (
      !slice.fileName ||
      !slice.fileName.trim().length ||
      slice.fileName === "<empty>"
    ) {
      continue;
    }
    // Occurrence location in the form fileName#lineNumber
    const locationKey = `${slice.fileName}${
      slice.lineNumber ? "#" + slice.lineNumber : ""
    }`;
    await parseSliceUsages(
      language,
      userDefinedTypesMap,
      slice.usages,
      dbObjMap,
      locationKey,
      purlLocationMap
    );
    detectServicesFromUsages(language, slice, servicesMap);
  }
  return {
    purlLocationMap,
    servicesMap,
    userDefinedTypesMap
  };
};
346
+
347
/**
 * The implementation of this function is based on the logic proposed in the atom slices specification
 * https://github.com/AppThreat/atom/blob/main/specification/docs/slices.md#use
 *
 * @param {string} language Application language
 * @param {object} userDefinedTypesMap User Defined types in the application
 * @param {array} usages Usages array for each objectSlice
 * @param {object} dbObjMap DB Models
 * @param {string} locationKey Filename with line number to be used in occurrences evidence
 * @param {object} purlLocationMap Object to track locations where purls are used (mutated in place)
 * @returns {undefined}
 */
export const parseSliceUsages = async (
  language,
  userDefinedTypesMap,
  usages,
  dbObjMap,
  locationKey,
  purlLocationMap
) => {
  if (!usages || !usages.length) {
    return undefined;
  }
  const purlsSet = new Set();
  const typesToLookup = new Set();
  for (const ausage of usages) {
    // First capture the types in the targetObj and definedBy
    for (const atype of [
      [ausage?.targetObj?.isExternal, ausage?.targetObj?.typeFullName],
      [ausage?.targetObj?.isExternal, ausage?.targetObj?.resolvedMethod],
      [ausage?.definedBy?.isExternal, ausage?.definedBy?.typeFullName],
      [ausage?.definedBy?.isExternal, ausage?.definedBy?.resolvedMethod],
      ...(ausage?.fields || []).map((f) => [f?.isExternal, f?.typeFullName])
    ]) {
      // isExternal === false means application code; undefined is treated as
      // possibly external and still looked up
      if (
        atype[0] !== false &&
        !isFilterableType(language, userDefinedTypesMap, atype[1])
      ) {
        // Method signatures (contain "(") are not added verbatim; only their
        // class portion is looked up below
        if (!atype[1].includes("(")) {
          typesToLookup.add(atype[1]);
        }
        typesToLookup.add(getClassTypeFromSignature(language, atype[1]));
      }
    }
    // Now capture full method signatures from invokedCalls, argToCalls including the paramtypes
    for (const acall of []
      .concat(ausage?.invokedCalls || [])
      .concat(ausage?.argToCalls || [])
      .concat(ausage?.procedures || [])) {
      if (acall.isExternal == false) {
        continue;
      }
      if (
        !isFilterableType(language, userDefinedTypesMap, acall?.resolvedMethod)
      ) {
        if (!acall?.resolvedMethod.includes("(")) {
          typesToLookup.add(acall?.resolvedMethod);
        }
        typesToLookup.add(
          getClassTypeFromSignature(language, acall?.resolvedMethod)
        );
      }
      for (const aparamType of acall?.paramTypes || []) {
        if (!isFilterableType(language, userDefinedTypesMap, aparamType)) {
          if (!aparamType.includes("(")) {
            typesToLookup.add(aparamType);
          }
          typesToLookup.add(getClassTypeFromSignature(language, aparamType));
        }
      }
    }
  }
  // Resolve each collected type to purls via the namespaces db
  for (const atype of typesToLookup) {
    if (isFilterableType(language, userDefinedTypesMap, atype)) {
      continue;
    }
    // Check the namespaces db; the module-level typePurlsCache avoids
    // re-running the same LIKE query for a type seen earlier
    const nsHits =
      typePurlsCache[atype] ||
      (await dbObjMap.Namespaces.findAll({
        attributes: ["purl"],
        where: {
          data: {
            [Op.like]: `%${atype}%`
          }
        }
      }));
    if (nsHits && nsHits.length) {
      for (const ns of nsHits) {
        purlsSet.add(ns.purl);
      }
      typePurlsCache[atype] = nsHits;
    }
  }
  // Update the purlLocationMap
  for (const apurl of purlsSet) {
    if (!purlLocationMap[apurl]) {
      purlLocationMap[apurl] = new Set();
    }
    purlLocationMap[apurl].add(locationKey);
  }
};
449
+
450
/**
 * Decides whether a type name should be skipped during namespace lookups.
 * Placeholder types, JDK built-ins (for java) and the application's own
 * user defined types are all filtered out.
 *
 * @param {string} language Application language
 * @param {object} userDefinedTypesMap User Defined types in the application
 * @param {string} typeFullName Fully qualified type name
 * @returns {boolean} true when the type should be ignored
 */
export const isFilterableType = (
  language,
  userDefinedTypesMap,
  typeFullName
) => {
  if (!typeFullName) {
    return true;
  }
  const upper = typeFullName.toUpperCase();
  if (upper === "ANY" || upper === "UNKNOWN" || upper === "VOID") {
    return true;
  }
  // Operator and unresolved placeholders emitted by atom carry no package info
  if (
    typeFullName.startsWith("<operator") ||
    typeFullName.startsWith("<unresolved")
  ) {
    return true;
  }
  if (language === "java" || language === "jar") {
    // Unqualified names, annotations and JDK/standard namespaces are not
    // external dependencies worth resolving
    const builtinPrefixes = [
      "@",
      "java.",
      "sun.",
      "jdk.",
      "org.w3c.",
      "org.xml.",
      "javax.xml."
    ];
    if (
      !typeFullName.includes(".") ||
      builtinPrefixes.some((p) => typeFullName.startsWith(p))
    ) {
      return true;
    }
  }
  // Types defined within the application itself are not dependencies
  return Boolean(userDefinedTypesMap[typeFullName]);
};
486
+
487
/**
 * Method to detect services from annotation objects in the usage slice
 *
 * @param {string} language Application language
 * @param {object} slice Object slice containing the usages array to inspect
 * @param {object} servicesMap Existing service map, mutated in place
 * @returns {array} Empty array when usages are absent or the language is
 *   not java/jar; otherwise undefined (results go into servicesMap)
 */
export const detectServicesFromUsages = (language, slice, servicesMap = {}) => {
  const usages = slice.usages;
  // Only java annotations (e.g. Spring *Mapping) are currently understood
  if (!usages || !["java", "jar"].includes(language)) {
    return [];
  }
  for (const usage of usages) {
    const targetObj = usage?.targetObj;
    const definedBy = usage?.definedBy;
    let endpoints = undefined;
    let authenticated = undefined;
    // Prefer the annotation on targetObj, fall back to definedBy
    if (
      targetObj &&
      targetObj?.label === "ANNOTATION" &&
      targetObj?.resolvedMethod
    ) {
      endpoints = extractEndpoints(language, targetObj?.resolvedMethod);
      // Heuristic: any mention of "auth" in the annotation marks the
      // endpoint as authenticated
      if (targetObj?.resolvedMethod.includes("auth")) {
        authenticated = true;
      }
    } else if (
      definedBy &&
      definedBy?.label === "ANNOTATION" &&
      definedBy?.resolvedMethod
    ) {
      endpoints = extractEndpoints(language, definedBy?.resolvedMethod);
      if (definedBy?.resolvedMethod.includes("auth")) {
        authenticated = true;
      }
    }
    if (endpoints) {
      const serviceName = constructServiceName(language, slice);
      if (!servicesMap[serviceName]) {
        servicesMap[serviceName] = {
          endpoints: new Set(),
          authenticated,
          xTrustBoundary: authenticated === true ? true : undefined
        };
      }
      for (const endpoint of endpoints) {
        servicesMap[serviceName].endpoints.add(endpoint);
      }
    }
  }
};
538
+
539
/**
 * Derives a service name for a slice, preferring the dasherized package
 * portion of fullName, then the file basename, with a "-service" suffix.
 *
 * @param {string} language Application language (unused, kept for symmetry)
 * @param {object} slice Object slice with optional fullName/fileName
 * @returns {string} The derived service name
 */
export const constructServiceName = (language, slice) => {
  let serviceName = "service";
  if (slice?.fullName) {
    // Take the part before ":" and turn dots into dashes
    serviceName = slice.fullName.split(":")[0].replace(/\./g, "-");
  } else if (slice?.fileName) {
    serviceName = path.basename(slice.fileName).split(".")[0];
  }
  return serviceName.endsWith("service")
    ? serviceName
    : `${serviceName}-service`;
};
551
+
552
/**
 * Extracts endpoint paths from a framework annotation string such as
 * Spring's @GetMapping("/path") or @RequestMapping(value = "/path").
 *
 * @param {string} language Application language (only java/jar supported)
 * @param {string} code Annotation source text
 * @returns {array|undefined} Array of endpoint strings, or undefined
 */
export const extractEndpoints = (language, code) => {
  if (!code) {
    return undefined;
  }
  if (language !== "java" && language !== "jar") {
    return undefined;
  }
  // Only *Mapping annotations with arguments are understood
  if (
    !code.startsWith("@") ||
    !code.includes("Mapping") ||
    !code.includes("(")
  ) {
    return undefined;
  }
  const parts = code.split("(");
  if (parts.length <= 1) {
    return undefined;
  }
  // Everything between the first "(" and the first ")"
  let raw = parts[1].split(")")[0];
  if (raw.includes("{")) {
    // Array initializer: keep the contents of the innermost braces
    const braceParts = raw.split("{");
    raw = braceParts[braceParts.length - 1].split("}")[0];
  } else if (raw.includes(",")) {
    // Multiple annotation attributes: keep only the first one
    raw = raw.split(",")[0];
  }
  if (raw.includes("=")) {
    // Named attribute such as value = "/path": keep the value side
    raw = raw.split("=").reverse()[0];
  }
  raw = raw.replace(/"/g, "").replace(/ /g, "");
  return raw.split(",");
};
588
+
589
/**
 * Function to determine if slicing is required for the given language's dependencies.
 * For performance reasons, we make java operate only with namespaces
 *
 * @param {string} purl Package url string
 * @returns {boolean} true when the dependency needs its own usages slice
 */
export const isSlicingRequired = (purl) => {
  // Only javascript and python dependencies are sliced individually;
  // java relies on namespaces collected from jars instead
  return ["javascript", "python"].includes(purlToLanguage(purl));
};
600
+
601
/**
 * Method to create the SBoM with evidence file called evinse file.
 *
 * @param {object} sliceArtefacts Various artefacts from the slice operation
 * @param {object} options Command line options (input, output, annotate)
 * @returns {object} The bom json enriched with occurrence/callstack evidence
 */
export const createEvinseFile = (sliceArtefacts, options) => {
  const {
    tempDir,
    usagesSlicesFile,
    dataFlowSlicesFile,
    purlLocationMap,
    servicesMap,
    dataFlowFrames
  } = sliceArtefacts;
  const bomFile = options.input;
  const evinseOutFile = options.output;
  const bomJson = JSON.parse(fs.readFileSync(bomFile, "utf8"));
  const components = bomJson.components || [];
  let occEvidencePresent = false;
  let csEvidencePresent = false;
  for (const comp of components) {
    if (!comp.purl) {
      continue;
    }
    // Occurrence evidence: sorted list of file#line locations for this purl
    const locationOccurrences = Array.from(
      purlLocationMap[comp.purl] || []
    ).sort();
    if (locationOccurrences.length) {
      if (!comp.evidence) {
        comp.evidence = {};
      }
      // Do not overwrite pre-existing occurrences evidence
      if (!comp.evidence.occurrences) {
        comp.evidence.occurrences = locationOccurrences
          .filter((l) => !!l)
          .map((l) => ({
            location: l
          }));
        occEvidencePresent = true;
      }
    }
    // Callstack evidence: CycloneDX 1.5 accepts a single frame, so pick one
    const dfFrames = dataFlowFrames[comp.purl];
    if (dfFrames && dfFrames.length) {
      if (!comp.evidence) {
        comp.evidence = {};
      }
      if (!comp.evidence.callstack) {
        comp.evidence.callstack = {};
      }
      if (!comp.evidence.callstack.frames) {
        comp.evidence.callstack.frames = framePicker(dfFrames);
        csEvidencePresent = true;
      }
    }
  } // for
  // Services detected from annotations are appended to bomJson.services
  if (servicesMap && Object.keys(servicesMap).length) {
    const services = [];
    for (const serviceName of Object.keys(servicesMap)) {
      services.push({
        name: serviceName,
        endpoints: Array.from(servicesMap[serviceName].endpoints),
        authenticated: servicesMap[serviceName].authenticated,
        "x-trust-boundary": servicesMap[serviceName].xTrustBoundary
      });
    }
    // Add to existing services
    bomJson.services = (bomJson.services || []).concat(services);
  }
  // Optionally attach the raw slice files as CycloneDX annotations
  if (options.annotate) {
    if (!bomJson.annotations) {
      bomJson.annotations = [];
    }
    if (usagesSlicesFile && fs.existsSync(usagesSlicesFile)) {
      bomJson.annotations.push({
        subjects: [bomJson.serialNumber],
        annotator: { component: bomJson.metadata.tools.components[0] },
        timestamp: new Date().toISOString(),
        text: fs.readFileSync(usagesSlicesFile, "utf8")
      });
    }
    if (dataFlowSlicesFile && fs.existsSync(dataFlowSlicesFile)) {
      bomJson.annotations.push({
        subjects: [bomJson.serialNumber],
        annotator: { component: bomJson.metadata.tools.components[0] },
        timestamp: new Date().toISOString(),
        text: fs.readFileSync(dataFlowSlicesFile, "utf8")
      });
    }
  }
  // Increment the version
  bomJson.version = (bomJson.version || 1) + 1;
  // Set the current timestamp to indicate this is newer
  bomJson.metadata.timestamp = new Date().toISOString();
  fs.writeFileSync(evinseOutFile, JSON.stringify(bomJson, null, 2));
  if (occEvidencePresent || csEvidencePresent) {
    console.log(evinseOutFile, "created successfully.");
  } else {
    console.log(
      "Unable to identify component evidence for the input SBoM. Only java, javascript and python projects are supported by evinse."
    );
  }
  // Clean up the scratch directory, only when it is under the os tmpdir
  if (tempDir && tempDir.startsWith(tmpdir())) {
    fs.rmSync(tempDir, { recursive: true, force: true });
  }
  return bomJson;
};
708
+
709
/**
 * Method to convert dataflow slice into usable callstack frames
 * Implemented based on the logic proposed here - https://github.com/AppThreat/atom/blob/main/specification/docs/slices.md#data-flow-slice
 *
 * @param {string} language Application language
 * @param {object} userDefinedTypesMap User Defined types in the application
 * @param {object} dataFlowSlice Data flow slice object from atom
 * @param {object} dbObjMap DB models
 * @returns {object} Map of purl -> array of frames arrays
 */
export const collectDataFlowFrames = async (
  language,
  userDefinedTypesMap,
  dataFlowSlice,
  dbObjMap
) => {
  const nodes = dataFlowSlice?.graph?.nodes || [];
  // Cache the nodes based on the id to improve lookup
  const nodeCache = {};
  // purl key and an array of frames array
  // CycloneDX 1.5 currently accepts only 1 frame as evidence
  // so this method is more future-proof
  const dfFrames = {};
  for (const n of nodes) {
    // Skip operator calls
    if (n.name && n.name.startsWith("<operator")) {
      continue;
    }
    nodeCache[n.id] = n;
  }
  const paths = dataFlowSlice?.paths || [];
  for (const apath of paths) {
    let aframe = [];
    let referredPurls = new Set();
    for (const nid of apath) {
      const theNode = nodeCache[nid];
      if (!theNode) {
        continue;
      }
      const typeFullName = theNode.typeFullName;
      if (!isFilterableType(language, userDefinedTypesMap, typeFullName)) {
        // Check the namespaces db; typePurlsCache avoids re-running the
        // same LIKE query for a type seen earlier
        const nsHits =
          typePurlsCache[typeFullName] ||
          (await dbObjMap.Namespaces.findAll({
            attributes: ["purl"],
            where: {
              data: {
                [Op.like]: `%${typeFullName}%`
              }
            }
          }));
        if (nsHits && nsHits.length) {
          for (const ns of nsHits) {
            referredPurls.add(ns.purl);
          }
          typePurlsCache[typeFullName] = nsHits;
        } else {
          console.log("Unable to identify purl for", typeFullName);
        }
      }
      // One stack frame per node on the path; missing attributes become ""
      aframe.push({
        package: theNode.parentPackageName || "",
        module: theNode.parentClassName || "",
        function: theNode.parentMethodName || "",
        line: theNode.lineNumber || "",
        column: theNode.columnNumber || "",
        fullFilename: theNode.parentFileName || ""
      });
    }
    referredPurls = Array.from(referredPurls);
    if (referredPurls.length) {
      for (const apurl of referredPurls) {
        if (!dfFrames[apurl]) {
          dfFrames[apurl] = [];
        }
        // Store this frame as an evidence for this purl
        dfFrames[apurl].push(aframe);
      }
    }
  }
  return dfFrames;
};
791
+
792
/**
 * Method to pick a callstack frame as an evidence. This method is required since CycloneDX 1.5 accepts only a single frame as evidence.
 *
 * Starts with the first frames array and prefers a later one containing more
 * than two entries. NOTE(review): the scan stops before the final frames
 * array, so the last candidate is never considered — confirm whether this
 * is intentional.
 *
 * @param {array} dfFrames Data flow frames
 * @returns {array|undefined} The chosen frames array, or undefined when empty
 */
export const framePicker = (dfFrames) => {
  if (!dfFrames?.length) {
    return undefined;
  }
  let chosen = dfFrames[0];
  for (let idx = 1; idx < dfFrames.length - 1; idx++) {
    if (dfFrames[idx].length > 2) {
      chosen = dfFrames[idx];
    }
  }
  return chosen;
};
812
+
813
/**
 * Derives the class (or package) portion from a full method signature.
 * For java, "pkg.Class.method:ret(args)" becomes "pkg.Class"; inner-class
 * suffixes ($Inner) are stripped for all languages.
 *
 * @param {string} language Application language
 * @param {string} typeFullName Fully qualified type or method signature
 * @returns {string|undefined} Class type, or undefined for unresolved names
 */
export const getClassTypeFromSignature = (language, typeFullName) => {
  if (["java", "jar"].includes(language) && typeFullName.includes(":")) {
    // Drop the return/parameter part after ":" and the trailing method name
    const qualified = typeFullName.split(":")[0].split(".");
    qualified.pop();
    typeFullName = qualified.join(".");
  }
  if (typeFullName.startsWith("<unresolved")) {
    return undefined;
  }
  // Strip inner-class suffixes such as Outer$Inner
  const dollarIdx = typeFullName.indexOf("$");
  return dollarIdx === -1 ? typeFullName : typeFullName.slice(0, dollarIdx);
};