@cyclonedx/cdxgen 9.4.0 → 9.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +51 -79
- package/analyzer.js +118 -51
- package/bin/cdxgen.js +19 -2
- package/bin/evinse.js +123 -0
- package/bin/repl.js +123 -7
- package/bin/verify.js +61 -0
- package/db.js +80 -0
- package/display.js +113 -1
- package/docker.test.js +9 -0
- package/evinser.js +1038 -0
- package/evinser.test.js +93 -0
- package/index.js +241 -115
- package/package.json +10 -5
- package/utils.js +561 -163
- package/utils.test.js +98 -16
package/evinser.js
ADDED
|
@@ -0,0 +1,1038 @@
|
|
|
1
|
+
import {
|
|
2
|
+
executeAtom,
|
|
3
|
+
getAllFiles,
|
|
4
|
+
getGradleCommand,
|
|
5
|
+
getMavenCommand,
|
|
6
|
+
collectGradleDependencies,
|
|
7
|
+
collectMvnDependencies
|
|
8
|
+
} from "./utils.js";
|
|
9
|
+
import { tmpdir } from "node:os";
|
|
10
|
+
import path from "node:path";
|
|
11
|
+
import fs from "node:fs";
|
|
12
|
+
import * as db from "./db.js";
|
|
13
|
+
import { PackageURL } from "packageurl-js";
|
|
14
|
+
import { Op } from "sequelize";
|
|
15
|
+
import process from "node:process";
|
|
16
|
+
// Name of the sqlite database file used to cache namespaces and slices.
const DB_NAME = "evinser.db";
// Module-level cache: type full name -> Namespaces rows previously returned
// for it, to avoid repeated LIKE queries against the db.
const typePurlsCache = {};
|
|
18
|
+
|
|
19
|
+
/**
 * Function to create the db for the libraries referred in the sbom.
 * Reads the input CycloneDX (>= 1.5) BOM, then collects and stores
 * namespace and usage information for its components.
 *
 * @param {object} options Command line options (expects `_[0]` project dir,
 *   `input` BOM file, `dbPath`, `force`, `skipMavenCollector`)
 * @returns {object|undefined} sequelize instance plus the Namespaces, Usages
 *   and DataFlows models, or undefined when the BOM file is missing
 */
export const prepareDB = async (options) => {
  const dirPath = options._[0] || ".";
  const bomJsonFile = options.input;
  if (!fs.existsSync(bomJsonFile)) {
    console.log("Bom file doesn't exist");
    return;
  }
  const bomJson = JSON.parse(fs.readFileSync(bomJsonFile, "utf8"));
  // NOTE(review): specVersion is typically a string ("1.5"); the < compares
  // via numeric coercion here — confirm for exotic spec versions.
  if (bomJson.specVersion < 1.5) {
    console.log(
      "Evinse requires the input SBoM in CycloneDX 1.5 format or above. You can generate one by invoking cdxgen without any --spec-version argument."
    );
    process.exit(0);
  }
  const components = bomJson.components || [];
  const { sequelize, Namespaces, Usages, DataFlows } = await db.createOrLoad(
    DB_NAME,
    options.dbPath
  );
  let hasMavenPkgs = false;
  // We need to slice only non-maven packages; maven evidence comes from
  // the namespaces collected below instead.
  const purlsToSlice = {};
  const purlsJars = {};
  let usagesSlice = undefined;
  for (const comp of components) {
    if (!comp.purl) {
      continue;
    }
    // Skip components that already have cached usages/namespaces unless --force
    usagesSlice = await Usages.findByPk(comp.purl);
    const namespaceSlice = await Namespaces.findByPk(comp.purl);
    if ((!usagesSlice && !namespaceSlice) || options.force) {
      if (comp.purl.startsWith("pkg:maven")) {
        hasMavenPkgs = true;
      } else if (isSlicingRequired(comp.purl)) {
        purlsToSlice[comp.purl] = true;
      }
    }
  }
  // If there are maven packages we collect and store the namespaces
  if (!options.skipMavenCollector && hasMavenPkgs) {
    const pomXmlFiles = getAllFiles(dirPath, "**/" + "pom.xml");
    const gradleFiles = getAllFiles(dirPath, "**/" + "build.gradle*");
    if (pomXmlFiles && pomXmlFiles.length) {
      await catalogMavenDeps(dirPath, purlsJars, Namespaces, options);
    }
    if (gradleFiles && gradleFiles.length) {
      await catalogGradleDeps(dirPath, purlsJars, Namespaces);
    }
  }
  // Slice the remaining (non-maven) packages that require it
  for (const purl of Object.keys(purlsToSlice)) {
    await createAndStoreSlice(purl, purlsJars, Usages);
  }
  return { sequelize, Namespaces, Usages, DataFlows };
};
|
|
78
|
+
|
|
79
|
+
/**
 * Collects jar dependencies for a maven project and stores the namespaces
 * found in each jar into the Namespaces model.
 *
 * @param {string} dirPath Project directory
 * @param {object} purlsJars Map updated in-place with purl -> jar file path
 * @param {object} Namespaces Namespaces db model
 * @param {object} options Command line options
 */
export const catalogMavenDeps = async (
  dirPath,
  purlsJars,
  Namespaces,
  options = {}
) => {
  console.log("About to collect jar dependencies for the path", dirPath);
  const mavenCmd = getMavenCommand(dirPath, dirPath);
  // collect all jars including from the cache if data-flow mode is enabled
  const jarNSMapping = collectMvnDependencies(
    mavenCmd,
    dirPath,
    false,
    options.withDeepJarCollector
  );
  if (!jarNSMapping) {
    return;
  }
  for (const [purl, nsInfo] of Object.entries(jarNSMapping)) {
    purlsJars[purl] = nsInfo.jarFile;
    // Persist only when this purl has no row yet
    await Namespaces.findOrCreate({
      where: { purl },
      defaults: {
        purl,
        data: JSON.stringify(
          {
            pom: nsInfo.pom,
            namespaces: nsInfo.namespaces
          },
          null,
          2
        )
      }
    });
  }
};
|
|
114
|
+
|
|
115
|
+
/**
 * Collects jar dependencies from the gradle cache and stores the namespaces
 * found in each jar into the Namespaces model.
 *
 * @param {string} dirPath Project directory
 * @param {object} purlsJars Map updated in-place with purl -> jar file path
 * @param {object} Namespaces Namespaces db model
 */
export const catalogGradleDeps = async (dirPath, purlsJars, Namespaces) => {
  console.log(
    "About to collect jar dependencies from the gradle cache. This would take a while ..."
  );
  const gradleCmd = getGradleCommand(dirPath, dirPath);
  // collect all jars including from the cache if data-flow mode is enabled
  const jarNSMapping = collectGradleDependencies(
    gradleCmd,
    dirPath,
    false,
    true
  );
  if (jarNSMapping) {
    for (const [purl, nsInfo] of Object.entries(jarNSMapping)) {
      purlsJars[purl] = nsInfo.jarFile;
      // Persist only when this purl has no row yet
      await Namespaces.findOrCreate({
        where: { purl },
        defaults: {
          purl,
          data: JSON.stringify(
            {
              pom: nsInfo.pom,
              namespaces: nsInfo.namespaces
            },
            null,
            2
          )
        }
      });
    }
  }
  console.log(
    "To speed up successive re-runs, pass the argument --skip-maven-collector to evinse command."
  );
};
|
|
150
|
+
|
|
151
|
+
/**
 * Creates a usages slice for the given purl and stores it in the Usages model.
 *
 * @param {string} purl Package URL
 * @param {object} purlsJars Map of purl -> jar/file path to slice
 * @param {object} Usages Usages db model
 * @returns {object|undefined} The findOrCreate result, or undefined when no slice was produced
 */
export const createAndStoreSlice = async (purl, purlsJars, Usages) => {
  const retMap = createSlice(purl, purlsJars[purl], "usages");
  let sliceData;
  const slicesFile = retMap?.slicesFile;
  if (slicesFile && fs.existsSync(slicesFile)) {
    sliceData = await Usages.findOrCreate({
      where: { purl },
      defaults: {
        purl,
        data: fs.readFileSync(slicesFile, "utf-8")
      }
    });
  }
  // Clean up the scratch directory, but only if it really lives under tmpdir
  const tempDir = retMap?.tempDir;
  if (tempDir && tempDir.startsWith(tmpdir())) {
    fs.rmSync(tempDir, { recursive: true, force: true });
  }
  return sliceData;
};
|
|
168
|
+
|
|
169
|
+
/**
 * Invokes atom to generate a slice of the given type for a file or project.
 *
 * @param {string} purlOrLanguage A purl (pkg:...) or a plain language name
 * @param {string} filePath File or directory to slice
 * @param {string} sliceType Slice type: "usages" or "data-flow"
 * @returns {object|undefined} {tempDir, slicesFile, atomFile} or undefined
 */
export const createSlice = (purlOrLanguage, filePath, sliceType = "usages") => {
  if (!filePath) {
    return;
  }
  console.log(`Create ${sliceType} slice for ${purlOrLanguage} ${filePath}`);
  let language = purlOrLanguage;
  if (purlOrLanguage.startsWith("pkg:")) {
    language = purlToLanguage(purlOrLanguage, filePath);
  }
  if (!language) {
    return undefined;
  }
  const tempDir = fs.mkdtempSync(path.join(tmpdir(), `atom-${sliceType}-`));
  const atomFile = path.join(tempDir, "app.atom");
  const slicesFile = path.join(tempDir, `${sliceType}.slices.json`);
  const args = [
    sliceType,
    "-l",
    language,
    "-o",
    path.resolve(atomFile),
    "--slice-outfile",
    path.resolve(slicesFile)
  ];
  // For projects with several layers, slice depth needs to be increased from the default 7 to 15 or 20
  // This would increase the time but would yield more deeper paths
  if (sliceType === "data-flow" && process.env.ATOM_SLICE_DEPTH) {
    args.push("--slice-depth", process.env.ATOM_SLICE_DEPTH);
  }
  args.push(path.resolve(filePath));
  executeAtom(filePath, args);
  return { tempDir, slicesFile, atomFile };
};
|
|
206
|
+
|
|
207
|
+
/**
 * Maps a package url to the language understood by atom.
 *
 * @param {string} purl Package URL string
 * @param {string} [filePath] Optional file path; a .jar suffix maps maven to "jar"
 * @returns {string|undefined} Language name, or undefined for unsupported purl types
 */
export const purlToLanguage = (purl, filePath) => {
  const purlObj = PackageURL.fromString(purl);
  if (purlObj.type === "maven") {
    return filePath && filePath.endsWith(".jar") ? "jar" : "java";
  }
  if (purlObj.type === "npm") {
    return "javascript";
  }
  if (purlObj.type === "pypi") {
    return "python";
  }
  return undefined;
};
|
|
223
|
+
|
|
224
|
+
/**
 * Seeds the purl location and import maps from evidence already present in
 * the input SBoM (cdxgen populates occurrences/ImportedModules for
 * javascript projects).
 *
 * @param {array} components Components array from the BOM
 * @returns {object} {purlLocationMap, purlImportsMap}
 */
export const initFromSbom = (components) => {
  const purlLocationMap = {};
  const purlImportsMap = {};
  for (const comp of components) {
    // BUGFIX: also require comp.purl — components without a purl previously
    // created a bogus "undefined" key in both maps.
    if (!comp || !comp.purl || !comp.evidence || !comp.evidence.occurrences) {
      continue;
    }
    purlLocationMap[comp.purl] = new Set(
      comp.evidence.occurrences.map((v) => v.location)
    );
    (comp.properties || [])
      .filter((v) => v.name === "ImportedModules")
      .forEach((v) => {
        purlImportsMap[comp.purl] = (v.value || "").split(",");
      });
  }
  return {
    purlLocationMap,
    purlImportsMap
  };
};
|
|
245
|
+
|
|
246
|
+
/**
 * Function to analyze the project
 *
 * @param {object} dbObjMap DB and model instances
 * @param {object} options Command line options (expects `_[0]` project dir,
 *   `language`, `input`, `usagesSlicesFile`, `dataFlowSlicesFile`, `withDataFlow`)
 * @returns {object} Slice artefacts: atomFile, slices files, purlLocationMap,
 *   servicesMap, dataFlowFrames, tempDir and userDefinedTypesMap
 */
export const analyzeProject = async (dbObjMap, options) => {
  const dirPath = options._[0] || ".";
  const language = options.language;
  let usageSlice = undefined;
  let dataFlowSlice = undefined;
  let usagesSlicesFile = undefined;
  let dataFlowSlicesFile = undefined;
  let dataFlowFrames = {};
  let servicesMap = {};
  let retMap = {};
  let userDefinedTypesMap = {};
  const bomFile = options.input;
  const bomJson = JSON.parse(fs.readFileSync(bomFile, "utf8"));
  const components = bomJson.components || [];
  // Load any existing purl-location information from the sbom.
  // For eg: cdxgen populates this information for javascript projects
  let { purlLocationMap, purlImportsMap } = initFromSbom(components);
  // Reuse existing usages slices
  if (options.usagesSlicesFile && fs.existsSync(options.usagesSlicesFile)) {
    usageSlice = JSON.parse(fs.readFileSync(options.usagesSlicesFile, "utf-8"));
    usagesSlicesFile = options.usagesSlicesFile;
  } else {
    // Generate our own slices
    retMap = createSlice(language, dirPath, "usages");
    if (retMap && retMap.slicesFile && fs.existsSync(retMap.slicesFile)) {
      usageSlice = JSON.parse(fs.readFileSync(retMap.slicesFile, "utf-8"));
      usagesSlicesFile = retMap.slicesFile;
      console.log(
        `To speed up this step, cache ${usagesSlicesFile} and invoke evinse with the --usages-slices-file argument.`
      );
    }
  }
  // Parse the usages slice to populate the location/services/types maps
  if (usageSlice && Object.keys(usageSlice).length) {
    const retMap = await parseObjectSlices(
      language,
      usageSlice,
      dbObjMap,
      servicesMap,
      purlLocationMap,
      purlImportsMap
    );
    purlLocationMap = retMap.purlLocationMap;
    servicesMap = retMap.servicesMap;
    userDefinedTypesMap = retMap.userDefinedTypesMap;
  }
  // Optionally reuse or generate a data-flow slice as well
  if (options.withDataFlow) {
    if (
      options.dataFlowSlicesFile &&
      fs.existsSync(options.dataFlowSlicesFile)
    ) {
      dataFlowSlicesFile = options.dataFlowSlicesFile;
      dataFlowSlice = JSON.parse(
        fs.readFileSync(options.dataFlowSlicesFile, "utf-8")
      );
    } else {
      // NOTE: retMap is reassigned here, so the returned atomFile/tempDir
      // refer to the data-flow run when this branch is taken
      retMap = createSlice(language, dirPath, "data-flow");
      if (retMap && retMap.slicesFile && fs.existsSync(retMap.slicesFile)) {
        dataFlowSlicesFile = retMap.slicesFile;
        dataFlowSlice = JSON.parse(fs.readFileSync(retMap.slicesFile, "utf-8"));
        console.log(
          `To speed up this step, cache ${dataFlowSlicesFile} and invoke evinse with the --data-flow-slices-file argument.`
        );
      }
    }
  }
  if (dataFlowSlice && Object.keys(dataFlowSlice).length) {
    dataFlowFrames = await collectDataFlowFrames(
      language,
      userDefinedTypesMap,
      dataFlowSlice,
      dbObjMap,
      purlLocationMap,
      purlImportsMap
    );
  }
  return {
    atomFile: retMap.atomFile,
    usagesSlicesFile,
    dataFlowSlicesFile,
    purlLocationMap,
    servicesMap,
    dataFlowFrames,
    tempDir: retMap.tempDir,
    userDefinedTypesMap
  };
};
|
|
338
|
+
|
|
339
|
+
/**
 * Parses every object slice and user-defined type from the usages slice,
 * updating the purl location, services and user-defined type maps.
 *
 * @param {string} language Application language
 * @param {object} usageSlice Usages slice produced by atom
 * @param {object} dbObjMap DB and model instances
 * @param {object} servicesMap Existing service map, updated in-place
 * @param {object} purlLocationMap Map of purl -> Set of locations, updated in-place
 * @param {object} purlImportsMap Map of purl -> imported module aliases
 * @returns {object} {purlLocationMap, servicesMap, userDefinedTypesMap}
 */
export const parseObjectSlices = async (
  language,
  usageSlice,
  dbObjMap,
  servicesMap = {},
  purlLocationMap = {},
  purlImportsMap = {}
) => {
  if (!usageSlice || !Object.keys(usageSlice).length) {
    // BUGFIX: this guard used to return the bare purlLocationMap while the
    // success path returns an object wrapping three maps, so callers reading
    // retMap.purlLocationMap got undefined. Return a consistent shape.
    return { purlLocationMap, servicesMap, userDefinedTypesMap: {} };
  }
  const userDefinedTypesMap = {};
  (usageSlice.userDefinedTypes || []).forEach((ut) => {
    userDefinedTypesMap[ut.name] = true;
  });
  for (const slice of [
    ...(usageSlice.objectSlices || []),
    ...(usageSlice.userDefinedTypes || [])
  ]) {
    // Skip the library code typically without filename
    if (
      !slice.fileName ||
      !slice.fileName.trim().length ||
      slice.fileName === "<empty>"
    ) {
      continue;
    }
    const locationKey = `${slice.fileName}${
      slice.lineNumber ? "#" + slice.lineNumber : ""
    }`;
    await parseSliceUsages(
      language,
      userDefinedTypesMap,
      slice,
      dbObjMap,
      locationKey,
      purlLocationMap,
      purlImportsMap
    );
    detectServicesFromUsages(language, slice, servicesMap);
  }
  return {
    purlLocationMap,
    servicesMap,
    userDefinedTypesMap
  };
};
|
|
386
|
+
|
|
387
|
+
/**
 * The implementation of this function is based on the logic proposed in the atom slices specification
 * https://github.com/AppThreat/atom/blob/main/specification/docs/slices.md#use
 *
 * @param {string} language Application language
 * @param {object} userDefinedTypesMap User Defined types in the application
 * @param {object} slice Object slice whose usages array is analyzed
 * @param {object} dbObjMap DB Models
 * @param {string} locationKey Filename with line number to be used in occurrences evidence
 * @param {object} purlLocationMap Object to track locations where purls are used (updated in-place)
 * @param {object} purlImportsMap Object to track package urls and their import aliases
 * @returns {undefined}
 */
export const parseSliceUsages = async (
  language,
  userDefinedTypesMap,
  slice,
  dbObjMap,
  locationKey,
  purlLocationMap,
  purlImportsMap
) => {
  const usages = slice.usages;
  if (!usages || !usages.length) {
    return undefined;
  }
  const fileName = slice.fileName;
  const purlsSet = new Set();
  const typesToLookup = new Set();
  // type name -> location keys where the type is used (javascript only)
  const lKeyOverrides = {};
  for (const ausage of usages) {
    const ausageLine =
      ausage?.targetObj?.lineNumber || ausage?.definedBy?.lineNumber;
    // First capture the types in the targetObj and definedBy
    for (const atype of [
      [ausage?.targetObj?.isExternal, ausage?.targetObj?.typeFullName],
      [ausage?.targetObj?.isExternal, ausage?.targetObj?.resolvedMethod],
      [ausage?.definedBy?.isExternal, ausage?.definedBy?.typeFullName],
      [ausage?.definedBy?.isExternal, ausage?.definedBy?.resolvedMethod],
      ...(ausage?.fields || []).map((f) => [f?.isExternal, f?.typeFullName])
    ]) {
      if (
        atype[0] !== false &&
        !isFilterableType(language, userDefinedTypesMap, atype[1])
      ) {
        if (!atype[1].includes("(")) {
          typesToLookup.add(atype[1]);
          // Javascript calls can be resolved to a precise line number only from the call nodes
          if (language === "javascript" && ausageLine) {
            if (atype[1].includes(":")) {
              typesToLookup.add(atype[1].split("::")[0].replace(/:/g, "/"));
            }
            addToOverrides(lKeyOverrides, atype[1], fileName, ausageLine);
          }
        }
        const maybeClassType = getClassTypeFromSignature(language, atype[1]);
        typesToLookup.add(maybeClassType);
        if (language === "javascript" && ausageLine) {
          addToOverrides(lKeyOverrides, maybeClassType, fileName, ausageLine);
        }
      }
    }
    // Now capture full method signatures from invokedCalls, argToCalls including the paramtypes
    for (const acall of []
      .concat(ausage?.invokedCalls || [])
      .concat(ausage?.argToCalls || [])
      .concat(ausage?.procedures || [])) {
      if (acall.isExternal == false) {
        continue;
      }
      if (
        !isFilterableType(language, userDefinedTypesMap, acall?.resolvedMethod)
      ) {
        if (!acall?.resolvedMethod.includes("(")) {
          typesToLookup.add(acall?.resolvedMethod);
          // Javascript calls can be resolved to a precise line number only from the call nodes
          if (language === "javascript" && acall.lineNumber) {
            addToOverrides(
              lKeyOverrides,
              acall?.resolvedMethod,
              fileName,
              acall.lineNumber
            );
          }
        }
        const maybeClassType = getClassTypeFromSignature(
          language,
          acall?.resolvedMethod
        );
        typesToLookup.add(maybeClassType);
        if (language === "javascript" && acall.lineNumber) {
          addToOverrides(
            lKeyOverrides,
            maybeClassType,
            fileName,
            acall.lineNumber
          );
        }
      }
      for (const aparamType of acall?.paramTypes || []) {
        if (!isFilterableType(language, userDefinedTypesMap, aparamType)) {
          if (!aparamType.includes("(")) {
            typesToLookup.add(aparamType);
            if (language === "javascript" && acall.lineNumber) {
              if (aparamType.includes(":")) {
                typesToLookup.add(aparamType.split("::")[0].replace(/:/g, "/"));
              }
              addToOverrides(
                lKeyOverrides,
                aparamType,
                fileName,
                acall.lineNumber
              );
            }
          }
          const maybeClassType = getClassTypeFromSignature(
            language,
            aparamType
          );
          typesToLookup.add(maybeClassType);
          if (language === "javascript" && acall.lineNumber) {
            addToOverrides(
              lKeyOverrides,
              maybeClassType,
              fileName,
              acall.lineNumber
            );
          }
        }
      }
    }
  }
  for (const atype of typesToLookup) {
    if (isFilterableType(language, userDefinedTypesMap, atype)) {
      continue;
    }
    if (purlImportsMap && Object.keys(purlImportsMap).length) {
      for (const apurl of Object.keys(purlImportsMap)) {
        const apurlImports = purlImportsMap[apurl];
        if (apurlImports && apurlImports.includes(atype)) {
          // For javascript, we set all the additional places where a call gets made
          if (language === "javascript") {
            if (!purlLocationMap[apurl]) {
              purlLocationMap[apurl] = new Set();
            }
            if (lKeyOverrides[atype]) {
              // BUGFIX: Set.prototype.add() takes a single value, so the
              // previous add(...lKeyOverrides[atype]) silently recorded only
              // the first override location. Add every location.
              for (const overrideKey of lKeyOverrides[atype]) {
                purlLocationMap[apurl].add(overrideKey);
              }
            }
          } else {
            // This would work well for java since each call node could be mapped to a method
            purlsSet.add(apurl);
          }
        }
      }
    } else {
      // Check the namespaces db
      const nsHits =
        typePurlsCache[atype] ||
        (await dbObjMap.Namespaces.findAll({
          attributes: ["purl"],
          where: {
            data: {
              [Op.like]: `%${atype}%`
            }
          }
        }));
      if (nsHits && nsHits.length) {
        for (const ns of nsHits) {
          purlsSet.add(ns.purl);
        }
        typePurlsCache[atype] = nsHits;
      }
    }
  }
  // Update the purlLocationMap
  for (const apurl of purlsSet) {
    if (!purlLocationMap[apurl]) {
      purlLocationMap[apurl] = new Set();
    }
    purlLocationMap[apurl].add(locationKey);
  }
};
|
|
569
|
+
|
|
570
|
+
/**
 * Determines whether a type/signature should be excluded from namespace
 * lookups: builtins, unresolved markers, platform namespaces and the
 * application's own user-defined types.
 *
 * @param {string} language Application language
 * @param {object} userDefinedTypesMap User Defined types in the application
 * @param {string} typeFullName Fully qualified type name or method signature
 * @returns {boolean} true when the type must be ignored
 */
export const isFilterableType = (
  language,
  userDefinedTypesMap,
  typeFullName
) => {
  if (
    !typeFullName ||
    ["ANY", "UNKNOWN", "VOID"].includes(typeFullName.toUpperCase())
  ) {
    return true;
  }
  // atom emits these markers for operators and unresolved symbols
  if (
    typeFullName.startsWith("<operator") ||
    typeFullName.startsWith("<unresolved") ||
    typeFullName.startsWith("<unknownFullName")
  ) {
    return true;
  }
  if (language && ["java", "jar"].includes(language)) {
    // jdk/platform namespaces carry no third-party evidence
    if (
      !typeFullName.includes(".") ||
      typeFullName.startsWith("@") ||
      typeFullName.startsWith("java.") ||
      typeFullName.startsWith("sun.") ||
      typeFullName.startsWith("jdk.") ||
      typeFullName.startsWith("org.w3c.") ||
      typeFullName.startsWith("org.xml.") ||
      typeFullName.startsWith("javax.xml.")
    ) {
      return true;
    }
  }
  if (language === "javascript") {
    // Local files, lambdas, object literals and node builtins
    if (
      typeFullName.includes(".js") ||
      typeFullName.includes("=>") ||
      typeFullName.startsWith("__") ||
      typeFullName.startsWith("{ ") ||
      typeFullName.startsWith("JSON") ||
      typeFullName.startsWith("void:") ||
      typeFullName.startsWith("LAMBDA") ||
      typeFullName.startsWith("../") ||
      typeFullName.startsWith("node:")
    ) {
      return true;
    }
  }
  // BUGFIX: use Object.hasOwn so inherited Object.prototype members such as
  // "toString" or "constructor" are not mistaken for user-defined types.
  if (Object.hasOwn(userDefinedTypesMap, typeFullName)) {
    return true;
  }
  return false;
};
|
|
622
|
+
|
|
623
|
+
/**
 * Method to detect services from annotation objects in the usage slice
 *
 * @param {string} language Application language
 * @param {object} slice Object slice whose usages are inspected
 * @param {object} servicesMap Existing service map, updated in-place
 * @returns {array|undefined} [] when the slice has no usages, otherwise undefined
 */
export const detectServicesFromUsages = (language, slice, servicesMap = {}) => {
  const usages = slice.usages;
  if (!usages) {
    return [];
  }
  for (const usage of usages) {
    let endpoints = [];
    let authenticated = undefined;
    // Prefer the targetObj method; fall back to definedBy
    const primaryMethod =
      usage?.targetObj?.resolvedMethod || usage?.definedBy?.resolvedMethod;
    if (primaryMethod) {
      endpoints = extractEndpoints(language, primaryMethod);
      if (primaryMethod.toLowerCase().includes("auth")) {
        authenticated = true;
      }
    }
    for (const acall of usage.invokedCalls || []) {
      const method = acall.resolvedMethod;
      if (!method) {
        continue;
      }
      const tmpEndpoints = extractEndpoints(language, method);
      if (method.toLowerCase().includes("auth")) {
        authenticated = true;
      }
      if (tmpEndpoints && tmpEndpoints.length) {
        endpoints = (endpoints || []).concat(tmpEndpoints);
      }
    }
    if (endpoints && endpoints.length) {
      const serviceName = constructServiceName(language, slice);
      if (!servicesMap[serviceName]) {
        servicesMap[serviceName] = {
          endpoints: new Set(),
          authenticated,
          xTrustBoundary: authenticated === true ? true : undefined
        };
      }
      for (const endpoint of endpoints) {
        servicesMap[serviceName].endpoints.add(endpoint);
      }
    }
  }
};
|
|
679
|
+
|
|
680
|
+
/**
 * Derives a service name for a slice from its fullName or fileName,
 * always suffixed with "service".
 *
 * @param {string} language Application language (currently unused)
 * @param {object} slice Object slice with optional fullName/fileName
 * @returns {string} Service name ending in "service"
 */
export const constructServiceName = (language, slice) => {
  let serviceName;
  if (slice?.fullName) {
    // "com.example.Foo:run" -> "com-example-Foo"
    serviceName = slice.fullName.split(":")[0].replace(/\./g, "-");
  } else if (slice?.fileName) {
    // "/tmp/users.controller.js" -> "users"
    serviceName = path.basename(slice.fileName).split(".")[0];
  } else {
    serviceName = "service";
  }
  return serviceName.endsWith("service")
    ? serviceName
    : `${serviceName}-service`;
};
|
|
692
|
+
|
|
693
|
+
/**
 * Extracts endpoint path strings from a resolved method/annotation snippet.
 *
 * @param {string} language Application language
 * @param {string} code Code snippet (annotation or call) to inspect
 * @returns {array|undefined} Endpoint strings, or undefined when not applicable
 */
export const extractEndpoints = (language, code) => {
  if (!code) {
    return undefined;
  }
  // Pull every quoted string and keep only path-like values
  const collectPaths = (snippet) =>
    (snippet.match(/['"](.*?)['"]/gi) || [])
      .map((v) => v.replace(/["']/g, ""))
      .filter(
        (v) =>
          v.length &&
          !v.startsWith(".") &&
          v.includes("/") &&
          !v.startsWith("@")
      );
  let endpoints = undefined;
  switch (language) {
    case "java":
    case "jar":
      // Spring-style mapping annotations: @GetMapping("/path") etc.
      if (
        code.startsWith("@") &&
        code.includes("Mapping") &&
        code.includes("(")
      ) {
        endpoints = collectPaths(code);
      }
      break;
    case "javascript":
      // Express-style registrations: app.get("/path", ...) or router calls
      if (code.includes("app.") || code.includes("route")) {
        endpoints = collectPaths(code);
      }
      break;
    default:
      break;
  }
  return endpoints;
};
|
|
737
|
+
|
|
738
|
+
/**
 * Function to determine if slicing is required for the given language's dependencies.
 * For performance reasons, we make java operate only with namespaces
 *
 * @param {string} purl Package URL string
 * @returns {boolean} true when the dependency must be sliced
 */
export const isSlicingRequired = (purl) => {
  return purlToLanguage(purl) === "python";
};
|
|
749
|
+
|
|
750
|
+
/**
 * Method to create the SBoM with evidence file called evinse file.
 *
 * @param {object} sliceArtefacts Various artefacts from the slice operation
 * @param {object} options Command line options (expects `input`, `output`, `annotate`)
 * @returns {object} The updated BOM json that was written to options.output
 */
export const createEvinseFile = (sliceArtefacts, options) => {
  const {
    tempDir,
    usagesSlicesFile,
    dataFlowSlicesFile,
    purlLocationMap,
    servicesMap,
    dataFlowFrames
  } = sliceArtefacts;
  const bomFile = options.input;
  const evinseOutFile = options.output;
  const bomJson = JSON.parse(fs.readFileSync(bomFile, "utf8"));
  const components = bomJson.components || [];
  let occEvidencePresent = false;
  let csEvidencePresent = false;
  for (const comp of components) {
    if (!comp.purl) {
      continue;
    }
    // Any existing signature is invalidated by the edits below
    delete comp.signature;
    const locationOccurrences = Array.from(
      purlLocationMap[comp.purl] || []
    ).sort();
    if (locationOccurrences.length) {
      if (!comp.evidence) {
        comp.evidence = {};
      }
      // This step would replace any existing occurrences
      // This is fine as long as the input sbom was also generated by cdxgen
      comp.evidence.occurrences = locationOccurrences
        .filter((l) => !!l)
        .map((l) => ({
          location: l
        }));
      occEvidencePresent = true;
    }
    // Attach callstack frames from the data-flow analysis, if any
    const dfFrames = dataFlowFrames[comp.purl];
    if (dfFrames && dfFrames.length) {
      if (!comp.evidence) {
        comp.evidence = {};
      }
      if (!comp.evidence.callstack) {
        comp.evidence.callstack = {};
      }
      // Keep pre-existing frames; only fill when absent
      if (!comp.evidence.callstack.frames) {
        comp.evidence.callstack.frames = framePicker(dfFrames);
        csEvidencePresent = true;
      }
    }
  } // for
  if (servicesMap && Object.keys(servicesMap).length) {
    const services = [];
    for (const serviceName of Object.keys(servicesMap)) {
      services.push({
        name: serviceName,
        endpoints: Array.from(servicesMap[serviceName].endpoints),
        authenticated: servicesMap[serviceName].authenticated,
        "x-trust-boundary": servicesMap[serviceName].xTrustBoundary
      });
    }
    // Add to existing services
    bomJson.services = (bomJson.services || []).concat(services);
  }
  // Optionally embed the raw slice files as BOM annotations
  if (options.annotate) {
    if (!bomJson.annotations) {
      bomJson.annotations = [];
    }
    if (usagesSlicesFile && fs.existsSync(usagesSlicesFile)) {
      bomJson.annotations.push({
        subjects: [bomJson.serialNumber],
        annotator: { component: bomJson.metadata.tools.components[0] },
        timestamp: new Date().toISOString(),
        text: fs.readFileSync(usagesSlicesFile, "utf8")
      });
    }
    if (dataFlowSlicesFile && fs.existsSync(dataFlowSlicesFile)) {
      bomJson.annotations.push({
        subjects: [bomJson.serialNumber],
        annotator: { component: bomJson.metadata.tools.components[0] },
        timestamp: new Date().toISOString(),
        text: fs.readFileSync(dataFlowSlicesFile, "utf8")
      });
    }
  }
  // Increment the version
  bomJson.version = (bomJson.version || 1) + 1;
  // Set the current timestamp to indicate this is newer
  bomJson.metadata.timestamp = new Date().toISOString();
  delete bomJson.signature;
  fs.writeFileSync(evinseOutFile, JSON.stringify(bomJson, null, 2));
  if (occEvidencePresent || csEvidencePresent) {
    console.log(evinseOutFile, "created successfully.");
  } else {
    console.log(
      "Unable to identify component evidence for the input SBoM. Only java, javascript and python projects are supported by evinse."
    );
  }
  // Clean up the atom scratch directory, but only if it lives under tmpdir
  if (tempDir && tempDir.startsWith(tmpdir())) {
    fs.rmSync(tempDir, { recursive: true, force: true });
  }
  return bomJson;
};
|
|
859
|
+
|
|
860
|
+
/**
 * Method to convert dataflow slice into usable callstack frames
 * Implemented based on the logic proposed here - https://github.com/AppThreat/atom/blob/main/specification/docs/slices.md#data-flow-slice
 *
 * @param {string} language Application language
 * @param {object} userDefinedTypesMap User Defined types in the application
 * @param {object} dataFlowSlice Data flow slice object from atom
 * @param {object} dbObjMap DB models
 * @param {object} purlLocationMap Object to track locations where purls are used (accepted for signature parity; not read in this method)
 * @param {object} purlImportsMap Object to track package urls and their import aliases
 * @returns {object} Map keyed by purl; each value is an array of frames arrays
 */
export const collectDataFlowFrames = async (
  language,
  userDefinedTypesMap,
  dataFlowSlice,
  dbObjMap,
  purlLocationMap,
  purlImportsMap
) => {
  const nodes = dataFlowSlice?.graph?.nodes || [];
  // Cache the nodes based on the id to improve lookup
  const nodeCache = {};
  // purl key and an array of frames array.
  // CycloneDX 1.5 currently accepts only 1 frame as evidence,
  // so keeping every candidate frame makes this method more future-proof.
  const dfFrames = {};
  for (const n of nodes) {
    nodeCache[n.id] = n;
  }
  const paths = dataFlowSlice?.paths || [];
  for (const apath of paths) {
    // One frame entry is collected per node along this data-flow path
    let aframe = [];
    let referredPurls = new Set();
    for (const nid of apath) {
      const theNode = nodeCache[nid];
      if (!theNode) {
        // Path references a node id missing from the slice graph; skip it
        continue;
      }
      let typeFullName = theNode.typeFullName;
      // javascript nodes are frequently typed "ANY"; try to recover a usable
      // type name from the node's code snippet or its full name instead
      if (language === "javascript" && typeFullName == "ANY") {
        if (
          theNode.code &&
          (theNode.code.startsWith("new ") ||
            ["METHOD_PARAMETER_IN", "IDENTIFIER"].includes(theNode.label))
        ) {
          // e.g. "new Foo(arg)" -> "Foo"
          typeFullName = theNode.code.split("(")[0].replace("new ", "");
        } else {
          typeFullName = theNode.fullName || theNode.name;
        }
      }
      const maybeClassType = getClassTypeFromSignature(language, typeFullName);
      // isFilterableType (defined elsewhere in this file) decides which
      // types are skipped for purl resolution
      if (!isFilterableType(language, userDefinedTypesMap, typeFullName)) {
        if (purlImportsMap && Object.keys(purlImportsMap).length) {
          // Fast path: match the type against known import aliases per purl
          for (const apurl of Object.keys(purlImportsMap)) {
            const apurlImports = purlImportsMap[apurl];
            if (
              apurlImports &&
              (apurlImports.includes(typeFullName) ||
                apurlImports.includes(maybeClassType))
            ) {
              referredPurls.add(apurl);
            }
          }
        } else {
          // Check the namespaces db. The module-level typePurlsCache avoids
          // repeating the same LIKE query for a type seen before.
          const nsHits =
            typePurlsCache[typeFullName] ||
            (await dbObjMap.Namespaces.findAll({
              attributes: ["purl"],
              where: {
                data: {
                  [Op.like]: `%${typeFullName}%`
                }
              }
            }));
          if (nsHits && nsHits.length) {
            for (const ns of nsHits) {
              referredPurls.add(ns.purl);
            }
            // Only successful lookups are cached; misses are re-queried
            typePurlsCache[typeFullName] = nsHits;
          } else {
            console.log("Unable to identify purl for", typeFullName);
          }
        }
      }
      let parentPackageName = theNode.parentPackageName || "";
      // A "<global>" parent carries the real module path in parentClassName
      // ("path::name"); recover the package name from it
      if (
        parentPackageName == "<global>" &&
        theNode.parentClassName &&
        theNode.parentClassName.includes("::")
      ) {
        parentPackageName = theNode.parentClassName.split("::")[0];
        if (parentPackageName.includes(".js")) {
          // Drop the trailing file name, keeping the directory as the package
          const tmpA = parentPackageName.split("/");
          if (tmpA.length > 1) {
            tmpA.pop();
          }
          parentPackageName = tmpA.join("/");
        }
      }
      // CycloneDX callstack frame shape; missing location info becomes
      // undefined/empty rather than being omitted
      aframe.push({
        package: parentPackageName,
        module: theNode.parentClassName || "",
        function: theNode.parentMethodName || "",
        line: theNode.lineNumber || undefined,
        column: theNode.columnNumber || undefined,
        fullFilename: theNode.parentFileName || ""
      });
    }
    referredPurls = Array.from(referredPurls);
    if (referredPurls.length) {
      for (const apurl of referredPurls) {
        if (!dfFrames[apurl]) {
          dfFrames[apurl] = [];
        }
        // Store this frame as an evidence for this purl
        dfFrames[apurl].push(aframe);
      }
    }
  }
  return dfFrames;
};
|
|
982
|
+
|
|
983
|
+
/**
 * Method to pick a callstack frame as an evidence. This method is required since CycloneDX 1.5 accepts only a single frame as evidence.
 *
 * Heuristic: default to the first frames array, then prefer the last later
 * candidate that has more than 2 entries (a deeper, more informative path).
 *
 * @param {array} dfFrames Data flow frames (array of frame arrays)
 * @returns {array|undefined} The selected frames array, or undefined when none exist
 */
export const framePicker = (dfFrames) => {
  if (!dfFrames || !dfFrames.length) {
    return undefined;
  }
  let aframe = dfFrames[0];
  if (dfFrames.length > 1) {
    // FIX: iterate through the final element as well; the previous bound
    // (i < dfFrames.length - 1) skipped the last candidate frame entirely.
    for (let i = 1; i < dfFrames.length; i++) {
      if (dfFrames[i].length > 2) {
        aframe = dfFrames[i];
      }
    }
  }
  return aframe;
};
|
|
1003
|
+
|
|
1004
|
+
/**
 * Method to extract the class or module name from a fully qualified
 * method/type signature produced by atom.
 *
 * @param {string} language Application language ("java", "jar", "javascript", ...)
 * @param {string} typeFullName Fully qualified type or method signature
 * @returns {string|undefined} The class/module name, or undefined for
 *   missing, unresolved or operator pseudo-types
 */
export const getClassTypeFromSignature = (language, typeFullName) => {
  // FIX: guard against nodes without a resolvable type; previously a
  // null/undefined typeFullName threw a TypeError on .includes()
  if (typeFullName == null) {
    return undefined;
  }
  if (["java", "jar"].includes(language) && typeFullName.includes(":")) {
    // java signatures look like "com.example.Foo.bar:void(int)":
    // keep the part before ":" and drop the trailing method name
    typeFullName = typeFullName.split(":")[0];
    const tmpA = typeFullName.split(".");
    tmpA.pop();
    typeFullName = tmpA.join(".");
  } else if (language === "javascript") {
    typeFullName = typeFullName.replace("new: ", "").replace("await ", "");
    if (typeFullName.includes(":")) {
      // "path::name" style signature: keep the path portion, then drop the
      // last path segment
      const tmpA = typeFullName.split("::")[0].replace(/:/g, "/").split("/");
      if (tmpA.length > 1) {
        tmpA.pop();
      }
      typeFullName = tmpA.join("/");
    }
  }
  // Pseudo-types emitted by atom carry no purl-resolvable class name
  if (
    typeFullName.startsWith("<unresolved") ||
    typeFullName.startsWith("<operator") ||
    typeFullName.startsWith("<unknownFullName")
  ) {
    return undefined;
  }
  if (typeFullName.includes("$")) {
    // Strip inner-class suffixes such as Outer$Inner
    typeFullName = typeFullName.split("$")[0];
  }
  return typeFullName;
};
|
|
1032
|
+
|
|
1033
|
+
/**
 * Helper to record a usage location override for a given type.
 * Locations are tracked as "fileName#lineNumber" keys inside a Set, so
 * repeated sightings of the same line collapse into a single entry.
 *
 * @param {object} lKeyOverrides Map from type name to a Set of location keys
 * @param {string} atype Type name being recorded
 * @param {string} fileName Source file where the usage occurs
 * @param {string|number} ausageLineNumber Line number of the usage
 */
const addToOverrides = (lKeyOverrides, atype, fileName, ausageLineNumber) => {
  const locationKey = `${fileName}#${ausageLineNumber}`;
  // Lazily create the Set for first-seen types, then record the location
  const locations = lKeyOverrides[atype] || (lKeyOverrides[atype] = new Set());
  locations.add(locationKey);
};
|