@nodesecure/scanner 3.1.0 → 3.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/depWalker.js CHANGED
@@ -1,304 +1,310 @@
1
- // Import Node.js Dependencies
2
- import path from "path";
3
- import { readFileSync, promises as fs } from "fs";
4
- import timers from "timers/promises";
5
- import os from "os";
6
-
7
- // Import Third-party Dependencies
8
- import combineAsyncIterators from "combine-async-iterators";
9
- import iter from "itertools";
10
- import pacote from "pacote";
11
- import Arborist from "@npmcli/arborist";
12
- import Lock from "@slimio/lock";
13
- import * as vuln from "@nodesecure/vuln";
14
- import { getLocalRegistryURL } from "@nodesecure/npm-registry-sdk";
15
- import { ScannerLoggerEvents } from "./constants.js";
16
-
17
- // Import Internal Dependencies
18
- import {
19
- mergeDependencies, getCleanDependencyName, getDependenciesWarnings, addMissingVersionFlags, isGitDependency,
20
- NPM_TOKEN
21
- } from "./utils/index.js";
22
- import { scanDirOrArchive } from "./tarball.js";
23
- import { packageMetadata } from "./npmRegistry.js";
24
- import Dependency from "./class/dependency.class.js";
25
- import Logger from "./class/logger.class.js";
26
-
27
- const { version: packageVersion } = JSON.parse(
28
- readFileSync(
29
- new URL(path.join("..", "package.json"), import.meta.url)
30
- )
31
- );
32
-
33
- export async function* searchDeepDependencies(packageName, gitURL, options) {
34
- const { exclude, currDepth = 0, parent, maxDepth } = options;
35
-
36
- const { name, version, deprecated, ...pkg } = await pacote.manifest(gitURL ?? packageName, {
37
- ...NPM_TOKEN,
38
- registry: getLocalRegistryURL(),
39
- cache: `${os.homedir()}/.npm`
40
- });
41
- const { dependencies, customResolvers } = mergeDependencies(pkg);
42
-
43
- const current = new Dependency(name, version, parent);
44
- gitURL !== null && current.isGit(gitURL);
45
- current.addFlag("isDeprecated", deprecated === true);
46
- current.addFlag("hasCustomResolver", customResolvers.size > 0);
47
- current.addFlag("hasDependencies", dependencies.size > 0);
48
-
49
- if (currDepth !== maxDepth) {
50
- const config = {
51
- exclude, currDepth: currDepth + 1, parent: current, maxDepth
52
- };
53
-
54
- const gitDependencies = iter.filter(customResolvers.entries(), ([, valueStr]) => isGitDependency(valueStr));
55
- for (const [depName, valueStr] of gitDependencies) {
56
- yield* searchDeepDependencies(depName, valueStr, config);
57
- }
58
-
59
- const depsNames = await Promise.all(iter.map(dependencies.entries(), getCleanDependencyName));
60
- for (const [fullName, cleanName, isLatest] of depsNames) {
61
- if (!isLatest) {
62
- current.addFlag("hasOutdatedDependency");
63
- }
64
-
65
- if (exclude.has(cleanName)) {
66
- exclude.get(cleanName).add(current.fullName);
67
- }
68
- else {
69
- exclude.set(cleanName, new Set([current.fullName]));
70
- yield* searchDeepDependencies(fullName, null, config);
71
- }
72
- }
73
- }
74
-
75
- yield current;
76
- }
77
-
78
- export async function* deepReadEdges(currentPackageName, { to, parent, exclude, fullLockMode }) {
79
- const { version, integrity = to.integrity } = to.package;
80
-
81
- const updatedVersion = version === "*" || typeof version === "undefined" ? "latest" : version;
82
- const current = new Dependency(currentPackageName, updatedVersion, parent);
83
-
84
- if (fullLockMode) {
85
- const { deprecated, _integrity, ...pkg } = await pacote.manifest(`${currentPackageName}@${updatedVersion}`, {
86
- ...NPM_TOKEN,
87
- registry: getLocalRegistryURL(),
88
- cache: `${os.homedir()}/.npm`
89
- });
90
- const { customResolvers } = mergeDependencies(pkg);
91
-
92
- current.addFlag("hasValidIntegrity", _integrity === integrity);
93
- current.addFlag("isDeprecated");
94
- current.addFlag("hasCustomResolver", customResolvers.size > 0);
95
- }
96
- current.addFlag("hasDependencies", to.edgesOut.size > 0);
97
-
98
- for (const [packageName, { to: toNode }] of to.edgesOut) {
99
- if (toNode === null || toNode.dev) {
100
- continue;
101
- }
102
- const cleanName = `${packageName}@${toNode.package.version}`;
103
-
104
- if (exclude.has(cleanName)) {
105
- exclude.get(cleanName).add(current.fullName);
106
- }
107
- else {
108
- exclude.set(cleanName, new Set([current.fullName]));
109
- yield* deepReadEdges(packageName, { parent: current, to: toNode, exclude });
110
- }
111
- }
112
- yield current;
113
- }
114
-
115
- export async function* getRootDependencies(manifest, options) {
116
- const { maxDepth = 4, exclude, usePackageLock, fullLockMode } = options;
117
-
118
- const { dependencies, customResolvers } = mergeDependencies(manifest, void 0);
119
- const parent = new Dependency(manifest.name, manifest.version);
120
- parent.addFlag("hasCustomResolver", customResolvers.size > 0);
121
- parent.addFlag("hasDependencies", dependencies.size > 0);
122
-
123
- let iterators;
124
- if (usePackageLock) {
125
- const arb = new Arborist({
126
- ...NPM_TOKEN,
127
- registry: getLocalRegistryURL()
128
- });
129
- let tree;
130
- try {
131
- await fs.access(path.join(process.cwd(), "node_modules"));
132
- tree = await arb.loadActual();
133
- }
134
- catch {
135
- tree = await arb.loadVirtual();
136
- }
137
-
138
- iterators = iter.filter(tree.edgesOut.entries(), ([, { to }]) => to !== null && !to.dev)
139
- .map(([packageName, { to }]) => deepReadEdges(packageName, { to, parent, fullLockMode, exclude }));
140
- }
141
- else {
142
- const configRef = { exclude, maxDepth, parent };
143
- iterators = [
144
- ...iter.filter(customResolvers.entries(), ([, valueStr]) => isGitDependency(valueStr))
145
- .map(([depName, valueStr]) => searchDeepDependencies(depName, valueStr, configRef)),
146
- ...iter.map(dependencies.entries(), ([name, ver]) => searchDeepDependencies(`${name}@${ver}`, null, configRef))
147
- ];
148
- }
149
- for await (const dep of combineAsyncIterators({}, ...iterators)) {
150
- yield dep;
151
- }
152
-
153
- // Add root dependencies to the exclude Map (because the parent is not handled by searchDeepDependencies)
154
- // if we skip this the code will fail to re-link properly dependencies in the following steps
155
- const depsName = await Promise.all(iter.map(dependencies.entries(), getCleanDependencyName));
156
- for (const [, fullRange, isLatest] of depsName) {
157
- if (!isLatest) {
158
- parent.addFlag("hasOutdatedDependency");
159
- }
160
- if (exclude.has(fullRange)) {
161
- exclude.get(fullRange).add(parent.fullName);
162
- }
163
- }
164
-
165
- yield parent;
166
- }
167
-
168
- /**
169
- * @param {*} manifest
170
- * @param {*} options
171
- * @param {Logger} logger
172
- */
173
- export async function depWalker(manifest, options = {}, logger = new Logger()) {
174
- const {
175
- forceRootAnalysis = false,
176
- usePackageLock = false,
177
- fullLockMode = false,
178
- maxDepth,
179
- vulnerabilityStrategy = vuln.strategies.NONE
180
- } = options;
181
-
182
- // Create TMP directory
183
- const tmpLocation = await fs.mkdtemp(path.join(os.tmpdir(), "/"));
184
-
185
- const payload = {
186
- id: tmpLocation.slice(-6),
187
- rootDepencyName: manifest.name,
188
- version: packageVersion,
189
- vulnerabilityStrategy,
190
- warnings: []
191
- };
192
-
193
- // We are dealing with an exclude Map to avoid checking a package more than one time in searchDeepDependencies
194
- const exclude = new Map();
195
- const dependencies = new Map();
196
-
197
- {
198
- logger
199
- .start(ScannerLoggerEvents.analysis.tree)
200
- .start(ScannerLoggerEvents.analysis.tarball)
201
- .start(ScannerLoggerEvents.analysis.registry);
202
- const fetchedMetadataPackages = new Set();
203
- const promisesToWait = [];
204
-
205
- const tarballLocker = new Lock({ maxConcurrent: 5 });
206
- tarballLocker.on("freeOne", () => logger.tick(ScannerLoggerEvents.analysis.tarball));
207
-
208
- const rootDepsOptions = { maxDepth, exclude, usePackageLock, fullLockMode };
209
- for await (const currentDep of getRootDependencies(manifest, rootDepsOptions)) {
210
- const { name, version } = currentDep;
211
- const current = currentDep.exportAsPlainObject(name === manifest.name ? 0 : void 0);
212
- let proceedDependencyAnalysis = true;
213
-
214
- if (dependencies.has(name)) {
215
- // TODO: how to handle different metadata ?
216
- const dep = dependencies.get(name);
217
-
218
- const currVersion = Object.keys(current.versions)[0];
219
- if (currVersion in dep.versions) {
220
- // The dependency has already entered the analysis
221
- // This happens if the package is used by multiple packages in the tree
222
- proceedDependencyAnalysis = false;
223
- }
224
- else {
225
- dep.versions[currVersion] = current.versions[currVersion];
226
- }
227
- }
228
- else {
229
- dependencies.set(name, current);
230
- }
231
-
232
- if (proceedDependencyAnalysis) {
233
- logger.tick(ScannerLoggerEvents.analysis.tree);
234
-
235
- // There is no need to fetch 'N' times the npm metadata for the same package.
236
- if (fetchedMetadataPackages.has(name)) {
237
- logger.tick(ScannerLoggerEvents.analysis.registry);
238
- }
239
- else {
240
- fetchedMetadataPackages.add(name);
241
- promisesToWait.push(packageMetadata(name, version, {
242
- ref: current,
243
- logger
244
- }));
245
- }
246
-
247
- promisesToWait.push(scanDirOrArchive(name, version, {
248
- ref: current.versions[version],
249
- tmpLocation: forceRootAnalysis && name === manifest.name ? null : tmpLocation,
250
- locker: tarballLocker,
251
- logger
252
- }));
253
- }
254
- }
255
-
256
- logger.end(ScannerLoggerEvents.analysis.tree);
257
-
258
- // Wait for all extraction to be done!
259
- await Promise.allSettled(promisesToWait);
260
- await timers.setImmediate();
261
-
262
- logger.end(ScannerLoggerEvents.analysis.tarball).end(ScannerLoggerEvents.analysis.registry);
263
- }
264
-
265
- const { hydratePayloadDependencies, strategy } = await vuln.setStrategy(vulnerabilityStrategy);
266
- await hydratePayloadDependencies(dependencies, {
267
- useStandardFormat: true
268
- });
269
-
270
- payload.vulnerabilityStrategy = strategy;
271
-
272
- // We do this because it "seem" impossible to link all dependencies in the first walk.
273
- // Because we are dealing with package only one time it may happen sometimes.
274
- for (const [packageName, dependency] of dependencies) {
275
- for (const [verStr, verDescriptor] of Object.entries(dependency.versions)) {
276
- verDescriptor.flags.push(...addMissingVersionFlags(new Set(verDescriptor.flags), dependency));
277
-
278
- const fullName = `${packageName}@${verStr}`;
279
- const usedDeps = exclude.get(fullName) || new Set();
280
- if (usedDeps.size === 0) {
281
- continue;
282
- }
283
-
284
- const usedBy = Object.create(null);
285
- for (const [name, version] of [...usedDeps].map((name) => name.split(" "))) {
286
- usedBy[name] = version;
287
- }
288
- Object.assign(verDescriptor.usedBy, usedBy);
289
- }
290
- }
291
-
292
- try {
293
- payload.warnings = getDependenciesWarnings(dependencies);
294
- payload.dependencies = Object.fromEntries(dependencies);
295
-
296
- return payload;
297
- }
298
- finally {
299
- await timers.setImmediate();
300
- await fs.rm(tmpLocation, { recursive: true, force: true });
301
-
302
- logger.emit(ScannerLoggerEvents.done);
303
- }
304
- }
1
+ // Import Node.js Dependencies
2
+ import path from "path";
3
+ import { readFileSync, promises as fs } from "fs";
4
+ import timers from "timers/promises";
5
+ import os from "os";
6
+
7
+ // Import Third-party Dependencies
8
+ import combineAsyncIterators from "combine-async-iterators";
9
+ import iter from "itertools";
10
+ import pacote from "pacote";
11
+ import Arborist from "@npmcli/arborist";
12
+ import Lock from "@slimio/lock";
13
+ import * as vuln from "@nodesecure/vuln";
14
+ import { getLocalRegistryURL } from "@nodesecure/npm-registry-sdk";
15
+ import { ScannerLoggerEvents } from "./constants.js";
16
+
17
+ // Import Internal Dependencies
18
+ import {
19
+ mergeDependencies, getCleanDependencyName, getDependenciesWarnings, addMissingVersionFlags, isGitDependency,
20
+ NPM_TOKEN
21
+ } from "./utils/index.js";
22
+ import { scanDirOrArchive } from "./tarball.js";
23
+ import { packageMetadata } from "./npmRegistry.js";
24
+ import Dependency from "./class/dependency.class.js";
25
+ import Logger from "./class/logger.class.js";
26
+
27
// Resolve the scanner's own version from its package.json (one directory up
// from this file); embedded into every payload as `scannerVersion`.
const packageJsonURL = new URL(path.join("..", "package.json"), import.meta.url);
const { version: packageVersion } = JSON.parse(readFileSync(packageJsonURL, "utf-8"));
32
+
33
/**
 * Recursively walk a dependency tree by fetching package manifests from the
 * (local) npm registry, yielding one Dependency per package walked.
 *
 * @param {!string} packageName - package spec ("name@range") to resolve.
 * @param {string | null} gitURL - git remote URL to resolve instead of the registry (null for registry packages).
 * @param {object} options
 * @param {Map<string, Set<string>>} options.exclude - packages already walked ("name@version" -> set of dependents).
 * @param {number} [options.currDepth=0] - current recursion depth.
 * @param {Dependency} [options.parent] - dependency that requested this package.
 * @param {number} options.maxDepth - depth at which the walk stops descending.
 * @yields {Dependency}
 */
export async function* searchDeepDependencies(packageName, gitURL, options) {
  const { exclude, currDepth = 0, parent, maxDepth } = options;

  const { name, version, deprecated, ...pkg } = await pacote.manifest(gitURL ?? packageName, {
    ...NPM_TOKEN,
    registry: getLocalRegistryURL(),
    cache: `${os.homedir()}/.npm`
  });
  const { dependencies, customResolvers } = mergeDependencies(pkg);

  const current = new Dependency(name, version, parent);
  // NOTE: was `gitURL !== null && current.isGit(gitURL);` — a short-circuit
  // expression used as a statement; an explicit `if` states the intent.
  if (gitURL !== null) {
    current.isGit(gitURL);
  }
  current.addFlag("isDeprecated", deprecated === true);
  current.addFlag("hasCustomResolver", customResolvers.size > 0);
  current.addFlag("hasDependencies", dependencies.size > 0);

  if (currDepth !== maxDepth) {
    const config = {
      exclude, currDepth: currDepth + 1, parent: current, maxDepth
    };

    // Custom resolvers pointing at git remotes are walked through their URL.
    const gitDependencies = iter.filter(customResolvers.entries(), ([, valueStr]) => isGitDependency(valueStr));
    for (const [depName, valueStr] of gitDependencies) {
      yield* searchDeepDependencies(depName, valueStr, config);
    }

    const depsNames = await Promise.all(iter.map(dependencies.entries(), getCleanDependencyName));
    for (const [fullName, cleanName, isLatest] of depsNames) {
      if (!isLatest) {
        current.addFlag("hasOutdatedDependency");
      }

      // Walk each "name@version" only once; later occurrences just record
      // the current package as an additional dependent.
      if (exclude.has(cleanName)) {
        exclude.get(cleanName).add(current.fullName);
      }
      else {
        exclude.set(cleanName, new Set([current.fullName]));
        yield* searchDeepDependencies(fullName, null, config);
      }
    }
  }

  yield current;
}
77
+
78
/**
 * Walk the outgoing edges of an Arborist (lockfile / node_modules) tree node,
 * yielding one Dependency per production package encountered.
 *
 * @param {!string} currentPackageName
 * @param {object} options
 * @param {*} options.to - Arborist node this edge points to.
 * @param {Dependency} options.parent
 * @param {Map<string, Set<string>>} options.exclude - packages already walked ("name@version" -> set of dependents).
 * @param {boolean} [options.fullLockMode=false] - when true, re-fetch each manifest to verify integrity/deprecation.
 * @yields {Dependency}
 */
export async function* deepReadEdges(currentPackageName, { to, parent, exclude, fullLockMode }) {
  const { version, integrity = to.integrity } = to.package;

  // Lockfile entries for git/workspace deps may carry "*" or no version at all.
  const updatedVersion = version === "*" || typeof version === "undefined" ? "latest" : version;
  const current = new Dependency(currentPackageName, updatedVersion, parent);

  if (fullLockMode) {
    const { deprecated, _integrity, ...pkg } = await pacote.manifest(`${currentPackageName}@${updatedVersion}`, {
      ...NPM_TOKEN,
      registry: getLocalRegistryURL(),
      cache: `${os.homedir()}/.npm`
    });
    const { customResolvers } = mergeDependencies(pkg);

    current.addFlag("hasValidIntegrity", _integrity === integrity);
    // FIX: `deprecated` was destructured from the manifest but never used, and
    // addFlag("isDeprecated") was called without a value — which would flag
    // every full-lock package. Mirror searchDeepDependencies instead.
    current.addFlag("isDeprecated", deprecated === true);
    current.addFlag("hasCustomResolver", customResolvers.size > 0);
  }
  current.addFlag("hasDependencies", to.edgesOut.size > 0);

  for (const [packageName, { to: toNode }] of to.edgesOut) {
    if (toNode === null || toNode.dev) {
      continue;
    }
    const cleanName = `${packageName}@${toNode.package.version}`;

    if (exclude.has(cleanName)) {
      exclude.get(cleanName).add(current.fullName);
    }
    else {
      exclude.set(cleanName, new Set([current.fullName]));
      // FIX: propagate fullLockMode — it was previously dropped here, silently
      // disabling integrity/deprecation verification below the first level.
      yield* deepReadEdges(packageName, { parent: current, to: toNode, exclude, fullLockMode });
    }
  }
  yield current;
}
114
+
115
/**
 * Yield every production dependency of the manifest, either from the installed
 * tree / lockfile (via Arborist) or by querying the registry recursively,
 * then yield the root package itself last.
 *
 * @param {*} manifest - parsed package.json of the project to scan.
 * @param {object} options
 * @param {number} [options.maxDepth=4] - maximum depth for the registry walk.
 * @param {Map<string, Set<string>>} options.exclude - shared de-duplication map ("name@version" -> dependents).
 * @param {boolean} options.usePackageLock - walk the installed tree / lockfile instead of the registry.
 * @param {boolean} options.fullLockMode - verify lockfile packages against the registry.
 * @param {string} [options.location=process.cwd()] - project directory to analyze.
 * @yields {Dependency}
 */
export async function* getRootDependencies(manifest, options) {
  const {
    maxDepth = 4,
    exclude,
    usePackageLock,
    fullLockMode,
    // FIX: default so path.join() and Arborist never receive undefined when
    // the option is omitted (matches the pre-3.2 process.cwd() behavior).
    location = process.cwd()
  } = options;

  const { dependencies, customResolvers } = mergeDependencies(manifest, void 0);
  const parent = new Dependency(manifest.name, manifest.version);
  parent.addFlag("hasCustomResolver", customResolvers.size > 0);
  parent.addFlag("hasDependencies", dependencies.size > 0);

  let iterators;
  if (usePackageLock) {
    const arb = new Arborist({
      ...NPM_TOKEN,
      path: location,
      registry: getLocalRegistryURL()
    });
    let tree;
    try {
      // Prefer the real node_modules tree when installed; otherwise fall back
      // to the virtual tree built from the lockfile.
      await fs.access(path.join(location, "node_modules"));
      tree = await arb.loadActual();
    }
    catch {
      tree = await arb.loadVirtual();
    }

    iterators = [
      ...iter.filter(tree.edgesOut.entries(), ([, { to }]) => to !== null && (!to.dev || to.isWorkspace))
        .map(([packageName, { to }]) => [packageName, to.isWorkspace ? to.target : to])
        .map(([packageName, to]) => deepReadEdges(packageName, { to, parent, fullLockMode, exclude }))
    ];
  }
  else {
    const configRef = { exclude, maxDepth, parent };
    iterators = [
      ...iter.filter(customResolvers.entries(), ([, valueStr]) => isGitDependency(valueStr))
        .map(([depName, valueStr]) => searchDeepDependencies(depName, valueStr, configRef)),
      ...iter.map(dependencies.entries(), ([name, ver]) => searchDeepDependencies(`${name}@${ver}`, null, configRef))
    ];
  }
  for await (const dep of combineAsyncIterators({}, ...iterators)) {
    yield dep;
  }

  // Add root dependencies to the exclude Map (because the parent is not handled by searchDeepDependencies)
  // if we skip this the code will fail to re-link properly dependencies in the following steps
  const depsName = await Promise.all(iter.map(dependencies.entries(), getCleanDependencyName));
  for (const [, fullRange, isLatest] of depsName) {
    if (!isLatest) {
      parent.addFlag("hasOutdatedDependency");
    }
    if (exclude.has(fullRange)) {
      exclude.get(fullRange).add(parent.fullName);
    }
  }

  yield parent;
}
171
+
172
/**
 * Walk the whole dependency tree of a manifest and build the scanner payload
 * (per-package versions, flags, npm metadata, tarball analysis, vulnerabilities).
 *
 * @param {*} manifest - parsed package.json of the project to scan.
 * @param {object} [options]
 * @param {boolean} [options.forceRootAnalysis=false] - scan the root project directory itself instead of a tarball.
 * @param {boolean} [options.usePackageLock=false] - walk the installed tree / lockfile instead of the registry.
 * @param {boolean} [options.fullLockMode=false] - verify lockfile packages against the registry.
 * @param {number} [options.maxDepth] - maximum depth for the registry walk.
 * @param {string} [options.location=process.cwd()] - project directory to analyze.
 * @param {string} [options.vulnerabilityStrategy=vuln.strategies.NONE] - vulnerability strategy name.
 * @param {Logger} [logger]
 * @returns {Promise<object>} the completed analysis payload.
 */
export async function depWalker(manifest, options = {}, logger = new Logger()) {
  const {
    forceRootAnalysis = false,
    usePackageLock = false,
    fullLockMode = false,
    maxDepth,
    // FIX: default to the current working directory so omitting `location`
    // keeps the pre-3.2 behavior instead of propagating undefined paths.
    location = process.cwd(),
    vulnerabilityStrategy = vuln.strategies.NONE
  } = options;

  // Create TMP directory (used to extract and scan tarballs)
  const tmpLocation = await fs.mkdtemp(path.join(os.tmpdir(), "/"));

  const payload = {
    id: tmpLocation.slice(-6),
    rootDependencyName: manifest.name,
    scannerVersion: packageVersion,
    vulnerabilityStrategy,
    warnings: []
  };

  // We are dealing with an exclude Map to avoid checking a package more than one time in searchDeepDependencies
  const exclude = new Map();
  const dependencies = new Map();

  {
    logger
      .start(ScannerLoggerEvents.analysis.tree)
      .start(ScannerLoggerEvents.analysis.tarball)
      .start(ScannerLoggerEvents.analysis.registry);
    const fetchedMetadataPackages = new Set();
    const promisesToWait = [];

    // Limit concurrent tarball extractions/scans to 5.
    const tarballLocker = new Lock({ maxConcurrent: 5 });
    tarballLocker.on("freeOne", () => logger.tick(ScannerLoggerEvents.analysis.tarball));

    const rootDepsOptions = { maxDepth, exclude, usePackageLock, fullLockMode, location };
    for await (const currentDep of getRootDependencies(manifest, rootDepsOptions)) {
      const { name, version } = currentDep;
      // The root package is exported with depth 0; everything else keeps its own depth.
      const current = currentDep.exportAsPlainObject(name === manifest.name ? 0 : void 0);
      let proceedDependencyAnalysis = true;

      if (dependencies.has(name)) {
        // TODO: how to handle different metadata ?
        const dep = dependencies.get(name);

        const currVersion = Object.keys(current.versions)[0];
        if (currVersion in dep.versions) {
          // The dependency has already entered the analysis
          // This happens if the package is used by multiple packages in the tree
          proceedDependencyAnalysis = false;
        }
        else {
          dep.versions[currVersion] = current.versions[currVersion];
        }
      }
      else {
        dependencies.set(name, current);
      }

      if (proceedDependencyAnalysis) {
        logger.tick(ScannerLoggerEvents.analysis.tree);

        // There is no need to fetch 'N' times the npm metadata for the same package.
        if (fetchedMetadataPackages.has(name)) {
          logger.tick(ScannerLoggerEvents.analysis.registry);
        }
        else {
          fetchedMetadataPackages.add(name);
          promisesToWait.push(packageMetadata(name, version, {
            ref: current,
            logger
          }));
        }

        promisesToWait.push(scanDirOrArchive(name, version, {
          ref: current.versions[version],
          location,
          // Scan the project directory itself (not a tarball) for the root package.
          tmpLocation: forceRootAnalysis && name === manifest.name ? null : tmpLocation,
          locker: tarballLocker,
          logger
        }));
      }
    }

    logger.end(ScannerLoggerEvents.analysis.tree);

    // Wait for all extraction to be done!
    await Promise.allSettled(promisesToWait);
    await timers.setImmediate();

    logger.end(ScannerLoggerEvents.analysis.tarball).end(ScannerLoggerEvents.analysis.registry);
  }

  const { hydratePayloadDependencies, strategy } = await vuln.setStrategy(vulnerabilityStrategy);
  await hydratePayloadDependencies(dependencies, {
    useStandardFormat: true
  });

  payload.vulnerabilityStrategy = strategy;

  // We do this because it "seem" impossible to link all dependencies in the first walk.
  // Because we are dealing with package only one time it may happen sometimes.
  for (const [packageName, dependency] of dependencies) {
    for (const [verStr, verDescriptor] of Object.entries(dependency.versions)) {
      verDescriptor.flags.push(...addMissingVersionFlags(new Set(verDescriptor.flags), dependency));

      const fullName = `${packageName}@${verStr}`;
      const usedDeps = exclude.get(fullName) || new Set();
      if (usedDeps.size === 0) {
        continue;
      }

      // Null-prototype object: dependents' names come from untrusted package.json data.
      const usedBy = Object.create(null);
      for (const [name, version] of [...usedDeps].map((name) => name.split(" "))) {
        usedBy[name] = version;
      }
      Object.assign(verDescriptor.usedBy, usedBy);
    }
  }

  try {
    payload.warnings = getDependenciesWarnings(dependencies);
    payload.dependencies = Object.fromEntries(dependencies);

    return payload;
  }
  finally {
    // Always clean up the TMP directory and signal completion, even on error.
    await timers.setImmediate();
    await fs.rm(tmpLocation, { recursive: true, force: true });

    logger.emit(ScannerLoggerEvents.done);
  }
}