@coana-tech/cli 14.12.58 → 14.12.61
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/cli.mjs +2114 -206
- package/package.json +1 -1
- package/reachability-analyzers-cli.mjs +105 -113
- package/repos/coana-tech/alucard/alucard.jar +0 -0
- package/repos/coana-tech/goana/bin/goana-darwin-amd64.gz +0 -0
- package/repos/coana-tech/goana/bin/goana-darwin-arm64.gz +0 -0
- package/repos/coana-tech/goana/bin/goana-linux-amd64.gz +0 -0
- package/repos/coana-tech/goana/bin/goana-linux-arm64.gz +0 -0
package/package.json
CHANGED

package/cli.mjs
CHANGED

@@ -18277,7 +18277,7 @@ var require_lodash = __commonJS({
  var mergeWith = createAssigner(function(object, source, srcIndex, customizer) {
  baseMerge(object, source, srcIndex, customizer);
  });
- var omit = flatRest(function(object, paths) {
+ var omit2 = flatRest(function(object, paths) {
  var result2 = {};
  if (object == null) {
  return result2;
@@ -18968,7 +18968,7 @@ var require_lodash = __commonJS({
  lodash22.mixin = mixin;
  lodash22.negate = negate;
  lodash22.nthArg = nthArg;
- lodash22.omit = omit;
+ lodash22.omit = omit2;
  lodash22.omitBy = omitBy;
  lodash22.once = once6;
  lodash22.orderBy = orderBy;
@@ -62479,7 +62479,7 @@ var require_micromatch = __commonJS({
  var micromatch2 = (list2, patterns, options) => {
  patterns = [].concat(patterns);
  list2 = [].concat(list2);
- let omit = /* @__PURE__ */ new Set();
+ let omit2 = /* @__PURE__ */ new Set();
  let keep = /* @__PURE__ */ new Set();
  let items = /* @__PURE__ */ new Set();
  let negatives = 0;
@@ -62498,15 +62498,15 @@ var require_micromatch = __commonJS({
  let match2 = negated ? !matched.isMatch : matched.isMatch;
  if (!match2) continue;
  if (negated) {
- omit.add(matched.output);
+ omit2.add(matched.output);
  } else {
- omit.delete(matched.output);
+ omit2.delete(matched.output);
  keep.add(matched.output);
  }
  }
  }
  let result = negatives === patterns.length ? [...items] : [...keep];
- let matches = result.filter((item) => !omit.has(item));
+ let matches = result.filter((item) => !omit2.has(item));
  if (options && matches.length === 0) {
  if (options.failglob === true) {
  throw new Error(`No matches found for "${patterns.join(", ")}"`);
@@ -76824,14 +76824,14 @@ async function createSocketTier1Scan(cliOptions, coanaCliVersion) {
  throw new Error("we should never reach this point");
  }
  }
- async function sendErrorReportToSocketDashboard(stackTrace, shouldLogSharing, reportId, logContent) {
+ async function sendErrorReportToSocketDashboard(stackTrace, shouldLogSharing, errorType, reportId, logContent) {
  if (shouldLogSharing) {
- console.log(
- console.log("The report will help team Socket debug the
- console.log("No source code is included in the
+ console.log(`Sending ${errorType} report to Socket`);
+ console.log("The report will help team Socket debug the problem");
+ console.log("No source code is included in the report");
  }
  try {
- const url2 = getSocketApiUrl("tier1-reachability-scan/failure");
+ const url2 = errorType === "analysis-error" ? getSocketApiUrl("tier1-reachability-scan/analysis-error") : getSocketApiUrl("tier1-reachability-scan/failure");
  const data2 = {
  stack_trace: stackTrace,
  log_content: logContent,
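
A note on the errorType argument introduced above: it decides which Socket endpoint receives the report. A minimal sketch of that routing, assuming errorType is either "analysis-error" or some other failure label (the selectReportPath helper below is illustrative and not part of the package; only the two endpoint paths and the "analysis-error" value appear in the diff):

    // Sketch only: mirrors the endpoint selection in the hunk above.
    function selectReportPath(errorType) {
      return errorType === "analysis-error"
        ? "tier1-reachability-scan/analysis-error"
        : "tier1-reachability-scan/failure";
    }

    console.log(selectReportPath("analysis-error")); // tier1-reachability-scan/analysis-error
    console.log(selectReportPath("crash"));          // tier1-reachability-scan/failure
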
@@ -77070,12 +77070,18 @@ var DashboardAPI = class {
  );
  }
  }
- async sendErrorReport(apiKey3, stackTrace, shouldLogSharing, reportId, repoUrl, projectName, logContent) {
+ async sendErrorReport(apiKey3, stackTrace, shouldLogSharing, errorType, reportId, repoUrl, projectName, logContent) {
  if (this.disableAnalyticsSharing) {
  return;
  }
  if (this.socketMode) {
- await this.socketAPI.sendErrorReportToSocketDashboard(stackTrace, shouldLogSharing, reportId, logContent);
+ await this.socketAPI.sendErrorReportToSocketDashboard(
+ stackTrace,
+ shouldLogSharing,
+ errorType,
+ reportId,
+ logContent
+ );
  } else {
  await this.coanaAPI.sendErrorReportToCoanaDashboard(
  apiKey3,
@@ -77135,13 +77141,7 @@ var DashboardAPI = class {
  if (this.socketMode) {
  return await this.socketAPI.getLatestBucketsSocket(subprojectPath, workspacePath);
  } else {
- return await this.coanaAPI.getBucketsForLastReport(
- subprojectPath,
- workspacePath,
- ecosystem,
- reportId,
- apiKey3
- );
+ return await this.coanaAPI.getBucketsForLastReport(subprojectPath, workspacePath, ecosystem, reportId, apiKey3);
  }
  }
  };
@@ -98730,8 +98730,10 @@ import { existsSync as existsSync8 } from "fs";
  import { resolve as resolve11 } from "path";

  // dist/whole-program-code-aware-vulnerability-scanner/js/setup-npm-dependencies-for-analysis.js
-
- import { mkdir as mkdir6
+ var import_lodash9 = __toESM(require_lodash(), 1);
+ import { link, mkdir as mkdir6 } from "fs/promises";
+ import { availableParallelism } from "os";
+ import { dirname as dirname10, join as join14, resolve as resolve10 } from "path";

  // ../../node_modules/.pnpm/@isaacs+fs-minipass@4.0.1/node_modules/@isaacs/fs-minipass/dist/esm/index.js
  import EE from "events";
@@ -103843,8 +103845,6 @@ var mtimeFilter = (opt) => {
  };

  // dist/whole-program-code-aware-vulnerability-scanner/js/setup-npm-dependencies-for-analysis.js
- var import_lodash9 = __toESM(require_lodash(), 1);
- import { availableParallelism } from "os";
  var { chunk } = import_lodash9.default;
  var ROOT_PACKAGE_METADATA_NAME = "UNIQUE_ROOT_PACKAGE_METADATA_NAME";
  async function setupDependenciesForAnalysis(subprojectDir, workspaceDir, directDependencies, artifactIdToArtifact) {
@@ -103947,6 +103947,8 @@ async function downloadDependenciesToDir(dependenciesToInstall, tmpDir) {
  tarFileName: resolve10(tmpDir, npmPackRes[idx].filename)
  })));
  } catch (e) {
+ logger.debug("Error downloading dependencies:", e.message);
+ logger.debug("Chunks in error:", chunk2.map((p) => `${p.name}@${p.version ?? "null"}_${p.resolutionString ?? "null"}${p.dependencies?.map((d) => `_${d}`).join("")}`).join(", "));
  const messageWithoutCommand = e.message.split("\n").slice(1).join("\n");
  const newFailedPackages = chunk2.filter((p) => messageWithoutCommand.includes(`/${p.name} `) || messageWithoutCommand.includes(`${p.name}@${p.version}`));
  if (newFailedPackages.length === 0) {
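
The second debug line added above serializes each package in the failing chunk as name@version_resolution followed by its dependencies, with "null" placeholders for missing fields. A small self-contained sketch of that format (the sample package below is made up):

    // Sketch only: reproduces the debug format used above on a fabricated entry.
    const describePackage = (p) =>
      `${p.name}@${p.version ?? "null"}_${p.resolutionString ?? "null"}${p.dependencies?.map((d) => `_${d}`).join("")}`;

    const sample = { name: "left-pad", version: "1.3.0", resolutionString: null, dependencies: ["ansi-styles"] };
    console.log(describePackage(sample)); // left-pad@1.3.0_null_ansi-styles
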
@@ -106057,8 +106059,9 @@ function detectedOccurrencesFromAPMatchesRuby(matches, pathPrefixToPackage) {
  if (cl.package === "<app>")
  cl.package = ROOT_NODE_STR;
  const prefixPath2 = `${cl.sourceLocation.filename.split("/lib/")[0]}/lib`;
-
-
+ const pkg = pathPrefixToPackage.get(prefixPath2);
+ if (pkg) {
+ cl.package = pkg;
  cl.sourceLocation.filename = cl.sourceLocation.filename.slice(prefixPath2.length + 1);
  }
  }
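
The replacement lines above only relabel a call location when its /lib prefix is actually present in pathPrefixToPackage, instead of rewriting it unconditionally. A self-contained sketch of the lookup with a hypothetical map entry and filename:

    // Sketch only: the map entry and filename are fabricated; the prefix/slice logic mirrors the hunk above.
    const pathPrefixToPackage = new Map([["vendor/rack-2.2.3/lib", "rack@2.2.3"]]);
    const cl = { package: "<app>", sourceLocation: { filename: "vendor/rack-2.2.3/lib/rack/request.rb" } };

    const prefixPath = `${cl.sourceLocation.filename.split("/lib/")[0]}/lib`;
    const pkg = pathPrefixToPackage.get(prefixPath);
    if (pkg) {
      cl.package = pkg;                                                                      // "rack@2.2.3"
      cl.sourceLocation.filename = cl.sourceLocation.filename.slice(prefixPath.length + 1);  // "rack/request.rb"
    }
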
@@ -107237,7 +107240,7 @@ import { mkdir as mkdir8, rm as rm7, readFile as readFile11, readdir as readdir5
  import { join as join19, relative as relative7 } from "path";
  import { pipeline as pipeline2 } from "stream/promises";
  var PRINT_ANALYSIS_COMMAND = false;
- var { uniqBy: uniqBy2, sortedUniq: sortedUniq2
+ var { uniqBy: uniqBy2, sortedUniq: sortedUniq2 } = import_lodash19.default;
  var RubyCodeAwareVulnerabilityScanner = class {
  projectDir;
  options;
@@ -107262,65 +107265,25 @@ var RubyCodeAwareVulnerabilityScanner = class {
  const packagesToInstall = uniqBy2(packagesToIncludeNames ? preInstalledDepInfos.filter((n) => packagesToIncludeNames.includes(n.packageName)) : preInstalledDepInfos, "packageName");
  logger.info(`Installing ${packagesToInstall.length} gems into the vendor directory`);
  await mkdir8(vendorDir, { recursive: true });
- const failedGems =
- await asyncFilter(packagesToInstall, async ({ packageName, version: version3 }) => {
+ const failedGems = await asyncFilter(packagesToInstall, async ({ packageName, version: version3 }) => {
  if (!version3) {
  logger.warn(`Skipping gem ${packageName} - no version information`);
-
- return false;
+ return true;
  }
  try {
- await
- return
+ await downloadAndExtractGem(packageName, version3, vendorDir);
+ return false;
  } catch (e) {
  logger.warn(`Failed to install gem ${packageName}@${version3}: ${e.message}`);
-
- return false;
+ return true;
  }
  }, 4);
- if (failedGems.length > 0)
+ if (failedGems.length > 0)
  logger.info(`Failed to install ${failedGems.length} gems: ${failedGems.join(", ")}`);
- }
  this.vendorDir = vendorDir;
  this.vendorDirWasCreated = true;
  logger.info("Done setting up vendor directory");
  }
- async downloadAndExtractGem(gemName, version3, vendorDir) {
- const gemDir = join19(vendorDir, `${gemName}-${version3}`);
- if (existsSync13(gemDir)) {
- logger.debug(`Gem ${gemName}@${version3} already extracted`);
- return;
- }
- const tempGemFile = join19(vendorDir, `${gemName}-${version3}.gem`);
- try {
- logger.debug(`Downloading gem ${gemName}@${version3}`);
- const response = await fetch(`https://rubygems.org/gems/${gemName}-${version3}.gem`);
- if (!response.ok) {
- throw new Error(`Failed to download gem: ${response.statusText}`);
- }
- if (!response.body) {
- throw new Error("Response body is null");
- }
- await pipeline2(response.body, createWriteStream3(tempGemFile));
- await mkdir8(gemDir, { recursive: true });
- logger.debug(`Extracting gem ${gemName}@${version3}`);
- await exec(["tar", "-xf", tempGemFile, "data.tar.gz"], gemDir);
- await exec(["tar", "-xzf", "data.tar.gz"], gemDir);
- await rm7(join19(gemDir, "data.tar.gz"));
- const hasValidStructure = [`${gemName}.gemspec`, "Rakefile"].some((f2) => existsSync13(join19(gemDir, f2)));
- if (!hasValidStructure) {
- throw new Error(`Invalid gem structure: Could not find ${gemName}.gemspec or Rakefile`);
- }
- await rm7(tempGemFile, { force: true });
- } catch (e) {
- await rm7(gemDir, { recursive: true, force: true });
- await rm7(tempGemFile, { force: true });
- throw e;
- }
- }
- getVendorDir() {
- return this.vendorDir;
- }
  async runAnalysis(vulns, heuristic, analyzesAllVulns, _experiment) {
  return await withTmpDirectory("ruby-analyzer-output", async (tmpDir) => {
  if (!this.vendorDir)
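
Note the inverted predicate in the rewritten callback above: returning true now marks a gem as failed (missing version, or a download/extract error), so failedGems collects exactly the failures. A minimal self-contained sketch of that pattern, assuming an asyncFilter that keeps items whose predicate resolves to true (the package's own asyncFilter also takes a concurrency argument, 4 above, which this sketch omits; installGem is a stand-in for downloadAndExtractGem):

    // Sketch only: a simplified, sequential asyncFilter and a stand-in installer.
    async function asyncFilterSketch(items, predicate) {
      const kept = [];
      for (const item of items) if (await predicate(item)) kept.push(item);
      return kept;
    }

    async function collectFailedGems(gems, installGem) {
      return asyncFilterSketch(gems, async ({ packageName, version }) => {
        if (!version) return true;        // no version information: counted as failed
        try {
          await installGem(packageName, version);
          return false;                   // installed successfully: filtered out
        } catch {
          return true;                    // download/extract failed: kept in failedGems
        }
      });
    }
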
@@ -107328,53 +107291,47 @@ var RubyCodeAwareVulnerabilityScanner = class {
  const analyzerPath = join19(COANA_REPOS_PATH(), "callgraph-reachability-analyzers", "packages", "cli", "dist", "index.js");
  if (!existsSync13(analyzerPath))
  throw new Error(`callgraph-reachability-analyzers is not cloned`);
- const packagesToAnalyze = heuristic.getPackagesToIncludeInAnalysis?.(vulns);
  const vulnAccPaths = sortedUniq2(vulns.flatMap((v) => v.vulnerabilityAccessPaths).sort());
  const vulnsOutputFile = join19(tmpDir, "vulns.json");
+ const reachedPackagesOutputFile = join19(tmpDir, "reached-packages.json");
  const diagnosticsOutputFile = join19(tmpDir, "diagnostics.json");
+ const packagesToAnalyze = heuristic.getPackagesToIncludeInAnalysis?.(vulns);
  const { loadPathsToPackageNames, failedToFindLoadPath } = await this.computeLoadPath(packagesToAnalyze ?? []);
  const loadPaths = Array.from(loadPathsToPackageNames.keys());
  if (failedToFindLoadPath.length > 0) {
  this.packagesExcludedUnrelatedToHeuristic.push(...failedToFindLoadPath.map((p) => p.packageName));
  logger.warn(`Failed to find package installation path for ${failedToFindLoadPath.map((p) => p.packageName).join(", ")}`);
  }
- const
-
-
-
-
-
-
-
-
-
- ...loadPaths,
- "--vulnerabilities",
- ...vulnAccPaths,
- "--output-vulnerabilities",
- vulnsOutputFile,
- "--",
- "."
- ];
+ const cmd = cmdt`
+ node --max-old-space-size=${this.options.memoryLimitInMB}
+ ${analyzerPath}
+ --timeout ${analyzesAllVulns ? this.options.timeoutInSeconds ?? 600 : 60}
+ --load-path ${loadPaths}
+ --vulnerabilities ${vulnAccPaths}
+ --output-diagnostics ${diagnosticsOutputFile}
+ --output-reached-packages ${reachedPackagesOutputFile}
+ --output-vulnerabilities ${vulnsOutputFile}
+ .`;
  if (PRINT_ANALYSIS_COMMAND)
  logger.info("Ruby analysis command:", cmd.join(" "));
  try {
  this.numberAnalysesRun++;
- await exec(cmd);
+ await exec(cmd, this.projectDir);
  const result = JSON.parse(await readFile11(vulnsOutputFile, "utf-8"));
  const relativeLoadPathsToPackageNames = new Map([...loadPathsToPackageNames.entries()].map(([k, v]) => [join19("vendor", relative7(this.vendorDir, k)), v]));
- const diagnostics = JSON.parse(await readFile11(diagnosticsOutputFile, "utf-8"));
+ const { timedOut, ...diagnostics } = JSON.parse(await readFile11(diagnosticsOutputFile, "utf-8"));
+ const reachedPackages = JSON.parse(await readFile11(reachedPackagesOutputFile, "utf-8"));
+ logger.debug("Reached packages: %O", reachedPackages);
  return {
  type: "success",
- diagnostics:
+ diagnostics: {
  ...diagnostics,
- timeout:
+ timeout: timedOut,
  aborted: false
- },
+ },
  reachedDependencies: true,
- terminatedEarly:
- affectedPurls:
- // TODO: add affected purls
+ terminatedEarly: timedOut,
+ affectedPurls: reachedPackages.map(({ name: name2, version: version3 }) => ({ type: "gem" /* GEM */, name: name2, version: version3 ?? void 0 })),
  computeDetectedOccurrences: detectedOccurrencesFromAPMatchesRuby(result, relativeLoadPathsToPackageNames)
  };
  } catch (e) {
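
The cmdt tagged template above replaces the hand-built argv array; its implementation is not part of this diff. A plausible sketch, assuming cmdt renders interpolated arrays as space-separated values (so --load-path and --vulnerabilities each receive a list of values) and then splits the whole command on whitespace; paths containing spaces would need whatever handling the real implementation provides:

    // Sketch only: an assumed approximation of cmdt, not the package's implementation.
    function cmdtSketch(strings, ...values) {
      const text = strings.reduce(
        (acc, chunk, i) => acc + chunk + (i < values.length ? [].concat(values[i]).join(" ") : ""),
        ""
      );
      return text.split(/\s+/).filter(Boolean);
    }

    const argv = cmdtSketch`
      node --max-old-space-size=${4096}
      analyzer.js
      --load-path ${["vendor/rack-2.2.3/lib", "vendor/rake-13.0.6/lib"]}
      .`;
    // ["node", "--max-old-space-size=4096", "analyzer.js",
    //  "--load-path", "vendor/rack-2.2.3/lib", "vendor/rake-13.0.6/lib", "."]
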
@@ -107406,11 +107363,10 @@ var RubyCodeAwareVulnerabilityScanner = class {
  if (this.vendorDirWasCreated) {
  for (const pkg of packagesToAnalyze) {
  const libPath = join19(this.vendorDir, `${pkg.packageName}-${pkg.version}`, "lib");
- if (existsSync13(libPath))
+ if (existsSync13(libPath))
  loadPathsToPackageNames.set(libPath, `${pkg.packageName}@${pkg.version}`);
-
+ else
  failedToFindLoadPath.push(pkg);
- }
  }
  return { loadPathsToPackageNames, failedToFindLoadPath };
  }
@@ -107420,30 +107376,66 @@ var RubyCodeAwareVulnerabilityScanner = class {
  for (const rubyVersion of rubyVersions) {
  const gemsDir = join19(bundlerGemsDir, rubyVersion, "gems");
  if (existsSync13(gemsDir)) {
+ const nameToEntry = /* @__PURE__ */ new Map();
+ for (const entry of await readdir5(gemsDir, { withFileTypes: true }))
+ if (entry.isDirectory()) {
+ const match2 = entry.name.match(/^([\w-_]+)-(\d+\.\d+\.\d+)/);
+ if (match2)
+ nameToEntry.set(`${match2[1]}-${match2[2]}`, entry.name);
+ }
  for (const pkg of packagesToAnalyze) {
- const
-
-
+ const entry = nameToEntry.get(`${pkg.packageName}-${pkg.version}`);
+ if (!entry)
+ continue;
+ const libDir = join19(gemsDir, entry, "lib");
+ if (existsSync13(libDir))
  loadPathsToPackageNames.set(libDir, `${pkg.packageName}@${pkg.version}`);
-
+ else
  failedToFindLoadPath.push(pkg);
- }
  }
  }
  }
- } else
+ } else
  for (const pkg of packagesToAnalyze) {
  const libPath = join19(this.vendorDir, `${pkg.packageName}-${pkg.version}`, "lib");
- if (existsSync13(libPath))
+ if (existsSync13(libPath))
  loadPathsToPackageNames.set(libPath, `${pkg.packageName}@${pkg.version}`);
-
+ else
  failedToFindLoadPath.push(pkg);
- }
  }
- }
  return { loadPathsToPackageNames, failedToFindLoadPath };
  }
  };
+ async function downloadAndExtractGem(gemName, version3, vendorDir) {
+ const gemDir = join19(vendorDir, `${gemName}-${version3}`);
+ if (existsSync13(gemDir)) {
+ logger.debug(`Gem ${gemName}@${version3} already extracted`);
+ return;
+ }
+ const tempGemFile = join19(vendorDir, `${gemName}-${version3}.gem`);
+ try {
+ logger.debug(`Downloading gem ${gemName}@${version3}`);
+ const response = await fetch(`https://rubygems.org/gems/${gemName}-${version3}.gem`);
+ if (!response.ok)
+ throw new Error(`Failed to download gem: ${response.statusText}`);
+ if (!response.body)
+ throw new Error("Response body is null");
+ await pipeline2(response.body, createWriteStream3(tempGemFile));
+ await mkdir8(gemDir, { recursive: true });
+ logger.debug(`Extracting gem ${gemName}@${version3}`);
+ await exec(["tar", "-xf", tempGemFile, "data.tar.gz"], gemDir);
+ await exec(["tar", "-xzf", "data.tar.gz"], gemDir);
+ await rm7(join19(gemDir, "data.tar.gz"));
+ const hasValidStructure = [`${gemName}.gemspec`, "Rakefile"].some((f2) => existsSync13(join19(gemDir, f2)));
+ if (!hasValidStructure)
+ throw new Error(`Invalid gem structure: Could not find ${gemName}.gemspec or Rakefile`);
+ await rm7(tempGemFile, { force: true });
+ } catch (e) {
+ await rm7(gemDir, { recursive: true, force: true });
+ await rm7(tempGemFile, { force: true });
+ throw e;
+ }
+ }

  // dist/analyzers/ruby-analyzer.js
  var { once: once5 } = import_lodash20.default;
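
The downloadAndExtractGem function added above works because a .gem file is itself a tar archive whose data.tar.gz member holds the gem's files. A self-contained sketch of the same flow using only Node built-ins (assumptions: Node 18+ fetch, a tar binary on PATH, and network access; the gem name and version in the usage comment are arbitrary examples, and this is not the package's own code):

    // Sketch only: mirrors the flow of the function above with standard-library helpers.
    import { createWriteStream } from "fs";
    import { mkdir, rm } from "fs/promises";
    import { pipeline } from "stream/promises";
    import { execFile } from "child_process";
    import { promisify } from "util";
    import { join, resolve } from "path";

    const run = promisify(execFile);

    async function fetchGem(name, version, vendorDir) {
      const gemDir = resolve(vendorDir, `${name}-${version}`);
      const gemFile = resolve(vendorDir, `${name}-${version}.gem`);
      const res = await fetch(`https://rubygems.org/gems/${name}-${version}.gem`);
      if (!res.ok) throw new Error(`Failed to download gem: ${res.statusText}`);
      await pipeline(res.body, createWriteStream(gemFile));               // save the .gem archive
      await mkdir(gemDir, { recursive: true });
      await run("tar", ["-xf", gemFile, "data.tar.gz"], { cwd: gemDir }); // pull data.tar.gz out of the .gem
      await run("tar", ["-xzf", "data.tar.gz"], { cwd: gemDir });         // unpack the gem's files
      await rm(join(gemDir, "data.tar.gz"));
      await rm(gemFile, { force: true });
      return gemDir;
    }

    // Usage (example gem): await fetchGem("rack", "2.2.3", "vendor");
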
Binary files (package/repos/coana-tech/alucard/alucard.jar and the four goana binaries listed above): contents not shown.