@coana-tech/cli 14.12.9 → 14.12.11

This diff shows the contents of publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
package/cli.mjs CHANGED
@@ -52321,7 +52321,7 @@ var require_cjs2 = __commonJS({
  var require_lib12 = __commonJS({
  "../../node_modules/.pnpm/write-file-atomic@5.0.1/node_modules/write-file-atomic/lib/index.js"(exports2, module2) {
  "use strict";
- module2.exports = writeFile10;
+ module2.exports = writeFile11;
  module2.exports.sync = writeFileSync4;
  module2.exports._getTmpname = getTmpname;
  module2.exports._cleanupOnExit = cleanupOnExit;
@@ -52446,7 +52446,7 @@ var require_lib12 = __commonJS({
  }
  }
  }
- async function writeFile10(filename, data2, options, callback) {
+ async function writeFile11(filename, data2, options, callback) {
  if (options instanceof Function) {
  callback = options;
  options = {};
@@ -83615,7 +83615,7 @@ var require_lockfile = __commonJS({
  }
  const file = _ref22;
  if (yield exists2(file)) {
- return readFile26(file);
+ return readFile27(file);
  }
  }
  return null;
@@ -83634,7 +83634,7 @@ var require_lockfile = __commonJS({
  })();
  let readJsonAndFile = exports3.readJsonAndFile = (() => {
  var _ref24 = (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* (loc) {
- const file = yield readFile26(loc);
+ const file = yield readFile27(loc);
  try {
  return {
  object: (0, (_map || _load_map()).default)(JSON.parse(stripBOM2(file))),
@@ -83776,7 +83776,7 @@ var require_lockfile = __commonJS({
  if (eol !== "\n") {
  data2 = data2.replace(/\n/g, eol);
  }
- yield writeFile10(path2, data2);
+ yield writeFile11(path2, data2);
  });
  return function writeFilePreservingEol2(_x30, _x31) {
  return _ref31.apply(this, arguments);
@@ -83788,7 +83788,7 @@ var require_lockfile = __commonJS({
  const file = (_path || _load_path()).default.join(dir, filename);
  const fileLink = (_path || _load_path()).default.join(dir, filename + "-link");
  try {
- yield writeFile10(file, "test");
+ yield writeFile11(file, "test");
  yield link(file, fileLink);
  } catch (err) {
  return false;
@@ -83874,7 +83874,7 @@ var require_lockfile = __commonJS({
  };
  })();
  exports3.copy = copy;
- exports3.readFile = readFile26;
+ exports3.readFile = readFile27;
  exports3.readFileRaw = readFileRaw;
  exports3.normalizeOS = normalizeOS;
  var _fs;
@@ -83938,7 +83938,7 @@ var require_lockfile = __commonJS({
  const lockQueue = exports3.lockQueue = new (_blockingQueue || _load_blockingQueue()).default("fs lock");
  const readFileBuffer = exports3.readFileBuffer = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.readFile);
  const open = exports3.open = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.open);
- const writeFile10 = exports3.writeFile = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.writeFile);
+ const writeFile11 = exports3.writeFile = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.writeFile);
  const readlink2 = exports3.readlink = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.readlink);
  const realpath2 = exports3.realpath = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.realpath);
  const readdir7 = exports3.readdir = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.readdir);
@@ -83972,7 +83972,7 @@ var require_lockfile = __commonJS({
  });
  });
  }
- function readFile26(loc) {
+ function readFile27(loc) {
  return _readFile(loc, "utf8").then(normalizeOS);
  }
  function readFileRaw(loc) {
@@ -111894,7 +111894,7 @@ var require_summary = __commonJS({
  exports2.summary = exports2.markdownSummary = exports2.SUMMARY_DOCS_URL = exports2.SUMMARY_ENV_VAR = void 0;
  var os_1 = __require("os");
  var fs_1 = __require("fs");
- var { access: access5, appendFile, writeFile: writeFile10 } = fs_1.promises;
+ var { access: access5, appendFile, writeFile: writeFile11 } = fs_1.promises;
  exports2.SUMMARY_ENV_VAR = "GITHUB_STEP_SUMMARY";
  exports2.SUMMARY_DOCS_URL = "https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary";
  var Summary = class {
@@ -111952,7 +111952,7 @@ var require_summary = __commonJS({
  return __awaiter(this, void 0, void 0, function* () {
  const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);
  const filePath = yield this.filePath();
- const writeFunc = overwrite ? writeFile10 : appendFile;
+ const writeFunc = overwrite ? writeFile11 : appendFile;
  yield writeFunc(filePath, this._buffer, { encoding: "utf8" });
  return this.emptyBuffer();
  });
@@ -190082,7 +190082,7 @@ var {
  } = import_index.default;

  // dist/index.js
- import { readFile as readFile25 } from "fs/promises";
+ import { readFile as readFile26 } from "fs/promises";

  // ../../node_modules/.pnpm/remeda@2.21.2/node_modules/remeda/dist/chunk-ANXBDSUI.js
  var s = { done: false, hasNext: false };
@@ -197891,6 +197891,7 @@ async function registerAnalysisMetadataSocket(subprojectPath, workspacePath, eco
  }
  async function getLatestBucketsSocket(subprojectPath, workspacePath) {
  try {
+ if (!process.env.SOCKET_REPO_NAME || !process.env.SOCKET_BRANCH_NAME) return void 0;
  const url2 = getSocketApiUrl("tier1-reachability-scan/latest-buckets");
  const params = {
  workspacePath,
@@ -206006,11 +206007,47 @@ var YarnFixingManager = class extends NpmEcosystemFixingManager {
  // ../fixing-management/src/fixing-management/npm/npm-ecosystem-socket-fixing-manager.ts
  import { dirname as dirname5, join as join8, relative as relative5 } from "path";
  import { existsSync as existsSync11 } from "fs";
+ import { readFile as readFile15, writeFile as writeFile6 } from "fs/promises";
+ function applyUpgradesToPackageJson(packageJsonContent, upgrades, rangeStyle) {
+ let modifiedContent = packageJsonContent;
+ for (const upgrade of upgrades) {
+ const escapedPackageName = upgrade.packageName.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
+ const depSection = upgrade.isDev ? "devDependencies" : "dependencies";
+ const pattern = new RegExp(`("${escapedPackageName}"\\s*:\\s*")([^"]*)"`, "g");
+ const sectionPattern = new RegExp(`"${depSection}"\\s*:\\s*\\{[^}]*"${escapedPackageName}"\\s*:`, "s");
+ if (!sectionPattern.test(modifiedContent)) {
+ continue;
+ }
+ modifiedContent = modifiedContent.replace(pattern, (match2, prefix, currentVersion, offset) => {
+ const beforeMatch = modifiedContent.substring(0, offset);
+ const lastDepSection = beforeMatch.lastIndexOf(`"${depSection}"`);
+ const lastOtherSection = Math.max(
+ beforeMatch.lastIndexOf('"dependencies"'),
+ beforeMatch.lastIndexOf('"devDependencies"'),
+ beforeMatch.lastIndexOf('"peerDependencies"'),
+ beforeMatch.lastIndexOf('"optionalDependencies"')
+ );
+ if (lastOtherSection > lastDepSection && beforeMatch.substring(lastOtherSection).includes(depSection === "dependencies" ? "devDependencies" : "dependencies")) {
+ return match2;
+ }
+ let versionString;
+ if (rangeStyle === "pin") {
+ versionString = upgrade.upgradeVersion;
+ } else {
+ const specifierMatch = currentVersion.match(/^([^\d]*)/);
+ const specifier = specifierMatch ? specifierMatch[1] : "";
+ versionString = specifier + upgrade.upgradeVersion;
+ }
+ return `${prefix}${versionString}"`;
+ });
+ }
+ return modifiedContent;
+ }
  var NpmSocketUpgradeManager = class {
  constructor(rootDir) {
  this.rootDir = rootDir;
  }
- async applySocketArtifactUpgrades(upgrades, artifacts) {
+ async applySocketArtifactUpgrades(upgrades, artifacts, rangeStyle) {
  const subprojectToUpgrade = await this.groupUpgradesBySubprojectAndWorkspace(upgrades, artifacts);
  for (const [subprojectDir, workspaceToUpgrade] of subprojectToUpgrade) {
  const fixingManager = getFixingManagerFromPackageManager(
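A minimal, hypothetical sketch of what the new applyUpgradesToPackageJson helper above does (the sample manifest, upgrade list, and console.log driver below are illustrative only and are not part of the package):

// Illustrative usage of the applyUpgradesToPackageJson helper added in this release.
// With rangeStyle === "pin" (the only value the CLI currently accepts) the existing
// specifier is dropped; any other value would preserve the "^" / "~" prefix.
const samplePackageJson = JSON.stringify(
  {
    dependencies: { lodash: "^4.17.20" },
    devDependencies: { vitest: "~1.5.0" }
  },
  null,
  2
);
const sampleUpgrades = [
  { packageName: "lodash", upgradeVersion: "4.17.21", isDev: false },
  { packageName: "vitest", upgradeVersion: "1.6.0", isDev: true }
];
// Prints the manifest with "lodash": "4.17.21" and "vitest": "1.6.0".
console.log(applyUpgradesToPackageJson(samplePackageJson, sampleUpgrades, "pin"));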
@@ -206018,7 +206055,13 @@ var NpmSocketUpgradeManager = class {
  this.rootDir,
  subprojectDir
  );
- this.applySecurityFixesForSocketArtifacts(fixingManager, artifacts, workspaceToUpgrade);
+ this.applySecurityFixesForSocketArtifacts(
+ subprojectDir,
+ fixingManager,
+ artifacts,
+ workspaceToUpgrade,
+ rangeStyle
+ );
  }
  }
  async groupUpgradesBySubprojectAndWorkspace(upgrades, artifacts) {
@@ -206061,7 +206104,7 @@ var NpmSocketUpgradeManager = class {
  }
  return subprojectToUpgrade;
  }
- async applySecurityFixesForSocketArtifacts(fixingManager, artifacts, workspaceTofixes) {
+ async applySecurityFixesForSocketArtifacts(subprojectDir, fixingManager, artifacts, workspaceTofixes, rangeStyle) {
  for (const [workspacePath, upgrades] of workspaceTofixes.entries()) {
  const upgradesTransformed = upgrades.map((upgrade) => ({
  dependencyName: getNameFromNamespaceAndName(
@@ -206076,13 +206119,20 @@ var NpmSocketUpgradeManager = class {
  await fixingManager.applySecurityFixesSpecificPackageManager(upgradesTransformed);
  const upgradesToDirectDependencies = upgrades.filter((upgrade) => artifacts[upgrade.idx].direct);
  if (upgradesToDirectDependencies.length === 0) continue;
- for (const isDev of [false, true]) {
- const upgradesOfDirectDependenciesOfType = upgradesToDirectDependencies.filter((upgrade) => artifacts[upgrade.idx].dev === isDev).map(
- ({ idx, upgradeVersion }) => `${artifacts[idx].namespace ? `${artifacts[idx].namespace}/` : ""}${artifacts[idx].name}@${upgradeVersion}`
- );
- if (upgradesOfDirectDependenciesOfType.length === 0) continue;
- await fixingManager.installSpecificPackages(workspacePath, isDev, upgradesOfDirectDependenciesOfType);
- }
+ const packageJsonPath = join8(subprojectDir, workspacePath, "package.json");
+ const packageJsonContent = await readFile15(packageJsonPath, "utf-8");
+ const upgradesWithPackageNames = upgradesToDirectDependencies.map(
+ (upgrade) => {
+ const artifact = artifacts[upgrade.idx];
+ return {
+ packageName: artifact.namespace ? `${artifact.namespace}/${artifact.name}` : artifact.name,
+ upgradeVersion: upgrade.upgradeVersion,
+ isDev: artifact.dev ?? false
+ };
+ }
+ );
+ const modifiedContent = applyUpgradesToPackageJson(packageJsonContent, upgradesWithPackageNames, rangeStyle);
+ await writeFile6(packageJsonPath, modifiedContent, "utf-8");
  }
  await fixingManager.finalizeFixes();
  }
@@ -206115,7 +206165,7 @@ var RushFixingManager = class {
  };

  // ../fixing-management/src/fixing-management/nuget/nuget-fixing-manager.ts
- import { readFile as readFile15, writeFile as writeFile6 } from "fs/promises";
+ import { readFile as readFile16, writeFile as writeFile7 } from "fs/promises";
  import { join as join9 } from "path";

  // ../utils/src/nuget-utils.ts
@@ -206218,16 +206268,16 @@ var NugetFixingManager = class {
  if (projectFiles.length !== 1)
  throw new Error("Applying fixes to workspaces with more than 1 project file currently not supported");
  const projectFilePath = join9(this.getAbsWsPath(wsPath), projectFiles[0]);
- const initialProjectFile = await readFile15(projectFilePath, "utf-8");
+ const initialProjectFile = await readFile16(projectFilePath, "utf-8");
  const initialLockFile = await this.restoreWorkspaceAndParseLockFile(wsPath);
  await applySeries(fixesWithId, async ({ fixId, vulnerabilityFixes }) => {
  await this.applySecurityFixesForWorkspace(wsPath, projectFilePath, vulnerabilityFixes, dependencyTree);
  signalFixApplied2?.(fixId, this.subprojectPath, wsPath, vulnerabilityFixes);
  });
- const finalProjectFile = await readFile15(projectFilePath, "utf-8");
- const finalLockFile = JSON.parse(await readFile15(this.getLockFilePath(wsPath), "utf-8"));
- await writeFile6(projectFilePath, initialProjectFile);
- await writeFile6(this.getLockFilePath(wsPath), JSON.stringify(initialLockFile, null, 2));
+ const finalProjectFile = await readFile16(projectFilePath, "utf-8");
+ const finalLockFile = JSON.parse(await readFile16(this.getLockFilePath(wsPath), "utf-8"));
+ await writeFile7(projectFilePath, initialProjectFile);
+ await writeFile7(this.getLockFilePath(wsPath), JSON.stringify(initialLockFile, null, 2));
  return { projectFile: finalProjectFile, lockFile: finalLockFile };
  }
  );
@@ -206237,8 +206287,8 @@ var NugetFixingManager = class {
  const projectFiles = fixingInfo.projectFiles[wsPath];
  if (projectFiles.length !== 1)
  throw new Error("Applying fixes to workspaces with more than 1 project file currently not supported");
- await writeFile6(join9(this.getAbsWsPath(wsPath), projectFiles[0]), finalProjectFile);
- await writeFile6(this.getLockFilePath(wsPath), JSON.stringify(finalLockFile, null, 2));
+ await writeFile7(join9(this.getAbsWsPath(wsPath), projectFiles[0]), finalProjectFile);
+ await writeFile7(this.getLockFilePath(wsPath), JSON.stringify(finalLockFile, null, 2));
  });
  if (solutionFiles) {
  for (const solutionFile of solutionFiles) {
@@ -206257,7 +206307,7 @@ var NugetFixingManager = class {
  }
  }
  async applySecurityFixesForWorkspace(wsPath, projectFilePath, vulnFixes, dependencyTree) {
- const initialProjectFile = await readFile15(projectFilePath, "utf-8");
+ const initialProjectFile = await readFile16(projectFilePath, "utf-8");
  const initialLockFile = await this.restoreWorkspaceAndParseLockFile(wsPath);
  const typeCache = new Cache();
  const requestedCache = new Cache();
@@ -206304,7 +206354,7 @@ var NugetFixingManager = class {
  details.requested = requestedRange;
  });
  });
- await writeFile6(projectFilePath, initialProjectFile);
+ await writeFile7(projectFilePath, initialProjectFile);
  await applySeries(vulnFixes, async ({ dependencyIdentifier, dependencyName }) => {
  await applySeries(
  dependencyTree.transitiveDependencies[dependencyIdentifier].frameworks?.filter(
@@ -206330,7 +206380,7 @@ var NugetFixingManager = class {
  }
  );
  });
- await writeFile6(this.getLockFilePath(wsPath), JSON.stringify(lockFileWithFixes, null, 2));
+ await writeFile7(this.getLockFilePath(wsPath), JSON.stringify(lockFileWithFixes, null, 2));
  }
  async addPackage(packageName, version3, framework, wsPath) {
  const dir = join9(this.rootDir, this.subprojectPath, wsPath);
@@ -206347,7 +206397,7 @@ var NugetFixingManager = class {
  async restoreWorkspaceAndParseLockFile(wsPath) {
  const succeeded = await execAndLogOnFailure("dotnet restore --use-lock-file", this.getAbsWsPath(wsPath));
  if (!succeeded) throw new Error(`Error applying fix - could not restore project ${this.subprojectPath}/${wsPath}`);
- return JSON.parse(await readFile15(this.getLockFilePath(wsPath), "utf-8"));
+ return JSON.parse(await readFile16(this.getLockFilePath(wsPath), "utf-8"));
  }
  getLockFilePath(wsPath, lockFileName = "packages.lock.json") {
  return join9(this.getAbsWsPath(wsPath), lockFileName);
@@ -206458,10 +206508,10 @@ async function applySecurityFixes(packageManagerName, rootDir, subprojectPath, o
  otherModulesCommunicator
  ).applySecurityFixes(fixes, fixingInfo, signalFixApplied2);
  }
- async function applySocketUpgrades(ecosystem, rootDir, upgrades, artifacts) {
+ async function applySocketUpgrades(ecosystem, rootDir, upgrades, artifacts, rangeStyle) {
  const C2 = socketUpgradeManagerConstructors[ecosystem];
  if (!C2) return;
- await new C2(rootDir).applySocketArtifactUpgrades(upgrades, artifacts);
+ await new C2(rootDir).applySocketArtifactUpgrades(upgrades, artifacts, rangeStyle);
  }

  // dist/cli-apply-fix.js
@@ -207105,7 +207155,7 @@ function utilFormatter2() {
  }

  // ../web-compat-utils/dist/logger-singleton.js
- import { readFile as readFile16 } from "fs/promises";
+ import { readFile as readFile17 } from "fs/promises";
  var CLILogger2 = class {
  logger = console;
  writeStream;
@@ -207185,7 +207235,7 @@ var CLILogger2 = class {
  await this.finish();
  let logContent;
  try {
- logContent = await readFile16(logFilePath, "utf-8");
+ logContent = await readFile17(logFilePath, "utf-8");
  } catch (e) {
  console.error("Error reading log file", e);
  }
@@ -207230,13 +207280,13 @@ async function detectVariantMaven(projectDir) {
  // ../docker-management/src/maven/gradle-version-detector.ts
  import { existsSync as existsSync13 } from "fs";
  import { join as join14 } from "path";
- import { readFile as readFile17 } from "fs/promises";
+ import { readFile as readFile18 } from "fs/promises";
  async function detectVariantGradle(projectDir) {
  return sanitizeJvmVariant("GRADLE", projectDir, await detect(projectDir));
  }
  async function detect(projectDir) {
  const gradleWrapperPropertiesPath = join14(projectDir, "gradle", "wrapper", "gradle-wrapper.properties");
- const gradleWrapperProperties = existsSync13(gradleWrapperPropertiesPath) ? (await readFile17(gradleWrapperPropertiesPath, "utf-8")).split("\n").map((line) => line.trim()).filter((line) => !line.startsWith("#")).filter((line) => line) : void 0;
+ const gradleWrapperProperties = existsSync13(gradleWrapperPropertiesPath) ? (await readFile18(gradleWrapperPropertiesPath, "utf-8")).split("\n").map((line) => line.trim()).filter((line) => !line.startsWith("#")).filter((line) => line) : void 0;
  if (!gradleWrapperProperties) return void 0;
  const distributionUrlRegex = /.*gradle-(\d+(\.\d+(\.\d+)?)?)/;
  for (const prop2 of gradleWrapperProperties) {
@@ -207252,13 +207302,13 @@ async function detect(projectDir) {
  // ../docker-management/src/maven/sbt-version-detector.ts
  import { existsSync as existsSync14 } from "fs";
  import { join as join15 } from "path";
- import { readFile as readFile18 } from "fs/promises";
+ import { readFile as readFile19 } from "fs/promises";
  async function detectVariantSbt(projectDir) {
  return sanitizeJvmVariant("SBT", projectDir, await detect2(projectDir));
  }
  async function detect2(projectDir) {
  const sbtBuildPropertiesPath = join15(projectDir, "project", "build.properties");
- const sbtBuildProperties = existsSync14(sbtBuildPropertiesPath) ? (await readFile18(sbtBuildPropertiesPath, "utf-8")).split("\n").map((line) => line.trim()).filter((line) => !line.startsWith("#")).filter((line) => line) : void 0;
+ const sbtBuildProperties = existsSync14(sbtBuildPropertiesPath) ? (await readFile19(sbtBuildPropertiesPath, "utf-8")).split("\n").map((line) => line.trim()).filter((line) => !line.startsWith("#")).filter((line) => line) : void 0;
  if (!sbtBuildProperties) return void 0;
  for (const prop2 of sbtBuildProperties) {
  const [key, value] = prop2.split("=");
@@ -207372,7 +207422,7 @@ async function findReachabilityAnalyzersDockerImage(ecosystem) {
  // ../other-modules-communicator/src/other-modules-communicator.ts
  var import_lodash11 = __toESM(require_lodash(), 1);
  import { rmSync } from "fs";
- import { mkdir, readFile as readFile19, writeFile as writeFile7 } from "fs/promises";
+ import { mkdir, readFile as readFile20, writeFile as writeFile8 } from "fs/promises";
  import { platform } from "os";
  import { join as join19, posix as posix2, relative as relative8, sep as sep3 } from "path";

@@ -207823,7 +207873,7 @@ var OtherModulesCommunicator = class {
  COANA_API_KEY: this.apiKey.type === "present" ? this.apiKey.value : ""
  }
  );
- return JSON.parse(await readFile19(outputFilePathThisProcess, "utf-8")).result;
+ return JSON.parse(await readFile20(outputFilePathThisProcess, "utf-8")).result;
  }
  async runReachabilityAnalyzerCommand(commandName, ecosystem, subprojectPath, workspacePath, args2, env) {
  const tmpDir = await this.getTmpDirForSubproject(subprojectPath);
@@ -207884,7 +207934,7 @@ var OtherModulesCommunicator = class {
  [...args2, "-o", outputFilePathOtherProcess],
  env
  );
- return JSON.parse(await readFile19(outputFilePathThisProcess, "utf-8")).result;
+ return JSON.parse(await readFile20(outputFilePathThisProcess, "utf-8")).result;
  }
  async runInDocker(ecosystem, image, entryPoint, commandName, args2, subprojectPath, tmpDir, env = process.env) {
  if (!await pullDockerImage(image)) return false;
@@ -207913,7 +207963,7 @@ var OtherModulesCommunicator = class {
  const providerFileName = "provider.json";
  const providerFileThisProcess = join19(tmpDir, providerFileName);
  const providerFileOtherProcess = this.options.runWithoutDocker ? providerFileThisProcess : posix2.join(TMP_DIR_IN_DOCKER, providerFileName);
- await writeFile7(providerFileThisProcess, JSON.stringify(providedOptions.provider));
+ await writeFile8(providerFileThisProcess, JSON.stringify(providedOptions.provider));
  return ["--provider", providerFileOtherProcess];
  } else {
  return ["--as-provider"];
@@ -207957,7 +208007,7 @@ var OtherModulesCommunicator = class {
  const inputFileName = `${v4_default()}-runReachabilityAnalysis-input.json`;
  const inputFileThisProcess = join19(tmpDir, inputFileName);
  const inputFileOtherProcess = this.options.runWithoutDocker ? inputFileThisProcess : posix2.join(TMP_DIR_IN_DOCKER, inputFileName);
- await writeFile7(
+ await writeFile8(
  inputFileThisProcess,
  JSON.stringify({
  workspaceData,
@@ -208134,7 +208184,7 @@ function t3(...r2) {
  }

  // ../utils/src/dashboard-api/coana-api.ts
- import { writeFile as writeFile8 } from "fs/promises";
+ import { writeFile as writeFile9 } from "fs/promises";
  var import_artifact = __toESM(require_artifact_client2(), 1);
  var coanaAPI = process.env.PUBLIC_API_URL ?? "https://app.coana.tech/api/v1";
  var axiosClient2 = getAxiosClient();
@@ -208264,7 +208314,7 @@ async function sendToDashboard(report, writeReportToFile, reportId, apiKey) {
  try {
  if (writeReportToFile) {
  logger.info("Writing report to dashboard-report.json");
- await writeFile8("dashboard-report.json", JSON.stringify(report, null, 2));
+ await writeFile9("dashboard-report.json", JSON.stringify(report, null, 2));
  if (process.env.GITHUB_ACTIONS === "true") {
  logger.info("uploading dashboard-report.json as an artifact");
  (0, import_artifact.create)().uploadArtifact("dashboard-report", ["dashboard-report.json"], process.cwd());
@@ -209342,12 +209392,12 @@ import { join as join22, relative as relative9, resolve as resolve20 } from "pat

  // ../project-management/src/project-management/ecosystem-management/ecosystem-specs.ts
  import { existsSync as existsSync18 } from "fs";
- import { readdir as readdir5, readFile as readFile22 } from "fs/promises";
+ import { readdir as readdir5, readFile as readFile23 } from "fs/promises";
  import { join as join21, sep as sep4 } from "path";

  // ../utils/src/pip-utils.ts
  import { existsSync as existsSync17 } from "fs";
- import { readFile as readFile21 } from "fs/promises";
+ import { readFile as readFile22 } from "fs/promises";
  import { resolve as resolve19 } from "path";
  import util4 from "util";

@@ -209356,7 +209406,7 @@ var import_lodash13 = __toESM(require_lodash(), 1);
  var import_semver4 = __toESM(require_semver2(), 1);
  import { execFileSync as execFileSync2 } from "child_process";
  import { constants as constants2 } from "fs";
- import { access as access4, readFile as readFile20 } from "fs/promises";
+ import { access as access4, readFile as readFile21 } from "fs/promises";
  import { join as join20, resolve as resolve18 } from "path";
  import util3 from "util";
  var { once: once7 } = import_lodash13.default;
@@ -209365,7 +209415,7 @@ var hasPyenv = once7(async () => !(await execNeverFail("which pyenv")).error);

  // ../utils/src/pip-utils.ts
  async function isSetupPySetuptools(file) {
- const content = await readFile21(file, "utf-8");
+ const content = await readFile22(file, "utf-8");
  return content.includes("setup(") && (/^\s*from\s+(?:setuptools|distutils\.core)\s+import\s+.*setup/m.test(content) || /^\s*import\s+(?:setuptools|distutils\.core)/m.test(content));
  }

@@ -209447,7 +209497,7 @@ function packageManagerIfPackageJSONExistsAndValid(packageManager) {
  if (!existsSync18(join21(projectDir, "package.json"))) return void 0;
  const packageJSONPath = join21(projectDir, "package.json");
  try {
- JSON.parse(await readFile22(packageJSONPath, "utf-8"));
+ JSON.parse(await readFile23(packageJSONPath, "utf-8"));
  return packageManager;
  } catch (e) {
  throw new InvalidProjectFileError(projectDir, "package.json");
@@ -209708,7 +209758,7 @@ ${detailsString}` : ""}`;

  // dist/cli-core.js
  import { writeFileSync as writeFileSync3 } from "fs";
- import { mkdir as mkdir2, writeFile as writeFile9 } from "fs/promises";
+ import { mkdir as mkdir2, writeFile as writeFile10 } from "fs/promises";

  // ../../node_modules/.pnpm/kleur@4.1.5/node_modules/kleur/index.mjs
  var FORCE_COLOR;
@@ -210133,7 +210183,7 @@ var DEFAULT_REPORT_FILENAME_BASE = "coana-report";

  // dist/internal/exclude-dirs-from-configuration-files.js
  import { existsSync as existsSync19 } from "fs";
- import { readFile as readFile23 } from "fs/promises";
+ import { readFile as readFile24 } from "fs/promises";
  import { basename as basename6, resolve as resolve22 } from "path";
  var import_yaml2 = __toESM(require_dist11(), 1);
  async function inferExcludeDirsFromConfigurationFiles(rootWorkingDir) {
@@ -210147,7 +210197,7 @@ async function inferExcludeDirsFromConfigurationFiles(rootWorkingDir) {
  }
  async function inferExcludeDirsFromSocketConfig(socketConfigFile) {
  try {
- const config3 = (0, import_yaml2.parse)(await readFile23(socketConfigFile, "utf8"));
+ const config3 = (0, import_yaml2.parse)(await readFile24(socketConfigFile, "utf8"));
  const version3 = config3.version;
  const ignorePaths = config3[version3 === 1 ? "ignore" : "projectIgnorePaths"];
  if (!ignorePaths)
@@ -210722,7 +210772,7 @@ function toSocketFactsSocketDependencyTree(artifacts, vulnerabilities, tier1Reac
  }

  // dist/internal/vulnerability-scanning.js
- import { readFile as readFile24 } from "fs/promises";
+ import { readFile as readFile25 } from "fs/promises";

  // ../security-auditor/security-auditor-builder/src/mongo-connection.ts
  var import_mongodb = __toESM(require_lib30(), 1);
@@ -225591,7 +225641,7 @@ async function scanForVulnerabilities(dependencyTree, offlineVulnerabilityScanne
  }
  async function offlineScan(dependencyTree, offlineVulnerabilityScannerDBPath) {
  logger.info("using offline vulnerability scanner db");
- const offlineVulnerabilityScannerDB = JSON.parse(await readFile24(offlineVulnerabilityScannerDBPath, "utf-8"));
+ const offlineVulnerabilityScannerDB = JSON.parse(await readFile25(offlineVulnerabilityScannerDBPath, "utf-8"));
  const { ecosystemToUrlToVulnerabilityDetails, vulnerabilityDatabase } = offlineVulnerabilityScannerDB;
  const coanaSupportedVulnerabilitiesLoader = CoanaSupportedVulnerabilitiesLoader.create(ecosystemToUrlToVulnerabilityDetails);
  const vulnerabilityAccessPathLoader = CoanaSupportedVulnerabilitiesLoader.create(ecosystemToUrlToVulnerabilityDetails);
@@ -225609,7 +225659,7 @@ async function onlineScan(dependencyTree, apiKey, timeout) {
  }

  // dist/version.js
- var version2 = "14.12.9";
+ var version2 = "14.12.11";

  // dist/cli-core.js
  var { mapValues, omit, partition, pick } = import_lodash15.default;
@@ -225795,7 +225845,7 @@ var CliCore = class {
  }
  const socketReport = toSocketFactsSocketDependencyTree(artifacts, vulnsWithResults, this.reportId);
  const outputFile = resolve23(this.options.socketMode);
- await writeFile9(outputFile, JSON.stringify(socketReport, null, 2));
+ await writeFile10(outputFile, JSON.stringify(socketReport, null, 2));
  logger.info(kleur_default.green(`Socket report written to: ${outputFile}`));
  }
  async shareErrorLogWithBackend(e, shouldLogSharing) {
@@ -225813,7 +225863,7 @@ var CliCore = class {
  }
  const socketReport = toSocketFacts(report, this.reportDependencyTrees, subPjToWsPathToDirectDependencies);
  const outputFile = resolve23(this.options.socketMode);
- await writeFile9(outputFile, JSON.stringify(socketReport, null, 2));
+ await writeFile10(outputFile, JSON.stringify(socketReport, null, 2));
  logger.info(kleur_default.green(`Socket report written to: ${outputFile}`));
  return;
  }
@@ -226296,6 +226346,9 @@ import { join as join26, relative as relative12 } from "node:path";
  var import_packageurl_js2 = __toESM(require_packageurl_js(), 1);
  var ECOSYSTEMS_WITH_SOCKET_UPGRADES = ["NPM", "MAVEN"];
  async function upgradePurl(path2, upgrades, options, logFile, cliFixRunId) {
+ if (options.rangeStyle && options.rangeStyle !== "pin") {
+ throw new Error('Range style must be "pin"');
+ }
  logger.initWinstonLogger(options.debug);
  logger.silent = options.silent;
  let cliRunId = cliFixRunId;
@@ -226341,7 +226394,11 @@ ${upgrades.map((upgrade) => ` ${upgrade.purl} -> ${upgrade.upgradeVersion}`).joi
  });
  });
  for (const [ecosystem, upgrades2] of Object.entries(ecosystemToSocketArtifactUpgrades)) {
- await applySocketUpgrades(ecosystem, path2, upgrades2, artifacts);
+ if (options.rangeStyle && ecosystem !== "NPM") {
+ logger.warn(`Range style is only supported for npm, skipping upgrades for ${ecosystem}`);
+ continue;
+ }
+ await applySocketUpgrades(ecosystem, path2, upgrades2, artifacts, options.rangeStyle);
  }
  if (upgradePurlRunId) {
  await getSocketAPI().finalizeUpgradePurlRun(upgradePurlRunId, "success");
@@ -226474,7 +226531,8 @@ async function computeFixesAndUpgradePurls(path2, options, logFile) {
  runWithoutDocker: options.runWithoutDocker,
  manifestsTarHash: options.manifestsTarHash,
  concurrency: "1",
- globPattern: options.globPattern
+ globPattern: options.globPattern,
+ rangeStyle: options.rangeStyle
  }, autofixRunId) ?? "fixed-all";
  if (autofixRunId) {
  await getSocketAPI().finalizeAutofixRun(autofixRunId, ghsasFailedToFix.length === 0 && applyFixesStatus === "fixed-all" ? "fixed-all" : ghsasFailedToFix.length === Object.keys(ghsaToVulnerableArtifactIdsToApply).length || applyFixesStatus === "fixed-none" ? "fixed-none" : "fixed-some");
@@ -226595,7 +226653,7 @@ applyFixes.name("apply-fixes").argument("<path>", "File system path to the folde
  await applyFix(path2, fixIds, options);
  }).configureHelp({ sortOptions: true });
  var upgradePurls = new Command();
- upgradePurls.name("upgrade-purls").argument("<path>", "File system path to the folder containing the project").argument("<specs...>", "Package upgrade specifications in the format 'purl -> newVersion' (e.g., 'pkg:maven/io.micrometer/micrometer-core@1.10.9 -> 1.15.0')").option("-g, --glob <pattern>", "Glob pattern to filter workspaces by absolute file path").option("-c, --concurrency <concurrency>", "Set the maximum number of concurrent reachability analysis runs. It's recommended to choose a concurrency level that ensures that each analysis run has at least the --memory-limit amount of memory available.", "1").option("-d, --debug", "Enable debug logging", false).option("-s, --silent", "Silence all debug/warning output", false).addOption(new Option("--run-without-docker", "Run package managers without using docker").default(process.env.RUN_WITHOUT_DOCKER === "true").hideHelp()).addOption(new Option("--socket-mode", "Use Socket for computing dependency trees").default(process.env.SOCKET_MODE === "true").hideHelp()).version(version2).action(async (path2, specs2, options) => {
+ upgradePurls.name("upgrade-purls").argument("<path>", "File system path to the folder containing the project").argument("<specs...>", "Package upgrade specifications in the format 'purl -> newVersion' (e.g., 'pkg:maven/io.micrometer/micrometer-core@1.10.9 -> 1.15.0')").option("-g, --glob <pattern>", "Glob pattern to filter workspaces by absolute file path").option("-c, --concurrency <concurrency>", "Set the maximum number of concurrent reachability analysis runs. It's recommended to choose a concurrency level that ensures that each analysis run has at least the --memory-limit amount of memory available.", "1").option("-d, --debug", "Enable debug logging", false).option("-s, --silent", "Silence all debug/warning output", false).option("--range-style <style>", 'Range style to use for the output. Currently only "pin" is supported and it only works for npm.').addOption(new Option("--run-without-docker", "Run package managers without using docker").default(process.env.RUN_WITHOUT_DOCKER === "true").hideHelp()).addOption(new Option("--socket-mode", "Use Socket for computing dependency trees").default(process.env.SOCKET_MODE === "true").hideHelp()).version(version2).action(async (path2, specs2, options) => {
  process.env.DOCKER_IMAGE_TAG ??= version2;
  await withTmpDirectory("upgrade-purls", async (tmpDir) => {
  const logFile = join27(tmpDir, "upgrade-purls.log");
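For orientation, an illustrative invocation of the flag added above (the installed binary name is assumed here to be coana; the spec format comes from the command's own help text): coana upgrade-purls . "pkg:npm/lodash@4.17.20 -> 4.17.21" --range-style pin. Any value other than "pin" now causes the command to throw, and when a range style is given, upgrades for non-npm ecosystems are skipped with a warning.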
@@ -226613,8 +226671,11 @@ upgradePurls.name("upgrade-purls").argument("<path>", "File system path to the f
  });
  }).configureHelp({ sortOptions: true });
  var computeFixesAndUpgradePurlsCmd = new Command();
- computeFixesAndUpgradePurlsCmd.name("compute-fixes-and-upgrade-purls").argument("<path>", "File system path to the folder containing the project").option("-a, --apply-fixes-to <ghsas...>", 'GHSA IDs to compute fixes for. Use "all" to compute fixes for all vulnerabilities.', []).option("--dry-run", "Show what changes would be made without actually making them", false).option("-g, --glob <pattern>", "Glob pattern to filter workspaces by absolute file path").option("-d, --debug", "Enable debug logging", false).option("-s, --silent", "Silence all debug/warning output", false).addOption(new Option("--run-without-docker", "Run package managers without using docker").default(process.env.RUN_WITHOUT_DOCKER === "true").hideHelp()).addOption(new Option("--manifests-tar-hash <hash>", "Hash of the tarball containing all manifest files already uploaded to Socket. If provided, Socket will be used for computing dependency trees.").hideHelp()).version(version2).action(async (path2, options) => {
+ computeFixesAndUpgradePurlsCmd.name("compute-fixes-and-upgrade-purls").argument("<path>", "File system path to the folder containing the project").option("-a, --apply-fixes-to <ghsas...>", 'GHSA IDs to compute fixes for. Use "all" to compute fixes for all vulnerabilities.', []).option("--dry-run", "Show what changes would be made without actually making them", false).option("-g, --glob <pattern>", "Glob pattern to filter workspaces by absolute file path").option("-d, --debug", "Enable debug logging", false).option("-s, --silent", "Silence all debug/warning output", false).option("--range-style <style>", 'Range style to use for the output. Currently only "pin" is supported and it only works for npm.').addOption(new Option("--run-without-docker", "Run package managers without using docker").default(process.env.RUN_WITHOUT_DOCKER === "true").hideHelp()).addOption(new Option("--manifests-tar-hash <hash>", "Hash of the tarball containing all manifest files already uploaded to Socket. If provided, Socket will be used for computing dependency trees.").hideHelp()).version(version2).action(async (path2, options) => {
  process.env.DOCKER_IMAGE_TAG ??= version2;
+ if (options.rangeStyle && options.rangeStyle !== "pin") {
+ throw new Error('Range style must be "pin"');
+ }
  await withTmpDirectory("compute-fixes-and-upgrade-purls", async (tmpDir) => {
  const logFile = join27(tmpDir, "compute-fixes-and-upgrade-purls.log");
  logger.initWinstonLogger(options.debug, logFile);
@@ -226624,7 +226685,7 @@ computeFixesAndUpgradePurlsCmd.name("compute-fixes-and-upgrade-purls").argument(
  var compareReportsCommand = new Command();
  compareReportsCommand.name("compare-reports").argument("<baselineReportPath>", "Path to the baseline report").argument("<newReportPath>", "Path to the new report").option("--api-key <key>", "Set the Coana dashboard API key.").option("-d, --debug", "Enable debug logging", false).option("--no-pr-comment", "Disable pull request comments (only relevant when run from a PR)", true).option("--no-block", "Do not fail with a non-zero exit code when new reachable vulnerabilities are detected", true).option("--ignore-undeterminable-reachability", "Ignore vulnerabilities with undeterminable reachability", false).action(async (baselineReportPath, newReportPath, options) => {
  async function readReport(reportPath) {
- return JSON.parse(await readFile25(reportPath, "utf-8"));
+ return JSON.parse(await readFile26(reportPath, "utf-8"));
  }
  const baselineReport = await readReport(baselineReportPath);
  const newReport = await readReport(newReportPath);
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@coana-tech/cli",
- "version": "14.12.9",
+ "version": "14.12.11",
  "description": "Coana CLI",
  "type": "module",
  "bin": {
@@ -73366,6 +73366,7 @@ async function registerAnalysisMetadataSocket(subprojectPath, workspacePath, eco
  }
  async function getLatestBucketsSocket(subprojectPath, workspacePath) {
  try {
+ if (!process.env.SOCKET_REPO_NAME || !process.env.SOCKET_BRANCH_NAME) return void 0;
  const url2 = getSocketApiUrl("tier1-reachability-scan/latest-buckets");
  const params = {
  workspacePath,