@coana-tech/cli 14.12.10 → 14.12.11

This diff shows the publicly available contents of the two package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
package/cli.mjs CHANGED
@@ -52321,7 +52321,7 @@ var require_cjs2 = __commonJS({
  var require_lib12 = __commonJS({
  "../../node_modules/.pnpm/write-file-atomic@5.0.1/node_modules/write-file-atomic/lib/index.js"(exports2, module2) {
  "use strict";
- module2.exports = writeFile10;
+ module2.exports = writeFile11;
  module2.exports.sync = writeFileSync4;
  module2.exports._getTmpname = getTmpname;
  module2.exports._cleanupOnExit = cleanupOnExit;
@@ -52446,7 +52446,7 @@ var require_lib12 = __commonJS({
  }
  }
  }
- async function writeFile10(filename, data2, options, callback) {
+ async function writeFile11(filename, data2, options, callback) {
  if (options instanceof Function) {
  callback = options;
  options = {};
@@ -83615,7 +83615,7 @@ var require_lockfile = __commonJS({
  }
  const file = _ref22;
  if (yield exists2(file)) {
- return readFile26(file);
+ return readFile27(file);
  }
  }
  return null;
@@ -83634,7 +83634,7 @@ var require_lockfile = __commonJS({
  })();
  let readJsonAndFile = exports3.readJsonAndFile = (() => {
  var _ref24 = (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* (loc) {
- const file = yield readFile26(loc);
+ const file = yield readFile27(loc);
  try {
  return {
  object: (0, (_map || _load_map()).default)(JSON.parse(stripBOM2(file))),
@@ -83776,7 +83776,7 @@ var require_lockfile = __commonJS({
  if (eol !== "\n") {
  data2 = data2.replace(/\n/g, eol);
  }
- yield writeFile10(path2, data2);
+ yield writeFile11(path2, data2);
  });
  return function writeFilePreservingEol2(_x30, _x31) {
  return _ref31.apply(this, arguments);
@@ -83788,7 +83788,7 @@ var require_lockfile = __commonJS({
  const file = (_path || _load_path()).default.join(dir, filename);
  const fileLink = (_path || _load_path()).default.join(dir, filename + "-link");
  try {
- yield writeFile10(file, "test");
+ yield writeFile11(file, "test");
  yield link(file, fileLink);
  } catch (err) {
  return false;
@@ -83874,7 +83874,7 @@ var require_lockfile = __commonJS({
  };
  })();
  exports3.copy = copy;
- exports3.readFile = readFile26;
+ exports3.readFile = readFile27;
  exports3.readFileRaw = readFileRaw;
  exports3.normalizeOS = normalizeOS;
  var _fs;
@@ -83938,7 +83938,7 @@ var require_lockfile = __commonJS({
  const lockQueue = exports3.lockQueue = new (_blockingQueue || _load_blockingQueue()).default("fs lock");
  const readFileBuffer = exports3.readFileBuffer = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.readFile);
  const open = exports3.open = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.open);
- const writeFile10 = exports3.writeFile = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.writeFile);
+ const writeFile11 = exports3.writeFile = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.writeFile);
  const readlink2 = exports3.readlink = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.readlink);
  const realpath2 = exports3.realpath = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.realpath);
  const readdir7 = exports3.readdir = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.readdir);
@@ -83972,7 +83972,7 @@ var require_lockfile = __commonJS({
  });
  });
  }
- function readFile26(loc) {
+ function readFile27(loc) {
  return _readFile(loc, "utf8").then(normalizeOS);
  }
  function readFileRaw(loc) {
@@ -111894,7 +111894,7 @@ var require_summary = __commonJS({
  exports2.summary = exports2.markdownSummary = exports2.SUMMARY_DOCS_URL = exports2.SUMMARY_ENV_VAR = void 0;
  var os_1 = __require("os");
  var fs_1 = __require("fs");
- var { access: access5, appendFile, writeFile: writeFile10 } = fs_1.promises;
+ var { access: access5, appendFile, writeFile: writeFile11 } = fs_1.promises;
  exports2.SUMMARY_ENV_VAR = "GITHUB_STEP_SUMMARY";
  exports2.SUMMARY_DOCS_URL = "https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary";
  var Summary = class {
@@ -111952,7 +111952,7 @@ var require_summary = __commonJS({
  return __awaiter(this, void 0, void 0, function* () {
  const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);
  const filePath = yield this.filePath();
- const writeFunc = overwrite ? writeFile10 : appendFile;
+ const writeFunc = overwrite ? writeFile11 : appendFile;
  yield writeFunc(filePath, this._buffer, { encoding: "utf8" });
  return this.emptyBuffer();
  });
@@ -190082,7 +190082,7 @@ var {
  } = import_index.default;

  // dist/index.js
- import { readFile as readFile25 } from "fs/promises";
+ import { readFile as readFile26 } from "fs/promises";

  // ../../node_modules/.pnpm/remeda@2.21.2/node_modules/remeda/dist/chunk-ANXBDSUI.js
  var s = { done: false, hasNext: false };
@@ -206007,11 +206007,47 @@ var YarnFixingManager = class extends NpmEcosystemFixingManager {
  // ../fixing-management/src/fixing-management/npm/npm-ecosystem-socket-fixing-manager.ts
  import { dirname as dirname5, join as join8, relative as relative5 } from "path";
  import { existsSync as existsSync11 } from "fs";
+ import { readFile as readFile15, writeFile as writeFile6 } from "fs/promises";
+ function applyUpgradesToPackageJson(packageJsonContent, upgrades, rangeStyle) {
+ let modifiedContent = packageJsonContent;
+ for (const upgrade of upgrades) {
+ const escapedPackageName = upgrade.packageName.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
+ const depSection = upgrade.isDev ? "devDependencies" : "dependencies";
+ const pattern = new RegExp(`("${escapedPackageName}"\\s*:\\s*")([^"]*)"`, "g");
+ const sectionPattern = new RegExp(`"${depSection}"\\s*:\\s*\\{[^}]*"${escapedPackageName}"\\s*:`, "s");
+ if (!sectionPattern.test(modifiedContent)) {
+ continue;
+ }
+ modifiedContent = modifiedContent.replace(pattern, (match2, prefix, currentVersion, offset) => {
+ const beforeMatch = modifiedContent.substring(0, offset);
+ const lastDepSection = beforeMatch.lastIndexOf(`"${depSection}"`);
+ const lastOtherSection = Math.max(
+ beforeMatch.lastIndexOf('"dependencies"'),
+ beforeMatch.lastIndexOf('"devDependencies"'),
+ beforeMatch.lastIndexOf('"peerDependencies"'),
+ beforeMatch.lastIndexOf('"optionalDependencies"')
+ );
+ if (lastOtherSection > lastDepSection && beforeMatch.substring(lastOtherSection).includes(depSection === "dependencies" ? "devDependencies" : "dependencies")) {
+ return match2;
+ }
+ let versionString;
+ if (rangeStyle === "pin") {
+ versionString = upgrade.upgradeVersion;
+ } else {
+ const specifierMatch = currentVersion.match(/^([^\d]*)/);
+ const specifier = specifierMatch ? specifierMatch[1] : "";
+ versionString = specifier + upgrade.upgradeVersion;
+ }
+ return `${prefix}${versionString}"`;
+ });
+ }
+ return modifiedContent;
+ }
  var NpmSocketUpgradeManager = class {
  constructor(rootDir) {
  this.rootDir = rootDir;
  }
- async applySocketArtifactUpgrades(upgrades, artifacts) {
+ async applySocketArtifactUpgrades(upgrades, artifacts, rangeStyle) {
  const subprojectToUpgrade = await this.groupUpgradesBySubprojectAndWorkspace(upgrades, artifacts);
  for (const [subprojectDir, workspaceToUpgrade] of subprojectToUpgrade) {
  const fixingManager = getFixingManagerFromPackageManager(
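
Aside: the applyUpgradesToPackageJson helper added above edits version strings in the raw package.json text instead of shelling out to the package manager for direct dependencies. The sketch below is illustrative only; the sample manifest is invented, and the simplified function merely mirrors the shape the bundle uses (upgrades of the form { packageName, upgradeVersion, isDev } plus a rangeStyle argument) while omitting the real helper's section-boundary checks.

// Hypothetical usage sketch, not the bundled implementation.
const manifest = JSON.stringify(
  { dependencies: { lodash: "^4.17.20" }, devDependencies: { vitest: "~1.5.0" } },
  null,
  2
);

function applyUpgradeSketch(content, { packageName, upgradeVersion }, rangeStyle) {
  // Escape regex metacharacters in the package name, as the bundled helper does.
  const escaped = packageName.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
  const pattern = new RegExp(`("${escaped}"\\s*:\\s*")([^"]*)"`);
  return content.replace(pattern, (_, prefix, currentVersion) => {
    // "pin" writes the bare version; any other value keeps the existing specifier (^, ~, ...).
    const specifier = rangeStyle === "pin" ? "" : (currentVersion.match(/^([^\d]*)/)?.[1] ?? "");
    return `${prefix}${specifier}${upgradeVersion}"`;
  });
}

console.log(applyUpgradeSketch(manifest, { packageName: "lodash", upgradeVersion: "4.17.21", isDev: false }, "pin"));
// "lodash": "^4.17.20" becomes "lodash": "4.17.21"; without rangeStyle it would become "^4.17.21".
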
@@ -206019,7 +206055,13 @@ var NpmSocketUpgradeManager = class {
  this.rootDir,
  subprojectDir
  );
- this.applySecurityFixesForSocketArtifacts(fixingManager, artifacts, workspaceToUpgrade);
+ this.applySecurityFixesForSocketArtifacts(
+ subprojectDir,
+ fixingManager,
+ artifacts,
+ workspaceToUpgrade,
+ rangeStyle
+ );
  }
  }
  async groupUpgradesBySubprojectAndWorkspace(upgrades, artifacts) {
@@ -206062,7 +206104,7 @@ var NpmSocketUpgradeManager = class {
  }
  return subprojectToUpgrade;
  }
- async applySecurityFixesForSocketArtifacts(fixingManager, artifacts, workspaceTofixes) {
+ async applySecurityFixesForSocketArtifacts(subprojectDir, fixingManager, artifacts, workspaceTofixes, rangeStyle) {
  for (const [workspacePath, upgrades] of workspaceTofixes.entries()) {
  const upgradesTransformed = upgrades.map((upgrade) => ({
  dependencyName: getNameFromNamespaceAndName(
@@ -206077,13 +206119,20 @@ var NpmSocketUpgradeManager = class {
  await fixingManager.applySecurityFixesSpecificPackageManager(upgradesTransformed);
  const upgradesToDirectDependencies = upgrades.filter((upgrade) => artifacts[upgrade.idx].direct);
  if (upgradesToDirectDependencies.length === 0) continue;
- for (const isDev of [false, true]) {
- const upgradesOfDirectDependenciesOfType = upgradesToDirectDependencies.filter((upgrade) => artifacts[upgrade.idx].dev === isDev).map(
- ({ idx, upgradeVersion }) => `${artifacts[idx].namespace ? `${artifacts[idx].namespace}/` : ""}${artifacts[idx].name}@${upgradeVersion}`
- );
- if (upgradesOfDirectDependenciesOfType.length === 0) continue;
- await fixingManager.installSpecificPackages(workspacePath, isDev, upgradesOfDirectDependenciesOfType);
- }
+ const packageJsonPath = join8(subprojectDir, workspacePath, "package.json");
+ const packageJsonContent = await readFile15(packageJsonPath, "utf-8");
+ const upgradesWithPackageNames = upgradesToDirectDependencies.map(
+ (upgrade) => {
+ const artifact = artifacts[upgrade.idx];
+ return {
+ packageName: artifact.namespace ? `${artifact.namespace}/${artifact.name}` : artifact.name,
+ upgradeVersion: upgrade.upgradeVersion,
+ isDev: artifact.dev ?? false
+ };
+ }
+ );
+ const modifiedContent = applyUpgradesToPackageJson(packageJsonContent, upgradesWithPackageNames, rangeStyle);
+ await writeFile6(packageJsonPath, modifiedContent, "utf-8");
  }
  await fixingManager.finalizeFixes();
  }
@@ -206116,7 +206165,7 @@ var RushFixingManager = class {
  };

  // ../fixing-management/src/fixing-management/nuget/nuget-fixing-manager.ts
- import { readFile as readFile15, writeFile as writeFile6 } from "fs/promises";
+ import { readFile as readFile16, writeFile as writeFile7 } from "fs/promises";
  import { join as join9 } from "path";

  // ../utils/src/nuget-utils.ts
@@ -206219,16 +206268,16 @@ var NugetFixingManager = class {
  if (projectFiles.length !== 1)
  throw new Error("Applying fixes to workspaces with more than 1 project file currently not supported");
  const projectFilePath = join9(this.getAbsWsPath(wsPath), projectFiles[0]);
- const initialProjectFile = await readFile15(projectFilePath, "utf-8");
+ const initialProjectFile = await readFile16(projectFilePath, "utf-8");
  const initialLockFile = await this.restoreWorkspaceAndParseLockFile(wsPath);
  await applySeries(fixesWithId, async ({ fixId, vulnerabilityFixes }) => {
  await this.applySecurityFixesForWorkspace(wsPath, projectFilePath, vulnerabilityFixes, dependencyTree);
  signalFixApplied2?.(fixId, this.subprojectPath, wsPath, vulnerabilityFixes);
  });
- const finalProjectFile = await readFile15(projectFilePath, "utf-8");
- const finalLockFile = JSON.parse(await readFile15(this.getLockFilePath(wsPath), "utf-8"));
- await writeFile6(projectFilePath, initialProjectFile);
- await writeFile6(this.getLockFilePath(wsPath), JSON.stringify(initialLockFile, null, 2));
+ const finalProjectFile = await readFile16(projectFilePath, "utf-8");
+ const finalLockFile = JSON.parse(await readFile16(this.getLockFilePath(wsPath), "utf-8"));
+ await writeFile7(projectFilePath, initialProjectFile);
+ await writeFile7(this.getLockFilePath(wsPath), JSON.stringify(initialLockFile, null, 2));
  return { projectFile: finalProjectFile, lockFile: finalLockFile };
  }
  );
@@ -206238,8 +206287,8 @@ var NugetFixingManager = class {
  const projectFiles = fixingInfo.projectFiles[wsPath];
  if (projectFiles.length !== 1)
  throw new Error("Applying fixes to workspaces with more than 1 project file currently not supported");
- await writeFile6(join9(this.getAbsWsPath(wsPath), projectFiles[0]), finalProjectFile);
- await writeFile6(this.getLockFilePath(wsPath), JSON.stringify(finalLockFile, null, 2));
+ await writeFile7(join9(this.getAbsWsPath(wsPath), projectFiles[0]), finalProjectFile);
+ await writeFile7(this.getLockFilePath(wsPath), JSON.stringify(finalLockFile, null, 2));
  });
  if (solutionFiles) {
  for (const solutionFile of solutionFiles) {
@@ -206258,7 +206307,7 @@ var NugetFixingManager = class {
  }
  }
  async applySecurityFixesForWorkspace(wsPath, projectFilePath, vulnFixes, dependencyTree) {
- const initialProjectFile = await readFile15(projectFilePath, "utf-8");
+ const initialProjectFile = await readFile16(projectFilePath, "utf-8");
  const initialLockFile = await this.restoreWorkspaceAndParseLockFile(wsPath);
  const typeCache = new Cache();
  const requestedCache = new Cache();
@@ -206305,7 +206354,7 @@ var NugetFixingManager = class {
  details.requested = requestedRange;
  });
  });
- await writeFile6(projectFilePath, initialProjectFile);
+ await writeFile7(projectFilePath, initialProjectFile);
  await applySeries(vulnFixes, async ({ dependencyIdentifier, dependencyName }) => {
  await applySeries(
  dependencyTree.transitiveDependencies[dependencyIdentifier].frameworks?.filter(
@@ -206331,7 +206380,7 @@ var NugetFixingManager = class {
  }
  );
  });
- await writeFile6(this.getLockFilePath(wsPath), JSON.stringify(lockFileWithFixes, null, 2));
+ await writeFile7(this.getLockFilePath(wsPath), JSON.stringify(lockFileWithFixes, null, 2));
  }
  async addPackage(packageName, version3, framework, wsPath) {
  const dir = join9(this.rootDir, this.subprojectPath, wsPath);
@@ -206348,7 +206397,7 @@ var NugetFixingManager = class {
  async restoreWorkspaceAndParseLockFile(wsPath) {
  const succeeded = await execAndLogOnFailure("dotnet restore --use-lock-file", this.getAbsWsPath(wsPath));
  if (!succeeded) throw new Error(`Error applying fix - could not restore project ${this.subprojectPath}/${wsPath}`);
- return JSON.parse(await readFile15(this.getLockFilePath(wsPath), "utf-8"));
+ return JSON.parse(await readFile16(this.getLockFilePath(wsPath), "utf-8"));
  }
  getLockFilePath(wsPath, lockFileName = "packages.lock.json") {
  return join9(this.getAbsWsPath(wsPath), lockFileName);
@@ -206459,10 +206508,10 @@ async function applySecurityFixes(packageManagerName, rootDir, subprojectPath, o
  otherModulesCommunicator
  ).applySecurityFixes(fixes, fixingInfo, signalFixApplied2);
  }
- async function applySocketUpgrades(ecosystem, rootDir, upgrades, artifacts) {
+ async function applySocketUpgrades(ecosystem, rootDir, upgrades, artifacts, rangeStyle) {
  const C2 = socketUpgradeManagerConstructors[ecosystem];
  if (!C2) return;
- await new C2(rootDir).applySocketArtifactUpgrades(upgrades, artifacts);
+ await new C2(rootDir).applySocketArtifactUpgrades(upgrades, artifacts, rangeStyle);
  }

  // dist/cli-apply-fix.js
@@ -207106,7 +207155,7 @@ function utilFormatter2() {
  }

  // ../web-compat-utils/dist/logger-singleton.js
- import { readFile as readFile16 } from "fs/promises";
+ import { readFile as readFile17 } from "fs/promises";
  var CLILogger2 = class {
  logger = console;
  writeStream;
@@ -207186,7 +207235,7 @@ var CLILogger2 = class {
  await this.finish();
  let logContent;
  try {
- logContent = await readFile16(logFilePath, "utf-8");
+ logContent = await readFile17(logFilePath, "utf-8");
  } catch (e) {
  console.error("Error reading log file", e);
  }
@@ -207231,13 +207280,13 @@ async function detectVariantMaven(projectDir) {
  // ../docker-management/src/maven/gradle-version-detector.ts
  import { existsSync as existsSync13 } from "fs";
  import { join as join14 } from "path";
- import { readFile as readFile17 } from "fs/promises";
+ import { readFile as readFile18 } from "fs/promises";
  async function detectVariantGradle(projectDir) {
  return sanitizeJvmVariant("GRADLE", projectDir, await detect(projectDir));
  }
  async function detect(projectDir) {
  const gradleWrapperPropertiesPath = join14(projectDir, "gradle", "wrapper", "gradle-wrapper.properties");
- const gradleWrapperProperties = existsSync13(gradleWrapperPropertiesPath) ? (await readFile17(gradleWrapperPropertiesPath, "utf-8")).split("\n").map((line) => line.trim()).filter((line) => !line.startsWith("#")).filter((line) => line) : void 0;
+ const gradleWrapperProperties = existsSync13(gradleWrapperPropertiesPath) ? (await readFile18(gradleWrapperPropertiesPath, "utf-8")).split("\n").map((line) => line.trim()).filter((line) => !line.startsWith("#")).filter((line) => line) : void 0;
  if (!gradleWrapperProperties) return void 0;
  const distributionUrlRegex = /.*gradle-(\d+(\.\d+(\.\d+)?)?)/;
  for (const prop2 of gradleWrapperProperties) {
@@ -207253,13 +207302,13 @@ async function detect(projectDir) {
  // ../docker-management/src/maven/sbt-version-detector.ts
  import { existsSync as existsSync14 } from "fs";
  import { join as join15 } from "path";
- import { readFile as readFile18 } from "fs/promises";
+ import { readFile as readFile19 } from "fs/promises";
  async function detectVariantSbt(projectDir) {
  return sanitizeJvmVariant("SBT", projectDir, await detect2(projectDir));
  }
  async function detect2(projectDir) {
  const sbtBuildPropertiesPath = join15(projectDir, "project", "build.properties");
- const sbtBuildProperties = existsSync14(sbtBuildPropertiesPath) ? (await readFile18(sbtBuildPropertiesPath, "utf-8")).split("\n").map((line) => line.trim()).filter((line) => !line.startsWith("#")).filter((line) => line) : void 0;
+ const sbtBuildProperties = existsSync14(sbtBuildPropertiesPath) ? (await readFile19(sbtBuildPropertiesPath, "utf-8")).split("\n").map((line) => line.trim()).filter((line) => !line.startsWith("#")).filter((line) => line) : void 0;
  if (!sbtBuildProperties) return void 0;
  for (const prop2 of sbtBuildProperties) {
  const [key, value] = prop2.split("=");
@@ -207373,7 +207422,7 @@ async function findReachabilityAnalyzersDockerImage(ecosystem) {
  // ../other-modules-communicator/src/other-modules-communicator.ts
  var import_lodash11 = __toESM(require_lodash(), 1);
  import { rmSync } from "fs";
- import { mkdir, readFile as readFile19, writeFile as writeFile7 } from "fs/promises";
+ import { mkdir, readFile as readFile20, writeFile as writeFile8 } from "fs/promises";
  import { platform } from "os";
  import { join as join19, posix as posix2, relative as relative8, sep as sep3 } from "path";

@@ -207824,7 +207873,7 @@ var OtherModulesCommunicator = class {
  COANA_API_KEY: this.apiKey.type === "present" ? this.apiKey.value : ""
  }
  );
- return JSON.parse(await readFile19(outputFilePathThisProcess, "utf-8")).result;
+ return JSON.parse(await readFile20(outputFilePathThisProcess, "utf-8")).result;
  }
  async runReachabilityAnalyzerCommand(commandName, ecosystem, subprojectPath, workspacePath, args2, env) {
  const tmpDir = await this.getTmpDirForSubproject(subprojectPath);
@@ -207885,7 +207934,7 @@ var OtherModulesCommunicator = class {
  [...args2, "-o", outputFilePathOtherProcess],
  env
  );
- return JSON.parse(await readFile19(outputFilePathThisProcess, "utf-8")).result;
+ return JSON.parse(await readFile20(outputFilePathThisProcess, "utf-8")).result;
  }
  async runInDocker(ecosystem, image, entryPoint, commandName, args2, subprojectPath, tmpDir, env = process.env) {
  if (!await pullDockerImage(image)) return false;
@@ -207914,7 +207963,7 @@ var OtherModulesCommunicator = class {
  const providerFileName = "provider.json";
  const providerFileThisProcess = join19(tmpDir, providerFileName);
  const providerFileOtherProcess = this.options.runWithoutDocker ? providerFileThisProcess : posix2.join(TMP_DIR_IN_DOCKER, providerFileName);
- await writeFile7(providerFileThisProcess, JSON.stringify(providedOptions.provider));
+ await writeFile8(providerFileThisProcess, JSON.stringify(providedOptions.provider));
  return ["--provider", providerFileOtherProcess];
  } else {
  return ["--as-provider"];
@@ -207958,7 +208007,7 @@ var OtherModulesCommunicator = class {
  const inputFileName = `${v4_default()}-runReachabilityAnalysis-input.json`;
  const inputFileThisProcess = join19(tmpDir, inputFileName);
  const inputFileOtherProcess = this.options.runWithoutDocker ? inputFileThisProcess : posix2.join(TMP_DIR_IN_DOCKER, inputFileName);
- await writeFile7(
+ await writeFile8(
  inputFileThisProcess,
  JSON.stringify({
  workspaceData,
@@ -208135,7 +208184,7 @@ function t3(...r2) {
  }

  // ../utils/src/dashboard-api/coana-api.ts
- import { writeFile as writeFile8 } from "fs/promises";
+ import { writeFile as writeFile9 } from "fs/promises";
  var import_artifact = __toESM(require_artifact_client2(), 1);
  var coanaAPI = process.env.PUBLIC_API_URL ?? "https://app.coana.tech/api/v1";
  var axiosClient2 = getAxiosClient();
@@ -208265,7 +208314,7 @@ async function sendToDashboard(report, writeReportToFile, reportId, apiKey) {
  try {
  if (writeReportToFile) {
  logger.info("Writing report to dashboard-report.json");
- await writeFile8("dashboard-report.json", JSON.stringify(report, null, 2));
+ await writeFile9("dashboard-report.json", JSON.stringify(report, null, 2));
  if (process.env.GITHUB_ACTIONS === "true") {
  logger.info("uploading dashboard-report.json as an artifact");
  (0, import_artifact.create)().uploadArtifact("dashboard-report", ["dashboard-report.json"], process.cwd());
@@ -209343,12 +209392,12 @@ import { join as join22, relative as relative9, resolve as resolve20 } from "pat

  // ../project-management/src/project-management/ecosystem-management/ecosystem-specs.ts
  import { existsSync as existsSync18 } from "fs";
- import { readdir as readdir5, readFile as readFile22 } from "fs/promises";
+ import { readdir as readdir5, readFile as readFile23 } from "fs/promises";
  import { join as join21, sep as sep4 } from "path";

  // ../utils/src/pip-utils.ts
  import { existsSync as existsSync17 } from "fs";
- import { readFile as readFile21 } from "fs/promises";
+ import { readFile as readFile22 } from "fs/promises";
  import { resolve as resolve19 } from "path";
  import util4 from "util";

@@ -209357,7 +209406,7 @@ var import_lodash13 = __toESM(require_lodash(), 1);
  var import_semver4 = __toESM(require_semver2(), 1);
  import { execFileSync as execFileSync2 } from "child_process";
  import { constants as constants2 } from "fs";
- import { access as access4, readFile as readFile20 } from "fs/promises";
+ import { access as access4, readFile as readFile21 } from "fs/promises";
  import { join as join20, resolve as resolve18 } from "path";
  import util3 from "util";
  var { once: once7 } = import_lodash13.default;
@@ -209366,7 +209415,7 @@ var hasPyenv = once7(async () => !(await execNeverFail("which pyenv")).error);

  // ../utils/src/pip-utils.ts
  async function isSetupPySetuptools(file) {
- const content = await readFile21(file, "utf-8");
+ const content = await readFile22(file, "utf-8");
  return content.includes("setup(") && (/^\s*from\s+(?:setuptools|distutils\.core)\s+import\s+.*setup/m.test(content) || /^\s*import\s+(?:setuptools|distutils\.core)/m.test(content));
  }

@@ -209448,7 +209497,7 @@ function packageManagerIfPackageJSONExistsAndValid(packageManager) {
  if (!existsSync18(join21(projectDir, "package.json"))) return void 0;
  const packageJSONPath = join21(projectDir, "package.json");
  try {
- JSON.parse(await readFile22(packageJSONPath, "utf-8"));
+ JSON.parse(await readFile23(packageJSONPath, "utf-8"));
  return packageManager;
  } catch (e) {
  throw new InvalidProjectFileError(projectDir, "package.json");
@@ -209709,7 +209758,7 @@ ${detailsString}` : ""}`;

  // dist/cli-core.js
  import { writeFileSync as writeFileSync3 } from "fs";
- import { mkdir as mkdir2, writeFile as writeFile9 } from "fs/promises";
+ import { mkdir as mkdir2, writeFile as writeFile10 } from "fs/promises";

  // ../../node_modules/.pnpm/kleur@4.1.5/node_modules/kleur/index.mjs
  var FORCE_COLOR;
@@ -210134,7 +210183,7 @@ var DEFAULT_REPORT_FILENAME_BASE = "coana-report";

  // dist/internal/exclude-dirs-from-configuration-files.js
  import { existsSync as existsSync19 } from "fs";
- import { readFile as readFile23 } from "fs/promises";
+ import { readFile as readFile24 } from "fs/promises";
  import { basename as basename6, resolve as resolve22 } from "path";
  var import_yaml2 = __toESM(require_dist11(), 1);
  async function inferExcludeDirsFromConfigurationFiles(rootWorkingDir) {
@@ -210148,7 +210197,7 @@ async function inferExcludeDirsFromConfigurationFiles(rootWorkingDir) {
  }
  async function inferExcludeDirsFromSocketConfig(socketConfigFile) {
  try {
- const config3 = (0, import_yaml2.parse)(await readFile23(socketConfigFile, "utf8"));
+ const config3 = (0, import_yaml2.parse)(await readFile24(socketConfigFile, "utf8"));
  const version3 = config3.version;
  const ignorePaths = config3[version3 === 1 ? "ignore" : "projectIgnorePaths"];
  if (!ignorePaths)
@@ -210723,7 +210772,7 @@ function toSocketFactsSocketDependencyTree(artifacts, vulnerabilities, tier1Reac
  }

  // dist/internal/vulnerability-scanning.js
- import { readFile as readFile24 } from "fs/promises";
+ import { readFile as readFile25 } from "fs/promises";

  // ../security-auditor/security-auditor-builder/src/mongo-connection.ts
  var import_mongodb = __toESM(require_lib30(), 1);
@@ -225592,7 +225641,7 @@ async function scanForVulnerabilities(dependencyTree, offlineVulnerabilityScanne
  }
  async function offlineScan(dependencyTree, offlineVulnerabilityScannerDBPath) {
  logger.info("using offline vulnerability scanner db");
- const offlineVulnerabilityScannerDB = JSON.parse(await readFile24(offlineVulnerabilityScannerDBPath, "utf-8"));
+ const offlineVulnerabilityScannerDB = JSON.parse(await readFile25(offlineVulnerabilityScannerDBPath, "utf-8"));
  const { ecosystemToUrlToVulnerabilityDetails, vulnerabilityDatabase } = offlineVulnerabilityScannerDB;
  const coanaSupportedVulnerabilitiesLoader = CoanaSupportedVulnerabilitiesLoader.create(ecosystemToUrlToVulnerabilityDetails);
  const vulnerabilityAccessPathLoader = CoanaSupportedVulnerabilitiesLoader.create(ecosystemToUrlToVulnerabilityDetails);
@@ -225610,7 +225659,7 @@ async function onlineScan(dependencyTree, apiKey, timeout) {
  }

  // dist/version.js
- var version2 = "14.12.10";
+ var version2 = "14.12.11";

  // dist/cli-core.js
  var { mapValues, omit, partition, pick } = import_lodash15.default;
@@ -225796,7 +225845,7 @@ var CliCore = class {
  }
  const socketReport = toSocketFactsSocketDependencyTree(artifacts, vulnsWithResults, this.reportId);
  const outputFile = resolve23(this.options.socketMode);
- await writeFile9(outputFile, JSON.stringify(socketReport, null, 2));
+ await writeFile10(outputFile, JSON.stringify(socketReport, null, 2));
  logger.info(kleur_default.green(`Socket report written to: ${outputFile}`));
  }
  async shareErrorLogWithBackend(e, shouldLogSharing) {
@@ -225814,7 +225863,7 @@ var CliCore = class {
  }
  const socketReport = toSocketFacts(report, this.reportDependencyTrees, subPjToWsPathToDirectDependencies);
  const outputFile = resolve23(this.options.socketMode);
- await writeFile9(outputFile, JSON.stringify(socketReport, null, 2));
+ await writeFile10(outputFile, JSON.stringify(socketReport, null, 2));
  logger.info(kleur_default.green(`Socket report written to: ${outputFile}`));
  return;
  }
@@ -226297,6 +226346,9 @@ import { join as join26, relative as relative12 } from "node:path";
  var import_packageurl_js2 = __toESM(require_packageurl_js(), 1);
  var ECOSYSTEMS_WITH_SOCKET_UPGRADES = ["NPM", "MAVEN"];
  async function upgradePurl(path2, upgrades, options, logFile, cliFixRunId) {
+ if (options.rangeStyle && options.rangeStyle !== "pin") {
+ throw new Error('Range style must be "pin"');
+ }
  logger.initWinstonLogger(options.debug);
  logger.silent = options.silent;
  let cliRunId = cliFixRunId;
@@ -226342,7 +226394,11 @@ ${upgrades.map((upgrade) => ` ${upgrade.purl} -> ${upgrade.upgradeVersion}`).joi
  });
  });
  for (const [ecosystem, upgrades2] of Object.entries(ecosystemToSocketArtifactUpgrades)) {
- await applySocketUpgrades(ecosystem, path2, upgrades2, artifacts);
+ if (options.rangeStyle && ecosystem !== "NPM") {
+ logger.warn(`Range style is only supported for npm, skipping upgrades for ${ecosystem}`);
+ continue;
+ }
+ await applySocketUpgrades(ecosystem, path2, upgrades2, artifacts, options.rangeStyle);
  }
  if (upgradePurlRunId) {
  await getSocketAPI().finalizeUpgradePurlRun(upgradePurlRunId, "success");
@@ -226475,7 +226531,8 @@ async function computeFixesAndUpgradePurls(path2, options, logFile) {
  runWithoutDocker: options.runWithoutDocker,
  manifestsTarHash: options.manifestsTarHash,
  concurrency: "1",
- globPattern: options.globPattern
+ globPattern: options.globPattern,
+ rangeStyle: options.rangeStyle
  }, autofixRunId) ?? "fixed-all";
  if (autofixRunId) {
  await getSocketAPI().finalizeAutofixRun(autofixRunId, ghsasFailedToFix.length === 0 && applyFixesStatus === "fixed-all" ? "fixed-all" : ghsasFailedToFix.length === Object.keys(ghsaToVulnerableArtifactIdsToApply).length || applyFixesStatus === "fixed-none" ? "fixed-none" : "fixed-some");
@@ -226596,7 +226653,7 @@ applyFixes.name("apply-fixes").argument("<path>", "File system path to the folde
  await applyFix(path2, fixIds, options);
  }).configureHelp({ sortOptions: true });
  var upgradePurls = new Command();
- upgradePurls.name("upgrade-purls").argument("<path>", "File system path to the folder containing the project").argument("<specs...>", "Package upgrade specifications in the format 'purl -> newVersion' (e.g., 'pkg:maven/io.micrometer/micrometer-core@1.10.9 -> 1.15.0')").option("-g, --glob <pattern>", "Glob pattern to filter workspaces by absolute file path").option("-c, --concurrency <concurrency>", "Set the maximum number of concurrent reachability analysis runs. It's recommended to choose a concurrency level that ensures that each analysis run has at least the --memory-limit amount of memory available.", "1").option("-d, --debug", "Enable debug logging", false).option("-s, --silent", "Silence all debug/warning output", false).addOption(new Option("--run-without-docker", "Run package managers without using docker").default(process.env.RUN_WITHOUT_DOCKER === "true").hideHelp()).addOption(new Option("--socket-mode", "Use Socket for computing dependency trees").default(process.env.SOCKET_MODE === "true").hideHelp()).version(version2).action(async (path2, specs2, options) => {
+ upgradePurls.name("upgrade-purls").argument("<path>", "File system path to the folder containing the project").argument("<specs...>", "Package upgrade specifications in the format 'purl -> newVersion' (e.g., 'pkg:maven/io.micrometer/micrometer-core@1.10.9 -> 1.15.0')").option("-g, --glob <pattern>", "Glob pattern to filter workspaces by absolute file path").option("-c, --concurrency <concurrency>", "Set the maximum number of concurrent reachability analysis runs. It's recommended to choose a concurrency level that ensures that each analysis run has at least the --memory-limit amount of memory available.", "1").option("-d, --debug", "Enable debug logging", false).option("-s, --silent", "Silence all debug/warning output", false).option("--range-style <style>", 'Range style to use for the output. Currently only "pin" is supported and it only works for npm.').addOption(new Option("--run-without-docker", "Run package managers without using docker").default(process.env.RUN_WITHOUT_DOCKER === "true").hideHelp()).addOption(new Option("--socket-mode", "Use Socket for computing dependency trees").default(process.env.SOCKET_MODE === "true").hideHelp()).version(version2).action(async (path2, specs2, options) => {
  process.env.DOCKER_IMAGE_TAG ??= version2;
  await withTmpDirectory("upgrade-purls", async (tmpDir) => {
  const logFile = join27(tmpDir, "upgrade-purls.log");
@@ -226614,8 +226671,11 @@ upgradePurls.name("upgrade-purls").argument("<path>", "File system path to the f
  });
  }).configureHelp({ sortOptions: true });
  var computeFixesAndUpgradePurlsCmd = new Command();
- computeFixesAndUpgradePurlsCmd.name("compute-fixes-and-upgrade-purls").argument("<path>", "File system path to the folder containing the project").option("-a, --apply-fixes-to <ghsas...>", 'GHSA IDs to compute fixes for. Use "all" to compute fixes for all vulnerabilities.', []).option("--dry-run", "Show what changes would be made without actually making them", false).option("-g, --glob <pattern>", "Glob pattern to filter workspaces by absolute file path").option("-d, --debug", "Enable debug logging", false).option("-s, --silent", "Silence all debug/warning output", false).addOption(new Option("--run-without-docker", "Run package managers without using docker").default(process.env.RUN_WITHOUT_DOCKER === "true").hideHelp()).addOption(new Option("--manifests-tar-hash <hash>", "Hash of the tarball containing all manifest files already uploaded to Socket. If provided, Socket will be used for computing dependency trees.").hideHelp()).version(version2).action(async (path2, options) => {
+ computeFixesAndUpgradePurlsCmd.name("compute-fixes-and-upgrade-purls").argument("<path>", "File system path to the folder containing the project").option("-a, --apply-fixes-to <ghsas...>", 'GHSA IDs to compute fixes for. Use "all" to compute fixes for all vulnerabilities.', []).option("--dry-run", "Show what changes would be made without actually making them", false).option("-g, --glob <pattern>", "Glob pattern to filter workspaces by absolute file path").option("-d, --debug", "Enable debug logging", false).option("-s, --silent", "Silence all debug/warning output", false).option("--range-style <style>", 'Range style to use for the output. Currently only "pin" is supported and it only works for npm.').addOption(new Option("--run-without-docker", "Run package managers without using docker").default(process.env.RUN_WITHOUT_DOCKER === "true").hideHelp()).addOption(new Option("--manifests-tar-hash <hash>", "Hash of the tarball containing all manifest files already uploaded to Socket. If provided, Socket will be used for computing dependency trees.").hideHelp()).version(version2).action(async (path2, options) => {
  process.env.DOCKER_IMAGE_TAG ??= version2;
+ if (options.rangeStyle && options.rangeStyle !== "pin") {
+ throw new Error('Range style must be "pin"');
+ }
  await withTmpDirectory("compute-fixes-and-upgrade-purls", async (tmpDir) => {
  const logFile = join27(tmpDir, "compute-fixes-and-upgrade-purls.log");
  logger.initWinstonLogger(options.debug, logFile);
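
Aside: together with the guard added in upgradePurl further up, the new --range-style flag on upgrade-purls and compute-fixes-and-upgrade-purls is effectively a single-value enum: anything other than "pin" is rejected, and non-npm ecosystems are skipped with a warning. A minimal sketch of that option pattern follows; it assumes the commander package (which the bundle vendors) and uses an invented program body, so it is illustrative rather than the CLI's actual wiring.

// Illustrative sketch of a "pin"-only --range-style option (assumes commander is installed).
import { Command } from "commander";

const program = new Command()
  .name("upgrade-purls")
  .option("--range-style <style>", 'Range style to use for the output. Currently only "pin" is supported.')
  .action((options) => {
    // Mirrors the validation added in the diff: only "pin" is accepted.
    if (options.rangeStyle && options.rangeStyle !== "pin") {
      throw new Error('Range style must be "pin"');
    }
    console.log(options.rangeStyle ? "pinning exact versions" : "keeping existing range specifiers");
  });

program.parse(["node", "upgrade-purls", "--range-style", "pin"]);
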
@@ -226625,7 +226685,7 @@ computeFixesAndUpgradePurlsCmd.name("compute-fixes-and-upgrade-purls").argument(
  var compareReportsCommand = new Command();
  compareReportsCommand.name("compare-reports").argument("<baselineReportPath>", "Path to the baseline report").argument("<newReportPath>", "Path to the new report").option("--api-key <key>", "Set the Coana dashboard API key.").option("-d, --debug", "Enable debug logging", false).option("--no-pr-comment", "Disable pull request comments (only relevant when run from a PR)", true).option("--no-block", "Do not fail with a non-zero exit code when new reachable vulnerabilities are detected", true).option("--ignore-undeterminable-reachability", "Ignore vulnerabilities with undeterminable reachability", false).action(async (baselineReportPath, newReportPath, options) => {
  async function readReport(reportPath) {
- return JSON.parse(await readFile25(reportPath, "utf-8"));
+ return JSON.parse(await readFile26(reportPath, "utf-8"));
  }
  const baselineReport = await readReport(baselineReportPath);
  const newReport = await readReport(newReportPath);
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@coana-tech/cli",
- "version": "14.12.10",
+ "version": "14.12.11",
  "description": "Coana CLI",
  "type": "module",
  "bin": {