@coana-tech/cli 14.12.145 → 14.12.147

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/cli.mjs CHANGED
@@ -17420,7 +17420,7 @@ var require_safe_stable_stringify = __commonJS({
17420
17420
  return circularValue;
17421
17421
  }
17422
17422
  let res = "";
17423
- let join34 = ",";
17423
+ let join36 = ",";
17424
17424
  const originalIndentation = indentation;
17425
17425
  if (Array.isArray(value2)) {
17426
17426
  if (value2.length === 0) {
@@ -17434,7 +17434,7 @@ var require_safe_stable_stringify = __commonJS({
17434
17434
  indentation += spacer;
17435
17435
  res += `
17436
17436
  ${indentation}`;
17437
- join34 = `,
17437
+ join36 = `,
17438
17438
  ${indentation}`;
17439
17439
  }
17440
17440
  const maximumValuesToStringify = Math.min(value2.length, maximumBreadth);
@@ -17442,13 +17442,13 @@ ${indentation}`;
17442
17442
  for (; i7 < maximumValuesToStringify - 1; i7++) {
17443
17443
  const tmp2 = stringifyFnReplacer(String(i7), value2, stack2, replacer, spacer, indentation);
17444
17444
  res += tmp2 !== void 0 ? tmp2 : "null";
17445
- res += join34;
17445
+ res += join36;
17446
17446
  }
17447
17447
  const tmp = stringifyFnReplacer(String(i7), value2, stack2, replacer, spacer, indentation);
17448
17448
  res += tmp !== void 0 ? tmp : "null";
17449
17449
  if (value2.length - 1 > maximumBreadth) {
17450
17450
  const removedKeys = value2.length - maximumBreadth - 1;
17451
- res += `${join34}"... ${getItemCount(removedKeys)} not stringified"`;
17451
+ res += `${join36}"... ${getItemCount(removedKeys)} not stringified"`;
17452
17452
  }
17453
17453
  if (spacer !== "") {
17454
17454
  res += `
@@ -17469,7 +17469,7 @@ ${originalIndentation}`;
17469
17469
  let separator = "";
17470
17470
  if (spacer !== "") {
17471
17471
  indentation += spacer;
17472
- join34 = `,
17472
+ join36 = `,
17473
17473
  ${indentation}`;
17474
17474
  whitespace2 = " ";
17475
17475
  }
@@ -17483,13 +17483,13 @@ ${indentation}`;
17483
17483
  const tmp = stringifyFnReplacer(key2, value2, stack2, replacer, spacer, indentation);
17484
17484
  if (tmp !== void 0) {
17485
17485
  res += `${separator}${strEscape(key2)}:${whitespace2}${tmp}`;
17486
- separator = join34;
17486
+ separator = join36;
17487
17487
  }
17488
17488
  }
17489
17489
  if (keyLength > maximumBreadth) {
17490
17490
  const removedKeys = keyLength - maximumBreadth;
17491
17491
  res += `${separator}"...":${whitespace2}"${getItemCount(removedKeys)} not stringified"`;
17492
- separator = join34;
17492
+ separator = join36;
17493
17493
  }
17494
17494
  if (spacer !== "" && separator.length > 1) {
17495
17495
  res = `
@@ -17530,7 +17530,7 @@ ${originalIndentation}`;
17530
17530
  }
17531
17531
  const originalIndentation = indentation;
17532
17532
  let res = "";
17533
- let join34 = ",";
17533
+ let join36 = ",";
17534
17534
  if (Array.isArray(value2)) {
17535
17535
  if (value2.length === 0) {
17536
17536
  return "[]";
@@ -17543,7 +17543,7 @@ ${originalIndentation}`;
17543
17543
  indentation += spacer;
17544
17544
  res += `
17545
17545
  ${indentation}`;
17546
- join34 = `,
17546
+ join36 = `,
17547
17547
  ${indentation}`;
17548
17548
  }
17549
17549
  const maximumValuesToStringify = Math.min(value2.length, maximumBreadth);
@@ -17551,13 +17551,13 @@ ${indentation}`;
17551
17551
  for (; i7 < maximumValuesToStringify - 1; i7++) {
17552
17552
  const tmp2 = stringifyArrayReplacer(String(i7), value2[i7], stack2, replacer, spacer, indentation);
17553
17553
  res += tmp2 !== void 0 ? tmp2 : "null";
17554
- res += join34;
17554
+ res += join36;
17555
17555
  }
17556
17556
  const tmp = stringifyArrayReplacer(String(i7), value2[i7], stack2, replacer, spacer, indentation);
17557
17557
  res += tmp !== void 0 ? tmp : "null";
17558
17558
  if (value2.length - 1 > maximumBreadth) {
17559
17559
  const removedKeys = value2.length - maximumBreadth - 1;
17560
- res += `${join34}"... ${getItemCount(removedKeys)} not stringified"`;
17560
+ res += `${join36}"... ${getItemCount(removedKeys)} not stringified"`;
17561
17561
  }
17562
17562
  if (spacer !== "") {
17563
17563
  res += `
@@ -17570,7 +17570,7 @@ ${originalIndentation}`;
17570
17570
  let whitespace2 = "";
17571
17571
  if (spacer !== "") {
17572
17572
  indentation += spacer;
17573
- join34 = `,
17573
+ join36 = `,
17574
17574
  ${indentation}`;
17575
17575
  whitespace2 = " ";
17576
17576
  }
@@ -17579,7 +17579,7 @@ ${indentation}`;
17579
17579
  const tmp = stringifyArrayReplacer(key2, value2[key2], stack2, replacer, spacer, indentation);
17580
17580
  if (tmp !== void 0) {
17581
17581
  res += `${separator}${strEscape(key2)}:${whitespace2}${tmp}`;
17582
- separator = join34;
17582
+ separator = join36;
17583
17583
  }
17584
17584
  }
17585
17585
  if (spacer !== "" && separator.length > 1) {
@@ -17637,20 +17637,20 @@ ${originalIndentation}`;
17637
17637
  indentation += spacer;
17638
17638
  let res2 = `
17639
17639
  ${indentation}`;
17640
- const join35 = `,
17640
+ const join37 = `,
17641
17641
  ${indentation}`;
17642
17642
  const maximumValuesToStringify = Math.min(value2.length, maximumBreadth);
17643
17643
  let i7 = 0;
17644
17644
  for (; i7 < maximumValuesToStringify - 1; i7++) {
17645
17645
  const tmp2 = stringifyIndent(String(i7), value2[i7], stack2, spacer, indentation);
17646
17646
  res2 += tmp2 !== void 0 ? tmp2 : "null";
17647
- res2 += join35;
17647
+ res2 += join37;
17648
17648
  }
17649
17649
  const tmp = stringifyIndent(String(i7), value2[i7], stack2, spacer, indentation);
17650
17650
  res2 += tmp !== void 0 ? tmp : "null";
17651
17651
  if (value2.length - 1 > maximumBreadth) {
17652
17652
  const removedKeys = value2.length - maximumBreadth - 1;
17653
- res2 += `${join35}"... ${getItemCount(removedKeys)} not stringified"`;
17653
+ res2 += `${join37}"... ${getItemCount(removedKeys)} not stringified"`;
17654
17654
  }
17655
17655
  res2 += `
17656
17656
  ${originalIndentation}`;
@@ -17666,16 +17666,16 @@ ${originalIndentation}`;
17666
17666
  return '"[Object]"';
17667
17667
  }
17668
17668
  indentation += spacer;
17669
- const join34 = `,
17669
+ const join36 = `,
17670
17670
  ${indentation}`;
17671
17671
  let res = "";
17672
17672
  let separator = "";
17673
17673
  let maximumPropertiesToStringify = Math.min(keyLength, maximumBreadth);
17674
17674
  if (isTypedArrayWithEntries(value2)) {
17675
- res += stringifyTypedArray(value2, join34, maximumBreadth);
17675
+ res += stringifyTypedArray(value2, join36, maximumBreadth);
17676
17676
  keys = keys.slice(value2.length);
17677
17677
  maximumPropertiesToStringify -= value2.length;
17678
- separator = join34;
17678
+ separator = join36;
17679
17679
  }
17680
17680
  if (deterministic) {
17681
17681
  keys = insertSort(keys);
@@ -17686,13 +17686,13 @@ ${indentation}`;
17686
17686
  const tmp = stringifyIndent(key2, value2[key2], stack2, spacer, indentation);
17687
17687
  if (tmp !== void 0) {
17688
17688
  res += `${separator}${strEscape(key2)}: ${tmp}`;
17689
- separator = join34;
17689
+ separator = join36;
17690
17690
  }
17691
17691
  }
17692
17692
  if (keyLength > maximumBreadth) {
17693
17693
  const removedKeys = keyLength - maximumBreadth;
17694
17694
  res += `${separator}"...": "${getItemCount(removedKeys)} not stringified"`;
17695
- separator = join34;
17695
+ separator = join36;
17696
17696
  }
17697
17697
  if (separator !== "") {
17698
17698
  res = `
@@ -19259,7 +19259,7 @@ var require_buffer_list = __commonJS({
19259
19259
  }
19260
19260
  }, {
19261
19261
  key: "join",
19262
- value: function join34(s6) {
19262
+ value: function join36(s6) {
19263
19263
  if (this.length === 0) return "";
19264
19264
  var p3 = this.head;
19265
19265
  var ret = "" + p3.data;
@@ -30493,7 +30493,7 @@ var require_lodash = __commonJS({
30493
30493
  }
30494
30494
  return mapped.length && mapped[0] === arrays[0] ? baseIntersection(mapped, undefined2, comparator) : [];
30495
30495
  });
30496
- function join34(array, separator) {
30496
+ function join36(array, separator) {
30497
30497
  return array == null ? "" : nativeJoin.call(array, separator);
30498
30498
  }
30499
30499
  function last2(array) {
@@ -32412,7 +32412,7 @@ var require_lodash = __commonJS({
32412
32412
  lodash16.isUndefined = isUndefined2;
32413
32413
  lodash16.isWeakMap = isWeakMap;
32414
32414
  lodash16.isWeakSet = isWeakSet;
32415
- lodash16.join = join34;
32415
+ lodash16.join = join36;
32416
32416
  lodash16.kebabCase = kebabCase;
32417
32417
  lodash16.last = last2;
32418
32418
  lodash16.lastIndexOf = lastIndexOf;
@@ -43837,7 +43837,7 @@ var require_lib7 = __commonJS({
43837
43837
  }
43838
43838
  return buf;
43839
43839
  };
43840
- var randomBytes2 = checks.hasCrypto() ? (buf, start = 0, end2 = buf.length) => (window.crypto.getRandomValues(buf.subarray(start, end2)), buf) : (buf, start, end2) => randomBytesFrom(SYSTEM, buf, start, end2);
43840
+ var randomBytes4 = checks.hasCrypto() ? (buf, start = 0, end2 = buf.length) => (window.crypto.getRandomValues(buf.subarray(start, end2)), buf) : (buf, start, end2) => randomBytesFrom(SYSTEM, buf, start, end2);
43841
43841
  var Crypto = class _Crypto extends ARandom {
43842
43842
  constructor(size = 64) {
43843
43843
  super();
@@ -43853,7 +43853,7 @@ var require_lib7 = __commonJS({
43853
43853
  }
43854
43854
  int() {
43855
43855
  if (this.i >= this.u32.length) {
43856
- randomBytes2(this.buffer);
43856
+ randomBytes4(this.buffer);
43857
43857
  this.i = 0;
43858
43858
  }
43859
43859
  return this.u32[this.i++];
@@ -44054,7 +44054,7 @@ var require_lib7 = __commonJS({
44054
44054
  };
44055
44055
  var uuidv4Bytes = (buf, rnd) => {
44056
44056
  buf = buf || new Uint8Array(16);
44057
- buf = rnd ? randomBytesFrom(rnd, buf) : randomBytes2(buf);
44057
+ buf = rnd ? randomBytesFrom(rnd, buf) : randomBytes4(buf);
44058
44058
  buf[6] = 64 | buf[6] & 15;
44059
44059
  buf[8] = 128 | buf[8] & 63;
44060
44060
  return buf;
@@ -44122,7 +44122,7 @@ var require_lib7 = __commonJS({
44122
44122
  exports2.gaussian = gaussian;
44123
44123
  exports2.geometric = geometric;
44124
44124
  exports2.normal = normal;
44125
- exports2.randomBytes = randomBytes2;
44125
+ exports2.randomBytes = randomBytes4;
44126
44126
  exports2.randomBytesFrom = randomBytesFrom;
44127
44127
  exports2.randomID = randomID;
44128
44128
  exports2.uniform = uniform;
@@ -46046,7 +46046,7 @@ var require_builder = __commonJS({
46046
46046
  }
46047
46047
  };
46048
46048
  exports2.SeqBuilder = SeqBuilder;
46049
- function join34(first2, second, ...others) {
46049
+ function join36(first2, second, ...others) {
46050
46050
  const seq = new SeqBuilder(first2, second);
46051
46051
  if (!others.length) {
46052
46052
  return seq;
@@ -46055,7 +46055,7 @@ var require_builder = __commonJS({
46055
46055
  return res.join(query);
46056
46056
  }, seq);
46057
46057
  }
46058
- exports2.join = join34;
46058
+ exports2.join = join36;
46059
46059
  var SymBuilder = class extends AbstractBuilder {
46060
46060
  constructor(opts) {
46061
46061
  super();
@@ -54889,7 +54889,7 @@ var require_cjs2 = __commonJS({
54889
54889
  var require_lib12 = __commonJS({
54890
54890
  "../../node_modules/.pnpm/write-file-atomic@5.0.1/node_modules/write-file-atomic/lib/index.js"(exports2, module2) {
54891
54891
  "use strict";
54892
- module2.exports = writeFile16;
54892
+ module2.exports = writeFile18;
54893
54893
  module2.exports.sync = writeFileSync4;
54894
54894
  module2.exports._getTmpname = getTmpname;
54895
54895
  module2.exports._cleanupOnExit = cleanupOnExit;
@@ -55014,7 +55014,7 @@ var require_lib12 = __commonJS({
55014
55014
  }
55015
55015
  }
55016
55016
  }
55017
- async function writeFile16(filename, data2, options, callback) {
55017
+ async function writeFile18(filename, data2, options, callback) {
55018
55018
  if (options instanceof Function) {
55019
55019
  callback = options;
55020
55020
  options = {};
@@ -70071,7 +70071,7 @@ var require_lockfile = __commonJS({
70071
70071
  if (eol !== "\n") {
70072
70072
  data2 = data2.replace(/\n/g, eol);
70073
70073
  }
70074
- yield writeFile16(path9, data2);
70074
+ yield writeFile18(path9, data2);
70075
70075
  });
70076
70076
  return function writeFilePreservingEol2(_x30, _x31) {
70077
70077
  return _ref31.apply(this, arguments);
@@ -70083,7 +70083,7 @@ var require_lockfile = __commonJS({
70083
70083
  const file = (_path2 || _load_path()).default.join(dir, filename);
70084
70084
  const fileLink = (_path2 || _load_path()).default.join(dir, filename + "-link");
70085
70085
  try {
70086
- yield writeFile16(file, "test");
70086
+ yield writeFile18(file, "test");
70087
70087
  yield link(file, fileLink);
70088
70088
  } catch (err) {
70089
70089
  return false;
@@ -70233,7 +70233,7 @@ var require_lockfile = __commonJS({
70233
70233
  const lockQueue = exports3.lockQueue = new (_blockingQueue || _load_blockingQueue()).default("fs lock");
70234
70234
  const readFileBuffer = exports3.readFileBuffer = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.readFile);
70235
70235
  const open = exports3.open = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.open);
70236
- const writeFile16 = exports3.writeFile = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.writeFile);
70236
+ const writeFile18 = exports3.writeFile = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.writeFile);
70237
70237
  const readlink2 = exports3.readlink = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.readlink);
70238
70238
  const realpath2 = exports3.realpath = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.realpath);
70239
70239
  const readdir7 = exports3.readdir = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.readdir);
@@ -74304,11 +74304,11 @@ ${indent3}`);
74304
74304
  const open = (0, (_promise || _load_promise()).promisify)((_fs || _load_fs()).default.open);
74305
74305
  const futimes = (0, (_promise || _load_promise()).promisify)((_fs || _load_fs()).default.futimes);
74306
74306
  const write = (0, (_promise || _load_promise()).promisify)((_fs || _load_fs()).default.write);
74307
- const unlink2 = exports3.unlink = (0, (_promise || _load_promise()).promisify)(__webpack_require__(233));
74307
+ const unlink4 = exports3.unlink = (0, (_promise || _load_promise()).promisify)(__webpack_require__(233));
74308
74308
  const copyFile2 = exports3.copyFile = (() => {
74309
74309
  var _ref = (0, (_asyncToGenerator2 || _load_asyncToGenerator()).default)(function* (data2, cleanup) {
74310
74310
  try {
74311
- yield unlink2(data2.dest);
74311
+ yield unlink4(data2.dest);
74312
74312
  yield copyFilePoly(data2.src, data2.dest, 0, data2);
74313
74313
  } finally {
74314
74314
  if (cleanup) {
@@ -122839,7 +122839,7 @@ var require_summary = __commonJS({
122839
122839
  exports2.summary = exports2.markdownSummary = exports2.SUMMARY_DOCS_URL = exports2.SUMMARY_ENV_VAR = void 0;
122840
122840
  var os_1 = __require("os");
122841
122841
  var fs_1 = __require("fs");
122842
- var { access: access5, appendFile, writeFile: writeFile16 } = fs_1.promises;
122842
+ var { access: access5, appendFile, writeFile: writeFile18 } = fs_1.promises;
122843
122843
  exports2.SUMMARY_ENV_VAR = "GITHUB_STEP_SUMMARY";
122844
122844
  exports2.SUMMARY_DOCS_URL = "https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary";
122845
122845
  var Summary = class {
@@ -122897,7 +122897,7 @@ var require_summary = __commonJS({
122897
122897
  return __awaiter(this, void 0, void 0, function* () {
122898
122898
  const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);
122899
122899
  const filePath = yield this.filePath();
122900
- const writeFunc = overwrite ? writeFile16 : appendFile;
122900
+ const writeFunc = overwrite ? writeFile18 : appendFile;
122901
122901
  yield writeFunc(filePath, this._buffer, { encoding: "utf8" });
122902
122902
  return this.emptyBuffer();
122903
122903
  });
@@ -204522,9 +204522,9 @@ var {
204522
204522
  } = import_index.default;
204523
204523
 
204524
204524
  // dist/index.js
204525
- import { mkdir as mkdir7, mkdtemp as mkdtemp2, readFile as readFile35, rm as rm3, writeFile as writeFile15 } from "fs/promises";
204526
- import { tmpdir as tmpdir3 } from "os";
204527
- import { dirname as dirname26, join as join33, resolve as resolve44 } from "path";
204525
+ import { mkdir as mkdir7, mkdtemp as mkdtemp2, readFile as readFile35, rm as rm3, writeFile as writeFile17 } from "fs/promises";
204526
+ import { tmpdir as tmpdir5 } from "os";
204527
+ import { dirname as dirname26, join as join35, resolve as resolve44 } from "path";
204528
204528
 
204529
204529
  // ../../node_modules/.pnpm/remeda@2.14.0/node_modules/remeda/dist/chunk-ANXBDSUI.js
204530
204530
  var s = { done: false, hasNext: false };
@@ -209446,6 +209446,18 @@ async function sendTelemetrySocket(analysisMetadataId, telemetry) {
  console.warn("Failed to send telemetry to Socket:", error.message);
  }
  }
+ async function sendAnalyzerTelemetrySocket(analysisMetadataId, telemetry) {
+ try {
+ const url2 = getSocketApiUrl("tier1-reachability-scan/add-analyzer-telemetry");
+ const data2 = {
+ analysis_metadata_id: analysisMetadataId,
+ telemetry
+ };
+ await axios2.post(url2, data2, { headers: getAuthHeaders() });
+ } catch (error) {
+ console.warn("Failed to send analyzer telemetry to Socket:", error.message);
+ }
+ }
  async function registerDiagnosticsToAnalysisMetadataSocket(analysisMetadataId, diagnosticsData) {
  const abnormalExit = diagnosticsData.analysisDiagnostics.aborted ? "ABORTED" : diagnosticsData.analysisDiagnostics.timeout ? "TIMEOUT" : "NONE";
  try {
@@ -209477,16 +209489,17 @@ function getSocketAPI() {
  sendLogChunkSocket,
  createAnalysisMetadataSocket,
  sendTelemetrySocket,
+ sendAnalyzerTelemetrySocket,
  registerDiagnosticsToAnalysisMetadataSocket
  };
  }

  // ../utils/src/tool-extractor.ts
  import { createHash } from "node:crypto";
- import { createReadStream, createWriteStream as createWriteStream2, readFileSync, statSync as statSync2 } from "node:fs";
- import { copyFile, cp as cp2, mkdir as mkdir4, writeFile } from "node:fs/promises";
- import { tmpdir } from "node:os";
- import { basename as basename3, dirname as dirname5, join as join4 } from "node:path";
+ import { createReadStream, createWriteStream as createWriteStream2, readFileSync as readFileSync2, statSync as statSync2 } from "node:fs";
+ import { copyFile, cp as cp2, mkdir as mkdir4, writeFile as writeFile2 } from "node:fs/promises";
+ import { tmpdir as tmpdir2 } from "node:os";
+ import { basename as basename3, dirname as dirname5, join as join5 } from "node:path";
  import { pipeline } from "node:stream/promises";
  import { createGunzip } from "node:zlib";

@@ -209891,6 +209904,71 @@ var TelemetryCollector = class {
  }
  };

+ // ../utils/src/telemetry/analyzer-telemetry-server.ts
+ import { existsSync, readFileSync, watchFile, unwatchFile } from "fs";
+ import { unlink, writeFile } from "fs/promises";
+ import { tmpdir } from "os";
+ import { join as join2 } from "path";
+ import { randomBytes } from "crypto";
+ var AnalyzerTelemetryServer = class {
+ constructor(handler) {
+ this.handler = handler;
+ const fileId = randomBytes(8).toString("hex");
+ this.filePath = join2(tmpdir(), `analyzer-telemetry-${fileId}.jsonl`);
+ }
+ filePath;
+ lastReadPosition = 0;
+ watching = false;
+ /**
+ * Starts the server and returns the file path that analyzers should write to.
+ */
+ async start() {
+ await writeFile(this.filePath, "");
+ this.watching = true;
+ watchFile(this.filePath, { interval: 1e3 }, () => {
+ this.processNewEvents();
+ });
+ return this.filePath;
+ }
+ processNewEvents() {
+ if (!existsSync(this.filePath)) return;
+ try {
+ const content = readFileSync(this.filePath, "utf-8");
+ const newContent = content.substring(this.lastReadPosition);
+ this.lastReadPosition = content.length;
+ if (!newContent) return;
+ const lines = newContent.split("\n");
+ for (const line of lines) {
+ if (line.trim()) {
+ try {
+ const event = JSON.parse(line);
+ this.handler.onAnalyzerEvent(event);
+ } catch {
+ }
+ }
+ }
+ } catch {
+ }
+ }
+ /**
+ * Closes the server and cleans up the file.
+ */
+ async close() {
+ this.processNewEvents();
+ this.handler.close?.();
+ if (this.watching) {
+ unwatchFile(this.filePath);
+ this.watching = false;
+ }
+ if (existsSync(this.filePath)) {
+ try {
+ await unlink(this.filePath);
+ } catch {
+ }
+ }
+ }
+ };
+
  // ../utils/src/command-utils.ts
  var DEFAULT_TIMEOUT_MS = 30 * 60 * 1e3;
  async function execAndLogOnFailure(cmd, dir, options, logLevel = "info") {
@@ -209946,15 +210024,22 @@ function startTelemetry(pid, handler) {
  async function execNeverFail(cmd, dir, options) {
  const stopHeartbeat = options?.heartbeat ? startHeartbeat(options.heartbeat) : void 0;
  let stopTelemetry;
+ let analyzerTelemetryServer;
+ let analyzerTelemetryFilePath;
+ if (options?.analyzerTelemetryHandler) {
+ analyzerTelemetryServer = new AnalyzerTelemetryServer(options.analyzerTelemetryHandler);
+ analyzerTelemetryFilePath = await analyzerTelemetryServer.start();
+ }
  try {
  return await new Promise((resolve45) => {
  let args2;
  if (typeof cmd !== "string") [cmd, ...args2] = cmd;
  const timeout = options?.timeout ?? DEFAULT_TIMEOUT_MS;
+ const env = analyzerTelemetryFilePath ? { ...options?.env ?? process.env, ANALYZER_TELEMETRY_FILE_PATH: analyzerTelemetryFilePath } : options?.env;
  const childProcess = execFile2(
  cmd,
  args2,
- { ...options, cwd: dir, maxBuffer: 1024 * 1024 * 1024, shell: args2 === void 0, timeout },
+ { ...options, env, cwd: dir, maxBuffer: 1024 * 1024 * 1024, shell: args2 === void 0, timeout },
  (error, stdout, stderr) => {
  resolve45({ error, stdout, stderr });
  }
@@ -209980,6 +210065,7 @@ async function execNeverFail(cmd, dir, options) {
  } finally {
  stopHeartbeat?.();
  stopTelemetry?.();
+ await analyzerTelemetryServer?.close();
  }
  }
  async function runCommandResolveStdOut(cmd, dir, options) {
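The hunks above add a simple file-based telemetry channel: execNeverFail creates an AnalyzerTelemetryServer, hands its temporary JSONL file to the child process via the ANALYZER_TELEMETRY_FILE_PATH environment variable, watches the file (polling about once a second), and forwards each parsed JSON line to the configured handler. The sketch below is hypothetical analyzer-side code, not part of this package; the event fields are illustrative only, since the server forwards any parsed JSON value to its handler's onAnalyzerEvent callback.

// Hypothetical analyzer-side emitter: appends one JSON object per line (JSONL)
// to the file whose path the CLI passes via ANALYZER_TELEMETRY_FILE_PATH.
import { appendFileSync } from "node:fs";

export function emitAnalyzerEvent(event: Record<string, unknown>): void {
  const filePath = process.env.ANALYZER_TELEMETRY_FILE_PATH;
  if (!filePath) return; // channel not enabled for this run
  appendFileSync(filePath, JSON.stringify(event) + "\n");
}

// Example call; the field names are made up for illustration.
emitAnalyzerEvent({ phase: "call-graph-construction", durationMs: 1234 });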
@@ -210037,9 +210123,9 @@ async function parseShellArgs(args2) {
210037
210123
  // ../utils/src/file-utils.ts
210038
210124
  var import_lodash3 = __toESM(require_lodash(), 1);
210039
210125
  var import_micromatch = __toESM(require_micromatch(), 1);
210040
- import { existsSync } from "fs";
210126
+ import { existsSync as existsSync2 } from "fs";
210041
210127
  import { access, cp, readdir, stat } from "fs/promises";
210042
- import { basename, join as join2, relative as relative2, resolve } from "path";
210128
+ import { basename, join as join3, relative as relative2, resolve } from "path";
210043
210129
 
210044
210130
  // ../web-compat-utils/src/async.ts
210045
210131
  async function asyncMap(array, mapper, concurrency = 1) {
@@ -210098,8 +210184,8 @@ function findParent(dir, predicate, wholePath) {
210098
210184
  }
210099
210185
  async function getFilesRelative(dir, excludeDirs) {
210100
210186
  async function helper(subDir, arrayOfFiles) {
210101
- for (const item of await readdir(join2(dir, subDir), { withFileTypes: true })) {
210102
- const itemPath = join2(subDir, item.name);
210187
+ for (const item of await readdir(join3(dir, subDir), { withFileTypes: true })) {
210188
+ const itemPath = join3(subDir, item.name);
210103
210189
  if (item.isDirectory()) {
210104
210190
  if (!excludeDirs?.includes(item.name)) await helper(itemPath, arrayOfFiles);
210105
210191
  } else if (item.isFile()) {
@@ -214494,7 +214580,7 @@ import fs9 from "node:fs";
214494
214580
 
214495
214581
  // ../../node_modules/.pnpm/tar@7.4.3/node_modules/tar/dist/esm/unpack.js
214496
214582
  import assert3 from "node:assert";
214497
- import { randomBytes } from "node:crypto";
214583
+ import { randomBytes as randomBytes2 } from "node:crypto";
214498
214584
  import fs8 from "node:fs";
214499
214585
  import path6 from "node:path";
214500
214586
 
@@ -214867,7 +214953,7 @@ var mkdir3 = (dir, opt, cb) => {
214867
214953
  const gid = opt.gid;
214868
214954
  const doChown = typeof uid === "number" && typeof gid === "number" && (uid !== opt.processUid || gid !== opt.processGid);
214869
214955
  const preserve = opt.preserve;
214870
- const unlink2 = opt.unlink;
214956
+ const unlink4 = opt.unlink;
214871
214957
  const cache = opt.cache;
214872
214958
  const cwd = normalizeWindowsPath(opt.cwd);
214873
214959
  const done = (er, created) => {
@@ -214899,33 +214985,33 @@ var mkdir3 = (dir, opt, cb) => {
214899
214985
  }
214900
214986
  const sub = normalizeWindowsPath(path5.relative(cwd, dir));
214901
214987
  const parts = sub.split("/");
214902
- mkdir_(cwd, parts, mode, cache, unlink2, cwd, void 0, done);
214988
+ mkdir_(cwd, parts, mode, cache, unlink4, cwd, void 0, done);
214903
214989
  };
214904
- var mkdir_ = (base, parts, mode, cache, unlink2, cwd, created, cb) => {
214990
+ var mkdir_ = (base, parts, mode, cache, unlink4, cwd, created, cb) => {
214905
214991
  if (!parts.length) {
214906
214992
  return cb(null, created);
214907
214993
  }
214908
214994
  const p3 = parts.shift();
214909
214995
  const part = normalizeWindowsPath(path5.resolve(base + "/" + p3));
214910
214996
  if (cGet(cache, part)) {
214911
- return mkdir_(part, parts, mode, cache, unlink2, cwd, created, cb);
214997
+ return mkdir_(part, parts, mode, cache, unlink4, cwd, created, cb);
214912
214998
  }
214913
- fs7.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink2, cwd, created, cb));
214999
+ fs7.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink4, cwd, created, cb));
214914
215000
  };
214915
- var onmkdir = (part, parts, mode, cache, unlink2, cwd, created, cb) => (er) => {
215001
+ var onmkdir = (part, parts, mode, cache, unlink4, cwd, created, cb) => (er) => {
214916
215002
  if (er) {
214917
215003
  fs7.lstat(part, (statEr, st) => {
214918
215004
  if (statEr) {
214919
215005
  statEr.path = statEr.path && normalizeWindowsPath(statEr.path);
214920
215006
  cb(statEr);
214921
215007
  } else if (st.isDirectory()) {
214922
- mkdir_(part, parts, mode, cache, unlink2, cwd, created, cb);
214923
- } else if (unlink2) {
215008
+ mkdir_(part, parts, mode, cache, unlink4, cwd, created, cb);
215009
+ } else if (unlink4) {
214924
215010
  fs7.unlink(part, (er2) => {
214925
215011
  if (er2) {
214926
215012
  return cb(er2);
214927
215013
  }
214928
- fs7.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink2, cwd, created, cb));
215014
+ fs7.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink4, cwd, created, cb));
214929
215015
  });
214930
215016
  } else if (st.isSymbolicLink()) {
214931
215017
  return cb(new SymlinkError(part, part + "/" + parts.join("/")));
@@ -214935,7 +215021,7 @@ var onmkdir = (part, parts, mode, cache, unlink2, cwd, created, cb) => (er) => {
214935
215021
  });
214936
215022
  } else {
214937
215023
  created = created || part;
214938
- mkdir_(part, parts, mode, cache, unlink2, cwd, created, cb);
215024
+ mkdir_(part, parts, mode, cache, unlink4, cwd, created, cb);
214939
215025
  }
214940
215026
  };
214941
215027
  var checkCwdSync = (dir) => {
@@ -214960,7 +215046,7 @@ var mkdirSync3 = (dir, opt) => {
214960
215046
  const gid = opt.gid;
214961
215047
  const doChown = typeof uid === "number" && typeof gid === "number" && (uid !== opt.processUid || gid !== opt.processGid);
214962
215048
  const preserve = opt.preserve;
214963
- const unlink2 = opt.unlink;
215049
+ const unlink4 = opt.unlink;
214964
215050
  const cache = opt.cache;
214965
215051
  const cwd = normalizeWindowsPath(opt.cwd);
214966
215052
  const done = (created2) => {
@@ -214999,7 +215085,7 @@ var mkdirSync3 = (dir, opt) => {
214999
215085
  if (st.isDirectory()) {
215000
215086
  cSet(cache, part, true);
215001
215087
  continue;
215002
- } else if (unlink2) {
215088
+ } else if (unlink4) {
215003
215089
  fs7.unlinkSync(part);
215004
215090
  fs7.mkdirSync(part, mode);
215005
215091
  created = created || part;
@@ -215024,14 +215110,14 @@ var normalizeUnicode = (s6) => {
215024
215110
  };
215025
215111
 
215026
215112
  // ../../node_modules/.pnpm/tar@7.4.3/node_modules/tar/dist/esm/path-reservations.js
215027
- import { join as join3 } from "node:path";
215113
+ import { join as join4 } from "node:path";
215028
215114
  var platform5 = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
215029
215115
  var isWindows2 = platform5 === "win32";
215030
215116
  var getDirs = (path9) => {
215031
215117
  const dirs = path9.split("/").slice(0, -1).reduce((set, path10) => {
215032
215118
  const s6 = set[set.length - 1];
215033
215119
  if (s6 !== void 0) {
215034
- path10 = join3(s6, path10);
215120
+ path10 = join4(s6, path10);
215035
215121
  }
215036
215122
  set.push(path10 || "/");
215037
215123
  return set;
@@ -215049,7 +215135,7 @@ var PathReservations = class {
215049
215135
  #running = /* @__PURE__ */ new Set();
215050
215136
  reserve(paths, fn2) {
215051
215137
  paths = isWindows2 ? ["win32 parallelization disabled"] : paths.map((p3) => {
215052
- return stripTrailingSlashes(join3(normalizeUnicode(p3))).toLowerCase();
215138
+ return stripTrailingSlashes(join4(normalizeUnicode(p3))).toLowerCase();
215053
215139
  });
215054
215140
  const dirs = new Set(paths.map((path9) => getDirs(path9)).reduce((a4, b) => a4.concat(b)));
215055
215141
  this.#reservations.set(fn2, { dirs, paths });
@@ -215189,7 +215275,7 @@ var unlinkFile = (path9, cb) => {
215189
215275
  if (!isWindows3) {
215190
215276
  return fs8.unlink(path9, cb);
215191
215277
  }
215192
- const name2 = path9 + ".DELETE." + randomBytes(16).toString("hex");
215278
+ const name2 = path9 + ".DELETE." + randomBytes2(16).toString("hex");
215193
215279
  fs8.rename(path9, name2, (er) => {
215194
215280
  if (er) {
215195
215281
  return cb(er);
@@ -215201,7 +215287,7 @@ var unlinkFileSync = (path9) => {
215201
215287
  if (!isWindows3) {
215202
215288
  return fs8.unlinkSync(path9);
215203
215289
  }
215204
- const name2 = path9 + ".DELETE." + randomBytes(16).toString("hex");
215290
+ const name2 = path9 + ".DELETE." + randomBytes2(16).toString("hex");
215205
215291
  fs8.renameSync(path9, name2);
215206
215292
  fs8.unlinkSync(name2);
215207
215293
  };
@@ -216133,13 +216219,13 @@ async function getNodeExecutable(overridePath) {
216133
216219
  const nodeArch = arch === "arm" ? "arm64" : arch;
216134
216220
  const isWindows4 = platform9 === "win32";
216135
216221
  const binaryName = isWindows4 ? `node-${platform9}-${nodeArch}.exe.gz` : `node-${platform9}-${nodeArch}.gz`;
216136
- const compressedBinaryPath = join4(extractedPath, binaryName);
216222
+ const compressedBinaryPath = join5(extractedPath, binaryName);
216137
216223
  if (!await exists(compressedBinaryPath)) {
216138
216224
  throw new Error(`Node.js binary not found: ${compressedBinaryPath}. Platform: ${platform9}-${nodeArch}`);
216139
216225
  }
216140
- const tmpDir = join4(getExtractionBaseDir(), "node-runtime");
216226
+ const tmpDir = join5(getExtractionBaseDir(), "node-runtime");
216141
216227
  await mkdir4(tmpDir, { recursive: true });
216142
- const nodeBinaryPath = join4(tmpDir, isWindows4 ? "node.exe" : "node");
216228
+ const nodeBinaryPath = join5(tmpDir, isWindows4 ? "node.exe" : "node");
216143
216229
  logger.debug(`Decompressing Node.js binary to ${nodeBinaryPath}...`);
216144
216230
  await pipeline(
216145
216231
  createReadStream(compressedBinaryPath),
@@ -216155,8 +216241,8 @@ var cliVersion;
216155
216241
  function getCliVersion() {
216156
216242
  if (cliVersion) return cliVersion;
216157
216243
  try {
216158
- const packageJsonPath = isNexeMode() ? join4(NEXE_VIRTUAL_FS_ROOT, "package.json") : join4(dirname5(dirname5(dirname5(dirname5(__filename)))), "npm-package-cli", "package.json");
216159
- const packageJson = JSON.parse(readFileSync(packageJsonPath, "utf-8"));
216244
+ const packageJsonPath = isNexeMode() ? join5(NEXE_VIRTUAL_FS_ROOT, "package.json") : join5(dirname5(dirname5(dirname5(dirname5(__filename)))), "npm-package-cli", "package.json");
216245
+ const packageJson = JSON.parse(readFileSync2(packageJsonPath, "utf-8"));
216160
216246
  if (process.env.ALWAYS_REEXTRACT_TOOLS === "true") {
216161
216247
  logger.info("ALWAYS_REEXTRACT_TOOLS is set to true, re-extracting tools");
216162
216248
  const randomVersion = Math.random().toString().slice(2, 8);
@@ -216172,7 +216258,7 @@ function getCliVersion() {
216172
216258
  }
216173
216259
  function getExtractionBaseDir() {
216174
216260
  const version4 = getCliVersion();
216175
- return join4(tmpdir(), `coana-cli-v${version4}`);
216261
+ return join5(tmpdir2(), `coana-cli-v${version4}`);
216176
216262
  }
216177
216263
  async function calculateChecksum(filePath) {
216178
216264
  const hash = createHash("sha256");
@@ -216184,8 +216270,8 @@ function loadChecksums() {
216184
216270
  if (!isNexeMode()) {
216185
216271
  throw new Error("Tool extraction is only supported in nexe mode");
216186
216272
  }
216187
- const checksumsPath = join4(NEXE_VIRTUAL_FS_ROOT, "checksums.json");
216188
- return JSON.parse(readFileSync(checksumsPath, "utf-8"));
216273
+ const checksumsPath = join5(NEXE_VIRTUAL_FS_ROOT, "checksums.json");
216274
+ return JSON.parse(readFileSync2(checksumsPath, "utf-8"));
216189
216275
  } catch (error) {
216190
216276
  logger.warn("Failed to load checksums.json:", error);
216191
216277
  throw new Error(
@@ -216196,7 +216282,7 @@ function loadChecksums() {
216196
216282
  async function verifyToolIntegrity(toolName, extractedPath, checksums) {
216197
216283
  try {
216198
216284
  for (const [relPath, expectedChecksum] of Object.entries(checksums)) {
216199
- const filePath = join4(extractedPath, relPath);
216285
+ const filePath = join5(extractedPath, relPath);
216200
216286
  if (!await exists(filePath)) {
216201
216287
  logger.debug(`File ${filePath} does not exist`);
216202
216288
  return false;
@@ -216229,8 +216315,8 @@ async function extractTool(toolName, resourcePath) {
216229
216315
  return cached.extractedPath;
216230
216316
  }
216231
216317
  const baseDir = getExtractionBaseDir();
216232
- const extractedPath = join4(baseDir, resourcePath);
216233
- const markerFile = join4(extractedPath, ".extracted");
216318
+ const extractedPath = join5(baseDir, resourcePath);
216319
+ const markerFile = join5(extractedPath, ".extracted");
216234
216320
  if (await exists(markerFile)) {
216235
216321
  const checksums = loadChecksums();
216236
216322
  const toolChecksums = checksums[toolName];
@@ -216245,7 +216331,7 @@ async function extractTool(toolName, resourcePath) {
216245
216331
  logger.debug(`Extracting ${toolName} to ${extractedPath}...`);
216246
216332
  try {
216247
216333
  await mkdir4(extractedPath, { recursive: true });
216248
- const sourcePath = isNexeMode() ? join4(NEXE_VIRTUAL_FS_ROOT, resourcePath) : resourcePath;
216334
+ const sourcePath = isNexeMode() ? join5(NEXE_VIRTUAL_FS_ROOT, resourcePath) : resourcePath;
216249
216335
  if (!await exists(sourcePath)) {
216250
216336
  throw new Error(
216251
216337
  `Tool source not found: ${sourcePath}. isNexeMode=${isNexeMode()}, NEXE_VIRTUAL_FS_ROOT=${NEXE_VIRTUAL_FS_ROOT}`
@@ -216256,12 +216342,12 @@ async function extractTool(toolName, resourcePath) {
216256
216342
  await cp2(sourcePath, extractedPath, { recursive: true });
216257
216343
  } else if (stats.isFile()) {
216258
216344
  const fileName3 = basename3(sourcePath);
216259
- const destFile = join4(extractedPath, fileName3);
216345
+ const destFile = join5(extractedPath, fileName3);
216260
216346
  await copyFile(sourcePath, destFile);
216261
216347
  } else {
216262
216348
  throw new Error(`Tool source is neither a file nor a directory: ${sourcePath}`);
216263
216349
  }
216264
- await writeFile(markerFile, (/* @__PURE__ */ new Date()).toISOString());
216350
+ await writeFile2(markerFile, (/* @__PURE__ */ new Date()).toISOString());
216265
216351
  logger.debug(`Successfully extracted ${toolName}`);
216266
216352
  extractedTools.set(toolName, { extractedPath });
216267
216353
  return extractedPath;
@@ -216308,11 +216394,11 @@ async function getNpmExecutable() {
216308
216394
  logger.debug("npm not found on system, extracting embedded npm...");
216309
216395
  extractedNpmPath = (async () => {
216310
216396
  const extractedPath = await extractTool("npm-binaries", "npm-binaries");
216311
- const npmArchivePath = join4(extractedPath, "npm-package.tar.gz");
216397
+ const npmArchivePath = join5(extractedPath, "npm-package.tar.gz");
216312
216398
  if (!await exists(npmArchivePath)) {
216313
216399
  throw new Error(`npm archive not found: ${npmArchivePath}`);
216314
216400
  }
216315
- const tmpDir = join4(getExtractionBaseDir(), "npm-runtime");
216401
+ const tmpDir = join5(getExtractionBaseDir(), "npm-runtime");
216316
216402
  await mkdir4(tmpDir, { recursive: true });
216317
216403
  logger.debug(`Extracting npm package to ${tmpDir}...`);
216318
216404
  await extract({
@@ -216321,8 +216407,8 @@ async function getNpmExecutable() {
216321
216407
  });
216322
216408
  const nodeExecutable = await getNodeExecutable();
216323
216409
  const isWindows4 = process.platform === "win32";
216324
- const npmCliPath = join4(tmpDir, "bin", "npm-cli.js");
216325
- const npmWrapperPath = join4(tmpDir, isWindows4 ? "npm.cmd" : "npm");
216410
+ const npmCliPath = join5(tmpDir, "bin", "npm-cli.js");
216411
+ const npmWrapperPath = join5(tmpDir, isWindows4 ? "npm.cmd" : "npm");
216326
216412
  if (!await exists(npmCliPath)) {
216327
216413
  throw new Error(`npm CLI not found at ${npmCliPath}`);
216328
216414
  }
@@ -216330,12 +216416,12 @@ async function getNpmExecutable() {
216330
216416
  const wrapperContent = `@echo off
216331
216417
  "${nodeExecutable}" "${npmCliPath}" %*
216332
216418
  `;
216333
- await writeFile(npmWrapperPath, wrapperContent);
216419
+ await writeFile2(npmWrapperPath, wrapperContent);
216334
216420
  } else {
216335
216421
  const wrapperContent = `#!/bin/sh
216336
216422
  exec "${nodeExecutable}" "${npmCliPath}" "$@"
216337
216423
  `;
216338
- await writeFile(npmWrapperPath, wrapperContent, { mode: 493 });
216424
+ await writeFile2(npmWrapperPath, wrapperContent, { mode: 493 });
216339
216425
  }
216340
216426
  logger.debug("npm extracted and wrapper created");
216341
216427
  return npmWrapperPath;
@@ -216362,13 +216448,13 @@ async function getUvExecutable() {
216362
216448
  const nodeArch = arch === "arm" ? "arm64" : arch;
216363
216449
  const isWindows4 = platform9 === "win32";
216364
216450
  const binaryName = isWindows4 ? `uv-${platform9}-${nodeArch}.exe.gz` : `uv-${platform9}-${nodeArch}.gz`;
216365
- const compressedBinaryPath = join4(extractedPath, binaryName);
216451
+ const compressedBinaryPath = join5(extractedPath, binaryName);
216366
216452
  if (!await exists(compressedBinaryPath)) {
216367
216453
  throw new Error(`uv binary not found: ${compressedBinaryPath}. Platform: ${platform9}-${nodeArch}`);
216368
216454
  }
216369
- const tmpDir = join4(getExtractionBaseDir(), "uv-runtime");
216455
+ const tmpDir = join5(getExtractionBaseDir(), "uv-runtime");
216370
216456
  await mkdir4(tmpDir, { recursive: true });
216371
- const uvBinaryPath = join4(tmpDir, isWindows4 ? "uv.exe" : "uv");
216457
+ const uvBinaryPath = join5(tmpDir, isWindows4 ? "uv.exe" : "uv");
216372
216458
  logger.debug(`Decompressing uv binary to ${uvBinaryPath}...`);
216373
216459
  await pipeline(
216374
216460
  createReadStream(compressedBinaryPath),
@@ -216384,9 +216470,9 @@ function handleNexeBinaryMode() {
216384
216470
  if (process.__nexe) {
216385
216471
  logger.info(`Nexe root: ${NEXE_VIRTUAL_FS_ROOT}`);
216386
216472
  process.env.COANA_ROOT = NEXE_VIRTUAL_FS_ROOT;
216387
- process.env.REPOS_PATH = join4(NEXE_VIRTUAL_FS_ROOT, "repos");
216388
- process.env.COANA_REPOS_PATH = join4(NEXE_VIRTUAL_FS_ROOT, "repos", "coana-tech");
216389
- process.env.REACHABILITY_ANALYZERS_SCRIPT_PATH = join4(NEXE_VIRTUAL_FS_ROOT, "reachability-analyzers-cli.mjs");
216473
+ process.env.REPOS_PATH = join5(NEXE_VIRTUAL_FS_ROOT, "repos");
216474
+ process.env.COANA_REPOS_PATH = join5(NEXE_VIRTUAL_FS_ROOT, "repos", "coana-tech");
216475
+ process.env.REACHABILITY_ANALYZERS_SCRIPT_PATH = join5(NEXE_VIRTUAL_FS_ROOT, "reachability-analyzers-cli.mjs");
216390
216476
  }
216391
216477
  }
216392
216478
 
@@ -217084,8 +217170,8 @@ function splitLines(text3) {
217084
217170
  }
217085
217171
 
217086
217172
  // ../fixing-management/src/fixing-management/utils/socket-patch-utils.ts
217087
- import { existsSync as existsSync2 } from "node:fs";
217088
- import { readFile as readFile3, writeFile as writeFile2 } from "node:fs/promises";
217173
+ import { existsSync as existsSync3 } from "node:fs";
217174
+ import { readFile as readFile3, writeFile as writeFile3 } from "node:fs/promises";
217089
217175
  import { resolve as resolve4 } from "node:path";
217090
217176
 
217091
217177
  // ../utils/src/version-comparison/version-satisfies.ts
@@ -218483,7 +218569,7 @@ var ecosystemMap = {
  RUST: {
  sortVersions: import_semver2.sort,
  isPrerelease: semverIsPrerelease,
- versionSatisfiesSpecifier: (version4, specifier) => semverSatisfiesSpecifier(version4, /^\d/.test(specifier) ? `^${specifier}` : specifier),
+ versionSatisfiesSpecifier: (version4, specifier) => specifier.split(",").map((part) => part.trim()).every((trimmed) => semverSatisfiesSpecifier(version4, /^\d/.test(trimmed) ? `^${trimmed}` : trimmed)),
  versionSatisfiesRelation: import_semver2.cmp,
  versionSatisfiesRange: semverSatisfiesRange
  },
@@ -218605,8 +218691,8 @@ async function applyPatches(ecosystem, rootDir, patches, ctxt, patchAppliedMessa
218605
218691
  }
218606
218692
  const appliedPatches = [];
218607
218693
  for (const [fullPath, patches2] of fullPathToPatches) {
218608
- if (!existsSync2(fullPath)) {
218609
- await writeFile2(fullPath, "", "utf-8");
218694
+ if (!existsSync3(fullPath)) {
218695
+ await writeFile3(fullPath, "", "utf-8");
218610
218696
  }
218611
218697
  let fileContent = await readFile3(fullPath, "utf-8");
218612
218698
  const groupedPatches = groupPatches(rootDir, patches2);
@@ -218626,7 +218712,7 @@ async function applyPatches(ecosystem, rootDir, patches, ctxt, patchAppliedMessa
218626
218712
  });
218627
218713
  appliedPatches.push(patch);
218628
218714
  }
218629
- await writeFile2(fullPath, fileContent, "utf-8");
218715
+ await writeFile3(fullPath, fileContent, "utf-8");
218630
218716
  }
218631
218717
  return appliedPatches;
218632
218718
  }
@@ -219764,13 +219850,13 @@ replace ${modulePath} ${currentVersion} => ${modulePath} ${newVersion}
219764
219850
  };
219765
219851
 
219766
219852
  // ../fixing-management/src/fixing-management/maven/gradle-fixing-manager.ts
219767
- import { existsSync as existsSync5 } from "node:fs";
219853
+ import { existsSync as existsSync6 } from "node:fs";
219768
219854
  import { readFile as readFile8 } from "node:fs/promises";
219769
- import { join as join6, resolve as resolve9 } from "node:path";
219855
+ import { join as join7, resolve as resolve9 } from "node:path";
219770
219856
 
219771
219857
  // ../fixing-management/src/fixing-management/utils/coana-patch-application.ts
219772
- import { existsSync as existsSync3 } from "node:fs";
219773
- import { readFile as readFile5, writeFile as writeFile3 } from "node:fs/promises";
219858
+ import { existsSync as existsSync4 } from "node:fs";
219859
+ import { readFile as readFile5, writeFile as writeFile4 } from "node:fs/promises";
219774
219860
  import { resolve as resolve7 } from "node:path";
219775
219861
  function detectPatchConflicts(rootDir, patchResults) {
219776
219862
  const patchesByFile = /* @__PURE__ */ new Map();
@@ -219900,8 +219986,8 @@ async function applyPatchResults(ecosystem, rootDir, patchResults) {
219900
219986
  }
219901
219987
  for (const [filePath, filePatches] of patchesByFile) {
219902
219988
  const sortedPatches = filePatches.sort((a4, b) => b.offset - a4.offset);
219903
- if (!existsSync3(filePath)) {
219904
- await writeFile3(filePath, "", "utf-8");
219989
+ if (!existsSync4(filePath)) {
219990
+ await writeFile4(filePath, "", "utf-8");
219905
219991
  }
219906
219992
  let fileContent = await readFile5(filePath, "utf-8");
219907
219993
  for (const patch of sortedPatches) {
@@ -219909,7 +219995,7 @@ async function applyPatchResults(ecosystem, rootDir, patchResults) {
219909
219995
  const end2 = patch.offset + (patch.oldText?.length ?? 0);
219910
219996
  fileContent = fileContent.substring(0, start) + patch.newText + fileContent.substring(end2);
219911
219997
  }
219912
- await writeFile3(filePath, fileContent, "utf-8");
219998
+ await writeFile4(filePath, fileContent, "utf-8");
219913
219999
  }
219914
220000
  }
219915
220001
 
@@ -219918,14 +220004,14 @@ var import_good_enough_parser = __toESM(require_cjs(), 1);
219918
220004
  import { readFile as readFile6 } from "node:fs/promises";
219919
220005
 
219920
220006
  // ../fixing-management/src/fixing-management/maven/utils.ts
219921
- import { existsSync as existsSync4 } from "node:fs";
220007
+ import { existsSync as existsSync5 } from "node:fs";
219922
220008
  import { readdir as readdir2, stat as stat3 } from "node:fs/promises";
219923
- import { join as join5, resolve as resolve8 } from "node:path";
220009
+ import { join as join6, resolve as resolve8 } from "node:path";
219924
220010
  async function getFilesMatchingRegex(dir, regex) {
219925
- if (!existsSync4(dir)) return [];
220011
+ if (!existsSync5(dir)) return [];
219926
220012
  return await asyncFilterMap(
219927
220013
  await readdir2(dir),
219928
- async (file) => (await stat3(resolve8(dir, file))).isFile() && regex.test(file) ? join5(dir, file) : void 0
220014
+ async (file) => (await stat3(resolve8(dir, file))).isFile() && regex.test(file) ? join6(dir, file) : void 0
219929
220015
  );
219930
220016
  }
219931
220017
  function getIndentationSize(fileContent) {
@@ -220507,7 +220593,7 @@ var GradleFixingManager = class {
220507
220593
  await applyPatchResults("MAVEN", this.rootDir, directPatchResults);
220508
220594
  const postFixDependencyTrees = directPatchResults.length ? await this.otherModulesCommunicator.getDependencyTrees(
220509
220595
  "GRADLE",
220510
- join6(this.rootDir, this.subprojectPath),
220596
+ join7(this.rootDir, this.subprojectPath),
220511
220597
  workspacePaths
220512
220598
  ) : dependencyTrees;
220513
220599
  const transitivePatchResults = [];
@@ -220544,14 +220630,14 @@ var GradleFixingManager = class {
220544
220630
  */
220545
220631
  async createFixPatch(workspacePath, groupId, artifactId, dependencyDetails, fixedVersion, vulnerabilityFix) {
220546
220632
  const manifestFiles = await getFilesMatchingRegex(
220547
- join6(this.rootDir, this.subprojectPath, workspacePath),
220633
+ join7(this.rootDir, this.subprojectPath, workspacePath),
220548
220634
  /\.gradle(\.kts)?$/
220549
220635
  );
220550
220636
  const absoluteCatalogueFiles = [];
220551
220637
  for (const manifestFile of manifestFiles) {
220552
220638
  const { depDecls, dependenciesBlocks, constraintsBlocks, catalogFiles } = await findDependencyDeclsAndCatalogFiles(manifestFile);
220553
220639
  absoluteCatalogueFiles.push(
220554
- ...catalogFiles.map((file) => join6(this.rootDir, this.subprojectPath, workspacePath, file))
220640
+ ...catalogFiles.map((file) => join7(this.rootDir, this.subprojectPath, workspacePath, file))
220555
220641
  );
220556
220642
  for (const depDecl of depDecls) {
220557
220643
  if (depDecl.groupId.value === groupId && depDecl.artifactId.value === artifactId && (depDecl.version.value === dependencyDetails.version || depDecl.version === void 0 || depDecl.version.isVar)) {
@@ -220573,13 +220659,13 @@ var GradleFixingManager = class {
220573
220659
  }
220574
220660
  }
220575
220661
  if (absoluteCatalogueFiles.length === 0) {
220576
- const rootProjectSettingsFile = join6(this.rootDir, this.subprojectPath, "settings.gradle");
220577
- if (existsSync5(rootProjectSettingsFile)) {
220662
+ const rootProjectSettingsFile = join7(this.rootDir, this.subprojectPath, "settings.gradle");
220663
+ if (existsSync6(rootProjectSettingsFile)) {
220578
220664
  const { catalogFiles } = await findDependencyDeclsAndCatalogFiles(rootProjectSettingsFile);
220579
- absoluteCatalogueFiles.push(...catalogFiles.map((file) => join6(this.rootDir, this.subprojectPath, file)));
220665
+ absoluteCatalogueFiles.push(...catalogFiles.map((file) => join7(this.rootDir, this.subprojectPath, file)));
220580
220666
  }
220581
220667
  const defaultVersionsCatalogFile = resolve9(this.rootDir, this.subprojectPath, "gradle", "libs.versions.toml");
220582
- if (existsSync5(defaultVersionsCatalogFile)) {
220668
+ if (existsSync6(defaultVersionsCatalogFile)) {
220583
220669
  absoluteCatalogueFiles.push(defaultVersionsCatalogFile);
220584
220670
  }
220585
220671
  }
@@ -220610,19 +220696,19 @@ var GradleFixingManager = class {
220610
220696
  }
220611
220697
  }
220612
220698
  }
220613
- const workspaceBuildGradlePath = join6(this.rootDir, this.subprojectPath, workspacePath, "build.gradle");
220614
- const workspaceBuildGradleKtsPath = join6(this.rootDir, this.subprojectPath, workspacePath, "build.gradle.kts");
220699
+ const workspaceBuildGradlePath = join7(this.rootDir, this.subprojectPath, workspacePath, "build.gradle");
220700
+ const workspaceBuildGradleKtsPath = join7(this.rootDir, this.subprojectPath, workspacePath, "build.gradle.kts");
220615
220701
  let targetBuildFile;
220616
220702
  let fileType;
220617
- if (existsSync5(workspaceBuildGradleKtsPath)) {
220703
+ if (existsSync6(workspaceBuildGradleKtsPath)) {
220618
220704
  targetBuildFile = workspaceBuildGradleKtsPath;
220619
220705
  fileType = "kotlin";
220620
- } else if (existsSync5(workspaceBuildGradlePath)) {
220706
+ } else if (existsSync6(workspaceBuildGradlePath)) {
220621
220707
  targetBuildFile = workspaceBuildGradlePath;
220622
220708
  fileType = "groovy";
220623
220709
  } else {
220624
- const rootBuildGradleKtsPath = join6(this.rootDir, this.subprojectPath, "build.gradle.kts");
220625
- if (existsSync5(rootBuildGradleKtsPath)) {
220710
+ const rootBuildGradleKtsPath = join7(this.rootDir, this.subprojectPath, "build.gradle.kts");
220711
+ if (existsSync6(rootBuildGradleKtsPath)) {
220626
220712
  targetBuildFile = workspaceBuildGradleKtsPath;
220627
220713
  fileType = "kotlin";
220628
220714
  } else {
@@ -220638,7 +220724,7 @@ var GradleFixingManager = class {
220638
220724
  4,
220639
220725
  fileType
220640
220726
  );
220641
- if (!existsSync5(targetBuildFile)) {
220727
+ if (!existsSync6(targetBuildFile)) {
220642
220728
  return {
220643
220729
  file: targetBuildFile,
220644
220730
  offset: 0,
@@ -220782,7 +220868,7 @@ var GradleFixingManager = class {
220782
220868
  */
220783
220869
  async hasExistingDependency(workspacePath, groupId, artifactId) {
220784
220870
  const manifestFiles = await getFilesMatchingRegex(
220785
- join6(this.rootDir, this.subprojectPath, workspacePath),
220871
+ join7(this.rootDir, this.subprojectPath, workspacePath),
220786
220872
  /\.gradle(\.kts)?$/
220787
220873
  );
220788
220874
  for (const manifestFile of manifestFiles) {
@@ -220806,22 +220892,22 @@ var GradleFixingManager = class {
220806
220892
  async getCatalogFiles(workspacePath) {
220807
220893
  const catalogFiles = [];
220808
220894
  const manifestFiles = await getFilesMatchingRegex(
220809
- join6(this.rootDir, this.subprojectPath, workspacePath),
220895
+ join7(this.rootDir, this.subprojectPath, workspacePath),
220810
220896
  /\.gradle(\.kts)?$/
220811
220897
  );
220812
220898
  for (const manifestFile of manifestFiles) {
220813
220899
  const { catalogFiles: fileCatalogFiles } = await findDependencyDeclsAndCatalogFiles(manifestFile);
220814
220900
  catalogFiles.push(
220815
- ...fileCatalogFiles.map((file) => join6(this.rootDir, this.subprojectPath, workspacePath, file))
220901
+ ...fileCatalogFiles.map((file) => join7(this.rootDir, this.subprojectPath, workspacePath, file))
220816
220902
  );
220817
220903
  }
220818
- const settingsFile = join6(this.rootDir, this.subprojectPath, "settings.gradle");
220819
- if (existsSync5(settingsFile)) {
220904
+ const settingsFile = join7(this.rootDir, this.subprojectPath, "settings.gradle");
220905
+ if (existsSync6(settingsFile)) {
220820
220906
  const { catalogFiles: settingsCatalogFiles } = await findDependencyDeclsAndCatalogFiles(settingsFile);
220821
- catalogFiles.push(...settingsCatalogFiles.map((file) => join6(this.rootDir, this.subprojectPath, file)));
220907
+ catalogFiles.push(...settingsCatalogFiles.map((file) => join7(this.rootDir, this.subprojectPath, file)));
220822
220908
  }
220823
- const defaultCatalog = join6(this.rootDir, this.subprojectPath, "gradle", "libs.versions.toml");
220824
- if (existsSync5(defaultCatalog)) {
220909
+ const defaultCatalog = join7(this.rootDir, this.subprojectPath, "gradle", "libs.versions.toml");
220910
+ if (existsSync6(defaultCatalog)) {
220825
220911
  catalogFiles.push(defaultCatalog);
220826
220912
  }
220827
220913
  return i3(catalogFiles);
@@ -220832,7 +220918,7 @@ var GradleFixingManager = class {
220832
220918
  async createConstraintsForFile(buildFile, fixes) {
220833
220919
  const { dependenciesBlocks, constraintsBlocks } = await findDependencyDeclsAndCatalogFiles(buildFile);
220834
220920
  const fileType = buildFile.endsWith(".kts") ? "kotlin" : "groovy";
220835
- const fileContent = existsSync5(buildFile) ? await readFile8(buildFile, "utf-8") : "";
220921
+ const fileContent = existsSync6(buildFile) ? await readFile8(buildFile, "utf-8") : "";
220836
220922
  const indentationSize = getIndentationSize(fileContent);
220837
220923
  const constraintDeclarations = fixes.map(({ dependencyDetails, fixedVersion }) => {
220838
220924
  const [groupId, artifactId] = dependencyDetails.packageName.split(":");
@@ -220881,16 +220967,16 @@ ${indent(1, indentationSize)}}
220881
220967
  * Creates bundled constraints patch for multiple dependencies
220882
220968
  */
220883
220969
  async createBundledConstraintsForWorkspace(workspacePath, fixes) {
220884
- const workspaceBuildGradlePath = join6(this.rootDir, this.subprojectPath, workspacePath, "build.gradle");
220885
- const workspaceBuildGradleKtsPath = join6(this.rootDir, this.subprojectPath, workspacePath, "build.gradle.kts");
220970
+ const workspaceBuildGradlePath = join7(this.rootDir, this.subprojectPath, workspacePath, "build.gradle");
220971
+ const workspaceBuildGradleKtsPath = join7(this.rootDir, this.subprojectPath, workspacePath, "build.gradle.kts");
220886
220972
  let buildFile;
220887
- if (existsSync5(workspaceBuildGradleKtsPath)) {
220973
+ if (existsSync6(workspaceBuildGradleKtsPath)) {
220888
220974
  buildFile = workspaceBuildGradleKtsPath;
220889
- } else if (existsSync5(workspaceBuildGradlePath)) {
220975
+ } else if (existsSync6(workspaceBuildGradlePath)) {
220890
220976
  buildFile = workspaceBuildGradlePath;
220891
220977
  } else {
220892
- const rootBuildGradleKtsPath = join6(this.rootDir, this.subprojectPath, "build.gradle.kts");
220893
- buildFile = existsSync5(rootBuildGradleKtsPath) ? workspaceBuildGradleKtsPath : workspaceBuildGradlePath;
220978
+ const rootBuildGradleKtsPath = join7(this.rootDir, this.subprojectPath, "build.gradle.kts");
220979
+ buildFile = existsSync6(rootBuildGradleKtsPath) ? workspaceBuildGradleKtsPath : workspaceBuildGradlePath;
220894
220980
  }
220895
220981
  return this.createConstraintsForFile(buildFile, fixes);
220896
220982
  }
@@ -220940,8 +221026,8 @@ import { resolve as resolve11 } from "node:path";
220940
221026
  // ../utils/src/pom-utils.ts
220941
221027
  var import_parse_xml2 = __toESM(require_dist(), 1);
220942
221028
  import { readFile as readFile9 } from "node:fs/promises";
220943
- import { existsSync as existsSync6 } from "node:fs";
220944
- import { resolve as resolve10, join as join7, relative as relative3, dirname as dirname8 } from "node:path";
221029
+ import { existsSync as existsSync7 } from "node:fs";
221030
+ import { resolve as resolve10, join as join8, relative as relative3, dirname as dirname8 } from "node:path";
220945
221031
 
220946
221032
  // ../utils/src/xml-utils.ts
220947
221033
  var import_parse_xml = __toESM(require_dist(), 1);
@@ -221072,7 +221158,7 @@ function calculateColumnPosition(byteOffset, contents2) {
221072
221158
  }
221073
221159
  async function loadPom(rootDir, pomFile, validateFile, visited = /* @__PURE__ */ new Set()) {
221074
221160
  const validatedPomFile = validateFile(resolve10(rootDir, pomFile));
221075
- if (!validatedPomFile || !existsSync6(validatedPomFile)) return void 0;
221161
+ if (!validatedPomFile || !existsSync7(validatedPomFile)) return void 0;
221076
221162
  if (visited.has(validatedPomFile)) return void 0;
221077
221163
  visited.add(validatedPomFile);
221078
221164
  const sourceText = await readFile9(validatedPomFile, "utf-8");
@@ -221422,7 +221508,7 @@ function getParentPomPath(pom) {
221422
221508
  const parentElement = projectElement.children.filter((child) => child instanceof import_parse_xml2.XmlElement).find((child) => child.name === "parent");
221423
221509
  if (!parentElement) return void 0;
221424
221510
  const relativePathElement = parentElement.children.filter((child) => child instanceof import_parse_xml2.XmlElement).find((child) => child.name === "relativePath");
221425
- const relativePath = relativePathElement?.text ?? join7("..", "pom.xml");
221511
+ const relativePath = relativePathElement?.text ?? join8("..", "pom.xml");
221426
221512
  return relativePath ? resolve10(dirname8(pom.validatedPomFile), relativePath) : void 0;
221427
221513
  }
221428
221514
  function getInsertPos(element) {
@@ -222437,7 +222523,7 @@ var GradleLockfileUpgradeHandler = class {
222437
222523
  };
222438
222524
 
222439
222525
  // ../fixing-management/src/fixing-management/maven/handlers/sbt-upgrade-handler.ts
222440
- import { existsSync as existsSync7 } from "node:fs";
222526
+ import { existsSync as existsSync8 } from "node:fs";
222441
222527
  import { readFile as readFile12 } from "node:fs/promises";
222442
222528
  import { basename as basename4, dirname as dirname11, resolve as resolve15 } from "node:path";
222443
222529
  import assert8 from "node:assert";
@@ -222749,7 +222835,7 @@ var SbtUpgradeHandler = class {
222749
222835
  scalaBinaryVersion
222750
222836
  );
222751
222837
  });
222752
- if (!existsSync7(dependencyOverridesFile)) {
222838
+ if (!existsSync8(dependencyOverridesFile)) {
222753
222839
  const indentationSize = 2;
222754
222840
  return {
222755
222841
  file: dependencyOverridesFile,
@@ -222800,12 +222886,12 @@ function findSbtWorkspace(manifestFile) {
222800
222886
  }
222801
222887
  async function getSbtRootProject(rootDir, workspacePath) {
222802
222888
  const findSbtRootDir = (workspacePath2) => {
222803
- if (existsSync7(resolve15(rootDir, workspacePath2, "project"))) {
222889
+ if (existsSync8(resolve15(rootDir, workspacePath2, "project"))) {
222804
222890
  return resolve15(rootDir, workspacePath2);
222805
222891
  }
222806
222892
  let curr = workspacePath2;
222807
222893
  while (curr !== dirname11(curr)) {
222808
- if (existsSync7(resolve15(rootDir, curr, "project"))) {
222894
+ if (existsSync8(resolve15(rootDir, curr, "project"))) {
222809
222895
  return resolve15(rootDir, curr);
222810
222896
  }
222811
222897
  curr = dirname11(curr);
@@ -222816,7 +222902,7 @@ async function getSbtRootProject(rootDir, workspacePath) {
222816
222902
  const sbtRootDir = findSbtRootDir(workspacePath);
222817
222903
  if (sbtRootDir !== void 0) {
222818
222904
  const sbtRootProjectDir = resolve15(rootDir, sbtRootDir, "project");
222819
- if (existsSync7(sbtRootProjectDir)) {
222905
+ if (existsSync8(sbtRootProjectDir)) {
222820
222906
  const projectScalaFiles = await getFilesMatchingRegex(sbtRootProjectDir, /[^/]*\.scala$/);
222821
222907
  for (const file of projectScalaFiles) {
222822
222908
  rootProject = await loadSbtProject(file, rootProject);
@@ -222892,9 +222978,9 @@ var MavenSocketUpgradeManager = class {
222892
222978
  };
222893
222979
 
222894
222980
  // ../fixing-management/src/fixing-management/maven/sbt-fixing-manager.ts
222895
- import { existsSync as existsSync8 } from "node:fs";
222981
+ import { existsSync as existsSync9 } from "node:fs";
222896
222982
  import { readFile as readFile13 } from "node:fs/promises";
222897
- import { join as join8 } from "node:path";
222983
+ import { join as join9 } from "node:path";
222898
222984
  var SbtFixingManager = class {
222899
222985
  constructor(rootDir, subprojectPath, otherModulesCommunicator) {
222900
222986
  this.rootDir = rootDir;
@@ -222937,7 +223023,7 @@ var SbtFixingManager = class {
222937
223023
  await applyPatchResults("MAVEN", this.rootDir, directPatchResults);
222938
223024
  const postFixDependencyTrees = directPatchResults.length ? await this.otherModulesCommunicator.getDependencyTrees(
222939
223025
  "SBT",
222940
- join8(this.rootDir, this.subprojectPath),
223026
+ join9(this.rootDir, this.subprojectPath),
222941
223027
  workspacePaths
222942
223028
  ) : dependencyTrees;
222943
223029
  const transitivePatchResults = [];
@@ -223058,9 +223144,9 @@ var SbtFixingManager = class {
223058
223144
  */
223059
223145
  async createFixPatch(workspacePath, groupId, artifactId, dependencyDetails, fixedVersion, scalaBinaryVersion) {
223060
223146
  const manifestFiles = [
223061
- ...await getFilesMatchingRegex(join8(this.rootDir, this.subprojectPath, workspacePath), /\.sbt$/),
223147
+ ...await getFilesMatchingRegex(join9(this.rootDir, this.subprojectPath, workspacePath), /\.sbt$/),
223062
223148
  ...await getFilesMatchingRegex(
223063
- join8(this.rootDir, this.subprojectPath, workspacePath, "project"),
223149
+ join9(this.rootDir, this.subprojectPath, workspacePath, "project"),
223064
223150
  /[^/]*\.scala$/
223065
223151
  )
223066
223152
  ];
@@ -223078,7 +223164,7 @@ var SbtFixingManager = class {
223078
223164
  }
223079
223165
  }
223080
223166
  }
223081
- const workspaceBuildSbtPath = join8(this.rootDir, this.subprojectPath, workspacePath, "build.sbt");
223167
+ const workspaceBuildSbtPath = join9(this.rootDir, this.subprojectPath, workspacePath, "build.sbt");
223082
223168
  const moduleIdStr = getModuleIdString(
223083
223169
  groupId,
223084
223170
  artifactId,
@@ -223087,7 +223173,7 @@ var SbtFixingManager = class {
223087
223173
  void 0,
223088
223174
  scalaBinaryVersion
223089
223175
  );
223090
- if (!existsSync8(workspaceBuildSbtPath)) {
223176
+ if (!existsSync9(workspaceBuildSbtPath)) {
223091
223177
  return {
223092
223178
  file: workspaceBuildSbtPath,
223093
223179
  offset: 0,
@@ -223150,7 +223236,7 @@ var SbtFixingManager = class {
223150
223236
  * Creates bundled dependency overrides patch for multiple dependencies
223151
223237
  */
223152
223238
  async createBundledDependencyOverridesForWorkspace(workspacePath, fixes, scalaBinaryVersion) {
223153
- const workspaceBuildSbtPath = join8(this.rootDir, this.subprojectPath, workspacePath, "build.sbt");
223239
+ const workspaceBuildSbtPath = join9(this.rootDir, this.subprojectPath, workspacePath, "build.sbt");
223154
223240
  const moduleIdStrings = fixes.map(({ fix, dependencyDetails }) => {
223155
223241
  const [groupId, artifactId] = fix.dependencyName.split(":");
223156
223242
  return getModuleIdString(
@@ -223162,7 +223248,7 @@ var SbtFixingManager = class {
223162
223248
  scalaBinaryVersion
223163
223249
  );
223164
223250
  });
223165
- if (!existsSync8(workspaceBuildSbtPath)) {
223251
+ if (!existsSync9(workspaceBuildSbtPath)) {
223166
223252
  const indentationSize = 2;
223167
223253
  const overrideText = `dependencyOverrides ++= Seq(
223168
223254
  ${indent(1, indentationSize)}${moduleIdStrings.join(`,
@@ -223192,10 +223278,10 @@ ${indent(1, indentationSize)}`)}
223192
223278
  };
223193
223279
 
223194
223280
  // ../fixing-management/src/fixing-management/npm/npm-socket-upgrade-manager.ts
223195
- import { existsSync as existsSync13 } from "fs";
223281
+ import { existsSync as existsSync15 } from "fs";
223196
223282
  import { readFile as readFile18 } from "fs/promises";
223197
223283
  import assert10 from "node:assert";
223198
- import { dirname as dirname14, join as join12, relative as relative9, resolve as resolve24 } from "path";
223284
+ import { dirname as dirname14, join as join14, relative as relative9, resolve as resolve24 } from "path";
223199
223285
 
223200
223286
  // ../utils/src/npm-utils.ts
223201
223287
  import { access as access2, constants as constants2 } from "fs/promises";
@@ -228695,7 +228781,7 @@ glob.glob = glob;
228695
228781
 
228696
228782
  // ../utils/src/npm-utils.ts
228697
228783
  var import_lodash5 = __toESM(require_lodash(), 1);
228698
- import { join as join10, resolve as resolve18 } from "path";
228784
+ import { join as join12, resolve as resolve18 } from "path";
228699
228785
 
228700
228786
  // ../utils/dist/command-utils.js
228701
228787
  import assert9 from "assert";
@@ -228892,6 +228978,74 @@ var TelemetryCollector2 = class {
228892
228978
  }
228893
228979
  };
228894
228980
 
228981
+ // ../utils/dist/telemetry/analyzer-telemetry-server.js
228982
+ import { existsSync as existsSync10, readFileSync as readFileSync3, watchFile as watchFile2, unwatchFile as unwatchFile2 } from "fs";
228983
+ import { unlink as unlink2, writeFile as writeFile5 } from "fs/promises";
228984
+ import { tmpdir as tmpdir3 } from "os";
228985
+ import { join as join10 } from "path";
228986
+ import { randomBytes as randomBytes3 } from "crypto";
228987
+ var AnalyzerTelemetryServer2 = class {
228988
+ handler;
228989
+ filePath;
228990
+ lastReadPosition = 0;
228991
+ watching = false;
228992
+ constructor(handler) {
228993
+ this.handler = handler;
228994
+ const fileId = randomBytes3(8).toString("hex");
228995
+ this.filePath = join10(tmpdir3(), `analyzer-telemetry-${fileId}.jsonl`);
228996
+ }
228997
+ /**
228998
+ * Starts the server and returns the file path that analyzers should write to.
228999
+ */
229000
+ async start() {
229001
+ await writeFile5(this.filePath, "");
229002
+ this.watching = true;
229003
+ watchFile2(this.filePath, { interval: 1e3 }, () => {
229004
+ this.processNewEvents();
229005
+ });
229006
+ return this.filePath;
229007
+ }
229008
+ processNewEvents() {
229009
+ if (!existsSync10(this.filePath))
229010
+ return;
229011
+ try {
229012
+ const content = readFileSync3(this.filePath, "utf-8");
229013
+ const newContent = content.substring(this.lastReadPosition);
229014
+ this.lastReadPosition = content.length;
229015
+ if (!newContent)
229016
+ return;
229017
+ const lines = newContent.split("\n");
229018
+ for (const line of lines) {
229019
+ if (line.trim()) {
229020
+ try {
229021
+ const event = JSON.parse(line);
229022
+ this.handler.onAnalyzerEvent(event);
229023
+ } catch {
229024
+ }
229025
+ }
229026
+ }
229027
+ } catch {
229028
+ }
229029
+ }
229030
+ /**
229031
+ * Closes the server and cleans up the file.
229032
+ */
229033
+ async close() {
229034
+ this.processNewEvents();
229035
+ this.handler.close?.();
229036
+ if (this.watching) {
229037
+ unwatchFile2(this.filePath);
229038
+ this.watching = false;
229039
+ }
229040
+ if (existsSync10(this.filePath)) {
229041
+ try {
229042
+ await unlink2(this.filePath);
229043
+ } catch {
229044
+ }
229045
+ }
229046
+ }
229047
+ };
229048
+
228895
229049
  // ../utils/dist/command-utils.js
228896
229050
  var DEFAULT_TIMEOUT_MS2 = 30 * 60 * 1e3;
228897
229051
  async function execAndLogOnFailure3(cmd, dir, options, logLevel = "info") {
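Note: the hunk above adds AnalyzerTelemetryServer2, a file-based telemetry channel. It creates a random analyzer-telemetry-<id>.jsonl file in the OS temp directory, polls it with watchFile at a 1-second interval, parses each newly appended line as JSON, and forwards the parsed event to handler.onAnalyzerEvent. A minimal producer-side sketch follows; only the line-per-JSON-document transport and the ANALYZER_TELEMETRY_FILE_PATH environment variable (set further down in this diff) come from the published code, while the event payload and the emitAnalyzerEvent helper name are illustrative assumptions.

// Hypothetical analyzer-side emitter (sketch). The JSONL transport and the
// ANALYZER_TELEMETRY_FILE_PATH variable are taken from the diff; the event
// shape and this helper's name are assumptions.
import { appendFileSync } from "fs";

export function emitAnalyzerEvent(event) {
  const filePath = process.env.ANALYZER_TELEMETRY_FILE_PATH;
  if (!filePath) return; // telemetry channel not enabled for this run
  // One JSON document per line; the CLI tails the file (watchFile, 1s interval)
  // and forwards each parsed line to handler.onAnalyzerEvent(event).
  appendFileSync(filePath, JSON.stringify(event) + "\n");
}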
@@ -228945,13 +229099,20 @@ function startTelemetry2(pid, handler) {
228945
229099
  async function execNeverFail3(cmd, dir, options) {
228946
229100
  const stopHeartbeat = options?.heartbeat ? startHeartbeat2(options.heartbeat) : void 0;
228947
229101
  let stopTelemetry;
229102
+ let analyzerTelemetryServer;
229103
+ let analyzerTelemetryFilePath;
229104
+ if (options?.analyzerTelemetryHandler) {
229105
+ analyzerTelemetryServer = new AnalyzerTelemetryServer2(options.analyzerTelemetryHandler);
229106
+ analyzerTelemetryFilePath = await analyzerTelemetryServer.start();
229107
+ }
228948
229108
  try {
228949
229109
  return await new Promise((resolve45) => {
228950
229110
  let args2;
228951
229111
  if (typeof cmd !== "string")
228952
229112
  [cmd, ...args2] = cmd;
228953
229113
  const timeout = options?.timeout ?? DEFAULT_TIMEOUT_MS2;
228954
- const childProcess = execFile4(cmd, args2, { ...options, cwd: dir, maxBuffer: 1024 * 1024 * 1024, shell: args2 === void 0, timeout }, (error, stdout, stderr) => {
229114
+ const env = analyzerTelemetryFilePath ? { ...options?.env ?? process.env, ANALYZER_TELEMETRY_FILE_PATH: analyzerTelemetryFilePath } : options?.env;
229115
+ const childProcess = execFile4(cmd, args2, { ...options, env, cwd: dir, maxBuffer: 1024 * 1024 * 1024, shell: args2 === void 0, timeout }, (error, stdout, stderr) => {
228955
229116
  resolve45({ error, stdout, stderr });
228956
229117
  });
228957
229118
  if (options?.telemetryHandler && childProcess.pid) {
@@ -228976,6 +229137,7 @@ async function execNeverFail3(cmd, dir, options) {
228976
229137
  } finally {
228977
229138
  stopHeartbeat?.();
228978
229139
  stopTelemetry?.();
229140
+ await analyzerTelemetryServer?.close();
228979
229141
  }
228980
229142
  }
228981
229143
  async function runCommandResolveStdOut3(cmd, dir, options) {
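The two hunks above wire the new server into execNeverFail3: when options.analyzerTelemetryHandler is set, the server is started before the child process is spawned, its temp-file path is injected into the child's environment as ANALYZER_TELEMETRY_FILE_PATH, and the server is drained and removed in the finally block via analyzerTelemetryServer?.close(). A handler only needs the two members the server calls; the concrete bodies below are illustrative placeholders.

// Illustrative handler object for options.analyzerTelemetryHandler. Only
// onAnalyzerEvent (required) and close (optional, note the close?.() call in
// the diff) are inferred from the published code; the bodies are placeholders.
const analyzerTelemetryHandler = {
  onAnalyzerEvent(event) {
    // Receives each parsed JSONL line written by the analyzer child process.
    console.log("analyzer telemetry event:", event);
  },
  close() {
    // Invoked once from AnalyzerTelemetryServer2.close() after a final drain.
  },
};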
@@ -229061,8 +229223,8 @@ async function runCommandResolveStdOut4(cmd, dir, options) {
229061
229223
  }
229062
229224
 
229063
229225
  // ../utils/dist/package-utils.js
229064
- import { parse as parse5, join as join9, resolve as resolve17, normalize as normalize2, dirname as dirname12, basename as basename5, relative as relative4 } from "path";
229065
- import { existsSync as existsSync9, readFileSync as readFileSync2, readdirSync as readdirSync2, statSync as statSync3, writeFileSync } from "fs";
229226
+ import { parse as parse5, join as join11, resolve as resolve17, normalize as normalize2, dirname as dirname12, basename as basename5, relative as relative4 } from "path";
229227
+ import { existsSync as existsSync11, readFileSync as readFileSync4, readdirSync as readdirSync2, statSync as statSync3, writeFileSync } from "fs";
229066
229228
  function getPackageJsonObject(workspaceRoot) {
229067
229229
  const packageJSONContent = getPackageJsonContent(workspaceRoot);
229068
229230
  if (!packageJSONContent)
@@ -229071,17 +229233,17 @@ function getPackageJsonObject(workspaceRoot) {
229071
229233
  }
229072
229234
  function getPackageJsonContent(workspaceRoot) {
229073
229235
  const packageJsonPath = getPackageJSONPath(workspaceRoot);
229074
- if (existsSync9(packageJsonPath))
229075
- return readFileSync2(packageJsonPath, "utf8");
229236
+ if (existsSync11(packageJsonPath))
229237
+ return readFileSync4(packageJsonPath, "utf8");
229076
229238
  return void 0;
229077
229239
  }
229078
229240
  function getPackageJSONPath(workspaceRoot) {
229079
- return join9(workspaceRoot, "package.json");
229241
+ return join11(workspaceRoot, "package.json");
229080
229242
  }
229081
229243
 
229082
229244
  // ../utils/src/npm-utils.ts
229083
229245
  var import_lockfile_file = __toESM(require_lib25(), 1);
229084
- import { existsSync as existsSync10 } from "fs";
229246
+ import { existsSync as existsSync12 } from "fs";
229085
229247
  var { once } = import_lodash5.default;
229086
229248
  async function getPackageVersionDetailsFromNpm(packageName, version4) {
229087
229249
  const npmViewCmd = cmdt2`npm view ${packageName}@${version4} -json`;
@@ -229134,7 +229296,7 @@ async function getWorkspacePathsFromPackageJSON(projectFolder, useDotWhenNoWorks
229134
229296
  const workspacePaths = (await glob("packages" in workspaces ? workspaces.packages : workspaces, {
229135
229297
  ignore: ["node_modules/**", "**/node_modules/**"],
229136
229298
  cwd: projectFolder
229137
- })).filter((fileOrDir) => existsSync10(resolve18(projectFolder, fileOrDir, "package.json")));
229299
+ })).filter((fileOrDir) => existsSync12(resolve18(projectFolder, fileOrDir, "package.json")));
229138
229300
  return { validProject: true, workspacePaths };
229139
229301
  }
229140
229302
  async function getWorktreePathsFromPackageJSON(projectFolder, useDotWhenNoWorkspaces = false) {
@@ -229154,8 +229316,8 @@ async function getWorktreePathsFromPackageJSON(projectFolder, useDotWhenNoWorksp
229154
229316
  continue;
229155
229317
  }
229156
229318
  for (const relWsPath of result.workspacePaths) {
229157
- workspacePaths.add(join10(currentFolder, relWsPath));
229158
- worklist.push(join10(currentFolder, relWsPath));
229319
+ workspacePaths.add(join12(currentFolder, relWsPath));
229320
+ worklist.push(join12(currentFolder, relWsPath));
229159
229321
  }
229160
229322
  }
229161
229323
  return Array.from(workspacePaths);
@@ -229197,7 +229359,7 @@ async function getYarnType(projectDir) {
229197
229359
  }
229198
229360
 
229199
229361
  // ../fixing-management/src/fixing-management/npm/npm-fixing-manager.ts
229200
- import { readFile as readFile15, writeFile as writeFile4 } from "fs/promises";
229362
+ import { readFile as readFile15, writeFile as writeFile6 } from "fs/promises";
229201
229363
  import { relative as relative5, resolve as resolve20 } from "path";
229202
229364
 
229203
229365
  // ../fixing-management/src/fixing-management/npm/npm-ecosystem-fixing-manager.ts
@@ -229276,7 +229438,7 @@ var NpmFixingManager = class extends NpmEcosystemFixingManager {
229276
229438
  const packageDetails = await getPackageVersionDetailsFromNpm(packageName, fix.fixedVersion);
229277
229439
  Object.assign(pkgObj, { version: fix.fixedVersion }, packageDetails);
229278
229440
  });
229279
- await writeFile4(pkgLockLocation, JSON.stringify(packageLock, null, 2));
229441
+ await writeFile6(pkgLockLocation, JSON.stringify(packageLock, null, 2));
229280
229442
  } catch (e) {
229281
229443
  throw new Error(`Failed to update package-lock.json ${e.stack}`);
229282
229444
  }
@@ -229297,11 +229459,11 @@ var NpmFixingManager = class extends NpmEcosystemFixingManager {
229297
229459
  };
229298
229460
 
229299
229461
  // ../fixing-management/src/fixing-management/npm/pnpm-fixing-manager.ts
229300
- import { readFile as readFile16, writeFile as writeFile5 } from "fs/promises";
229462
+ import { readFile as readFile16, writeFile as writeFile7 } from "fs/promises";
229301
229463
  import { relative as relative6, resolve as resolve21 } from "path";
229302
229464
  var import_yaml = __toESM(require_dist10(), 1);
229303
229465
  var import_lockfile_file2 = __toESM(require_lib25(), 1);
229304
- import { existsSync as existsSync11 } from "fs";
229466
+ import { existsSync as existsSync13 } from "fs";
229305
229467
  var PnpmFixingManager = class extends NpmEcosystemFixingManager {
229306
229468
  pnpmMajorVersion;
229307
229469
  async getPnpmMajorVersion() {
@@ -229414,7 +229576,7 @@ var PnpmFixingManager = class extends NpmEcosystemFixingManager {
229414
229576
  ])
229415
229577
  );
229416
229578
  const pnpmWorkspaceYamlFile = resolve21(this.rootDir, this.subprojectPath, "pnpm-workspace.yaml");
229417
- if (!existsSync11(pnpmWorkspaceYamlFile)) {
229579
+ if (!existsSync13(pnpmWorkspaceYamlFile)) {
229418
229580
  throw new Error(
229419
229581
  `pnpm-workspace.yaml could not be found in ${pnpmWorkspaceYamlFile}. The lockfile indicates that pnpm catalogs are used and they must be updated, which is not possible without a pnpm-workspace.yaml file`
229420
229582
  );
@@ -229456,7 +229618,7 @@ async function readYamlFile(workspaceYamlFile) {
229456
229618
  }
229457
229619
  async function writeYamlFile(yamlAST, workspaceYamlFile) {
229458
229620
  const workspaceYamlString = import_yaml.CST.stringify(yamlAST);
229459
- await writeFile5(workspaceYamlFile, workspaceYamlString);
229621
+ await writeFile7(workspaceYamlFile, workspaceYamlString);
229460
229622
  }
229461
229623
  function fixCatalogVersions(yamlAST, fixes) {
229462
229624
  import_yaml.CST.visit(yamlAST, (item, _path2) => {
@@ -229486,12 +229648,12 @@ function updateCatalog(update3, map2) {
229486
229648
  }
229487
229649
 
229488
229650
  // ../fixing-management/src/fixing-management/npm/yarn-fixing-manager.ts
229489
- import { readFile as readFile17, writeFile as writeFile6 } from "fs/promises";
229651
+ import { readFile as readFile17, writeFile as writeFile8 } from "fs/promises";
229490
229652
  import { relative as relative8, resolve as resolve23 } from "path";
229491
229653
 
229492
229654
  // ../utils/src/package-utils.ts
229493
- import { parse as parse7, join as join11, resolve as resolve22, normalize as normalize3, dirname as dirname13, basename as basename6, relative as relative7 } from "path";
229494
- import { existsSync as existsSync12, readFileSync as readFileSync3, readdirSync as readdirSync3, statSync as statSync4, writeFileSync as writeFileSync2 } from "fs";
229655
+ import { parse as parse7, join as join13, resolve as resolve22, normalize as normalize3, dirname as dirname13, basename as basename6, relative as relative7 } from "path";
229656
+ import { existsSync as existsSync14, readFileSync as readFileSync5, readdirSync as readdirSync3, statSync as statSync4, writeFileSync as writeFileSync2 } from "fs";
229495
229657
  function setFieldInPackageJson(workspaceRoot, field, value2) {
229496
229658
  const packageJSONContentObj = getPackageJsonObject2(workspaceRoot);
229497
229659
  if (!packageJSONContentObj) return void 0;
@@ -229508,11 +229670,11 @@ function writePackageJsonContent(workspaceRoot, packageJsonContent) {
229508
229670
  }
229509
229671
  function getPackageJsonContent2(workspaceRoot) {
229510
229672
  const packageJsonPath = getPackageJSONPath2(workspaceRoot);
229511
- if (existsSync12(packageJsonPath)) return readFileSync3(packageJsonPath, "utf8");
229673
+ if (existsSync14(packageJsonPath)) return readFileSync5(packageJsonPath, "utf8");
229512
229674
  return void 0;
229513
229675
  }
229514
229676
  function getPackageJSONPath2(workspaceRoot) {
229515
- return join11(workspaceRoot, "package.json");
229677
+ return join13(workspaceRoot, "package.json");
229516
229678
  }
229517
229679
 
229518
229680
  // ../fixing-management/src/fixing-management/npm/yarn-fixing-manager.ts
@@ -229601,7 +229763,7 @@ var YarnFixingManager = class extends NpmEcosystemFixingManager {
229601
229763
  async writeYarnObj(yarnObj, filepath) {
229602
229764
  const yarnType = await this.getYarnType();
229603
229765
  const fileString = yarnType === "classic" ? (0, import_yarnlock_parse_raw.stringifyYarnLockRawV1)(yarnObj) : (0, import_yarnlock_parse_raw.stringifyYarnLockRawV2)(yarnObj);
229604
- await writeFile6(filepath, fileString);
229766
+ await writeFile8(filepath, fileString);
229605
229767
  }
229606
229768
  async updateDependencyLists(yarnLockLocation, fixes) {
229607
229769
  const yarnLock = await this.getYarnLockObj(yarnLockLocation);
@@ -229772,7 +229934,7 @@ var NpmSocketUpgradeManager = class {
229772
229934
  workspaces = result.workspacePaths;
229773
229935
  }
229774
229936
  for (const workspace of workspaces) {
229775
- workspaceToSubproject.set(join12(subprojectDir, workspace), subprojectDir);
229937
+ workspaceToSubproject.set(join14(subprojectDir, workspace), subprojectDir);
229776
229938
  }
229777
229939
  }
229778
229940
  for (const upgrade of upgrades) {
@@ -229809,7 +229971,7 @@ var NpmSocketUpgradeManager = class {
229809
229971
  for (const [workspacePath, upgrades] of workspaceToFixes.entries()) {
229810
229972
  const upgradesToDirectDependencies = upgrades.filter((upgrade) => artifacts[upgrade.idx].direct);
229811
229973
  if (upgradesToDirectDependencies.length === 0) continue;
229812
- const packageJsonPath = join12(subprojectDir, workspacePath, "package.json");
229974
+ const packageJsonPath = join14(subprojectDir, workspacePath, "package.json");
229813
229975
  const directPatches = [];
229814
229976
  for (const upgrade of upgradesToDirectDependencies) {
229815
229977
  const artifact = artifacts[upgrade.idx];
@@ -229829,7 +229991,7 @@ var NpmSocketUpgradeManager = class {
229829
229991
  }
229830
229992
  }
229831
229993
  const lockfileName = this.getLockfileName(subprojectDir);
229832
- const lockfilePath = join12(subprojectDir, lockfileName);
229994
+ const lockfilePath = join14(subprojectDir, lockfileName);
229833
229995
  const allUpgrades = Array.from(workspaceToFixes.values()).flat();
229834
229996
  const upgradesTransformed = allUpgrades.map((upgrade) => ({
229835
229997
  dependencyName: getNameFromNamespaceAndName(
@@ -229886,11 +230048,11 @@ var NpmSocketUpgradeManager = class {
229886
230048
  }
229887
230049
  getPackageManagerForDirectory(directory) {
229888
230050
  const fullPath = resolve24(this.rootDir, directory);
229889
- if (existsSync13(join12(fullPath, "pnpm-lock.yaml")) || existsSync13(join12(fullPath, "pnpm-lock.yml"))) {
230051
+ if (existsSync15(join14(fullPath, "pnpm-lock.yaml")) || existsSync15(join14(fullPath, "pnpm-lock.yml"))) {
229890
230052
  return "PNPM";
229891
- } else if (existsSync13(join12(fullPath, "yarn.lock"))) {
230053
+ } else if (existsSync15(join14(fullPath, "yarn.lock"))) {
229892
230054
  return "YARN";
229893
- } else if (existsSync13(join12(fullPath, "package-lock.json"))) {
230055
+ } else if (existsSync15(join14(fullPath, "package-lock.json"))) {
229894
230056
  return "NPM";
229895
230057
  }
229896
230058
  throw new Error(
@@ -229899,10 +230061,10 @@ var NpmSocketUpgradeManager = class {
229899
230061
  }
229900
230062
  getLockfileName(directory) {
229901
230063
  const fullPath = resolve24(this.rootDir, directory);
229902
- if (existsSync13(join12(fullPath, "pnpm-lock.yaml"))) return "pnpm-lock.yaml";
229903
- if (existsSync13(join12(fullPath, "pnpm-lock.yml"))) return "pnpm-lock.yml";
229904
- if (existsSync13(join12(fullPath, "yarn.lock"))) return "yarn.lock";
229905
- if (existsSync13(join12(fullPath, "package-lock.json"))) return "package-lock.json";
230064
+ if (existsSync15(join14(fullPath, "pnpm-lock.yaml"))) return "pnpm-lock.yaml";
230065
+ if (existsSync15(join14(fullPath, "pnpm-lock.yml"))) return "pnpm-lock.yml";
230066
+ if (existsSync15(join14(fullPath, "yarn.lock"))) return "yarn.lock";
230067
+ if (existsSync15(join14(fullPath, "package-lock.json"))) return "package-lock.json";
229906
230068
  throw new Error(`No lockfile found in ${fullPath}`);
229907
230069
  }
229908
230070
  async createDirectDependencyPatches(mf, idx, upgradeVersion, ctxt) {
@@ -229975,8 +230137,8 @@ var RushFixingManager = class {
229975
230137
  };
229976
230138
 
229977
230139
  // ../fixing-management/src/fixing-management/nuget/nuget-fixing-manager.ts
229978
- import { readFile as readFile19, writeFile as writeFile7 } from "fs/promises";
229979
- import { join as join13 } from "path";
230140
+ import { readFile as readFile19, writeFile as writeFile9 } from "fs/promises";
230141
+ import { join as join15 } from "path";
229980
230142
 
229981
230143
  // ../utils/src/nuget-utils.ts
229982
230144
  var Cache = class _Cache {
@@ -230077,7 +230239,7 @@ var NugetFixingManager = class {
230077
230239
  const projectFiles = fixingInfo.projectFiles[wsPath];
230078
230240
  if (projectFiles.length !== 1)
230079
230241
  throw new Error("Applying fixes to workspaces with more than 1 project file currently not supported");
230080
- const projectFilePath = join13(this.getAbsWsPath(wsPath), projectFiles[0]);
230242
+ const projectFilePath = join15(this.getAbsWsPath(wsPath), projectFiles[0]);
230081
230243
  const initialProjectFile = await readFile19(projectFilePath, "utf-8");
230082
230244
  const initialLockFile = await this.restoreWorkspaceAndParseLockFile(wsPath);
230083
230245
  await applySeries(fixesWithId, async ({ fixId, vulnerabilityFixes }) => {
@@ -230086,8 +230248,8 @@ var NugetFixingManager = class {
230086
230248
  });
230087
230249
  const finalProjectFile = await readFile19(projectFilePath, "utf-8");
230088
230250
  const finalLockFile = JSON.parse(await readFile19(this.getLockFilePath(wsPath), "utf-8"));
230089
- await writeFile7(projectFilePath, initialProjectFile);
230090
- await writeFile7(this.getLockFilePath(wsPath), JSON.stringify(initialLockFile, null, 2));
230251
+ await writeFile9(projectFilePath, initialProjectFile);
230252
+ await writeFile9(this.getLockFilePath(wsPath), JSON.stringify(initialLockFile, null, 2));
230091
230253
  return { projectFile: finalProjectFile, lockFile: finalLockFile };
230092
230254
  }
230093
230255
  );
@@ -230097,21 +230259,21 @@ var NugetFixingManager = class {
230097
230259
  const projectFiles = fixingInfo.projectFiles[wsPath];
230098
230260
  if (projectFiles.length !== 1)
230099
230261
  throw new Error("Applying fixes to workspaces with more than 1 project file currently not supported");
230100
- await writeFile7(join13(this.getAbsWsPath(wsPath), projectFiles[0]), finalProjectFile);
230101
- await writeFile7(this.getLockFilePath(wsPath), JSON.stringify(finalLockFile, null, 2));
230262
+ await writeFile9(join15(this.getAbsWsPath(wsPath), projectFiles[0]), finalProjectFile);
230263
+ await writeFile9(this.getLockFilePath(wsPath), JSON.stringify(finalLockFile, null, 2));
230102
230264
  });
230103
230265
  if (solutionFiles) {
230104
230266
  for (const solutionFile of solutionFiles) {
230105
230267
  const succeeded = await execAndLogOnFailure2(
230106
230268
  cmdt`dotnet restore ${solutionFile} --use-lock-file`,
230107
- join13(this.rootDir, this.subprojectPath)
230269
+ join15(this.rootDir, this.subprojectPath)
230108
230270
  );
230109
230271
  if (!succeeded) throw new Error(`Error applying fix - could not restore project ${this.subprojectPath}`);
230110
230272
  }
230111
230273
  } else {
230112
230274
  const succeeded = await execAndLogOnFailure2(
230113
230275
  "dotnet restore --use-lock-file",
230114
- join13(this.rootDir, this.subprojectPath)
230276
+ join15(this.rootDir, this.subprojectPath)
230115
230277
  );
230116
230278
  if (!succeeded) throw new Error(`Error applying fix - could not restore project ${this.subprojectPath}`);
230117
230279
  }
@@ -230164,7 +230326,7 @@ var NugetFixingManager = class {
230164
230326
  details.requested = requestedRange;
230165
230327
  });
230166
230328
  });
230167
- await writeFile7(projectFilePath, initialProjectFile);
230329
+ await writeFile9(projectFilePath, initialProjectFile);
230168
230330
  await applySeries(vulnFixes, async ({ dependencyIdentifier, dependencyName }) => {
230169
230331
  await applySeries(
230170
230332
  dependencyTree.transitiveDependencies[dependencyIdentifier].frameworks?.filter(
@@ -230190,10 +230352,10 @@ var NugetFixingManager = class {
230190
230352
  }
230191
230353
  );
230192
230354
  });
230193
- await writeFile7(this.getLockFilePath(wsPath), JSON.stringify(lockFileWithFixes, null, 2));
230355
+ await writeFile9(this.getLockFilePath(wsPath), JSON.stringify(lockFileWithFixes, null, 2));
230194
230356
  }
230195
230357
  async addPackage(packageName, version4, framework, wsPath) {
230196
- const dir = join13(this.rootDir, this.subprojectPath, wsPath);
230358
+ const dir = join15(this.rootDir, this.subprojectPath, wsPath);
230197
230359
  const succeeded = await execAndLogOnFailure2(
230198
230360
  cmdt`dotnet add package ${packageName} --version ${version4} --no-restore --framework ${framework}`,
230199
230361
  dir
@@ -230210,10 +230372,10 @@ var NugetFixingManager = class {
230210
230372
  return JSON.parse(await readFile19(this.getLockFilePath(wsPath), "utf-8"));
230211
230373
  }
230212
230374
  getLockFilePath(wsPath, lockFileName = "packages.lock.json") {
230213
- return join13(this.getAbsWsPath(wsPath), lockFileName);
230375
+ return join15(this.getAbsWsPath(wsPath), lockFileName);
230214
230376
  }
230215
230377
  getAbsWsPath(wsPath) {
230216
- return join13(this.rootDir, this.subprojectPath, wsPath);
230378
+ return join15(this.rootDir, this.subprojectPath, wsPath);
230217
230379
  }
230218
230380
  };
230219
230381
 
@@ -230246,7 +230408,7 @@ var RubyGemsFixingManager = class {
230246
230408
  };
230247
230409
 
230248
230410
  // ../fixing-management/src/fixing-management/rust/cargo-fixing-manager.ts
230249
- import { join as join14 } from "node:path";
230411
+ import { join as join16 } from "node:path";
230250
230412
  var CargoFixingManager = class {
230251
230413
  constructor(rootDir, subprojectPath) {
230252
230414
  this.rootDir = rootDir;
@@ -230262,7 +230424,7 @@ var CargoFixingManager = class {
230262
230424
  });
230263
230425
  }
230264
230426
  async applySecurityFixesForWorkspace(workspacePath, fixes, dependencyTree) {
230265
- const subprojectPath = join14(this.rootDir, this.subprojectPath, workspacePath);
230427
+ const subprojectPath = join16(this.rootDir, this.subprojectPath, workspacePath);
230266
230428
  for (const fix of fixes) {
230267
230429
  const depTreeNode = dependencyTree.transitiveDependencies[fix.dependencyIdentifier];
230268
230430
  if (!depTreeNode || depTreeNode.version !== fix.currentVersion) throw Error("Error applying fix!");
@@ -230286,8 +230448,8 @@ import { dirname as dirname16, resolve as resolve26 } from "node:path";
230286
230448
  // ../utils/src/nuget-project-utils.ts
230287
230449
  var import_parse_xml3 = __toESM(require_dist(), 1);
230288
230450
  import { readFile as readFile20 } from "node:fs/promises";
230289
- import { dirname as dirname15, join as join15, relative as relative10, resolve as resolve25, basename as basename8, extname } from "node:path";
230290
- import { existsSync as existsSync14 } from "node:fs";
230451
+ import { dirname as dirname15, join as join17, relative as relative10, resolve as resolve25, basename as basename8, extname } from "node:path";
230452
+ import { existsSync as existsSync16 } from "node:fs";
230291
230453
 
230292
230454
  // ../utils/dist/version-comparison/version-satisfies.js
230293
230455
  var import_semver3 = __toESM(require_semver2(), 1);
@@ -231721,7 +231883,7 @@ var ecosystemMap2 = {
231721
231883
  RUST: {
231722
231884
  sortVersions: import_semver3.sort,
231723
231885
  isPrerelease: semverIsPrerelease2,
231724
- versionSatisfiesSpecifier: (version4, specifier) => semverSatisfiesSpecifier2(version4, /^\d/.test(specifier) ? `^${specifier}` : specifier),
231886
+ versionSatisfiesSpecifier: (version4, specifier) => specifier.split(",").map((part) => part.trim()).every((trimmed) => semverSatisfiesSpecifier2(version4, /^\d/.test(trimmed) ? `^${trimmed}` : trimmed)),
231725
231887
  versionSatisfiesRelation: import_semver3.cmp,
231726
231888
  versionSatisfiesRange: semverSatisfiesRange2
231727
231889
  },
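The RUST entry above changes versionSatisfiesSpecifier from checking the whole specifier string to splitting it on commas and requiring every part to match, so Cargo-style compound requirements such as ">=1.2, <2" are handled; parts that start with a digit are still interpreted with caret semantics. A standalone sketch of the same logic, using the public semver package as a stand-in for the bundled semverSatisfiesSpecifier2 wrapper, is:

// Sketch of the updated RUST specifier check (assumption: semver.satisfies is a
// close enough stand-in for the bundled semverSatisfiesSpecifier2 wrapper).
import semver from "semver";

function rustVersionSatisfiesSpecifier(version, specifier) {
  return specifier
    .split(",")                       // Cargo allows comma-separated requirements
    .map((part) => part.trim())
    .every((part) => semver.satisfies(version, /^\d/.test(part) ? `^${part}` : part));
}

// rustVersionSatisfiesSpecifier("1.4.0", ">=1.2, <2")  -> true
// rustVersionSatisfiesSpecifier("2.1.0", ">=1.2, <2")  -> false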
@@ -231840,7 +232002,7 @@ async function loadNuGetProject(rootDir, projectFile, validateFile) {
231840
232002
  }
231841
232003
  async function loadNuGetProjectOrTarget(rootDir, projectFile, mainProject, visited, validateFile) {
231842
232004
  const validatedProjectPath = validateFile ? validateFile(resolve25(rootDir, projectFile)) : resolve25(rootDir, projectFile);
231843
- if (!validatedProjectPath || !existsSync14(validatedProjectPath)) return void 0;
232005
+ if (!validatedProjectPath || !existsSync16(validatedProjectPath)) return void 0;
231844
232006
  if (visited.has(validatedProjectPath)) return void 0;
231845
232007
  visited.set(validatedProjectPath);
231846
232008
  const sourceText = await readFile20(validatedProjectPath, "utf-8");
@@ -231915,7 +232077,7 @@ async function loadNuGetProjectOrTarget(rootDir, projectFile, mainProject, visit
231915
232077
  }
231916
232078
  async function loadPackagesConfig(rootDir, file, validateFile) {
231917
232079
  const validatedConfigPath = validateFile(resolve25(rootDir, file));
231918
- if (!validatedConfigPath || !existsSync14(validatedConfigPath)) return void 0;
232080
+ if (!validatedConfigPath || !existsSync16(validatedConfigPath)) return void 0;
231919
232081
  const sourceText = await readFile20(validatedConfigPath, "utf-8");
231920
232082
  const configXml = (0, import_parse_xml3.parseXml)(sourceText, { includeOffsets: true });
231921
232083
  const packages = extractPackagesFromXml(configXml, sourceText);
@@ -231957,7 +232119,7 @@ async function findDirectoryBuildPropsProjects(currentProject, mainProject, visi
231957
232119
  const projectsReverse = [];
231958
232120
  let currentDir = dirname15(currentProject.validatedProjectPath);
231959
232121
  while (currentDir.startsWith(currentProject.rootDir) && currentDir !== dirname15(currentDir)) {
231960
- const unvalidatedPath = join15(currentDir, "Directory.Build.props");
232122
+ const unvalidatedPath = join17(currentDir, "Directory.Build.props");
231961
232123
  const validatedPath = validateFile ? validateFile(unvalidatedPath) : unvalidatedPath;
231962
232124
  if (validatedPath && validatedPath !== currentProject.validatedProjectPath) {
231963
232125
  const directoryBuildPropsProject = await loadNuGetProjectOrTarget(
@@ -231978,7 +232140,7 @@ async function findDirectoryBuildPropsProjects(currentProject, mainProject, visi
231978
232140
  async function findDirectoryPackagesPropsProject(currentProject, mainProject, visited, validateFile) {
231979
232141
  let currentDir = dirname15(currentProject.validatedProjectPath);
231980
232142
  while (currentDir.startsWith(currentProject.rootDir) && currentDir !== dirname15(currentDir)) {
231981
- const unvalidatedPath = join15(currentDir, "Directory.Packages.props");
232143
+ const unvalidatedPath = join17(currentDir, "Directory.Packages.props");
231982
232144
  const validatedPath = validateFile ? validateFile(unvalidatedPath) : unvalidatedPath;
231983
232145
  if (validatedPath) {
231984
232146
  return validatedPath !== currentProject.validatedProjectPath ? await loadNuGetProjectOrTarget(currentProject.rootDir, unvalidatedPath, mainProject, visited, validateFile) : void 0;
@@ -232045,7 +232207,7 @@ async function handleImportElement(currentProject, importElement, mainProject, v
232045
232207
  if (!importPath) return;
232046
232208
  const resolvedPath = resolve25(dirname15(currentProject.validatedProjectPath), normalizeMSBuildPath(importPath));
232047
232209
  const validatedPath = validateFile ? validateFile(resolvedPath) : resolvedPath;
232048
- if (!validatedPath || !existsSync14(validatedPath)) return;
232210
+ if (!validatedPath || !existsSync16(validatedPath)) return;
232049
232211
  const importedProject = await loadNuGetProjectOrTarget(
232050
232212
  currentProject.rootDir,
232051
232213
  resolvedPath,
@@ -232737,7 +232899,7 @@ import { dirname as dirname18, relative as relative11, resolve as resolve28 } fr
232737
232899
  var import_picomatch6 = __toESM(require_picomatch2(), 1);
232738
232900
  var import_semver4 = __toESM(require_semver2(), 1);
232739
232901
  import assert12 from "node:assert";
232740
- import { readFile as readFile22, writeFile as writeFile8 } from "node:fs/promises";
232902
+ import { readFile as readFile22, writeFile as writeFile10 } from "node:fs/promises";
232741
232903
 
232742
232904
  // ../utils/src/cargo-utils.ts
232743
232905
  import { readFile as readFile21 } from "node:fs/promises";
@@ -232882,7 +233044,7 @@ var CargoSocketUpgradeManager = class {
232882
233044
  await this.refreshLockfiles(lockfileToArtifacts, ctxt, "LOCKFILE_ONLY");
232883
233045
  await asyncForEach(Array.from(restoreMap), async ([path9, { content, artifacts }]) => {
232884
233046
  try {
232885
- await writeFile8(path9, content);
233047
+ await writeFile10(path9, content);
232886
233048
  ctxt.statusUpdater?.({
232887
233049
  status: "success",
232888
233050
  file: relative11(this.rootDir, path9),
@@ -233092,7 +233254,7 @@ ${newDependencyLine}`
233092
233254
 
233093
233255
  // ../fixing-management/src/fixing-management/pip/pip-socket-upgrade-manager.ts
233094
233256
  var import_picomatch8 = __toESM(require_picomatch2(), 1);
233095
- import { dirname as dirname20, join as join17, resolve as resolve31 } from "node:path";
233257
+ import { dirname as dirname20, join as join19, resolve as resolve31 } from "node:path";
233096
233258
  import assert13 from "node:assert";
233097
233259
  import { readFile as readFile25 } from "node:fs/promises";
233098
233260
  var import_pip_requirements_js = __toESM(require_dist11(), 1);
@@ -233348,7 +233510,7 @@ function createPep508VersionPatches(file, idx, requirement, oldVersion, upgradeV
233348
233510
  }
233349
233511
 
233350
233512
  // ../utils/src/pip-utils.ts
233351
- import { existsSync as existsSync15 } from "node:fs";
233513
+ import { existsSync as existsSync17 } from "node:fs";
233352
233514
  import { readFile as readFile24 } from "node:fs/promises";
233353
233515
  import { dirname as dirname19, resolve as resolve30 } from "node:path";
233354
233516
  import util4 from "node:util";
@@ -233359,7 +233521,7 @@ var import_semver5 = __toESM(require_semver2(), 1);
233359
233521
  import { execFileSync } from "child_process";
233360
233522
  import { constants as constants3 } from "fs";
233361
233523
  import { access as access3, readFile as readFile23 } from "fs/promises";
233362
- import { join as join16, resolve as resolve29 } from "path";
233524
+ import { join as join18, resolve as resolve29 } from "path";
233363
233525
  import util3 from "util";
233364
233526
  var { once: once2 } = import_lodash6.default;
233365
233527
  var systemPython = once2(() => {
@@ -234124,9 +234286,9 @@ async function buildDependencyTreesFromUvLock(rootDir, uvLockFile) {
234124
234286
  const virtualPath = source.virtual;
234125
234287
  const editablePath = source.editable;
234126
234288
  if (virtualPath instanceof TOMLScalar && typeof virtualPath[value] === "string") {
234127
- localProjectNodeKeys.set(join17(dirname20(uvLockFile), virtualPath[value]), nodeKey);
234289
+ localProjectNodeKeys.set(join19(dirname20(uvLockFile), virtualPath[value]), nodeKey);
234128
234290
  } else if (editablePath instanceof TOMLScalar && typeof editablePath[value] === "string") {
234129
- localProjectNodeKeys.set(join17(dirname20(uvLockFile), editablePath[value]), nodeKey);
234291
+ localProjectNodeKeys.set(join19(dirname20(uvLockFile), editablePath[value]), nodeKey);
234130
234292
  }
234131
234293
  }
234132
234294
  }
@@ -234199,7 +234361,7 @@ import assert14 from "node:assert";
234199
234361
  var import_good_enough_parser4 = __toESM(require_cjs(), 1);
234200
234362
  init_ruby_lang();
234201
234363
  import { resolve as resolve32, dirname as dirname21, relative as relative12 } from "node:path";
234202
- import { existsSync as existsSync16, readFileSync as readFileSync4, readdirSync as readdirSync4 } from "node:fs";
234364
+ import { existsSync as existsSync18, readFileSync as readFileSync6, readdirSync as readdirSync4 } from "node:fs";
234203
234365
  init_gemspec_utils();
234204
234366
  var booleanQuery2 = import_good_enough_parser4.query.alt(
234205
234367
  import_good_enough_parser4.query.sym(/^true|false$/, (ctx, { value: value2, offset }) => {
@@ -234314,8 +234476,8 @@ var evalGemfileQuery = import_good_enough_parser4.query.sym("eval_gemfile").join
234314
234476
  if (pathEvaluated === void 0) return ctx;
234315
234477
  const rootDir = ctx.gemfile.rootDir;
234316
234478
  const file = relative12(rootDir, resolve32(rootDir, dirname21(ctx.gemfile.file), pathEvaluated));
234317
- if (!existsSync16(resolve32(rootDir, file))) return ctx;
234318
- const sourceText = readFileSync4(resolve32(rootDir, file), "utf-8");
234479
+ if (!existsSync18(resolve32(rootDir, file))) return ctx;
234480
+ const sourceText = readFileSync6(resolve32(rootDir, file), "utf-8");
234319
234481
  const parser2 = import_good_enough_parser4.lang.createLang(lang3);
234320
234482
  const cursor = parser2.parse(sourceText);
234321
234483
  const otherCtx = parser2.query(cursor, treeQuery4, {
@@ -234389,7 +234551,7 @@ var gemspecQuery = import_good_enough_parser4.query.sym("gemspec").opt(
234389
234551
  ctx.currentGem = void 0;
234390
234552
  }
234391
234553
  const searchDir = gemspecPath ? resolve32(rootDir, gemfileDir, gemspecPath) : resolve32(rootDir, gemfileDir);
234392
- if (!existsSync16(searchDir)) return ctx;
234554
+ if (!existsSync18(searchDir)) return ctx;
234393
234555
  let gemspecFiles = [];
234394
234556
  try {
234395
234557
  const entries = readdirSync4(searchDir);
@@ -234407,7 +234569,7 @@ var gemspecQuery = import_good_enough_parser4.query.sym("gemspec").opt(
234407
234569
  const gemspecFullPath = resolve32(searchDir, gemspecFile);
234408
234570
  const gemspecRelativePath = relative12(rootDir, gemspecFullPath);
234409
234571
  try {
234410
- const sourceText = readFileSync4(gemspecFullPath, "utf-8");
234572
+ const sourceText = readFileSync6(gemspecFullPath, "utf-8");
234411
234573
  const gemspec = parseGemspec(rootDir, gemspecRelativePath, sourceText);
234412
234574
  ctx.gemspecs.push(gemspec);
234413
234575
  ctx.gems.push(...gemspec.dependencies);
@@ -234550,7 +234712,7 @@ function parseGemfileLock(content) {
234550
234712
  }
234551
234713
 
234552
234714
  // ../fixing-management/src/fixing-management/rubygems/rubygems-socket-upgrade-manager.ts
234553
- import { readFile as readFile26, writeFile as writeFile9 } from "node:fs/promises";
234715
+ import { readFile as readFile26, writeFile as writeFile11 } from "node:fs/promises";
234554
234716
 
234555
234717
  // ../fixing-management/src/fixing-management/rubygems/rubygems-patch-utils.ts
234556
234718
  function createRubygemVersionPatches(gem, idx, upgradeVersion, rangeStyle, statusUpdater) {
@@ -234857,7 +235019,7 @@ var RubygemsSocketUpgradeManager = class {
234857
235019
  });
234858
235020
  await asyncForEach(Array.from(restoreMap), async ([path9, { content, artifacts }]) => {
234859
235021
  try {
234860
- await writeFile9(path9, content);
235022
+ await writeFile11(path9, content);
234861
235023
  ctxt.statusUpdater?.({
234862
235024
  status: "success",
234863
235025
  file: relative13(this.rootDir, path9),
@@ -235113,7 +235275,7 @@ async function applySocketUpgrades(ecosystem, rootDir, ctxt) {
235113
235275
 
235114
235276
  // dist/cli-apply-fix.js
235115
235277
  var import_lodash13 = __toESM(require_lodash(), 1);
235116
- import { existsSync as existsSync20 } from "fs";
235278
+ import { existsSync as existsSync22 } from "fs";
235117
235279
 
235118
235280
  // ../other-modules-communicator/src/other-modules-communicator.ts
235119
235281
  import { execFileSync as execFileSync2 } from "child_process";
@@ -235124,15 +235286,15 @@ import { resolve as resolve35 } from "path";
235124
235286
 
235125
235287
  // ../utils/dist/constants.js
235126
235288
  var import_lodash8 = __toESM(require_lodash(), 1);
235127
- import { dirname as dirname23, join as join19 } from "node:path";
235289
+ import { dirname as dirname23, join as join21 } from "node:path";
235128
235290
  import { fileURLToPath as fileURLToPath3 } from "node:url";
235129
235291
 
235130
235292
  // ../utils/dist/file-utils.js
235131
235293
  var import_lodash7 = __toESM(require_lodash(), 1);
235132
235294
  var import_micromatch2 = __toESM(require_micromatch(), 1);
235133
- import { existsSync as existsSync17 } from "fs";
235295
+ import { existsSync as existsSync19 } from "fs";
235134
235296
  import { access as access4, cp as cp3, readdir as readdir4, stat as stat4 } from "fs/promises";
235135
- import { basename as basename9, join as join18, relative as relative14, resolve as resolve34 } from "path";
235297
+ import { basename as basename9, join as join20, relative as relative14, resolve as resolve34 } from "path";
235136
235298
  var { uniq: uniq2 } = import_lodash7.default;
235137
235299
  var { isMatch: isMatch2 } = import_micromatch2.default;
235138
235300
  function* parents2(dir) {
@@ -235161,10 +235323,10 @@ var COANA_ROOT = once3(() => {
235161
235323
  return coanaRoot;
235162
235324
  });
235163
235325
  var REPOS_PATH = once3(() => {
235164
- return process.env.REPOS_PATH ?? join19(COANA_ROOT(), "repos");
235326
+ return process.env.REPOS_PATH ?? join21(COANA_ROOT(), "repos");
235165
235327
  });
235166
235328
  var COANA_REPOS_PATH = once3(() => {
235167
- return process.env.COANA_REPOS_PATH ?? join19(REPOS_PATH(), "coana-tech");
235329
+ return process.env.COANA_REPOS_PATH ?? join21(REPOS_PATH(), "coana-tech");
235168
235330
  });
235169
235331
 
235170
235332
  // ../docker-management/src/constants.ts
@@ -235178,32 +235340,32 @@ var getImageTag = () => {
235178
235340
  };
235179
235341
 
235180
235342
  // ../docker-management/src/docker-spec.ts
235181
- import { join as join20 } from "path";
235343
+ import { join as join22 } from "path";
235182
235344
  var builderSpecs = [
235183
235345
  {
235184
235346
  name: "maven-builder",
235185
- file: join20("builders", "maven", "Dockerfile"),
235347
+ file: join22("builders", "maven", "Dockerfile"),
235186
235348
  isBuilder: true
235187
235349
  },
235188
235350
  {
235189
235351
  name: "python-builder",
235190
- file: join20("builders", "python", "Dockerfile"),
235352
+ file: join22("builders", "python", "Dockerfile"),
235191
235353
  isBuilder: true
235192
235354
  },
235193
235355
  {
235194
235356
  name: "go-builder",
235195
- file: join20("builders", "go", "Dockerfile"),
235357
+ file: join22("builders", "go", "Dockerfile"),
235196
235358
  isBuilder: true
235197
235359
  }
235198
235360
  ];
235199
235361
  var packageManagerDockerSpecs = [
235200
235362
  {
235201
235363
  name: "npm-package-managers",
235202
- file: join20("package-management", "npm", "Dockerfile")
235364
+ file: join22("package-management", "npm", "Dockerfile")
235203
235365
  },
235204
235366
  {
235205
235367
  name: "maven-package-managers",
235206
- file: join20("package-management", "maven", "Dockerfile"),
235368
+ file: join22("package-management", "maven", "Dockerfile"),
235207
235369
  variants: {
235208
235370
  jdk8: {
235209
235371
  JDK_URL_AMD64: "https://github.com/adoptium/temurin8-binaries/releases/download/jdk8u442-b06/OpenJDK8U-jdk_x64_linux_hotspot_8u442b06.tar.gz",
@@ -235228,57 +235390,57 @@ var packageManagerDockerSpecs = [
235228
235390
  },
235229
235391
  {
235230
235392
  name: "pip-package-managers",
235231
- file: join20("package-management", "pip", "Dockerfile"),
235393
+ file: join22("package-management", "pip", "Dockerfile"),
235232
235394
  from: {
235233
235395
  name: "python-builder"
235234
235396
  }
235235
235397
  },
235236
235398
  {
235237
235399
  name: "go-package-manager",
235238
- file: join20("package-management", "go", "Dockerfile"),
235400
+ file: join22("package-management", "go", "Dockerfile"),
235239
235401
  from: {
235240
235402
  name: "go-builder"
235241
235403
  }
235242
235404
  },
235243
235405
  {
235244
235406
  name: "nuget-package-manager",
235245
- file: join20("package-management", "nuget", "Dockerfile")
235407
+ file: join22("package-management", "nuget", "Dockerfile")
235246
235408
  },
235247
235409
  {
235248
235410
  name: "cargo-package-manager",
235249
- file: join20("package-management", "cargo", "Dockerfile")
235411
+ file: join22("package-management", "cargo", "Dockerfile")
235250
235412
  }
235251
235413
  ];
235252
235414
  var reachabilityAnalyzerDockerSpecs = [
235253
235415
  {
235254
235416
  name: "jelly-analyzer",
235255
- file: join20("reachability-analyzers", "jelly", "Dockerfile")
235417
+ file: join22("reachability-analyzers", "jelly", "Dockerfile")
235256
235418
  },
235257
235419
  {
235258
235420
  name: "alucard-analyzer",
235259
- file: join20("reachability-analyzers", "alucard", "Dockerfile")
235421
+ file: join22("reachability-analyzers", "alucard", "Dockerfile")
235260
235422
  },
235261
235423
  {
235262
235424
  name: "mambalade-analyzer",
235263
- file: join20("reachability-analyzers", "mambalade", "Dockerfile"),
235425
+ file: join22("reachability-analyzers", "mambalade", "Dockerfile"),
235264
235426
  from: {
235265
235427
  name: "python-builder"
235266
235428
  }
235267
235429
  },
235268
235430
  {
235269
235431
  name: "goana-analyzer",
235270
- file: join20("reachability-analyzers", "goana", "Dockerfile"),
235432
+ file: join22("reachability-analyzers", "goana", "Dockerfile"),
235271
235433
  from: {
235272
235434
  name: "go-builder"
235273
235435
  }
235274
235436
  },
235275
235437
  {
235276
235438
  name: "cocoa-analyzer",
235277
- file: join20("reachability-analyzers", "cocoa", "Dockerfile")
235439
+ file: join22("reachability-analyzers", "cocoa", "Dockerfile")
235278
235440
  },
235279
235441
  {
235280
235442
  name: "rustica-analyzer",
235281
- file: join20("reachability-analyzers", "rustica", "Dockerfile")
235443
+ file: join22("reachability-analyzers", "rustica", "Dockerfile")
235282
235444
  }
235283
235445
  ];
235284
235446
  function getAllPackageManagerSpecs() {
@@ -235558,15 +235720,15 @@ async function detectVariantMaven(projectDir) {
235558
235720
  }
235559
235721
 
235560
235722
  // ../docker-management/src/maven/gradle-version-detector.ts
235561
- import { existsSync as existsSync18 } from "fs";
235562
- import { join as join21 } from "path";
235723
+ import { existsSync as existsSync20 } from "fs";
235724
+ import { join as join23 } from "path";
235563
235725
  import { readFile as readFile28 } from "fs/promises";
235564
235726
  async function detectVariantGradle(projectDir) {
235565
235727
  return sanitizeJvmVariant("GRADLE", projectDir, await detect(projectDir));
235566
235728
  }
235567
235729
  async function detect(projectDir) {
235568
- const gradleWrapperPropertiesPath = join21(projectDir, "gradle", "wrapper", "gradle-wrapper.properties");
235569
- const gradleWrapperProperties = existsSync18(gradleWrapperPropertiesPath) ? (await readFile28(gradleWrapperPropertiesPath, "utf-8")).split("\n").map((line) => line.trim()).filter((line) => !line.startsWith("#")).filter((line) => line) : void 0;
235730
+ const gradleWrapperPropertiesPath = join23(projectDir, "gradle", "wrapper", "gradle-wrapper.properties");
235731
+ const gradleWrapperProperties = existsSync20(gradleWrapperPropertiesPath) ? (await readFile28(gradleWrapperPropertiesPath, "utf-8")).split("\n").map((line) => line.trim()).filter((line) => !line.startsWith("#")).filter((line) => line) : void 0;
235570
235732
  if (!gradleWrapperProperties) return void 0;
235571
235733
  const distributionUrlRegex = /.*gradle-(\d+(\.\d+(\.\d+)?)?)/;
235572
235734
  for (const prop2 of gradleWrapperProperties) {
@@ -235580,15 +235742,15 @@ async function detect(projectDir) {
235580
235742
  }
235581
235743
 
235582
235744
  // ../docker-management/src/maven/sbt-version-detector.ts
235583
- import { existsSync as existsSync19 } from "fs";
235584
- import { join as join22 } from "path";
235745
+ import { existsSync as existsSync21 } from "fs";
235746
+ import { join as join24 } from "path";
235585
235747
  import { readFile as readFile29 } from "fs/promises";
235586
235748
  async function detectVariantSbt(projectDir) {
235587
235749
  return sanitizeJvmVariant("SBT", projectDir, await detect2(projectDir));
235588
235750
  }
235589
235751
  async function detect2(projectDir) {
235590
- const sbtBuildPropertiesPath = join22(projectDir, "project", "build.properties");
235591
- const sbtBuildProperties = existsSync19(sbtBuildPropertiesPath) ? (await readFile29(sbtBuildPropertiesPath, "utf-8")).split("\n").map((line) => line.trim()).filter((line) => !line.startsWith("#")).filter((line) => line) : void 0;
235752
+ const sbtBuildPropertiesPath = join24(projectDir, "project", "build.properties");
235753
+ const sbtBuildProperties = existsSync21(sbtBuildPropertiesPath) ? (await readFile29(sbtBuildPropertiesPath, "utf-8")).split("\n").map((line) => line.trim()).filter((line) => !line.startsWith("#")).filter((line) => line) : void 0;
235592
235754
  if (!sbtBuildProperties) return void 0;
235593
235755
  for (const prop2 of sbtBuildProperties) {
235594
235756
  const [key, value2] = prop2.split("=");
@@ -235702,18 +235864,18 @@ async function findReachabilityAnalyzersDockerImage(ecosystem) {
235702
235864
  // ../other-modules-communicator/src/other-modules-communicator.ts
235703
235865
  var import_lodash12 = __toESM(require_lodash(), 1);
235704
235866
  import { rmSync } from "fs";
235705
- import { mkdir as mkdir5, readFile as readFile30, writeFile as writeFile10 } from "fs/promises";
235867
+ import { mkdir as mkdir5, readFile as readFile30, writeFile as writeFile12 } from "fs/promises";
235706
235868
  import assert15 from "node:assert";
235707
235869
  import { platform as platform8 } from "os";
235708
- import { join as join25, posix as posix2, relative as relative16, sep as sep3 } from "path";
235870
+ import { join as join27, posix as posix2, relative as relative16, sep as sep3 } from "path";
235709
235871
 
235710
235872
  // ../utils/src/tmp-file.ts
235711
235873
  import { rm, mkdtemp, cp as cp4, lstat as lstat2 } from "fs/promises";
235712
- import { tmpdir as tmpdir2 } from "os";
235713
- import { join as join23, relative as relative15, sep as sep2, extname as extname2 } from "path";
235874
+ import { tmpdir as tmpdir4 } from "os";
235875
+ import { join as join25, relative as relative15, sep as sep2, extname as extname2 } from "path";
235714
235876
  async function createTmpDirectory(prefix) {
235715
235877
  try {
235716
- const tmpDir = await mkdtemp(join23(tmpdir2(), prefix));
235878
+ const tmpDir = await mkdtemp(join25(tmpdir4(), prefix));
235717
235879
  return tmpDir;
235718
235880
  } catch (err) {
235719
235881
  console.log("Error creating tmp directory", err);
@@ -235988,7 +236150,7 @@ import { resolve as resolve36 } from "path";
235988
236150
 
235989
236151
  // ../utils/src/constants.ts
235990
236152
  var import_lodash10 = __toESM(require_lodash(), 1);
235991
- import { dirname as dirname24, join as join24 } from "node:path";
236153
+ import { dirname as dirname24, join as join26 } from "node:path";
235992
236154
  import { fileURLToPath as fileURLToPath4 } from "node:url";
235993
236155
  var { once: once5 } = import_lodash10.default;
235994
236156
  var fileName2 = fileURLToPath4(import.meta.url);
@@ -236001,10 +236163,10 @@ var COANA_ROOT2 = once5(() => {
236001
236163
  return coanaRoot;
236002
236164
  });
236003
236165
  var REPOS_PATH2 = once5(() => {
236004
- return process.env.REPOS_PATH ?? join24(COANA_ROOT2(), "repos");
236166
+ return process.env.REPOS_PATH ?? join26(COANA_ROOT2(), "repos");
236005
236167
  });
236006
236168
  var COANA_REPOS_PATH2 = once5(() => {
236007
- return process.env.COANA_REPOS_PATH ?? join24(REPOS_PATH2(), "coana-tech");
236169
+ return process.env.COANA_REPOS_PATH ?? join26(REPOS_PATH2(), "coana-tech");
236008
236170
  });
236009
236171
  var REQUIREMENTS_FILES_SEARCH_DEPTH = 2;
236010
236172
 
@@ -236043,7 +236205,7 @@ var { memoize, once: once7, take } = import_lodash12.default;
236043
236205
  async function getReachabilityAnalyzersScriptPath() {
236044
236206
  if (isNexeMode()) {
236045
236207
  const extractedPath = await extractTool("reachability-analyzers", "reachability-analyzers-cli.mjs");
236046
- return join25(extractedPath, "reachability-analyzers-cli.mjs");
236208
+ return join27(extractedPath, "reachability-analyzers-cli.mjs");
236047
236209
  }
236048
236210
  return REACHABILITY_ANALYZERS_SCRIPT_PATH();
236049
236211
  }
@@ -236141,7 +236303,7 @@ var OtherModulesCommunicator = class {
236141
236303
  return "Running reachability analysis on package registry package";
236142
236304
  }
236143
236305
  }
236144
- return `${_cmdStr()}: (${ecosystem}) ${relative16(this.rootWorkingDir, join25(subprojectPath, workspacePath)) || "."}`;
236306
+ return `${_cmdStr()}: (${ecosystem}) ${relative16(this.rootWorkingDir, join27(subprojectPath, workspacePath)) || "."}`;
236145
236307
  }
236146
236308
  getProjectPath(subprojectPath) {
236147
236309
  return this.options.runWithoutDocker ? subprojectPath : posix2.resolve("/project", relative16(this.rootWorkingDir, subprojectPath).replaceAll(sep3, posix2.sep));
@@ -236190,7 +236352,7 @@ var OtherModulesCommunicator = class {
236190
236352
  async runPackageManagerCommandWithOutput(commandName, packageManagerName, subprojectPath, args2 = [], extraDockerArgs, env) {
236191
236353
  const tmpDir = await this.getTmpDirForSubproject(subprojectPath);
236192
236354
  const outputFileName = `${v4_default()}-${commandName}-output.json`;
236193
- const outputFilePathThisProcess = join25(tmpDir, outputFileName);
236355
+ const outputFilePathThisProcess = join27(tmpDir, outputFileName);
236194
236356
  const outputFilePathOtherProcess = this.options.runWithoutDocker ? outputFilePathThisProcess : posix2.join(TMP_DIR_IN_DOCKER, outputFileName);
236195
236357
  await this.runPackageManagerCommand(
236196
236358
  commandName,
@@ -236228,8 +236390,8 @@ var OtherModulesCommunicator = class {
236228
236390
  if (isNexeMode()) {
236229
236391
  const baseDir = getExtractionBaseDir();
236230
236392
  env.COANA_ROOT = baseDir;
236231
- env.REPOS_PATH = join25(baseDir, "repos");
236232
- env.COANA_REPOS_PATH = join25(baseDir, "repos", "coana-tech");
236393
+ env.REPOS_PATH = join27(baseDir, "repos");
236394
+ env.COANA_REPOS_PATH = join27(baseDir, "repos", "coana-tech");
236233
236395
  env.REACHABILITY_ANALYZERS_SCRIPT_PATH = scriptPath;
236234
236396
  }
236235
236397
  return Spinner.instance().wrap(
@@ -236266,7 +236428,7 @@ var OtherModulesCommunicator = class {
236266
236428
  async runReachabilityAnalyzerCommandWithOutput(commandName, ecosystem, subprojectPath, workspacePath, args2, env, rootWorkingDirOverride, displaySubprojectPath) {
236267
236429
  const tmpDir = await this.getTmpDirForSubproject(displaySubprojectPath ?? subprojectPath);
236268
236430
  const outputFileName = `${v4_default()}-${commandName}-output.json`;
236269
- const outputFilePathThisProcess = join25(tmpDir, outputFileName);
236431
+ const outputFilePathThisProcess = join27(tmpDir, outputFileName);
236270
236432
  const outputFilePathOtherProcess = this.options.runWithoutDocker ? outputFilePathThisProcess : posix2.join(TMP_DIR_IN_DOCKER, outputFileName);
236271
236433
  await this.runReachabilityAnalyzerCommand(
236272
236434
  commandName,
@@ -236305,9 +236467,9 @@ var OtherModulesCommunicator = class {
236305
236467
  if (providedOptions.type === "providee") {
236306
236468
  const tmpDir = await this.getTmpDirForSubproject(subprojectPath);
236307
236469
  const providerFileName = "provider.json";
236308
- const providerFileThisProcess = join25(tmpDir, providerFileName);
236470
+ const providerFileThisProcess = join27(tmpDir, providerFileName);
236309
236471
  const providerFileOtherProcess = this.options.runWithoutDocker ? providerFileThisProcess : posix2.join(TMP_DIR_IN_DOCKER, providerFileName);
236310
- await writeFile10(providerFileThisProcess, JSON.stringify(providedOptions.provider));
236472
+ await writeFile12(providerFileThisProcess, JSON.stringify(providedOptions.provider));
236311
236473
  return ["--provider", providerFileOtherProcess];
236312
236474
  } else {
236313
236475
  return ["--as-provider"];
@@ -236352,9 +236514,9 @@ var OtherModulesCommunicator = class {
236352
236514
  await extractAllToolsForNexeMode();
236353
236515
  }
236354
236516
  const inputFileName = `${v4_default()}-runReachabilityAnalysis-input.json`;
236355
- const inputFileThisProcess = join25(tmpDir, inputFileName);
236517
+ const inputFileThisProcess = join27(tmpDir, inputFileName);
236356
236518
  const inputFileOtherProcess = this.options.runWithoutDocker ? inputFileThisProcess : posix2.join(TMP_DIR_IN_DOCKER, inputFileName);
236357
- await writeFile10(
236519
+ await writeFile12(
236358
236520
  inputFileThisProcess,
236359
236521
  JSON.stringify({
236360
236522
  workspaceData,
@@ -236396,7 +236558,7 @@ var setUpGoModuleCache = once7(async () => {
236396
236558
  execFileSync2("chmod", ["--recursive", "+rw", tmpDir]);
236397
236559
  rmSync(tmpDir, { recursive: true, force: true });
236398
236560
  });
236399
- const [upper, work] = [join25(tmpDir, "upper"), join25(tmpDir, "work")];
236561
+ const [upper, work] = [join27(tmpDir, "upper"), join27(tmpDir, "work")];
236400
236562
  for (const dir of [upper, work]) await mkdir5(dir);
236401
236563
  const o7 = await execNeverFail2(
236402
236564
  cmdt`docker volume create --driver local --opt type=overlay
@@ -236432,7 +236594,7 @@ function abbreviateList(items, maxItems) {
236432
236594
  import { resolve as resolve37 } from "path";
236433
236595
 
236434
236596
  // ../utils/src/dashboard-api/coana-api.ts
236435
- import { writeFile as writeFile11 } from "fs/promises";
236597
+ import { writeFile as writeFile13 } from "fs/promises";
236436
236598
  var import_artifact = __toESM(require_artifact_client2(), 1);
236437
236599
  var coanaAPI = process.env.PUBLIC_API_URL ?? "https://app.coana.tech/api/v1";
236438
236600
  var axiosClient2 = getAxiosClient();
@@ -236562,7 +236724,7 @@ async function sendToDashboard(report, writeReportToFile, reportId, apiKey) {
236562
236724
  try {
236563
236725
  if (writeReportToFile) {
236564
236726
  logger.info("Writing report to dashboard-report.json");
236565
- await writeFile11("dashboard-report.json", JSON.stringify(report, null, 2));
236727
+ await writeFile13("dashboard-report.json", JSON.stringify(report, null, 2));
236566
236728
  if (process.env.GITHUB_ACTIONS === "true") {
236567
236729
  logger.info("uploading dashboard-report.json as an artifact");
236568
236730
  (0, import_artifact.create)().uploadArtifact("dashboard-report", ["dashboard-report.json"], process.cwd());
@@ -236833,7 +236995,7 @@ async function verifyFixes(fixes, otherModulesCommunicator, rootPath) {
236833
236995
  if (pathsForEachFixIdData.length !== new Set(pathsForEachFixIdData).size) {
236834
236996
  throw new Error("Multiple fix IDs found for the same subproject, workspace and ecosystem");
236835
236997
  }
236836
- const subprojectsNotFound = uniq3(fixes.filter(({ vulnerabilityInstance: v }) => !existsSync20(resolve37(rootPath, v.subprojectPath))).map(({ vulnerabilityInstance: v }) => `${v.subprojectPath}:${v.ecosystem}`));
236998
+ const subprojectsNotFound = uniq3(fixes.filter(({ vulnerabilityInstance: v }) => !existsSync22(resolve37(rootPath, v.subprojectPath))).map(({ vulnerabilityInstance: v }) => `${v.subprojectPath}:${v.ecosystem}`));
236837
236999
  if (subprojectsNotFound.length > 0) {
236838
237000
  throw new Error(`Cannot find the following subprojects: ${subprojectsNotFound.join(", ")}`);
236839
237001
  }
@@ -237647,7 +237809,7 @@ function getVulnerabilitiesFromReport(report) {
237647
237809
  var import_packageurl_js = __toESM(require_packageurl_js(), 1);
237648
237810
 
237649
237811
  // dist/cli-upgrade-purl.js
237650
- import { join as join28, relative as relative19, resolve as resolve40 } from "node:path";
237812
+ import { join as join30, relative as relative19, resolve as resolve40 } from "node:path";
237651
237813
  var import_picomatch10 = __toESM(require_picomatch2(), 1);
237652
237814
 
237653
237815
  // ../project-management/src/project-management/project-manager.ts
@@ -237656,12 +237818,12 @@ import { relative as relative18, resolve as resolve39 } from "path";
237656
237818
  // ../project-management/src/project-management/ecosystem-management/ecosystem-manager.ts
237657
237819
  var import_micromatch3 = __toESM(require_micromatch2(), 1);
237658
237820
  import { readdir as readdir6 } from "fs/promises";
237659
- import { join as join27, relative as relative17, resolve as resolve38 } from "path";
237821
+ import { join as join29, relative as relative17, resolve as resolve38 } from "path";
237660
237822
 
237661
237823
  // ../project-management/src/project-management/ecosystem-management/ecosystem-specs.ts
237662
- import { existsSync as existsSync21 } from "fs";
237824
+ import { existsSync as existsSync23 } from "fs";
237663
237825
  import { readdir as readdir5, readFile as readFile31 } from "fs/promises";
237664
- import { join as join26, sep as sep4 } from "path";
237826
+ import { join as join28, sep as sep4 } from "path";
237665
237827
  var specs = {
237666
237828
  NPM: [
237667
237829
  /* @__PURE__ */ new Map([["package.json", packageManagerIfPackageJSONExistsAndValid("NPM")]]),
@@ -237692,7 +237854,7 @@ var specs = {
237692
237854
  [
237693
237855
  /^(pyproject.toml|setup.py|requirements.*\.txt)/,
237694
237856
  async (projectDir) => {
237695
- const isPythonProject = await exists(join26(projectDir, "pyproject.toml")) || await exists(join26(projectDir, "setup.py")) && await isSetupPySetuptools(join26(projectDir, "setup.py"));
237857
+ const isPythonProject = await exists(join28(projectDir, "pyproject.toml")) || await exists(join28(projectDir, "setup.py")) && await isSetupPySetuptools(join28(projectDir, "setup.py"));
237696
237858
  if (isPythonProject) {
237697
237859
  properPythonProjects.push(projectDir + sep4);
237698
237860
  return "PIP_REQUIREMENTS";
@@ -237715,7 +237877,7 @@ var specs = {
237715
237877
  [
237716
237878
  "uv.lock",
237717
237879
  async (projectDir) => {
237718
- if (await exists(join26(projectDir, "pyproject.toml"))) {
237880
+ if (await exists(join28(projectDir, "pyproject.toml"))) {
237719
237881
  logger.warn("uv is not supported yet, using plain pyproject.toml as a fallback");
237720
237882
  return "PIP_REQUIREMENTS";
237721
237883
  } else logger.error("uv.lock found without pyproject.toml");
@@ -237736,8 +237898,8 @@ function getEcosystemSpecs(ecosystems) {
237736
237898
  }
237737
237899
  function packageManagerIfPackageJSONExistsAndValid(packageManager) {
237738
237900
  return async (projectDir) => {
237739
- if (!existsSync21(join26(projectDir, "package.json"))) return void 0;
237740
- const packageJSONPath = join26(projectDir, "package.json");
237901
+ if (!existsSync23(join28(projectDir, "package.json"))) return void 0;
237902
+ const packageJSONPath = join28(projectDir, "package.json");
237741
237903
  try {
237742
237904
  JSON.parse(await readFile31(packageJSONPath, "utf-8"));
237743
237905
  return packageManager;
@@ -237794,7 +237956,7 @@ var EcosystemManager = class _EcosystemManager {
237794
237956
  const resolvedProjectDir = resolve38(mainProjectDir, relativeProjectDir);
237795
237957
  if (config3.includeDirs.length > 0)
237796
237958
  workspacePaths = workspacePaths.filter(
237797
- (workspacePath) => isMatch3(relative17(mainProjectDir, join27(resolvedProjectDir, workspacePath)), config3.includeDirs)
237959
+ (workspacePath) => isMatch3(relative17(mainProjectDir, join29(resolvedProjectDir, workspacePath)), config3.includeDirs)
237798
237960
  );
237799
237961
  workspacePaths.filter((workspacePath) => workspacePath !== ".").forEach((workspacePath) => projectDirsAlreadyCovered.push(resolve38(resolvedProjectDir, workspacePath)));
237800
237962
  if (workspacePaths.length > 0)
@@ -237822,7 +237984,7 @@ var EcosystemManager = class _EcosystemManager {
237822
237984
  }
237823
237985
  }
237824
237986
  for (const dir of directoriesToTraverse) {
237825
- await recHelper(join27(projectDir, dir), true);
237987
+ await recHelper(join29(projectDir, dir), true);
237826
237988
  }
237827
237989
  }
237828
237990
  async function getPackageManagerName(projectDir, foundProjectFiles, foundLockFiles) {
@@ -237851,7 +238013,7 @@ var EcosystemManager = class _EcosystemManager {
237851
238013
  const filesAndDirectories = await readdir6(projectDir, { withFileTypes: true });
237852
238014
  for (const dirent of filesAndDirectories) {
237853
238015
  const fileOrDirectory = dirent.name;
237854
- const fullPath = join27(projectDir, fileOrDirectory);
238016
+ const fullPath = join29(projectDir, fileOrDirectory);
237855
238017
  if (dirent.isDirectory()) {
237856
238018
  if (shouldIgnoreDir(fileOrDirectory) || shouldIgnoreDueToExcludeDirsOrChangedFiles(config3, fullPath))
237857
238019
  continue;
@@ -237878,7 +238040,7 @@ var EcosystemManager = class _EcosystemManager {
237878
238040
  packageManagerName,
237879
238041
  subprojectPath,
237880
238042
  workspacePaths: workspacePaths.filter(
237881
- (workspacePath) => !shouldIgnoreDueToExcludeDirsOrChangedFiles(this.config, join27(subprojectPath, workspacePath))
238043
+ (workspacePath) => !shouldIgnoreDueToExcludeDirsOrChangedFiles(this.config, join29(subprojectPath, workspacePath))
237882
238044
  )
237883
238045
  }));
237884
238046
  }
@@ -238223,7 +238385,7 @@ ${workspacePathsMatchingGlob.map((wsPath) => ` ${wsPath}`).join("\n")}`);
238223
238385
  });
238224
238386
  if (vulnerabilityFixes.length === 0)
238225
238387
  return;
238226
- logger.info(`Found ${vulnerabilityFixes.length} ${vulnerabilityFixes.length === 1 ? "dependency" : "dependencies"} matching upgrade specs for ${join28(subproject.subprojectPath, wsPath)}`);
238388
+ logger.info(`Found ${vulnerabilityFixes.length} ${vulnerabilityFixes.length === 1 ? "dependency" : "dependencies"} matching upgrade specs for ${join30(subproject.subprojectPath, wsPath)}`);
238227
238389
  workspaceToFixes[wsPath] = [
238228
238390
  {
238229
238391
  fixId: "dummy",
@@ -238244,7 +238406,7 @@ ${workspacePathsMatchingGlob.map((wsPath) => ` ${wsPath}`).join("\n")}`);
238244
238406
  }
238245
238407
  }
238246
238408
  var signalFixApplied = (_fixId, subprojectPath, workspacePath, vulnerabilityFixes) => {
238247
- logger.info(`Successfully upgraded purls for: ${join28(subprojectPath, workspacePath)}`);
238409
+ logger.info(`Successfully upgraded purls for: ${join30(subprojectPath, workspacePath)}`);
238248
238410
  logger.info(`Upgraded:
238249
238411
  ${vulnerabilityFixes.map((fix) => ` ${fix.dependencyName} from ${fix.currentVersion} to ${fix.fixedVersion}`).join("\n")}`);
238250
238412
  };
@@ -238457,8 +238619,8 @@ function prettyApplyFixesTo(applyFixesToOption) {
238457
238619
 
238458
238620
  // dist/cli-core.js
238459
238621
  import assert16 from "node:assert";
238460
- import { existsSync as existsSync26, writeFileSync as writeFileSync3 } from "fs";
238461
- import { mkdir as mkdir6, writeFile as writeFile13 } from "fs/promises";
238622
+ import { existsSync as existsSync28, writeFileSync as writeFileSync3 } from "fs";
238623
+ import { mkdir as mkdir6, writeFile as writeFile15 } from "fs/promises";
238462
238624
 
238463
238625
  // ../../node_modules/.pnpm/kleur@4.1.5/node_modules/kleur/index.mjs
238464
238626
  var FORCE_COLOR;
@@ -238563,7 +238725,7 @@ var kleur_default = $;
238563
238725
  // dist/cli-core.js
238564
238726
  var import_lodash15 = __toESM(require_lodash(), 1);
238565
238727
  import os from "os";
238566
- import { join as join32, relative as relative20, resolve as resolve42 } from "path";
238728
+ import { join as join34, relative as relative20, resolve as resolve42 } from "path";
238567
238729
 
238568
238730
  // ../utils/src/dashboard-api/shared-api.ts
238569
238731
  var DashboardAPI = class {
@@ -238703,6 +238865,14 @@ var DashboardAPI = class {
238703
238865
  await this.socketAPI.sendTelemetrySocket(analysisMetadataId, telemetry);
238704
238866
  }
238705
238867
  }
238868
+ async sendAnalyzerTelemetry(analysisMetadataId, telemetry) {
238869
+ if (this.disableAnalyticsSharing) {
238870
+ return;
238871
+ }
238872
+ if (this.socketMode) {
238873
+ await this.socketAPI.sendAnalyzerTelemetrySocket(analysisMetadataId, telemetry);
238874
+ }
238875
+ }
238706
238876
  async registerDiagnosticsToAnalysisMetadata(analysisMetadataId, diagnosticsData) {
238707
238877
  if (this.disableAnalyticsSharing || !analysisMetadataId) {
238708
238878
  return;
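The one piece of new API surface in this region is DashboardAPI.sendAnalyzerTelemetry, added alongside the existing sendTelemetry method. A reduced sketch of its guard pattern; the constructor and the SocketAPI stub are hypothetical stand-ins, since only the method body appears in the diff:

// Telemetry is dropped when analytics sharing is disabled and is only forwarded
// when the CLI runs in socket mode, mirroring the added method above.
class DashboardAPI {
  constructor(socketAPI, { socketMode = false, disableAnalyticsSharing = false } = {}) {
    this.socketAPI = socketAPI;
    this.socketMode = socketMode;
    this.disableAnalyticsSharing = disableAnalyticsSharing;
  }

  async sendAnalyzerTelemetry(analysisMetadataId, telemetry) {
    if (this.disableAnalyticsSharing) return;
    if (this.socketMode) {
      await this.socketAPI.sendAnalyzerTelemetrySocket(analysisMetadataId, telemetry);
    }
  }
}

// Hypothetical usage with a stubbed socket client:
const api = new DashboardAPI(
  { sendAnalyzerTelemetrySocket: async (id, t) => console.log(id, t) },
  { socketMode: true }
);
await api.sendAnalyzerTelemetry("meta-123", { analyzer: "rustica", durationMs: 42 });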
@@ -238779,8 +238949,8 @@ var BatchedHttpLogStreamer = class {
238779
238949
  // ../utils/src/logging/socket-log-server.ts
238780
238950
  import { createServer } from "net";
238781
238951
  import { once as once8 } from "events";
238782
- import { createWriteStream as createWriteStream4, existsSync as existsSync22 } from "fs";
238783
- import { unlink } from "fs/promises";
238952
+ import { createWriteStream as createWriteStream4, existsSync as existsSync24 } from "fs";
238953
+ import { unlink as unlink3 } from "fs/promises";
238784
238954
  var SocketLogServer = class {
238785
238955
  server;
238786
238956
  socketPath;
@@ -238800,8 +238970,8 @@ var SocketLogServer = class {
238800
238970
  this.server = createServer((socket) => this.handleConnection(socket));
238801
238971
  }
238802
238972
  async start() {
238803
- if (existsSync22(this.socketPath)) {
238804
- await unlink(this.socketPath);
238973
+ if (existsSync24(this.socketPath)) {
238974
+ await unlink3(this.socketPath);
238805
238975
  }
238806
238976
  this.server.listen(this.socketPath);
238807
238977
  await once8(this.server, "listening");
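SocketLogServer.start() follows a common Unix-domain-socket pattern: remove any stale socket file left by a previous run, then listen and await the "listening" event. A self-contained sketch, with the connection handler and socket path supplied by the caller rather than by the class shown above:

import { createServer } from "net";
import { once } from "events";
import { existsSync } from "fs";
import { unlink } from "fs/promises";

async function startSocketServer(socketPath, onConnection) {
  if (existsSync(socketPath)) {
    await unlink(socketPath); // a previous run may have left the socket file behind
  }
  const server = createServer(onConnection);
  server.listen(socketPath);
  await once(server, "listening");
  return server;
}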
@@ -238922,8 +239092,8 @@ var SocketLogServer = class {
238922
239092
  return new Promise((resolve45, reject) => {
238923
239093
  this.server.close((serverError) => {
238924
239094
  this.writeStream.end(() => {
238925
- if (existsSync22(this.socketPath)) {
238926
- unlink(this.socketPath).then(() => {
239095
+ if (existsSync24(this.socketPath)) {
239096
+ unlink3(this.socketPath).then(() => {
238927
239097
  if (serverError) reject(serverError);
238928
239098
  else resolve45();
238929
239099
  }).catch((unlinkError) => reject(serverError ?? unlinkError));
@@ -239208,16 +239378,16 @@ var bgWhiteBright = format5(107, 49);
239208
239378
  var DEFAULT_REPORT_FILENAME_BASE = "coana-report";
239209
239379
 
239210
239380
  // dist/internal/exclude-dirs-from-configuration-files.js
239211
- import { existsSync as existsSync23 } from "fs";
239381
+ import { existsSync as existsSync25 } from "fs";
239212
239382
  import { readFile as readFile32 } from "fs/promises";
239213
239383
  import { basename as basename10, resolve as resolve41 } from "path";
239214
239384
  var import_yaml2 = __toESM(require_dist12(), 1);
239215
239385
  async function inferExcludeDirsFromConfigurationFiles(rootWorkingDir) {
239216
239386
  const socketYmlConfigFile = resolve41(rootWorkingDir, "socket.yml");
239217
- if (existsSync23(socketYmlConfigFile))
239387
+ if (existsSync25(socketYmlConfigFile))
239218
239388
  return inferExcludeDirsFromSocketConfig(socketYmlConfigFile);
239219
239389
  const socketYamlConfigFile = resolve41(rootWorkingDir, "socket.yaml");
239220
- if (existsSync23(socketYamlConfigFile))
239390
+ if (existsSync25(socketYamlConfigFile))
239221
239391
  return inferExcludeDirsFromSocketConfig(socketYamlConfigFile);
239222
239392
  return void 0;
239223
239393
  }
@@ -239241,7 +239411,7 @@ async function inferExcludeDirsFromSocketConfig(socketConfigFile) {
239241
239411
  var import_fast_glob = __toESM(require_out4(), 1);
239242
239412
  var import_ignore3 = __toESM(require_ignore(), 1);
239243
239413
  import { readFile as readFile33 } from "fs/promises";
239244
- import { join as join29 } from "path";
239414
+ import { join as join31 } from "path";
239245
239415
  var DEFAULT_IGNORE_PATTERNS = [
239246
239416
  "**/node_modules/**",
239247
239417
  "**/.git/**",
@@ -239255,7 +239425,7 @@ var DEFAULT_IGNORE_PATTERNS = [
239255
239425
  ];
239256
239426
  async function loadGitignore(rootDir) {
239257
239427
  try {
239258
- const gitignorePath = join29(rootDir, ".gitignore");
239428
+ const gitignorePath = join31(rootDir, ".gitignore");
239259
239429
  const content = await readFile33(gitignorePath, "utf-8");
239260
239430
  return (0, import_ignore3.default)().add(content);
239261
239431
  } catch {
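loadGitignore builds an `ignore` matcher from the project's .gitignore and is paired with the DEFAULT_IGNORE_PATTERNS list above. A standalone sketch; the empty-matcher fallback is an assumption, since the hunk is cut off before the catch body:

import ignore from "ignore";
import { readFile } from "fs/promises";
import { join } from "path";

async function loadGitignore(rootDir) {
  try {
    const content = await readFile(join(rootDir, ".gitignore"), "utf-8");
    return ignore().add(content);
  } catch {
    // Assumed fallback: behave as if .gitignore were empty.
    return ignore();
  }
}

const ig = await loadGitignore(process.cwd());
console.log(ig.ignores("node_modules/left-pad/index.js"));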
@@ -239440,7 +239610,7 @@ function transformToVulnChainNode(dependencyTree) {
239440
239610
  }
239441
239611
 
239442
239612
  // dist/internal/socket-mode-helpers-socket-dependency-trees.js
239443
- import { basename as basename11, dirname as dirname25, join as join30, sep as sep5 } from "path";
239613
+ import { basename as basename11, dirname as dirname25, join as join32, sep as sep5 } from "path";
239444
239614
  var REQUIREMENTS_FILES_SEARCH_DEPTH2 = 3;
239445
239615
  var venvExcludes = [
239446
239616
  "venv",
@@ -239565,7 +239735,7 @@ async function fetchArtifactsFromSocket(rootWorkingDirectory, manifestsTarHash,
239565
239735
  for (const file of allFiles) {
239566
239736
  const base = basename11(file);
239567
239737
  const workspaceDir = dirname25(file) || ".";
239568
- if (base === "pyproject.toml" || base === "setup.py" && await isSetupPySetuptools(join30(rootWorkingDirectory, file))) {
239738
+ if (base === "pyproject.toml" || base === "setup.py" && await isSetupPySetuptools(join32(rootWorkingDirectory, file))) {
239569
239739
  if (!properPythonProjects.includes(workspaceDir)) {
239570
239740
  properPythonProjects.push(workspaceDir);
239571
239741
  }
@@ -253880,12 +254050,12 @@ var { root: root2 } = static_exports;
253880
254050
 
253881
254051
  // ../utils/src/maven-utils.ts
253882
254052
  var import_lodash14 = __toESM(require_lodash(), 1);
253883
- import { existsSync as existsSync25, readdirSync as readdirSync5, statSync as statSync5 } from "fs";
253884
- import { join as join31 } from "path";
254053
+ import { existsSync as existsSync27, readdirSync as readdirSync5, statSync as statSync5 } from "fs";
254054
+ import { join as join33 } from "path";
253885
254055
 
253886
254056
  // ../utils/src/download-utils.ts
253887
- import { existsSync as existsSync24 } from "fs";
253888
- import { writeFile as writeFile12 } from "fs/promises";
254057
+ import { existsSync as existsSync26 } from "fs";
254058
+ import { writeFile as writeFile14 } from "fs/promises";
253889
254059
 
253890
254060
  // ../utils/src/maven-utils.ts
253891
254061
  var { memoize: memoize3 } = import_lodash14.default;
@@ -255252,7 +255422,7 @@ async function onlineScan(dependencyTree, apiKey, timeout) {
255252
255422
  }
255253
255423
 
255254
255424
  // dist/version.js
255255
- var version3 = "14.12.145";
255425
+ var version3 = "14.12.147";
255256
255426
 
255257
255427
  // dist/cli-core.js
255258
255428
  var { mapValues, omit, partition, pickBy: pickBy2 } = import_lodash15.default;
@@ -255373,8 +255543,8 @@ var CliCore = class {
255373
255543
  }
255374
255544
  async main() {
255375
255545
  const tmpDir = await createTmpDirectory("coana-cli-");
255376
- this.coanaLogPath = join32(tmpDir, "coana-log.txt");
255377
- this.coanaSocketPath = join32(tmpDir, "coana.sock");
255546
+ this.coanaLogPath = join34(tmpDir, "coana-log.txt");
255547
+ this.coanaSocketPath = join34(tmpDir, "coana.sock");
255378
255548
  if (this.options.socketMode) {
255379
255549
  logger.enableStreamBuffering();
255380
255550
  }
@@ -255425,7 +255595,7 @@ var CliCore = class {
255425
255595
  }, this.apiKey);
255426
255596
  if (this.options.manifestsTarHash) {
255427
255597
  if (this.options.excludeDirs && this.options.excludeDirs.length > 0) {
255428
- this.options.excludeDirs = this.options.excludeDirs.map((excludeDir) => excludeDir.endsWith("/*") ? excludeDir : `${excludeDir}/**/*`);
255598
+ this.options.excludeDirs = this.options.excludeDirs.map((excludeDir) => excludeDir.endsWith("/*") ? excludeDir : `${excludeDir}/**`);
255429
255599
  }
255430
255600
  await this.computeAndOutputReportSocketMode(otherModulesCommunicator);
255431
255601
  this.spinner.stop();
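The only behavioral tweak in this hunk is the excludeDirs normalization: directories that do not already end in "/*" are now expanded to "<dir>/**" instead of "<dir>/**/*". How much that changes matching depends on the glob library in use (the bundle includes both picomatch and micromatch); with typical globstar semantics "dir/**" can also match the directory path itself, whereas "dir/**/*" only matches entries below it. A before/after sketch with illustrative function names:

// Illustrative names; only the mapping expressions are taken from the diff.
function normalizeExcludeDirsOld(excludeDirs) {
  return excludeDirs.map((d) => (d.endsWith("/*") ? d : `${d}/**/*`));
}

function normalizeExcludeDirsNew(excludeDirs) {
  return excludeDirs.map((d) => (d.endsWith("/*") ? d : `${d}/**`));
}

console.log(normalizeExcludeDirsOld(["vendor"])); // [ 'vendor/**/*' ]
console.log(normalizeExcludeDirsNew(["vendor"])); // [ 'vendor/**' ]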
@@ -255529,7 +255699,7 @@ var CliCore = class {
255529
255699
  await this.shareLogIfAnalysisError(vulnsWithResults);
255530
255700
  const socketReport = toSocketFactsSocketDependencyTree(artifacts, vulnsWithResults, this.reportId, allWorkspaceDiagnostics);
255531
255701
  const outputFile = resolve42(this.options.socketMode);
255532
- await writeFile13(outputFile, JSON.stringify(socketReport, null, 2));
255702
+ await writeFile15(outputFile, JSON.stringify(socketReport, null, 2));
255533
255703
  logger.info(kleur_default.green(`Socket report written to: ${outputFile}`));
255534
255704
  }
255535
255705
  async shareLogIfAnalysisError(vulns) {
@@ -255556,7 +255726,7 @@ var CliCore = class {
255556
255726
  }
255557
255727
  const socketReport = toSocketFacts(report, this.reportDependencyTrees, subPjToWsPathToDirectDependencies);
255558
255728
  const outputFile = resolve42(this.options.socketMode);
255559
- await writeFile13(outputFile, JSON.stringify(socketReport, null, 2));
255729
+ await writeFile15(outputFile, JSON.stringify(socketReport, null, 2));
255560
255730
  logger.info(kleur_default.green(`Socket report written to: ${outputFile}`));
255561
255731
  return;
255562
255732
  }
@@ -255782,7 +255952,7 @@ Subproject: ${subproject}`);
255782
255952
  const concurrency = Number(this.options.concurrency);
255783
255953
  const shouldIncludeWorkspaceInLogs = concurrency > 1;
255784
255954
  let npmProjectDirPool;
255785
- const nodeModulesExists = existsSync26(resolve42(subprojectPath, "node_modules"));
255955
+ const nodeModulesExists = existsSync28(resolve42(subprojectPath, "node_modules"));
255786
255956
  if (ecosystem === "NPM" && concurrency > 1 && !nodeModulesExists) {
255787
255957
  const numCopies = Math.min(concurrency, workspaces.length) - 1;
255788
255958
  if (numCopies > 0) {
@@ -256130,7 +256300,7 @@ async function getGitDataToMetadataIfAvailable(rootWorkingDirectory) {
256130
256300
  }
256131
256301
 
256132
256302
  // dist/internal/analysis-debug-info-transformer.js
256133
- import { writeFile as writeFile14 } from "fs/promises";
256303
+ import { writeFile as writeFile16 } from "fs/promises";
256134
256304
  import { resolve as resolve43 } from "path";
256135
256305
  function computeIncludePackagesForVulnerability(vulnerability) {
256136
256306
  if (!vulnerability.vulnerabilityAccessPaths || typeof vulnerability.vulnerabilityAccessPaths === "string") {
@@ -256242,7 +256412,7 @@ function buildPurlString(artifact) {
256242
256412
  async function writeAnalysisDebugInfo(outputFilePath, ecosystemToWorkspaceToVulnerabilities, artifacts) {
256243
256413
  const debugInfo = toAnalysisDebugInfoFromSocketArtifacts(ecosystemToWorkspaceToVulnerabilities, artifacts);
256244
256414
  const resolvedPath = resolve43(outputFilePath);
256245
- await writeFile14(resolvedPath, JSON.stringify(debugInfo, null, 2));
256415
+ await writeFile16(resolvedPath, JSON.stringify(debugInfo, null, 2));
256246
256416
  logger.info(kleur_default.green(`Analysis debug info written to: ${resolvedPath}`));
256247
256417
  }
256248
256418
 
@@ -256275,8 +256445,8 @@ computeFixesAndUpgradePurlsCmd.name("compute-fixes-and-upgrade-purls").argument(
256275
256445
  throw new Error('Range style must be "pin"');
256276
256446
  }
256277
256447
  options.purlTypes = options.purlTypes?.map((t4) => t4.toLowerCase());
256278
- const tmpDir = await mkdtemp2(join33(tmpdir3(), "compute-fixes-and-upgrade-purls-"));
256279
- const logFile = join33(tmpDir, "compute-fixes-and-upgrade-purls.log");
256448
+ const tmpDir = await mkdtemp2(join35(tmpdir5(), "compute-fixes-and-upgrade-purls-"));
256449
+ const logFile = join35(tmpDir, "compute-fixes-and-upgrade-purls.log");
256280
256450
  logger.initWinstonLogger(options.debug, logFile);
256281
256451
  try {
256282
256452
  await initializeComputeFixesAndUpgradePurls(path9, options);
@@ -256288,7 +256458,7 @@ computeFixesAndUpgradePurlsCmd.name("compute-fixes-and-upgrade-purls").argument(
256288
256458
  if (options.outputFile) {
256289
256459
  const outputFile = resolve44(options.outputFile);
256290
256460
  await mkdir7(dirname26(outputFile), { recursive: true });
256291
- await writeFile15(outputFile, JSON.stringify(output, null, 2));
256461
+ await writeFile17(outputFile, JSON.stringify(output, null, 2));
256292
256462
  logger.info(`Result written to: ${outputFile}`);
256293
256463
  }
256294
256464
  await rm3(tmpDir, { recursive: true, force: true });