@coana-tech/cli 14.12.66 → 14.12.67

This diff shows the changes between publicly available package versions as published to one of the supported public registries. It is provided for informational purposes only and reflects the package contents exactly as they appear in those registries.
package/cli.mjs CHANGED
@@ -22174,15 +22174,15 @@ var require_file = __commonJS({
22174
22174
  _incFile(callback) {
22175
22175
  debug("_incFile", this.filename);
22176
22176
  const ext2 = path2.extname(this._basename);
22177
- const basename13 = path2.basename(this._basename, ext2);
22177
+ const basename9 = path2.basename(this._basename, ext2);
22178
22178
  const tasks = [];
22179
22179
  if (this.zippedArchive) {
22180
22180
  tasks.push(
22181
22181
  function(cb) {
22182
22182
  const num = this._created > 0 && !this.tailable ? this._created : "";
22183
22183
  this._compressFile(
22184
- path2.join(this.dirname, `${basename13}${num}${ext2}`),
22185
- path2.join(this.dirname, `${basename13}${num}${ext2}.gz`),
22184
+ path2.join(this.dirname, `${basename9}${num}${ext2}`),
22185
+ path2.join(this.dirname, `${basename9}${num}${ext2}.gz`),
22186
22186
  cb
22187
22187
  );
22188
22188
  }.bind(this)
@@ -22192,9 +22192,9 @@ var require_file = __commonJS({
22192
22192
  function(cb) {
22193
22193
  if (!this.tailable) {
22194
22194
  this._created += 1;
22195
- this._checkMaxFilesIncrementing(ext2, basename13, cb);
22195
+ this._checkMaxFilesIncrementing(ext2, basename9, cb);
22196
22196
  } else {
22197
- this._checkMaxFilesTailable(ext2, basename13, cb);
22197
+ this._checkMaxFilesTailable(ext2, basename9, cb);
22198
22198
  }
22199
22199
  }.bind(this)
22200
22200
  );
@@ -22208,9 +22208,9 @@ var require_file = __commonJS({
22208
22208
  */
22209
22209
  _getFile() {
22210
22210
  const ext2 = path2.extname(this._basename);
22211
- const basename13 = path2.basename(this._basename, ext2);
22211
+ const basename9 = path2.basename(this._basename, ext2);
22212
22212
  const isRotation = this.rotationFormat ? this.rotationFormat() : this._created;
22213
- return !this.tailable && this._created ? `${basename13}${isRotation}${ext2}` : `${basename13}${ext2}`;
22213
+ return !this.tailable && this._created ? `${basename9}${isRotation}${ext2}` : `${basename9}${ext2}`;
22214
22214
  }
22215
22215
  /**
22216
22216
  * Increment the number of files created or checked by this instance.
@@ -22220,14 +22220,14 @@ var require_file = __commonJS({
22220
22220
  * @returns {undefined}
22221
22221
  * @private
22222
22222
  */
22223
- _checkMaxFilesIncrementing(ext2, basename13, callback) {
22223
+ _checkMaxFilesIncrementing(ext2, basename9, callback) {
22224
22224
  if (!this.maxFiles || this._created < this.maxFiles) {
22225
22225
  return setImmediate(callback);
22226
22226
  }
22227
22227
  const oldest = this._created - this.maxFiles;
22228
22228
  const isOldest = oldest !== 0 ? oldest : "";
22229
22229
  const isZipped = this.zippedArchive ? ".gz" : "";
22230
- const filePath = `${basename13}${isOldest}${ext2}${isZipped}`;
22230
+ const filePath = `${basename9}${isOldest}${ext2}${isZipped}`;
22231
22231
  const target = path2.join(this.dirname, filePath);
22232
22232
  fs.unlink(target, callback);
22233
22233
  }
@@ -22242,7 +22242,7 @@ var require_file = __commonJS({
22242
22242
  * @returns {undefined}
22243
22243
  * @private
22244
22244
  */
22245
- _checkMaxFilesTailable(ext2, basename13, callback) {
22245
+ _checkMaxFilesTailable(ext2, basename9, callback) {
22246
22246
  const tasks = [];
22247
22247
  if (!this.maxFiles) {
22248
22248
  return;
@@ -22250,21 +22250,21 @@ var require_file = __commonJS({
22250
22250
  const isZipped = this.zippedArchive ? ".gz" : "";
22251
22251
  for (let x2 = this.maxFiles - 1; x2 > 1; x2--) {
22252
22252
  tasks.push(function(i7, cb) {
22253
- let fileName3 = `${basename13}${i7 - 1}${ext2}${isZipped}`;
22253
+ let fileName3 = `${basename9}${i7 - 1}${ext2}${isZipped}`;
22254
22254
  const tmppath = path2.join(this.dirname, fileName3);
22255
22255
  fs.exists(tmppath, (exists2) => {
22256
22256
  if (!exists2) {
22257
22257
  return cb(null);
22258
22258
  }
22259
- fileName3 = `${basename13}${i7}${ext2}${isZipped}`;
22259
+ fileName3 = `${basename9}${i7}${ext2}${isZipped}`;
22260
22260
  fs.rename(tmppath, path2.join(this.dirname, fileName3), cb);
22261
22261
  });
22262
22262
  }.bind(this, x2));
22263
22263
  }
22264
22264
  asyncSeries(tasks, () => {
22265
22265
  fs.rename(
22266
- path2.join(this.dirname, `${basename13}${ext2}${isZipped}`),
22267
- path2.join(this.dirname, `${basename13}1${ext2}${isZipped}`),
22266
+ path2.join(this.dirname, `${basename9}${ext2}${isZipped}`),
22267
+ path2.join(this.dirname, `${basename9}1${ext2}${isZipped}`),
22268
22268
  callback
22269
22269
  );
22270
22270
  });
@@ -25762,8 +25762,8 @@ var require_follow_redirects = __commonJS({
25762
25762
  }
25763
25763
  return parsed;
25764
25764
  }
25765
- function resolveUrl(relative18, base) {
25766
- return useNativeURL ? new URL3(relative18, base) : parseUrl(url2.resolve(base, relative18));
25765
+ function resolveUrl(relative20, base) {
25766
+ return useNativeURL ? new URL3(relative20, base) : parseUrl(url2.resolve(base, relative20));
25767
25767
  }
25768
25768
  function validateUrl(input) {
25769
25769
  if (/^\[/.test(input.hostname) && !/^\[[:0-9a-f]+\]$/i.test(input.hostname)) {
@@ -48514,9 +48514,9 @@ var require_picomatch = __commonJS({
48514
48514
  var utils = require_utils2();
48515
48515
  var constants3 = require_constants2();
48516
48516
  var isObject2 = (val2) => val2 && typeof val2 === "object" && !Array.isArray(val2);
48517
- var picomatch9 = (glob2, options, returnState = false) => {
48517
+ var picomatch11 = (glob2, options, returnState = false) => {
48518
48518
  if (Array.isArray(glob2)) {
48519
- const fns = glob2.map((input) => picomatch9(input, options, returnState));
48519
+ const fns = glob2.map((input) => picomatch11(input, options, returnState));
48520
48520
  const arrayMatcher = (str) => {
48521
48521
  for (const isMatch4 of fns) {
48522
48522
  const state2 = isMatch4(str);
@@ -48532,16 +48532,16 @@ var require_picomatch = __commonJS({
48532
48532
  }
48533
48533
  const opts = options || {};
48534
48534
  const posix3 = opts.windows;
48535
- const regex = isState ? picomatch9.compileRe(glob2, options) : picomatch9.makeRe(glob2, options, false, true);
48535
+ const regex = isState ? picomatch11.compileRe(glob2, options) : picomatch11.makeRe(glob2, options, false, true);
48536
48536
  const state = regex.state;
48537
48537
  delete regex.state;
48538
48538
  let isIgnored = () => false;
48539
48539
  if (opts.ignore) {
48540
48540
  const ignoreOpts = { ...options, ignore: null, onMatch: null, onResult: null };
48541
- isIgnored = picomatch9(opts.ignore, ignoreOpts, returnState);
48541
+ isIgnored = picomatch11(opts.ignore, ignoreOpts, returnState);
48542
48542
  }
48543
48543
  const matcher = (input, returnObject = false) => {
48544
- const { isMatch: isMatch4, match: match2, output } = picomatch9.test(input, regex, options, { glob: glob2, posix: posix3 });
48544
+ const { isMatch: isMatch4, match: match2, output } = picomatch11.test(input, regex, options, { glob: glob2, posix: posix3 });
48545
48545
  const result = { glob: glob2, state, regex, posix: posix3, input, output, match: match2, isMatch: isMatch4 };
48546
48546
  if (typeof opts.onResult === "function") {
48547
48547
  opts.onResult(result);
@@ -48567,7 +48567,7 @@ var require_picomatch = __commonJS({
48567
48567
  }
48568
48568
  return matcher;
48569
48569
  };
48570
- picomatch9.test = (input, regex, options, { glob: glob2, posix: posix3 } = {}) => {
48570
+ picomatch11.test = (input, regex, options, { glob: glob2, posix: posix3 } = {}) => {
48571
48571
  if (typeof input !== "string") {
48572
48572
  throw new TypeError("Expected input to be a string");
48573
48573
  }
@@ -48584,24 +48584,24 @@ var require_picomatch = __commonJS({
48584
48584
  }
48585
48585
  if (match2 === false || opts.capture === true) {
48586
48586
  if (opts.matchBase === true || opts.basename === true) {
48587
- match2 = picomatch9.matchBase(input, regex, options, posix3);
48587
+ match2 = picomatch11.matchBase(input, regex, options, posix3);
48588
48588
  } else {
48589
48589
  match2 = regex.exec(output);
48590
48590
  }
48591
48591
  }
48592
48592
  return { isMatch: Boolean(match2), match: match2, output };
48593
48593
  };
48594
- picomatch9.matchBase = (input, glob2, options) => {
48595
- const regex = glob2 instanceof RegExp ? glob2 : picomatch9.makeRe(glob2, options);
48594
+ picomatch11.matchBase = (input, glob2, options) => {
48595
+ const regex = glob2 instanceof RegExp ? glob2 : picomatch11.makeRe(glob2, options);
48596
48596
  return regex.test(utils.basename(input));
48597
48597
  };
48598
- picomatch9.isMatch = (str, patterns, options) => picomatch9(patterns, options)(str);
48599
- picomatch9.parse = (pattern, options) => {
48600
- if (Array.isArray(pattern)) return pattern.map((p3) => picomatch9.parse(p3, options));
48598
+ picomatch11.isMatch = (str, patterns, options) => picomatch11(patterns, options)(str);
48599
+ picomatch11.parse = (pattern, options) => {
48600
+ if (Array.isArray(pattern)) return pattern.map((p3) => picomatch11.parse(p3, options));
48601
48601
  return parse14(pattern, { ...options, fastpaths: false });
48602
48602
  };
48603
- picomatch9.scan = (input, options) => scan(input, options);
48604
- picomatch9.compileRe = (state, options, returnOutput = false, returnState = false) => {
48603
+ picomatch11.scan = (input, options) => scan(input, options);
48604
+ picomatch11.compileRe = (state, options, returnOutput = false, returnState = false) => {
48605
48605
  if (returnOutput === true) {
48606
48606
  return state.output;
48607
48607
  }
@@ -48612,13 +48612,13 @@ var require_picomatch = __commonJS({
48612
48612
  if (state && state.negated === true) {
48613
48613
  source = `^(?!${source}).*$`;
48614
48614
  }
48615
- const regex = picomatch9.toRegex(source, options);
48615
+ const regex = picomatch11.toRegex(source, options);
48616
48616
  if (returnState === true) {
48617
48617
  regex.state = state;
48618
48618
  }
48619
48619
  return regex;
48620
48620
  };
48621
- picomatch9.makeRe = (input, options = {}, returnOutput = false, returnState = false) => {
48621
+ picomatch11.makeRe = (input, options = {}, returnOutput = false, returnState = false) => {
48622
48622
  if (!input || typeof input !== "string") {
48623
48623
  throw new TypeError("Expected a non-empty string");
48624
48624
  }
@@ -48629,9 +48629,9 @@ var require_picomatch = __commonJS({
48629
48629
  if (!parsed.output) {
48630
48630
  parsed = parse14(input, options);
48631
48631
  }
48632
- return picomatch9.compileRe(parsed, options, returnOutput, returnState);
48632
+ return picomatch11.compileRe(parsed, options, returnOutput, returnState);
48633
48633
  };
48634
- picomatch9.toRegex = (source, options) => {
48634
+ picomatch11.toRegex = (source, options) => {
48635
48635
  try {
48636
48636
  const opts = options || {};
48637
48637
  return new RegExp(source, opts.flags || (opts.nocase ? "i" : ""));
@@ -48640,8 +48640,8 @@ var require_picomatch = __commonJS({
48640
48640
  return /$^/;
48641
48641
  }
48642
48642
  };
48643
- picomatch9.constants = constants3;
48644
- module2.exports = picomatch9;
48643
+ picomatch11.constants = constants3;
48644
+ module2.exports = picomatch11;
48645
48645
  }
48646
48646
  });
48647
48647
 
@@ -48651,14 +48651,14 @@ var require_picomatch2 = __commonJS({
48651
48651
  "use strict";
48652
48652
  var pico = require_picomatch();
48653
48653
  var utils = require_utils2();
48654
- function picomatch9(glob2, options, returnState = false) {
48654
+ function picomatch11(glob2, options, returnState = false) {
48655
48655
  if (options && (options.windows === null || options.windows === void 0)) {
48656
48656
  options = { ...options, windows: utils.isWindows() };
48657
48657
  }
48658
48658
  return pico(glob2, options, returnState);
48659
48659
  }
48660
- Object.assign(picomatch9, pico);
48661
- module2.exports = picomatch9;
48660
+ Object.assign(picomatch11, pico);
48661
+ module2.exports = picomatch11;
48662
48662
  }
48663
48663
  });
48664
48664
 
@@ -52474,7 +52474,7 @@ var require_cjs2 = __commonJS({
52474
52474
  var require_lib12 = __commonJS({
52475
52475
  "../../node_modules/.pnpm/write-file-atomic@5.0.1/node_modules/write-file-atomic/lib/index.js"(exports2, module2) {
52476
52476
  "use strict";
52477
- module2.exports = writeFile14;
52477
+ module2.exports = writeFile16;
52478
52478
  module2.exports.sync = writeFileSync4;
52479
52479
  module2.exports._getTmpname = getTmpname;
52480
52480
  module2.exports._cleanupOnExit = cleanupOnExit;
@@ -52599,7 +52599,7 @@ var require_lib12 = __commonJS({
52599
52599
  }
52600
52600
  }
52601
52601
  }
52602
- async function writeFile14(filename, data2, options, callback) {
52602
+ async function writeFile16(filename, data2, options, callback) {
52603
52603
  if (options instanceof Function) {
52604
52604
  callback = options;
52605
52605
  options = {};
@@ -67565,16 +67565,16 @@ var require_lockfile = __commonJS({
67565
67565
  if (process.platform === "win32") {
67566
67566
  yield fsSymlink(src, dest, "junction");
67567
67567
  } else {
67568
- let relative18;
67568
+ let relative20;
67569
67569
  try {
67570
- relative18 = (_path || _load_path()).default.relative((_fs || _load_fs()).default.realpathSync((_path || _load_path()).default.dirname(dest)), (_fs || _load_fs()).default.realpathSync(src));
67570
+ relative20 = (_path || _load_path()).default.relative((_fs || _load_fs()).default.realpathSync((_path || _load_path()).default.dirname(dest)), (_fs || _load_fs()).default.realpathSync(src));
67571
67571
  } catch (err) {
67572
67572
  if (err.code !== "ENOENT") {
67573
67573
  throw err;
67574
67574
  }
67575
- relative18 = (_path || _load_path()).default.relative((_path || _load_path()).default.dirname(dest), src);
67575
+ relative20 = (_path || _load_path()).default.relative((_path || _load_path()).default.dirname(dest), src);
67576
67576
  }
67577
- yield fsSymlink(relative18 || ".", dest);
67577
+ yield fsSymlink(relative20 || ".", dest);
67578
67578
  }
67579
67579
  });
67580
67580
  return function symlink2(_x24, _x25) {
@@ -67601,17 +67601,17 @@ var require_lockfile = __commonJS({
67601
67601
  _ref28 = _i14.value;
67602
67602
  }
67603
67603
  const name = _ref28;
67604
- const relative18 = relativeDir ? (_path || _load_path()).default.join(relativeDir, name) : name;
67604
+ const relative20 = relativeDir ? (_path || _load_path()).default.join(relativeDir, name) : name;
67605
67605
  const loc = (_path || _load_path()).default.join(dir, name);
67606
67606
  const stat5 = yield lstat2(loc);
67607
67607
  files.push({
67608
- relative: relative18,
67608
+ relative: relative20,
67609
67609
  basename: name,
67610
67610
  absolute: loc,
67611
67611
  mtime: +stat5.mtime
67612
67612
  });
67613
67613
  if (stat5.isDirectory()) {
67614
- files = files.concat(yield walk(loc, relative18, ignoreBasenames));
67614
+ files = files.concat(yield walk(loc, relative20, ignoreBasenames));
67615
67615
  }
67616
67616
  }
67617
67617
  return files;
@@ -67656,7 +67656,7 @@ var require_lockfile = __commonJS({
67656
67656
  if (eol !== "\n") {
67657
67657
  data2 = data2.replace(/\n/g, eol);
67658
67658
  }
67659
- yield writeFile14(path2, data2);
67659
+ yield writeFile16(path2, data2);
67660
67660
  });
67661
67661
  return function writeFilePreservingEol2(_x30, _x31) {
67662
67662
  return _ref31.apply(this, arguments);
@@ -67668,7 +67668,7 @@ var require_lockfile = __commonJS({
67668
67668
  const file = (_path || _load_path()).default.join(dir, filename);
67669
67669
  const fileLink = (_path || _load_path()).default.join(dir, filename + "-link");
67670
67670
  try {
67671
- yield writeFile14(file, "test");
67671
+ yield writeFile16(file, "test");
67672
67672
  yield link(file, fileLink);
67673
67673
  } catch (err) {
67674
67674
  return false;
@@ -67818,7 +67818,7 @@ var require_lockfile = __commonJS({
67818
67818
  const lockQueue = exports3.lockQueue = new (_blockingQueue || _load_blockingQueue()).default("fs lock");
67819
67819
  const readFileBuffer = exports3.readFileBuffer = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.readFile);
67820
67820
  const open = exports3.open = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.open);
67821
- const writeFile14 = exports3.writeFile = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.writeFile);
67821
+ const writeFile16 = exports3.writeFile = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.writeFile);
67822
67822
  const readlink2 = exports3.readlink = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.readlink);
67823
67823
  const realpath2 = exports3.realpath = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.realpath);
67824
67824
  const readdir7 = exports3.readdir = (0, (_promise2 || _load_promise2()).promisify)((_fs || _load_fs()).default.readdir);
@@ -104191,9 +104191,9 @@ var require_picomatch3 = __commonJS({
104191
104191
  var utils = require_utils5();
104192
104192
  var constants3 = require_constants6();
104193
104193
  var isObject2 = (val2) => val2 && typeof val2 === "object" && !Array.isArray(val2);
104194
- var picomatch9 = (glob2, options, returnState = false) => {
104194
+ var picomatch11 = (glob2, options, returnState = false) => {
104195
104195
  if (Array.isArray(glob2)) {
104196
- const fns = glob2.map((input) => picomatch9(input, options, returnState));
104196
+ const fns = glob2.map((input) => picomatch11(input, options, returnState));
104197
104197
  const arrayMatcher = (str) => {
104198
104198
  for (const isMatch4 of fns) {
104199
104199
  const state2 = isMatch4(str);
@@ -104209,16 +104209,16 @@ var require_picomatch3 = __commonJS({
104209
104209
  }
104210
104210
  const opts = options || {};
104211
104211
  const posix3 = utils.isWindows(options);
104212
- const regex = isState ? picomatch9.compileRe(glob2, options) : picomatch9.makeRe(glob2, options, false, true);
104212
+ const regex = isState ? picomatch11.compileRe(glob2, options) : picomatch11.makeRe(glob2, options, false, true);
104213
104213
  const state = regex.state;
104214
104214
  delete regex.state;
104215
104215
  let isIgnored = () => false;
104216
104216
  if (opts.ignore) {
104217
104217
  const ignoreOpts = { ...options, ignore: null, onMatch: null, onResult: null };
104218
- isIgnored = picomatch9(opts.ignore, ignoreOpts, returnState);
104218
+ isIgnored = picomatch11(opts.ignore, ignoreOpts, returnState);
104219
104219
  }
104220
104220
  const matcher = (input, returnObject = false) => {
104221
- const { isMatch: isMatch4, match: match2, output } = picomatch9.test(input, regex, options, { glob: glob2, posix: posix3 });
104221
+ const { isMatch: isMatch4, match: match2, output } = picomatch11.test(input, regex, options, { glob: glob2, posix: posix3 });
104222
104222
  const result = { glob: glob2, state, regex, posix: posix3, input, output, match: match2, isMatch: isMatch4 };
104223
104223
  if (typeof opts.onResult === "function") {
104224
104224
  opts.onResult(result);
@@ -104244,7 +104244,7 @@ var require_picomatch3 = __commonJS({
104244
104244
  }
104245
104245
  return matcher;
104246
104246
  };
104247
- picomatch9.test = (input, regex, options, { glob: glob2, posix: posix3 } = {}) => {
104247
+ picomatch11.test = (input, regex, options, { glob: glob2, posix: posix3 } = {}) => {
104248
104248
  if (typeof input !== "string") {
104249
104249
  throw new TypeError("Expected input to be a string");
104250
104250
  }
@@ -104261,24 +104261,24 @@ var require_picomatch3 = __commonJS({
104261
104261
  }
104262
104262
  if (match2 === false || opts.capture === true) {
104263
104263
  if (opts.matchBase === true || opts.basename === true) {
104264
- match2 = picomatch9.matchBase(input, regex, options, posix3);
104264
+ match2 = picomatch11.matchBase(input, regex, options, posix3);
104265
104265
  } else {
104266
104266
  match2 = regex.exec(output);
104267
104267
  }
104268
104268
  }
104269
104269
  return { isMatch: Boolean(match2), match: match2, output };
104270
104270
  };
104271
- picomatch9.matchBase = (input, glob2, options, posix3 = utils.isWindows(options)) => {
104272
- const regex = glob2 instanceof RegExp ? glob2 : picomatch9.makeRe(glob2, options);
104271
+ picomatch11.matchBase = (input, glob2, options, posix3 = utils.isWindows(options)) => {
104272
+ const regex = glob2 instanceof RegExp ? glob2 : picomatch11.makeRe(glob2, options);
104273
104273
  return regex.test(path2.basename(input));
104274
104274
  };
104275
- picomatch9.isMatch = (str, patterns, options) => picomatch9(patterns, options)(str);
104276
- picomatch9.parse = (pattern, options) => {
104277
- if (Array.isArray(pattern)) return pattern.map((p3) => picomatch9.parse(p3, options));
104275
+ picomatch11.isMatch = (str, patterns, options) => picomatch11(patterns, options)(str);
104276
+ picomatch11.parse = (pattern, options) => {
104277
+ if (Array.isArray(pattern)) return pattern.map((p3) => picomatch11.parse(p3, options));
104278
104278
  return parse14(pattern, { ...options, fastpaths: false });
104279
104279
  };
104280
- picomatch9.scan = (input, options) => scan(input, options);
104281
- picomatch9.compileRe = (state, options, returnOutput = false, returnState = false) => {
104280
+ picomatch11.scan = (input, options) => scan(input, options);
104281
+ picomatch11.compileRe = (state, options, returnOutput = false, returnState = false) => {
104282
104282
  if (returnOutput === true) {
104283
104283
  return state.output;
104284
104284
  }
@@ -104289,13 +104289,13 @@ var require_picomatch3 = __commonJS({
104289
104289
  if (state && state.negated === true) {
104290
104290
  source = `^(?!${source}).*$`;
104291
104291
  }
104292
- const regex = picomatch9.toRegex(source, options);
104292
+ const regex = picomatch11.toRegex(source, options);
104293
104293
  if (returnState === true) {
104294
104294
  regex.state = state;
104295
104295
  }
104296
104296
  return regex;
104297
104297
  };
104298
- picomatch9.makeRe = (input, options = {}, returnOutput = false, returnState = false) => {
104298
+ picomatch11.makeRe = (input, options = {}, returnOutput = false, returnState = false) => {
104299
104299
  if (!input || typeof input !== "string") {
104300
104300
  throw new TypeError("Expected a non-empty string");
104301
104301
  }
@@ -104306,9 +104306,9 @@ var require_picomatch3 = __commonJS({
104306
104306
  if (!parsed.output) {
104307
104307
  parsed = parse14(input, options);
104308
104308
  }
104309
- return picomatch9.compileRe(parsed, options, returnOutput, returnState);
104309
+ return picomatch11.compileRe(parsed, options, returnOutput, returnState);
104310
104310
  };
104311
- picomatch9.toRegex = (source, options) => {
104311
+ picomatch11.toRegex = (source, options) => {
104312
104312
  try {
104313
104313
  const opts = options || {};
104314
104314
  return new RegExp(source, opts.flags || (opts.nocase ? "i" : ""));
@@ -104317,8 +104317,8 @@ var require_picomatch3 = __commonJS({
104317
104317
  return /$^/;
104318
104318
  }
104319
104319
  };
104320
- picomatch9.constants = constants3;
104321
- module2.exports = picomatch9;
104320
+ picomatch11.constants = constants3;
104321
+ module2.exports = picomatch11;
104322
104322
  }
104323
104323
  });
104324
104324
 
@@ -104336,7 +104336,7 @@ var require_micromatch = __commonJS({
104336
104336
  "use strict";
104337
104337
  var util5 = __require("util");
104338
104338
  var braces = require_braces();
104339
- var picomatch9 = require_picomatch4();
104339
+ var picomatch11 = require_picomatch4();
104340
104340
  var utils = require_utils5();
104341
104341
  var isEmptyString = (val2) => val2 === "" || val2 === "./";
104342
104342
  var micromatch4 = (list, patterns, options) => {
@@ -104353,7 +104353,7 @@ var require_micromatch = __commonJS({
104353
104353
  }
104354
104354
  };
104355
104355
  for (let i7 = 0; i7 < patterns.length; i7++) {
104356
- let isMatch4 = picomatch9(String(patterns[i7]), { ...options, onResult }, true);
104356
+ let isMatch4 = picomatch11(String(patterns[i7]), { ...options, onResult }, true);
104357
104357
  let negated = isMatch4.state.negated || isMatch4.state.negatedExtglob;
104358
104358
  if (negated) negatives++;
104359
104359
  for (let item of list) {
@@ -104381,8 +104381,8 @@ var require_micromatch = __commonJS({
104381
104381
  return matches;
104382
104382
  };
104383
104383
  micromatch4.match = micromatch4;
104384
- micromatch4.matcher = (pattern, options) => picomatch9(pattern, options);
104385
- micromatch4.isMatch = (str, patterns, options) => picomatch9(patterns, options)(str);
104384
+ micromatch4.matcher = (pattern, options) => picomatch11(pattern, options);
104385
+ micromatch4.isMatch = (str, patterns, options) => picomatch11(patterns, options)(str);
104386
104386
  micromatch4.any = micromatch4.isMatch;
104387
104387
  micromatch4.not = (list, patterns, options = {}) => {
104388
104388
  patterns = [].concat(patterns).map(String);
@@ -104429,7 +104429,7 @@ var require_micromatch = __commonJS({
104429
104429
  micromatch4.some = (list, patterns, options) => {
104430
104430
  let items = [].concat(list);
104431
104431
  for (let pattern of [].concat(patterns)) {
104432
- let isMatch4 = picomatch9(String(pattern), options);
104432
+ let isMatch4 = picomatch11(String(pattern), options);
104433
104433
  if (items.some((item) => isMatch4(item))) {
104434
104434
  return true;
104435
104435
  }
@@ -104439,7 +104439,7 @@ var require_micromatch = __commonJS({
104439
104439
  micromatch4.every = (list, patterns, options) => {
104440
104440
  let items = [].concat(list);
104441
104441
  for (let pattern of [].concat(patterns)) {
104442
- let isMatch4 = picomatch9(String(pattern), options);
104442
+ let isMatch4 = picomatch11(String(pattern), options);
104443
104443
  if (!items.every((item) => isMatch4(item))) {
104444
104444
  return false;
104445
104445
  }
@@ -104450,23 +104450,23 @@ var require_micromatch = __commonJS({
104450
104450
  if (typeof str !== "string") {
104451
104451
  throw new TypeError(`Expected a string: "${util5.inspect(str)}"`);
104452
104452
  }
104453
- return [].concat(patterns).every((p3) => picomatch9(p3, options)(str));
104453
+ return [].concat(patterns).every((p3) => picomatch11(p3, options)(str));
104454
104454
  };
104455
104455
  micromatch4.capture = (glob2, input, options) => {
104456
104456
  let posix3 = utils.isWindows(options);
104457
- let regex = picomatch9.makeRe(String(glob2), { ...options, capture: true });
104457
+ let regex = picomatch11.makeRe(String(glob2), { ...options, capture: true });
104458
104458
  let match2 = regex.exec(posix3 ? utils.toPosixSlashes(input) : input);
104459
104459
  if (match2) {
104460
104460
  return match2.slice(1).map((v) => v === void 0 ? "" : v);
104461
104461
  }
104462
104462
  };
104463
- micromatch4.makeRe = (...args2) => picomatch9.makeRe(...args2);
104464
- micromatch4.scan = (...args2) => picomatch9.scan(...args2);
104463
+ micromatch4.makeRe = (...args2) => picomatch11.makeRe(...args2);
104464
+ micromatch4.scan = (...args2) => picomatch11.scan(...args2);
104465
104465
  micromatch4.parse = (patterns, options) => {
104466
104466
  let res = [];
104467
104467
  for (let pattern of [].concat(patterns || [])) {
104468
104468
  for (let str of braces(String(pattern), options)) {
104469
- res.push(picomatch9.parse(str, options));
104469
+ res.push(picomatch11.parse(str, options));
104470
104470
  }
104471
104471
  }
104472
104472
  return res;
@@ -107385,7 +107385,7 @@ var require_parseParams = __commonJS({
107385
107385
  var require_basename = __commonJS({
107386
107386
  "../../node_modules/.pnpm/@fastify+busboy@2.1.1/node_modules/@fastify/busboy/lib/utils/basename.js"(exports2, module2) {
107387
107387
  "use strict";
107388
- module2.exports = function basename13(path2) {
107388
+ module2.exports = function basename9(path2) {
107389
107389
  if (typeof path2 !== "string") {
107390
107390
  return "";
107391
107391
  }
@@ -107412,7 +107412,7 @@ var require_multipart = __commonJS({
107412
107412
  var Dicer = require_Dicer();
107413
107413
  var parseParams = require_parseParams();
107414
107414
  var decodeText = require_decodeText();
107415
- var basename13 = require_basename();
107415
+ var basename9 = require_basename();
107416
107416
  var getLimit2 = require_getLimit();
107417
107417
  var RE_BOUNDARY = /^boundary$/i;
107418
107418
  var RE_FIELD = /^form-data$/i;
@@ -107529,7 +107529,7 @@ var require_multipart = __commonJS({
107529
107529
  } else if (RE_FILENAME.test(parsed[i7][0])) {
107530
107530
  filename = parsed[i7][1];
107531
107531
  if (!preservePath) {
107532
- filename = basename13(filename);
107532
+ filename = basename9(filename);
107533
107533
  }
107534
107534
  }
107535
107535
  }
@@ -122915,7 +122915,7 @@ var require_summary = __commonJS({
122915
122915
  exports2.summary = exports2.markdownSummary = exports2.SUMMARY_DOCS_URL = exports2.SUMMARY_ENV_VAR = void 0;
122916
122916
  var os_1 = __require("os");
122917
122917
  var fs_1 = __require("fs");
122918
- var { access: access5, appendFile, writeFile: writeFile14 } = fs_1.promises;
122918
+ var { access: access5, appendFile, writeFile: writeFile16 } = fs_1.promises;
122919
122919
  exports2.SUMMARY_ENV_VAR = "GITHUB_STEP_SUMMARY";
122920
122920
  exports2.SUMMARY_DOCS_URL = "https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary";
122921
122921
  var Summary = class {
@@ -122973,7 +122973,7 @@ var require_summary = __commonJS({
122973
122973
  return __awaiter(this, void 0, void 0, function* () {
122974
122974
  const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);
122975
122975
  const filePath = yield this.filePath();
122976
- const writeFunc = overwrite ? writeFile14 : appendFile;
122976
+ const writeFunc = overwrite ? writeFile16 : appendFile;
122977
122977
  yield writeFunc(filePath, this._buffer, { encoding: "utf8" });
122978
122978
  return this.emptyBuffer();
122979
122979
  });
@@ -126365,8 +126365,8 @@ var require_tmp = __commonJS({
126365
126365
  if (option === "name") {
126366
126366
  if (path2.isAbsolute(name))
126367
126367
  throw new Error(`${option} option must not contain an absolute path, found "${name}".`);
126368
- let basename13 = path2.basename(name);
126369
- if (basename13 === ".." || basename13 === "." || basename13 !== name)
126368
+ let basename9 = path2.basename(name);
126369
+ if (basename9 === ".." || basename9 === "." || basename9 !== name)
126370
126370
  throw new Error(`${option} option must not contain a path, found "${name}".`);
126371
126371
  } else {
126372
126372
  if (path2.isAbsolute(name) && !name.startsWith(tmpDir)) {
@@ -152234,7 +152234,7 @@ var require_micromatch2 = __commonJS({
152234
152234
  "use strict";
152235
152235
  var util5 = __require("util");
152236
152236
  var braces = require_braces2();
152237
- var picomatch9 = require_picomatch4();
152237
+ var picomatch11 = require_picomatch4();
152238
152238
  var utils = require_utils5();
152239
152239
  var isEmptyString = (v) => v === "" || v === "./";
152240
152240
  var hasBraces = (v) => {
@@ -152255,7 +152255,7 @@ var require_micromatch2 = __commonJS({
152255
152255
  }
152256
152256
  };
152257
152257
  for (let i7 = 0; i7 < patterns.length; i7++) {
152258
- let isMatch4 = picomatch9(String(patterns[i7]), { ...options, onResult }, true);
152258
+ let isMatch4 = picomatch11(String(patterns[i7]), { ...options, onResult }, true);
152259
152259
  let negated = isMatch4.state.negated || isMatch4.state.negatedExtglob;
152260
152260
  if (negated) negatives++;
152261
152261
  for (let item of list) {
@@ -152283,8 +152283,8 @@ var require_micromatch2 = __commonJS({
152283
152283
  return matches;
152284
152284
  };
152285
152285
  micromatch4.match = micromatch4;
152286
- micromatch4.matcher = (pattern, options) => picomatch9(pattern, options);
152287
- micromatch4.isMatch = (str, patterns, options) => picomatch9(patterns, options)(str);
152286
+ micromatch4.matcher = (pattern, options) => picomatch11(pattern, options);
152287
+ micromatch4.isMatch = (str, patterns, options) => picomatch11(patterns, options)(str);
152288
152288
  micromatch4.any = micromatch4.isMatch;
152289
152289
  micromatch4.not = (list, patterns, options = {}) => {
152290
152290
  patterns = [].concat(patterns).map(String);
@@ -152331,7 +152331,7 @@ var require_micromatch2 = __commonJS({
152331
152331
  micromatch4.some = (list, patterns, options) => {
152332
152332
  let items = [].concat(list);
152333
152333
  for (let pattern of [].concat(patterns)) {
152334
- let isMatch4 = picomatch9(String(pattern), options);
152334
+ let isMatch4 = picomatch11(String(pattern), options);
152335
152335
  if (items.some((item) => isMatch4(item))) {
152336
152336
  return true;
152337
152337
  }
@@ -152341,7 +152341,7 @@ var require_micromatch2 = __commonJS({
152341
152341
  micromatch4.every = (list, patterns, options) => {
152342
152342
  let items = [].concat(list);
152343
152343
  for (let pattern of [].concat(patterns)) {
152344
- let isMatch4 = picomatch9(String(pattern), options);
152344
+ let isMatch4 = picomatch11(String(pattern), options);
152345
152345
  if (!items.every((item) => isMatch4(item))) {
152346
152346
  return false;
152347
152347
  }
@@ -152352,23 +152352,23 @@ var require_micromatch2 = __commonJS({
152352
152352
  if (typeof str !== "string") {
152353
152353
  throw new TypeError(`Expected a string: "${util5.inspect(str)}"`);
152354
152354
  }
152355
- return [].concat(patterns).every((p3) => picomatch9(p3, options)(str));
152355
+ return [].concat(patterns).every((p3) => picomatch11(p3, options)(str));
152356
152356
  };
152357
152357
  micromatch4.capture = (glob2, input, options) => {
152358
152358
  let posix3 = utils.isWindows(options);
152359
- let regex = picomatch9.makeRe(String(glob2), { ...options, capture: true });
152359
+ let regex = picomatch11.makeRe(String(glob2), { ...options, capture: true });
152360
152360
  let match2 = regex.exec(posix3 ? utils.toPosixSlashes(input) : input);
152361
152361
  if (match2) {
152362
152362
  return match2.slice(1).map((v) => v === void 0 ? "" : v);
152363
152363
  }
152364
152364
  };
152365
- micromatch4.makeRe = (...args2) => picomatch9.makeRe(...args2);
152366
- micromatch4.scan = (...args2) => picomatch9.scan(...args2);
152365
+ micromatch4.makeRe = (...args2) => picomatch11.makeRe(...args2);
152366
+ micromatch4.scan = (...args2) => picomatch11.scan(...args2);
152367
152367
  micromatch4.parse = (patterns, options) => {
152368
152368
  let res = [];
152369
152369
  for (let pattern of [].concat(patterns || [])) {
152370
152370
  for (let str of braces(String(pattern), options)) {
152371
- res.push(picomatch9.parse(str, options));
152371
+ res.push(picomatch11.parse(str, options));
152372
152372
  }
152373
152373
  }
152374
152374
  return res;
@@ -201114,9 +201114,9 @@ var {
201114
201114
  } = import_index.default;
201115
201115
 
201116
201116
  // dist/index.js
201117
- import { mkdir as mkdir3, mkdtemp as mkdtemp2, readFile as readFile33, rm as rm2, writeFile as writeFile13 } from "fs/promises";
201117
+ import { mkdir as mkdir3, mkdtemp as mkdtemp2, readFile as readFile33, rm as rm2, writeFile as writeFile15 } from "fs/promises";
201118
201118
  import { tmpdir as tmpdir2 } from "os";
201119
- import { dirname as dirname19, join as join27, resolve as resolve41 } from "path";
201119
+ import { dirname as dirname18, join as join27, resolve as resolve41 } from "path";
201120
201120
 
201121
201121
  // ../../node_modules/.pnpm/remeda@2.14.0/node_modules/remeda/dist/chunk-ANXBDSUI.js
201122
201122
  var s = { done: false, hasNext: false };
@@ -211425,11 +211425,11 @@ var MavenSocketUpgradeManager = class {
211425
211425
  const pomFiles = /* @__PURE__ */ new Set();
211426
211426
  const gradleLockfiles = /* @__PURE__ */ new Set();
211427
211427
  const sbtManifestFiles = /* @__PURE__ */ new Set();
211428
- const pomMatcher = (0, import_picomatch.default)("{**/,}{*-*.,}pom{.xml,}");
211429
- const buildOutput = (0, import_picomatch.default)("{**/,}build/**");
211430
- const targetOutput = (0, import_picomatch.default)("{**/,}target/**");
211431
- const gradleLockfileMatcher = (0, import_picomatch.default)("{**/,}gradle.lockfile");
211432
- const sbtMatcher = (0, import_picomatch.default)("{**/,}{*.sbt,*.scala}");
211428
+ const pomMatcher = (0, import_picomatch.default)("{*-*.,}pom{.xml,}", { basename: true });
211429
+ const buildOutput = (0, import_picomatch.default)("build/**");
211430
+ const targetOutput = (0, import_picomatch.default)("target/**");
211431
+ const gradleLockfileMatcher = (0, import_picomatch.default)("gradle.lockfile", { basename: true });
211432
+ const sbtMatcher = (0, import_picomatch.default)(["*.sbt", "*.scala"], { basename: true });
211433
211433
  for (const manifestFile of ctxt.manifestFiles) {
211434
211434
  if (pomMatcher(manifestFile) && !buildOutput(manifestFile) && !targetOutput(manifestFile)) {
211435
211435
  pomFiles.add(resolve13(this.rootDir, manifestFile));
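Note: the hunk above replaces directory-prefixed globs such as "{**/,}gradle.lockfile" with picomatch's basename option, which falls back to matching slash-free patterns against the final path segment when the full path does not match (see the bundled picomatch.test/matchBase code earlier in this diff). A minimal illustrative sketch, standalone and not part of cli.mjs; the sample paths are hypothetical:

import picomatch from "picomatch";

// With { basename: true }, a pattern without slashes is also tested against
// the basename of the input path, so no "{**/,}" directory prefix is needed.
const pomMatcher = picomatch("{*-*.,}pom{.xml,}", { basename: true });

pomMatcher("services/auth/pom.xml");       // true  (basename "pom.xml")
pomMatcher("my-module.pom");               // true  (matches "*-*.pom")
pomMatcher("services/auth/settings.xml");  // false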
@@ -211778,7 +211778,7 @@ ${indent(1, indentationSize)}`)}
211778
211778
  };
211779
211779
 
211780
211780
  // ../fixing-management/src/fixing-management/npm/npm-socket-upgrade-manager.ts
211781
- import { basename as basename3, dirname as dirname6, relative as relative6, resolve as resolve17 } from "path";
211781
+ import { dirname as dirname6, relative as relative6, resolve as resolve17 } from "path";
211782
211782
 
211783
211783
  // ../utils/src/npm-utils.ts
211784
211784
  import { access, constants } from "fs/promises";
@@ -217321,10 +217321,10 @@ var Ignore = class {
217321
217321
  ignored(p3) {
217322
217322
  const fullpath = p3.fullpath();
217323
217323
  const fullpaths = `${fullpath}/`;
217324
- const relative18 = p3.relative() || ".";
217325
- const relatives = `${relative18}/`;
217324
+ const relative20 = p3.relative() || ".";
217325
+ const relatives = `${relative20}/`;
217326
217326
  for (const m4 of this.relative) {
217327
- if (m4.match(relative18) || m4.match(relatives))
217327
+ if (m4.match(relative20) || m4.match(relatives))
217328
217328
  return true;
217329
217329
  }
217330
217330
  for (const m4 of this.absolute) {
@@ -217335,9 +217335,9 @@ var Ignore = class {
217335
217335
  }
217336
217336
  childrenIgnored(p3) {
217337
217337
  const fullpath = p3.fullpath() + "/";
217338
- const relative18 = (p3.relative() || ".") + "/";
217338
+ const relative20 = (p3.relative() || ".") + "/";
217339
217339
  for (const m4 of this.relativeChildren) {
217340
- if (m4.match(relative18))
217340
+ if (m4.match(relative20))
217341
217341
  return true;
217342
217342
  }
217343
217343
  for (const m4 of this.absoluteChildren) {
@@ -219821,10 +219821,10 @@ var NpmSocketUpgradeManager = class {
219821
219821
  constructor(rootDir) {
219822
219822
  this.rootDir = rootDir;
219823
219823
  }
219824
- packageJsonMatcher = (0, import_picomatch2.default)("package.json");
219825
- packageLockMatcher = (0, import_picomatch2.default)("package-lock.json");
219826
- pnpmLockMatcher = (0, import_picomatch2.default)("{pnpm-lock.yaml,pnpm-lock.yml}");
219827
- yarnLockMatcher = (0, import_picomatch2.default)("yarn.lock");
219824
+ packageJsonMatcher = (0, import_picomatch2.default)("package.json", { basename: true });
219825
+ packageLockMatcher = (0, import_picomatch2.default)("package-lock.json", { basename: true });
219826
+ pnpmLockMatcher = (0, import_picomatch2.default)(["pnpm-lock.yaml", "pnpm-lock.yml"], { basename: true });
219827
+ yarnLockMatcher = (0, import_picomatch2.default)("yarn.lock", { basename: true });
219828
219828
  async applySocketArtifactUpgrades(ctxt) {
219829
219829
  const packageJsonToLockfiles = await this.buildPackageJsonToLockfilesMap(ctxt);
219830
219830
  const directPatches = [];
@@ -219834,11 +219834,12 @@ var NpmSocketUpgradeManager = class {
219834
219834
  assert7(artifact.name);
219835
219835
  assert7(artifact.version);
219836
219836
  const directPackageJsons = new Set(
219837
- artifact.manifestFiles?.filter((mf) => this.packageJsonMatcher(basename3(mf.file))).map((mf) => mf.file) ?? []
219837
+ artifact.manifestFiles?.filter((mf) => this.packageJsonMatcher(mf.file)).map((mf) => mf.file) ?? []
219838
219838
  );
219839
219839
  for (const mf of artifact.manifestFiles ?? []) {
219840
- const fileName3 = basename3(mf.file);
219840
+ const fileName3 = mf.file;
219841
219841
  if (this.packageJsonMatcher(fileName3)) {
219842
+ if (ctxt.wsFilter && !ctxt.wsFilter(dirname6(fileName3) || ".")) continue;
219842
219843
  const patches = await this.createDirectDependencyPatches(mf, idx, upgradeVersion, ctxt);
219843
219844
  directPatches.push(...patches);
219844
219845
  } else if (this.packageLockMatcher(fileName3) || this.pnpmLockMatcher(fileName3) || this.yarnLockMatcher(fileName3)) {
@@ -219857,13 +219858,14 @@ var NpmSocketUpgradeManager = class {
219857
219858
  const ancestor = ctxt.artifacts.find((a4) => a4.id === ancestorId);
219858
219859
  if (ancestor) {
219859
219860
  for (const mf of ancestor.manifestFiles ?? []) {
219860
- if (this.packageJsonMatcher(basename3(mf.file)) && !directPackageJsons.has(mf.file)) {
219861
+ if (this.packageJsonMatcher(mf.file) && !directPackageJsons.has(mf.file)) {
219861
219862
  transitivePackageJsons.add(mf.file);
219862
219863
  }
219863
219864
  }
219864
219865
  }
219865
219866
  }
219866
219867
  for (const packageJsonFile of transitivePackageJsons) {
219868
+ if (ctxt.wsFilter && !ctxt.wsFilter(dirname6(packageJsonFile) || ".")) continue;
219867
219869
  const patches = await this.createTransitiveDependencyPatches(packageJsonFile, idx, upgradeVersion, ctxt);
219868
219870
  transitivePatches.push(...patches);
219869
219871
  }
@@ -219893,7 +219895,16 @@ var NpmSocketUpgradeManager = class {
219893
219895
  );
219894
219896
  const lockfileToArtifacts = {};
219895
219897
  for (const { file, artifacts } of appliedTemporaryPatches) {
219896
- const lockfiles = packageJsonToLockfiles.get(file) ?? [];
219898
+ const lockfiles = packageJsonToLockfiles.get(file);
219899
+ if (!lockfiles) {
219900
+ ctxt.statusUpdater?.({
219901
+ status: "error",
219902
+ file,
219903
+ message: "No lockfile found for package.json",
219904
+ artifacts: i3(artifacts)
219905
+ });
219906
+ continue;
219907
+ }
219897
219908
  for (const lockfile2 of lockfiles) {
219898
219909
  if (!lockfileToArtifacts[lockfile2]) {
219899
219910
  lockfileToArtifacts[lockfile2] = [];
@@ -219908,14 +219919,14 @@ var NpmSocketUpgradeManager = class {
219908
219919
  ctxt.statusUpdater?.({
219909
219920
  status: "success",
219910
219921
  file: relative6(this.rootDir, path2),
219911
- message: "Restored package.json",
219922
+ message: "File restored",
219912
219923
  artifacts: i3(artifacts)
219913
219924
  });
219914
219925
  } catch (e) {
219915
219926
  ctxt.statusUpdater?.({
219916
219927
  status: "error",
219917
219928
  file: relative6(this.rootDir, path2),
219918
- message: "Could not restore package.json",
219929
+ message: "Could not restore file",
219919
219930
  artifacts: i3(artifacts)
219920
219931
  });
219921
219932
  }
@@ -220015,15 +220026,15 @@ var NpmSocketUpgradeManager = class {
220015
220026
  const lockfileDir = dirname6(resolve17(this.rootDir, lockfile2));
220016
220027
  const oldFileContent = await readFile14(resolve17(this.rootDir, lockfile2), "utf-8");
220017
220028
  let error;
220018
- if (this.packageLockMatcher(basename3(lockfile2))) {
220029
+ if (this.packageLockMatcher(lockfile2)) {
220019
220030
  const command = mode === "LOCKFILE_ONLY" ? cmdt`npm install --package-lock-only` : cmdt`npm install -f --ignore-scripts --no-fund --no-audit --no-progress`;
220020
220031
  const result = await execNeverFail(command, lockfileDir);
220021
220032
  error = result.error;
220022
- } else if (this.pnpmLockMatcher(basename3(lockfile2))) {
220033
+ } else if (this.pnpmLockMatcher(lockfile2)) {
220023
220034
  const command = mode === "LOCKFILE_ONLY" ? cmdt`pnpm install --lockfile-only` : cmdt`pnpm install --ignore-scripts --fix-lockfile --config.confirmModulesPurge=false`;
220024
220035
  const result = await execNeverFail(command, lockfileDir);
220025
220036
  error = result.error;
220026
- } else if (this.yarnLockMatcher(basename3(lockfile2))) {
220037
+ } else if (this.yarnLockMatcher(lockfile2)) {
220027
220038
  const yarnType = await getYarnType(lockfileDir);
220028
220039
  if (yarnType === "classic") {
220029
220040
  const command = mode === "LOCKFILE_ONLY" ? cmdt`yarn install --mode update-lockfile` : cmdt`yarn install --ignore-scripts --noninteractive`;
@@ -220066,7 +220077,7 @@ var NpmSocketUpgradeManager = class {
220066
220077
  status: "error",
220067
220078
  file: lockfile2,
220068
220079
  artifacts: i3(artifacts),
220069
- message: `Failed to update lockfile: ${error.message}`
220080
+ message: `Failed to update lockfile: ${error.message ?? "Unknown error"}`
220070
220081
  });
220071
220082
  }
220072
220083
  });
@@ -220078,11 +220089,11 @@ var NpmSocketUpgradeManager = class {
220078
220089
  async buildPackageJsonToLockfilesMap(ctxt) {
220079
220090
  const packageJsonToLockfiles = /* @__PURE__ */ new Map();
220080
220091
  const lockFiles = ctxt.manifestFiles.filter(
220081
- (file) => this.packageLockMatcher(basename3(file)) || this.pnpmLockMatcher(basename3(file)) || this.yarnLockMatcher(basename3(file))
220092
+ (file) => this.packageLockMatcher(file) || this.pnpmLockMatcher(file) || this.yarnLockMatcher(file)
220082
220093
  );
220083
220094
  for (const lockFile of lockFiles) {
220084
220095
  const lockFileDir = resolve17(this.rootDir, dirname6(lockFile));
220085
- const isPnpmLockFile = this.pnpmLockMatcher(basename3(lockFile));
220096
+ const isPnpmLockFile = this.pnpmLockMatcher(lockFile);
220086
220097
  const workspaces = isPnpmLockFile ? await getWorkspacePathsFromPnpmLockFile(lockFileDir, true) : await getWorkspacePathsFromPackageJSON(lockFileDir, true);
220087
220098
  for (const workspace of workspaces) {
220088
220099
  const packageJson = relative6(this.rootDir, resolve17(lockFileDir, workspace, "package.json"));
@@ -220375,7 +220386,7 @@ import { readFile as readFile17, writeFile as writeFile6 } from "fs/promises";
220375
220386
  import { resolve as resolve21 } from "path";
220376
220387
 
220377
220388
  // ../utils/src/package-utils.ts
220378
- import { parse as parse4, join as join7, resolve as resolve20, normalize as normalize3, dirname as dirname7, basename as basename4, relative as relative7 } from "path";
220389
+ import { parse as parse4, join as join7, resolve as resolve20, normalize as normalize3, dirname as dirname7, basename as basename3, relative as relative7 } from "path";
220379
220390
  import { existsSync as existsSync10, readFileSync as readFileSync2, readdirSync as readdirSync3, statSync as statSync2, writeFileSync as writeFileSync2 } from "fs";
220380
220391
  function setFieldInPackageJson(workspaceRoot, field, value2) {
220381
220392
  const packageJSONContentObj = getPackageJsonObject2(workspaceRoot);
@@ -220885,12 +220896,12 @@ var CargoFixingManager = class {
220885
220896
 
220886
220897
  // ../fixing-management/src/fixing-management/nuget/nuget-socket-upgrade-manager.ts
220887
220898
  var import_picomatch3 = __toESM(require_picomatch2(), 1);
220888
- import { basename as basename6, resolve as resolve23 } from "node:path";
220899
+ import { resolve as resolve23 } from "node:path";
220889
220900
 
220890
220901
  // ../fixing-management/src/fixing-management/nuget/nuget-project-utils.ts
220891
220902
  var import_parse_xml3 = __toESM(require_dist(), 1);
220892
220903
  import { readFile as readFile19 } from "node:fs/promises";
220893
- import { dirname as dirname8, join as join10, relative as relative8, resolve as resolve22, basename as basename5, extname } from "node:path";
220904
+ import { dirname as dirname8, join as join10, relative as relative8, resolve as resolve22, basename as basename4, extname } from "node:path";
220894
220905
  import { existsSync as existsSync11 } from "node:fs";
220895
220906
  function normalizeMSBuildPath(path2) {
220896
220907
  return path2.replace(/\\/g, "/");
@@ -221042,11 +221053,11 @@ function addReservedMSBuildProperties(currentProject, mainProject) {
221042
221053
  normalizeMSBuildPath(dirname8(mainProject.validatedProjectPath)).replace(/^[A-Za-z]:/, "")
221043
221054
  ],
221044
221055
  ["MSBuildProjectExtension", extname(mainProject.validatedProjectPath)],
221045
- ["MSBuildProjectFile", basename5(mainProject.validatedProjectPath)],
221056
+ ["MSBuildProjectFile", basename4(mainProject.validatedProjectPath)],
221046
221057
  ["MSBuildProjectFullPath", normalizeMSBuildPath(mainProject.validatedProjectPath)],
221047
- ["MSBuildProjectName", basename5(mainProject.validatedProjectPath, extname(mainProject.validatedProjectPath))],
221058
+ ["MSBuildProjectName", basename4(mainProject.validatedProjectPath, extname(mainProject.validatedProjectPath))],
221048
221059
  // MSBuildThisFile* properties - reference the current project being processed
221049
- ["MSBuildThisFile", basename5(currentProject.validatedProjectPath)],
221060
+ ["MSBuildThisFile", basename4(currentProject.validatedProjectPath)],
221050
221061
  ["MSBuildThisFileDirectory", normalizeMSBuildPath(dirname8(currentProject.validatedProjectPath)) + "/"],
221051
221062
  [
221052
221063
  "MSBuildThisFileDirectoryNoRoot",
@@ -221056,7 +221067,7 @@ function addReservedMSBuildProperties(currentProject, mainProject) {
221056
221067
  ["MSBuildThisFileFullPath", normalizeMSBuildPath(currentProject.validatedProjectPath)],
221057
221068
  [
221058
221069
  "MSBuildThisFileName",
221059
- basename5(currentProject.validatedProjectPath, extname(currentProject.validatedProjectPath))
221070
+ basename4(currentProject.validatedProjectPath, extname(currentProject.validatedProjectPath))
221060
221071
  ]
221061
221072
  ];
221062
221073
  for (const [propertyName, propertyValue] of reservedProperties) {
@@ -221302,16 +221313,27 @@ var NuGetSocketUpgradeManager = class {
221302
221313
  this.rootDir = rootDir;
221303
221314
  }
221304
221315
  // Individual matchers for specific handling
221305
- projectMatcher = (0, import_picomatch3.default)("{*.*proj,*.props,*.targets}");
221306
- packagesConfigMatcher = (0, import_picomatch3.default)("{packages.config,packages.*.config}");
221307
- lockFileMatcher = (0, import_picomatch3.default)("packages.lock.json");
221316
+ projectMatcher = (0, import_picomatch3.default)(["*.*proj", "*.props", "*.targets"], { basename: true });
221317
+ packagesConfigMatcher = (0, import_picomatch3.default)(["packages.config", "packages.*.config"], { basename: true });
221318
+ lockFileMatcher = (0, import_picomatch3.default)("packages.lock.json", { basename: true });
221308
221319
  combinedMatcher = (0, import_picomatch3.default)(
221309
- "{*.sln,*.*proj,*.targets,*.props,*.projitems,*.nuspec,packages.config,packages.*.config,packages.lock.json}"
221320
+ [
221321
+ "*.sln",
221322
+ "*.*proj",
221323
+ "*.targets",
221324
+ "*.props",
221325
+ "*.projitems",
221326
+ "*.nuspec",
221327
+ "packages.config",
221328
+ "packages.*.config",
221329
+ "packages.lock.json"
221330
+ ],
221331
+ { basename: true }
221310
221332
  );
221311
221333
  truthyValues = ["true", "yes", "on"];
221312
221334
  async applySocketArtifactUpgrades(ctxt) {
221313
221335
  const caseInsensitiveManifestFileMap = new TransformMap((key) => key.toLowerCase());
221314
- for (const file of ctxt.manifestFiles.filter((file2) => this.combinedMatcher(basename6(file2)))) {
221336
+ for (const file of ctxt.manifestFiles.filter((file2) => this.combinedMatcher(file2))) {
221315
221337
  const fullPath = resolve23(this.rootDir, file);
221316
221338
  caseInsensitiveManifestFileMap.set(fullPath, fullPath);
221317
221339
  }
@@ -221372,7 +221394,7 @@ var NuGetSocketUpgradeManager = class {
221372
221394
  )
221373
221395
  );
221374
221396
  for (const manifestFile of manifestFilesForArtifact) {
221375
- if (this.projectMatcher(basename6(manifestFile))) {
221397
+ if (this.projectMatcher(manifestFile)) {
221376
221398
  const project = await loadNuGetProject(this.rootDir, manifestFile, validateFile);
221377
221399
  if (project) {
221378
221400
  patches.push(...await this.handleProject(project, idx, upgradeVersion, ctxt));
@@ -221384,7 +221406,7 @@ var NuGetSocketUpgradeManager = class {
221384
221406
  message: `Manifest file not valid for updates`
221385
221407
  });
221386
221408
  }
221387
- } else if (this.packagesConfigMatcher(basename6(manifestFile))) {
221409
+ } else if (this.packagesConfigMatcher(manifestFile)) {
221388
221410
  const packagesConfig = await loadPackagesConfig(this.rootDir, manifestFile, validateFile);
221389
221411
  if (packagesConfig) {
221390
221412
  patches.push(...await this.handlePackagesConfig(packagesConfig, idx, upgradeVersion, ctxt));
@@ -221396,7 +221418,7 @@ var NuGetSocketUpgradeManager = class {
221396
221418
  message: `Manifest file not valid for updates: ${manifestFile}`
221397
221419
  });
221398
221420
  }
221399
- } else if (this.lockFileMatcher(basename6(manifestFile))) {
221421
+ } else if (this.lockFileMatcher(manifestFile)) {
221400
221422
  ctxt.statusUpdater?.({
221401
221423
  status: "error",
221402
221424
  file: manifestFile,
@@ -221586,7 +221608,7 @@ var NuGetSocketUpgradeManager = class {
221586
221608
  return patches;
221587
221609
  }
221588
221610
  async collectFallbackPatches(manifestFile, oldIdx, newIdx, upgradeVersion, recomputedArtifacts, ctxt, validateFile) {
221589
- if (this.projectMatcher(basename6(manifestFile))) {
221611
+ if (this.projectMatcher(manifestFile)) {
221590
221612
  const project = await loadNuGetProject(this.rootDir, manifestFile, validateFile);
221591
221613
  if (project) {
221592
221614
  return await this.handleProjectFallback(project, oldIdx, newIdx, upgradeVersion, recomputedArtifacts, ctxt);
@@ -221598,7 +221620,7 @@ var NuGetSocketUpgradeManager = class {
221598
221620
  message: `Manifest file not valid for updates`
221599
221621
  });
221600
221622
  }
221601
- } else if (this.packagesConfigMatcher(basename6(manifestFile))) {
221623
+ } else if (this.packagesConfigMatcher(manifestFile)) {
221602
221624
  const packagesConfig = await loadPackagesConfig(this.rootDir, manifestFile, validateFile);
221603
221625
  if (packagesConfig) {
221604
221626
  return await this.handlePackagesConfigFallback(
@@ -221609,7 +221631,7 @@ var NuGetSocketUpgradeManager = class {
221609
221631
  recomputedArtifacts,
221610
221632
  ctxt
221611
221633
  );
221612
- } else if (this.lockFileMatcher(basename6(manifestFile))) {
221634
+ } else if (this.lockFileMatcher(manifestFile)) {
221613
221635
  } else {
221614
221636
  ctxt.statusUpdater?.({
221615
221637
  status: "error",
@@ -221689,15 +221711,16 @@ var NuGetSocketUpgradeManager = class {
221689
221711
  };
221690
221712
 
221691
221713
  // ../fixing-management/src/fixing-management/rust/cargo-socket-upgrade-manager.ts
221692
- import { basename as basename7, dirname as dirname10, resolve as resolve25 } from "node:path";
221693
- var import_picomatch4 = __toESM(require_picomatch2(), 1);
221714
+ import { dirname as dirname10, relative as relative9, resolve as resolve25 } from "node:path";
221715
+ var import_picomatch5 = __toESM(require_picomatch2(), 1);
221694
221716
  var import_semver3 = __toESM(require_semver2(), 1);
221695
221717
  import assert9 from "node:assert";
221696
- import { readFile as readFile21 } from "node:fs/promises";
221718
+ import { readFile as readFile21, writeFile as writeFile8 } from "node:fs/promises";
221697
221719
 
221698
221720
  // ../utils/src/cargo-utils.ts
221699
221721
  import { readFile as readFile20 } from "node:fs/promises";
221700
221722
  import { dirname as dirname9, resolve as resolve24 } from "node:path";
221723
+ var import_picomatch4 = __toESM(require_picomatch2(), 1);
221701
221724
  async function getCargoTomlFilesForCargoLockFile(rootDir, cargoLockFile, cargoTomlFiles) {
221702
221725
  const lockDir = dirname9(cargoLockFile);
221703
221726
  const rootTomlFile = cargoTomlFiles.find((file) => dirname9(file) === lockDir);
@@ -221722,11 +221745,10 @@ async function getCargoTomlFilesForCargoLockFile(rootDir, cargoLockFile, cargoTo
221722
221745
  if (memberPatterns.length === 0) {
221723
221746
  return { rootTomlFile };
221724
221747
  }
221725
- const workspaceMemberDirs = new Set(await glob(memberPatterns, { ignore: excludePatterns, cwd: rootDir }));
221726
- const memberTomlFiles = cargoTomlFiles.filter((file) => {
221727
- const relativeDir = dirname9(file);
221728
- return workspaceMemberDirs.has(relativeDir);
221748
+ const matcher = (0, import_picomatch4.default)(memberPatterns, {
221749
+ ignore: excludePatterns
221729
221750
  });
221751
+ const memberTomlFiles = cargoTomlFiles.filter((file) => matcher(dirname9(file) || "."));
221730
221752
  return { rootTomlFile, memberTomlFiles };
221731
221753
  }
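The hunk above replaces the filesystem glob lookup for Cargo workspace members with an in-memory picomatch matcher built from the workspace's member and exclude patterns. A minimal standalone sketch of that matching step, using picomatch's documented array-of-globs and `ignore` options; the patterns and file paths below are invented for illustration:

    import picomatch from "picomatch";
    import { dirname } from "node:path";

    // Hypothetical [workspace] members/exclude globs from a root Cargo.toml.
    const memberPatterns = ["crates/*", "tools/*"];
    const excludePatterns = ["crates/experimental"];

    // One matcher for all member globs; excluded directories never match.
    const isWorkspaceMember = picomatch(memberPatterns, { ignore: excludePatterns });

    const cargoTomlFiles = [
      "Cargo.toml",
      "crates/core/Cargo.toml",
      "crates/experimental/Cargo.toml",
      "tools/xtask/Cargo.toml",
    ];

    // Mirrors the filter above: test each manifest's directory ("." for the root file).
    const memberTomlFiles = cargoTomlFiles.filter((file) => isWorkspaceMember(dirname(file) || "."));

    console.log(memberTomlFiles); // ["crates/core/Cargo.toml", "tools/xtask/Cargo.toml"]

Matching in memory works here because the candidate Cargo.toml paths are already known, so no second filesystem walk is needed.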
221732
221754
 
@@ -221735,96 +221757,139 @@ var CargoSocketUpgradeManager = class {
221735
221757
  constructor(rootDir) {
221736
221758
  this.rootDir = rootDir;
221737
221759
  }
221738
- cargoTomlMatcher = (0, import_picomatch4.default)("Cargo.toml");
221739
- cargoLockMatcher = (0, import_picomatch4.default)("Cargo.lock");
221760
+ cargoTomlMatcher = (0, import_picomatch5.default)("Cargo.toml", { basename: true });
221761
+ cargoLockMatcher = (0, import_picomatch5.default)("Cargo.lock", { basename: true });
221740
221762
  async applySocketArtifactUpgrades(ctxt) {
221741
- const tomlFiles = ctxt.manifestFiles.filter((mf) => this.cargoTomlMatcher(basename7(mf)));
221742
- const lockFiles = ctxt.manifestFiles.filter((mf) => this.cargoLockMatcher(basename7(mf)));
221763
+ const tomlFiles = ctxt.manifestFiles.filter((mf) => this.cargoTomlMatcher(mf));
221743
221764
  const tomlToLock = /* @__PURE__ */ new Map();
221744
- await asyncForEach(lockFiles, async (lockFile) => {
221745
- const rootAndMemberTomlFiles = await getCargoTomlFilesForCargoLockFile(this.rootDir, lockFile, tomlFiles);
221746
- if (!rootAndMemberTomlFiles) return;
221747
- const { rootTomlFile, memberTomlFiles } = rootAndMemberTomlFiles;
221748
- tomlToLock.set(rootTomlFile, lockFile);
221749
- for (const tomlFile of memberTomlFiles ?? []) {
221750
- tomlToLock.set(tomlFile, lockFile);
221765
+ await asyncForEach(
221766
+ ctxt.manifestFiles.filter((mf) => this.cargoLockMatcher(mf)),
221767
+ async (lockFile) => {
221768
+ const rootAndMemberTomlFiles = await getCargoTomlFilesForCargoLockFile(this.rootDir, lockFile, tomlFiles);
221769
+ if (!rootAndMemberTomlFiles) return;
221770
+ const { rootTomlFile, memberTomlFiles } = rootAndMemberTomlFiles;
221771
+ tomlToLock.set(rootTomlFile, lockFile);
221772
+ for (const tomlFile of memberTomlFiles ?? []) {
221773
+ tomlToLock.set(tomlFile, lockFile);
221774
+ }
221751
221775
  }
221752
- });
221753
- const lockfileToArtifacts = {};
221754
- const patches = await asyncFlatMap(Array.from(ctxt.upgrades), async ([idx, upgradeVersion]) => {
221776
+ );
221777
+ const directPatches = [];
221778
+ const transitivePatches = [];
221779
+ for (const [idx, upgradeVersion] of ctxt.upgrades) {
221755
221780
  const artifact = ctxt.artifacts[idx];
221756
221781
  assert9(artifact.name);
221757
221782
  assert9(artifact.version);
221758
221783
  const directCargoTomls = new Set(
221759
- artifact.manifestFiles?.map((ref) => ref.file).filter((manifestFile) => this.cargoTomlMatcher(basename7(manifestFile))) ?? []
221760
- );
221761
- const directPatches = await asyncFlatMap(
221762
- Array.from(ctxt.upgrades),
221763
- async ([idx2, upgradeVersion2]) => await asyncFlatMap(Array.from(directCargoTomls), async (cargoToml) => {
221764
- const patches2 = await this.handleCargoToml(
221765
- cargoToml,
221766
- idx2,
221767
- upgradeVersion2,
221768
- ctxt
221769
- );
221770
- const cargoLock = tomlToLock.get(cargoToml);
221771
- if (cargoLock !== void 0) {
221772
- (lockfileToArtifacts[cargoLock] ??= []).push(idx2);
221773
- patches2.push(...await this.handleCargoLock(cargoLock, idx2, upgradeVersion2, ctxt));
221774
- }
221775
- return patches2;
221776
- })
221777
- );
221778
- const ancestorCargoTomls = i3(
221779
- artifact.toplevelAncestors?.flatMap(
221780
- (ancestorId) => ctxt.artifacts.find((a4) => a4.id === ancestorId)?.manifestFiles?.map((m4) => m4.file).filter((manifestFile) => this.cargoTomlMatcher(basename7(manifestFile))).filter((cargoToml) => !directCargoTomls.has(cargoToml)) ?? []
221781
- ) ?? []
221784
+ artifact.manifestFiles?.filter((mf) => this.cargoTomlMatcher(mf.file)).map((mf) => mf.file) ?? []
221782
221785
  );
221783
- const transitivePatches = await asyncFlatMap(ancestorCargoTomls, async (cargoToml) => {
221784
- const cargoLock = tomlToLock.get(cargoToml);
221785
- if (cargoLock !== void 0) {
221786
- (lockfileToArtifacts[cargoLock] ??= []).push(idx);
221787
- return await this.handleCargoLock(cargoLock, idx, upgradeVersion, ctxt);
221786
+ for (const mf of artifact.manifestFiles ?? []) {
221787
+ const fileName3 = mf.file;
221788
+ if (this.cargoTomlMatcher(fileName3)) {
221789
+ if (ctxt.wsFilter && !ctxt.wsFilter(dirname10(fileName3) || ".")) continue;
221790
+ const patches = await this.createDirectDependencyPatches(mf, idx, upgradeVersion, ctxt);
221791
+ directPatches.push(...patches);
221792
+ } else if (this.cargoLockMatcher(fileName3)) {
221793
+ continue;
221788
221794
  } else {
221789
- return await this.handleCargoTomlAddDependency(cargoToml, idx, upgradeVersion, ctxt);
221795
+ ctxt.statusUpdater?.({
221796
+ status: "error",
221797
+ file: mf.file,
221798
+ artifacts: [idx],
221799
+ message: "Unhandled manifest file"
221800
+ });
221790
221801
  }
221791
- });
221792
- return directPatches.concat(transitivePatches);
221793
- });
221794
- await applyPatches("RUST", this.rootDir, patches, ctxt);
221795
- await asyncForEach(Object.entries(lockfileToArtifacts), async ([file, artifacts]) => {
221796
- const oldFileContent = await readFile21(resolve25(this.rootDir, file), "utf-8");
221797
- const { error } = await execNeverFail(cmdt`cargo fetch`, dirname10(resolve25(this.rootDir, file)));
221798
- const updatedFileContent = await readFile21(resolve25(this.rootDir, file), "utf-8");
221799
- if (!error) {
221802
+ }
221803
+ const transitiveCargoTomls = /* @__PURE__ */ new Set();
221804
+ for (const ancestorId of artifact.toplevelAncestors ?? []) {
221805
+ const ancestor = ctxt.artifacts.find((a4) => a4.id === ancestorId);
221806
+ if (ancestor) {
221807
+ for (const mf of ancestor.manifestFiles ?? []) {
221808
+ if (this.cargoTomlMatcher(mf.file) && !directCargoTomls.has(mf.file)) {
221809
+ transitiveCargoTomls.add(mf.file);
221810
+ }
221811
+ }
221812
+ }
221813
+ }
221814
+ for (const cargoToml of transitiveCargoTomls) {
221815
+ if (ctxt.wsFilter && !ctxt.wsFilter(dirname10(cargoToml) || ".")) continue;
221816
+ const patches = await this.createTransitiveDependencyPatches(cargoToml, idx, upgradeVersion, ctxt);
221817
+ transitivePatches.push(...patches);
221818
+ }
221819
+ }
221820
+ const restoreMap = /* @__PURE__ */ new Map();
221821
+ for (const { file, artifacts } of directPatches.concat(transitivePatches)) {
221822
+ const path2 = resolve25(this.rootDir, file);
221823
+ if (!restoreMap.has(path2)) {
221824
+ restoreMap.set(path2, {
221825
+ content: await readFile21(path2, "utf-8"),
221826
+ artifacts: []
221827
+ });
221828
+ }
221829
+ const existing = restoreMap.get(path2);
221830
+ for (const idx of artifacts) {
221831
+ if (!existing.artifacts.includes(idx)) {
221832
+ existing.artifacts.push(idx);
221833
+ }
221834
+ }
221835
+ }
221836
+ const appliedTemporaryPatches = await applyPatches(
221837
+ "RUST",
221838
+ this.rootDir,
221839
+ directPatches.concat(transitivePatches),
221840
+ ctxt,
221841
+ "Temporary patch applied"
221842
+ );
221843
+ const lockfileToArtifacts = {};
221844
+ for (const { file, artifacts } of appliedTemporaryPatches) {
221845
+ const lockfile2 = tomlToLock.get(file);
221846
+ if (!lockfile2) {
221800
221847
  ctxt.statusUpdater?.({
221801
- status: "success",
221848
+ status: "error",
221802
221849
  file,
221803
- message: "Lockfile updated",
221804
- patch: createPatch(file, oldFileContent, updatedFileContent, void 0, void 0, { context: 3 }),
221805
- artifacts
221850
+ message: "No lockfile found for Cargo.toml",
221851
+ artifacts: i3(artifacts)
221806
221852
  });
221807
- } else {
221853
+ continue;
221854
+ }
221855
+ if (!lockfileToArtifacts[lockfile2]) {
221856
+ lockfileToArtifacts[lockfile2] = [];
221857
+ }
221858
+ lockfileToArtifacts[lockfile2].push(...artifacts);
221859
+ }
221860
+ await this.refreshLockfiles(lockfileToArtifacts, ctxt, "LOCKFILE_ONLY");
221861
+ await asyncForEach(Array.from(restoreMap), async ([path2, { content, artifacts }]) => {
221862
+ try {
221863
+ await writeFile8(path2, content);
221864
+ ctxt.statusUpdater?.({
221865
+ status: "success",
221866
+ file: relative9(this.rootDir, path2),
221867
+ message: "File restored",
221868
+ artifacts: i3(artifacts)
221869
+ });
221870
+ } catch (e) {
221808
221871
  ctxt.statusUpdater?.({
221809
221872
  status: "error",
221810
- file,
221811
- artifacts: i3(artifacts),
221812
- message: `Failed to update lockfile -- error during 'cargo fetch': ${error.message}`
221873
+ file: relative9(this.rootDir, path2),
221874
+ message: "Could not restore file",
221875
+ artifacts: i3(artifacts)
221813
221876
  });
221814
221877
  }
221815
221878
  });
221879
+ await applyPatches("RUST", this.rootDir, directPatches, ctxt);
221880
+ await this.refreshLockfiles(lockfileToArtifacts, ctxt, "FULL_INSTALL");
221816
221881
  }
221817
221882
  /**
221818
221883
  * Handle Cargo.toml file updates for direct dependencies
221819
221884
  */
221820
- async handleCargoToml(tomlFile, idx, upgradeVersion, ctxt) {
221821
- const fullPath = resolve25(this.rootDir, tomlFile);
221885
+ async createDirectDependencyPatches(mf, idx, upgradeVersion, ctxt) {
221886
+ const fullPath = resolve25(this.rootDir, mf.file);
221822
221887
  const content = await readFile21(fullPath, "utf-8");
221823
221888
  const toml = parseTOML2(content);
221824
221889
  if (!toml) {
221825
221890
  ctxt.statusUpdater?.({
221826
221891
  status: "error",
221827
- file: tomlFile,
221892
+ file: mf.file,
221828
221893
  artifacts: [idx],
221829
221894
  message: `Failed to parse TOML file`
221830
221895
  });
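The matchers declared in the hunk above now pass `{ basename: true }`, picomatch's documented option for matching slash-free patterns against a path's basename, which is why the surrounding calls no longer wrap each path in `basename7(...)`. A small sketch with invented paths:

    import picomatch from "picomatch";

    const cargoTomlMatcher = picomatch("Cargo.toml", { basename: true });
    const cargoLockMatcher = picomatch("Cargo.lock", { basename: true });

    // Repository-relative paths can now be tested directly.
    console.log(cargoTomlMatcher("Cargo.toml"));             // true
    console.log(cargoTomlMatcher("crates/core/Cargo.toml")); // true  (matched on the basename)
    console.log(cargoLockMatcher("crates/core/Cargo.toml")); // false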
@@ -221844,7 +221909,7 @@ var CargoSocketUpgradeManager = class {
221844
221909
  if (key === artifact.name && this.versionMatches(versionStr, artifact.version)) {
221845
221910
  const newVersionString = this.createNewVersionString(versionStr, upgradeVersion, ctxt.rangeStyle);
221846
221911
  patches.push({
221847
- file: tomlFile,
221912
+ file: mf.file,
221848
221913
  offset: dep[range][0] + 1,
221849
221914
  // Skip opening quote
221850
221915
  length: versionStr.length,
@@ -221862,7 +221927,7 @@ var CargoSocketUpgradeManager = class {
221862
221927
  if (this.versionMatches(currentVersion, artifact.version)) {
221863
221928
  const newVersionString = this.createNewVersionString(currentVersion, upgradeVersion, ctxt.rangeStyle);
221864
221929
  patches.push({
221865
- file: tomlFile,
221930
+ file: mf.file,
221866
221931
  offset: versionValue[range][0] + 1,
221867
221932
  // Skip opening quote
221868
221933
  length: currentVersion.length,
@@ -221877,77 +221942,7 @@ var CargoSocketUpgradeManager = class {
221877
221942
  if (patches.length === 0) {
221878
221943
  ctxt.statusUpdater?.({
221879
221944
  status: "error",
221880
- file: tomlFile,
221881
- artifacts: [idx],
221882
- message: `Could not find ${artifact.name} version ${artifact.version}`
221883
- });
221884
- }
221885
- return patches;
221886
- }
221887
- /**
221888
- * Handle Cargo.lock file updates for all dependencies
221889
- */
221890
- async handleCargoLock(lockFile, idx, upgradeVersion, ctxt) {
221891
- const patches = [];
221892
- const fullPath = resolve25(this.rootDir, lockFile);
221893
- const artifact = ctxt.artifacts[idx];
221894
- const content = await readFile21(fullPath, "utf-8");
221895
- const toml = parseTOML2(content);
221896
- if (!toml) {
221897
- ctxt.statusUpdater?.({
221898
- status: "error",
221899
- file: lockFile,
221900
- artifacts: [idx],
221901
- message: `Failed to parse TOML file`
221902
- });
221903
- return [];
221904
- }
221905
- const packages = toml.package;
221906
- if (!(packages instanceof TOMLArray)) {
221907
- ctxt.statusUpdater?.({
221908
- status: "error",
221909
- file: lockFile,
221910
- artifacts: [idx],
221911
- message: `No package array found in lock file`
221912
- });
221913
- return [];
221914
- }
221915
- for (const pkg of packages) {
221916
- if (!(pkg instanceof TOMLTable)) continue;
221917
- const nameValue = pkg.name;
221918
- const versionValue = pkg.version;
221919
- const checksumValue = pkg.checksum;
221920
- if (!(nameValue instanceof TOMLScalar) || !(versionValue instanceof TOMLScalar)) continue;
221921
- const name = String(nameValue[value]);
221922
- const version3 = String(versionValue[value]);
221923
- if (name === artifact.name && version3 === artifact.version) {
221924
- patches.push({
221925
- file: lockFile,
221926
- offset: versionValue[range][0] + 1,
221927
- // Skip opening quote
221928
- length: version3.length,
221929
- artifacts: [idx],
221930
- text: upgradeVersion
221931
- });
221932
- if (checksumValue instanceof TOMLScalar) {
221933
- const checksumStart = checksumValue[keyRange][0];
221934
- const checksumEnd = checksumValue[range][1];
221935
- const checksumLength = checksumEnd - checksumStart;
221936
- patches.push({
221937
- file: lockFile,
221938
- offset: checksumStart,
221939
- length: checksumLength,
221940
- artifacts: [idx],
221941
- text: ""
221942
- });
221943
- }
221944
- break;
221945
- }
221946
- }
221947
- if (patches.length === 0) {
221948
- ctxt.statusUpdater?.({
221949
- status: "error",
221950
- file: lockFile,
221945
+ file: mf.file,
221951
221946
  artifacts: [idx],
221952
221947
  message: `Could not find ${artifact.name} version ${artifact.version}`
221953
221948
  });
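The patches pushed throughout these managers all share the shape `{ file, offset, length?, artifacts, text }`, addressing spans of the original manifest text by character offset. The bundle's own `applyPatches` is not shown in this diff; the following is a hypothetical, simplified single-file applier, included only to illustrate how such records compose:

    // Hypothetical applier for the offset/length/text patch records used above
    // (not the bundle's applyPatches; assumes non-overlapping patches).
    interface TextPatch {
      file: string;
      offset: number;      // start index in the original file content
      length?: number;     // characters to replace; omitted means pure insertion
      text: string;        // replacement text
      artifacts: number[]; // indices of the artifacts this patch belongs to
    }

    function applyTextPatches(content: string, patches: TextPatch[]): string {
      // Apply from the highest offset down so earlier offsets stay valid.
      for (const p of [...patches].sort((a, b) => b.offset - a.offset)) {
        content = content.slice(0, p.offset) + p.text + content.slice(p.offset + (p.length ?? 0));
      }
      return content;
    }

    // Example: bump a Cargo.toml dependency version in place.
    const original = 'serde = { version = "1.0.100", features = ["derive"] }\n';
    console.log(applyTextPatches(original, [
      { file: "Cargo.toml", offset: 21, length: 7, text: "1.0.210", artifacts: [0] },
    ]));
    // -> serde = { version = "1.0.210", features = ["derive"] }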
@@ -221976,7 +221971,7 @@ var CargoSocketUpgradeManager = class {
221976
221971
  /**
221977
221972
  * Add a new dependency to a Cargo.toml file
221978
221973
  */
221979
- async handleCargoTomlAddDependency(tomlFile, idx, upgradeVersion, ctxt) {
221974
+ async createTransitiveDependencyPatches(tomlFile, idx, upgradeVersion, ctxt) {
221980
221975
  const fullPath = resolve25(this.rootDir, tomlFile);
221981
221976
  const content = await readFile21(fullPath, "utf-8");
221982
221977
  const toml = parseTOML2(content);
@@ -222015,13 +222010,49 @@ ${newDependencyLine}`
222015
222010
  }
222016
222011
  ];
222017
222012
  }
222013
+ async refreshLockfiles(lockfileToArtifacts, ctxt, _mode) {
222014
+ await asyncForEach(Object.entries(lockfileToArtifacts), async ([lockfile2, artifacts]) => {
222015
+ const lockfileDir = dirname10(resolve25(this.rootDir, lockfile2));
222016
+ const oldFileContent = await readFile21(resolve25(this.rootDir, lockfile2), "utf-8");
222017
+ let error;
222018
+ if (this.cargoLockMatcher(lockfile2)) {
222019
+ const result = await execNeverFail(["cargo", "fetch"], lockfileDir);
222020
+ error = result.error;
222021
+ } else {
222022
+ ctxt.statusUpdater?.({
222023
+ status: "error",
222024
+ file: lockfile2,
222025
+ artifacts: i3(artifacts),
222026
+ message: "Unknown lockfile type"
222027
+ });
222028
+ return;
222029
+ }
222030
+ if (!error) {
222031
+ const finalFileContent = await readFile21(resolve25(this.rootDir, lockfile2), "utf-8");
222032
+ ctxt.statusUpdater?.({
222033
+ status: "success",
222034
+ file: lockfile2,
222035
+ message: "Lockfile updated",
222036
+ patch: createPatch(lockfile2, oldFileContent, finalFileContent, void 0, void 0, { context: 3 }),
222037
+ artifacts: i3(artifacts)
222038
+ });
222039
+ } else {
222040
+ ctxt.statusUpdater?.({
222041
+ status: "error",
222042
+ file: lockfile2,
222043
+ artifacts: i3(artifacts),
222044
+ message: `Failed to update lockfile: ${error.message ?? "Unknown error"}`
222045
+ });
222046
+ }
222047
+ });
222048
+ }
222018
222049
  };
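The new refreshLockfiles step above shells out to `cargo fetch` and reports the resulting Cargo.lock delta via `createPatch`, whose call shape matches the `diff` (jsdiff) package. A rough standalone sketch of that reporting step, assuming jsdiff and substituting Node's execFile for the bundle's execNeverFail wrapper:

    import { execFile } from "node:child_process";
    import { readFile } from "node:fs/promises";
    import { dirname, resolve } from "node:path";
    import { promisify } from "node:util";
    import { createPatch } from "diff";

    const run = promisify(execFile);

    // Refresh one Cargo.lock and return a unified diff of what changed.
    // Assumes the jsdiff createPatch API; error handling is intentionally minimal.
    async function refreshCargoLock(rootDir: string, lockfile: string): Promise<string> {
      const absolute = resolve(rootDir, lockfile);
      const before = await readFile(absolute, "utf-8");
      await run("cargo", ["fetch"], { cwd: dirname(absolute) }); // lets cargo rewrite the lockfile
      const after = await readFile(absolute, "utf-8");
      return createPatch(lockfile, before, after, undefined, undefined, { context: 3 });
    }

In the manager above this runs twice: once while the temporary Cargo.toml patches are in place, and again after the files are restored and only the direct patches are re-applied.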
222019
222050
 
222020
222051
  // ../fixing-management/src/fixing-management/pip/pip-socket-upgrade-manager.ts
222021
- import { dirname as dirname12, resolve as resolve29 } from "node:path";
222022
- var import_picomatch5 = __toESM(require_picomatch2(), 1);
222052
+ import { dirname as dirname12, relative as relative11, resolve as resolve29 } from "node:path";
222053
+ var import_picomatch7 = __toESM(require_picomatch2(), 1);
222023
222054
  import assert10 from "node:assert";
222024
- import { readFile as readFile24 } from "node:fs/promises";
222055
+ import { readFile as readFile24, writeFile as writeFile9 } from "node:fs/promises";
222025
222056
  var import_pip_requirements_js = __toESM(require_dist11(), 1);
222026
222057
 
222027
222058
  // ../utils/src/pip-utils.ts
@@ -222044,7 +222075,7 @@ var import_lodash5 = __toESM(require_lodash(), 1);
222044
222075
  var import_micromatch = __toESM(require_micromatch(), 1);
222045
222076
  import { existsSync as existsSync12 } from "fs";
222046
222077
  import { access as access2, cp, readdir as readdir3, stat as stat2 } from "fs/promises";
222047
- import { basename as basename8, join as join11, relative as relative9, resolve as resolve26 } from "path";
222078
+ import { basename as basename5, join as join11, relative as relative10, resolve as resolve26 } from "path";
222048
222079
  var { uniq } = import_lodash5.default;
222049
222080
  var { isMatch } = import_micromatch.default;
222050
222081
  function* parents(dir) {
@@ -222057,7 +222088,7 @@ function* parents(dir) {
222057
222088
  }
222058
222089
  function findParent(dir, predicate, wholePath) {
222059
222090
  for (const parent2 of parents(dir))
222060
- if (predicate(wholePath ? parent2 : basename8(parent2))) return parent2;
222091
+ if (predicate(wholePath ? parent2 : basename5(parent2))) return parent2;
222061
222092
  }
222062
222093
  async function getFilesRelative(dir, excludeDirs) {
222063
222094
  async function helper(subDir, arrayOfFiles) {
@@ -222098,6 +222129,7 @@ var systemPython = once2(() => {
222098
222129
  var hasPyenv = once2(async () => !(await execNeverFail("which pyenv")).error);
222099
222130
 
222100
222131
  // ../utils/src/pip-utils.ts
222132
+ var import_picomatch6 = __toESM(require_picomatch2(), 1);
222101
222133
  async function isSetupPySetuptools(file) {
222102
222134
  const content = await readFile23(file, "utf-8");
222103
222135
  return content.includes("setup(") && (/^\s*from\s+(?:setuptools|distutils\.core)\s+import\s+.*setup/m.test(content) || /^\s*import\s+(?:setuptools|distutils\.core)/m.test(content));
@@ -222126,11 +222158,10 @@ async function getPyprojectTomlFilesForLockFile(rootDir, uvLockfile, pyprojectFi
222126
222158
  if (memberPatterns.length === 0) {
222127
222159
  return { rootTomlFile };
222128
222160
  }
222129
- const workspaceMemberDirs = new Set(await glob(memberPatterns, { ignore: excludePatterns, cwd: rootDir }));
222130
- const memberTomlFiles = pyprojectFiles.filter((file) => {
222131
- const relativeDir = dirname11(file);
222132
- return workspaceMemberDirs.has(relativeDir);
222161
+ const matcher = (0, import_picomatch6.default)(memberPatterns, {
222162
+ ignore: excludePatterns
222133
222163
  });
222164
+ const memberTomlFiles = pyprojectFiles.filter((file) => matcher(dirname11(file) || "."));
222134
222165
  return { rootTomlFile, memberTomlFiles };
222135
222166
  }
222136
222167
 
@@ -222282,6 +222313,138 @@ function pipVersionSatisfiesConstraints(version3, constraints) {
222282
222313
  });
222283
222314
  }
222284
222315
 
222316
+ // ../fixing-management/src/fixing-management/pip/pip-patch-utils.ts
222317
+ function createPep508VersionPatches(file, idx, requirement, upgradeVersion, rangeStyle, baseOffset = 0) {
222318
+ const patches = [];
222319
+ if (rangeStyle === "pin") {
222320
+ if (requirement.data.versionSpec?.length) {
222321
+ const firstSpec = requirement.data.versionSpec[0];
222322
+ patches.push({
222323
+ file,
222324
+ offset: baseOffset + firstSpec.data.operator.location.startIdx,
222325
+ length: firstSpec.data.operator.location.endIdx - firstSpec.data.operator.location.startIdx,
222326
+ artifacts: [idx],
222327
+ text: "=="
222328
+ });
222329
+ if (firstSpec.data.version) {
222330
+ patches.push({
222331
+ file,
222332
+ offset: baseOffset + firstSpec.data.version.location.startIdx,
222333
+ length: firstSpec.data.version.location.endIdx - firstSpec.data.version.location.startIdx,
222334
+ artifacts: [idx],
222335
+ text: upgradeVersion
222336
+ });
222337
+ }
222338
+ for (let i7 = 1; i7 < requirement.data.versionSpec.length; i7++) {
222339
+ const currentSpec = requirement.data.versionSpec[i7];
222340
+ const previousSpec = requirement.data.versionSpec[i7 - 1];
222341
+ patches.push({
222342
+ file,
222343
+ offset: baseOffset + previousSpec.location.endIdx,
222344
+ length: currentSpec.location.endIdx - previousSpec.location.endIdx,
222345
+ artifacts: [idx],
222346
+ text: ""
222347
+ });
222348
+ }
222349
+ } else {
222350
+ patches.push({
222351
+ file,
222352
+ offset: baseOffset + requirement.location.endIdx,
222353
+ artifacts: [idx],
222354
+ text: `==${upgradeVersion}`
222355
+ });
222356
+ }
222357
+ } else if (requirement.data.versionSpec?.length) {
222358
+ let isLowerBounded = false;
222359
+ for (let i7 = 0; i7 < requirement.data.versionSpec.length; i7++) {
222360
+ const spec = requirement.data.versionSpec[i7];
222361
+ const operator = spec.data.operator.data;
222362
+ if (!spec.data.version) continue;
222363
+ if (operator === ">") {
222364
+ patches.push({
222365
+ file,
222366
+ offset: baseOffset + spec.data.operator.location.startIdx,
222367
+ length: spec.data.operator.location.endIdx - spec.data.operator.location.startIdx,
222368
+ artifacts: [idx],
222369
+ text: ">="
222370
+ });
222371
+ patches.push({
222372
+ file,
222373
+ offset: baseOffset + spec.data.version.location.startIdx,
222374
+ length: spec.data.version.location.endIdx - spec.data.version.location.startIdx,
222375
+ artifacts: [idx],
222376
+ text: upgradeVersion
222377
+ });
222378
+ isLowerBounded = true;
222379
+ } else if (operator === "<" && pipCompareVersions(spec.data.version.data, upgradeVersion) <= 0) {
222380
+ patches.push({
222381
+ file,
222382
+ offset: baseOffset + spec.data.operator.location.startIdx,
222383
+ length: spec.data.operator.location.endIdx - spec.data.operator.location.startIdx,
222384
+ artifacts: [idx],
222385
+ text: "<="
222386
+ });
222387
+ patches.push({
222388
+ file,
222389
+ offset: baseOffset + spec.data.version.location.startIdx,
222390
+ length: spec.data.version.location.endIdx - spec.data.version.location.startIdx,
222391
+ artifacts: [idx],
222392
+ text: upgradeVersion
222393
+ });
222394
+ } else if (operator === "<=" && pipCompareVersions(spec.data.version.data, upgradeVersion) < 0) {
222395
+ patches.push({
222396
+ file,
222397
+ offset: baseOffset + spec.data.version.location.startIdx,
222398
+ length: spec.data.version.location.endIdx - spec.data.version.location.startIdx,
222399
+ artifacts: [idx],
222400
+ text: upgradeVersion
222401
+ });
222402
+ } else if (operator === "!=" && pipCompareVersions(spec.data.version.data, upgradeVersion) === 0) {
222403
+ patches.push({
222404
+ file,
222405
+ offset: baseOffset + spec.data.operator.location.startIdx,
222406
+ length: spec.data.operator.location.endIdx - spec.data.operator.location.startIdx,
222407
+ artifacts: [idx],
222408
+ text: "=="
222409
+ });
222410
+ patches.push({
222411
+ file,
222412
+ offset: baseOffset + spec.data.version.location.startIdx,
222413
+ length: spec.data.version.location.endIdx - spec.data.version.location.startIdx,
222414
+ artifacts: [idx],
222415
+ text: upgradeVersion
222416
+ });
222417
+ isLowerBounded = true;
222418
+ } else if (["==", "===", "~=", ">="].includes(operator)) {
222419
+ patches.push({
222420
+ file,
222421
+ offset: baseOffset + spec.data.version.location.startIdx,
222422
+ length: spec.data.version.location.endIdx - spec.data.version.location.startIdx,
222423
+ artifacts: [idx],
222424
+ text: upgradeVersion
222425
+ });
222426
+ isLowerBounded = true;
222427
+ }
222428
+ }
222429
+ if (!isLowerBounded) {
222430
+ patches.push({
222431
+ file,
222432
+ offset: baseOffset + requirement.data.name.location.endIdx,
222433
+ artifacts: [idx],
222434
+ text: `>=${upgradeVersion},`
222435
+ });
222436
+ }
222437
+ } else {
222438
+ patches.push({
222439
+ file,
222440
+ offset: baseOffset + requirement.data.name.location.endIdx,
222441
+ artifacts: [idx],
222442
+ text: `==${upgradeVersion}`
222443
+ });
222444
+ }
222445
+ return patches;
222446
+ }
222447
+
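Read off the branches of createPep508VersionPatches above, the two range styles roughly behave as follows for an upgrade to 2.32.3. The requirement strings are invented examples, and "range-preserving" is shorthand here for any rangeStyle other than "pin":

    // Hypothetical before/after pairs for the two behaviours implemented above.
    const pinStyle = [
      { before: "flask>=1.0,<3", after: "flask==2.32.3" },
      { before: "flask",         after: "flask==2.32.3" },
    ];
    const rangePreserving = [
      { before: "flask>=1.0", after: "flask>=2.32.3" },
      { before: "flask>1.0",  after: "flask>=2.32.3" },
      { before: "flask<2.0",  after: "flask>=2.32.3,<=2.32.3" },
    ];
    console.table(pinStyle);
    console.table(rangePreserving);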
222285
222448
  // ../fixing-management/src/fixing-management/pip/pip-socket-upgrade-manager.ts
222286
222449
  var PipSocketUpgradeManager = class {
222287
222450
  constructor(rootDir) {
@@ -222289,26 +222452,48 @@ var PipSocketUpgradeManager = class {
222289
222452
  }
222290
222453
  // glob pattern taken from depscan/workspaces/pipeline/src/glob-patterns.ts
222291
222454
  // Note, depscan glob pattern router uses recursive matching, we instead do optional prefix `(**/)?`
222292
- requirementsTxtMatcher = (0, import_picomatch5.default)(
222293
- "{**/,}{requirements{[-_.]frozen,{[-_]*,}}.txt,{**/,}requirements/*.txt}"
222455
+ requirementsTxtMatcher = (0, import_picomatch7.default)(
222456
+ "{requirements{[-_.]frozen,{[-_]*,}}.txt,{**/,}requirements/*.txt}",
222457
+ { basename: true }
222294
222458
  );
222295
- pyprojectTomlMatcher = (0, import_picomatch5.default)("{**/,}pyproject.toml");
222296
- uvLockMatcher = (0, import_picomatch5.default)("{**/,}uv.lock");
222459
+ pyprojectTomlMatcher = (0, import_picomatch7.default)("pyproject.toml", { basename: true });
222460
+ uvLockMatcher = (0, import_picomatch7.default)("uv.lock", { basename: true });
222297
222461
  async applySocketArtifactUpgrades(ctxt) {
222298
222462
  const pyprojectTomlfiles = ctxt.manifestFiles.filter((f6) => this.pyprojectTomlMatcher(f6));
222299
- const lockfileToArtifacts = {};
222300
- const patches = await asyncFlatMap(Array.from(ctxt.upgrades), async ([idx, upgradeVersion]) => {
222463
+ const tomlToLock = /* @__PURE__ */ new Map();
222464
+ await asyncForEach(
222465
+ ctxt.manifestFiles.filter((mf) => this.uvLockMatcher(mf)),
222466
+ async (lockFile) => {
222467
+ const rootAndMemberTomlFiles = await getPyprojectTomlFilesForLockFile(
222468
+ this.rootDir,
222469
+ lockFile,
222470
+ pyprojectTomlfiles
222471
+ );
222472
+ if (!rootAndMemberTomlFiles) return;
222473
+ const { rootTomlFile, memberTomlFiles } = rootAndMemberTomlFiles;
222474
+ tomlToLock.set(rootTomlFile, lockFile);
222475
+ for (const tomlFile of memberTomlFiles ?? []) {
222476
+ tomlToLock.set(tomlFile, lockFile);
222477
+ }
222478
+ }
222479
+ );
222480
+ const requirementsTxtPatches = [];
222481
+ const directUvTomlPatches = [];
222482
+ const transitiveUvTomlPatches = [];
222483
+ for (const [idx, upgradeVersion] of ctxt.upgrades) {
222301
222484
  const artifact = ctxt.artifacts[idx];
222302
222485
  assert10(artifact.name);
222303
222486
  assert10(artifact.version);
222487
+ const directRequirementsTxts = new Set(
222488
+ artifact.manifestFiles?.filter((mf) => this.requirementsTxtMatcher(mf.file)).map((mf) => mf.file) ?? []
222489
+ );
222304
222490
  const canonicalPackageName = canonicalizePyPIName(artifact.name);
222305
- const directManifestFiles = /* @__PURE__ */ new Set();
222306
- const directPatches = await asyncFlatMap(artifact.manifestFiles ?? [], async (mf) => {
222307
- directManifestFiles.add(mf.file);
222491
+ for (const mf of artifact.manifestFiles ?? []) {
222308
222492
  if (this.requirementsTxtMatcher(mf.file)) {
222309
- return await this.handleRequirementsDirect(mf, idx, upgradeVersion, ctxt);
222493
+ if (ctxt.wsFilter && !ctxt.wsFilter(dirname12(mf.file) || ".")) continue;
222494
+ const patches = await this.createRequirementsTxtDirectDependencyPatches(mf, idx, upgradeVersion, ctxt);
222495
+ requirementsTxtPatches.push(...patches);
222310
222496
  } else if (this.uvLockMatcher(mf.file)) {
222311
- (lockfileToArtifacts[mf.file] ??= []).push(idx);
222312
222497
  const rootAndMemberTomlFiles = await getPyprojectTomlFilesForLockFile(
222313
222498
  this.rootDir,
222314
222499
  mf.file,
@@ -222321,7 +222506,7 @@ var PipSocketUpgradeManager = class {
222321
222506
  artifacts: [idx],
222322
222507
  message: "Could not find pyproject.toml files for lock file"
222323
222508
  });
222324
- return [];
222509
+ continue;
222325
222510
  }
222326
222511
  const { rootTomlFile, memberTomlFiles } = rootAndMemberTomlFiles;
222327
222512
  const tomlToDeps = await getDependenciesMapFromUvLock(
@@ -222334,22 +222519,27 @@ var PipSocketUpgradeManager = class {
222334
222519
  status: "error",
222335
222520
  file: mf.file,
222336
222521
  artifacts: [idx],
222337
- message: "Could not construct dependencies map file"
222522
+ message: "Could not compute pyproject.toml files for dependency"
222338
222523
  });
222339
- return [];
222524
+ continue;
222340
222525
  }
222341
- const patches2 = [];
222342
222526
  for (const [tomlFile, { direct, transitive }] of tomlToDeps) {
222527
+ if (ctxt.wsFilter && !ctxt.wsFilter(dirname12(tomlFile) || ".")) continue;
222343
222528
  if (direct.has(canonicalPackageName)) {
222344
- patches2.push(...await this.handlePyprojectToml(tomlFile, idx, upgradeVersion, ctxt));
222529
+ directUvTomlPatches.push(
222530
+ ...await this.createPyprojectTomlDirectDependencyPatches(tomlFile, idx, upgradeVersion, ctxt)
222531
+ );
222345
222532
  }
222346
222533
  if (transitive.has(canonicalPackageName)) {
222347
- patches2.push(
222348
- ...await this.handlePyprojectTomlUvDependencyOverride(rootTomlFile, idx, upgradeVersion, ctxt)
222534
+ transitiveUvTomlPatches.push(
222535
+ ...await this.createOverrideDependencyAddPatches(rootTomlFile, idx, upgradeVersion, ctxt)
222349
222536
  );
222350
222537
  }
222538
+ directUvTomlPatches.push(
222539
+ ...await this.createOverrideDependencyUpdatePatches(rootTomlFile, idx, upgradeVersion, ctxt)
222540
+ );
222351
222541
  }
222352
- return patches2;
222542
+ continue;
222353
222543
  } else {
222354
222544
  ctxt.statusUpdater?.({
222355
222545
  status: "error",
@@ -222357,64 +222547,90 @@ var PipSocketUpgradeManager = class {
222357
222547
  artifacts: [idx],
222358
222548
  message: "Unhandled manifest file"
222359
222549
  });
222360
- return [];
222361
222550
  }
222362
- });
222363
- const ancestorManifestFiles = i3(
222364
- artifact.toplevelAncestors?.flatMap(
222365
- (ancestorId) => ctxt.artifacts.find((a4) => a4.id === ancestorId)?.manifestFiles?.map((m4) => m4.file).filter((f6) => !directManifestFiles.has(f6)) ?? []
222366
- ) ?? []
222367
- );
222368
- const transitivePatches = await asyncFlatMap(ancestorManifestFiles, async (f6) => {
222369
- if (this.requirementsTxtMatcher(f6)) {
222370
- return await this.handleRequirementsTransitive(f6, idx, upgradeVersion, ctxt);
222371
- } else if (this.uvLockMatcher(f6)) {
222372
- ctxt.statusUpdater?.({
222373
- status: "error",
222374
- file: f6,
222375
- artifacts: [idx],
222376
- message: "Unexpected top-level ancestor for uv.lock dependency"
222377
- });
222378
- return [];
222379
- } else {
222380
- ctxt.statusUpdater?.({
222381
- status: "error",
222382
- file: f6,
222383
- artifacts: [idx],
222384
- message: "Unhandled manifest file"
222385
- });
222386
- return [];
222551
+ }
222552
+ const transitiveRequirementsTxts = /* @__PURE__ */ new Set();
222553
+ for (const ancestorId of artifact.toplevelAncestors ?? []) {
222554
+ const ancestor = ctxt.artifacts.find((a4) => a4.id === ancestorId);
222555
+ if (ancestor) {
222556
+ for (const mf of ancestor.manifestFiles ?? []) {
222557
+ if (this.requirementsTxtMatcher(mf.file) && !directRequirementsTxts.has(mf.file)) {
222558
+ transitiveRequirementsTxts.add(mf.file);
222559
+ }
222560
+ }
222387
222561
  }
222388
- });
222389
- return directPatches.concat(transitivePatches);
222390
- });
222391
- await applyPatches("PIP", this.rootDir, patches, ctxt);
222392
- await asyncForEach(Object.entries(lockfileToArtifacts), async ([file, artifacts]) => {
222393
- const oldFileContent = await readFile24(resolve29(this.rootDir, file), "utf-8");
222394
- const { error } = await execNeverFail(cmdt`uv lock`, dirname12(resolve29(this.rootDir, file)));
222395
- const updatedFileContent = await readFile24(resolve29(this.rootDir, file), "utf-8");
222396
- if (!error) {
222562
+ }
222563
+ for (const requirementsTxt of transitiveRequirementsTxts) {
222564
+ if (ctxt.wsFilter && !ctxt.wsFilter(dirname12(requirementsTxt) || ".")) continue;
222565
+ const patches = await this.createRequirementsTxtTransitivePatches(requirementsTxt, idx, upgradeVersion, ctxt);
222566
+ requirementsTxtPatches.push(...patches);
222567
+ }
222568
+ }
222569
+ const restoreMap = /* @__PURE__ */ new Map();
222570
+ for (const { file, artifacts } of directUvTomlPatches.concat(transitiveUvTomlPatches)) {
222571
+ const path2 = resolve29(this.rootDir, file);
222572
+ if (!restoreMap.has(path2)) {
222573
+ restoreMap.set(path2, {
222574
+ content: await readFile24(path2, "utf-8"),
222575
+ artifacts: []
222576
+ });
222577
+ }
222578
+ const existing = restoreMap.get(path2);
222579
+ for (const idx of artifacts) {
222580
+ if (!existing.artifacts.includes(idx)) {
222581
+ existing.artifacts.push(idx);
222582
+ }
222583
+ }
222584
+ }
222585
+ const appliedTemporaryPatches = await applyPatches(
222586
+ "PIP",
222587
+ this.rootDir,
222588
+ directUvTomlPatches.concat(transitiveUvTomlPatches),
222589
+ ctxt
222590
+ );
222591
+ const lockfileToArtifacts = {};
222592
+ for (const { file, artifacts } of appliedTemporaryPatches) {
222593
+ const lockfile2 = tomlToLock.get(file);
222594
+ if (!lockfile2) {
222397
222595
  ctxt.statusUpdater?.({
222398
- status: "success",
222596
+ status: "error",
222399
222597
  file,
222400
- message: "Lockfile updated",
222401
- patch: createPatch(file, oldFileContent, updatedFileContent, void 0, void 0, { context: 3 }),
222598
+ message: "No lockfile found for pyproject.toml",
222402
222599
  artifacts: i3(artifacts)
222403
222600
  });
222404
- } else {
222601
+ continue;
222602
+ }
222603
+ if (!lockfileToArtifacts[lockfile2]) {
222604
+ lockfileToArtifacts[lockfile2] = [];
222605
+ }
222606
+ lockfileToArtifacts[lockfile2].push(...artifacts);
222607
+ }
222608
+ await this.refreshLockfiles(lockfileToArtifacts, ctxt, "LOCKFILE_ONLY");
222609
+ await asyncForEach(Array.from(restoreMap), async ([path2, { content, artifacts }]) => {
222610
+ try {
222611
+ await writeFile9(path2, content);
222612
+ ctxt.statusUpdater?.({
222613
+ status: "success",
222614
+ file: relative11(this.rootDir, path2),
222615
+ message: "File restored",
222616
+ artifacts: i3(artifacts)
222617
+ });
222618
+ } catch (e) {
222405
222619
  ctxt.statusUpdater?.({
222406
222620
  status: "error",
222407
- file,
222408
- artifacts: i3(artifacts),
222409
- message: `Failed to update lockfile -- error during 'uv lock': ${error.message}`
222621
+ file: relative11(this.rootDir, path2),
222622
+ message: "Could not restore file",
222623
+ artifacts: i3(artifacts)
222410
222624
  });
222411
222625
  }
222412
222626
  });
222627
+ await applyPatches("RUST", this.rootDir, requirementsTxtPatches.concat(directUvTomlPatches), ctxt);
222628
+ await this.refreshLockfiles(lockfileToArtifacts, ctxt, "FULL_INSTALL");
222413
222629
  }
222414
222630
  /**
222415
222631
  * Handle requirements.txt file updates
222416
222632
  */
222417
- async handleRequirementsDirect(ref, idx, upgradeVersion, ctxt) {
222633
+ async createRequirementsTxtDirectDependencyPatches(ref, idx, upgradeVersion, ctxt) {
222418
222634
  const fullPath = resolve29(this.rootDir, ref.file);
222419
222635
  const artifact = ctxt.artifacts[idx];
222420
222636
  assert10(artifact.name);
@@ -222436,91 +222652,13 @@ var PipSocketUpgradeManager = class {
222436
222652
  const requirements = (0, import_pip_requirements_js.parsePipRequirementsFileLoosely)(content, { includeLocations: true });
222437
222653
  const foundRequirement = requirements.filter((req) => req.data.type === "ProjectName").find((req) => refStart <= req.location.startIdx && req.location.endIdx <= refEnd);
222438
222654
  if (foundRequirement) {
222439
- if (ctxt.rangeStyle === "pin") {
222440
- if (foundRequirement.data.versionSpec?.length) {
222441
- const firstSpec = foundRequirement.data.versionSpec[0];
222442
- const lastSpec = foundRequirement.data.versionSpec[foundRequirement.data.versionSpec.length - 1];
222443
- const operatorToVersionWhitespace = getWhitespace(
222444
- content,
222445
- firstSpec.data.operator.location.endIdx,
222446
- firstSpec.data.version.location.startIdx
222447
- );
222448
- patches.push({
222449
- file: ref.file,
222450
- offset: firstSpec.data.operator.location.startIdx,
222451
- length: lastSpec.location.endIdx - firstSpec.data.operator.location.startIdx,
222452
- artifacts: [idx],
222453
- text: `==${operatorToVersionWhitespace}${upgradeVersion}`
222454
- });
222455
- } else {
222456
- patches.push({
222457
- file: ref.file,
222458
- offset: foundRequirement.location.endIdx,
222459
- artifacts: [idx],
222460
- text: `==${upgradeVersion}`
222461
- });
222462
- }
222463
- } else if (foundRequirement.data.versionSpec?.length) {
222464
- for (const spec of foundRequirement.data.versionSpec) {
222465
- const operator = spec.data.operator.data;
222466
- const specVersion = spec.data.version.data;
222467
- const whitespace2 = getWhitespace(
222468
- content,
222469
- spec.data.operator.location.endIdx,
222470
- spec.data.version.location.startIdx
222471
- );
222472
- const patch = operator === ">" ? {
222473
- text: `>=${whitespace2}${upgradeVersion}`,
222474
- offset: spec.data.operator.location.startIdx,
222475
- length: spec.data.version.location.endIdx - spec.data.operator.location.startIdx
222476
- } : operator === "<" ? {
222477
- text: `<=${whitespace2}${upgradeVersion}`,
222478
- offset: spec.data.operator.location.startIdx,
222479
- length: spec.data.version.location.endIdx - spec.data.operator.location.startIdx
222480
- } : operator === "!=" && pipCompareVersions(specVersion, upgradeVersion) === 0 ? {
222481
- text: "",
222482
- offset: spec.data.operator.location.startIdx,
222483
- length: spec.data.version.location.endIdx - spec.data.operator.location.startIdx
222484
- } : operator === "!=" ? void 0 : {
222485
- text: upgradeVersion,
222486
- offset: spec.data.version.location.startIdx,
222487
- length: spec.data.version.location.endIdx - spec.data.version.location.startIdx
222488
- };
222489
- if (patch) {
222490
- patches.push({ ...patch, file: ref.file, artifacts: [idx] });
222491
- }
222492
- }
222493
- const isLowerBounded = foundRequirement.data.versionSpec.some(
222494
- (spec) => ["~=", "==", "===", ">", ">="].includes(spec.data.operator.data)
222495
- );
222496
- if (!isLowerBounded) {
222497
- const firstSpec = foundRequirement.data.versionSpec[0];
222498
- const commaSpacing = foundRequirement.data.versionSpec.length > 1 ? getWhitespace(
222499
- content,
222500
- foundRequirement.data.versionSpec[0].location.endIdx + 1,
222501
- foundRequirement.data.versionSpec[1].data.operator.location.startIdx
222502
- ) : "";
222503
- patches.push({
222504
- file: ref.file,
222505
- offset: firstSpec.data.operator.location.startIdx,
222506
- artifacts: [idx],
222507
- text: `>=${upgradeVersion},${commaSpacing}`
222508
- });
222509
- }
222510
- } else {
222511
- patches.push({
222512
- file: ref.file,
222513
- offset: foundRequirement.data.name.location.endIdx,
222514
- artifacts: [idx],
222515
- text: `==${upgradeVersion}`
222516
- });
222517
- }
222655
+ patches.push(...createPep508VersionPatches(ref.file, idx, foundRequirement, upgradeVersion, ctxt.rangeStyle));
222518
222656
  } else {
222519
222657
  ctxt.statusUpdater?.({
222520
222658
  status: "error",
222521
222659
  file: ref.file,
222522
222660
  artifacts: [idx],
222523
- message: `Dependency declaration not found: ${JSON.stringify(artifact)}`
222661
+ message: `Dependency declaration not found`
222524
222662
  });
222525
222663
  }
222526
222664
  } catch (e) {
@@ -222528,7 +222666,7 @@ var PipSocketUpgradeManager = class {
222528
222666
  status: "error",
222529
222667
  file: ref.file,
222530
222668
  artifacts: [idx],
222531
- message: `Error parsing requirements file: ${e.message}`
222669
+ message: `Error parsing requirements file: ${e.message ?? "Unknown error"}`
222532
222670
  });
222533
222671
  }
222534
222672
  return patches;
@@ -222536,7 +222674,7 @@ var PipSocketUpgradeManager = class {
222536
222674
  /**
222537
222675
  * Handle requirements.txt file updates
222538
222676
  */
222539
- async handleRequirementsTransitive(requirementsFile, idx, upgradeVersion, ctxt) {
222677
+ async createRequirementsTxtTransitivePatches(requirementsFile, idx, upgradeVersion, ctxt) {
222540
222678
  const fullPath = resolve29(this.rootDir, requirementsFile);
222541
222679
  const artifact = ctxt.artifacts[idx];
222542
222680
  assert10(artifact.name);
@@ -222559,7 +222697,7 @@ ${newText}
222559
222697
  status: "error",
222560
222698
  file: requirementsFile,
222561
222699
  artifacts: [idx],
222562
- message: `Error parsing requirements file: ${e.message}`
222700
+ message: `Error parsing requirements file: ${e.message ?? "Unknown error"}`
222563
222701
  });
222564
222702
  }
222565
222703
  return patches;
@@ -222567,7 +222705,7 @@ ${newText}
222567
222705
  /**
222568
222706
  * Handle pyproject.toml file updates for direct dependencies
222569
222707
  */
222570
- async handlePyprojectToml(tomlFile, idx, upgradeVersion, ctxt) {
222708
+ async createPyprojectTomlDirectDependencyPatches(tomlFile, idx, upgradeVersion, ctxt) {
222571
222709
  const fullPath = resolve29(this.rootDir, tomlFile);
222572
222710
  const content = await readFile24(fullPath, "utf-8");
222573
222711
  const toml = parseTOML2(content);
@@ -222594,10 +222732,10 @@ ${newText}
222594
222732
  if (parsed && parsed.data.type === "ProjectName") {
222595
222733
  if (canonicalizePyPIName(parsed.data.name.data) === canonicalizePyPIName(artifact.name) && satisfiestVersionSpecs(artifact.version, parsed.data.versionSpec)) {
222596
222734
  patches.push(
222597
- ...this.createPep508VersionPatches(
222735
+ ...createPep508VersionPatches(
222598
222736
  tomlFile,
222599
222737
  idx,
222600
- depSpec,
222738
+ parsed,
222601
222739
  upgradeVersion,
222602
222740
  ctxt.rangeStyle,
222603
222741
  element[range][0] + 1
@@ -222621,10 +222759,10 @@ ${newText}
222621
222759
  if (parsed && parsed.data.type === "ProjectName") {
222622
222760
  if (canonicalizePyPIName(parsed.data.name.data) === canonicalizePyPIName(artifact.name) && satisfiestVersionSpecs(artifact.version, parsed.data.versionSpec)) {
222623
222761
  patches.push(
222624
- ...this.createPep508VersionPatches(
222762
+ ...createPep508VersionPatches(
222625
222763
  tomlFile,
222626
222764
  idx,
222627
- depSpec,
222765
+ parsed,
222628
222766
  upgradeVersion,
222629
222767
  ctxt.rangeStyle,
222630
222768
  element[range][0] + 1
@@ -222650,10 +222788,10 @@ ${newText}
222650
222788
  if (parsed && parsed.data.type === "ProjectName") {
222651
222789
  if (canonicalizePyPIName(parsed.data.name.data) === canonicalizePyPIName(artifact.name) && satisfiestVersionSpecs(artifact.version, parsed.data.versionSpec)) {
222652
222790
  patches.push(
222653
- ...this.createPep508VersionPatches(
222791
+ ...createPep508VersionPatches(
222654
222792
  tomlFile,
222655
222793
  idx,
222656
- depSpec,
222794
+ parsed,
222657
222795
  upgradeVersion,
222658
222796
  ctxt.rangeStyle,
222659
222797
  element[range][0] + 1
@@ -222677,7 +222815,12 @@ ${newText}
222677
222815
  }
222678
222816
  return patches;
222679
222817
  }
222680
- async handlePyprojectTomlUvDependencyOverride(pyprojectToml, idx, upgradeVersion, ctxt) {
222818
+ /**
222819
+ * Update existing tool.uv.override-dependencies entries in pyproject.toml
222820
+ * Only returns patches for existing entries - does not create new ones
222821
+ * These patches should be kept (re-applied after restoration)
222822
+ */
222823
+ async createOverrideDependencyUpdatePatches(pyprojectToml, idx, upgradeVersion, ctxt) {
222681
222824
  const artifact = ctxt.artifacts[idx];
222682
222825
  assert10(artifact.name);
222683
222826
  assert10(artifact.version);
@@ -222694,24 +222837,22 @@ ${newText}
222694
222837
  });
222695
222838
  return patches;
222696
222839
  }
222697
- const toolUv = getNestedValue(toml, "tool.uv");
222698
- const overrideDeps = toolUv instanceof TOMLTable ? toolUv["override-dependencies"] : void 0;
222840
+ const overrideDeps = getNestedValue(toml, "tool.uv.override-dependencies");
222699
222841
  if (overrideDeps instanceof TOMLArray) {
222700
- for (let i7 = 0; i7 < overrideDeps.length; i7++) {
222701
- const element = overrideDeps[i7];
222702
- if (element instanceof TOMLScalar && typeof element[value] === "string") {
222703
- const overrideSpec = element[value];
222842
+ for (const overrideDep of overrideDeps) {
222843
+ if (overrideDep instanceof TOMLScalar && typeof overrideDep[value] === "string") {
222844
+ const overrideSpec = overrideDep[value];
222704
222845
  const parsed = (0, import_pip_requirements_js.parsePipRequirementsLineLoosely)(overrideSpec, { includeLocations: true });
222705
222846
  if (parsed && parsed.data.type === "ProjectName") {
222706
222847
  if (canonicalizePyPIName(parsed.data.name.data) === canonicalizePyPIName(artifact.name) && satisfiestVersionSpecs(artifact.version, parsed.data.versionSpec)) {
222707
222848
  patches.push(
222708
- ...this.createPep508VersionPatches(
222849
+ ...createPep508VersionPatches(
222709
222850
  pyprojectToml,
222710
222851
  idx,
222711
- overrideSpec,
222852
+ parsed,
222712
222853
  upgradeVersion,
222713
222854
  ctxt.rangeStyle,
222714
- element[range][0] + 1
222855
+ overrideDep[range][0] + 1
222715
222856
  // Skip opening quote
222716
222857
  )
222717
222858
  );
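The lookup above now resolves the dotted path "tool.uv.override-dependencies" in a single getNestedValue call instead of two steps. The bundle's helper walks the parsed TOML AST and is not shown in this diff; a plain-object analogue of the same idea, purely for illustration:

    // Hypothetical plain-object analogue of a dotted-path lookup like getNestedValue.
    function getNested(obj: unknown, path: string): unknown {
      return path.split(".").reduce<unknown>(
        (cur, key) => (cur !== null && typeof cur === "object" ? (cur as Record<string, unknown>)[key] : undefined),
        obj,
      );
    }

    const pyproject = { tool: { uv: { "override-dependencies": ["urllib3==2.2.2"] } } };
    console.log(getNested(pyproject, "tool.uv.override-dependencies")); // ["urllib3==2.2.2"]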
@@ -222719,7 +222860,53 @@ ${newText}
222719
222860
  }
222720
222861
  }
222721
222862
  }
222722
- if (patches.length) return patches;
222863
+ }
222864
+ } catch (e) {
222865
+ ctxt.statusUpdater?.({
222866
+ status: "error",
222867
+ file: pyprojectToml,
222868
+ artifacts: [idx],
222869
+ message: `Error updating uv override-dependency: ${e.message ?? "Unknown error"}`
222870
+ });
222871
+ }
222872
+ return patches;
222873
+ }
222874
+ /**
222875
+ * Add new tool.uv.override-dependencies entries in pyproject.toml
222876
+ * Only creates new entries - does not update existing ones
222877
+ * These patches are temporary and should be rolled back after lockfile update
222878
+ */
222879
+ async createOverrideDependencyAddPatches(pyprojectToml, idx, upgradeVersion, ctxt) {
222880
+ const artifact = ctxt.artifacts[idx];
222881
+ assert10(artifact.name);
222882
+ assert10(artifact.version);
222883
+ const patches = [];
222884
+ try {
222885
+ const content = await readFile24(resolve29(this.rootDir, pyprojectToml), "utf-8");
222886
+ const toml = parseTOML2(content);
222887
+ if (!toml) {
222888
+ ctxt.statusUpdater?.({
222889
+ status: "error",
222890
+ file: pyprojectToml,
222891
+ artifacts: [idx],
222892
+ message: "Failed to parse TOML file"
222893
+ });
222894
+ return patches;
222895
+ }
222896
+ const toolUv = getNestedValue(toml, "tool.uv");
222897
+ const overrideDeps = getNestedValue(toml, "tool.uv.override-dependencies");
222898
+ if (overrideDeps instanceof TOMLArray) {
222899
+ for (const overrideDep of overrideDeps) {
222900
+ if (overrideDep instanceof TOMLScalar && typeof overrideDep[value] === "string") {
222901
+ const overrideSpec = overrideDep[value];
222902
+ const parsed = (0, import_pip_requirements_js.parsePipRequirementsLineLoosely)(overrideSpec, { includeLocations: true });
222903
+ if (parsed && parsed.data.type === "ProjectName") {
222904
+ if (canonicalizePyPIName(parsed.data.name.data) === canonicalizePyPIName(artifact.name)) {
222905
+ return patches;
222906
+ }
222907
+ }
222908
+ }
222909
+ }
222723
222910
  const lastElement = overrideDeps[overrideDeps.length - 1];
222724
222911
  const insertPosition = lastElement ? lastElement[range][1] : overrideDeps[range][0] + 1;
222725
222912
  patches.push({
@@ -222728,13 +222915,12 @@ ${newText}
222728
222915
  artifacts: [idx],
222729
222916
  text: overrideDeps.length > 0 ? `,
222730
222917
  ${" ".repeat(4)}"${artifact.name}==${upgradeVersion}"` : `
222731
- ${" ".repeat(4)}'"${artifact.name}==${upgradeVersion}"`
222918
+ ${" ".repeat(4)}"${artifact.name}==${upgradeVersion}"`
222732
222919
  });
222733
222920
  } else if (toolUv instanceof TOMLTable) {
222734
- const insertPosition = toolUv[range][1];
222735
222921
  patches.push({
222736
222922
  file: pyprojectToml,
222737
- offset: insertPosition,
222923
+ offset: toolUv[range][1],
222738
222924
  artifacts: [idx],
222739
222925
  text: `
222740
222926
  override-dependencies = [
@@ -222742,16 +222928,15 @@ ${" ".repeat(4)}"${artifact.name}==${upgradeVersion}",
222742
222928
  ]`
222743
222929
  });
222744
222930
  } else {
222745
- const insertPosition = toml[range][1];
222746
222931
  patches.push({
222747
222932
  file: pyprojectToml,
222748
- offset: insertPosition,
222933
+ offset: toml[range][1],
222749
222934
  artifacts: [idx],
222750
222935
  text: `
222751
222936
 
222752
222937
  [tool.uv]
222753
222938
  override-dependencies = [
222754
- ' '.repeat(4)"${artifact.name}==${upgradeVersion}",
222939
+ ${" ".repeat(4)}"${artifact.name}==${upgradeVersion}",
222755
222940
  ]`
222756
222941
  });
222757
222942
  }
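When pyproject.toml has neither an override-dependencies array nor a [tool.uv] table, the branch above appends a fresh table at the end of the document. A tiny sketch of the text it produces, mirroring the template literal above (package name and version invented):

    // Text appended when no [tool.uv] table exists yet (mirrors the template above).
    const name = "urllib3";
    const upgradeVersion = "2.2.2";
    const appendedText = [
      "",
      "",
      "[tool.uv]",
      "override-dependencies = [",
      `${" ".repeat(4)}"${name}==${upgradeVersion}",`,
      "]",
    ].join("\n");
    console.log(appendedText);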
@@ -222760,97 +222945,48 @@ override-dependencies = [
222760
222945
  status: "error",
222761
222946
  file: pyprojectToml,
222762
222947
  artifacts: [idx],
222763
- message: `Error adding uv override-dependency: ${e.message}`
222948
+ message: `Error adding uv override-dependency: ${e.message ?? "Unknown error"}`
222764
222949
  });
222765
222950
  }
222766
222951
  return patches;
222767
222952
  }
222768
- /**
222769
- * Create patches for updating PEP 508 dependency specifications
222770
- * Reuses the logic from handleRequirementsDirect for consistency
222771
- */
222772
- createPep508VersionPatches(file, idx, depSpec, upgradeVersion, rangeStyle, baseOffset) {
222773
- const parsed = (0, import_pip_requirements_js.parsePipRequirementsLineLoosely)(depSpec, { includeLocations: true });
222774
- if (!parsed || parsed.data.type !== "ProjectName") return [];
222775
- const patches = [];
222776
- if (rangeStyle === "pin") {
222777
- if (parsed.data.versionSpec?.length) {
222778
- const firstSpec = parsed.data.versionSpec[0];
222779
- const lastSpec = parsed.data.versionSpec[parsed.data.versionSpec.length - 1];
222780
- const whitespace2 = firstSpec.data.version ? getWhitespace(depSpec, firstSpec.data.operator.location.endIdx, firstSpec.data.version.location.startIdx) : "";
222781
- patches.push({
222782
- file,
222783
- offset: baseOffset + firstSpec.data.operator.location.startIdx,
222784
- length: lastSpec.location.endIdx - firstSpec.data.operator.location.startIdx,
222785
- artifacts: [idx],
222786
- text: `==${whitespace2}${upgradeVersion}`
222787
- });
222953
+ async refreshLockfiles(lockfileToArtifacts, ctxt, _mode) {
222954
+ await asyncForEach(Object.entries(lockfileToArtifacts), async ([lockfile2, artifacts]) => {
222955
+ const lockfileDir = dirname12(resolve29(this.rootDir, lockfile2));
222956
+ const oldFileContent = await readFile24(resolve29(this.rootDir, lockfile2), "utf-8");
222957
+ let error;
222958
+ if (this.uvLockMatcher(lockfile2)) {
222959
+ const result = await execNeverFail(["uv", "lock"], lockfileDir);
222960
+ error = result.error;
222788
222961
  } else {
222789
- patches.push({
222790
- file,
222791
- offset: baseOffset + parsed.location.endIdx,
222792
- artifacts: [idx],
222793
- text: `==${upgradeVersion}`
222962
+ ctxt.statusUpdater?.({
222963
+ status: "error",
222964
+ file: lockfile2,
222965
+ artifacts: i3(artifacts),
222966
+ message: "Unknown lockfile type"
222794
222967
  });
222968
+ return;
222795
222969
  }
222796
- } else if (parsed.data.versionSpec?.length) {
222797
- for (const spec of parsed.data.versionSpec) {
222798
- const operator = spec.data.operator.data;
222799
- const specVersion = spec.data.version?.data;
222800
- const whitespace2 = spec.data.version ? getWhitespace(depSpec, spec.data.operator.location.endIdx, spec.data.version.location.startIdx) : "";
222801
- const patch = operator === ">" ? {
222802
- text: `>=${whitespace2}${upgradeVersion}`,
222803
- offset: baseOffset + spec.data.operator.location.startIdx,
222804
- length: (spec.data.version?.location.endIdx ?? spec.data.operator.location.endIdx) - spec.data.operator.location.startIdx
222805
- } : operator === "<" ? {
222806
- text: `<=${whitespace2}${upgradeVersion}`,
222807
- offset: baseOffset + spec.data.operator.location.startIdx,
222808
- length: (spec.data.version?.location.endIdx ?? spec.data.operator.location.endIdx) - spec.data.operator.location.startIdx
222809
- } : operator === "!=" && (specVersion ? pipCompareVersions(specVersion, upgradeVersion) === 0 : true) ? {
222810
- text: "",
222811
- offset: baseOffset + spec.data.operator.location.startIdx,
222812
- length: (spec.data.version?.location.endIdx ?? spec.data.operator.location.endIdx) - spec.data.operator.location.startIdx
222813
- } : operator === "!=" ? void 0 : {
222814
- text: upgradeVersion,
222815
- offset: baseOffset + (spec.data.version?.location.startIdx ?? spec.data.operator.location.endIdx),
222816
- length: spec.data.version ? spec.data.version.location.endIdx - spec.data.version.location.startIdx : void 0
222817
- };
222818
- if (patch) {
222819
- patches.push({ ...patch, file, artifacts: [idx] });
222820
- }
222821
- }
222822
- const isLowerBounded = parsed.data.versionSpec.some(
222823
- (spec) => ["~=", "==", "===", ">", ">="].includes(spec.data.operator.data)
222824
- );
222825
- if (!isLowerBounded) {
222826
- const firstSpec = parsed.data.versionSpec[0];
222827
- const commaSpacing = parsed.data.versionSpec.length > 1 ? getWhitespace(
222828
- depSpec,
222829
- parsed.data.versionSpec[0].location.endIdx + 1,
222830
- parsed.data.versionSpec[1].data.operator.location.startIdx
222831
- ) : "";
222832
- patches.push({
222833
- file,
222834
- offset: firstSpec.data.operator.location.startIdx,
222835
- artifacts: [idx],
222836
- text: `>=${upgradeVersion},${commaSpacing}`
222970
+ if (!error) {
222971
+ const finalFileContent = await readFile24(resolve29(this.rootDir, lockfile2), "utf-8");
222972
+ ctxt.statusUpdater?.({
222973
+ status: "success",
222974
+ file: lockfile2,
222975
+ message: "Lockfile updated",
222976
+ patch: createPatch(lockfile2, oldFileContent, finalFileContent, void 0, void 0, { context: 3 }),
222977
+ artifacts: i3(artifacts)
222978
+ });
222979
+ } else {
222980
+ ctxt.statusUpdater?.({
222981
+ status: "error",
222982
+ file: lockfile2,
222983
+ artifacts: i3(artifacts),
222984
+ message: `Failed to update lockfile: ${error.message ?? "Unknown error"}`
222837
222985
  });
222838
222986
  }
222839
- } else {
222840
- patches.push({
222841
- file,
222842
- offset: parsed.data.name.location.endIdx,
222843
- artifacts: [idx],
222844
- text: `==${upgradeVersion}`
222845
- });
222846
- }
222847
- return patches;
222987
+ });
222848
222988
  }
222849
222989
  };
222850
- function getWhitespace(content, startIdx, endIdx) {
222851
- const substring = content.substring(startIdx, endIdx);
222852
- return substring.replace(/#.*$/gm, (match2) => " ".repeat(match2.length));
222853
- }
222854
222990
  function canonicalizePyPIName(name) {
222855
222991
  return (name ?? "").trim().toLowerCase().replaceAll(/[-_.]+/gi, "-");
222856
222992
  }
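canonicalizePyPIName above (unchanged context) applies the usual PyPI name normalization: trim, lowercase, and collapse runs of "-", "_" and "." into a single dash, so differently spelled requirement names compare equal. The same transform, standalone:

    // Same normalization as canonicalizePyPIName above, shown standalone.
    const canonicalize = (name: string): string =>
      name.trim().toLowerCase().replaceAll(/[-_.]+/g, "-");

    console.log(canonicalize("Typing_Extensions")); // "typing-extensions"
    console.log(canonicalize("zope.interface"));    // "zope-interface"
    console.log(canonicalize(" ruamel.yaml "));     // "ruamel-yaml"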
@@ -222911,8 +223047,8 @@ function satisfiestVersionSpecs(version3, versionSpec) {
222911
223047
  }
222912
223048
 
222913
223049
  // ../fixing-management/src/fixing-management/rubygems/rubygems-socket-upgrade-manager.ts
222914
- import { basename as basename9, dirname as dirname14, relative as relative11, resolve as resolve31 } from "node:path";
222915
- var import_picomatch6 = __toESM(require_picomatch2(), 1);
223050
+ import { dirname as dirname14, relative as relative13, resolve as resolve31 } from "node:path";
223051
+ var import_picomatch8 = __toESM(require_picomatch2(), 1);
222916
223052
  import assert11 from "node:assert";
222917
223053
 
222918
223054
  // ../fixing-management/src/fixing-management/rubygems/gemfile-utils.ts
@@ -223025,7 +223161,7 @@ var parser = {
223025
223161
  var lang3 = { lexer, parser };
223026
223162
 
223027
223163
  // ../fixing-management/src/fixing-management/rubygems/gemfile-utils.ts
223028
- import { resolve as resolve30, dirname as dirname13, relative as relative10 } from "node:path";
223164
+ import { resolve as resolve30, dirname as dirname13, relative as relative12 } from "node:path";
223029
223165
  import { existsSync as existsSync14, readFileSync as readFileSync3 } from "node:fs";
223030
223166
  var booleanQuery = import_good_enough_parser3.query.alt(
223031
223167
  import_good_enough_parser3.query.sym(/^true|false$/, (ctx, { value: value2, offset }) => {
@@ -223133,13 +223269,13 @@ var evalGemfileQuery = import_good_enough_parser3.query.sym("eval_gemfile").join
223133
223269
  ctx.exprEndOffset = void 0;
223134
223270
  if (ctx.depth > 50) {
223135
223271
  logger.warn(
223136
- `Recursion limit hit while evaluating gemfile: ${relative10(ctx.gemfile.rootDir, resolve30(ctx.gemfile.rootDir, ctx.gemfile.file))}`
223272
+ `Recursion limit hit while evaluating gemfile: ${relative12(ctx.gemfile.rootDir, resolve30(ctx.gemfile.rootDir, ctx.gemfile.file))}`
223137
223273
  );
223138
223274
  return ctx;
223139
223275
  }
223140
223276
  if (pathEvaluated === void 0) return ctx;
223141
223277
  const rootDir = ctx.gemfile.rootDir;
223142
- const file = relative10(rootDir, resolve30(rootDir, dirname13(ctx.gemfile.file), pathEvaluated));
223278
+ const file = relative12(rootDir, resolve30(rootDir, dirname13(ctx.gemfile.file), pathEvaluated));
223143
223279
  if (!existsSync14(resolve30(rootDir, file))) return ctx;
223144
223280
  const sourceText = readFileSync3(resolve30(rootDir, file), "utf-8");
223145
223281
  const parser2 = import_good_enough_parser3.lang.createLang(lang3);
@@ -223275,12 +223411,209 @@ function parseGemfileLock(content) {
223275
223411
  }
223276
223412
 
223277
223413
  // ../fixing-management/src/fixing-management/rubygems/rubygems-socket-upgrade-manager.ts
223278
- import { readFile as readFile25, writeFile as writeFile8 } from "node:fs/promises";
223414
+ import { readFile as readFile25, writeFile as writeFile10 } from "node:fs/promises";
223415
+
223416
+ // ../fixing-management/src/fixing-management/rubygems/rubygems-patch-utils.ts
223417
+ function createRubygemVersionPatches(gem, idx, upgradeVersion, rangeStyle, statusUpdater) {
223418
+ const patches = [];
223419
+ if (rangeStyle === "pin") {
223420
+ let seenVersionSpec = false;
223421
+ gem.specs.forEach((spec, i7) => {
223422
+ if (spec.type !== "version") return;
223423
+ const evaluated = evaluate4(spec.value);
223424
+ if (!seenVersionSpec) {
223425
+ seenVersionSpec = true;
223426
+ const parsed = parseRubyGemsConstraint(evaluated);
223427
+ if (parsed.operator) {
223428
+ const operatorLoc = findInValue(spec.value, parsed.operator);
223429
+ if (!operatorLoc) {
223430
+ statusUpdater?.({
223431
+ status: "error",
223432
+ file: gem.gemfile.file,
223433
+ artifacts: [idx],
223434
+ message: `Could not find source position of operator`
223435
+ });
223436
+ return;
223437
+ }
223438
+ patches.push({
223439
+ file: operatorLoc.gemfile.file,
223440
+ offset: operatorLoc.offset,
223441
+ length: operatorLoc.text.length,
223442
+ artifacts: [idx],
223443
+ text: "="
223444
+ });
223445
+ }
223446
+ const versionLoc = findInValue(spec.value, parsed.version);
223447
+ if (!versionLoc) {
223448
+ statusUpdater?.({
223449
+ status: "error",
223450
+ file: gem.gemfile.file,
223451
+ artifacts: [idx],
223452
+ message: `Could not find source position of version`
223453
+ });
223454
+ return;
223455
+ }
223456
+ patches.push({
223457
+ file: versionLoc.gemfile.file,
223458
+ offset: versionLoc.offset,
223459
+ length: versionLoc.text.length,
223460
+ artifacts: [idx],
223461
+ text: upgradeVersion
223462
+ });
223463
+ } else if (i7 > 0) {
223464
+ patches.push({
223465
+ file: gem.gemfile.file,
223466
+ offset: spec.preceedingCommaOffset,
223467
+ length: (gem.specs[i7 + 1]?.preceedingCommaOffset !== void 0 ? gem.specs[i7 + 1].preceedingCommaOffset : spec.endOfSpecOffset) - spec.preceedingCommaOffset,
223468
+ artifacts: [idx],
223469
+ text: ""
223470
+ });
223471
+ }
223472
+ });
223473
+ if (!seenVersionSpec) {
223474
+ patches.push({
223475
+ file: gem.gemfile.file,
223476
+ offset: gem.endOfNameOffset,
223477
+ artifacts: [idx],
223478
+ text: `, "${upgradeVersion}"`
223479
+ });
223480
+ }
223481
+ } else {
223482
+ let seenVersionSpec = false;
223483
+ let hasLowerBound = false;
223484
+ gem.specs.forEach((spec) => {
223485
+ if (spec.type !== "version") return;
223486
+ seenVersionSpec = true;
223487
+ const evaluated = evaluate4(spec.value);
223488
+ const parsed = parseRubyGemsConstraint(evaluated);
223489
+ const operatorStr = parsed.operator;
223490
+ if (!operatorStr) {
223491
+ hasLowerBound = true;
223492
+ const versionLoc2 = findInValue(spec.value, parsed.version);
223493
+ if (versionLoc2) {
223494
+ patches.push({
223495
+ file: versionLoc2.gemfile.file,
223496
+ offset: versionLoc2.offset,
223497
+ length: versionLoc2.text.length,
223498
+ artifacts: [idx],
223499
+ text: upgradeVersion
223500
+ });
223501
+ }
223502
+ return;
223503
+ }
223504
+ const operatorLoc = findInValue(spec.value, operatorStr);
223505
+ if (!operatorLoc) {
223506
+ statusUpdater?.({
223507
+ status: "error",
223508
+ file: gem.gemfile.file,
223509
+ artifacts: [idx],
223510
+ message: `Could not find source position of operator '${operatorStr}'`
223511
+ });
223512
+ return;
223513
+ }
223514
+ const versionLoc = findInValue(spec.value, parsed.version);
223515
+ if (!versionLoc) {
223516
+ statusUpdater?.({
223517
+ status: "error",
223518
+ file: gem.gemfile.file,
223519
+ artifacts: [idx],
223520
+ message: `Could not find source position of version '${parsed.version}'`
223521
+ });
223522
+ return;
223523
+ }
223524
+ if (operatorStr === ">") {
223525
+ hasLowerBound = true;
223526
+ patches.push({
223527
+ file: operatorLoc.gemfile.file,
223528
+ offset: operatorLoc.offset,
223529
+ length: operatorLoc.text.length,
223530
+ artifacts: [idx],
223531
+ text: ">="
223532
+ });
223533
+ patches.push({
223534
+ file: versionLoc.gemfile.file,
223535
+ offset: versionLoc.offset,
223536
+ length: versionLoc.text.length,
223537
+ artifacts: [idx],
223538
+ text: upgradeVersion
223539
+ });
223540
+ } else if (operatorStr === "<" && rubygemsCompareVersions(parsed.version, upgradeVersion) <= 0) {
223541
+ patches.push({
223542
+ file: operatorLoc.gemfile.file,
223543
+ offset: operatorLoc.offset,
223544
+ length: operatorLoc.text.length,
223545
+ artifacts: [idx],
223546
+ text: "<="
223547
+ });
223548
+ patches.push({
223549
+ file: versionLoc.gemfile.file,
223550
+ offset: versionLoc.offset,
223551
+ length: versionLoc.text.length,
223552
+ artifacts: [idx],
223553
+ text: upgradeVersion
223554
+ });
223555
+ } else if (operatorStr === "<=" && rubygemsCompareVersions(parsed.version, upgradeVersion) < 0) {
223556
+ patches.push({
223557
+ file: versionLoc.gemfile.file,
223558
+ offset: versionLoc.offset,
223559
+ length: versionLoc.text.length,
223560
+ artifacts: [idx],
223561
+ text: upgradeVersion
223562
+ });
223563
+ } else if (operatorStr === "!=" && rubygemsCompareVersions(parsed.version, upgradeVersion) === 0) {
223564
+ hasLowerBound = true;
223565
+ patches.push({
223566
+ file: operatorLoc.gemfile.file,
223567
+ offset: operatorLoc.offset,
223568
+ length: operatorLoc.text.length,
223569
+ artifacts: [idx],
223570
+ text: "="
223571
+ });
223572
+ patches.push({
223573
+ file: versionLoc.gemfile.file,
223574
+ offset: versionLoc.offset,
223575
+ length: versionLoc.text.length,
223576
+ artifacts: [idx],
223577
+ text: upgradeVersion
223578
+ });
223579
+ } else if ([">=", "=", "~>"].includes(operatorStr)) {
223580
+ hasLowerBound = true;
223581
+ patches.push({
223582
+ file: versionLoc.gemfile.file,
223583
+ offset: versionLoc.offset,
223584
+ length: versionLoc.text.length,
223585
+ artifacts: [idx],
223586
+ text: upgradeVersion
223587
+ });
223588
+ }
223589
+ });
223590
+ if (!seenVersionSpec) {
223591
+ patches.push({
223592
+ file: gem.gemfile.file,
223593
+ offset: gem.endOfNameOffset,
223594
+ artifacts: [idx],
223595
+ text: `, "${upgradeVersion}"`
223596
+ });
223597
+ hasLowerBound = true;
223598
+ }
223599
+ if (!hasLowerBound) {
223600
+ patches.push({
223601
+ file: gem.gemfile.file,
223602
+ offset: gem.endOfNameOffset,
223603
+ artifacts: [idx],
223604
+ text: `, ">= ${upgradeVersion}"`
223605
+ });
223606
+ }
223607
+ }
223608
+ return patches;
223609
+ }
223610
+
223611
+ // ../fixing-management/src/fixing-management/rubygems/rubygems-socket-upgrade-manager.ts
223279
223612
  var RubygemsSocketUpgradeManager = class {
223280
223613
  constructor(rootDir) {
223281
223614
  this.rootDir = rootDir;
223282
223615
  }
223283
- gemfileLockMatcher = (0, import_picomatch6.default)("Gemfile.lock");
223616
+ gemfileLockMatcher = (0, import_picomatch8.default)("Gemfile.lock", { basename: true });
223284
223617
  async applySocketArtifactUpgrades(ctxt) {
223285
223618
  const directPatches = [];
223286
223619
  const transitivePatches = [];
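For reference, the basename: true option passed to picomatch for gemfileLockMatcher makes a slash-free pattern match against the basename of a path, so lockfiles in nested directories still match. A minimal sketch with hypothetical paths, assuming the published picomatch package:

    import picomatch from "picomatch";

    const isGemfileLock = picomatch("Gemfile.lock", { basename: true });
    isGemfileLock("Gemfile.lock");              // true
    isGemfileLock("services/api/Gemfile.lock"); // true, matched against the basename
    isGemfileLock("services/api/Gemfile");      // false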
@@ -223289,7 +223622,8 @@ var RubygemsSocketUpgradeManager = class {
223289
223622
  const artifact = ctxt.artifacts[idx];
223290
223623
  assert11(artifact.name);
223291
223624
  for (const mf of artifact.manifestFiles ?? []) {
223292
- if (this.gemfileLockMatcher(basename9(mf.file))) {
223625
+ if (this.gemfileLockMatcher(mf.file)) {
223626
+ if (ctxt.wsFilter && !ctxt.wsFilter(dirname14(mf.file) || ".")) continue;
223293
223627
  const lockfileContent = await readFile25(resolve31(this.rootDir, mf.file), "utf-8");
223294
223628
  const gemfileLock = parseGemfileLock(lockfileContent);
223295
223629
  if (!gemfileLock.gems.has(artifact.name)) {
@@ -223301,7 +223635,7 @@ var RubygemsSocketUpgradeManager = class {
223301
223635
  });
223302
223636
  continue;
223303
223637
  }
223304
- const gemfileName = relative11(this.rootDir, resolve31(this.rootDir, (dirname14(mf.file), "Gemfile")));
223638
+ const gemfileName = relative13(this.rootDir, resolve31(this.rootDir, dirname14(mf.file), "Gemfile"));
223305
223639
  gemfileToLockfile.set(resolve31(this.rootDir, gemfileName), resolve31(this.rootDir, mf.file));
223306
223640
  if (gemfileLock.directDependencies.has(artifact.name)) {
223307
223641
  directPatches.push(...await this.handleGemfile(gemfileName, idx, upgradeVersion, ctxt));
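For reference, node:path resolve joins every segment it receives left to right, whereas a parenthesized comma expression evaluates to its last operand only, so the placement of parentheses determines which directory a sibling Gemfile path resolves under. A minimal Node sketch with hypothetical paths:

    import { dirname, resolve } from "node:path";

    const lockfile = "services/api/Gemfile.lock";
    resolve("/repo", dirname(lockfile), "Gemfile");   // "/repo/services/api/Gemfile"
    resolve("/repo", (dirname(lockfile), "Gemfile")); // "/repo/Gemfile" - the comma operator discards dirname(lockfile)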
@@ -223367,18 +223701,18 @@ var RubygemsSocketUpgradeManager = class {
223367
223701
  });
223368
223702
  await asyncForEach(Array.from(restoreMap), async ([path2, { content, artifacts }]) => {
223369
223703
  try {
223370
- await writeFile8(path2, content);
223704
+ await writeFile10(path2, content);
223371
223705
  ctxt.statusUpdater?.({
223372
223706
  status: "success",
223373
- file: relative11(this.rootDir, path2),
223374
- message: "Restored Gemfile",
223707
+ file: relative13(this.rootDir, path2),
223708
+ message: "File restored",
223375
223709
  artifacts: i3(artifacts)
223376
223710
  });
223377
223711
  } catch (e) {
223378
223712
  ctxt.statusUpdater?.({
223379
223713
  status: "error",
223380
- file: relative11(this.rootDir, path2),
223381
- message: "Could not restore Gemfile",
223714
+ file: relative13(this.rootDir, path2),
223715
+ message: "Could not restore file",
223382
223716
  artifacts: i3(artifacts)
223383
223717
  });
223384
223718
  }
@@ -223434,7 +223768,7 @@ var RubygemsSocketUpgradeManager = class {
223434
223768
  }
223435
223769
  const evaluatedSpecs = evaluatedSpecOpts.filter((spec) => spec !== void 0);
223436
223770
  if (rubygemsVersionSatisfiesConstraints(version3, evaluatedSpecs)) {
223437
- patches.push(...this.createRubygemVersionPatches(gem, idx, upgradeVersion, ctxt));
223771
+ patches.push(...createRubygemVersionPatches(gem, idx, upgradeVersion, ctxt.rangeStyle, ctxt.statusUpdater));
223438
223772
  }
223439
223773
  }
223440
223774
  if (patches.length === 0) {
@@ -223455,172 +223789,6 @@ var RubygemsSocketUpgradeManager = class {
223455
223789
  }
223456
223790
  return patches;
223457
223791
  }
223458
- createRubygemVersionPatches(gem, idx, upgradeVersion, ctxt) {
223459
- const patches = [];
223460
- if (ctxt.rangeStyle === "pin") {
223461
- let seenVersionSpec = false;
223462
- gem.specs.forEach((spec, i7) => {
223463
- if (spec.type !== "version") return;
223464
- const evaluated = evaluate4(spec.value);
223465
- if (!seenVersionSpec) {
223466
- seenVersionSpec = true;
223467
- const parsed = parseRubyGemsConstraint(evaluated);
223468
- if (parsed.operator) {
223469
- const operatorLoc = findInValue(spec.value, parsed.operator);
223470
- if (!operatorLoc) {
223471
- ctxt.statusUpdater?.({
223472
- status: "error",
223473
- file: gem.gemfile.file,
223474
- artifacts: [idx],
223475
- message: `Could not find source position of operator`
223476
- });
223477
- return;
223478
- }
223479
- patches.push({
223480
- file: operatorLoc.gemfile.file,
223481
- offset: operatorLoc.offset,
223482
- length: operatorLoc.text.length,
223483
- artifacts: [idx],
223484
- text: "="
223485
- });
223486
- }
223487
- const versionLoc = findInValue(spec.value, parsed.version);
223488
- if (!versionLoc) {
223489
- ctxt.statusUpdater?.({
223490
- status: "error",
223491
- file: gem.gemfile.file,
223492
- artifacts: [idx],
223493
- message: `Could not find source position of version`
223494
- });
223495
- return;
223496
- }
223497
- patches.push({
223498
- file: versionLoc.gemfile.file,
223499
- offset: versionLoc.offset,
223500
- length: versionLoc.text.length,
223501
- artifacts: [idx],
223502
- text: upgradeVersion
223503
- });
223504
- } else if (i7 > 0) {
223505
- patches.push({
223506
- file: gem.gemfile.file,
223507
- offset: spec.preceedingCommaOffset,
223508
- length: (gem.specs[i7 + 1]?.preceedingCommaOffset !== void 0 ? gem.specs[i7 + 1].preceedingCommaOffset : spec.endOfSpecOffset) - spec.preceedingCommaOffset,
223509
- artifacts: [idx],
223510
- text: ""
223511
- });
223512
- }
223513
- });
223514
- if (!seenVersionSpec) {
223515
- patches.push({
223516
- file: gem.gemfile.file,
223517
- offset: gem.endOfNameOffset,
223518
- artifacts: [idx],
223519
- text: `, "${upgradeVersion}"`
223520
- });
223521
- }
223522
- } else {
223523
- let seenVersionSpec = false;
223524
- let hasLowerBound = false;
223525
- gem.specs.forEach((spec, i7) => {
223526
- if (spec.type !== "version") return;
223527
- seenVersionSpec = true;
223528
- const evaluated = evaluate4(spec.value);
223529
- const parsed = parseRubyGemsConstraint(evaluated);
223530
- const operatorStr = parsed.operator;
223531
- if (operatorStr) {
223532
- const operatorLoc = findInValue(spec.value, operatorStr);
223533
- if (!operatorLoc) {
223534
- ctxt.statusUpdater?.({
223535
- status: "error",
223536
- file: gem.gemfile.file,
223537
- artifacts: [idx],
223538
- message: `Could not find source position of operator '${operatorStr}'`
223539
- });
223540
- return;
223541
- }
223542
- switch (operatorStr) {
223543
- case ">": {
223544
- hasLowerBound = true;
223545
- patches.push({
223546
- file: operatorLoc.gemfile.file,
223547
- offset: operatorLoc.offset,
223548
- length: operatorLoc.text.length,
223549
- artifacts: [idx],
223550
- text: ">="
223551
- });
223552
- break;
223553
- }
223554
- case "<": {
223555
- if (rubygemsCompareVersions(upgradeVersion, parsed.version) >= 0) {
223556
- patches.push({
223557
- file: operatorLoc.gemfile.file,
223558
- offset: operatorLoc.offset,
223559
- length: operatorLoc.text.length,
223560
- artifacts: [idx],
223561
- text: "<="
223562
- });
223563
- }
223564
- break;
223565
- }
223566
- case "!=": {
223567
- if (rubygemsCompareVersions(upgradeVersion, parsed.version) === 0) {
223568
- patches.push({
223569
- file: gem.gemfile.file,
223570
- offset: spec.preceedingCommaOffset,
223571
- length: (gem.specs[i7 + 1]?.preceedingCommaOffset !== void 0 ? gem.specs[i7 + 1].preceedingCommaOffset : spec.endOfSpecOffset) - spec.preceedingCommaOffset,
223572
- artifacts: [idx],
223573
- text: ""
223574
- });
223575
- }
223576
- return;
223577
- }
223578
- default: {
223579
- hasLowerBound = true;
223580
- break;
223581
- }
223582
- }
223583
- } else {
223584
- hasLowerBound = true;
223585
- }
223586
- const versionLoc = findInValue(spec.value, parsed.version);
223587
- if (!versionLoc) {
223588
- ctxt.statusUpdater?.({
223589
- status: "error",
223590
- file: gem.gemfile.file,
223591
- artifacts: [idx],
223592
- message: `Could not find source position of version '${parsed.version}'`
223593
- });
223594
- return;
223595
- }
223596
- patches.push({
223597
- file: versionLoc.gemfile.file,
223598
- offset: versionLoc.offset,
223599
- length: versionLoc.text.length,
223600
- artifacts: [idx],
223601
- text: upgradeVersion
223602
- });
223603
- });
223604
- if (!seenVersionSpec) {
223605
- patches.push({
223606
- file: gem.gemfile.file,
223607
- offset: gem.endOfNameOffset,
223608
- artifacts: [idx],
223609
- text: `, "${upgradeVersion}"`
223610
- });
223611
- hasLowerBound = true;
223612
- }
223613
- if (!hasLowerBound) {
223614
- patches.push({
223615
- file: gem.gemfile.file,
223616
- offset: gem.endOfNameOffset,
223617
- artifacts: [idx],
223618
- text: `, >= "${upgradeVersion}"`
223619
- });
223620
- }
223621
- }
223622
- return patches;
223623
- }
223624
223792
  /**
223625
223793
  * Create a patch to add a gem dependency at the bottom of a Gemfile
223626
223794
  */
@@ -223710,7 +223878,7 @@ var import_lodash7 = __toESM(require_lodash(), 1);
223710
223878
  var import_micromatch2 = __toESM(require_micromatch(), 1);
223711
223879
  import { existsSync as existsSync15 } from "fs";
223712
223880
  import { access as access4, cp as cp2, readdir as readdir4, stat as stat3 } from "fs/promises";
223713
- import { basename as basename10, join as join13, relative as relative12, resolve as resolve32 } from "path";
223881
+ import { basename as basename6, join as join13, relative as relative14, resolve as resolve32 } from "path";
223714
223882
  var { uniq: uniq2 } = import_lodash7.default;
223715
223883
  var { isMatch: isMatch2 } = import_micromatch2.default;
223716
223884
  function* parents2(dir) {
@@ -223723,7 +223891,7 @@ function* parents2(dir) {
223723
223891
  }
223724
223892
  function findParent2(dir, predicate, wholePath) {
223725
223893
  for (const parent2 of parents2(dir))
223726
- if (predicate(wholePath ? parent2 : basename10(parent2)))
223894
+ if (predicate(wholePath ? parent2 : basename6(parent2)))
223727
223895
  return parent2;
223728
223896
  }
223729
223897
 
@@ -224601,9 +224769,9 @@ async function findReachabilityAnalyzersDockerImage(ecosystem) {
224601
224769
  // ../other-modules-communicator/src/other-modules-communicator.ts
224602
224770
  var import_lodash12 = __toESM(require_lodash(), 1);
224603
224771
  import { rmSync } from "fs";
224604
- import { mkdir, readFile as readFile29, writeFile as writeFile9 } from "fs/promises";
224772
+ import { mkdir, readFile as readFile29, writeFile as writeFile11 } from "fs/promises";
224605
224773
  import { platform } from "os";
224606
- import { join as join20, posix as posix2, relative as relative13, sep as sep3 } from "path";
224774
+ import { join as join20, posix as posix2, relative as relative15, sep as sep3 } from "path";
224607
224775
 
224608
224776
  // ../utils/src/tmp-file.ts
224609
224777
  import { rm, mkdtemp } from "fs/promises";
@@ -224921,7 +225089,7 @@ var OtherModulesCommunicator = class {
224921
225089
  }
224922
225090
  if (cmd === "getWorkspacePathsMultipleSubprojects")
224923
225091
  return `${_cmdStr()}: (${packageManagerName}) ${abbreviateList(subprojects, 10)}`;
224924
- return `${_cmdStr()}: (${packageManagerName}) ${relative13(this.rootWorkingDir, subprojectPath) || "."}`;
225092
+ return `${_cmdStr()}: (${packageManagerName}) ${relative15(this.rootWorkingDir, subprojectPath) || "."}`;
224925
225093
  }
224926
225094
  getSpinnerTextForReachabilityAnalyzerCommand(cmd, ecosystem, subprojectPath, workspacePath) {
224927
225095
  function _cmdStr() {
@@ -224934,10 +225102,10 @@ var OtherModulesCommunicator = class {
224934
225102
  return "Running reachability analysis on package registry package";
224935
225103
  }
224936
225104
  }
224937
- return `${_cmdStr()}: (${ecosystem}) ${relative13(this.rootWorkingDir, join20(subprojectPath, workspacePath)) || "."}`;
225105
+ return `${_cmdStr()}: (${ecosystem}) ${relative15(this.rootWorkingDir, join20(subprojectPath, workspacePath)) || "."}`;
224938
225106
  }
224939
225107
  getProjectPath(subprojectPath) {
224940
- return this.options.runWithoutDocker ? subprojectPath : posix2.resolve("/project", relative13(this.rootWorkingDir, subprojectPath).replaceAll(sep3, posix2.sep));
225108
+ return this.options.runWithoutDocker ? subprojectPath : posix2.resolve("/project", relative15(this.rootWorkingDir, subprojectPath).replaceAll(sep3, posix2.sep));
224941
225109
  }
224942
225110
  // options shared between package-management and reachability-analyzers
224943
225111
  commonOptions = once7(
@@ -225077,7 +225245,7 @@ var OtherModulesCommunicator = class {
225077
225245
  "getWorkspacePathsMultipleSubprojects",
225078
225246
  packageManagerName,
225079
225247
  this.rootWorkingDir,
225080
- subprojectPaths.map((subprojectPath) => relative13(this.rootWorkingDir, subprojectPath) || ".")
225248
+ subprojectPaths.map((subprojectPath) => relative15(this.rootWorkingDir, subprojectPath) || ".")
225081
225249
  );
225082
225250
  }
225083
225251
  async getProvidedArgsForSubproject(subprojectPath, providedOptions) {
@@ -225087,7 +225255,7 @@ var OtherModulesCommunicator = class {
225087
225255
  const providerFileName = "provider.json";
225088
225256
  const providerFileThisProcess = join20(tmpDir, providerFileName);
225089
225257
  const providerFileOtherProcess = this.options.runWithoutDocker ? providerFileThisProcess : posix2.join(TMP_DIR_IN_DOCKER, providerFileName);
225090
- await writeFile9(providerFileThisProcess, JSON.stringify(providedOptions.provider));
225258
+ await writeFile11(providerFileThisProcess, JSON.stringify(providedOptions.provider));
225091
225259
  return ["--provider", providerFileOtherProcess];
225092
225260
  } else {
225093
225261
  return ["--as-provider"];
@@ -225131,7 +225299,7 @@ var OtherModulesCommunicator = class {
225131
225299
  const inputFileName = `${v4_default()}-runReachabilityAnalysis-input.json`;
225132
225300
  const inputFileThisProcess = join20(tmpDir, inputFileName);
225133
225301
  const inputFileOtherProcess = this.options.runWithoutDocker ? inputFileThisProcess : posix2.join(TMP_DIR_IN_DOCKER, inputFileName);
225134
- await writeFile9(
225302
+ await writeFile11(
225135
225303
  inputFileThisProcess,
225136
225304
  JSON.stringify({
225137
225305
  workspaceData,
@@ -225207,7 +225375,7 @@ function abbreviateList(items, maxItems) {
225207
225375
  import { resolve as resolve35 } from "path";
225208
225376
 
225209
225377
  // ../utils/src/dashboard-api/coana-api.ts
225210
- import { writeFile as writeFile10 } from "fs/promises";
225378
+ import { writeFile as writeFile12 } from "fs/promises";
225211
225379
  var import_artifact = __toESM(require_artifact_client2(), 1);
225212
225380
  var coanaAPI = process.env.PUBLIC_API_URL ?? "https://app.coana.tech/api/v1";
225213
225381
  var axiosClient2 = getAxiosClient();
@@ -225337,7 +225505,7 @@ async function sendToDashboard(report, writeReportToFile, reportId, apiKey) {
225337
225505
  try {
225338
225506
  if (writeReportToFile) {
225339
225507
  logger.info("Writing report to dashboard-report.json");
225340
- await writeFile10("dashboard-report.json", JSON.stringify(report, null, 2));
225508
+ await writeFile12("dashboard-report.json", JSON.stringify(report, null, 2));
225341
225509
  if (process.env.GITHUB_ACTIONS === "true") {
225342
225510
  logger.info("uploading dashboard-report.json as an artifact");
225343
225511
  (0, import_artifact.create)().uploadArtifact("dashboard-report", ["dashboard-report.json"], process.cwd());
@@ -226410,15 +226578,15 @@ function getVulnerabilitiesFromReport(report) {
226410
226578
  var import_packageurl_js = __toESM(require_packageurl_js(), 1);
226411
226579
 
226412
226580
  // dist/cli-upgrade-purl.js
226413
- import { dirname as dirname17, join as join23, relative as relative16, resolve as resolve38 } from "node:path";
226581
+ import { join as join23, relative as relative18, resolve as resolve38 } from "node:path";
226414
226582
 
226415
226583
  // ../project-management/src/project-management/project-manager.ts
226416
- import { relative as relative15, resolve as resolve37 } from "path";
226584
+ import { relative as relative17, resolve as resolve37 } from "path";
226417
226585
 
226418
226586
  // ../project-management/src/project-management/ecosystem-management/ecosystem-manager.ts
226419
226587
  var import_micromatch3 = __toESM(require_micromatch2(), 1);
226420
226588
  import { readdir as readdir6 } from "fs/promises";
226421
- import { join as join22, relative as relative14, resolve as resolve36 } from "path";
226589
+ import { join as join22, relative as relative16, resolve as resolve36 } from "path";
226422
226590
 
226423
226591
  // ../project-management/src/project-management/ecosystem-management/ecosystem-specs.ts
226424
226592
  import { existsSync as existsSync19 } from "fs";
@@ -226556,7 +226724,7 @@ var EcosystemManager = class _EcosystemManager {
226556
226724
  const resolvedProjectDir = resolve36(mainProjectDir, relativeProjectDir);
226557
226725
  if (config3.includeDirs.length > 0)
226558
226726
  workspacePaths = workspacePaths.filter(
226559
- (workspacePath) => isMatch3(relative14(mainProjectDir, join22(resolvedProjectDir, workspacePath)), config3.includeDirs)
226727
+ (workspacePath) => isMatch3(relative16(mainProjectDir, join22(resolvedProjectDir, workspacePath)), config3.includeDirs)
226560
226728
  );
226561
226729
  workspacePaths.filter((workspacePath) => workspacePath !== ".").forEach((workspacePath) => projectDirsAlreadyCovered.push(resolve36(resolvedProjectDir, workspacePath)));
226562
226730
  if (workspacePaths.length > 0)
@@ -226597,7 +226765,7 @@ var EcosystemManager = class _EcosystemManager {
226597
226765
  return typeof packageManagerNameProvider === "function" ? await packageManagerNameProvider(projectDir) : packageManagerNameProvider;
226598
226766
  } catch (e) {
226599
226767
  if (e instanceof InvalidProjectFileError) {
226600
- const projectDirRelative = relative14(mainProjectDir, projectDir) || ".";
226768
+ const projectDirRelative = relative16(mainProjectDir, projectDir) || ".";
226601
226769
  logger.error(
226602
226770
  `Invalid ${e.fileName} file in ${projectDirRelative}. If the project is intentionally invalid, and you want Coana to skip it in the scan, then add "--exclude-dirs ${projectDirRelative}" to the Coana command.`
226603
226771
  );
@@ -226689,7 +226857,7 @@ function shouldIgnoreDir(dir) {
226689
226857
  return dirsToIgnore.includes(dir);
226690
226858
  }
226691
226859
  function shouldIgnoreDueToExcludeDirsOrChangedFiles({ mainProjectDir, excludeDirs, changedFiles }, fullPath) {
226692
- const relativeToProjectDir = relative14(mainProjectDir, fullPath) || ".";
226860
+ const relativeToProjectDir = relative16(mainProjectDir, fullPath) || ".";
226693
226861
  return !!(isMatch3(relativeToProjectDir, excludeDirs) || changedFiles && !changedFiles.some((changedFile) => changedFile.startsWith(relativeToProjectDir)));
226694
226862
  }
226695
226863
 
@@ -226737,7 +226905,7 @@ var ProjectManager = class _ProjectManager {
226737
226905
  if (subprojects.length === 0) return void 0;
226738
226906
  return ` ${ecosystem}:
226739
226907
  ${subprojects.map(
226740
- ({ subprojectPath, workspacePaths }) => ` ${relative15(this.projectDir, subprojectPath) || ". (Root)"}${workspacePaths.length > 1 || workspacePaths[0] !== "." ? ` (${workspacePaths.length} ${ecosystem === "MAVEN" ? "modules" : "workspaces"})` : ""}`
226908
+ ({ subprojectPath, workspacePaths }) => ` ${relative17(this.projectDir, subprojectPath) || ". (Root)"}${workspacePaths.length > 1 || workspacePaths[0] !== "." ? ` (${workspacePaths.length} ${ecosystem === "MAVEN" ? "modules" : "workspaces"})` : ""}`
226741
226909
  ).join("\n")}`;
226742
226910
  }).filter((line) => line).join("\n");
226743
226911
  const detailsString = Object.entries(this.ecosystemToEcosystemManager).map(([ecosystem, manager]) => {
@@ -226745,7 +226913,7 @@ ${subprojects.map(
226745
226913
  if (subprojects.length === 0) return void 0;
226746
226914
  const subprojectsString = subprojects.map(({ subprojectPath, workspacePaths, packageManagerName }) => {
226747
226915
  if (workspacePaths.length === 1 && workspacePaths[0] === ".") return void 0;
226748
- return ` ${relative15(this.projectDir, subprojectPath) || ". (Root)"}
226916
+ return ` ${relative17(this.projectDir, subprojectPath) || ". (Root)"}
226749
226917
  ${workspacePaths.map((ws) => ` ${ws === "." ? ". (Root)" : ws} - ${packageManagerName}`).join("\n")}`;
226750
226918
  }).filter((line) => line).join("\n");
226751
226919
  if (!subprojectsString) return void 0;
@@ -226840,7 +227008,7 @@ function assertDefined(value2) {
226840
227008
  }
226841
227009
 
226842
227010
  // dist/cli-upgrade-purl.js
226843
- var import_picomatch7 = __toESM(require_picomatch2(), 1);
227011
+ var import_picomatch9 = __toESM(require_picomatch2(), 1);
226844
227012
  var ECOSYSTEMS_WITH_SOCKET_UPGRADES = ["NPM", "MAVEN", "NUGET", "GO", "RUST", "PIP", "RUBYGEMS"];
226845
227013
  async function upgradePurl(rootDir, upgrades, artifacts, options, logFile, cliFixRunId) {
226846
227014
  if (options.rangeStyle && options.rangeStyle !== "pin") {
@@ -226885,16 +227053,6 @@ ${Array.from(upgrades).map(([idx, upgradeVersion]) => ` ${prettyPrintPurlUpgrade
226885
227053
  }
226886
227054
  ecosystemToSocketArtifactUpgrades.get(ecosystem).set(idx, upgradeVersion);
226887
227055
  }
226888
- const includeMatchers2 = options.include?.map((s6) => (0, import_picomatch7.default)(s6 || "."));
226889
- const excludeMatchers2 = options.exclude?.map((s6) => (0, import_picomatch7.default)(s6 || "."));
226890
- for (const artifact of artifacts.filter((a4) => a4.type === "npm" /* NPM */)) {
226891
- artifact.manifestFiles = artifact.manifestFiles?.filter((mf) => {
226892
- const dir = dirname17(mf.file) || ".";
226893
- const shouldInclude = !includeMatchers2 || includeMatchers2.some((matcher) => matcher(dir));
226894
- const shouldExclude = excludeMatchers2?.some((matcher) => matcher(dir)) ?? false;
226895
- return shouldInclude && !shouldExclude;
226896
- });
226897
- }
226898
227056
  let anyErrors = false;
226899
227057
  for (const [ecosystem, upgrades2] of ecosystemToSocketArtifactUpgrades) {
226900
227058
  if (options.rangeStyle && !["NPM", "MAVEN", "NUGET", "RUST", "PIP", "RUBYGEMS"].includes(ecosystem)) {
@@ -226908,7 +227066,7 @@ ${Array.from(upgrades).map(([idx, upgradeVersion]) => ` ${prettyPrintPurlUpgrade
226908
227066
  warn: "\u26A0\uFE0F",
226909
227067
  error: "\u274C"
226910
227068
  };
226911
- logger.info(`${statusIcons[update2.status]} ${update2.message} \u2500 ${relative16(rootDir, resolve38(rootDir, update2.file))}`);
227069
+ logger.info(`${statusIcons[update2.status]} ${update2.message} \u2500 ${relative18(rootDir, resolve38(rootDir, update2.file))}`);
226912
227070
  update2.artifacts.forEach((idx, i7) => {
226913
227071
  logger.info(`${" ".repeat(3)}${i7 === update2.artifacts.length - 1 ? "\u2514\u2500" : "\u251C\u2500"} ${prettyPrintSocketFactArtifactUpgrade(artifacts[idx], upgrades2.get(idx))}`);
226914
227072
  });
@@ -226925,6 +227083,10 @@ ${Array.from(upgrades).map(([idx, upgradeVersion]) => ` ${prettyPrintPurlUpgrade
226925
227083
  upgrades: upgrades2,
226926
227084
  artifacts,
226927
227085
  rangeStyle: options.rangeStyle,
227086
+ // Note! picomatch
227087
+ wsFilter: (0, import_picomatch9.default)(options.include?.map((s6) => s6 || ".") ?? [".", "**"], {
227088
+ ignore: options.exclude?.map((s6) => s6 || ".")
227089
+ }),
226928
227090
  statusUpdater
226929
227091
  };
226930
227092
  await applySocketUpgrades(ecosystem, rootDir, ctxt);
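For reference, picomatch also accepts an array of globs together with an ignore option, producing a single matcher that returns true only for inputs that match an include pattern and no ignore pattern. A minimal sketch with hypothetical workspace directories, assuming the published picomatch package:

    import picomatch from "picomatch";

    const wsFilter = picomatch([".", "packages/**"], { ignore: ["packages/legacy/**"] });
    wsFilter(".");                   // true
    wsFilter("packages/app");        // true
    wsFilter("packages/legacy/app"); // false, excluded by the ignore pattern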
@@ -226957,17 +227119,13 @@ ${Array.from(upgrades).map(([idx, upgradeVersion]) => ` ${prettyPrintPurlUpgrade
226957
227119
  if (supportedSubprojects.length === 0) {
226958
227120
  throw new Error(`No supported projects found in ${rootDir}.`);
226959
227121
  }
226960
- const includeMatchers = options.include?.map((s6) => (0, import_picomatch7.default)(s6 || "."));
226961
- const excludeMatchers = options.exclude?.map((s6) => (0, import_picomatch7.default)(s6 || "."));
227122
+ const wsFilter = (0, import_picomatch9.default)(options.include?.map((s6) => s6 || ".") ?? [".", "**"], {
227123
+ ignore: options.exclude?.map((s6) => s6 || ".")
227124
+ });
226962
227125
  const subprojectPromiseQueue = new PromiseQueue(Number(options.concurrency));
226963
227126
  supportedSubprojects.forEach((subproject) => {
226964
227127
  subprojectPromiseQueue.enqueueTask(async () => {
226965
- const workspacePathsMatchingGlob = subproject.workspacePaths.filter((wsPath) => {
226966
- const relPath = relative16(rootDir, resolve38(rootDir, subproject.subprojectPath, wsPath)) || ".";
226967
- const shouldInclude = !includeMatchers || includeMatchers.some((matcher) => matcher(relPath));
226968
- const shouldExclude = excludeMatchers?.some((matcher) => matcher(relPath)) ?? false;
226969
- return shouldInclude && !shouldExclude;
226970
- });
227128
+ const workspacePathsMatchingGlob = subproject.workspacePaths.filter((wsPath) => wsFilter(relative18(rootDir, resolve38(rootDir, subproject.subprojectPath, wsPath)) || "."));
226971
227129
  if (workspacePathsMatchingGlob.length === 0)
226972
227130
  return;
226973
227131
  const filterDescription = options.include !== void 0 || options.exclude !== void 0 ? `matching filters ${options.include ? `include: [${options.include.join(", ")}]` : ""}${options.include && options.exclude ? " " : ""}${options.exclude ? `exclude: [${options.exclude.join(", ")}]` : ""}` : "";
@@ -227009,7 +227167,7 @@ ${workspacePathsMatchingGlob.map((wsPath) => ` ${wsPath}`).join("\n")}`);
227009
227167
  logger.info(`No dependencies matching upgrade specs found for subproject ${subproject.subprojectPath}`);
227010
227168
  return;
227011
227169
  }
227012
- await applySecurityFixes(subproject.packageManagerName, rootDir, relative16(rootDir, subproject.subprojectPath) || ".", otherModulesCommunicator, workspaceToFixes, fixingData, signalFixApplied);
227170
+ await applySecurityFixes(subproject.packageManagerName, rootDir, relative18(rootDir, subproject.subprojectPath) || ".", otherModulesCommunicator, workspaceToFixes, fixingData, signalFixApplied);
227013
227171
  });
227014
227172
  });
227015
227173
  await subprojectPromiseQueue.onIdle();
@@ -227024,22 +227182,22 @@ ${vulnerabilityFixes.map((fix) => ` ${fix.dependencyName} from ${fix.currentVers
227024
227182
  };
227025
227183
 
227026
227184
  // dist/internal/socket-mode-helpers-socket-dependency-trees.js
227027
- var import_picomatch8 = __toESM(require_picomatch2(), 1);
227028
- import { basename as basename11, dirname as dirname18, join as join24, sep as sep5 } from "path";
227185
+ var import_picomatch10 = __toESM(require_picomatch2(), 1);
227186
+ import { basename as basename7, dirname as dirname17, join as join24, sep as sep5 } from "path";
227029
227187
  var REQUIREMENTS_FILES_SEARCH_DEPTH2 = 3;
227030
227188
  function inferWorkspaceFromManifestPath(ecosystem, manifestPath, properPythonProjects) {
227031
227189
  switch (ecosystem) {
227032
227190
  case "NPM": {
227033
- const base = basename11(manifestPath);
227034
- const dir = dirname18(manifestPath);
227191
+ const base = basename7(manifestPath);
227192
+ const dir = dirname17(manifestPath);
227035
227193
  return base === "package.json" ? dir || "." : void 0;
227036
227194
  }
227037
227195
  case "MAVEN": {
227038
227196
  return ".";
227039
227197
  }
227040
227198
  case "PIP": {
227041
- const base = basename11(manifestPath);
227042
- const dir = dirname18(manifestPath);
227199
+ const base = basename7(manifestPath);
227200
+ const dir = dirname17(manifestPath);
227043
227201
  const workspaceDir = dir === "" ? "." : dir;
227044
227202
  if (properPythonProjects.includes(workspaceDir)) {
227045
227203
  return workspaceDir;
@@ -227062,15 +227220,15 @@ function inferWorkspaceFromManifestPath(ecosystem, manifestPath, properPythonPro
227062
227220
  return ".";
227063
227221
  }
227064
227222
  case "RUST": {
227065
- return dirname18(manifestPath) || ".";
227223
+ return dirname17(manifestPath) || ".";
227066
227224
  }
227067
227225
  case "GO": {
227068
- const base = basename11(manifestPath);
227069
- const dir = dirname18(manifestPath);
227226
+ const base = basename7(manifestPath);
227227
+ const dir = dirname17(manifestPath);
227070
227228
  return base === "go.mod" ? dir || "." : void 0;
227071
227229
  }
227072
227230
  case "RUBYGEMS": {
227073
- return dirname18(manifestPath) || ".";
227231
+ return dirname17(manifestPath) || ".";
227074
227232
  }
227075
227233
  default: {
227076
227234
  return ".";
@@ -227080,9 +227238,9 @@ function inferWorkspaceFromManifestPath(ecosystem, manifestPath, properPythonPro
227080
227238
  function inferProjectFromManifestPath(ecosystem, manifestPath) {
227081
227239
  switch (ecosystem) {
227082
227240
  case "NPM": {
227083
- const filename = basename11(manifestPath);
227241
+ const filename = basename7(manifestPath);
227084
227242
  if (["package-lock.json", "pnpm-lock.yaml", "pnpm-lock.yml", "yarn.lock"].includes(filename)) {
227085
- return dirname18(manifestPath) || ".";
227243
+ return dirname17(manifestPath) || ".";
227086
227244
  }
227087
227245
  return void 0;
227088
227246
  }
@@ -227147,8 +227305,8 @@ async function fetchArtifactsFromSocket(rootWorkingDirectory, manifestsTarHash,
227147
227305
  ];
227148
227306
  const allFiles = await getFilesRelative(rootWorkingDirectory, venvExcludes);
227149
227307
  for (const file of allFiles) {
227150
- const base = basename11(file);
227151
- const workspaceDir = dirname18(file) || ".";
227308
+ const base = basename7(file);
227309
+ const workspaceDir = dirname17(file) || ".";
227152
227310
  if (base === "pyproject.toml" || base === "setup.py" && await isSetupPySetuptools(join24(rootWorkingDirectory, file))) {
227153
227311
  if (!properPythonProjects.includes(workspaceDir)) {
227154
227312
  properPythonProjects.push(workspaceDir);
@@ -227172,11 +227330,11 @@ async function fetchArtifactsFromSocket(rootWorkingDirectory, manifestsTarHash,
227172
227330
  const manifestFiles = [];
227173
227331
  switch (ecosystem) {
227174
227332
  case "MAVEN": {
227175
- manifestFiles.push(...(await getFilesRelative(rootWorkingDirectory)).filter((file) => (0, import_picomatch8.default)("{{*-*.,}pom{.xml,},gradle.lockfile}")(basename11(file))));
227333
+ manifestFiles.push(...(await getFilesRelative(rootWorkingDirectory)).filter((file) => (0, import_picomatch10.default)("{{*-*.,}pom{.xml,},gradle.lockfile}")(basename7(file))));
227176
227334
  break;
227177
227335
  }
227178
227336
  case "NUGET": {
227179
- manifestFiles.push(...(await getFilesRelative(rootWorkingDirectory)).filter((file) => (0, import_picomatch8.default)("{*.csproj,packages.lock.json}")(basename11(file))));
227337
+ manifestFiles.push(...(await getFilesRelative(rootWorkingDirectory)).filter((file) => (0, import_picomatch10.default)("{*.csproj,packages.lock.json}")(basename7(file))));
227180
227338
  break;
227181
227339
  }
227182
227340
  case "PIP": {
@@ -227495,7 +227653,7 @@ function prettyApplyFixesTo(applyFixesToOption) {
227495
227653
 
227496
227654
  // dist/cli-core.js
227497
227655
  import { writeFileSync as writeFileSync3 } from "fs";
227498
- import { mkdir as mkdir2, writeFile as writeFile12 } from "fs/promises";
227656
+ import { mkdir as mkdir2, writeFile as writeFile14 } from "fs/promises";
227499
227657
 
227500
227658
  // ../../node_modules/.pnpm/kleur@4.1.5/node_modules/kleur/index.mjs
227501
227659
  var FORCE_COLOR;
@@ -227600,7 +227758,7 @@ var kleur_default = $;
227600
227758
  // dist/cli-core.js
227601
227759
  var import_lodash15 = __toESM(require_lodash(), 1);
227602
227760
  import os from "os";
227603
- import { join as join26, relative as relative17, resolve as resolve40 } from "path";
227761
+ import { join as join26, relative as relative19, resolve as resolve40 } from "path";
227604
227762
 
227605
227763
  // ../utils/src/dashboard-api/shared-api.ts
227606
227764
  var DashboardAPI = class {
@@ -227946,7 +228104,7 @@ var DEFAULT_REPORT_FILENAME_BASE = "coana-report";
227946
228104
  // dist/internal/exclude-dirs-from-configuration-files.js
227947
228105
  import { existsSync as existsSync20 } from "fs";
227948
228106
  import { readFile as readFile31 } from "fs/promises";
227949
- import { basename as basename12, resolve as resolve39 } from "path";
228107
+ import { basename as basename8, resolve as resolve39 } from "path";
227950
228108
  var import_yaml2 = __toESM(require_dist12(), 1);
227951
228109
  async function inferExcludeDirsFromConfigurationFiles(rootWorkingDir) {
227952
228110
  const socketYmlConfigFile = resolve39(rootWorkingDir, "socket.yml");
@@ -227966,7 +228124,7 @@ async function inferExcludeDirsFromSocketConfig(socketConfigFile) {
227966
228124
  return void 0;
227967
228125
  if (ignorePaths.some((ignorePath) => ignorePath.includes("!")))
227968
228126
  return void 0;
227969
- logger.info(`Inferring paths to exclude based on Socket config file: ${basename12(socketConfigFile)}`);
228127
+ logger.info(`Inferring paths to exclude based on Socket config file: ${basename8(socketConfigFile)}`);
227970
228128
  return config3.projectIgnorePaths;
227971
228129
  } catch (e) {
227972
228130
  return void 0;
@@ -229902,10 +230060,10 @@ function compareDocumentPosition(nodeA, nodeB) {
229902
230060
  function uniqueSort(nodes) {
229903
230061
  nodes = nodes.filter((node, i7, arr) => !arr.includes(node, i7 + 1));
229904
230062
  nodes.sort((a4, b) => {
229905
- const relative18 = compareDocumentPosition(a4, b);
229906
- if (relative18 & DocumentPosition.PRECEDING) {
230063
+ const relative20 = compareDocumentPosition(a4, b);
230064
+ if (relative20 & DocumentPosition.PRECEDING) {
229907
230065
  return -1;
229908
- } else if (relative18 & DocumentPosition.FOLLOWING) {
230066
+ } else if (relative20 & DocumentPosition.FOLLOWING) {
229909
230067
  return 1;
229910
230068
  }
229911
230069
  return 0;
@@ -241768,7 +241926,7 @@ import { join as join25 } from "path";
241768
241926
 
241769
241927
  // ../utils/src/download-utils.ts
241770
241928
  import { existsSync as existsSync21 } from "fs";
241771
- import { writeFile as writeFile11 } from "fs/promises";
241929
+ import { writeFile as writeFile13 } from "fs/promises";
241772
241930
 
241773
241931
  // ../utils/src/maven-utils.ts
241774
241932
  var { memoize: memoize3 } = import_lodash14.default;
@@ -243135,7 +243293,7 @@ async function onlineScan(dependencyTree, apiKey, timeout) {
243135
243293
  }
243136
243294
 
243137
243295
  // dist/version.js
243138
- var version2 = "14.12.66";
243296
+ var version2 = "14.12.67";
243139
243297
 
243140
243298
  // dist/cli-core.js
243141
243299
  var { mapValues, omit, partition, pick } = import_lodash15.default;
@@ -243340,7 +243498,7 @@ var CliCore = class {
243340
243498
  await this.shareLogIfAnalysisError(vulnsWithResults);
243341
243499
  const socketReport = toSocketFactsSocketDependencyTree(artifacts, vulnsWithResults, this.reportId);
243342
243500
  const outputFile = resolve40(this.options.socketMode);
243343
- await writeFile12(outputFile, JSON.stringify(socketReport, null, 2));
243501
+ await writeFile14(outputFile, JSON.stringify(socketReport, null, 2));
243344
243502
  logger.info(kleur_default.green(`Socket report written to: ${outputFile}`));
243345
243503
  }
243346
243504
  async shareLogIfAnalysisError(vulns) {
@@ -243368,7 +243526,7 @@ var CliCore = class {
243368
243526
  }
243369
243527
  const socketReport = toSocketFacts(report, this.reportDependencyTrees, subPjToWsPathToDirectDependencies);
243370
243528
  const outputFile = resolve40(this.options.socketMode);
243371
- await writeFile12(outputFile, JSON.stringify(socketReport, null, 2));
243529
+ await writeFile14(outputFile, JSON.stringify(socketReport, null, 2));
243372
243530
  logger.info(kleur_default.green(`Socket report written to: ${outputFile}`));
243373
243531
  return;
243374
243532
  }
@@ -243407,7 +243565,7 @@ var CliCore = class {
243407
243565
  const { reachabilitySupport, traditionalScaSupport, noSupport } = manager.getSubprojectsWithWorkspacePaths();
243408
243566
  await this.dashboardAPI.registerSubprojects([...reachabilitySupport, ...traditionalScaSupport, ...noSupport].map((sp) => ({
243409
243567
  ...sp,
243410
- subprojectPath: relative17(this.rootWorkingDirectory, sp.subprojectPath) || "."
243568
+ subprojectPath: relative19(this.rootWorkingDirectory, sp.subprojectPath) || "."
243411
243569
  })), this.reportId, this.apiKey);
243412
243570
  for (const unsupported of noSupport)
243413
243571
  logger.warn(unsupported.unsupportedMsg);
@@ -243436,7 +243594,7 @@ var CliCore = class {
243436
243594
  await this.spinner.succeed();
243437
243595
  } catch (error) {
243438
243596
  if (this.options.ignoreFailingWorkspaces) {
243439
- const relativeSubprojectPath = relative17(this.rootWorkingDirectory, subprojectAndWsPath.subprojectPath) || ".";
243597
+ const relativeSubprojectPath = relative19(this.rootWorkingDirectory, subprojectAndWsPath.subprojectPath) || ".";
243440
243598
  this.failedSubprojects.push({
243441
243599
  subproject: relativeSubprojectPath,
243442
243600
  error: error.message || "Unknown error"
@@ -243495,7 +243653,7 @@ Subproject: ${subproject}`);
243495
243653
  }
243496
243654
  async updateSpinnerTextOnNewSubproject(subprojectAndWsPath, numberSubprojects, index2) {
243497
243655
  this.spinner.start();
243498
- const relativeSubprojectPath = relative17(this.rootWorkingDirectory, subprojectAndWsPath.subprojectPath) || ".";
243656
+ const relativeSubprojectPath = relative19(this.rootWorkingDirectory, subprojectAndWsPath.subprojectPath) || ".";
243499
243657
  await this.spinner.setText(numberSubprojects > 1 ? `Processing subproject ${relativeSubprojectPath} (${index2 + 1}/${numberSubprojects})${+this.options.concurrency > 1 ? `. May process up to ${+this.options.concurrency - 1} other workspaces in parallel` : ""}` : `Processing ${relativeSubprojectPath}`);
243500
243658
  }
243501
243659
  async initialize() {
@@ -243573,7 +243731,7 @@ Subproject: ${subproject}`);
243573
243731
  return workspaceToAugmentedVulnerabilities[workspacePath] !== void 0;
243574
243732
  }).map((workspacePath) => {
243575
243733
  return {
243576
- subprojectPath: relative17(this.rootWorkingDirectory, subprojectPath) || ".",
243734
+ subprojectPath: relative19(this.rootWorkingDirectory, subprojectPath) || ".",
243577
243735
  workspacePath,
243578
243736
  directDependencies: projectInfo[workspacePath].dataForAnalysis.directDependenciesMap ?? {},
243579
243737
  vulnerabilities: workspaceToAugmentedVulnerabilities[workspacePath],
@@ -243703,7 +243861,7 @@ Subproject: ${subproject}`);
243703
243861
  async sendProgress(type, isStartEvent, subprojectPath, workspacePath) {
243704
243862
  await this.dashboardAPI.registerCLIProgress({
243705
243863
  type,
243706
- ...subprojectPath ? { subprojectPath: relative17(this.rootWorkingDirectory, subprojectPath) || "." } : {},
243864
+ ...subprojectPath ? { subprojectPath: relative19(this.rootWorkingDirectory, subprojectPath) || "." } : {},
243707
243865
  ...workspacePath ? { workspacePath } : {}
243708
243866
  }, isStartEvent, this.reportId, this.apiKey);
243709
243867
  }
@@ -243759,7 +243917,7 @@ Subproject: ${subproject}`);
243759
243917
  dependencyTree: workspaceToPlainDependencyTree[workspacePath],
243760
243918
  ecosystem: workspaceToPlainDependencyTree[workspacePath].ecosystem ?? "NPM",
243761
243919
  workspacePath,
243762
- subprojectPath: relative17(rootWorkingDirectory, subprojectPath) || "."
243920
+ subprojectPath: relative19(rootWorkingDirectory, subprojectPath) || "."
243763
243921
  }));
243764
243922
  if (this.options.socketMode) {
243765
243923
  this.reportDependencyTrees = workspacePaths.map((workspacePath) => ({
@@ -243767,7 +243925,7 @@ Subproject: ${subproject}`);
243767
243925
  dependencyTree: projectInfo[workspacePath].dataForAnalysis.data.dependencyTree,
243768
243926
  ecosystem: projectInfo[workspacePath].dataForAnalysis.data.dependencyTree.ecosystem ?? "NPM",
243769
243927
  workspacePath,
243770
- subprojectPath: relative17(rootWorkingDirectory, subprojectPath) || "."
243928
+ subprojectPath: relative19(rootWorkingDirectory, subprojectPath) || "."
243771
243929
  }));
243772
243930
  }
243773
243931
  if (this.shareWithDashboard)
@@ -243783,7 +243941,7 @@ Subproject: ${subproject}`);
243783
243941
  } catch (e) {
243784
243942
  logger.error(`Scanning for vulnerabilities failed for subproject ${subprojectPath} in workspace ${workspacePath}`);
243785
243943
  if (this.options.ignoreFailingWorkspaces) {
243786
- const relativeSubprojectPath = relative17(this.rootWorkingDirectory, subprojectPath) || ".";
243944
+ const relativeSubprojectPath = relative19(this.rootWorkingDirectory, subprojectPath) || ".";
243787
243945
  this.failedWorkspaces.push({
243788
243946
  subproject: relativeSubprojectPath,
243789
243947
  workspace: workspacePath,
@@ -243802,7 +243960,7 @@ Subproject: ${subproject}`);
243802
243960
  }
243803
243961
  };
243804
243962
  function getRelativeSubprojectPath(subprojectPath, projectDir) {
243805
- return relative17(projectDir, subprojectPath) || ".";
243963
+ return relative19(projectDir, subprojectPath) || ".";
243806
243964
  }
243807
243965
  function getDependencyType(vulnChainDetails, codeAwareScanResults, directDependencies, reachability) {
243808
243966
  if (reachability === "UNREACHABLE" || reachability === "UNKNOWN") {
@@ -243884,8 +244042,8 @@ computeFixesAndUpgradePurlsCmd.name("compute-fixes-and-upgrade-purls").argument(
243884
244042
  const output = await computeFixesAndUpgradePurls(path2, optionsToUse, logFile);
243885
244043
  if (options.outputFile) {
243886
244044
  const outputFile = resolve41(options.outputFile);
243887
- await mkdir3(dirname19(outputFile), { recursive: true });
243888
- await writeFile13(outputFile, JSON.stringify(output, null, 2));
244045
+ await mkdir3(dirname18(outputFile), { recursive: true });
244046
+ await writeFile15(outputFile, JSON.stringify(output, null, 2));
243889
244047
  logger.info(`Result written to: ${outputFile}`);
243890
244048
  }
243891
244049
  await rm2(tmpDir, { recursive: true, force: true });