@vercel/ruby 1.3.74 → 1.3.76
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +1017 -44
- package/package.json +5 -6
package/dist/index.js
CHANGED
@@ -4420,6 +4420,979 @@ function patch (fs) {
 }
 
 
+/***/ }),
+
+/***/ 3016:
+/***/ ((module) => {
+
+"use strict";
+
+
+module.exports = clone
+
+var getPrototypeOf = Object.getPrototypeOf || function (obj) {
+  return obj.__proto__
+}
+
+function clone (obj) {
+  if (obj === null || typeof obj !== 'object')
+    return obj
+
+  if (obj instanceof Object)
+    var copy = { __proto__: getPrototypeOf(obj) }
+  else
+    var copy = Object.create(null)
+
+  Object.getOwnPropertyNames(obj).forEach(function (key) {
+    Object.defineProperty(copy, key, Object.getOwnPropertyDescriptor(obj, key))
+  })
+
+  return copy
+}
+
+
+/***/ }),
+
+/***/ 7156:
+/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
+
+var fs = __webpack_require__(5747)
+var polyfills = __webpack_require__(4692)
+var legacy = __webpack_require__(6465)
+var clone = __webpack_require__(3016)
+
+var util = __webpack_require__(1669)
+
+/* istanbul ignore next - node 0.x polyfill */
+var gracefulQueue
+var previousSymbol
+
+/* istanbul ignore else - node 0.x polyfill */
+if (typeof Symbol === 'function' && typeof Symbol.for === 'function') {
+  gracefulQueue = Symbol.for('graceful-fs.queue')
+  // This is used in testing by future versions
+  previousSymbol = Symbol.for('graceful-fs.previous')
+} else {
+  gracefulQueue = '___graceful-fs.queue'
+  previousSymbol = '___graceful-fs.previous'
+}
+
+function noop () {}
+
+function publishQueue(context, queue) {
+  Object.defineProperty(context, gracefulQueue, {
+    get: function() {
+      return queue
+    }
+  })
+}
+
+var debug = noop
+if (util.debuglog)
+  debug = util.debuglog('gfs4')
+else if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || ''))
+  debug = function() {
+    var m = util.format.apply(util, arguments)
+    m = 'GFS4: ' + m.split(/\n/).join('\nGFS4: ')
+    console.error(m)
+  }
+
+// Once time initialization
+if (!fs[gracefulQueue]) {
+  // This queue can be shared by multiple loaded instances
+  var queue = global[gracefulQueue] || []
+  publishQueue(fs, queue)
+
+  // Patch fs.close/closeSync to shared queue version, because we need
+  // to retry() whenever a close happens *anywhere* in the program.
+  // This is essential when multiple graceful-fs instances are
+  // in play at the same time.
+  fs.close = (function (fs$close) {
+    function close (fd, cb) {
+      return fs$close.call(fs, fd, function (err) {
+        // This function uses the graceful-fs shared queue
+        if (!err) {
+          resetQueue()
+        }
+
+        if (typeof cb === 'function')
+          cb.apply(this, arguments)
+      })
+    }
+
+    Object.defineProperty(close, previousSymbol, {
+      value: fs$close
+    })
+    return close
+  })(fs.close)
+
+  fs.closeSync = (function (fs$closeSync) {
+    function closeSync (fd) {
+      // This function uses the graceful-fs shared queue
+      fs$closeSync.apply(fs, arguments)
+      resetQueue()
+    }
+
+    Object.defineProperty(closeSync, previousSymbol, {
+      value: fs$closeSync
+    })
+    return closeSync
+  })(fs.closeSync)
+
+  if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) {
+    process.on('exit', function() {
+      debug(fs[gracefulQueue])
+      __webpack_require__(2357).equal(fs[gracefulQueue].length, 0)
+    })
+  }
+}
+
+if (!global[gracefulQueue]) {
+  publishQueue(global, fs[gracefulQueue]);
+}
+
+module.exports = patch(clone(fs))
+if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs.__patched) {
+  module.exports = patch(fs)
+  fs.__patched = true;
+}
+
+function patch (fs) {
+  // Everything that references the open() function needs to be in here
+  polyfills(fs)
+  fs.gracefulify = patch
+
+  fs.createReadStream = createReadStream
+  fs.createWriteStream = createWriteStream
+  var fs$readFile = fs.readFile
+  fs.readFile = readFile
+  function readFile (path, options, cb) {
+    if (typeof options === 'function')
+      cb = options, options = null
+
+    return go$readFile(path, options, cb)
+
+    function go$readFile (path, options, cb, startTime) {
+      return fs$readFile(path, options, function (err) {
+        if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
+          enqueue([go$readFile, [path, options, cb], err, startTime || Date.now(), Date.now()])
+        else {
+          if (typeof cb === 'function')
+            cb.apply(this, arguments)
+        }
+      })
+    }
+  }
+
+  var fs$writeFile = fs.writeFile
+  fs.writeFile = writeFile
+  function writeFile (path, data, options, cb) {
+    if (typeof options === 'function')
+      cb = options, options = null
+
+    return go$writeFile(path, data, options, cb)
+
+    function go$writeFile (path, data, options, cb, startTime) {
+      return fs$writeFile(path, data, options, function (err) {
+        if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
+          enqueue([go$writeFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()])
+        else {
+          if (typeof cb === 'function')
+            cb.apply(this, arguments)
+        }
+      })
+    }
+  }
+
+  var fs$appendFile = fs.appendFile
+  if (fs$appendFile)
+    fs.appendFile = appendFile
+  function appendFile (path, data, options, cb) {
+    if (typeof options === 'function')
+      cb = options, options = null
+
+    return go$appendFile(path, data, options, cb)
+
+    function go$appendFile (path, data, options, cb, startTime) {
+      return fs$appendFile(path, data, options, function (err) {
+        if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
+          enqueue([go$appendFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()])
+        else {
+          if (typeof cb === 'function')
+            cb.apply(this, arguments)
+        }
+      })
+    }
+  }
+
+  var fs$copyFile = fs.copyFile
+  if (fs$copyFile)
+    fs.copyFile = copyFile
+  function copyFile (src, dest, flags, cb) {
+    if (typeof flags === 'function') {
+      cb = flags
+      flags = 0
+    }
+    return go$copyFile(src, dest, flags, cb)
+
+    function go$copyFile (src, dest, flags, cb, startTime) {
+      return fs$copyFile(src, dest, flags, function (err) {
+        if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
+          enqueue([go$copyFile, [src, dest, flags, cb], err, startTime || Date.now(), Date.now()])
+        else {
+          if (typeof cb === 'function')
+            cb.apply(this, arguments)
+        }
+      })
+    }
+  }
+
+  var fs$readdir = fs.readdir
+  fs.readdir = readdir
+  var noReaddirOptionVersions = /^v[0-5]\./
+  function readdir (path, options, cb) {
+    if (typeof options === 'function')
+      cb = options, options = null
+
+    var go$readdir = noReaddirOptionVersions.test(process.version)
+      ? function go$readdir (path, options, cb, startTime) {
+        return fs$readdir(path, fs$readdirCallback(
+          path, options, cb, startTime
+        ))
+      }
+      : function go$readdir (path, options, cb, startTime) {
+        return fs$readdir(path, options, fs$readdirCallback(
+          path, options, cb, startTime
+        ))
+      }
+
+    return go$readdir(path, options, cb)
+
+    function fs$readdirCallback (path, options, cb, startTime) {
+      return function (err, files) {
+        if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
+          enqueue([
+            go$readdir,
+            [path, options, cb],
+            err,
+            startTime || Date.now(),
+            Date.now()
+          ])
+        else {
+          if (files && files.sort)
+            files.sort()
+
+          if (typeof cb === 'function')
+            cb.call(this, err, files)
+        }
+      }
+    }
+  }
+
+  if (process.version.substr(0, 4) === 'v0.8') {
+    var legStreams = legacy(fs)
+    ReadStream = legStreams.ReadStream
+    WriteStream = legStreams.WriteStream
+  }
+
+  var fs$ReadStream = fs.ReadStream
+  if (fs$ReadStream) {
+    ReadStream.prototype = Object.create(fs$ReadStream.prototype)
+    ReadStream.prototype.open = ReadStream$open
+  }
+
+  var fs$WriteStream = fs.WriteStream
+  if (fs$WriteStream) {
+    WriteStream.prototype = Object.create(fs$WriteStream.prototype)
+    WriteStream.prototype.open = WriteStream$open
+  }
+
+  Object.defineProperty(fs, 'ReadStream', {
+    get: function () {
+      return ReadStream
+    },
+    set: function (val) {
+      ReadStream = val
+    },
+    enumerable: true,
+    configurable: true
+  })
+  Object.defineProperty(fs, 'WriteStream', {
+    get: function () {
+      return WriteStream
+    },
+    set: function (val) {
+      WriteStream = val
+    },
+    enumerable: true,
+    configurable: true
+  })
+
+  // legacy names
+  var FileReadStream = ReadStream
+  Object.defineProperty(fs, 'FileReadStream', {
+    get: function () {
+      return FileReadStream
+    },
+    set: function (val) {
+      FileReadStream = val
+    },
+    enumerable: true,
+    configurable: true
+  })
+  var FileWriteStream = WriteStream
+  Object.defineProperty(fs, 'FileWriteStream', {
+    get: function () {
+      return FileWriteStream
+    },
+    set: function (val) {
+      FileWriteStream = val
+    },
+    enumerable: true,
+    configurable: true
+  })
+
+  function ReadStream (path, options) {
+    if (this instanceof ReadStream)
+      return fs$ReadStream.apply(this, arguments), this
+    else
+      return ReadStream.apply(Object.create(ReadStream.prototype), arguments)
+  }
+
+  function ReadStream$open () {
+    var that = this
+    open(that.path, that.flags, that.mode, function (err, fd) {
+      if (err) {
+        if (that.autoClose)
+          that.destroy()
+
+        that.emit('error', err)
+      } else {
+        that.fd = fd
+        that.emit('open', fd)
+        that.read()
+      }
+    })
+  }
+
+  function WriteStream (path, options) {
+    if (this instanceof WriteStream)
+      return fs$WriteStream.apply(this, arguments), this
+    else
+      return WriteStream.apply(Object.create(WriteStream.prototype), arguments)
+  }
+
+  function WriteStream$open () {
+    var that = this
+    open(that.path, that.flags, that.mode, function (err, fd) {
+      if (err) {
+        that.destroy()
+        that.emit('error', err)
+      } else {
+        that.fd = fd
+        that.emit('open', fd)
+      }
+    })
+  }
+
+  function createReadStream (path, options) {
+    return new fs.ReadStream(path, options)
+  }
+
+  function createWriteStream (path, options) {
+    return new fs.WriteStream(path, options)
+  }
+
+  var fs$open = fs.open
+  fs.open = open
+  function open (path, flags, mode, cb) {
+    if (typeof mode === 'function')
+      cb = mode, mode = null
+
+    return go$open(path, flags, mode, cb)
+
+    function go$open (path, flags, mode, cb, startTime) {
+      return fs$open(path, flags, mode, function (err, fd) {
+        if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
+          enqueue([go$open, [path, flags, mode, cb], err, startTime || Date.now(), Date.now()])
+        else {
+          if (typeof cb === 'function')
+            cb.apply(this, arguments)
+        }
+      })
+    }
+  }
+
+  return fs
+}
+
+function enqueue (elem) {
+  debug('ENQUEUE', elem[0].name, elem[1])
+  fs[gracefulQueue].push(elem)
+  retry()
+}
+
+// keep track of the timeout between retry() calls
+var retryTimer
+
+// reset the startTime and lastTime to now
+// this resets the start of the 60 second overall timeout as well as the
+// delay between attempts so that we'll retry these jobs sooner
+function resetQueue () {
+  var now = Date.now()
+  for (var i = 0; i < fs[gracefulQueue].length; ++i) {
+    // entries that are only a length of 2 are from an older version, don't
+    // bother modifying those since they'll be retried anyway.
+    if (fs[gracefulQueue][i].length > 2) {
+      fs[gracefulQueue][i][3] = now // startTime
+      fs[gracefulQueue][i][4] = now // lastTime
+    }
+  }
+  // call retry to make sure we're actively processing the queue
+  retry()
+}
+
+function retry () {
+  // clear the timer and remove it to help prevent unintended concurrency
+  clearTimeout(retryTimer)
+  retryTimer = undefined
+
+  if (fs[gracefulQueue].length === 0)
+    return
+
+  var elem = fs[gracefulQueue].shift()
+  var fn = elem[0]
+  var args = elem[1]
+  // these items may be unset if they were added by an older graceful-fs
+  var err = elem[2]
+  var startTime = elem[3]
+  var lastTime = elem[4]
+
+  // if we don't have a startTime we have no way of knowing if we've waited
+  // long enough, so go ahead and retry this item now
+  if (startTime === undefined) {
+    debug('RETRY', fn.name, args)
+    fn.apply(null, args)
+  } else if (Date.now() - startTime >= 60000) {
+    // it's been more than 60 seconds total, bail now
+    debug('TIMEOUT', fn.name, args)
+    var cb = args.pop()
+    if (typeof cb === 'function')
+      cb.call(null, err)
+  } else {
+    // the amount of time between the last attempt and right now
+    var sinceAttempt = Date.now() - lastTime
+    // the amount of time between when we first tried, and when we last tried
+    // rounded up to at least 1
+    var sinceStart = Math.max(lastTime - startTime, 1)
+    // backoff. wait longer than the total time we've been retrying, but only
+    // up to a maximum of 100ms
+    var desiredDelay = Math.min(sinceStart * 1.2, 100)
+    // it's been long enough since the last retry, do it again
+    if (sinceAttempt >= desiredDelay) {
+      debug('RETRY', fn.name, args)
+      fn.apply(null, args.concat([startTime]))
+    } else {
+      // if we can't do this job yet, push it to the end of the queue
+      // and let the next iteration check again
+      fs[gracefulQueue].push(elem)
+    }
+  }
+
+  // schedule our next run if one isn't already scheduled
+  if (retryTimer === undefined) {
+    retryTimer = setTimeout(retry, 0)
+  }
+}
+
+
+/***/ }),
+
+/***/ 6465:
+/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
+
+var Stream = __webpack_require__(2413).Stream
+
+module.exports = legacy
+
+function legacy (fs) {
+  return {
+    ReadStream: ReadStream,
+    WriteStream: WriteStream
+  }
+
+  function ReadStream (path, options) {
+    if (!(this instanceof ReadStream)) return new ReadStream(path, options);
+
+    Stream.call(this);
+
+    var self = this;
+
+    this.path = path;
+    this.fd = null;
+    this.readable = true;
+    this.paused = false;
+
+    this.flags = 'r';
+    this.mode = 438; /*=0666*/
+    this.bufferSize = 64 * 1024;
+
+    options = options || {};
+
+    // Mixin options into this
+    var keys = Object.keys(options);
+    for (var index = 0, length = keys.length; index < length; index++) {
+      var key = keys[index];
+      this[key] = options[key];
+    }
+
+    if (this.encoding) this.setEncoding(this.encoding);
+
+    if (this.start !== undefined) {
+      if ('number' !== typeof this.start) {
+        throw TypeError('start must be a Number');
+      }
+      if (this.end === undefined) {
+        this.end = Infinity;
+      } else if ('number' !== typeof this.end) {
+        throw TypeError('end must be a Number');
+      }
+
+      if (this.start > this.end) {
+        throw new Error('start must be <= end');
+      }
+
+      this.pos = this.start;
+    }
+
+    if (this.fd !== null) {
+      process.nextTick(function() {
+        self._read();
+      });
+      return;
+    }
+
+    fs.open(this.path, this.flags, this.mode, function (err, fd) {
+      if (err) {
+        self.emit('error', err);
+        self.readable = false;
+        return;
+      }
+
+      self.fd = fd;
+      self.emit('open', fd);
+      self._read();
+    })
+  }
+
+  function WriteStream (path, options) {
+    if (!(this instanceof WriteStream)) return new WriteStream(path, options);
+
+    Stream.call(this);
+
+    this.path = path;
+    this.fd = null;
+    this.writable = true;
+
+    this.flags = 'w';
+    this.encoding = 'binary';
+    this.mode = 438; /*=0666*/
+    this.bytesWritten = 0;
+
+    options = options || {};
+
+    // Mixin options into this
+    var keys = Object.keys(options);
+    for (var index = 0, length = keys.length; index < length; index++) {
+      var key = keys[index];
+      this[key] = options[key];
+    }
+
+    if (this.start !== undefined) {
+      if ('number' !== typeof this.start) {
+        throw TypeError('start must be a Number');
+      }
+      if (this.start < 0) {
+        throw new Error('start must be >= zero');
+      }
+
+      this.pos = this.start;
+    }
+
+    this.busy = false;
+    this._queue = [];
+
+    if (this.fd === null) {
+      this._open = fs.open;
+      this._queue.push([this._open, this.path, this.flags, this.mode, undefined]);
+      this.flush();
+    }
+  }
+}
+
+
+/***/ }),
+
+/***/ 4692:
+/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
+
+var constants = __webpack_require__(7619)
+
+var origCwd = process.cwd
+var cwd = null
+
+var platform = process.env.GRACEFUL_FS_PLATFORM || process.platform
+
+process.cwd = function() {
+  if (!cwd)
+    cwd = origCwd.call(process)
+  return cwd
+}
+try {
+  process.cwd()
+} catch (er) {}
+
+// This check is needed until node.js 12 is required
+if (typeof process.chdir === 'function') {
+  var chdir = process.chdir
+  process.chdir = function (d) {
+    cwd = null
+    chdir.call(process, d)
+  }
+  if (Object.setPrototypeOf) Object.setPrototypeOf(process.chdir, chdir)
+}
+
+module.exports = patch
+
+function patch (fs) {
+  // (re-)implement some things that are known busted or missing.
+
+  // lchmod, broken prior to 0.6.2
+  // back-port the fix here.
+  if (constants.hasOwnProperty('O_SYMLINK') &&
+      process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) {
+    patchLchmod(fs)
+  }
+
+  // lutimes implementation, or no-op
+  if (!fs.lutimes) {
+    patchLutimes(fs)
+  }
+
+  // https://github.com/isaacs/node-graceful-fs/issues/4
+  // Chown should not fail on einval or eperm if non-root.
+  // It should not fail on enosys ever, as this just indicates
+  // that a fs doesn't support the intended operation.
+
+  fs.chown = chownFix(fs.chown)
+  fs.fchown = chownFix(fs.fchown)
+  fs.lchown = chownFix(fs.lchown)
+
+  fs.chmod = chmodFix(fs.chmod)
+  fs.fchmod = chmodFix(fs.fchmod)
+  fs.lchmod = chmodFix(fs.lchmod)
+
+  fs.chownSync = chownFixSync(fs.chownSync)
+  fs.fchownSync = chownFixSync(fs.fchownSync)
+  fs.lchownSync = chownFixSync(fs.lchownSync)
+
+  fs.chmodSync = chmodFixSync(fs.chmodSync)
+  fs.fchmodSync = chmodFixSync(fs.fchmodSync)
+  fs.lchmodSync = chmodFixSync(fs.lchmodSync)
+
+  fs.stat = statFix(fs.stat)
+  fs.fstat = statFix(fs.fstat)
+  fs.lstat = statFix(fs.lstat)
+
+  fs.statSync = statFixSync(fs.statSync)
+  fs.fstatSync = statFixSync(fs.fstatSync)
+  fs.lstatSync = statFixSync(fs.lstatSync)
+
+  // if lchmod/lchown do not exist, then make them no-ops
+  if (fs.chmod && !fs.lchmod) {
+    fs.lchmod = function (path, mode, cb) {
+      if (cb) process.nextTick(cb)
+    }
+    fs.lchmodSync = function () {}
+  }
+  if (fs.chown && !fs.lchown) {
+    fs.lchown = function (path, uid, gid, cb) {
+      if (cb) process.nextTick(cb)
+    }
+    fs.lchownSync = function () {}
+  }
+
+  // on Windows, A/V software can lock the directory, causing this
+  // to fail with an EACCES or EPERM if the directory contains newly
+  // created files. Try again on failure, for up to 60 seconds.
+
+  // Set the timeout this long because some Windows Anti-Virus, such as Parity
+  // bit9, may lock files for up to a minute, causing npm package install
+  // failures. Also, take care to yield the scheduler. Windows scheduling gives
+  // CPU to a busy looping process, which can cause the program causing the lock
+  // contention to be starved of CPU by node, so the contention doesn't resolve.
+  if (platform === "win32") {
+    fs.rename = typeof fs.rename !== 'function' ? fs.rename
+      : (function (fs$rename) {
+        function rename (from, to, cb) {
+          var start = Date.now()
+          var backoff = 0;
+          fs$rename(from, to, function CB (er) {
+            if (er
+                && (er.code === "EACCES" || er.code === "EPERM" || er.code === "EBUSY")
+                && Date.now() - start < 60000) {
+              setTimeout(function() {
+                fs.stat(to, function (stater, st) {
+                  if (stater && stater.code === "ENOENT")
+                    fs$rename(from, to, CB);
+                  else
+                    cb(er)
+                })
+              }, backoff)
+              if (backoff < 100)
+                backoff += 10;
+              return;
+            }
+            if (cb) cb(er)
+          })
+        }
+        if (Object.setPrototypeOf) Object.setPrototypeOf(rename, fs$rename)
+        return rename
+      })(fs.rename)
+  }
+
+  // if read() returns EAGAIN, then just try it again.
+  fs.read = typeof fs.read !== 'function' ? fs.read
+    : (function (fs$read) {
+      function read (fd, buffer, offset, length, position, callback_) {
+        var callback
+        if (callback_ && typeof callback_ === 'function') {
+          var eagCounter = 0
+          callback = function (er, _, __) {
+            if (er && er.code === 'EAGAIN' && eagCounter < 10) {
+              eagCounter ++
+              return fs$read.call(fs, fd, buffer, offset, length, position, callback)
+            }
+            callback_.apply(this, arguments)
+          }
+        }
+        return fs$read.call(fs, fd, buffer, offset, length, position, callback)
+      }
+
+      // This ensures `util.promisify` works as it does for native `fs.read`.
+      if (Object.setPrototypeOf) Object.setPrototypeOf(read, fs$read)
+      return read
+    })(fs.read)
+
+  fs.readSync = typeof fs.readSync !== 'function' ? fs.readSync
+    : (function (fs$readSync) { return function (fd, buffer, offset, length, position) {
+      var eagCounter = 0
+      while (true) {
+        try {
+          return fs$readSync.call(fs, fd, buffer, offset, length, position)
+        } catch (er) {
+          if (er.code === 'EAGAIN' && eagCounter < 10) {
+            eagCounter ++
+            continue
+          }
+          throw er
+        }
+      }
+    }})(fs.readSync)
+
+  function patchLchmod (fs) {
+    fs.lchmod = function (path, mode, callback) {
+      fs.open( path
+             , constants.O_WRONLY | constants.O_SYMLINK
+             , mode
+             , function (err, fd) {
+        if (err) {
+          if (callback) callback(err)
+          return
+        }
+        // prefer to return the chmod error, if one occurs,
+        // but still try to close, and report closing errors if they occur.
+        fs.fchmod(fd, mode, function (err) {
+          fs.close(fd, function(err2) {
+            if (callback) callback(err || err2)
+          })
+        })
+      })
+    }
+
+    fs.lchmodSync = function (path, mode) {
+      var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode)
+
+      // prefer to return the chmod error, if one occurs,
+      // but still try to close, and report closing errors if they occur.
+      var threw = true
+      var ret
+      try {
+        ret = fs.fchmodSync(fd, mode)
+        threw = false
+      } finally {
+        if (threw) {
+          try {
+            fs.closeSync(fd)
+          } catch (er) {}
+        } else {
+          fs.closeSync(fd)
+        }
+      }
+      return ret
+    }
+  }
+
+  function patchLutimes (fs) {
+    if (constants.hasOwnProperty("O_SYMLINK") && fs.futimes) {
+      fs.lutimes = function (path, at, mt, cb) {
+        fs.open(path, constants.O_SYMLINK, function (er, fd) {
+          if (er) {
+            if (cb) cb(er)
+            return
+          }
+          fs.futimes(fd, at, mt, function (er) {
+            fs.close(fd, function (er2) {
+              if (cb) cb(er || er2)
+            })
+          })
+        })
+      }
+
+      fs.lutimesSync = function (path, at, mt) {
+        var fd = fs.openSync(path, constants.O_SYMLINK)
+        var ret
+        var threw = true
+        try {
+          ret = fs.futimesSync(fd, at, mt)
+          threw = false
+        } finally {
+          if (threw) {
+            try {
+              fs.closeSync(fd)
+            } catch (er) {}
+          } else {
+            fs.closeSync(fd)
+          }
+        }
+        return ret
+      }
+
+    } else if (fs.futimes) {
+      fs.lutimes = function (_a, _b, _c, cb) { if (cb) process.nextTick(cb) }
+      fs.lutimesSync = function () {}
+    }
+  }
+
+  function chmodFix (orig) {
+    if (!orig) return orig
+    return function (target, mode, cb) {
+      return orig.call(fs, target, mode, function (er) {
+        if (chownErOk(er)) er = null
+        if (cb) cb.apply(this, arguments)
+      })
+    }
+  }
+
+  function chmodFixSync (orig) {
+    if (!orig) return orig
+    return function (target, mode) {
+      try {
+        return orig.call(fs, target, mode)
+      } catch (er) {
+        if (!chownErOk(er)) throw er
+      }
+    }
+  }
+
+
+  function chownFix (orig) {
+    if (!orig) return orig
+    return function (target, uid, gid, cb) {
+      return orig.call(fs, target, uid, gid, function (er) {
+        if (chownErOk(er)) er = null
+        if (cb) cb.apply(this, arguments)
+      })
+    }
+  }
+
+  function chownFixSync (orig) {
+    if (!orig) return orig
+    return function (target, uid, gid) {
+      try {
+        return orig.call(fs, target, uid, gid)
+      } catch (er) {
+        if (!chownErOk(er)) throw er
+      }
+    }
+  }
+
+  function statFix (orig) {
+    if (!orig) return orig
+    // Older versions of Node erroneously returned signed integers for
+    // uid + gid.
+    return function (target, options, cb) {
+      if (typeof options === 'function') {
+        cb = options
+        options = null
+      }
+      function callback (er, stats) {
+        if (stats) {
+          if (stats.uid < 0) stats.uid += 0x100000000
+          if (stats.gid < 0) stats.gid += 0x100000000
+        }
+        if (cb) cb.apply(this, arguments)
+      }
+      return options ? orig.call(fs, target, options, callback)
+        : orig.call(fs, target, callback)
+    }
+  }
+
+  function statFixSync (orig) {
+    if (!orig) return orig
+    // Older versions of Node erroneously returned signed integers for
+    // uid + gid.
+    return function (target, options) {
+      var stats = options ? orig.call(fs, target, options)
+        : orig.call(fs, target)
+      if (stats) {
+        if (stats.uid < 0) stats.uid += 0x100000000
+        if (stats.gid < 0) stats.gid += 0x100000000
+      }
+      return stats;
+    }
+  }
+
+  // ENOSYS means that the fs doesn't support the op. Just ignore
+  // that, because it doesn't matter.
+  //
+  // if there's no getuid, or if getuid() is something other
+  // than 0, and the error is EINVAL or EPERM, then just ignore
+  // it.
+  //
+  // This specific case is a silent failure in cp, install, tar,
+  // and most other unix tools that manage permissions.
+  //
+  // When running as root, or if other types of errors are
+  // encountered, then it's strict.
+  function chownErOk (er) {
+    if (!er)
+      return true
+
+    if (er.code === "ENOSYS")
+      return true
+
+    var nonroot = !process.getuid || process.getuid() !== 0
+    if (nonroot) {
+      if (er.code === "EINVAL" || er.code === "EPERM")
+        return true
+    }
+
+    return false
+  }
+}
+
+
 /***/ }),
 
 /***/ 1611:
@@ -4624,7 +5597,7 @@ function sync (path, options) {
 
   var _fs
   try {
-    _fs = __webpack_require__(
+    _fs = __webpack_require__(7156)
  } catch (_) {
    _fs = __webpack_require__(5747)
  }
@@ -8769,25 +9742,25 @@ async function matchPaths(configPatterns, workPath) {
         return [];
     }
     const patternPaths = await Promise.all(patterns.map(async (pattern) => {
-        const files = await build_utils_1.glob(pattern, workPath);
+        const files = await (0, build_utils_1.glob)(pattern, workPath);
         return Object.keys(files);
     }));
     return patternPaths.reduce((a, b) => a.concat(b), []);
 }
 async function bundleInstall(bundlePath, bundleDir, gemfilePath) {
-    build_utils_1.debug(`running "bundle install --deployment"...`);
-    const bundleAppConfig = await build_utils_1.getWriteableDirectory();
-    const gemfileContent = await fs_extra_1.readFile(gemfilePath, 'utf8');
+    (0, build_utils_1.debug)(`running "bundle install --deployment"...`);
+    const bundleAppConfig = await (0, build_utils_1.getWriteableDirectory)();
+    const gemfileContent = await (0, fs_extra_1.readFile)(gemfilePath, 'utf8');
     if (gemfileContent.includes('ruby "~> 2.7.x"')) {
         // Gemfile contains "2.7.x" which will cause an error message:
         // "Your Ruby patchlevel is 0, but your Gemfile specified -1"
         // See https://github.com/rubygems/bundler/blob/3f0638c6c8d340c2f2405ecb84eb3b39c433e36e/lib/bundler/errors.rb#L49
         // We must correct to the actual version in the build container.
-        await fs_extra_1.writeFile(gemfilePath, gemfileContent.replace('ruby "~> 2.7.x"', 'ruby "~> 2.7.0"'));
+        await (0, fs_extra_1.writeFile)(gemfilePath, gemfileContent.replace('ruby "~> 2.7.x"', 'ruby "~> 2.7.0"'));
     }
-    await execa_1.default(bundlePath, ['install', '--deployment', '--gemfile', gemfilePath, '--path', bundleDir], {
+    await (0, execa_1.default)(bundlePath, ['install', '--deployment', '--gemfile', gemfilePath, '--path', bundleDir], {
         stdio: 'pipe',
-        env: build_utils_1.cloneEnv(process.env, {
+        env: (0, build_utils_1.cloneEnv)(process.env, {
            BUNDLE_SILENCE_ROOT_WARNING: '1',
            BUNDLE_APP_CONFIG: bundleAppConfig,
            BUNDLE_JOBS: '4',
@@ -8796,50 +9769,50 @@ async function bundleInstall(bundlePath, bundleDir, gemfilePath) {
 }
 exports.version = 3;
 async function build({ workPath, files, entrypoint, config, meta = {}, }) {
-    await build_utils_1.download(files, workPath, meta);
-    const entrypointFsDirname = path_1.join(workPath, path_1.dirname(entrypoint));
+    await (0, build_utils_1.download)(files, workPath, meta);
+    const entrypointFsDirname = (0, path_1.join)(workPath, (0, path_1.dirname)(entrypoint));
     const gemfileName = 'Gemfile';
-    const gemfilePath = await build_utils_1.walkParentDirs({
+    const gemfilePath = await (0, build_utils_1.walkParentDirs)({
         base: workPath,
         start: entrypointFsDirname,
         filename: gemfileName,
     });
     const gemfileContents = gemfilePath
-        ? await fs_extra_1.readFile(gemfilePath, 'utf8')
+        ? await (0, fs_extra_1.readFile)(gemfilePath, 'utf8')
         : '';
-    const { gemHome, bundlerPath, vendorPath, runtime } = await install_ruby_1.installBundler(meta, gemfileContents);
+    const { gemHome, bundlerPath, vendorPath, runtime } = await (0, install_ruby_1.installBundler)(meta, gemfileContents);
     process.env.GEM_HOME = gemHome;
-    build_utils_1.debug(`Checking existing vendor directory at "${vendorPath}"`);
-    const vendorDir = path_1.join(workPath, vendorPath);
-    const bundleDir = path_1.join(workPath, 'vendor', 'bundle');
-    const relativeVendorDir = path_1.join(entrypointFsDirname, vendorPath);
-    const hasRootVendorDir = await fs_extra_1.pathExists(vendorDir);
-    const hasRelativeVendorDir = await fs_extra_1.pathExists(relativeVendorDir);
+    (0, build_utils_1.debug)(`Checking existing vendor directory at "${vendorPath}"`);
+    const vendorDir = (0, path_1.join)(workPath, vendorPath);
+    const bundleDir = (0, path_1.join)(workPath, 'vendor', 'bundle');
+    const relativeVendorDir = (0, path_1.join)(entrypointFsDirname, vendorPath);
+    const hasRootVendorDir = await (0, fs_extra_1.pathExists)(vendorDir);
+    const hasRelativeVendorDir = await (0, fs_extra_1.pathExists)(relativeVendorDir);
     const hasVendorDir = hasRootVendorDir || hasRelativeVendorDir;
     if (hasRelativeVendorDir) {
         if (hasRootVendorDir) {
-            build_utils_1.debug('found two vendor directories, choosing the vendor directory relative to entrypoint');
+            (0, build_utils_1.debug)('found two vendor directories, choosing the vendor directory relative to entrypoint');
         }
         else {
-            build_utils_1.debug('found vendor directory relative to entrypoint');
+            (0, build_utils_1.debug)('found vendor directory relative to entrypoint');
        }
        // vendor dir must be at the root for lambda to find it
-        await fs_extra_1.move(relativeVendorDir, vendorDir);
+        await (0, fs_extra_1.move)(relativeVendorDir, vendorDir);
    }
    else if (hasRootVendorDir) {
-        build_utils_1.debug('found vendor directory in project root');
+        (0, build_utils_1.debug)('found vendor directory in project root');
    }
-    await fs_extra_1.ensureDir(vendorDir);
+    await (0, fs_extra_1.ensureDir)(vendorDir);
    // no vendor directory, check for Gemfile to install
    if (!hasVendorDir) {
        if (gemfilePath) {
-            build_utils_1.debug('did not find a vendor directory but found a Gemfile, bundling gems...');
-            const fileAtRoot = path_1.relative(workPath, gemfilePath) === gemfileName;
+            (0, build_utils_1.debug)('did not find a vendor directory but found a Gemfile, bundling gems...');
+            const fileAtRoot = (0, path_1.relative)(workPath, gemfilePath) === gemfileName;
            // If the `Gemfile` is located in the Root Directory of the project and
            // the new File System API is used (`avoidTopLevelInstall`), the Install Command
            // will have already installed its dependencies, so we don't need to do it again.
            if (meta.avoidTopLevelInstall && fileAtRoot) {
-                build_utils_1.debug('Skipping `bundle install` — already handled by Install Command');
+                (0, build_utils_1.debug)('Skipping `bundle install` — already handled by Install Command');
            }
            else {
                // try installing. this won't work if native extesions are required.
@@ -8849,27 +9822,27 @@ async function build({ workPath, files, entrypoint, config, meta = {}, }) {
         }
     }
     else {
-        build_utils_1.debug('found vendor directory, skipping "bundle install"...');
+        (0, build_utils_1.debug)('found vendor directory, skipping "bundle install"...');
     }
     // try to remove gem cache to slim bundle size
     try {
-        await fs_extra_1.remove(path_1.join(vendorDir, 'cache'));
+        await (0, fs_extra_1.remove)((0, path_1.join)(vendorDir, 'cache'));
    }
    catch (e) {
        // don't do anything here
    }
-    const originalRbPath = path_1.join(__dirname, '..', 'vc_init.rb');
-    const originalHandlerRbContents = await fs_extra_1.readFile(originalRbPath, 'utf8');
+    const originalRbPath = (0, path_1.join)(__dirname, '..', 'vc_init.rb');
+    const originalHandlerRbContents = await (0, fs_extra_1.readFile)(originalRbPath, 'utf8');
    // will be used on `require_relative '$here'` or for loading rack config.ru file
    // for example, `require_relative 'api/users'`
-    build_utils_1.debug('entrypoint is', entrypoint);
+    (0, build_utils_1.debug)('entrypoint is', entrypoint);
    const userHandlerFilePath = entrypoint.replace(/\.rb$/, '');
    const nowHandlerRbContents = originalHandlerRbContents.replace(/__VC_HANDLER_FILENAME/g, userHandlerFilePath);
    // in order to allow the user to have `server.rb`, we need our `server.rb` to be called
    // somethig else
    const handlerRbFilename = 'vc__handler__ruby';
-    await fs_extra_1.writeFile(path_1.join(workPath, `${handlerRbFilename}.rb`), nowHandlerRbContents);
-    const outputFiles = await build_utils_1.glob('**', workPath);
+    await (0, fs_extra_1.writeFile)((0, path_1.join)(workPath, `${handlerRbFilename}.rb`), nowHandlerRbContents);
+    const outputFiles = await (0, build_utils_1.glob)('**', workPath);
    // static analysis is impossible with ruby.
    // instead, provide `includeFiles` and `excludeFiles` config options to reduce bundle size.
    if (config && (config.includeFiles || config.excludeFiles)) {
@@ -8891,7 +9864,7 @@ async function build({ workPath, files, entrypoint, config, meta = {}, }) {
             delete outputFiles[excludedPaths[i]];
         }
     }
-    const lambda = await build_utils_1.createLambda({
+    const lambda = await (0, build_utils_1.createLambda)({
        files: outputFiles,
        handler: `${handlerRbFilename}.vc__handler`,
        runtime,
@@ -8950,7 +9923,7 @@ function getRubyPath(meta, gemfileContents) {
         // The array is already in order so return the first
         // match which will be the newest version.
         selection = o;
-        return semver_1.intersects(o.range, strVersion);
+        return (0, semver_1.intersects)(o.range, strVersion);
    });
    if (!found) {
        throw new build_utils_1.NowBuildError({
@@ -8976,11 +9949,11 @@ function getRubyPath(meta, gemfileContents) {
     const result = {
         gemHome,
         runtime,
-        rubyPath: path_1.join(gemHome, 'bin', 'ruby'),
-        gemPath: path_1.join(gemHome, 'bin', 'gem'),
+        rubyPath: (0, path_1.join)(gemHome, 'bin', 'ruby'),
+        gemPath: (0, path_1.join)(gemHome, 'bin', 'gem'),
        vendorPath: `vendor/bundle/ruby/${major}.${minor}.0`,
    };
-    build_utils_1.debug(JSON.stringify(result, null, ' '));
+    (0, build_utils_1.debug)(JSON.stringify(result, null, ' '));
    return result;
}
// downloads and installs `bundler` (respecting
@@ -8991,18 +9964,18 @@ async function installBundler(meta, gemfileContents) {
     // If the new File System API is used (`avoidTopLevelInstall`), the Install Command
     // will have already installed the dependencies, so we don't need to do it again.
     if (meta.avoidTopLevelInstall) {
-        build_utils_1.debug(`Skipping bundler installation, already installed by Install Command`);
+        (0, build_utils_1.debug)(`Skipping bundler installation, already installed by Install Command`);
        return {
            gemHome,
            rubyPath,
            gemPath,
            vendorPath,
            runtime,
-            bundlerPath: path_1.join(gemHome, 'bin', 'bundler'),
+            bundlerPath: (0, path_1.join)(gemHome, 'bin', 'bundler'),
        };
    }
-    build_utils_1.debug('installing bundler...');
-    await execa_1.default(gemPath, ['install', 'bundler', '--no-document'], {
+    (0, build_utils_1.debug)('installing bundler...');
+    await (0, execa_1.default)(gemPath, ['install', 'bundler', '--no-document'], {
        stdio: 'pipe',
        env: {
            GEM_HOME: gemHome,
@@ -9014,7 +9987,7 @@ async function installBundler(meta, gemfileContents) {
         gemPath,
         vendorPath,
         runtime,
-        bundlerPath: path_1.join(gemHome, 'bin', 'bundler'),
+        bundlerPath: (0, path_1.join)(gemHome, 'bin', 'bundler'),
    };
}
exports.installBundler = installBundler;
package/package.json
CHANGED
@@ -1,8 +1,8 @@
 {
   "name": "@vercel/ruby",
   "author": "Nathan Cahill <nathan@nathancahill.com>",
-  "version": "1.3.
-  "license": "
+  "version": "1.3.76",
+  "license": "Apache-2.0",
   "main": "./dist/index",
   "homepage": "https://vercel.com/docs/runtimes#official-runtimes/ruby",
   "files": [
@@ -22,12 +22,11 @@
   "devDependencies": {
     "@types/fs-extra": "8.0.0",
     "@types/semver": "6.0.0",
-    "@vercel/build-utils": "6.7.
+    "@vercel/build-utils": "6.7.2",
     "@vercel/ncc": "0.24.0",
     "execa": "2.0.4",
     "fs-extra": "^7.0.1",
-    "semver": "6.1.1"
-    "typescript": "4.3.4"
+    "semver": "6.1.1"
  },
-  "gitHead": "
+  "gitHead": "2de365f9cfea3ce283d2bf855507c71209f1e3d8"
 }