@mcp-s/skills 1.0.5 → 1.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,945 @@
1
+ import { n as __require, t as __commonJSMin } from "../rolldown-runtime.mjs";
2
+ import { n as require_readable, t as require_duplexer2 } from "./duplexer2.mjs";
3
+ import { n as require_graceful_fs, t as require_lib } from "./fs-extra.mjs";
4
+ import { t as require_Int64 } from "./node-int64.mjs";
5
+ import { t as require_bluebird } from "./bluebird.mjs";
6
var require_PullStream = /* @__PURE__ */ __commonJSMin(((exports, module) => {
	const NodeStream = __require("stream");
	const nodeUtil = __require("util");
	/**
	 * Duplex stream that buffers everything written to it and lets a consumer
	 * pull bytes back out, either as a PassThrough (`stream`) or as a Promise
	 * of a Buffer (`pull`). `eof` may be a byte count or a delimiter Buffer.
	 */
	function PullStream() {
		if (!(this instanceof PullStream)) return new PullStream();
		NodeStream.Duplex.call(this, {
			decodeStrings: false,
			objectMode: true
		});
		this.buffer = Buffer.from("");
		const self = this;
		self.on("finish", function() {
			// No more data will ever arrive; wake up any pending consumer.
			self.finished = true;
			self.emit("chunk", false);
		});
	}
	nodeUtil.inherits(PullStream, NodeStream.Duplex);
	// Append incoming data to the internal buffer and hold on to the write
	// callback so back-pressure is released only once the data is consumed.
	PullStream.prototype._write = function(chunk, encoding, callback) {
		this.buffer = Buffer.concat([this.buffer, chunk]);
		this.cb = callback;
		this.emit("chunk");
	};
	/**
	 * Stream data out until `eof` is reached. A numeric `eof` is a byte
	 * count; a Buffer `eof` is a delimiter that ends the stream (included in
	 * the output when `includeEof` is truthy).
	 */
	PullStream.prototype.stream = function(eof, includeEof) {
		const out = NodeStream.PassThrough();
		let done;
		const self = this;
		// Release the pending upstream write callback, if one is held.
		function flushWriteCb() {
			if (typeof self.cb === "function") {
				const pending = self.cb;
				self.cb = void 0;
				return pending();
			}
		}
		function drain() {
			let piece;
			if (self.buffer && self.buffer.length) {
				if (typeof eof === "number") {
					piece = self.buffer.slice(0, eof);
					self.buffer = self.buffer.slice(eof);
					eof -= piece.length;
					done = done || !eof;
				} else {
					let hit = self.buffer.indexOf(eof);
					if (hit !== -1) {
						self.match = hit;
						if (includeEof) hit = hit + eof.length;
						piece = self.buffer.slice(0, hit);
						self.buffer = self.buffer.slice(hit);
						done = true;
					} else {
						const usable = self.buffer.length - eof.length;
						if (usable <= 0) flushWriteCb();
						else {
							// Hold back eof.length bytes in case the delimiter
							// straddles the next chunk boundary.
							piece = self.buffer.slice(0, usable);
							self.buffer = self.buffer.slice(usable);
						}
					}
				}
				if (piece) out.write(piece, function() {
					if (self.buffer.length === 0 || eof.length && self.buffer.length <= eof.length) flushWriteCb();
				});
			}
			if (done) {
				self.removeListener("chunk", drain);
				out.end();
			} else if (self.finished) {
				self.removeListener("chunk", drain);
				self.emit("error", new Error("FILE_ENDED"));
			}
		}
		self.on("chunk", drain);
		drain();
		return out;
	};
	/**
	 * Promise-based pull: resolves with a Buffer covering `eof` bytes (or up
	 * to the delimiter when `eof` is a Buffer). Serves synchronously out of
	 * the internal buffer when enough data is already available.
	 */
	PullStream.prototype.pull = function(eof, includeEof) {
		if (eof === 0) return Promise.resolve("");
		// Fast path: enough bytes are already buffered.
		if (!isNaN(eof) && this.buffer.length > eof) {
			const head = this.buffer.slice(0, eof);
			this.buffer = this.buffer.slice(eof);
			return Promise.resolve(head);
		}
		let collected = Buffer.from("");
		const self = this;
		const collector = new NodeStream.Transform();
		collector._transform = function(chunk, encoding, next) {
			collected = Buffer.concat([collected, chunk]);
			next();
		};
		let rejectHandler;
		let pullStreamRejectHandler;
		return new Promise(function(resolve, reject) {
			rejectHandler = reject;
			pullStreamRejectHandler = function(e) {
				// Remember the error so callers can avoid double-reporting it.
				self.__emittedError = e;
				reject(e);
			};
			if (self.finished) return reject(new Error("FILE_ENDED"));
			self.once("error", pullStreamRejectHandler);
			self.stream(eof, includeEof).on("error", reject).pipe(collector).on("finish", function() {
				resolve(collected);
			}).on("error", reject);
		}).finally(function() {
			self.removeListener("error", rejectHandler);
			self.removeListener("error", pullStreamRejectHandler);
		});
	};
	// Reading is driven by explicit push() calls, so _read is a no-op.
	PullStream.prototype._read = function() {};
	module.exports = PullStream;
}));
119
var require_NoopStream = /* @__PURE__ */ __commonJSMin(((exports, module) => {
	const NodeStream = __require("stream");
	const nodeUtil = __require("util");
	// Transform stream that discards everything written to it; used to
	// auto-drain zip entries the caller is not interested in.
	function NoopStream() {
		if (!(this instanceof NoopStream)) return new NoopStream();
		NodeStream.Transform.call(this);
	}
	nodeUtil.inherits(NoopStream, NodeStream.Transform);
	NoopStream.prototype._transform = function(chunk, encoding, next) {
		next();
	};
	module.exports = NoopStream;
}));
132
+ var require_BufferStream = /* @__PURE__ */ __commonJSMin(((exports, module) => {
133
+ const Stream$5 = __require("stream");
134
+ module.exports = function(entry) {
135
+ return new Promise(function(resolve, reject) {
136
+ const chunks = [];
137
+ const bufferStream = Stream$5.Transform().on("finish", function() {
138
+ resolve(Buffer.concat(chunks));
139
+ }).on("error", reject);
140
+ bufferStream._transform = function(d, e, cb) {
141
+ chunks.push(d);
142
+ cb();
143
+ };
144
+ entry.on("error", reject).pipe(bufferStream);
145
+ });
146
+ };
147
+ }));
148
var require_parseBuffer = /* @__PURE__ */ __commonJSMin(((exports, module) => {
	// Read one little-endian unsigned integer of the given byte width.
	const parseUIntLE = function(buffer, offset, size) {
		if (size === 1) return buffer.readUInt8(offset);
		if (size === 2) return buffer.readUInt16LE(offset);
		if (size === 4) return buffer.readUInt32LE(offset);
		if (size === 8) return Number(buffer.readBigUInt64LE(offset));
		throw new Error("Unsupported UInt LE size!");
	};
	/**
	 * Decode consecutive little-endian fields from `buffer` according to
	 * `format`, an array of [name, byteWidth] pairs. Fields that fall past
	 * the end of the buffer come back as null.
	 */
	const parse = function(buffer, format) {
		const result = {};
		let offset = 0;
		for (const [name, width] of format) {
			result[name] = buffer.length >= offset + width ? parseUIntLE(buffer, offset, width) : null;
			offset += width;
		}
		return result;
	};
	module.exports = { parse };
}));
180
+ var require_parseExtraField = /* @__PURE__ */ __commonJSMin(((exports, module) => {
181
+ const parseBuffer = require_parseBuffer();
182
+ module.exports = function(extraField, vars) {
183
+ let extra;
184
+ while (!extra && extraField && extraField.length) {
185
+ const candidateExtra = parseBuffer.parse(extraField, [["signature", 2], ["partSize", 2]]);
186
+ if (candidateExtra.signature === 1) {
187
+ const fieldsToExpect = [];
188
+ if (vars.uncompressedSize === 4294967295) fieldsToExpect.push(["uncompressedSize", 8]);
189
+ if (vars.compressedSize === 4294967295) fieldsToExpect.push(["compressedSize", 8]);
190
+ if (vars.offsetToLocalFileHeader === 4294967295) fieldsToExpect.push(["offsetToLocalFileHeader", 8]);
191
+ extra = parseBuffer.parse(extraField.slice(4), fieldsToExpect);
192
+ } else extraField = extraField.slice(candidateExtra.partSize + 4);
193
+ }
194
+ extra = extra || {};
195
+ if (vars.compressedSize === 4294967295) vars.compressedSize = extra.compressedSize;
196
+ if (vars.uncompressedSize === 4294967295) vars.uncompressedSize = extra.uncompressedSize;
197
+ if (vars.offsetToLocalFileHeader === 4294967295) vars.offsetToLocalFileHeader = extra.offsetToLocalFileHeader;
198
+ return extra;
199
+ };
200
+ }));
201
+ var require_parseDateTime = /* @__PURE__ */ __commonJSMin(((exports, module) => {
202
+ module.exports = function parseDateTime(date, time) {
203
+ const day = date & 31;
204
+ const month = date >> 5 & 15;
205
+ const year = (date >> 9 & 127) + 1980;
206
+ const seconds = time ? (time & 31) * 2 : 0;
207
+ const minutes = time ? time >> 5 & 63 : 0;
208
+ const hours = time ? time >> 11 : 0;
209
+ return new Date(Date.UTC(year, month - 1, day, hours, minutes, seconds));
210
+ };
211
+ }));
212
var require_parse = /* @__PURE__ */ __commonJSMin(((exports, module) => {
	const nodeUtil = __require("util");
	const nodeZlib = __require("zlib");
	const NodeStream = __require("stream");
	const PullStream = require_PullStream();
	const NoopStream = require_NoopStream();
	const BufferStream = require_BufferStream();
	const parseExtraField = require_parseExtraField();
	const parseDateTime = require_parseDateTime();
	const pipeline = NodeStream.pipeline;
	const parseBuffer = require_parseBuffer();
	// End-of-central-directory signature, little-endian.
	const endDirectorySignature = Buffer.alloc(4);
	endDirectorySignature.writeUInt32LE(101010256, 0);
	/**
	 * Streaming zip parser: pipe a zip file in and receive "entry" events
	 * (or pushed entry objects when forceStream/piping is used) for each
	 * stored file as local headers are encountered.
	 */
	function Parse(opts) {
		if (!(this instanceof Parse)) return new Parse(opts);
		const self = this;
		self._opts = opts || { verbose: false };
		PullStream.call(self, self._opts);
		self.on("finish", function() {
			self.emit("end");
			self.emit("close");
		});
		// Start the record loop; avoid re-emitting errors already surfaced
		// through pull()'s error handler.
		self._readRecord().catch(function(e) {
			if (!self.__emittedError || self.__emittedError !== e) self.emit("error", e);
		});
	}
	nodeUtil.inherits(Parse, PullStream);
	// Read one 4-byte signature and dispatch to the matching record reader.
	// A truthy resolution continues the loop with the next record.
	Parse.prototype._readRecord = function() {
		const self = this;
		return self.pull(4).then(function(data) {
			if (data.length === 0) return;
			const signature = data.readUInt32LE(0);
			if (signature === 875721283) return self._readCrxHeader(); // CRX wrapper magic
			if (signature === 67324752) return self._readFile(); // local file header
			if (signature === 33639248) { // central directory header
				self.reachedCD = true;
				return self._readCentralDirectoryFileHeader();
			}
			if (signature === 101010256) return self._readEndOfCentralDirectoryRecord();
			if (self.reachedCD) {
				// Unknown bytes after the central directory started: scan
				// forward to the end-of-central-directory signature.
				return self.pull(endDirectorySignature, true).then(function() {
					return self._readEndOfCentralDirectoryRecord();
				});
			}
			self.emit("error", new Error("invalid signature: 0x" + signature.toString(16)));
		}).then(function(continueLoop) {
			if (continueLoop) return self._readRecord();
		});
	};
	// Parse the CRX (Chrome extension) wrapper that may precede the zip data.
	Parse.prototype._readCrxHeader = function() {
		const self = this;
		return self.pull(12).then(function(data) {
			self.crxHeader = parseBuffer.parse(data, [
				["version", 4],
				["pubKeyLength", 4],
				["signatureLength", 4]
			]);
			return self.pull(self.crxHeader.pubKeyLength + self.crxHeader.signatureLength);
		}).then(function(data) {
			self.crxHeader.publicKey = data.slice(0, self.crxHeader.pubKeyLength);
			self.crxHeader.signature = data.slice(self.crxHeader.pubKeyLength);
			self.emit("crx-header", self.crxHeader);
			return true; // keep reading records
		});
	};
	// Parse one local file header and its data, emitting an entry stream.
	Parse.prototype._readFile = function() {
		const self = this;
		return self.pull(26).then(function(data) {
			const vars = parseBuffer.parse(data, [
				["versionsNeededToExtract", 2],
				["flags", 2],
				["compressionMethod", 2],
				["lastModifiedTime", 2],
				["lastModifiedDate", 2],
				["crc32", 4],
				["compressedSize", 4],
				["uncompressedSize", 4],
				["fileNameLength", 2],
				["extraFieldLength", 2]
			]);
			vars.lastModifiedDateTime = parseDateTime(vars.lastModifiedDate, vars.lastModifiedTime);
			if (self.crxHeader) vars.crxHeader = self.crxHeader;
			return self.pull(vars.fileNameLength).then(function(fileNameBuffer) {
				const fileName = fileNameBuffer.toString("utf8");
				const entry = NodeStream.PassThrough();
				let autodraining = false;
				// Consumers call autodrain() to discard this entry's contents.
				entry.autodrain = function() {
					autodraining = true;
					const draining = entry.pipe(NoopStream());
					draining.promise = function() {
						return new Promise(function(resolve, reject) {
							draining.on("finish", resolve);
							draining.on("error", reject);
						});
					};
					return draining;
				};
				entry.buffer = function() {
					return BufferStream(entry);
				};
				entry.path = fileName;
				entry.props = {};
				entry.props.path = fileName;
				entry.props.pathBuffer = fileNameBuffer;
				entry.props.flags = { "isUnicode": (vars.flags & 2048) != 0 };
				entry.type = vars.uncompressedSize === 0 && /[/\\]$/.test(fileName) ? "Directory" : "File";
				if (self._opts.verbose) {
					if (entry.type === "Directory") {
						console.log(" creating:", fileName);
					} else if (entry.type === "File") {
						if (vars.compressionMethod === 0) console.log(" extracting:", fileName);
						else console.log(" inflating:", fileName);
					}
				}
				return self.pull(vars.extraFieldLength).then(function(extraField) {
					const extra = parseExtraField(extraField, vars);
					entry.vars = vars;
					entry.extra = extra;
					if (self._opts.forceStream) {
						self.push(entry);
					} else {
						self.emit("entry", entry);
						if (self._readableState.pipesCount || self._readableState.pipes && self._readableState.pipes.length) self.push(entry);
					}
					if (self._opts.verbose) console.log({
						filename: fileName,
						vars,
						extra
					});
					// Bit 3 of flags set means the sizes live in a trailing
					// data descriptor rather than in this header.
					const fileSizeKnown = !(vars.flags & 8) || vars.compressedSize > 0;
					let eof;
					entry.__autodraining = autodraining;
					const inflater = vars.compressionMethod && !autodraining ? nodeZlib.createInflateRaw() : NodeStream.PassThrough();
					if (fileSizeKnown) {
						entry.size = vars.uncompressedSize;
						eof = vars.compressedSize;
					} else {
						// Stream until the data-descriptor signature appears.
						eof = Buffer.alloc(4);
						eof.writeUInt32LE(134695760, 0);
					}
					return new Promise(function(resolve, reject) {
						pipeline(self.stream(eof), inflater, entry, function(err) {
							if (err) return reject(err);
							return fileSizeKnown ? resolve(fileSizeKnown) : self._processDataDescriptor(entry).then(resolve).catch(reject);
						});
					});
				});
			});
		});
	};
	// Consume the 16-byte data descriptor following entries of unknown size.
	Parse.prototype._processDataDescriptor = function(entry) {
		return this.pull(16).then(function(data) {
			entry.size = parseBuffer.parse(data, [
				["dataDescriptorSignature", 4],
				["crc32", 4],
				["compressedSize", 4],
				["uncompressedSize", 4]
			]).uncompressedSize;
			return true;
		});
	};
	// Read and discard one central directory header (entries were already
	// emitted while reading the local headers).
	Parse.prototype._readCentralDirectoryFileHeader = function() {
		const self = this;
		return self.pull(42).then(function(data) {
			const vars = parseBuffer.parse(data, [
				["versionMadeBy", 2],
				["versionsNeededToExtract", 2],
				["flags", 2],
				["compressionMethod", 2],
				["lastModifiedTime", 2],
				["lastModifiedDate", 2],
				["crc32", 4],
				["compressedSize", 4],
				["uncompressedSize", 4],
				["fileNameLength", 2],
				["extraFieldLength", 2],
				["fileCommentLength", 2],
				["diskNumber", 2],
				["internalFileAttributes", 2],
				["externalFileAttributes", 4],
				["offsetToLocalFileHeader", 4]
			]);
			return self.pull(vars.fileNameLength).then(function(fileName) {
				vars.fileName = fileName.toString("utf8");
				return self.pull(vars.extraFieldLength);
			}).then(function() {
				return self.pull(vars.fileCommentLength);
			}).then(function() {
				return true;
			});
		});
	};
	// Read the end-of-central-directory record, then terminate the stream.
	Parse.prototype._readEndOfCentralDirectoryRecord = function() {
		const self = this;
		return self.pull(18).then(function(data) {
			const vars = parseBuffer.parse(data, [
				["diskNumber", 2],
				["diskStart", 2],
				["numberOfRecordsOnDisk", 2],
				["numberOfRecords", 2],
				["sizeOfCentralDirectory", 4],
				["offsetToStartOfCentralDirectory", 4],
				["commentLength", 2]
			]);
			return self.pull(vars.commentLength).then(function() {
				self.end();
				self.push(null);
			});
		});
	};
	// Resolve when parsing finishes; reject on the first error.
	Parse.prototype.promise = function() {
		const self = this;
		return new Promise(function(resolve, reject) {
			self.on("finish", resolve);
			self.on("error", reject);
		});
	};
	module.exports = Parse;
}));
424
var require_parseOne = /* @__PURE__ */ __commonJSMin(((exports, module) => {
	const NodeStream = __require("stream");
	const Parse = require_parse();
	const duplexer2 = require_duplexer2();
	const BufferStream = require_BufferStream();
	/**
	 * Duplex helper that extracts exactly one entry: the first whose path
	 * matches `match` (string or RegExp), or simply the first entry when no
	 * pattern is given. All other entries are auto-drained.
	 */
	function parseOne(match, opts) {
		const inStream = NodeStream.PassThrough({ objectMode: true });
		const outStream = NodeStream.PassThrough();
		const transform = NodeStream.Transform({ objectMode: true });
		const re = match instanceof RegExp ? match : match && new RegExp(match);
		let found;
		transform._transform = function(entry, encoding, next) {
			if (found || re && !re.exec(entry.path)) {
				// Not the entry we want (or we already have it): discard.
				entry.autodrain();
				return next();
			}
			found = true;
			out.emit("entry", entry);
			entry.on("error", function(e) {
				outStream.emit("error", e);
			});
			entry.pipe(outStream).on("error", function(err) {
				next(err);
			}).on("finish", function(d) {
				next(null, d);
			});
		};
		inStream.pipe(Parse(opts)).on("error", function(err) {
			outStream.emit("error", err);
		}).pipe(transform).on("error", Object).on("finish", function() {
			// NOTE: "error" above is deliberately absorbed (Object as a no-op
			// listener); real errors were already forwarded to outStream.
			if (!found) outStream.emit("error", new Error("PATTERN_NOT_FOUND"));
			else outStream.end();
		});
		const out = duplexer2(inStream, outStream);
		out.buffer = function() {
			return BufferStream(outStream);
		};
		return out;
	}
	module.exports = parseOne;
}));
466
var require_extract = /* @__PURE__ */ __commonJSMin(((exports, module) => {
	module.exports = Extract;
	const Parse = require_parse();
	const fs = require_lib();
	const path$1 = __require("path");
	const stream = __require("stream");
	const duplexer2 = require_duplexer2();
	/**
	 * Streaming extraction: pipe a zip file into the returned duplex and each
	 * entry is written under opts.path (created as needed). Emits "close"
	 * when everything has been written; promise() resolves/rejects likewise.
	 *
	 * Fixes over the previous version:
	 * - the _write callback could fire twice when the writer emitted "error"
	 *   followed by "close"; it is now guarded to run once,
	 * - a rejection from the awaited fs.ensureDir calls escaped the async
	 *   _write as an unhandled rejection; it is now routed to the callback.
	 */
	function Extract(opts) {
		opts.path = path$1.resolve(path$1.normalize(opts.path));
		const parser = new Parse(opts);
		const outStream = new stream.Writable({ objectMode: true });
		outStream._write = async function(entry, encoding, cb) {
			// Invoke cb exactly once, whichever of error/close fires first.
			let called = false;
			const done = function(err) {
				if (called) return;
				called = true;
				cb(err);
			};
			try {
				const extractPath = path$1.join(opts.path, entry.path.replace(/\\/g, "/"));
				// Zip-slip guard: refuse entries that escape the target dir.
				if (extractPath.indexOf(opts.path) != 0) return done();
				if (entry.type == "Directory") {
					await fs.ensureDir(extractPath);
					return done();
				}
				await fs.ensureDir(path$1.dirname(extractPath));
				const writer = opts.getWriter ? opts.getWriter({ path: extractPath }) : fs.createWriteStream(extractPath);
				entry.pipe(writer).on("error", done).on("close", function() {
					done();
				});
			} catch (err) {
				done(err);
			}
		};
		const extract = duplexer2(parser, outStream);
		parser.once("crx-header", function(crxHeader) {
			extract.crxHeader = crxHeader;
		});
		parser.pipe(outStream).on("finish", function() {
			extract.emit("close");
		});
		extract.promise = function() {
			return new Promise(function(resolve, reject) {
				extract.on("close", resolve);
				extract.on("error", reject);
			});
		};
		return extract;
	}
}));
504
var require_Decrypt = /* @__PURE__ */ __commonJSMin(((exports, module) => {
	const Int64 = require_Int64();
	let NodeStream = __require("stream");
	// Fall back to the readable-stream shim when the built-in lacks
	// Writable#destroy (very old Node versions).
	if (!NodeStream.Writable || !NodeStream.Writable.prototype.destroy) NodeStream = require_readable();
	let table;
	// Lazily build the reflected CRC-32 lookup table (poly 0xEDB88320).
	function generateTable() {
		const poly = 3988292384;
		table = [];
		for (let n = 0; n < 256; n++) {
			let c = n;
			for (let k = 0; k < 8; k++) {
				c = c & 1 ? poly ^ c >>> 1 : c >>> 1;
			}
			table[n] = c >>> 0;
		}
	}
	// One CRC-32 step over a single byte; `crc` is a 4-byte big-endian buffer.
	function crc(ch, crc) {
		if (!table) generateTable();
		if (ch.charCodeAt) ch = ch.charCodeAt(0);
		return (crc.readUInt32BE() >> 8 & 16777215 ^ table[(crc.readUInt32BE() ^ ch >>> 0) & 255]) >>> 0;
	}
	// 32-bit multiply via 16-bit halves (avoids float precision loss).
	function multiply(a, b) {
		const ah = a >> 16 & 65535;
		const al = a & 65535;
		const bh = b >> 16 & 65535;
		const bl = b & 65535;
		return ((ah * bl + al * bh & 65535) << 16 >>> 0) + al * bl;
	}
	/**
	 * Traditional PKWARE "ZipCrypto" cipher state: three 32-bit keys held in
	 * big-endian buffers, advanced one byte at a time.
	 */
	function Decrypt() {
		if (!(this instanceof Decrypt)) return new Decrypt();
		this.key0 = Buffer.allocUnsafe(4);
		this.key1 = Buffer.allocUnsafe(4);
		this.key2 = Buffer.allocUnsafe(4);
		this.key0.writeUInt32BE(305419896, 0); // 0x12345678
		this.key1.writeUInt32BE(591751049, 0); // 0x23456789
		this.key2.writeUInt32BE(878082192, 0); // 0x34567890
	}
	// Mix one byte (or one-char string) into the key schedule.
	Decrypt.prototype.update = function(h) {
		this.key0.writeUInt32BE(crc(h, this.key0));
		this.key1.writeUInt32BE((this.key0.readUInt32BE() & 255) + this.key1.readUInt32BE() >>> 0);
		// key1 = key1 * 134775813 + 1 (mod 2^32); Int64 sidesteps overflow.
		const x = new Int64(multiply(this.key1.readUInt32BE(), 134775813) + 1 & 4294967295);
		const b = Buffer.alloc(8);
		x.copy(b, 0);
		b.copy(this.key1, 0, 4, 8);
		this.key2.writeUInt32BE(crc((this.key1.readUInt32BE() >> 24 & 255) >>> 0, this.key2));
	};
	// Decrypt a single byte and advance the key state.
	Decrypt.prototype.decryptByte = function(c) {
		const k = (this.key2.readUInt32BE() | 2) >>> 0;
		c = c ^ multiply(k, k ^ 1) >> 8 & 255;
		this.update(c);
		return c;
	};
	// Transform stream that decrypts chunks in place as they flow through.
	Decrypt.prototype.stream = function() {
		const self = this;
		const out = NodeStream.Transform();
		out._transform = function(chunk, encoding, next) {
			for (let i = 0; i < chunk.length; i++) chunk[i] = self.decryptByte(chunk[i]);
			this.push(chunk);
			next();
		};
		return out;
	};
	module.exports = Decrypt;
}));
566
+ var require_unzip$1 = /* @__PURE__ */ __commonJSMin(((exports, module) => {
567
+ const Decrypt = require_Decrypt();
568
+ const PullStream = require_PullStream();
569
+ const Stream$1 = __require("stream");
570
+ const zlib = __require("zlib");
571
+ const parseExtraField = require_parseExtraField();
572
+ const parseDateTime = require_parseDateTime();
573
+ const parseBuffer = require_parseBuffer();
574
+ module.exports = function unzip(source, offset, _password, directoryVars, length) {
575
+ const file = PullStream(), entry = Stream$1.PassThrough();
576
+ const req = source.stream(offset, length);
577
+ req.pipe(file).on("error", function(e) {
578
+ entry.emit("error", e);
579
+ });
580
+ entry.vars = file.pull(30).then(function(data) {
581
+ let vars = parseBuffer.parse(data, [
582
+ ["signature", 4],
583
+ ["versionsNeededToExtract", 2],
584
+ ["flags", 2],
585
+ ["compressionMethod", 2],
586
+ ["lastModifiedTime", 2],
587
+ ["lastModifiedDate", 2],
588
+ ["crc32", 4],
589
+ ["compressedSize", 4],
590
+ ["uncompressedSize", 4],
591
+ ["fileNameLength", 2],
592
+ ["extraFieldLength", 2]
593
+ ]);
594
+ vars.lastModifiedDateTime = parseDateTime(vars.lastModifiedDate, vars.lastModifiedTime);
595
+ return file.pull(vars.fileNameLength).then(function(fileName) {
596
+ vars.fileName = fileName.toString("utf8");
597
+ return file.pull(vars.extraFieldLength);
598
+ }).then(function(extraField) {
599
+ let checkEncryption;
600
+ vars.extra = parseExtraField(extraField, vars);
601
+ if (directoryVars && directoryVars.compressedSize) vars = directoryVars;
602
+ if (vars.flags & 1) checkEncryption = file.pull(12).then(function(header) {
603
+ if (!_password) throw new Error("MISSING_PASSWORD");
604
+ const decrypt = Decrypt();
605
+ String(_password).split("").forEach(function(d) {
606
+ decrypt.update(d);
607
+ });
608
+ for (let i = 0; i < header.length; i++) header[i] = decrypt.decryptByte(header[i]);
609
+ vars.decrypt = decrypt;
610
+ vars.compressedSize -= 12;
611
+ const check = vars.flags & 8 ? vars.lastModifiedTime >> 8 & 255 : vars.crc32 >> 24 & 255;
612
+ if (header[11] !== check) throw new Error("BAD_PASSWORD");
613
+ return vars;
614
+ });
615
+ return Promise.resolve(checkEncryption).then(function() {
616
+ entry.emit("vars", vars);
617
+ return vars;
618
+ });
619
+ });
620
+ });
621
+ entry.vars.then(function(vars) {
622
+ const fileSizeKnown = !(vars.flags & 8) || vars.compressedSize > 0;
623
+ let eof;
624
+ const inflater = vars.compressionMethod ? zlib.createInflateRaw() : Stream$1.PassThrough();
625
+ if (fileSizeKnown) {
626
+ entry.size = vars.uncompressedSize;
627
+ eof = vars.compressedSize;
628
+ } else {
629
+ eof = Buffer.alloc(4);
630
+ eof.writeUInt32LE(134695760, 0);
631
+ }
632
+ let stream = file.stream(eof);
633
+ if (vars.decrypt) stream = stream.pipe(vars.decrypt.stream());
634
+ stream.pipe(inflater).on("error", function(err) {
635
+ entry.emit("error", err);
636
+ }).pipe(entry).on("finish", function() {
637
+ if (req.destroy) req.destroy();
638
+ else if (req.abort) req.abort();
639
+ else if (req.close) req.close();
640
+ else if (req.push) req.push();
641
+ else console.log("warning - unable to close stream");
642
+ });
643
+ }).catch(function(e) {
644
+ entry.emit("error", e);
645
+ });
646
+ return entry;
647
+ };
648
+ }));
649
+ var require_directory = /* @__PURE__ */ __commonJSMin(((exports, module) => {
650
+ const PullStream = require_PullStream();
651
+ const unzip = require_unzip$1();
652
+ const BufferStream = require_BufferStream();
653
+ const parseExtraField = require_parseExtraField();
654
+ const path = __require("path");
655
+ const fs = require_lib();
656
+ const parseDateTime = require_parseDateTime();
657
+ const parseBuffer = require_parseBuffer();
658
+ const Bluebird = require_bluebird();
659
+ const signature = Buffer.alloc(4);
660
+ signature.writeUInt32LE(101010256, 0);
661
+ function getCrxHeader(source) {
662
+ const sourceStream = source.stream(0).pipe(PullStream());
663
+ return sourceStream.pull(4).then(function(data) {
664
+ if (data.readUInt32LE(0) === 875721283) {
665
+ let crxHeader;
666
+ return sourceStream.pull(12).then(function(data) {
667
+ crxHeader = parseBuffer.parse(data, [
668
+ ["version", 4],
669
+ ["pubKeyLength", 4],
670
+ ["signatureLength", 4]
671
+ ]);
672
+ }).then(function() {
673
+ return sourceStream.pull(crxHeader.pubKeyLength + crxHeader.signatureLength);
674
+ }).then(function(data) {
675
+ crxHeader.publicKey = data.slice(0, crxHeader.pubKeyLength);
676
+ crxHeader.signature = data.slice(crxHeader.pubKeyLength);
677
+ crxHeader.size = 16 + crxHeader.pubKeyLength + crxHeader.signatureLength;
678
+ return crxHeader;
679
+ });
680
+ }
681
+ });
682
+ }
683
+ function getZip64CentralDirectory(source, zip64CDL) {
684
+ const d64loc = parseBuffer.parse(zip64CDL, [
685
+ ["signature", 4],
686
+ ["diskNumber", 4],
687
+ ["offsetToStartOfCentralDirectory", 8],
688
+ ["numberOfDisks", 4]
689
+ ]);
690
+ if (d64loc.signature != 117853008) throw new Error("invalid zip64 end of central dir locator signature (0x07064b50): 0x" + d64loc.signature.toString(16));
691
+ const dir64 = PullStream();
692
+ source.stream(d64loc.offsetToStartOfCentralDirectory).pipe(dir64);
693
+ return dir64.pull(56);
694
+ }
695
+ function parseZip64DirRecord(dir64record) {
696
+ const vars = parseBuffer.parse(dir64record, [
697
+ ["signature", 4],
698
+ ["sizeOfCentralDirectory", 8],
699
+ ["version", 2],
700
+ ["versionsNeededToExtract", 2],
701
+ ["diskNumber", 4],
702
+ ["diskStart", 4],
703
+ ["numberOfRecordsOnDisk", 8],
704
+ ["numberOfRecords", 8],
705
+ ["sizeOfCentralDirectory", 8],
706
+ ["offsetToStartOfCentralDirectory", 8]
707
+ ]);
708
+ if (vars.signature != 101075792) throw new Error("invalid zip64 end of central dir locator signature (0x06064b50): 0x0" + vars.signature.toString(16));
709
+ return vars;
710
+ }
711
+ module.exports = function centralDirectory(source, options) {
712
+ const endDir = PullStream();
713
+ const records = PullStream();
714
+ const tailSize = options && options.tailSize || 80;
715
+ let sourceSize, crxHeader, startOffset, vars;
716
+ if (options && options.crx) crxHeader = getCrxHeader(source);
717
+ return source.size().then(function(size) {
718
+ sourceSize = size;
719
+ source.stream(Math.max(0, size - tailSize)).on("error", function(error) {
720
+ endDir.emit("error", error);
721
+ }).pipe(endDir);
722
+ return endDir.pull(signature);
723
+ }).then(function() {
724
+ return Bluebird.props({
725
+ directory: endDir.pull(22),
726
+ crxHeader
727
+ });
728
+ }).then(function(d) {
729
+ const data = d.directory;
730
+ startOffset = d.crxHeader && d.crxHeader.size || 0;
731
+ vars = parseBuffer.parse(data, [
732
+ ["signature", 4],
733
+ ["diskNumber", 2],
734
+ ["diskStart", 2],
735
+ ["numberOfRecordsOnDisk", 2],
736
+ ["numberOfRecords", 2],
737
+ ["sizeOfCentralDirectory", 4],
738
+ ["offsetToStartOfCentralDirectory", 4],
739
+ ["commentLength", 2]
740
+ ]);
741
+ if (vars.diskNumber == 65535 || vars.numberOfRecords == 65535 || vars.offsetToStartOfCentralDirectory == 4294967295) {
742
+ const zip64CDLSize = 20;
743
+ const zip64CDLOffset = sourceSize - (tailSize - endDir.match + zip64CDLSize);
744
+ const zip64CDLStream = PullStream();
745
+ source.stream(zip64CDLOffset).pipe(zip64CDLStream);
746
+ return zip64CDLStream.pull(zip64CDLSize).then(function(d) {
747
+ return getZip64CentralDirectory(source, d);
748
+ }).then(function(dir64record) {
749
+ vars = parseZip64DirRecord(dir64record);
750
+ });
751
+ } else vars.offsetToStartOfCentralDirectory += startOffset;
752
+ }).then(function() {
753
+ if (vars.commentLength) return endDir.pull(vars.commentLength).then(function(comment) {
754
+ vars.comment = comment.toString("utf8");
755
+ });
756
+ }).then(function() {
757
+ source.stream(vars.offsetToStartOfCentralDirectory).pipe(records);
758
+ vars.extract = function(opts) {
759
+ if (!opts || !opts.path) throw new Error("PATH_MISSING");
760
+ opts.path = path.resolve(path.normalize(opts.path));
761
+ return vars.files.then(function(files) {
762
+ return Bluebird.map(files, async function(entry) {
763
+ const extractPath = path.join(opts.path, entry.path);
764
+ if (extractPath.indexOf(opts.path) != 0) return;
765
+ if (entry.type == "Directory") {
766
+ await fs.ensureDir(extractPath);
767
+ return;
768
+ }
769
+ await fs.ensureDir(path.dirname(extractPath));
770
+ const writer = opts.getWriter ? opts.getWriter({ path: extractPath }) : fs.createWriteStream(extractPath);
771
+ return new Promise(function(resolve, reject) {
772
+ entry.stream(opts.password).on("error", reject).pipe(writer).on("close", resolve).on("error", reject);
773
+ });
774
+ }, { concurrency: opts.concurrency > 1 ? opts.concurrency : 1 });
775
+ });
776
+ };
777
+ vars.files = Bluebird.mapSeries(Array(vars.numberOfRecords), function() {
778
+ return records.pull(46).then(function(data) {
779
+ const vars = parseBuffer.parse(data, [
780
+ ["signature", 4],
781
+ ["versionMadeBy", 2],
782
+ ["versionsNeededToExtract", 2],
783
+ ["flags", 2],
784
+ ["compressionMethod", 2],
785
+ ["lastModifiedTime", 2],
786
+ ["lastModifiedDate", 2],
787
+ ["crc32", 4],
788
+ ["compressedSize", 4],
789
+ ["uncompressedSize", 4],
790
+ ["fileNameLength", 2],
791
+ ["extraFieldLength", 2],
792
+ ["fileCommentLength", 2],
793
+ ["diskNumber", 2],
794
+ ["internalFileAttributes", 2],
795
+ ["externalFileAttributes", 4],
796
+ ["offsetToLocalFileHeader", 4]
797
+ ]);
798
+ vars.offsetToLocalFileHeader += startOffset;
799
+ vars.lastModifiedDateTime = parseDateTime(vars.lastModifiedDate, vars.lastModifiedTime);
800
+ return records.pull(vars.fileNameLength).then(function(fileNameBuffer) {
801
+ vars.pathBuffer = fileNameBuffer;
802
+ vars.path = fileNameBuffer.toString("utf8");
803
+ vars.isUnicode = (vars.flags & 2048) != 0;
804
+ return records.pull(vars.extraFieldLength);
805
+ }).then(function(extraField) {
806
+ vars.extra = parseExtraField(extraField, vars);
807
+ return records.pull(vars.fileCommentLength);
808
+ }).then(function(comment) {
809
+ vars.comment = comment;
810
+ vars.type = vars.uncompressedSize === 0 && /[/\\]$/.test(vars.path) ? "Directory" : "File";
811
+ const padding = options && options.padding || 1e3;
812
+ vars.stream = function(_password) {
813
+ const totalSize = 30 + padding + (vars.extraFieldLength || 0) + (vars.fileNameLength || 0) + vars.compressedSize;
814
+ return unzip(source, vars.offsetToLocalFileHeader, _password, vars, totalSize);
815
+ };
816
+ vars.buffer = function(_password) {
817
+ return BufferStream(vars.stream(_password));
818
+ };
819
+ return vars;
820
+ });
821
+ });
822
+ });
823
+ return Bluebird.props(vars);
824
+ });
825
+ };
826
+ }));
827
+ var require_Open = /* @__PURE__ */ __commonJSMin(((exports, module) => {
828
+ const fs = require_graceful_fs();
829
+ const directory = require_directory();
830
+ const Stream = __require("stream");
831
+ module.exports = {
832
+ buffer: function(buffer, options) {
833
+ return directory({
834
+ stream: function(offset, length) {
835
+ const stream = Stream.PassThrough();
836
+ const end = length ? offset + length : void 0;
837
+ stream.end(buffer.slice(offset, end));
838
+ return stream;
839
+ },
840
+ size: function() {
841
+ return Promise.resolve(buffer.length);
842
+ }
843
+ }, options);
844
+ },
845
+ file: function(filename, options) {
846
+ return directory({
847
+ stream: function(start, length) {
848
+ const end = length ? start + length : void 0;
849
+ return fs.createReadStream(filename, {
850
+ start,
851
+ end
852
+ });
853
+ },
854
+ size: function() {
855
+ return new Promise(function(resolve, reject) {
856
+ fs.stat(filename, function(err, d) {
857
+ if (err) reject(err);
858
+ else resolve(d.size);
859
+ });
860
+ });
861
+ }
862
+ }, options);
863
+ },
864
+ url: function(request, params, options) {
865
+ if (typeof params === "string") params = { url: params };
866
+ if (!params.url) throw "URL missing";
867
+ params.headers = params.headers || {};
868
+ return directory({
869
+ stream: function(offset, length) {
870
+ const options = Object.create(params);
871
+ const end = length ? offset + length : "";
872
+ options.headers = Object.create(params.headers);
873
+ options.headers.range = "bytes=" + offset + "-" + end;
874
+ return request(options);
875
+ },
876
+ size: function() {
877
+ return new Promise(function(resolve, reject) {
878
+ const req = request(params);
879
+ req.on("response", function(d) {
880
+ req.abort();
881
+ if (!d.headers["content-length"]) reject(/* @__PURE__ */ new Error("Missing content length header"));
882
+ else resolve(d.headers["content-length"]);
883
+ }).on("error", reject);
884
+ });
885
+ }
886
+ }, options);
887
+ },
888
+ s3: function(client, params, options) {
889
+ return directory({
890
+ size: function() {
891
+ return new Promise(function(resolve, reject) {
892
+ client.headObject(params, function(err, d) {
893
+ if (err) reject(err);
894
+ else resolve(d.ContentLength);
895
+ });
896
+ });
897
+ },
898
+ stream: function(offset, length) {
899
+ const d = {};
900
+ for (const key in params) d[key] = params[key];
901
+ const end = length ? offset + length : "";
902
+ d.Range = "bytes=" + offset + "-" + end;
903
+ return client.getObject(d).createReadStream();
904
+ }
905
+ }, options);
906
+ },
907
+ s3_v3: function(client, params, options) {
908
+ const { GetObjectCommand, HeadObjectCommand } = __require("@aws-sdk/client-s3");
909
+ return directory({
910
+ size: async () => {
911
+ const head = await client.send(new HeadObjectCommand({
912
+ Bucket: params.Bucket,
913
+ Key: params.Key
914
+ }));
915
+ if (!head.ContentLength) return 0;
916
+ return head.ContentLength;
917
+ },
918
+ stream: (offset, length) => {
919
+ const stream = Stream.PassThrough();
920
+ const end = length ? offset + length : "";
921
+ client.send(new GetObjectCommand({
922
+ Bucket: params.Bucket,
923
+ Key: params.Key,
924
+ Range: `bytes=${offset}-${end}`
925
+ })).then((response) => {
926
+ response.Body.pipe(stream);
927
+ }).catch((error) => {
928
+ stream.emit("error", error);
929
+ });
930
+ return stream;
931
+ }
932
+ }, options);
933
+ },
934
+ custom: function(source, options) {
935
+ return directory(source, options);
936
+ }
937
+ };
938
+ }));
939
var require_unzip = /* @__PURE__ */ __commonJSMin(((exports) => {
	// Aggregate entry point: expose the four public APIs of the bundle.
	Object.assign(exports, {
		Parse: require_parse(),
		ParseOne: require_parseOne(),
		Extract: require_extract(),
		Open: require_Open()
	});
}));
945
+ export { require_unzip as t };