@squidcloud/cli 1.0.409 → 1.0.411

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -2249,6 +2249,196 @@ isStream.transform = function (stream) {
2249
2249
  };
2250
2250
 
2251
2251
 
2252
+ /***/ }),
2253
+
2254
+ /***/ 1297:
2255
+ /***/ ((module, __unused_webpack_exports, __webpack_require__) => {
2256
+
2257
+ const version = +(process.versions ? process.versions.node : "").split(".")[0] || 0;
2258
+
2259
+ module.exports = function (/*Buffer*/ inbuf, /*number*/ expectedLength) {
2260
+ var zlib = __webpack_require__(3106);
2261
+ const option = version >= 15 && expectedLength > 0 ? { maxOutputLength: expectedLength } : {};
2262
+
2263
+ return {
2264
+ inflate: function () {
2265
+ return zlib.inflateRawSync(inbuf, option);
2266
+ },
2267
+
2268
+ inflateAsync: function (/*Function*/ callback) {
2269
+ var tmp = zlib.createInflateRaw(option),
2270
+ parts = [],
2271
+ total = 0;
2272
+ tmp.on("data", function (data) {
2273
+ parts.push(data);
2274
+ total += data.length;
2275
+ });
2276
+ tmp.on("end", function () {
2277
+ var buf = Buffer.alloc(total),
2278
+ written = 0;
2279
+ buf.fill(0);
2280
+ for (var i = 0; i < parts.length; i++) {
2281
+ var part = parts[i];
2282
+ part.copy(buf, written);
2283
+ written += part.length;
2284
+ }
2285
+ callback && callback(buf);
2286
+ });
2287
+ tmp.end(inbuf);
2288
+ }
2289
+ };
2290
+ };
2291
+
2292
+
2293
+ /***/ }),
2294
+
2295
+ /***/ 1307:
2296
+ /***/ ((module) => {
2297
+
2298
+ module.exports = {
2299
+ /* The local file header */
2300
+ LOCHDR : 30, // LOC header size
2301
+ LOCSIG : 0x04034b50, // "PK\003\004"
2302
+ LOCVER : 4, // version needed to extract
2303
+ LOCFLG : 6, // general purpose bit flag
2304
+ LOCHOW : 8, // compression method
2305
+ LOCTIM : 10, // modification time (2 bytes time, 2 bytes date)
2306
+ LOCCRC : 14, // uncompressed file crc-32 value
2307
+ LOCSIZ : 18, // compressed size
2308
+ LOCLEN : 22, // uncompressed size
2309
+ LOCNAM : 26, // filename length
2310
+ LOCEXT : 28, // extra field length
2311
+
2312
+ /* The Data descriptor */
2313
+ EXTSIG : 0x08074b50, // "PK\007\008"
2314
+ EXTHDR : 16, // EXT header size
2315
+ EXTCRC : 4, // uncompressed file crc-32 value
2316
+ EXTSIZ : 8, // compressed size
2317
+ EXTLEN : 12, // uncompressed size
2318
+
2319
+ /* The central directory file header */
2320
+ CENHDR : 46, // CEN header size
2321
+ CENSIG : 0x02014b50, // "PK\001\002"
2322
+ CENVEM : 4, // version made by
2323
+ CENVER : 6, // version needed to extract
2324
+ CENFLG : 8, // encrypt, decrypt flags
2325
+ CENHOW : 10, // compression method
2326
+ CENTIM : 12, // modification time (2 bytes time, 2 bytes date)
2327
+ CENCRC : 16, // uncompressed file crc-32 value
2328
+ CENSIZ : 20, // compressed size
2329
+ CENLEN : 24, // uncompressed size
2330
+ CENNAM : 28, // filename length
2331
+ CENEXT : 30, // extra field length
2332
+ CENCOM : 32, // file comment length
2333
+ CENDSK : 34, // volume number start
2334
+ CENATT : 36, // internal file attributes
2335
+ CENATX : 38, // external file attributes (host system dependent)
2336
+ CENOFF : 42, // LOC header offset
2337
+
2338
+ /* The entries in the end of central directory */
2339
+ ENDHDR : 22, // END header size
2340
+ ENDSIG : 0x06054b50, // "PK\005\006"
2341
+ ENDSUB : 8, // number of entries on this disk
2342
+ ENDTOT : 10, // total number of entries
2343
+ ENDSIZ : 12, // central directory size in bytes
2344
+ ENDOFF : 16, // offset of first CEN header
2345
+ ENDCOM : 20, // zip file comment length
2346
+
2347
+ END64HDR : 20, // zip64 END header size
2348
+ END64SIG : 0x07064b50, // zip64 Locator signature, "PK\006\007"
2349
+ END64START : 4, // number of the disk with the start of the zip64
2350
+ END64OFF : 8, // relative offset of the zip64 end of central directory
2351
+ END64NUMDISKS : 16, // total number of disks
2352
+
2353
+ ZIP64SIG : 0x06064b50, // zip64 signature, "PK\006\006"
2354
+ ZIP64HDR : 56, // zip64 record minimum size
2355
+ ZIP64LEAD : 12, // leading bytes at the start of the record, not counted by the value stored in ZIP64SIZE
2356
+ ZIP64SIZE : 4, // zip64 size of the central directory record
2357
+ ZIP64VEM : 12, // zip64 version made by
2358
+ ZIP64VER : 14, // zip64 version needed to extract
2359
+ ZIP64DSK : 16, // zip64 number of this disk
2360
+ ZIP64DSKDIR : 20, // number of the disk with the start of the record directory
2361
+ ZIP64SUB : 24, // number of entries on this disk
2362
+ ZIP64TOT : 32, // total number of entries
2363
+ ZIP64SIZB : 40, // zip64 central directory size in bytes
2364
+ ZIP64OFF : 48, // offset of start of central directory with respect to the starting disk number
2365
+ ZIP64EXTRA : 56, // extensible data sector
2366
+
2367
+ /* Compression methods */
2368
+ STORED : 0, // no compression
2369
+ SHRUNK : 1, // shrunk
2370
+ REDUCED1 : 2, // reduced with compression factor 1
2371
+ REDUCED2 : 3, // reduced with compression factor 2
2372
+ REDUCED3 : 4, // reduced with compression factor 3
2373
+ REDUCED4 : 5, // reduced with compression factor 4
2374
+ IMPLODED : 6, // imploded
2375
+ // 7 reserved for Tokenizing compression algorithm
2376
+ DEFLATED : 8, // deflated
2377
+ ENHANCED_DEFLATED: 9, // enhanced deflated
2378
+ PKWARE : 10,// PKWare DCL imploded
2379
+ // 11 reserved by PKWARE
2380
+ BZIP2 : 12, // compressed using BZIP2
2381
+ // 13 reserved by PKWARE
2382
+ LZMA : 14, // LZMA
2383
+ // 15-17 reserved by PKWARE
2384
+ IBM_TERSE : 18, // compressed using IBM TERSE
2385
+ IBM_LZ77 : 19, // IBM LZ77 z
2386
+ AES_ENCRYPT : 99, // WinZIP AES encryption method
2387
+
2388
+ /* General purpose bit flag */
2389
+ // values can obtained with expression 2**bitnr
2390
+ FLG_ENC : 1, // Bit 0: encrypted file
2391
+ FLG_COMP1 : 2, // Bit 1, compression option
2392
+ FLG_COMP2 : 4, // Bit 2, compression option
2393
+ FLG_DESC : 8, // Bit 3, data descriptor
2394
+ FLG_ENH : 16, // Bit 4, enhanced deflating
2395
+ FLG_PATCH : 32, // Bit 5, indicates that the file is compressed patched data.
2396
+ FLG_STR : 64, // Bit 6, strong encryption (patented)
2397
+ // Bits 7-10: Currently unused.
2398
+ FLG_EFS : 2048, // Bit 11: Language encoding flag (EFS)
2399
+ // Bit 12: Reserved by PKWARE for enhanced compression.
2400
+ // Bit 13: encrypted the Central Directory (patented).
2401
+ // Bits 14-15: Reserved by PKWARE.
2402
+ FLG_MSK : 4096, // mask header values
2403
+
2404
+ /* Load type */
2405
+ FILE : 2,
2406
+ BUFFER : 1,
2407
+ NONE : 0,
2408
+
2409
+ /* 4.5 Extensible data fields */
2410
+ EF_ID : 0,
2411
+ EF_SIZE : 2,
2412
+
2413
+ /* Header IDs */
2414
+ ID_ZIP64 : 0x0001,
2415
+ ID_AVINFO : 0x0007,
2416
+ ID_PFS : 0x0008,
2417
+ ID_OS2 : 0x0009,
2418
+ ID_NTFS : 0x000a,
2419
+ ID_OPENVMS : 0x000c,
2420
+ ID_UNIX : 0x000d,
2421
+ ID_FORK : 0x000e,
2422
+ ID_PATCH : 0x000f,
2423
+ ID_X509_PKCS7 : 0x0014,
2424
+ ID_X509_CERTID_F : 0x0015,
2425
+ ID_X509_CERTID_C : 0x0016,
2426
+ ID_STRONGENC : 0x0017,
2427
+ ID_RECORD_MGT : 0x0018,
2428
+ ID_X509_PKCS7_RL : 0x0019,
2429
+ ID_IBM1 : 0x0065,
2430
+ ID_IBM2 : 0x0066,
2431
+ ID_POSZIP : 0x4690,
2432
+
2433
+ EF_ZIP64_OR_32 : 0xffffffff,
2434
+ EF_ZIP64_OR_16 : 0xffff,
2435
+ EF_ZIP64_SUNCOMP : 0,
2436
+ EF_ZIP64_SCOMP : 8,
2437
+ EF_ZIP64_RHO : 16,
2438
+ EF_ZIP64_DSN : 24
2439
+ };
2440
+
2441
+
2252
2442
  /***/ }),
2253
2443
 
2254
2444
  /***/ 1334:
@@ -2299,6 +2489,14 @@ exports.extract = __webpack_require__(7810)
2299
2489
  exports.pack = __webpack_require__(642)
2300
2490
 
2301
2491
 
2492
+ /***/ }),
2493
+
2494
+ /***/ 1421:
2495
+ /***/ ((module) => {
2496
+
2497
+ "use strict";
2498
+ module.exports = require("node:child_process");
2499
+
2302
2500
  /***/ }),
2303
2501
 
2304
2502
  /***/ 1431:
@@ -9387,6 +9585,52 @@ module.exports = class CachedInputFileSystem {
9387
9585
  };
9388
9586
 
9389
9587
 
9588
+ /***/ }),
9589
+
9590
+ /***/ 3420:
9591
+ /***/ ((__unused_webpack_module, exports) => {
9592
+
9593
+ "use strict";
9594
+
9595
+ Object.defineProperty(exports, "__esModule", ({ value: true }));
9596
+ exports.CONNECTOR_METADATA_JSON_FILE = exports.CONNECTOR_DEV_PATH = exports.CONNECTOR_IDS = void 0;
9597
+ /**
9598
+ * List of all connector package names.
9599
+ * To prevent automatic discovery && latest version sync in 'prod' add you connector package name to
9600
+ * 'CONNECTOR_PACKAGES_TO_EXCLUDE_FROM_AUTO_SYNC' in ConnectorsService (console-backend).
9601
+ */
9602
+ exports.CONNECTOR_IDS = [
9603
+ 'confluence',
9604
+ 'cotomi',
9605
+ 'essentials',
9606
+ 'google_calendar',
9607
+ 'hubspot',
9608
+ 'jira',
9609
+ 'mail',
9610
+ 'salesforce',
9611
+ 'servicenow',
9612
+ 'slack',
9613
+ 'zendesk',
9614
+ ];
9615
+ /**
9616
+ * Maps connector IDs to their custom directory structures for local development, if needed
9617
+ */
9618
+ exports.CONNECTOR_DEV_PATH = {
9619
+ google_calendar: {
9620
+ localDevPath: 'googlecalendar/backend/dist',
9621
+ },
9622
+ mail: {
9623
+ localDevPath: 'mail/backend/dist',
9624
+ },
9625
+ slack: {
9626
+ localDevPath: 'slack/backend/dist',
9627
+ },
9628
+ // Other connectors use the default: connectors/{id}/dist
9629
+ };
9630
+ /** A file inside a connector package that contains package metadata. */
9631
+ exports.CONNECTOR_METADATA_JSON_FILE = 'connector-metadata.json';
9632
+
9633
+
9390
9634
  /***/ }),
9391
9635
 
9392
9636
  /***/ 3443:
@@ -10456,6 +10700,459 @@ function defaultCallback(err) {
10456
10700
  }
10457
10701
 
10458
10702
 
10703
+ /***/ }),
10704
+
10705
+ /***/ 3700:
10706
+ /***/ ((module, __unused_webpack_exports, __webpack_require__) => {
10707
+
10708
+ const ZipEntry = __webpack_require__(8692);
10709
+ const Headers = __webpack_require__(4072);
10710
+ const Utils = __webpack_require__(5178);
10711
+
10712
+ module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) {
10713
+ var entryList = [],
10714
+ entryTable = {},
10715
+ _comment = Buffer.alloc(0),
10716
+ mainHeader = new Headers.MainHeader(),
10717
+ loadedEntries = false;
10718
+ var password = null;
10719
+ const temporary = new Set();
10720
+
10721
+ // assign options
10722
+ const opts = options;
10723
+
10724
+ const { noSort, decoder } = opts;
10725
+
10726
+ if (inBuffer) {
10727
+ // is a memory buffer
10728
+ readMainHeader(opts.readEntries);
10729
+ } else {
10730
+ // none. is a new file
10731
+ loadedEntries = true;
10732
+ }
10733
+
10734
+ function makeTemporaryFolders() {
10735
+ const foldersList = new Set();
10736
+
10737
+ // Make list of all folders in file
10738
+ for (const elem of Object.keys(entryTable)) {
10739
+ const elements = elem.split("/");
10740
+ elements.pop(); // filename
10741
+ if (!elements.length) continue; // no folders
10742
+ for (let i = 0; i < elements.length; i++) {
10743
+ const sub = elements.slice(0, i + 1).join("/") + "/";
10744
+ foldersList.add(sub);
10745
+ }
10746
+ }
10747
+
10748
+ // create missing folders as temporary
10749
+ for (const elem of foldersList) {
10750
+ if (!(elem in entryTable)) {
10751
+ const tempfolder = new ZipEntry(opts);
10752
+ tempfolder.entryName = elem;
10753
+ tempfolder.attr = 0x10;
10754
+ tempfolder.temporary = true;
10755
+ entryList.push(tempfolder);
10756
+ entryTable[tempfolder.entryName] = tempfolder;
10757
+ temporary.add(tempfolder);
10758
+ }
10759
+ }
10760
+ }
10761
+
10762
+ function readEntries() {
10763
+ loadedEntries = true;
10764
+ entryTable = {};
10765
+ if (mainHeader.diskEntries > (inBuffer.length - mainHeader.offset) / Utils.Constants.CENHDR) {
10766
+ throw Utils.Errors.DISK_ENTRY_TOO_LARGE();
10767
+ }
10768
+ entryList = new Array(mainHeader.diskEntries); // total number of entries
10769
+ var index = mainHeader.offset; // offset of first CEN header
10770
+ for (var i = 0; i < entryList.length; i++) {
10771
+ var tmp = index,
10772
+ entry = new ZipEntry(opts, inBuffer);
10773
+ entry.header = inBuffer.slice(tmp, (tmp += Utils.Constants.CENHDR));
10774
+
10775
+ entry.entryName = inBuffer.slice(tmp, (tmp += entry.header.fileNameLength));
10776
+
10777
+ if (entry.header.extraLength) {
10778
+ entry.extra = inBuffer.slice(tmp, (tmp += entry.header.extraLength));
10779
+ }
10780
+
10781
+ if (entry.header.commentLength) entry.comment = inBuffer.slice(tmp, tmp + entry.header.commentLength);
10782
+
10783
+ index += entry.header.centralHeaderSize;
10784
+
10785
+ entryList[i] = entry;
10786
+ entryTable[entry.entryName] = entry;
10787
+ }
10788
+ temporary.clear();
10789
+ makeTemporaryFolders();
10790
+ }
10791
+
10792
+ function readMainHeader(/*Boolean*/ readNow) {
10793
+ var i = inBuffer.length - Utils.Constants.ENDHDR, // END header size
10794
+ max = Math.max(0, i - 0xffff), // 0xFFFF is the max zip file comment length
10795
+ n = max,
10796
+ endStart = inBuffer.length,
10797
+ endOffset = -1, // Start offset of the END header
10798
+ commentEnd = 0;
10799
+
10800
+ // option to search header form entire file
10801
+ const trailingSpace = typeof opts.trailingSpace === "boolean" ? opts.trailingSpace : false;
10802
+ if (trailingSpace) max = 0;
10803
+
10804
+ for (i; i >= n; i--) {
10805
+ if (inBuffer[i] !== 0x50) continue; // quick check that the byte is 'P'
10806
+ if (inBuffer.readUInt32LE(i) === Utils.Constants.ENDSIG) {
10807
+ // "PK\005\006"
10808
+ endOffset = i;
10809
+ commentEnd = i;
10810
+ endStart = i + Utils.Constants.ENDHDR;
10811
+ // We already found a regular signature, let's look just a bit further to check if there's any zip64 signature
10812
+ n = i - Utils.Constants.END64HDR;
10813
+ continue;
10814
+ }
10815
+
10816
+ if (inBuffer.readUInt32LE(i) === Utils.Constants.END64SIG) {
10817
+ // Found a zip64 signature, let's continue reading the whole zip64 record
10818
+ n = max;
10819
+ continue;
10820
+ }
10821
+
10822
+ if (inBuffer.readUInt32LE(i) === Utils.Constants.ZIP64SIG) {
10823
+ // Found the zip64 record, let's determine it's size
10824
+ endOffset = i;
10825
+ endStart = i + Utils.readBigUInt64LE(inBuffer, i + Utils.Constants.ZIP64SIZE) + Utils.Constants.ZIP64LEAD;
10826
+ break;
10827
+ }
10828
+ }
10829
+
10830
+ if (endOffset == -1) throw Utils.Errors.INVALID_FORMAT();
10831
+
10832
+ mainHeader.loadFromBinary(inBuffer.slice(endOffset, endStart));
10833
+ if (mainHeader.commentLength) {
10834
+ _comment = inBuffer.slice(commentEnd + Utils.Constants.ENDHDR);
10835
+ }
10836
+ if (readNow) readEntries();
10837
+ }
10838
+
10839
+ function sortEntries() {
10840
+ if (entryList.length > 1 && !noSort) {
10841
+ entryList.sort((a, b) => a.entryName.toLowerCase().localeCompare(b.entryName.toLowerCase()));
10842
+ }
10843
+ }
10844
+
10845
+ return {
10846
+ /**
10847
+ * Returns an array of ZipEntry objects existent in the current opened archive
10848
+ * @return Array
10849
+ */
10850
+ get entries() {
10851
+ if (!loadedEntries) {
10852
+ readEntries();
10853
+ }
10854
+ return entryList.filter((e) => !temporary.has(e));
10855
+ },
10856
+
10857
+ /**
10858
+ * Archive comment
10859
+ * @return {String}
10860
+ */
10861
+ get comment() {
10862
+ return decoder.decode(_comment);
10863
+ },
10864
+ set comment(val) {
10865
+ _comment = Utils.toBuffer(val, decoder.encode);
10866
+ mainHeader.commentLength = _comment.length;
10867
+ },
10868
+
10869
+ getEntryCount: function () {
10870
+ if (!loadedEntries) {
10871
+ return mainHeader.diskEntries;
10872
+ }
10873
+
10874
+ return entryList.length;
10875
+ },
10876
+
10877
+ forEach: function (callback) {
10878
+ this.entries.forEach(callback);
10879
+ },
10880
+
10881
+ /**
10882
+ * Returns a reference to the entry with the given name or null if entry is inexistent
10883
+ *
10884
+ * @param entryName
10885
+ * @return ZipEntry
10886
+ */
10887
+ getEntry: function (/*String*/ entryName) {
10888
+ if (!loadedEntries) {
10889
+ readEntries();
10890
+ }
10891
+ return entryTable[entryName] || null;
10892
+ },
10893
+
10894
+ /**
10895
+ * Adds the given entry to the entry list
10896
+ *
10897
+ * @param entry
10898
+ */
10899
+ setEntry: function (/*ZipEntry*/ entry) {
10900
+ if (!loadedEntries) {
10901
+ readEntries();
10902
+ }
10903
+ entryList.push(entry);
10904
+ entryTable[entry.entryName] = entry;
10905
+ mainHeader.totalEntries = entryList.length;
10906
+ },
10907
+
10908
+ /**
10909
+ * Removes the file with the given name from the entry list.
10910
+ *
10911
+ * If the entry is a directory, then all nested files and directories will be removed
10912
+ * @param entryName
10913
+ * @returns {void}
10914
+ */
10915
+ deleteFile: function (/*String*/ entryName, withsubfolders = true) {
10916
+ if (!loadedEntries) {
10917
+ readEntries();
10918
+ }
10919
+ const entry = entryTable[entryName];
10920
+ const list = this.getEntryChildren(entry, withsubfolders).map((child) => child.entryName);
10921
+
10922
+ list.forEach(this.deleteEntry);
10923
+ },
10924
+
10925
+ /**
10926
+ * Removes the entry with the given name from the entry list.
10927
+ *
10928
+ * @param {string} entryName
10929
+ * @returns {void}
10930
+ */
10931
+ deleteEntry: function (/*String*/ entryName) {
10932
+ if (!loadedEntries) {
10933
+ readEntries();
10934
+ }
10935
+ const entry = entryTable[entryName];
10936
+ const index = entryList.indexOf(entry);
10937
+ if (index >= 0) {
10938
+ entryList.splice(index, 1);
10939
+ delete entryTable[entryName];
10940
+ mainHeader.totalEntries = entryList.length;
10941
+ }
10942
+ },
10943
+
10944
+ /**
10945
+ * Iterates and returns all nested files and directories of the given entry
10946
+ *
10947
+ * @param entry
10948
+ * @return Array
10949
+ */
10950
+ getEntryChildren: function (/*ZipEntry*/ entry, subfolders = true) {
10951
+ if (!loadedEntries) {
10952
+ readEntries();
10953
+ }
10954
+ if (typeof entry === "object") {
10955
+ if (entry.isDirectory && subfolders) {
10956
+ const list = [];
10957
+ const name = entry.entryName;
10958
+
10959
+ for (const zipEntry of entryList) {
10960
+ if (zipEntry.entryName.startsWith(name)) {
10961
+ list.push(zipEntry);
10962
+ }
10963
+ }
10964
+ return list;
10965
+ } else {
10966
+ return [entry];
10967
+ }
10968
+ }
10969
+ return [];
10970
+ },
10971
+
10972
+ /**
10973
+ * How many child elements entry has
10974
+ *
10975
+ * @param {ZipEntry} entry
10976
+ * @return {integer}
10977
+ */
10978
+ getChildCount: function (entry) {
10979
+ if (entry && entry.isDirectory) {
10980
+ const list = this.getEntryChildren(entry);
10981
+ return list.includes(entry) ? list.length - 1 : list.length;
10982
+ }
10983
+ return 0;
10984
+ },
10985
+
10986
+ /**
10987
+ * Returns the zip file
10988
+ *
10989
+ * @return Buffer
10990
+ */
10991
+ compressToBuffer: function () {
10992
+ if (!loadedEntries) {
10993
+ readEntries();
10994
+ }
10995
+ sortEntries();
10996
+
10997
+ const dataBlock = [];
10998
+ const headerBlocks = [];
10999
+ let totalSize = 0;
11000
+ let dindex = 0;
11001
+
11002
+ mainHeader.size = 0;
11003
+ mainHeader.offset = 0;
11004
+ let totalEntries = 0;
11005
+
11006
+ for (const entry of this.entries) {
11007
+ // compress data and set local and entry header accordingly. Reason why is called first
11008
+ const compressedData = entry.getCompressedData();
11009
+ entry.header.offset = dindex;
11010
+
11011
+ // 1. construct local header
11012
+ const localHeader = entry.packLocalHeader();
11013
+
11014
+ // 2. offsets
11015
+ const dataLength = localHeader.length + compressedData.length;
11016
+ dindex += dataLength;
11017
+
11018
+ // 3. store values in sequence
11019
+ dataBlock.push(localHeader);
11020
+ dataBlock.push(compressedData);
11021
+
11022
+ // 4. construct central header
11023
+ const centralHeader = entry.packCentralHeader();
11024
+ headerBlocks.push(centralHeader);
11025
+ // 5. update main header
11026
+ mainHeader.size += centralHeader.length;
11027
+ totalSize += dataLength + centralHeader.length;
11028
+ totalEntries++;
11029
+ }
11030
+
11031
+ totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length
11032
+ // point to end of data and beginning of central directory first record
11033
+ mainHeader.offset = dindex;
11034
+ mainHeader.totalEntries = totalEntries;
11035
+
11036
+ dindex = 0;
11037
+ const outBuffer = Buffer.alloc(totalSize);
11038
+ // write data blocks
11039
+ for (const content of dataBlock) {
11040
+ content.copy(outBuffer, dindex);
11041
+ dindex += content.length;
11042
+ }
11043
+
11044
+ // write central directory entries
11045
+ for (const content of headerBlocks) {
11046
+ content.copy(outBuffer, dindex);
11047
+ dindex += content.length;
11048
+ }
11049
+
11050
+ // write main header
11051
+ const mh = mainHeader.toBinary();
11052
+ if (_comment) {
11053
+ _comment.copy(mh, Utils.Constants.ENDHDR); // add zip file comment
11054
+ }
11055
+ mh.copy(outBuffer, dindex);
11056
+
11057
+ // Since we update entry and main header offsets,
11058
+ // they are no longer valid and we have to reset content
11059
+ // (Issue 64)
11060
+
11061
+ inBuffer = outBuffer;
11062
+ loadedEntries = false;
11063
+
11064
+ return outBuffer;
11065
+ },
11066
+
11067
+ toAsyncBuffer: function (/*Function*/ onSuccess, /*Function*/ onFail, /*Function*/ onItemStart, /*Function*/ onItemEnd) {
11068
+ try {
11069
+ if (!loadedEntries) {
11070
+ readEntries();
11071
+ }
11072
+ sortEntries();
11073
+
11074
+ const dataBlock = [];
11075
+ const centralHeaders = [];
11076
+ let totalSize = 0;
11077
+ let dindex = 0;
11078
+ let totalEntries = 0;
11079
+
11080
+ mainHeader.size = 0;
11081
+ mainHeader.offset = 0;
11082
+
11083
+ const compress2Buffer = function (entryLists) {
11084
+ if (entryLists.length > 0) {
11085
+ const entry = entryLists.shift();
11086
+ const name = entry.entryName + entry.extra.toString();
11087
+ if (onItemStart) onItemStart(name);
11088
+ entry.getCompressedDataAsync(function (compressedData) {
11089
+ if (onItemEnd) onItemEnd(name);
11090
+ entry.header.offset = dindex;
11091
+
11092
+ // 1. construct local header
11093
+ const localHeader = entry.packLocalHeader();
11094
+
11095
+ // 2. offsets
11096
+ const dataLength = localHeader.length + compressedData.length;
11097
+ dindex += dataLength;
11098
+
11099
+ // 3. store values in sequence
11100
+ dataBlock.push(localHeader);
11101
+ dataBlock.push(compressedData);
11102
+
11103
+ // central header
11104
+ const centalHeader = entry.packCentralHeader();
11105
+ centralHeaders.push(centalHeader);
11106
+ mainHeader.size += centalHeader.length;
11107
+ totalSize += dataLength + centalHeader.length;
11108
+ totalEntries++;
11109
+
11110
+ compress2Buffer(entryLists);
11111
+ });
11112
+ } else {
11113
+ totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length
11114
+ // point to end of data and beginning of central directory first record
11115
+ mainHeader.offset = dindex;
11116
+ mainHeader.totalEntries = totalEntries;
11117
+
11118
+ dindex = 0;
11119
+ const outBuffer = Buffer.alloc(totalSize);
11120
+ dataBlock.forEach(function (content) {
11121
+ content.copy(outBuffer, dindex); // write data blocks
11122
+ dindex += content.length;
11123
+ });
11124
+ centralHeaders.forEach(function (content) {
11125
+ content.copy(outBuffer, dindex); // write central directory entries
11126
+ dindex += content.length;
11127
+ });
11128
+
11129
+ const mh = mainHeader.toBinary();
11130
+ if (_comment) {
11131
+ _comment.copy(mh, Utils.Constants.ENDHDR); // add zip file comment
11132
+ }
11133
+
11134
+ mh.copy(outBuffer, dindex); // write main header
11135
+
11136
+ // Since we update entry and main header offsets, they are no
11137
+ // longer valid and we have to reset content using our new buffer
11138
+ // (Issue 64)
11139
+
11140
+ inBuffer = outBuffer;
11141
+ loadedEntries = false;
11142
+
11143
+ onSuccess(outBuffer);
11144
+ }
11145
+ };
11146
+
11147
+ compress2Buffer(Array.from(this.entries));
11148
+ } catch (e) {
11149
+ onFail(e);
11150
+ }
11151
+ }
11152
+ };
11153
+ };
11154
+
11155
+
10459
11156
  /***/ }),
10460
11157
 
10461
11158
  /***/ 3702:
@@ -11455,98 +12152,450 @@ module.exports = RootsPlugin;
11455
12152
 
11456
12153
  /***/ }),
11457
12154
 
11458
- /***/ 4153:
12155
+ /***/ 4072:
12156
+ /***/ ((__unused_webpack_module, exports, __webpack_require__) => {
12157
+
12158
+ exports.EntryHeader = __webpack_require__(5029);
12159
+ exports.MainHeader = __webpack_require__(8398);
12160
+
12161
+
12162
+ /***/ }),
12163
+
12164
+ /***/ 4152:
11459
12165
  /***/ ((module, __unused_webpack_exports, __webpack_require__) => {
11460
12166
 
11461
- "use strict";
11462
- /*
11463
- MIT License http://www.opensource.org/licenses/mit-license.php
11464
- Author Tobias Koppers @sokra
11465
- */
12167
+ const fsystem = __webpack_require__(9896);
12168
+ const pth = __webpack_require__(6928);
12169
+ const Constants = __webpack_require__(1307);
12170
+ const Errors = __webpack_require__(7583);
12171
+ const isWin = typeof process === "object" && "win32" === process.platform;
11466
12172
 
12173
+ const is_Obj = (obj) => typeof obj === "object" && obj !== null;
11467
12174
 
12175
+ // generate CRC32 lookup table
12176
+ const crcTable = new Uint32Array(256).map((t, c) => {
12177
+ for (let k = 0; k < 8; k++) {
12178
+ if ((c & 1) !== 0) {
12179
+ c = 0xedb88320 ^ (c >>> 1);
12180
+ } else {
12181
+ c >>>= 1;
12182
+ }
12183
+ }
12184
+ return c >>> 0;
12185
+ });
11468
12186
 
11469
- const DescriptionFileUtils = __webpack_require__(5877);
12187
+ // UTILS functions
11470
12188
 
11471
- /** @typedef {import("./Resolver")} Resolver */
11472
- /** @typedef {import("./Resolver").ResolveRequest} ResolveRequest */
11473
- /** @typedef {import("./Resolver").ResolveStepHook} ResolveStepHook */
12189
+ function Utils(opts) {
12190
+ this.sep = pth.sep;
12191
+ this.fs = fsystem;
11474
12192
 
11475
- module.exports = class DescriptionFilePlugin {
11476
- /**
11477
- * @param {string | ResolveStepHook} source source
11478
- * @param {string[]} filenames filenames
11479
- * @param {boolean} pathIsFile pathIsFile
11480
- * @param {string | ResolveStepHook} target target
11481
- */
11482
- constructor(source, filenames, pathIsFile, target) {
11483
- this.source = source;
11484
- this.filenames = filenames;
11485
- this.pathIsFile = pathIsFile;
11486
- this.target = target;
11487
- }
12193
+ if (is_Obj(opts)) {
12194
+ // custom filesystem
12195
+ if (is_Obj(opts.fs) && typeof opts.fs.statSync === "function") {
12196
+ this.fs = opts.fs;
12197
+ }
12198
+ }
12199
+ }
11488
12200
 
11489
- /**
11490
- * @param {Resolver} resolver the resolver
11491
- * @returns {void}
11492
- */
11493
- apply(resolver) {
11494
- const target = resolver.ensureHook(this.target);
11495
- resolver
11496
- .getHook(this.source)
11497
- .tapAsync(
11498
- "DescriptionFilePlugin",
11499
- (request, resolveContext, callback) => {
11500
- const { path } = request;
11501
- if (!path) return callback();
11502
- const directory = this.pathIsFile
11503
- ? DescriptionFileUtils.cdUp(path)
11504
- : path;
11505
- if (!directory) return callback();
11506
- DescriptionFileUtils.loadDescriptionFile(
11507
- resolver,
11508
- directory,
11509
- this.filenames,
11510
- request.descriptionFilePath
11511
- ? {
11512
- path: request.descriptionFilePath,
11513
- content: request.descriptionFileData,
11514
- directory:
11515
- /** @type {string} */
11516
- (request.descriptionFileRoot),
11517
- }
11518
- : undefined,
11519
- resolveContext,
11520
- (err, result) => {
11521
- if (err) return callback(err);
11522
- if (!result) {
11523
- if (resolveContext.log) {
11524
- resolveContext.log(
11525
- `No description file found in ${directory} or above`,
11526
- );
11527
- }
11528
- return callback();
11529
- }
11530
- const relativePath = `.${path
11531
- .slice(result.directory.length)
11532
- .replace(/\\/g, "/")}`;
11533
- /** @type {ResolveRequest} */
11534
- const obj = {
11535
- ...request,
11536
- descriptionFilePath: result.path,
11537
- descriptionFileData: result.content,
11538
- descriptionFileRoot: result.directory,
11539
- relativePath,
11540
- };
11541
- resolver.doResolve(
11542
- target,
11543
- obj,
11544
- `using description file: ${result.path} (relative path: ${relativePath})`,
11545
- resolveContext,
11546
- (err, result) => {
11547
- if (err) return callback(err);
12201
+ module.exports = Utils;
11548
12202
 
11549
- // Don't allow other processing
12203
+ // INSTANTIABLE functions
12204
+
12205
+ Utils.prototype.makeDir = function (/*String*/ folder) {
12206
+ const self = this;
12207
+
12208
+ // Sync - make directories tree
12209
+ function mkdirSync(/*String*/ fpath) {
12210
+ let resolvedPath = fpath.split(self.sep)[0];
12211
+ fpath.split(self.sep).forEach(function (name) {
12212
+ if (!name || name.substr(-1, 1) === ":") return;
12213
+ resolvedPath += self.sep + name;
12214
+ var stat;
12215
+ try {
12216
+ stat = self.fs.statSync(resolvedPath);
12217
+ } catch (e) {
12218
+ self.fs.mkdirSync(resolvedPath);
12219
+ }
12220
+ if (stat && stat.isFile()) throw Errors.FILE_IN_THE_WAY(`"${resolvedPath}"`);
12221
+ });
12222
+ }
12223
+
12224
+ mkdirSync(folder);
12225
+ };
12226
+
12227
+ Utils.prototype.writeFileTo = function (/*String*/ path, /*Buffer*/ content, /*Boolean*/ overwrite, /*Number*/ attr) {
12228
+ const self = this;
12229
+ if (self.fs.existsSync(path)) {
12230
+ if (!overwrite) return false; // cannot overwrite
12231
+
12232
+ var stat = self.fs.statSync(path);
12233
+ if (stat.isDirectory()) {
12234
+ return false;
12235
+ }
12236
+ }
12237
+ var folder = pth.dirname(path);
12238
+ if (!self.fs.existsSync(folder)) {
12239
+ self.makeDir(folder);
12240
+ }
12241
+
12242
+ var fd;
12243
+ try {
12244
+ fd = self.fs.openSync(path, "w", 0o666); // 0666
12245
+ } catch (e) {
12246
+ self.fs.chmodSync(path, 0o666);
12247
+ fd = self.fs.openSync(path, "w", 0o666);
12248
+ }
12249
+ if (fd) {
12250
+ try {
12251
+ self.fs.writeSync(fd, content, 0, content.length, 0);
12252
+ } finally {
12253
+ self.fs.closeSync(fd);
12254
+ }
12255
+ }
12256
+ self.fs.chmodSync(path, attr || 0o666);
12257
+ return true;
12258
+ };
12259
+
12260
+ Utils.prototype.writeFileToAsync = function (/*String*/ path, /*Buffer*/ content, /*Boolean*/ overwrite, /*Number*/ attr, /*Function*/ callback) {
12261
+ if (typeof attr === "function") {
12262
+ callback = attr;
12263
+ attr = undefined;
12264
+ }
12265
+
12266
+ const self = this;
12267
+
12268
+ self.fs.exists(path, function (exist) {
12269
+ if (exist && !overwrite) return callback(false);
12270
+
12271
+ self.fs.stat(path, function (err, stat) {
12272
+ if (exist && stat.isDirectory()) {
12273
+ return callback(false);
12274
+ }
12275
+
12276
+ var folder = pth.dirname(path);
12277
+ self.fs.exists(folder, function (exists) {
12278
+ if (!exists) self.makeDir(folder);
12279
+
12280
+ self.fs.open(path, "w", 0o666, function (err, fd) {
12281
+ if (err) {
12282
+ self.fs.chmod(path, 0o666, function () {
12283
+ self.fs.open(path, "w", 0o666, function (err, fd) {
12284
+ self.fs.write(fd, content, 0, content.length, 0, function () {
12285
+ self.fs.close(fd, function () {
12286
+ self.fs.chmod(path, attr || 0o666, function () {
12287
+ callback(true);
12288
+ });
12289
+ });
12290
+ });
12291
+ });
12292
+ });
12293
+ } else if (fd) {
12294
+ self.fs.write(fd, content, 0, content.length, 0, function () {
12295
+ self.fs.close(fd, function () {
12296
+ self.fs.chmod(path, attr || 0o666, function () {
12297
+ callback(true);
12298
+ });
12299
+ });
12300
+ });
12301
+ } else {
12302
+ self.fs.chmod(path, attr || 0o666, function () {
12303
+ callback(true);
12304
+ });
12305
+ }
12306
+ });
12307
+ });
12308
+ });
12309
+ });
12310
+ };
12311
+
12312
/**
 * Synchronously collects every path below `path` (recursively).
 * Directories are reported with a trailing separator so callers can tell
 * them apart from files.
 */
Utils.prototype.findFiles = function (/*String*/ path) {
    const self = this;

    // Depth-first synchronous walk; `pattern` may be omitted (boolean in
    // its place is treated as the `recursive` flag).
    function walk(/*String*/ dir, /*RegExp*/ pattern, /*Boolean*/ recursive) {
        if (typeof pattern === "boolean") {
            recursive = pattern;
            pattern = undefined;
        }
        let collected = [];
        for (const entry of self.fs.readdirSync(dir)) {
            const full = pth.join(dir, entry);
            const info = self.fs.statSync(full);

            if (!pattern || pattern.test(full)) {
                collected.push(pth.normalize(full) + (info.isDirectory() ? self.sep : ""));
            }

            if (info.isDirectory() && recursive) {
                collected = collected.concat(walk(full, pattern, recursive));
            }
        }
        return collected;
    }

    // Always recurse, with no name filter.
    return walk(path, undefined, true);
};
12336
+
12337
/**
 * Callback for showing if everything was done.
 *
 * @callback filelistCallback
 * @param {Error} err - Error object
 * @param {string[]} list - was request fully completed
 */

/**
 * Asynchronously collects every path below `dir` (recursively).
 * Directories get a trailing separator appended.
 * @param {string} dir
 * @param {filelistCallback} cb
 */
Utils.prototype.findFilesAsync = function (dir, cb) {
    const self = this;
    let found = [];
    self.fs.readdir(dir, function (err, entries) {
        if (err) return cb(err);
        let pending = entries.length;
        if (!pending) return cb(null, found);

        // Fires the callback once the last entry has been fully processed.
        const entryDone = function () {
            if (!--pending) cb(null, found);
        };

        for (let name of entries) {
            name = pth.join(dir, name);
            self.fs.stat(name, function (err, stat) {
                // NOTE(review): on a stat error `cb(err)` may fire more
                // than once (once per failing entry) — matches the
                // original behavior; confirm before changing.
                if (err) return cb(err);
                if (stat) {
                    found.push(pth.normalize(name) + (stat.isDirectory() ? self.sep : ""));
                    if (stat.isDirectory()) {
                        self.findFilesAsync(name, function (err, nested) {
                            if (err) return cb(err);
                            found = found.concat(nested);
                            entryDone();
                        });
                    } else {
                        entryDone();
                    }
                }
            });
        }
    });
};
12377
+
12378
// Platform attribute hooks; intentionally left as no-op placeholders here.
// TODO(review): confirm these are meant to stay empty in this build.
Utils.prototype.getAttributes = function () {};

Utils.prototype.setAttributes = function () {};
12381
+
12382
+ // STATIC functions
12383
+
12384
// crc32 single update (it is part of crc32)
Utils.crc32update = function (crc, byte) {
    // Table-driven CRC-32 step: fold one byte into the running value.
    const tableIndex = (crc ^ byte) & 0xff;
    return crcTable[tableIndex] ^ (crc >>> 8);
};
12388
+
12389
// CRC-32 of a Buffer or UTF-8 string, returned as an unsigned 32-bit number.
Utils.crc32 = function (buf) {
    if (typeof buf === "string") {
        buf = Buffer.from(buf, "utf8");
    }

    // Standard CRC-32: start from all ones, feed every byte, then invert.
    let crc = 0xffffffff;
    for (const byte of buf) {
        crc = Utils.crc32update(crc, byte);
    }
    // Final complement, coerced to an unsigned 32-bit integer.
    return ~crc >>> 0;
};
12400
+
12401
// Human-readable label for a ZIP compression method id.
Utils.methodToString = function (/*Number*/ method) {
    if (method === Constants.STORED) {
        return "STORED (" + method + ")";
    }
    if (method === Constants.DEFLATED) {
        return "DEFLATED (" + method + ")";
    }
    return "UNSUPPORTED (" + method + ")";
};
12411
+
12412
/**
 * removes ".." style path elements
 * @param {string} path - fixable path
 * @returns string - fixed filepath
 */
Utils.canonical = function (/*string*/ path) {
    if (!path) return "";
    // Rooting the forward-slashed path before normalizing strips any
    // leading "..", then re-joining with "." yields a safe relative path.
    const rooted = "/" + path.split("\\").join("/");
    return pth.join(".", pth.posix.normalize(rooted));
};
12423
+
12424
/**
 * fix file names in archive (POSIX-style variant of `canonical`)
 * @param {string} path - fixable path
 * @returns string - fixed filepath
 */
Utils.zipnamefix = function (path) {
    if (!path) return "";
    // Same trick as `canonical`, but joined with the posix separator so
    // the result is suitable for names stored inside the archive.
    const rooted = "/" + path.split("\\").join("/");
    return pth.posix.join(".", pth.posix.normalize(rooted));
};
12436
+
12437
/**
 * Returns the last element of `arr` for which `callback` is truthy.
 * @param {Array} arr
 * @param {function} callback - invoked as (element, index, arr)
 * @returns the matching element, or undefined when nothing matches
 * @throws {TypeError} when `arr` is not an array
 */
Utils.findLast = function (arr, callback) {
    if (!Array.isArray(arr)) throw new TypeError("arr is not array");

    // Scan from the tail so the *last* match wins.
    let i = arr.length >>> 0;
    while (i--) {
        if (callback(arr[i], i, arr)) {
            return arr[i];
        }
    }
    return undefined;
};
12454
+
12455
// make absolute paths taking prefix as root folder
// Returns the first suffix of `name` that, joined under `prefix`, stays
// inside `prefix`; falls back to prefix + basename(name).
Utils.sanitize = function (/*string*/ prefix, /*string*/ name) {
    prefix = pth.resolve(pth.normalize(prefix));
    var parts = name.split("/");
    for (var i = 0, l = parts.length; i < l; i++) {
        var path = pth.normalize(pth.join(prefix, parts.slice(i, l).join(pth.sep)));
        // FIX: the previous `path.indexOf(prefix) === 0` check also accepted
        // sibling directories sharing the prefix string (e.g. "/out" vs
        // "/outside"), letting ".." components escape the root. Require the
        // candidate to be the prefix itself or strictly below it.
        if (path === prefix || path.indexOf(prefix + pth.sep) === 0) {
            return path;
        }
    }
    return pth.normalize(pth.join(prefix, pth.basename(name)));
};
12467
+
12468
// converts buffer, Uint8Array, string types to buffer
Utils.toBuffer = function toBuffer(/*buffer, Uint8Array, string*/ input, /* function */ encoder) {
    if (Buffer.isBuffer(input)) return input;
    if (input instanceof Uint8Array) return Buffer.from(input);
    // Strings go through the supplied encoder; every other value is
    // invalid and yields an empty buffer.
    if (typeof input === "string") return encoder(input);
    return Buffer.alloc(0);
};
12479
+
12480
// Reads 8 little-endian bytes at `index` as an unsigned integer Number.
// NOTE(review): the result is a double, so values above 2^53-1 lose
// precision — presumably acceptable for the fields this reads; confirm.
Utils.readBigUInt64LE = function (/*Buffer*/ buffer, /*int*/ index) {
    // Copy the bytes, flip to big-endian, and parse the hex image.
    const view = Buffer.from(buffer.slice(index, index + 8));
    view.swap64();
    return parseInt(`0x${view.toString("hex")}`);
};
12486
+
12487
// Unpacks an MS-DOS date/time bitfield into a Date (seconds are stored
// at 2-second granularity; month/day are clamped to valid minimums).
Utils.fromDOS2Date = function (val) {
    const year = ((val >> 25) & 0x7f) + 1980;
    const month = Math.max(((val >> 21) & 0x0f) - 1, 0); // Date months are 0-based
    const day = Math.max((val >> 16) & 0x1f, 1);
    const hours = (val >> 11) & 0x1f;
    const minutes = (val >> 5) & 0x3f;
    const seconds = (val & 0x1f) << 1;
    return new Date(year, month, day, hours, minutes, seconds);
};
12490
+
12491
// Packs a Date into the 32-bit MS-DOS date/time format (date in the high
// 16 bits, time in the low 16). Dates before 1980 encode as zero.
Utils.fromDate2DOS = function (val) {
    if (val.getFullYear() <= 1979) {
        return 0;
    }
    const datePart = (((val.getFullYear() - 1980) & 0x7f) << 9) | ((val.getMonth() + 1) << 5) | val.getDate();
    // Seconds are halved: DOS time has 2-second resolution.
    const timePart = (val.getHours() << 11) | (val.getMinutes() << 5) | (val.getSeconds() >> 1);
    return (datePart << 16) | timePart;
};
12500
+
12501
Utils.isWin = isWin; // Do we have windows system
Utils.crcTable = crcTable; // CRC-32 lookup table consumed by Utils.crc32update
12503
+
12504
+
12505
+ /***/ }),
12506
+
12507
+ /***/ 4153:
12508
+ /***/ ((module, __unused_webpack_exports, __webpack_require__) => {
12509
+
12510
+ "use strict";
12511
+ /*
12512
+ MIT License http://www.opensource.org/licenses/mit-license.php
12513
+ Author Tobias Koppers @sokra
12514
+ */
12515
+
12516
+
12517
+
12518
+ const DescriptionFileUtils = __webpack_require__(5877);
12519
+
12520
+ /** @typedef {import("./Resolver")} Resolver */
12521
+ /** @typedef {import("./Resolver").ResolveRequest} ResolveRequest */
12522
+ /** @typedef {import("./Resolver").ResolveStepHook} ResolveStepHook */
12523
+
12524
+ module.exports = class DescriptionFilePlugin {
12525
+ /**
12526
+ * @param {string | ResolveStepHook} source source
12527
+ * @param {string[]} filenames filenames
12528
+ * @param {boolean} pathIsFile pathIsFile
12529
+ * @param {string | ResolveStepHook} target target
12530
+ */
12531
+ constructor(source, filenames, pathIsFile, target) {
12532
+ this.source = source;
12533
+ this.filenames = filenames;
12534
+ this.pathIsFile = pathIsFile;
12535
+ this.target = target;
12536
+ }
12537
+
12538
+ /**
12539
+ * @param {Resolver} resolver the resolver
12540
+ * @returns {void}
12541
+ */
12542
+ apply(resolver) {
12543
+ const target = resolver.ensureHook(this.target);
12544
+ resolver
12545
+ .getHook(this.source)
12546
+ .tapAsync(
12547
+ "DescriptionFilePlugin",
12548
+ (request, resolveContext, callback) => {
12549
+ const { path } = request;
12550
+ if (!path) return callback();
12551
+ const directory = this.pathIsFile
12552
+ ? DescriptionFileUtils.cdUp(path)
12553
+ : path;
12554
+ if (!directory) return callback();
12555
+ DescriptionFileUtils.loadDescriptionFile(
12556
+ resolver,
12557
+ directory,
12558
+ this.filenames,
12559
+ request.descriptionFilePath
12560
+ ? {
12561
+ path: request.descriptionFilePath,
12562
+ content: request.descriptionFileData,
12563
+ directory:
12564
+ /** @type {string} */
12565
+ (request.descriptionFileRoot),
12566
+ }
12567
+ : undefined,
12568
+ resolveContext,
12569
+ (err, result) => {
12570
+ if (err) return callback(err);
12571
+ if (!result) {
12572
+ if (resolveContext.log) {
12573
+ resolveContext.log(
12574
+ `No description file found in ${directory} or above`,
12575
+ );
12576
+ }
12577
+ return callback();
12578
+ }
12579
+ const relativePath = `.${path
12580
+ .slice(result.directory.length)
12581
+ .replace(/\\/g, "/")}`;
12582
+ /** @type {ResolveRequest} */
12583
+ const obj = {
12584
+ ...request,
12585
+ descriptionFilePath: result.path,
12586
+ descriptionFileData: result.content,
12587
+ descriptionFileRoot: result.directory,
12588
+ relativePath,
12589
+ };
12590
+ resolver.doResolve(
12591
+ target,
12592
+ obj,
12593
+ `using description file: ${result.path} (relative path: ${relativePath})`,
12594
+ resolveContext,
12595
+ (err, result) => {
12596
+ if (err) return callback(err);
12597
+
12598
+ // Don't allow other processing
11550
12599
  if (result === undefined) return callback(null, null);
11551
12600
  callback(null, result);
11552
12601
  },
@@ -14388,6 +15437,46 @@ function patch (fs) {
14388
15437
  }
14389
15438
 
14390
15439
 
15440
+ /***/ }),
15441
+
15442
+ /***/ 4877:
15443
+ /***/ ((module, __unused_webpack_exports, __webpack_require__) => {
15444
+
15445
+ module.exports = function (/*Buffer*/ inbuf) {
15446
+ var zlib = __webpack_require__(3106);
15447
+
15448
+ var opts = { chunkSize: (parseInt(inbuf.length / 1024) + 1) * 1024 };
15449
+
15450
+ return {
15451
+ deflate: function () {
15452
+ return zlib.deflateRawSync(inbuf, opts);
15453
+ },
15454
+
15455
+ deflateAsync: function (/*Function*/ callback) {
15456
+ var tmp = zlib.createDeflateRaw(opts),
15457
+ parts = [],
15458
+ total = 0;
15459
+ tmp.on("data", function (data) {
15460
+ parts.push(data);
15461
+ total += data.length;
15462
+ });
15463
+ tmp.on("end", function () {
15464
+ var buf = Buffer.alloc(total),
15465
+ written = 0;
15466
+ buf.fill(0);
15467
+ for (var i = 0; i < parts.length; i++) {
15468
+ var part = parts[i];
15469
+ part.copy(buf, written);
15470
+ written += part.length;
15471
+ }
15472
+ callback && callback(buf);
15473
+ });
15474
+ tmp.end(inbuf);
15475
+ }
15476
+ };
15477
+ };
15478
+
15479
+
14391
15480
  /***/ }),
14392
15481
 
14393
15482
  /***/ 4978:
@@ -14866,44 +15955,425 @@ module.exports = HookCodeFactory;
14866
15955
 
14867
15956
  /***/ }),
14868
15957
 
14869
- /***/ 5118:
15958
+ /***/ 5029:
14870
15959
  /***/ ((module, __unused_webpack_exports, __webpack_require__) => {
14871
15960
 
14872
- "use strict";
14873
- // Copyright Joyent, Inc. and other Node contributors.
14874
- //
14875
- // Permission is hereby granted, free of charge, to any person obtaining a
14876
- // copy of this software and associated documentation files (the
14877
- // "Software"), to deal in the Software without restriction, including
14878
- // without limitation the rights to use, copy, modify, merge, publish,
14879
- // distribute, sublicense, and/or sell copies of the Software, and to permit
14880
- // persons to whom the Software is furnished to do so, subject to the
14881
- // following conditions:
14882
- //
14883
- // The above copyright notice and this permission notice shall be included
14884
- // in all copies or substantial portions of the Software.
14885
- //
14886
- // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
14887
- // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
14888
- // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
14889
- // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
14890
- // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
14891
- // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
14892
- // USE OR OTHER DEALINGS IN THE SOFTWARE.
15961
+ var Utils = __webpack_require__(5178),
15962
+ Constants = Utils.Constants;
14893
15963
 
14894
- // a transform stream is a readable/writable stream where you do
14895
- // something with the data. Sometimes it's called a "filter",
14896
- // but that's not a great name for it, since that implies a thing where
14897
- // some bits pass through, and others are simply ignored. (That would
14898
- // be a valid example of a transform, of course.)
14899
- //
14900
- // While the output is causally related to the input, it's not a
14901
- // necessarily symmetric or synchronous transformation. For example,
14902
- // a zlib stream might take multiple plain-text writes(), and then
14903
- // emit a single compressed chunk some time in the future.
14904
- //
14905
- // Here's how this works:
14906
- //
15964
/* The central directory file header */
// Factory returning a mutable ZIP entry-header record: private numeric
// fields exposed through getters/setters (with unsigned casting), plus
// serializers for the local (LOC) and central (CEN) header layouts.
module.exports = function () {
    var _verMade = 20, // v2.0
        _version = 10, // v1.0
        _flags = 0,
        _method = 0,
        _time = 0,
        _crc = 0,
        _compressedSize = 0,
        _size = 0,
        _fnameLen = 0,
        _extraLen = 0,
        _comLen = 0,
        _diskStart = 0,
        _inattr = 0,
        _attr = 0,
        _offset = 0;

    // High byte records the originating platform: 0x0a00 NTFS, 0x0300 Unix.
    _verMade |= Utils.isWin ? 0x0a00 : 0x0300;

    // Set EFS flag since filename and comment fields are all by default encoded using UTF-8.
    // Without it file names may be corrupted for other apps when file names use unicode chars
    _flags |= Constants.FLG_EFS;

    // Mirror of the entry's local file header fields, filled by
    // loadLocalHeaderFromBinary().
    const _localHeader = {
        extraLen: 0
    };

    // casting
    const uint32 = (val) => Math.max(0, val) >>> 0;
    const uint16 = (val) => Math.max(0, val) & 0xffff;
    const uint8 = (val) => Math.max(0, val) & 0xff;

    _time = Utils.fromDate2DOS(new Date());

    return {
        get made() {
            return _verMade;
        },
        set made(val) {
            _verMade = val;
        },

        get version() {
            return _version;
        },
        set version(val) {
            _version = val;
        },

        get flags() {
            return _flags;
        },
        set flags(val) {
            _flags = val;
        },

        get flags_efs() {
            return (_flags & Constants.FLG_EFS) > 0;
        },
        set flags_efs(val) {
            if (val) {
                _flags |= Constants.FLG_EFS;
            } else {
                _flags &= ~Constants.FLG_EFS;
            }
        },

        get flags_desc() {
            return (_flags & Constants.FLG_DESC) > 0;
        },
        set flags_desc(val) {
            if (val) {
                _flags |= Constants.FLG_DESC;
            } else {
                _flags &= ~Constants.FLG_DESC;
            }
        },

        get method() {
            return _method;
        },
        set method(val) {
            // NOTE(review): there is no `break` after the STORED case, so
            // execution falls through and `version` always ends up 20 —
            // this matches upstream adm-zip as published; confirm intent
            // before "fixing" the fall-through.
            switch (val) {
                case Constants.STORED:
                    this.version = 10;
                case Constants.DEFLATED:
                default:
                    this.version = 20;
            }
            _method = val;
        },

        get time() {
            return Utils.fromDOS2Date(this.timeval);
        },
        set time(val) {
            this.timeval = Utils.fromDate2DOS(val);
        },

        get timeval() {
            return _time;
        },
        set timeval(val) {
            _time = uint32(val);
        },

        // Second byte of the DOS timestamp (used by encryption headers).
        get timeHighByte() {
            return uint8(_time >>> 8);
        },
        get crc() {
            return _crc;
        },
        set crc(val) {
            _crc = uint32(val);
        },

        get compressedSize() {
            return _compressedSize;
        },
        set compressedSize(val) {
            _compressedSize = uint32(val);
        },

        get size() {
            return _size;
        },
        set size(val) {
            _size = uint32(val);
        },

        get fileNameLength() {
            return _fnameLen;
        },
        set fileNameLength(val) {
            _fnameLen = val;
        },

        get extraLength() {
            return _extraLen;
        },
        set extraLength(val) {
            _extraLen = val;
        },

        get extraLocalLength() {
            return _localHeader.extraLen;
        },
        set extraLocalLength(val) {
            _localHeader.extraLen = val;
        },

        get commentLength() {
            return _comLen;
        },
        set commentLength(val) {
            _comLen = val;
        },

        get diskNumStart() {
            return _diskStart;
        },
        set diskNumStart(val) {
            _diskStart = uint32(val);
        },

        get inAttr() {
            return _inattr;
        },
        set inAttr(val) {
            _inattr = uint32(val);
        },

        get attr() {
            return _attr;
        },
        set attr(val) {
            _attr = uint32(val);
        },

        // get Unix file permissions
        get fileAttr() {
            return (_attr || 0) >> 16 & 0xfff;
        },

        get offset() {
            return _offset;
        },
        set offset(val) {
            _offset = uint32(val);
        },

        get encrypted() {
            return (_flags & Constants.FLG_ENC) === Constants.FLG_ENC;
        },

        get centralHeaderSize() {
            return Constants.CENHDR + _fnameLen + _extraLen + _comLen;
        },

        // Offset of the entry's compressed data, past the local header,
        // filename and extra field.
        get realDataOffset() {
            return _offset + Constants.LOCHDR + _localHeader.fnameLen + _localHeader.extraLen;
        },

        get localHeader() {
            return _localHeader;
        },

        // Parses the local file header at `_offset` into _localHeader and
        // returns the entry's extra-field bytes.
        loadLocalHeaderFromBinary: function (/*Buffer*/ input) {
            var data = input.slice(_offset, _offset + Constants.LOCHDR);
            // 30 bytes and should start with "PK\003\004"
            if (data.readUInt32LE(0) !== Constants.LOCSIG) {
                throw Utils.Errors.INVALID_LOC();
            }

            // version needed to extract
            _localHeader.version = data.readUInt16LE(Constants.LOCVER);
            // general purpose bit flag
            _localHeader.flags = data.readUInt16LE(Constants.LOCFLG);
            // compression method
            _localHeader.method = data.readUInt16LE(Constants.LOCHOW);
            // modification time (2 bytes time, 2 bytes date)
            _localHeader.time = data.readUInt32LE(Constants.LOCTIM);
            // uncompressed file crc-32 value
            _localHeader.crc = data.readUInt32LE(Constants.LOCCRC);
            // compressed size
            _localHeader.compressedSize = data.readUInt32LE(Constants.LOCSIZ);
            // uncompressed size
            _localHeader.size = data.readUInt32LE(Constants.LOCLEN);
            // filename length
            _localHeader.fnameLen = data.readUInt16LE(Constants.LOCNAM);
            // extra field length
            _localHeader.extraLen = data.readUInt16LE(Constants.LOCEXT);

            // read extra data
            const extraStart = _offset + Constants.LOCHDR + _localHeader.fnameLen;
            const extraEnd = extraStart + _localHeader.extraLen;
            return input.slice(extraStart, extraEnd);
        },

        // Populates every field from a 46-byte central directory record.
        loadFromBinary: function (/*Buffer*/ data) {
            // data should be 46 bytes and start with "PK 01 02"
            if (data.length !== Constants.CENHDR || data.readUInt32LE(0) !== Constants.CENSIG) {
                throw Utils.Errors.INVALID_CEN();
            }
            // version made by
            _verMade = data.readUInt16LE(Constants.CENVEM);
            // version needed to extract
            _version = data.readUInt16LE(Constants.CENVER);
            // encrypt, decrypt flags
            _flags = data.readUInt16LE(Constants.CENFLG);
            // compression method
            _method = data.readUInt16LE(Constants.CENHOW);
            // modification time (2 bytes time, 2 bytes date)
            _time = data.readUInt32LE(Constants.CENTIM);
            // uncompressed file crc-32 value
            _crc = data.readUInt32LE(Constants.CENCRC);
            // compressed size
            _compressedSize = data.readUInt32LE(Constants.CENSIZ);
            // uncompressed size
            _size = data.readUInt32LE(Constants.CENLEN);
            // filename length
            _fnameLen = data.readUInt16LE(Constants.CENNAM);
            // extra field length
            _extraLen = data.readUInt16LE(Constants.CENEXT);
            // file comment length
            _comLen = data.readUInt16LE(Constants.CENCOM);
            // volume number start
            _diskStart = data.readUInt16LE(Constants.CENDSK);
            // internal file attributes
            _inattr = data.readUInt16LE(Constants.CENATT);
            // external file attributes
            _attr = data.readUInt32LE(Constants.CENATX);
            // LOC header offset
            _offset = data.readUInt32LE(Constants.CENOFF);
        },

        // Serializes the 30-byte local file header (without name/extra).
        localHeaderToBinary: function () {
            // LOC header size (30 bytes)
            var data = Buffer.alloc(Constants.LOCHDR);
            // "PK\003\004"
            data.writeUInt32LE(Constants.LOCSIG, 0);
            // version needed to extract
            data.writeUInt16LE(_version, Constants.LOCVER);
            // general purpose bit flag
            data.writeUInt16LE(_flags, Constants.LOCFLG);
            // compression method
            data.writeUInt16LE(_method, Constants.LOCHOW);
            // modification time (2 bytes time, 2 bytes date)
            data.writeUInt32LE(_time, Constants.LOCTIM);
            // uncompressed file crc-32 value
            data.writeUInt32LE(_crc, Constants.LOCCRC);
            // compressed size
            data.writeUInt32LE(_compressedSize, Constants.LOCSIZ);
            // uncompressed size
            data.writeUInt32LE(_size, Constants.LOCLEN);
            // filename length
            data.writeUInt16LE(_fnameLen, Constants.LOCNAM);
            // extra field length
            data.writeUInt16LE(_localHeader.extraLen, Constants.LOCEXT);
            return data;
        },

        // Serializes the central directory record; the buffer is sized to
        // also hold the (not written here) name/extra/comment tails.
        centralHeaderToBinary: function () {
            // CEN header size (46 bytes)
            var data = Buffer.alloc(Constants.CENHDR + _fnameLen + _extraLen + _comLen);
            // "PK\001\002"
            data.writeUInt32LE(Constants.CENSIG, 0);
            // version made by
            data.writeUInt16LE(_verMade, Constants.CENVEM);
            // version needed to extract
            data.writeUInt16LE(_version, Constants.CENVER);
            // encrypt, decrypt flags
            data.writeUInt16LE(_flags, Constants.CENFLG);
            // compression method
            data.writeUInt16LE(_method, Constants.CENHOW);
            // modification time (2 bytes time, 2 bytes date)
            data.writeUInt32LE(_time, Constants.CENTIM);
            // uncompressed file crc-32 value
            data.writeUInt32LE(_crc, Constants.CENCRC);
            // compressed size
            data.writeUInt32LE(_compressedSize, Constants.CENSIZ);
            // uncompressed size
            data.writeUInt32LE(_size, Constants.CENLEN);
            // filename length
            data.writeUInt16LE(_fnameLen, Constants.CENNAM);
            // extra field length
            data.writeUInt16LE(_extraLen, Constants.CENEXT);
            // file comment length
            data.writeUInt16LE(_comLen, Constants.CENCOM);
            // volume number start
            data.writeUInt16LE(_diskStart, Constants.CENDSK);
            // internal file attributes
            data.writeUInt16LE(_inattr, Constants.CENATT);
            // external file attributes
            data.writeUInt32LE(_attr, Constants.CENATX);
            // LOC header offset
            data.writeUInt32LE(_offset, Constants.CENOFF);
            return data;
        },

        // Debug-friendly snapshot of all header fields.
        toJSON: function () {
            const bytes = function (nr) {
                return nr + " bytes";
            };

            return {
                made: _verMade,
                version: _version,
                flags: _flags,
                method: Utils.methodToString(_method),
                time: this.time,
                crc: "0x" + _crc.toString(16).toUpperCase(),
                compressedSize: bytes(_compressedSize),
                size: bytes(_size),
                fileNameLength: bytes(_fnameLen),
                extraLength: bytes(_extraLen),
                commentLength: bytes(_comLen),
                diskNumStart: _diskStart,
                inAttr: _inattr,
                attr: _attr,
                offset: _offset,
                centralHeaderSize: bytes(Constants.CENHDR + _fnameLen + _extraLen + _comLen)
            };
        },

        toString: function () {
            return JSON.stringify(this.toJSON(), null, "\t");
        }
    };
};
+
16336
+
16337
+ /***/ }),
16338
+
16339
+ /***/ 5118:
16340
+ /***/ ((module, __unused_webpack_exports, __webpack_require__) => {
16341
+
16342
+ "use strict";
16343
+ // Copyright Joyent, Inc. and other Node contributors.
16344
+ //
16345
+ // Permission is hereby granted, free of charge, to any person obtaining a
16346
+ // copy of this software and associated documentation files (the
16347
+ // "Software"), to deal in the Software without restriction, including
16348
+ // without limitation the rights to use, copy, modify, merge, publish,
16349
+ // distribute, sublicense, and/or sell copies of the Software, and to permit
16350
+ // persons to whom the Software is furnished to do so, subject to the
16351
+ // following conditions:
16352
+ //
16353
+ // The above copyright notice and this permission notice shall be included
16354
+ // in all copies or substantial portions of the Software.
16355
+ //
16356
+ // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
16357
+ // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
16358
+ // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
16359
+ // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
16360
+ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
16361
+ // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
16362
+ // USE OR OTHER DEALINGS IN THE SOFTWARE.
16363
+
16364
+ // a transform stream is a readable/writable stream where you do
16365
+ // something with the data. Sometimes it's called a "filter",
16366
+ // but that's not a great name for it, since that implies a thing where
16367
+ // some bits pass through, and others are simply ignored. (That would
16368
+ // be a valid example of a transform, of course.)
16369
+ //
16370
+ // While the output is causally related to the input, it's not a
16371
+ // necessarily symmetric or synchronous transformation. For example,
16372
+ // a zlib stream might take multiple plain-text writes(), and then
16373
+ // emit a single compressed chunk some time in the future.
16374
+ //
16375
+ // Here's how this works:
16376
+ //
14907
16377
  // The Transform stream has all the aspects of the readable and writable
14908
16378
  // stream classes. When you write(chunk), that calls _write(chunk,cb)
14909
16379
  // internally, and returns false if there's a lot of pending writes
@@ -15351,6 +16821,18 @@ function SyncAsyncFileSystemDecorator(fs) {
15351
16821
  module.exports = SyncAsyncFileSystemDecorator;
15352
16822
 
15353
16823
 
16824
+ /***/ }),
16825
+
16826
+ /***/ 5178:
16827
+ /***/ ((module, __unused_webpack_exports, __webpack_require__) => {
16828
+
16829
// Aggregated util surface: the base helper module (4152) extended with the
// ZIP-related submodules as properties.
module.exports = __webpack_require__(4152);
module.exports.Constants = __webpack_require__(1307);
module.exports.Errors = __webpack_require__(7583);
module.exports.FileAttr = __webpack_require__(8591);
module.exports.decoder = __webpack_require__(9886);
16834
+
16835
+
15354
16836
  /***/ }),
15355
16837
 
15356
16838
  /***/ 5303:
@@ -17511,6 +18993,152 @@ module.exports = class AliasFieldPlugin {
17511
18993
  };
17512
18994
 
17513
18995
 
18996
+ /***/ }),
18997
+
18998
+ /***/ 6205:
18999
+ /***/ ((__unused_webpack_module, exports) => {
19000
+
19001
"use strict";

// Constant tables describing the integration types and built-in
// integration IDs supported by Squid (compiled CommonJS output).
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.SQUID_BUILT_IN_INTEGRATION_IDS = exports.BUILT_IN_STORAGE_INTEGRATION_ID = exports.BUILT_IN_QUEUE_INTEGRATION_ID = exports.BUILT_IN_DB_INTEGRATION_ID = exports.INTEGRATION_SCHEMA_TYPES = exports.HTTP_INTEGRATION_TYPES = exports.GRAPHQL_INTEGRATION_TYPES = exports.AUTH_INTEGRATION_TYPES = exports.DATA_INTEGRATION_TYPES = exports.INTEGRATION_TYPES = exports.AI_AGENTS_INTEGRATION_ID = void 0;
exports.isBuiltInIntegrationId = isBuiltInIntegrationId;
/** @internal */
exports.AI_AGENTS_INTEGRATION_ID = 'ai_agents';
/** List of all integration types supported by Squid. */
exports.INTEGRATION_TYPES = [
    'active_directory',
    'ai_agents',
    'ai_chatbot',
    'algolia',
    'alloydb',
    'api',
    'auth0',
    'azure_cosmosdb',
    'azure_postgresql',
    'azure_sql',
    'bigquery',
    'built_in_db',
    'built_in_gcs',
    'built_in_queue',
    'built_in_s3',
    'cassandra',
    'clickhouse',
    'cloudsql',
    'cockroach',
    'cognito',
    'connected_knowledgebases',
    'confluence',
    'confluent',
    'datadog',
    'db2',
    'descope',
    'documentdb',
    'dynamodb',
    'elasticsearch',
    'firebase_auth',
    'firestore',
    'gcs',
    'google_calendar',
    'google_docs',
    'google_drive',
    'graphql',
    'hubspot',
    'jira',
    'jwt_hmac',
    'jwt_rsa',
    'kafka',
    'linear', // Link to docs.
    'mariadb',
    'monday',
    'mongo',
    'mssql',
    'databricks',
    'mysql',
    'newrelic',
    'okta',
    'onedrive',
    'oracledb',
    'pinecone',
    'postgres',
    'redis',
    's3',
    'salesforce_crm',
    'sap_hana',
    'sentry',
    'servicenow',
    'snowflake',
    'spanner',
    'xata',
    'zendesk',
    'mail',
    'slack',
    'mcp',
    'a2a',
    'legend',
];
/**
 * @category Database
 */
exports.DATA_INTEGRATION_TYPES = [
    'bigquery',
    'built_in_db',
    'clickhouse',
    'cockroach',
    'mongo',
    'mssql',
    'databricks',
    'mysql',
    'oracledb',
    'postgres',
    'sap_hana',
    'snowflake',
    'elasticsearch',
    'legend',
];
/**
 * @category Auth
 */
exports.AUTH_INTEGRATION_TYPES = [
    'auth0',
    'jwt_rsa',
    'jwt_hmac',
    'cognito',
    'okta',
    'descope',
    'firebase_auth',
];
/** Supported integration types for GraphQL-based services. */
exports.GRAPHQL_INTEGRATION_TYPES = ['graphql', 'linear'];
/** Supported integration types for HTTP-based services. */
exports.HTTP_INTEGRATION_TYPES = ['api'];
/** Supported schema types for integrations */
exports.INTEGRATION_SCHEMA_TYPES = ['data', 'api', 'graphql'];
/**
 * @category Database
 */
exports.BUILT_IN_DB_INTEGRATION_ID = 'built_in_db';
/**
 * @category Queue
 */
exports.BUILT_IN_QUEUE_INTEGRATION_ID = 'built_in_queue';
/**
 * ID for the cloud specific storage integration: s3 (built_in_s3) or gcs (built_in_gcs).
 * @category Storage
 */
exports.BUILT_IN_STORAGE_INTEGRATION_ID = 'built_in_storage';
/** Integration IDs used for built-in integrations by Squid. */
exports.SQUID_BUILT_IN_INTEGRATION_IDS = [
    exports.BUILT_IN_DB_INTEGRATION_ID,
    exports.BUILT_IN_QUEUE_INTEGRATION_ID,
    exports.BUILT_IN_STORAGE_INTEGRATION_ID,
];
/** Returns true if ID is a built-in integration ID in Squid. */
function isBuiltInIntegrationId(id) {
    return exports.SQUID_BUILT_IN_INTEGRATION_IDS.includes(id);
}
+
19141
+
17514
19142
  /***/ }),
17515
19143
 
17516
19144
  /***/ 6278:
@@ -17824,6 +19452,177 @@ __webpack_require__(4291);
17824
19452
  module.exports = __webpack_require__(2203);
17825
19453
 
17826
19454
 
19455
+ /***/ }),
19456
+
19457
+ /***/ 6587:
19458
+ /***/ ((__unused_webpack_module, exports) => {
19459
+
19460
+ "use strict";
19461
+
19462
+ Object.defineProperty(exports, "__esModule", ({ value: true }));
19463
+ exports.OPEN_AI_CREATE_SPEECH_FORMATS = exports.AI_AUDIO_CREATE_SPEECH_MODEL_NAMES = exports.AI_AUDIO_TRANSCRIPTION_MODEL_NAMES = exports.AI_IMAGE_MODEL_NAMES = exports.FLUX_MODEL_NAMES = exports.STABLE_DIFFUSION_MODEL_NAMES = exports.OPENAI_AUDIO_MODEL_NAMES = exports.OPENAI_AUDIO_CREATE_SPEECH_MODEL_NAMES = exports.OPENAI_AUDIO_TRANSCRIPTION_MODEL_NAMES = exports.OPENAI_IMAGE_MODEL_NAMES = exports.AI_EMBEDDINGS_MODEL_NAMES = exports.VOYAGE_EMBEDDING_MODEL_NAMES = exports.OPENAI_EMBEDDINGS_MODEL_NAMES = exports.VENDOR_AI_CHAT_MODEL_NAMES = exports.ANTHROPIC_CHAT_MODEL_NAMES = exports.GROK_CHAT_MODEL_NAMES = exports.GEMINI_CHAT_MODEL_NAMES = exports.OPENAI_CHAT_MODEL_NAMES = exports.AI_PROVIDER_TYPES = exports.RERANK_PROVIDERS = void 0;
19464
+ exports.isVendorAiChatModelName = isVendorAiChatModelName;
19465
+ /**
19466
+ * @category AI
19467
+ */
19468
+ exports.RERANK_PROVIDERS = ['cohere', 'none'];
19469
+ /** List of available AI provider types. See AiProviderType. */
19470
+ exports.AI_PROVIDER_TYPES = [
19471
+ 'anthropic',
19472
+ 'flux',
19473
+ 'gemini',
19474
+ 'openai',
19475
+ 'grok',
19476
+ 'stability',
19477
+ 'voyage',
19478
+ 'external',
19479
+ ];
19480
+ /**
19481
+ * @category AI
19482
+ */
19483
+ exports.OPENAI_CHAT_MODEL_NAMES = [
19484
+ 'o1',
19485
+ 'o3',
19486
+ 'o3-mini',
19487
+ 'o4-mini',
19488
+ 'gpt-5',
19489
+ 'gpt-5-mini',
19490
+ 'gpt-5-nano',
19491
+ 'gpt-4.1',
19492
+ 'gpt-4.1-mini',
19493
+ 'gpt-4.1-nano',
19494
+ 'gpt-4o',
19495
+ 'gpt-4o-mini',
19496
+ ];
19497
+ /**
19498
+ * @category AI
19499
+ */
19500
+ exports.GEMINI_CHAT_MODEL_NAMES = ['gemini-2.5-pro', 'gemini-2.5-flash', 'gemini-2.5-flash-lite'];
19501
+ /**
19502
+ * Notes:
19503
+ * - 'grok-3-mini' model os ~10x less expensive than 'grok-3'.
19504
+ * - '*-fast' models are ~2x more expensive than non-fast variants and only marginally faster.
19505
+ * - 'grok-4' cost is comparable to 'grok-3-fast'.
19506
+ *
19507
+ * @category AI
19508
+ */
19509
+ exports.GROK_CHAT_MODEL_NAMES = [
19510
+ 'grok-3',
19511
+ 'grok-3-fast',
19512
+ 'grok-3-mini',
19513
+ 'grok-3-mini-fast',
19514
+ 'grok-4',
19515
+ 'grok-4-fast-reasoning',
19516
+ 'grok-4-fast-non-reasoning',
19517
+ ];
19518
+ /**
19519
+ * @category AI
19520
+ */
19521
+ exports.ANTHROPIC_CHAT_MODEL_NAMES = [
19522
+ 'claude-3-7-sonnet-latest',
19523
+ 'claude-haiku-4-5-20251001',
19524
+ 'claude-opus-4-20250514',
19525
+ 'claude-opus-4-1-20250805',
19526
+ 'claude-sonnet-4-20250514',
19527
+ 'claude-sonnet-4-5-20250929',
19528
+ ];
19529
+ /**
19530
+ * The supported AI model names.
19531
+ * @category AI
19532
+ */
19533
+ exports.VENDOR_AI_CHAT_MODEL_NAMES = [
19534
+ ...exports.OPENAI_CHAT_MODEL_NAMES,
19535
+ ...exports.ANTHROPIC_CHAT_MODEL_NAMES,
19536
+ ...exports.GEMINI_CHAT_MODEL_NAMES,
19537
+ ...exports.GROK_CHAT_MODEL_NAMES,
19538
+ ];
19539
+ /**
19540
+ * Check if the given model name is a global AI chat model name.
19541
+ */
19542
+ function isVendorAiChatModelName(modelName) {
19543
+ return exports.VENDOR_AI_CHAT_MODEL_NAMES.includes(modelName);
19544
+ }
19545
+ /**
19546
+ * @category AI
19547
+ */
19548
+ exports.OPENAI_EMBEDDINGS_MODEL_NAMES = ['text-embedding-3-small'];
19549
+ /**
19550
+ * @category AI
19551
+ */
19552
+ exports.VOYAGE_EMBEDDING_MODEL_NAMES = ['voyage-3-large'];
19553
+ /**
19554
+ * @category AI
19555
+ */
19556
+ exports.AI_EMBEDDINGS_MODEL_NAMES = [...exports.OPENAI_EMBEDDINGS_MODEL_NAMES, ...exports.VOYAGE_EMBEDDING_MODEL_NAMES];
19557
+ /**
19558
+ * The supported AI image generation model names.
19559
+ * @category AI
19560
+ */
19561
+ exports.OPENAI_IMAGE_MODEL_NAMES = ['dall-e-3'];
19562
+ /**
19563
+ * @category AI
19564
+ */
19565
+ exports.OPENAI_AUDIO_TRANSCRIPTION_MODEL_NAMES = [
19566
+ 'whisper-1',
19567
+ 'gpt-4o-transcribe',
19568
+ 'gpt-4o-mini-transcribe',
19569
+ ];
19570
+ /**
19571
+ * @category AI
19572
+ */
19573
+ exports.OPENAI_AUDIO_CREATE_SPEECH_MODEL_NAMES = ['tts-1', 'tts-1-hd', 'gpt-4o-mini-tts'];
19574
+ /**
19575
+ * @category AI
19576
+ */
19577
+ exports.OPENAI_AUDIO_MODEL_NAMES = [
19578
+ ...exports.OPENAI_AUDIO_TRANSCRIPTION_MODEL_NAMES,
19579
+ ...exports.OPENAI_AUDIO_CREATE_SPEECH_MODEL_NAMES,
19580
+ ];
19581
+ /**
19582
+ * @category AI
19583
+ */
19584
+ exports.STABLE_DIFFUSION_MODEL_NAMES = ['stable-diffusion-core'];
19585
+ /**
19586
+ * @category AI
19587
+ */
19588
+ exports.FLUX_MODEL_NAMES = ['flux-pro-1.1', 'flux-kontext-pro'];
19589
+ /**
19590
+ * @category AI
19591
+ */
19592
+ exports.AI_IMAGE_MODEL_NAMES = [
19593
+ ...exports.OPENAI_IMAGE_MODEL_NAMES,
19594
+ ...exports.STABLE_DIFFUSION_MODEL_NAMES,
19595
+ ...exports.FLUX_MODEL_NAMES,
19596
+ ];
19597
+ /**
19598
+ * @category AI
19599
+ */
19600
+ exports.AI_AUDIO_TRANSCRIPTION_MODEL_NAMES = [...exports.OPENAI_AUDIO_TRANSCRIPTION_MODEL_NAMES];
19601
+ /**
19602
+ * @category AI
19603
+ */
19604
+ exports.AI_AUDIO_CREATE_SPEECH_MODEL_NAMES = [...exports.OPENAI_AUDIO_CREATE_SPEECH_MODEL_NAMES];
19605
+ /**
19606
+ * @category AI
19607
+ */
19608
+ exports.OPEN_AI_CREATE_SPEECH_FORMATS = ['mp3', 'opus', 'aac', 'flac', 'wav', 'pcm'];
19609
+
19610
+
19611
+ /***/ }),
19612
+
19613
+ /***/ 6589:
19614
+ /***/ ((module) => {
19615
+
19616
+ function webpackEmptyContext(req) {
19617
+ var e = new Error("Cannot find module '" + req + "'");
19618
+ e.code = 'MODULE_NOT_FOUND';
19619
+ throw e;
19620
+ }
19621
+ webpackEmptyContext.keys = () => ([]);
19622
+ webpackEmptyContext.resolve = webpackEmptyContext;
19623
+ webpackEmptyContext.id = 6589;
19624
+ module.exports = webpackEmptyContext;
19625
+
17827
19626
  /***/ }),
17828
19627
 
17829
19628
  /***/ 6807:
@@ -17868,20 +19667,60 @@ module.exports = () => input => {
17868
19667
 
17869
19668
  /***/ }),
17870
19669
 
17871
- /***/ 6928:
17872
- /***/ ((module) => {
19670
+ /***/ 6883:
19671
+ /***/ ((__unused_webpack_module, exports, __webpack_require__) => {
17873
19672
 
17874
19673
  "use strict";
17875
- module.exports = require("path");
17876
19674
 
17877
- /***/ }),
17878
-
17879
- /***/ 6932:
17880
- /***/ ((module, __unused_webpack_exports, __webpack_require__) => {
17881
-
17882
- "use strict";
17883
- /*
17884
- MIT License http://www.opensource.org/licenses/mit-license.php
19675
+ Object.defineProperty(exports, "__esModule", ({ value: true }));
19676
+ exports.applicationAppConnectorsAssertion = exports.applicationAiSettingsAssertion = exports.assertAiProviderType = exports.assertNotBuiltInIntegrationType = exports.assertConnectorId = exports.assertIntegrationType = void 0;
19677
+ const assertic_1 = __webpack_require__(3205);
19678
+ const ai_common_public_types_1 = __webpack_require__(6587);
19679
+ const integration_public_types_1 = __webpack_require__(6205);
19680
+ const connector_types_1 = __webpack_require__(3420);
19681
+ /*** Asserts that `value` is a valid integration type and is not a built-in integration type. */
19682
+ const assertIntegrationType = (value, context = undefined) => {
19683
+ (0, assertic_1.assertTruthy)(integration_public_types_1.INTEGRATION_TYPES.includes(value), () => (0, assertic_1.formatError)(context, `Not a valid integration type`, value));
19684
+ };
19685
+ exports.assertIntegrationType = assertIntegrationType;
19686
+ const assertConnectorId = (value, context = undefined) => {
19687
+ (0, assertic_1.assertTruthy)(connector_types_1.CONNECTOR_IDS.includes(value), () => (0, assertic_1.formatError)(context, `Not a valid connector id`, value));
19688
+ };
19689
+ exports.assertConnectorId = assertConnectorId;
19690
+ /*** Asserts that `value` is a valid integration type and is not a built-in integration type. */
19691
+ const assertNotBuiltInIntegrationType = (value, context = undefined) => {
19692
+ (0, assertic_1.assertString)(value, context);
19693
+ (0, assertic_1.assertTruthy)(!['built_in_db', 'built_in_queue', 'built_in_s3'].includes(value), () => (0, assertic_1.formatError)(context, `The value can't be a built-in integration type`, value));
19694
+ };
19695
+ exports.assertNotBuiltInIntegrationType = assertNotBuiltInIntegrationType;
19696
+ const assertAiProviderType = (value, context = undefined) => {
19697
+ (0, assertic_1.assertTruthy)(ai_common_public_types_1.AI_PROVIDER_TYPES.includes(value), () => (0, assertic_1.formatError)(context, 'Invalid AI provider type', value));
19698
+ };
19699
+ exports.assertAiProviderType = assertAiProviderType;
19700
+ exports.applicationAiSettingsAssertion = {
19701
+ apiKeys: (0, assertic_1.recordAssertion)(assertic_1.assertString, { keyAssertion: exports.assertAiProviderType }),
19702
+ };
19703
+ exports.applicationAppConnectorsAssertion = {
19704
+ appConnectors: (0, assertic_1.arrayAssertion)(exports.assertConnectorId, { uniqueByIdentity: (v) => v }),
19705
+ };
19706
+
19707
+
19708
+ /***/ }),
19709
+
19710
+ /***/ 6928:
19711
+ /***/ ((module) => {
19712
+
19713
+ "use strict";
19714
+ module.exports = require("path");
19715
+
19716
+ /***/ }),
19717
+
19718
+ /***/ 6932:
19719
+ /***/ ((module, __unused_webpack_exports, __webpack_require__) => {
19720
+
19721
+ "use strict";
19722
+ /*
19723
+ MIT License http://www.opensource.org/licenses/mit-license.php
17885
19724
  Author Tobias Koppers @sokra
17886
19725
  */
17887
19726
 
@@ -19594,6 +21433,189 @@ module.exports = class JoinRequestPlugin {
19594
21433
  };
19595
21434
 
19596
21435
 
21436
+ /***/ }),
21437
+
21438
+ /***/ 7186:
21439
+ /***/ ((module, __unused_webpack_exports, __webpack_require__) => {
21440
+
21441
+ "use strict";
21442
+
21443
+
21444
+ // node crypt, we use it for generate salt
21445
+ // eslint-disable-next-line node/no-unsupported-features/node-builtins
21446
+ const { randomFillSync } = __webpack_require__(6982);
21447
+ const Errors = __webpack_require__(7583);
21448
+
21449
+ // generate CRC32 lookup table
21450
+ const crctable = new Uint32Array(256).map((t, crc) => {
21451
+ for (let j = 0; j < 8; j++) {
21452
+ if (0 !== (crc & 1)) {
21453
+ crc = (crc >>> 1) ^ 0xedb88320;
21454
+ } else {
21455
+ crc >>>= 1;
21456
+ }
21457
+ }
21458
+ return crc >>> 0;
21459
+ });
21460
+
21461
+ // C-style uInt32 Multiply (discards higher bits, when JS multiply discards lower bits)
21462
+ const uMul = (a, b) => Math.imul(a, b) >>> 0;
21463
+
21464
+ // crc32 byte single update (actually same function is part of utils.crc32 function :) )
21465
+ const crc32update = (pCrc32, bval) => {
21466
+ return crctable[(pCrc32 ^ bval) & 0xff] ^ (pCrc32 >>> 8);
21467
+ };
21468
+
21469
+ // function for generating salt for encrytion header
21470
+ const genSalt = () => {
21471
+ if ("function" === typeof randomFillSync) {
21472
+ return randomFillSync(Buffer.alloc(12));
21473
+ } else {
21474
+ // fallback if function is not defined
21475
+ return genSalt.node();
21476
+ }
21477
+ };
21478
+
21479
+ // salt generation with node random function (mainly as fallback)
21480
+ genSalt.node = () => {
21481
+ const salt = Buffer.alloc(12);
21482
+ const len = salt.length;
21483
+ for (let i = 0; i < len; i++) salt[i] = (Math.random() * 256) & 0xff;
21484
+ return salt;
21485
+ };
21486
+
21487
+ // general config
21488
+ const config = {
21489
+ genSalt
21490
+ };
21491
+
21492
+ // Class Initkeys handles same basic ops with keys
21493
+ function Initkeys(pw) {
21494
+ const pass = Buffer.isBuffer(pw) ? pw : Buffer.from(pw);
21495
+ this.keys = new Uint32Array([0x12345678, 0x23456789, 0x34567890]);
21496
+ for (let i = 0; i < pass.length; i++) {
21497
+ this.updateKeys(pass[i]);
21498
+ }
21499
+ }
21500
+
21501
+ Initkeys.prototype.updateKeys = function (byteValue) {
21502
+ const keys = this.keys;
21503
+ keys[0] = crc32update(keys[0], byteValue);
21504
+ keys[1] += keys[0] & 0xff;
21505
+ keys[1] = uMul(keys[1], 134775813) + 1;
21506
+ keys[2] = crc32update(keys[2], keys[1] >>> 24);
21507
+ return byteValue;
21508
+ };
21509
+
21510
+ Initkeys.prototype.next = function () {
21511
+ const k = (this.keys[2] | 2) >>> 0; // key
21512
+ return (uMul(k, k ^ 1) >> 8) & 0xff; // decode
21513
+ };
21514
+
21515
+ function make_decrypter(/*Buffer*/ pwd) {
21516
+ // 1. Stage initialize key
21517
+ const keys = new Initkeys(pwd);
21518
+
21519
+ // return decrypter function
21520
+ return function (/*Buffer*/ data) {
21521
+ // result - we create new Buffer for results
21522
+ const result = Buffer.alloc(data.length);
21523
+ let pos = 0;
21524
+ // process input data
21525
+ for (let c of data) {
21526
+ //c ^= keys.next();
21527
+ //result[pos++] = c; // decode & Save Value
21528
+ result[pos++] = keys.updateKeys(c ^ keys.next()); // update keys with decoded byte
21529
+ }
21530
+ return result;
21531
+ };
21532
+ }
21533
+
21534
+ function make_encrypter(/*Buffer*/ pwd) {
21535
+ // 1. Stage initialize key
21536
+ const keys = new Initkeys(pwd);
21537
+
21538
+ // return encrypting function, result and pos is here so we dont have to merge buffers later
21539
+ return function (/*Buffer*/ data, /*Buffer*/ result, /* Number */ pos = 0) {
21540
+ // result - we create new Buffer for results
21541
+ if (!result) result = Buffer.alloc(data.length);
21542
+ // process input data
21543
+ for (let c of data) {
21544
+ const k = keys.next(); // save key byte
21545
+ result[pos++] = c ^ k; // save val
21546
+ keys.updateKeys(c); // update keys with decoded byte
21547
+ }
21548
+ return result;
21549
+ };
21550
+ }
21551
+
21552
+ function decrypt(/*Buffer*/ data, /*Object*/ header, /*String, Buffer*/ pwd) {
21553
+ if (!data || !Buffer.isBuffer(data) || data.length < 12) {
21554
+ return Buffer.alloc(0);
21555
+ }
21556
+
21557
+ // 1. We Initialize and generate decrypting function
21558
+ const decrypter = make_decrypter(pwd);
21559
+
21560
+ // 2. decrypt salt what is always 12 bytes and is a part of file content
21561
+ const salt = decrypter(data.slice(0, 12));
21562
+
21563
+ // if bit 3 (0x08) of the general-purpose flags field is set, check salt[11] with the high byte of the header time
21564
+ // 2 byte data block (as per Info-Zip spec), otherwise check with the high byte of the header entry
21565
+ const verifyByte = (header.flags & 0x8) === 0x8 ? header.timeHighByte : header.crc >>> 24;
21566
+
21567
+ //3. does password meet expectations
21568
+ if (salt[11] !== verifyByte) {
21569
+ throw Errors.WRONG_PASSWORD();
21570
+ }
21571
+
21572
+ // 4. decode content
21573
+ return decrypter(data.slice(12));
21574
+ }
21575
+
21576
+ // lets add way to populate salt, NOT RECOMMENDED for production but maybe useful for testing general functionality
21577
+ function _salter(data) {
21578
+ if (Buffer.isBuffer(data) && data.length >= 12) {
21579
+ // be aware - currently salting buffer data is modified
21580
+ config.genSalt = function () {
21581
+ return data.slice(0, 12);
21582
+ };
21583
+ } else if (data === "node") {
21584
+ // test salt generation with node random function
21585
+ config.genSalt = genSalt.node;
21586
+ } else {
21587
+ // if value is not acceptable config gets reset.
21588
+ config.genSalt = genSalt;
21589
+ }
21590
+ }
21591
+
21592
+ function encrypt(/*Buffer*/ data, /*Object*/ header, /*String, Buffer*/ pwd, /*Boolean*/ oldlike = false) {
21593
+ // 1. test data if data is not Buffer we make buffer from it
21594
+ if (data == null) data = Buffer.alloc(0);
21595
+ // if data is not buffer be make buffer from it
21596
+ if (!Buffer.isBuffer(data)) data = Buffer.from(data.toString());
21597
+
21598
+ // 2. We Initialize and generate encrypting function
21599
+ const encrypter = make_encrypter(pwd);
21600
+
21601
+ // 3. generate salt (12-bytes of random data)
21602
+ const salt = config.genSalt();
21603
+ salt[11] = (header.crc >>> 24) & 0xff;
21604
+
21605
+ // old implementations (before PKZip 2.04g) used two byte check
21606
+ if (oldlike) salt[10] = (header.crc >>> 16) & 0xff;
21607
+
21608
+ // 4. create output
21609
+ const result = Buffer.alloc(data.length + 12);
21610
+ encrypter(salt, result);
21611
+
21612
+ // finally encode content
21613
+ return encrypter(data, result, 12);
21614
+ }
21615
+
21616
+ module.exports = { decrypt, encrypt, _salter };
21617
+
21618
+
19597
21619
  /***/ }),
19598
21620
 
19599
21621
  /***/ 7278:
@@ -20549,6 +22571,76 @@ function getConsoleAppRegionByStage(stage) {
20549
22571
  }
20550
22572
 
20551
22573
 
22574
+ /***/ }),
22575
+
22576
+ /***/ 7583:
22577
+ /***/ ((__unused_webpack_module, exports) => {
22578
+
22579
+ const errors = {
22580
+ /* Header error messages */
22581
+ INVALID_LOC: "Invalid LOC header (bad signature)",
22582
+ INVALID_CEN: "Invalid CEN header (bad signature)",
22583
+ INVALID_END: "Invalid END header (bad signature)",
22584
+
22585
+ /* Descriptor */
22586
+ DESCRIPTOR_NOT_EXIST: "No descriptor present",
22587
+ DESCRIPTOR_UNKNOWN: "Unknown descriptor format",
22588
+ DESCRIPTOR_FAULTY: "Descriptor data is malformed",
22589
+
22590
+ /* ZipEntry error messages*/
22591
+ NO_DATA: "Nothing to decompress",
22592
+ BAD_CRC: "CRC32 checksum failed {0}",
22593
+ FILE_IN_THE_WAY: "There is a file in the way: {0}",
22594
+ UNKNOWN_METHOD: "Invalid/unsupported compression method",
22595
+
22596
+ /* Inflater error messages */
22597
+ AVAIL_DATA: "inflate::Available inflate data did not terminate",
22598
+ INVALID_DISTANCE: "inflate::Invalid literal/length or distance code in fixed or dynamic block",
22599
+ TO_MANY_CODES: "inflate::Dynamic block code description: too many length or distance codes",
22600
+ INVALID_REPEAT_LEN: "inflate::Dynamic block code description: repeat more than specified lengths",
22601
+ INVALID_REPEAT_FIRST: "inflate::Dynamic block code description: repeat lengths with no first length",
22602
+ INCOMPLETE_CODES: "inflate::Dynamic block code description: code lengths codes incomplete",
22603
+ INVALID_DYN_DISTANCE: "inflate::Dynamic block code description: invalid distance code lengths",
22604
+ INVALID_CODES_LEN: "inflate::Dynamic block code description: invalid literal/length code lengths",
22605
+ INVALID_STORE_BLOCK: "inflate::Stored block length did not match one's complement",
22606
+ INVALID_BLOCK_TYPE: "inflate::Invalid block type (type == 3)",
22607
+
22608
+ /* ADM-ZIP error messages */
22609
+ CANT_EXTRACT_FILE: "Could not extract the file",
22610
+ CANT_OVERRIDE: "Target file already exists",
22611
+ DISK_ENTRY_TOO_LARGE: "Number of disk entries is too large",
22612
+ NO_ZIP: "No zip file was loaded",
22613
+ NO_ENTRY: "Entry doesn't exist",
22614
+ DIRECTORY_CONTENT_ERROR: "A directory cannot have content",
22615
+ FILE_NOT_FOUND: 'File not found: "{0}"',
22616
+ NOT_IMPLEMENTED: "Not implemented",
22617
+ INVALID_FILENAME: "Invalid filename",
22618
+ INVALID_FORMAT: "Invalid or unsupported zip format. No END header found",
22619
+ INVALID_PASS_PARAM: "Incompatible password parameter",
22620
+ WRONG_PASSWORD: "Wrong Password",
22621
+
22622
+ /* ADM-ZIP */
22623
+ COMMENT_TOO_LONG: "Comment is too long", // Comment can be max 65535 bytes long (NOTE: some non-US characters may take more space)
22624
+ EXTRA_FIELD_PARSE_ERROR: "Extra field parsing error"
22625
+ };
22626
+
22627
+ // template
22628
+ function E(message) {
22629
+ return function (...args) {
22630
+ if (args.length) { // Allow {0} .. {9} arguments in error message, based on argument number
22631
+ message = message.replace(/\{(\d)\}/g, (_, n) => args[n] || '');
22632
+ }
22633
+
22634
+ return new Error('ADM-ZIP: ' + message);
22635
+ };
22636
+ }
22637
+
22638
+ // Init errors with template
22639
+ for (const msg of Object.keys(errors)) {
22640
+ exports[msg] = E(errors[msg]);
22641
+ }
22642
+
22643
+
20552
22644
  /***/ }),
20553
22645
 
20554
22646
  /***/ 7804:
@@ -22186,6 +24278,16 @@ convert.rgb.gray = function (rgb) {
22186
24278
  };
22187
24279
 
22188
24280
 
24281
+ /***/ }),
24282
+
24283
+ /***/ 7964:
24284
+ /***/ ((__unused_webpack_module, exports, __webpack_require__) => {
24285
+
24286
+ exports.Deflater = __webpack_require__(4877);
24287
+ exports.Inflater = __webpack_require__(1297);
24288
+ exports.ZipCrypto = __webpack_require__(7186);
24289
+
24290
+
22189
24291
  /***/ }),
22190
24292
 
22191
24293
  /***/ 7982:
@@ -22378,32 +24480,988 @@ module.exports = class SelfReferencePlugin {
22378
24480
  );
22379
24481
  if (typeof name !== "string") return callback();
22380
24482
 
22381
- if (
22382
- req.startsWith(name) &&
22383
- (req.length === name.length ||
22384
- req.charCodeAt(name.length) === slashCode)
22385
- ) {
22386
- const remainingRequest = `.${req.slice(name.length)}`;
22387
- /** @type {ResolveRequest} */
22388
- const obj = {
22389
- ...request,
22390
- request: remainingRequest,
22391
- path: /** @type {string} */ (request.descriptionFileRoot),
22392
- relativePath: ".",
22393
- };
24483
+ if (
24484
+ req.startsWith(name) &&
24485
+ (req.length === name.length ||
24486
+ req.charCodeAt(name.length) === slashCode)
24487
+ ) {
24488
+ const remainingRequest = `.${req.slice(name.length)}`;
24489
+ /** @type {ResolveRequest} */
24490
+ const obj = {
24491
+ ...request,
24492
+ request: remainingRequest,
24493
+ path: /** @type {string} */ (request.descriptionFileRoot),
24494
+ relativePath: ".",
24495
+ };
24496
+
24497
+ resolver.doResolve(
24498
+ target,
24499
+ obj,
24500
+ "self reference",
24501
+ resolveContext,
24502
+ callback,
24503
+ );
24504
+ } else {
24505
+ return callback();
24506
+ }
24507
+ });
24508
+ }
24509
+ };
24510
+
24511
+
24512
+ /***/ }),
24513
+
24514
+ /***/ 8023:
24515
+ /***/ ((module, __unused_webpack_exports, __webpack_require__) => {
24516
+
24517
+ const Utils = __webpack_require__(5178);
24518
+ const pth = __webpack_require__(6928);
24519
+ const ZipEntry = __webpack_require__(8692);
24520
+ const ZipFile = __webpack_require__(3700);
24521
+
24522
+ const get_Bool = (...val) => Utils.findLast(val, (c) => typeof c === "boolean");
24523
+ const get_Str = (...val) => Utils.findLast(val, (c) => typeof c === "string");
24524
+ const get_Fun = (...val) => Utils.findLast(val, (c) => typeof c === "function");
24525
+
24526
+ const defaultOptions = {
24527
+ // option "noSort" : if true it disables files sorting
24528
+ noSort: false,
24529
+ // read entries during load (initial loading may be slower)
24530
+ readEntries: false,
24531
+ // default method is none
24532
+ method: Utils.Constants.NONE,
24533
+ // file system
24534
+ fs: null
24535
+ };
24536
+
24537
+ module.exports = function (/**String*/ input, /** object */ options) {
24538
+ let inBuffer = null;
24539
+
24540
+ // create object based default options, allowing them to be overwritten
24541
+ const opts = Object.assign(Object.create(null), defaultOptions);
24542
+
24543
+ // test input variable
24544
+ if (input && "object" === typeof input) {
24545
+ // if value is not buffer we accept it to be object with options
24546
+ if (!(input instanceof Uint8Array)) {
24547
+ Object.assign(opts, input);
24548
+ input = opts.input ? opts.input : undefined;
24549
+ if (opts.input) delete opts.input;
24550
+ }
24551
+
24552
+ // if input is buffer
24553
+ if (Buffer.isBuffer(input)) {
24554
+ inBuffer = input;
24555
+ opts.method = Utils.Constants.BUFFER;
24556
+ input = undefined;
24557
+ }
24558
+ }
24559
+
24560
+ // assign options
24561
+ Object.assign(opts, options);
24562
+
24563
+ // instanciate utils filesystem
24564
+ const filetools = new Utils(opts);
24565
+
24566
+ if (typeof opts.decoder !== "object" || typeof opts.decoder.encode !== "function" || typeof opts.decoder.decode !== "function") {
24567
+ opts.decoder = Utils.decoder;
24568
+ }
24569
+
24570
+ // if input is file name we retrieve its content
24571
+ if (input && "string" === typeof input) {
24572
+ // load zip file
24573
+ if (filetools.fs.existsSync(input)) {
24574
+ opts.method = Utils.Constants.FILE;
24575
+ opts.filename = input;
24576
+ inBuffer = filetools.fs.readFileSync(input);
24577
+ } else {
24578
+ throw Utils.Errors.INVALID_FILENAME();
24579
+ }
24580
+ }
24581
+
24582
+ // create variable
24583
+ const _zip = new ZipFile(inBuffer, opts);
24584
+
24585
+ const { canonical, sanitize, zipnamefix } = Utils;
24586
+
24587
+ function getEntry(/**Object*/ entry) {
24588
+ if (entry && _zip) {
24589
+ var item;
24590
+ // If entry was given as a file name
24591
+ if (typeof entry === "string") item = _zip.getEntry(pth.posix.normalize(entry));
24592
+ // if entry was given as a ZipEntry object
24593
+ if (typeof entry === "object" && typeof entry.entryName !== "undefined" && typeof entry.header !== "undefined") item = _zip.getEntry(entry.entryName);
24594
+
24595
+ if (item) {
24596
+ return item;
24597
+ }
24598
+ }
24599
+ return null;
24600
+ }
24601
+
24602
+ function fixPath(zipPath) {
24603
+ const { join, normalize, sep } = pth.posix;
24604
+ // convert windows file separators and normalize
24605
+ return join(".", normalize(sep + zipPath.split("\\").join(sep) + sep));
24606
+ }
24607
+
24608
+ function filenameFilter(filterfn) {
24609
+ if (filterfn instanceof RegExp) {
24610
+ // if filter is RegExp wrap it
24611
+ return (function (rx) {
24612
+ return function (filename) {
24613
+ return rx.test(filename);
24614
+ };
24615
+ })(filterfn);
24616
+ } else if ("function" !== typeof filterfn) {
24617
+ // if filter is not function we will replace it
24618
+ return () => true;
24619
+ }
24620
+ return filterfn;
24621
+ }
24622
+
24623
+ // keep last character on folders
24624
+ const relativePath = (local, entry) => {
24625
+ let lastChar = entry.slice(-1);
24626
+ lastChar = lastChar === filetools.sep ? filetools.sep : "";
24627
+ return pth.relative(local, entry) + lastChar;
24628
+ };
24629
+
24630
+ return {
24631
+ /**
24632
+ * Extracts the given entry from the archive and returns the content as a Buffer object
24633
+ * @param {ZipEntry|string} entry ZipEntry object or String with the full path of the entry
24634
+ * @param {Buffer|string} [pass] - password
24635
+ * @return Buffer or Null in case of error
24636
+ */
24637
+ readFile: function (entry, pass) {
24638
+ var item = getEntry(entry);
24639
+ return (item && item.getData(pass)) || null;
24640
+ },
24641
+
24642
+ /**
24643
+ * Returns how many child elements has on entry (directories) on files it is always 0
24644
+ * @param {ZipEntry|string} entry ZipEntry object or String with the full path of the entry
24645
+ * @returns {integer}
24646
+ */
24647
+ childCount: function (entry) {
24648
+ const item = getEntry(entry);
24649
+ if (item) {
24650
+ return _zip.getChildCount(item);
24651
+ }
24652
+ },
24653
+
24654
+ /**
24655
+ * Asynchronous readFile
24656
+ * @param {ZipEntry|string} entry ZipEntry object or String with the full path of the entry
24657
+ * @param {callback} callback
24658
+ *
24659
+ * @return Buffer or Null in case of error
24660
+ */
24661
+ readFileAsync: function (entry, callback) {
24662
+ var item = getEntry(entry);
24663
+ if (item) {
24664
+ item.getDataAsync(callback);
24665
+ } else {
24666
+ callback(null, "getEntry failed for:" + entry);
24667
+ }
24668
+ },
24669
+
24670
+ /**
24671
+ * Extracts the given entry from the archive and returns the content as plain text in the given encoding
24672
+ * @param {ZipEntry|string} entry - ZipEntry object or String with the full path of the entry
24673
+ * @param {string} encoding - Optional. If no encoding is specified utf8 is used
24674
+ *
24675
+ * @return String
24676
+ */
24677
+ readAsText: function (entry, encoding) {
24678
+ var item = getEntry(entry);
24679
+ if (item) {
24680
+ var data = item.getData();
24681
+ if (data && data.length) {
24682
+ return data.toString(encoding || "utf8");
24683
+ }
24684
+ }
24685
+ return "";
24686
+ },
24687
+
24688
+ /**
24689
+ * Asynchronous readAsText
24690
+ * @param {ZipEntry|string} entry ZipEntry object or String with the full path of the entry
24691
+ * @param {callback} callback
24692
+ * @param {string} [encoding] - Optional. If no encoding is specified utf8 is used
24693
+ *
24694
+ * @return String
24695
+ */
24696
+ readAsTextAsync: function (entry, callback, encoding) {
24697
+ var item = getEntry(entry);
24698
+ if (item) {
24699
+ item.getDataAsync(function (data, err) {
24700
+ if (err) {
24701
+ callback(data, err);
24702
+ return;
24703
+ }
24704
+
24705
+ if (data && data.length) {
24706
+ callback(data.toString(encoding || "utf8"));
24707
+ } else {
24708
+ callback("");
24709
+ }
24710
+ });
24711
+ } else {
24712
+ callback("");
24713
+ }
24714
+ },
24715
+
24716
+ /**
24717
+ * Remove the entry from the file or the entry and all it's nested directories and files if the given entry is a directory
24718
+ *
24719
+ * @param {ZipEntry|string} entry
24720
+ * @returns {void}
24721
+ */
24722
+ deleteFile: function (entry, withsubfolders = true) {
24723
+ // @TODO: test deleteFile
24724
+ var item = getEntry(entry);
24725
+ if (item) {
24726
+ _zip.deleteFile(item.entryName, withsubfolders);
24727
+ }
24728
+ },
24729
+
24730
+ /**
24731
+ * Remove the entry from the file or directory without affecting any nested entries
24732
+ *
24733
+ * @param {ZipEntry|string} entry
24734
+ * @returns {void}
24735
+ */
24736
+ deleteEntry: function (entry) {
24737
+ // @TODO: test deleteEntry
24738
+ var item = getEntry(entry);
24739
+ if (item) {
24740
+ _zip.deleteEntry(item.entryName);
24741
+ }
24742
+ },
24743
+
24744
+ /**
24745
+ * Adds a comment to the zip. The zip must be rewritten after adding the comment.
24746
+ *
24747
+ * @param {string} comment
24748
+ */
24749
+ addZipComment: function (comment) {
24750
+ // @TODO: test addZipComment
24751
+ _zip.comment = comment;
24752
+ },
24753
+
24754
+ /**
24755
+ * Returns the zip comment
24756
+ *
24757
+ * @return String
24758
+ */
24759
+ getZipComment: function () {
24760
+ return _zip.comment || "";
24761
+ },
24762
+
24763
+ /**
24764
+ * Adds a comment to a specified zipEntry. The zip must be rewritten after adding the comment
24765
+ * The comment cannot exceed 65535 characters in length
24766
+ *
24767
+ * @param {ZipEntry} entry
24768
+ * @param {string} comment
24769
+ */
24770
+ addZipEntryComment: function (entry, comment) {
24771
+ var item = getEntry(entry);
24772
+ if (item) {
24773
+ item.comment = comment;
24774
+ }
24775
+ },
24776
+
24777
+ /**
24778
+ * Returns the comment of the specified entry
24779
+ *
24780
+ * @param {ZipEntry} entry
24781
+ * @return String
24782
+ */
24783
+ getZipEntryComment: function (entry) {
24784
+ var item = getEntry(entry);
24785
+ if (item) {
24786
+ return item.comment || "";
24787
+ }
24788
+ return "";
24789
+ },
24790
+
24791
+ /**
24792
+ * Updates the content of an existing entry inside the archive. The zip must be rewritten after updating the content
24793
+ *
24794
+ * @param {ZipEntry} entry
24795
+ * @param {Buffer} content
24796
+ */
24797
+ updateFile: function (entry, content) {
24798
+ var item = getEntry(entry);
24799
+ if (item) {
24800
+ item.setData(content);
24801
+ }
24802
+ },
24803
+
24804
+ /**
24805
+ * Adds a file from the disk to the archive
24806
+ *
24807
+ * @param {string} localPath File to add to zip
24808
+ * @param {string} [zipPath] Optional path inside the zip
24809
+ * @param {string} [zipName] Optional name for the file
24810
+ * @param {string} [comment] Optional file comment
24811
+ */
24812
+ addLocalFile: function (localPath, zipPath, zipName, comment) {
24813
+ if (filetools.fs.existsSync(localPath)) {
24814
+ // fix ZipPath
24815
+ zipPath = zipPath ? fixPath(zipPath) : "";
24816
+
24817
+ // p - local file name
24818
+ const p = pth.win32.basename(pth.win32.normalize(localPath));
24819
+
24820
+ // add file name into zippath
24821
+ zipPath += zipName ? zipName : p;
24822
+
24823
+ // read file attributes
24824
+ const _attr = filetools.fs.statSync(localPath);
24825
+
24826
+ // get file content
24827
+ const data = _attr.isFile() ? filetools.fs.readFileSync(localPath) : Buffer.alloc(0);
24828
+
24829
+ // if folder
24830
+ if (_attr.isDirectory()) zipPath += filetools.sep;
24831
+
24832
+ // add file into zip file
24833
+ this.addFile(zipPath, data, comment, _attr);
24834
+ } else {
24835
+ throw Utils.Errors.FILE_NOT_FOUND(localPath);
24836
+ }
24837
+ },
24838
+
24839
+ /**
24840
+ * Callback for showing if everything was done.
24841
+ *
24842
+ * @callback doneCallback
24843
+ * @param {Error} err - Error object
24844
+ * @param {boolean} done - was request fully completed
24845
+ */
24846
+
24847
+ /**
24848
+ * Adds a file from the disk to the archive
24849
+ *
24850
+ * @param {(object|string)} options - options object, if it is string it us used as localPath.
24851
+ * @param {string} options.localPath - Local path to the file.
24852
+ * @param {string} [options.comment] - Optional file comment.
24853
+ * @param {string} [options.zipPath] - Optional path inside the zip
24854
+ * @param {string} [options.zipName] - Optional name for the file
24855
+ * @param {doneCallback} callback - The callback that handles the response.
24856
+ */
24857
+ addLocalFileAsync: function (options, callback) {
24858
+ options = typeof options === "object" ? options : { localPath: options };
24859
+ const localPath = pth.resolve(options.localPath);
24860
+ const { comment } = options;
24861
+ let { zipPath, zipName } = options;
24862
+ const self = this;
24863
+
24864
+ filetools.fs.stat(localPath, function (err, stats) {
24865
+ if (err) return callback(err, false);
24866
+ // fix ZipPath
24867
+ zipPath = zipPath ? fixPath(zipPath) : "";
24868
+ // p - local file name
24869
+ const p = pth.win32.basename(pth.win32.normalize(localPath));
24870
+ // add file name into zippath
24871
+ zipPath += zipName ? zipName : p;
24872
+
24873
+ if (stats.isFile()) {
24874
+ filetools.fs.readFile(localPath, function (err, data) {
24875
+ if (err) return callback(err, false);
24876
+ self.addFile(zipPath, data, comment, stats);
24877
+ return setImmediate(callback, undefined, true);
24878
+ });
24879
+ } else if (stats.isDirectory()) {
24880
+ zipPath += filetools.sep;
24881
+ self.addFile(zipPath, Buffer.alloc(0), comment, stats);
24882
+ return setImmediate(callback, undefined, true);
24883
+ }
24884
+ });
24885
+ },
24886
+
24887
+ /**
24888
+ * Adds a local directory and all its nested files and directories to the archive
24889
+ *
24890
+ * @param {string} localPath - local path to the folder
24891
+ * @param {string} [zipPath] - optional path inside zip
24892
+ * @param {(RegExp|function)} [filter] - optional RegExp or Function if files match will be included.
24893
+ */
24894
+ addLocalFolder: function (localPath, zipPath, filter) {
24895
+ // Prepare filter
24896
+ filter = filenameFilter(filter);
24897
+
24898
+ // fix ZipPath
24899
+ zipPath = zipPath ? fixPath(zipPath) : "";
24900
+
24901
+ // normalize the path first
24902
+ localPath = pth.normalize(localPath);
24903
+
24904
+ if (filetools.fs.existsSync(localPath)) {
24905
+ const items = filetools.findFiles(localPath);
24906
+ const self = this;
24907
+
24908
+ if (items.length) {
24909
+ for (const filepath of items) {
24910
+ const p = pth.join(zipPath, relativePath(localPath, filepath));
24911
+ if (filter(p)) {
24912
+ self.addLocalFile(filepath, pth.dirname(p));
24913
+ }
24914
+ }
24915
+ }
24916
+ } else {
24917
+ throw Utils.Errors.FILE_NOT_FOUND(localPath);
24918
+ }
24919
+ },
24920
+
24921
+ /**
24922
+ * Asynchronous addLocalFolder
24923
+ * @param {string} localPath
24924
+ * @param {callback} callback
24925
+ * @param {string} [zipPath] optional path inside zip
24926
+ * @param {RegExp|function} [filter] optional RegExp or Function if files match will
24927
+ * be included.
24928
+ */
24929
+ addLocalFolderAsync: function (localPath, callback, zipPath, filter) {
24930
+ // Prepare filter
24931
+ filter = filenameFilter(filter);
24932
+
24933
+ // fix ZipPath
24934
+ zipPath = zipPath ? fixPath(zipPath) : "";
24935
+
24936
+ // normalize the path first
24937
+ localPath = pth.normalize(localPath);
24938
+
24939
+ var self = this;
24940
+ filetools.fs.open(localPath, "r", function (err) {
24941
+ if (err && err.code === "ENOENT") {
24942
+ callback(undefined, Utils.Errors.FILE_NOT_FOUND(localPath));
24943
+ } else if (err) {
24944
+ callback(undefined, err);
24945
+ } else {
24946
+ var items = filetools.findFiles(localPath);
24947
+ var i = -1;
24948
+
24949
+ var next = function () {
24950
+ i += 1;
24951
+ if (i < items.length) {
24952
+ var filepath = items[i];
24953
+ var p = relativePath(localPath, filepath).split("\\").join("/"); //windows fix
24954
+ p = p
24955
+ .normalize("NFD")
24956
+ .replace(/[\u0300-\u036f]/g, "")
24957
+ .replace(/[^\x20-\x7E]/g, ""); // accent fix
24958
+ if (filter(p)) {
24959
+ filetools.fs.stat(filepath, function (er0, stats) {
24960
+ if (er0) callback(undefined, er0);
24961
+ if (stats.isFile()) {
24962
+ filetools.fs.readFile(filepath, function (er1, data) {
24963
+ if (er1) {
24964
+ callback(undefined, er1);
24965
+ } else {
24966
+ self.addFile(zipPath + p, data, "", stats);
24967
+ next();
24968
+ }
24969
+ });
24970
+ } else {
24971
+ self.addFile(zipPath + p + "/", Buffer.alloc(0), "", stats);
24972
+ next();
24973
+ }
24974
+ });
24975
+ } else {
24976
+ process.nextTick(() => {
24977
+ next();
24978
+ });
24979
+ }
24980
+ } else {
24981
+ callback(true, undefined);
24982
+ }
24983
+ };
24984
+
24985
+ next();
24986
+ }
24987
+ });
24988
+ },
24989
+
24990
+ /**
24991
+ * Adds a local directory and all its nested files and directories to the archive
24992
+ *
24993
+ * @param {object | string} options - options object, if it is string it us used as localPath.
24994
+ * @param {string} options.localPath - Local path to the folder.
24995
+ * @param {string} [options.zipPath] - optional path inside zip.
24996
+ * @param {RegExp|function} [options.filter] - optional RegExp or Function if files match will be included.
24997
+ * @param {function|string} [options.namefix] - optional function to help fix filename
24998
+ * @param {doneCallback} callback - The callback that handles the response.
24999
+ *
25000
+ */
25001
+ addLocalFolderAsync2: function (options, callback) {
25002
+ const self = this;
25003
+ options = typeof options === "object" ? options : { localPath: options };
25004
+ localPath = pth.resolve(fixPath(options.localPath));
25005
+ let { zipPath, filter, namefix } = options;
25006
+
25007
+ if (filter instanceof RegExp) {
25008
+ filter = (function (rx) {
25009
+ return function (filename) {
25010
+ return rx.test(filename);
25011
+ };
25012
+ })(filter);
25013
+ } else if ("function" !== typeof filter) {
25014
+ filter = function () {
25015
+ return true;
25016
+ };
25017
+ }
25018
+
25019
+ // fix ZipPath
25020
+ zipPath = zipPath ? fixPath(zipPath) : "";
25021
+
25022
+ // Check Namefix function
25023
+ if (namefix == "latin1") {
25024
+ namefix = (str) =>
25025
+ str
25026
+ .normalize("NFD")
25027
+ .replace(/[\u0300-\u036f]/g, "")
25028
+ .replace(/[^\x20-\x7E]/g, ""); // accent fix (latin1 characers only)
25029
+ }
25030
+
25031
+ if (typeof namefix !== "function") namefix = (str) => str;
25032
+
25033
+ // internal, create relative path + fix the name
25034
+ const relPathFix = (entry) => pth.join(zipPath, namefix(relativePath(localPath, entry)));
25035
+ const fileNameFix = (entry) => pth.win32.basename(pth.win32.normalize(namefix(entry)));
25036
+
25037
+ filetools.fs.open(localPath, "r", function (err) {
25038
+ if (err && err.code === "ENOENT") {
25039
+ callback(undefined, Utils.Errors.FILE_NOT_FOUND(localPath));
25040
+ } else if (err) {
25041
+ callback(undefined, err);
25042
+ } else {
25043
+ filetools.findFilesAsync(localPath, function (err, fileEntries) {
25044
+ if (err) return callback(err);
25045
+ fileEntries = fileEntries.filter((dir) => filter(relPathFix(dir)));
25046
+ if (!fileEntries.length) callback(undefined, false);
25047
+
25048
+ setImmediate(
25049
+ fileEntries.reverse().reduce(function (next, entry) {
25050
+ return function (err, done) {
25051
+ if (err || done === false) return setImmediate(next, err, false);
25052
+
25053
+ self.addLocalFileAsync(
25054
+ {
25055
+ localPath: entry,
25056
+ zipPath: pth.dirname(relPathFix(entry)),
25057
+ zipName: fileNameFix(entry)
25058
+ },
25059
+ next
25060
+ );
25061
+ };
25062
+ }, callback)
25063
+ );
25064
+ });
25065
+ }
25066
+ });
25067
+ },
25068
+
25069
+ /**
25070
+ * Adds a local directory and all its nested files and directories to the archive
25071
+ *
25072
+ * @param {string} localPath - path where files will be extracted
25073
+ * @param {object} props - optional properties
25074
+ * @param {string} [props.zipPath] - optional path inside zip
25075
+ * @param {RegExp|function} [props.filter] - optional RegExp or Function if files match will be included.
25076
+ * @param {function|string} [props.namefix] - optional function to help fix filename
25077
+ */
25078
+ addLocalFolderPromise: function (localPath, props) {
25079
+ return new Promise((resolve, reject) => {
25080
+ this.addLocalFolderAsync2(Object.assign({ localPath }, props), (err, done) => {
25081
+ if (err) reject(err);
25082
+ if (done) resolve(this);
25083
+ });
25084
+ });
25085
+ },
25086
+
25087
+ /**
25088
+ * Allows you to create a entry (file or directory) in the zip file.
25089
+ * If you want to create a directory the entryName must end in / and a null buffer should be provided.
25090
+ * Comment and attributes are optional
25091
+ *
25092
+ * @param {string} entryName
25093
+ * @param {Buffer | string} content - file content as buffer or utf8 coded string
25094
+ * @param {string} [comment] - file comment
25095
+ * @param {number | object} [attr] - number as unix file permissions, object as filesystem Stats object
25096
+ */
25097
+ addFile: function (entryName, content, comment, attr) {
25098
+ entryName = zipnamefix(entryName);
25099
+ let entry = getEntry(entryName);
25100
+ const update = entry != null;
25101
+
25102
+ // prepare new entry
25103
+ if (!update) {
25104
+ entry = new ZipEntry(opts);
25105
+ entry.entryName = entryName;
25106
+ }
25107
+ entry.comment = comment || "";
25108
+
25109
+ const isStat = "object" === typeof attr && attr instanceof filetools.fs.Stats;
25110
+
25111
+ // last modification time from file stats
25112
+ if (isStat) {
25113
+ entry.header.time = attr.mtime;
25114
+ }
25115
+
25116
+ // Set file attribute
25117
+ var fileattr = entry.isDirectory ? 0x10 : 0; // (MS-DOS directory flag)
25118
+
25119
+ // extended attributes field for Unix
25120
+ // set file type either S_IFDIR / S_IFREG
25121
+ let unix = entry.isDirectory ? 0x4000 : 0x8000;
25122
+
25123
+ if (isStat) {
25124
+ // File attributes from file stats
25125
+ unix |= 0xfff & attr.mode;
25126
+ } else if ("number" === typeof attr) {
25127
+ // attr from given attr values
25128
+ unix |= 0xfff & attr;
25129
+ } else {
25130
+ // Default values:
25131
+ unix |= entry.isDirectory ? 0o755 : 0o644; // permissions (drwxr-xr-x) or (-r-wr--r--)
25132
+ }
25133
+
25134
+ fileattr = (fileattr | (unix << 16)) >>> 0; // add attributes
25135
+
25136
+ entry.attr = fileattr;
25137
+
25138
+ entry.setData(content);
25139
+ if (!update) _zip.setEntry(entry);
25140
+
25141
+ return entry;
25142
+ },
25143
+
25144
+ /**
25145
+ * Returns an array of ZipEntry objects representing the files and folders inside the archive
25146
+ *
25147
+ * @param {string} [password]
25148
+ * @returns Array
25149
+ */
25150
+ getEntries: function (password) {
25151
+ _zip.password = password;
25152
+ return _zip ? _zip.entries : [];
25153
+ },
25154
+
25155
+ /**
25156
+ * Returns a ZipEntry object representing the file or folder specified by ``name``.
25157
+ *
25158
+ * @param {string} name
25159
+ * @return ZipEntry
25160
+ */
25161
+ getEntry: function (/**String*/ name) {
25162
+ return getEntry(name);
25163
+ },
25164
+
25165
+ getEntryCount: function () {
25166
+ return _zip.getEntryCount();
25167
+ },
25168
+
25169
+ forEach: function (callback) {
25170
+ return _zip.forEach(callback);
25171
+ },
25172
+
25173
+ /**
25174
+ * Extracts the given entry to the given targetPath
25175
+ * If the entry is a directory inside the archive, the entire directory and it's subdirectories will be extracted
25176
+ *
25177
+ * @param {string|ZipEntry} entry - ZipEntry object or String with the full path of the entry
25178
+ * @param {string} targetPath - Target folder where to write the file
25179
+ * @param {boolean} [maintainEntryPath=true] - If maintainEntryPath is true and the entry is inside a folder, the entry folder will be created in targetPath as well. Default is TRUE
25180
+ * @param {boolean} [overwrite=false] - If the file already exists at the target path, the file will be overwriten if this is true.
25181
+ * @param {boolean} [keepOriginalPermission=false] - The file will be set as the permission from the entry if this is true.
25182
+ * @param {string} [outFileName] - String If set will override the filename of the extracted file (Only works if the entry is a file)
25183
+ *
25184
+ * @return Boolean
25185
+ */
25186
+ extractEntryTo: function (entry, targetPath, maintainEntryPath, overwrite, keepOriginalPermission, outFileName) {
25187
+ overwrite = get_Bool(false, overwrite);
25188
+ keepOriginalPermission = get_Bool(false, keepOriginalPermission);
25189
+ maintainEntryPath = get_Bool(true, maintainEntryPath);
25190
+ outFileName = get_Str(keepOriginalPermission, outFileName);
25191
+
25192
+ var item = getEntry(entry);
25193
+ if (!item) {
25194
+ throw Utils.Errors.NO_ENTRY();
25195
+ }
25196
+
25197
+ var entryName = canonical(item.entryName);
25198
+
25199
+ var target = sanitize(targetPath, outFileName && !item.isDirectory ? outFileName : maintainEntryPath ? entryName : pth.basename(entryName));
25200
+
25201
+ if (item.isDirectory) {
25202
+ var children = _zip.getEntryChildren(item);
25203
+ children.forEach(function (child) {
25204
+ if (child.isDirectory) return;
25205
+ var content = child.getData();
25206
+ if (!content) {
25207
+ throw Utils.Errors.CANT_EXTRACT_FILE();
25208
+ }
25209
+ var name = canonical(child.entryName);
25210
+ var childName = sanitize(targetPath, maintainEntryPath ? name : pth.basename(name));
25211
+ // The reverse operation for attr depend on method addFile()
25212
+ const fileAttr = keepOriginalPermission ? child.header.fileAttr : undefined;
25213
+ filetools.writeFileTo(childName, content, overwrite, fileAttr);
25214
+ });
25215
+ return true;
25216
+ }
25217
+
25218
+ var content = item.getData(_zip.password);
25219
+ if (!content) throw Utils.Errors.CANT_EXTRACT_FILE();
25220
+
25221
+ if (filetools.fs.existsSync(target) && !overwrite) {
25222
+ throw Utils.Errors.CANT_OVERRIDE();
25223
+ }
25224
+ // The reverse operation for attr depend on method addFile()
25225
+ const fileAttr = keepOriginalPermission ? entry.header.fileAttr : undefined;
25226
+ filetools.writeFileTo(target, content, overwrite, fileAttr);
25227
+
25228
+ return true;
25229
+ },
25230
+
25231
+ /**
25232
+ * Test the archive
25233
+ * @param {string} [pass]
25234
+ */
25235
+ test: function (pass) {
25236
+ if (!_zip) {
25237
+ return false;
25238
+ }
25239
+
25240
+ for (var entry in _zip.entries) {
25241
+ try {
25242
+ if (entry.isDirectory) {
25243
+ continue;
25244
+ }
25245
+ var content = _zip.entries[entry].getData(pass);
25246
+ if (!content) {
25247
+ return false;
25248
+ }
25249
+ } catch (err) {
25250
+ return false;
25251
+ }
25252
+ }
25253
+ return true;
25254
+ },
25255
+
25256
+ /**
25257
+ * Extracts the entire archive to the given location
25258
+ *
25259
+ * @param {string} targetPath Target location
25260
+ * @param {boolean} [overwrite=false] If the file already exists at the target path, the file will be overwriten if this is true.
25261
+ * Default is FALSE
25262
+ * @param {boolean} [keepOriginalPermission=false] The file will be set as the permission from the entry if this is true.
25263
+ * Default is FALSE
25264
+ * @param {string|Buffer} [pass] password
25265
+ */
25266
+ extractAllTo: function (targetPath, overwrite, keepOriginalPermission, pass) {
25267
+ keepOriginalPermission = get_Bool(false, keepOriginalPermission);
25268
+ pass = get_Str(keepOriginalPermission, pass);
25269
+ overwrite = get_Bool(false, overwrite);
25270
+ if (!_zip) throw Utils.Errors.NO_ZIP();
25271
+
25272
+ _zip.entries.forEach(function (entry) {
25273
+ var entryName = sanitize(targetPath, canonical(entry.entryName));
25274
+ if (entry.isDirectory) {
25275
+ filetools.makeDir(entryName);
25276
+ return;
25277
+ }
25278
+ var content = entry.getData(pass);
25279
+ if (!content) {
25280
+ throw Utils.Errors.CANT_EXTRACT_FILE();
25281
+ }
25282
+ // The reverse operation for attr depend on method addFile()
25283
+ const fileAttr = keepOriginalPermission ? entry.header.fileAttr : undefined;
25284
+ filetools.writeFileTo(entryName, content, overwrite, fileAttr);
25285
+ try {
25286
+ filetools.fs.utimesSync(entryName, entry.header.time, entry.header.time);
25287
+ } catch (err) {
25288
+ throw Utils.Errors.CANT_EXTRACT_FILE();
25289
+ }
25290
+ });
25291
+ },
25292
+
25293
+ /**
25294
+ * Asynchronous extractAllTo
25295
+ *
25296
+ * @param {string} targetPath Target location
25297
+ * @param {boolean} [overwrite=false] If the file already exists at the target path, the file will be overwriten if this is true.
25298
+ * Default is FALSE
25299
+ * @param {boolean} [keepOriginalPermission=false] The file will be set as the permission from the entry if this is true.
25300
+ * Default is FALSE
25301
+ * @param {function} callback The callback will be executed when all entries are extracted successfully or any error is thrown.
25302
+ */
25303
+ extractAllToAsync: function (targetPath, overwrite, keepOriginalPermission, callback) {
25304
+ callback = get_Fun(overwrite, keepOriginalPermission, callback);
25305
+ keepOriginalPermission = get_Bool(false, keepOriginalPermission);
25306
+ overwrite = get_Bool(false, overwrite);
25307
+ if (!callback) {
25308
+ return new Promise((resolve, reject) => {
25309
+ this.extractAllToAsync(targetPath, overwrite, keepOriginalPermission, function (err) {
25310
+ if (err) {
25311
+ reject(err);
25312
+ } else {
25313
+ resolve(this);
25314
+ }
25315
+ });
25316
+ });
25317
+ }
25318
+ if (!_zip) {
25319
+ callback(Utils.Errors.NO_ZIP());
25320
+ return;
25321
+ }
25322
+
25323
+ targetPath = pth.resolve(targetPath);
25324
+ // convert entryName to
25325
+ const getPath = (entry) => sanitize(targetPath, pth.normalize(canonical(entry.entryName)));
25326
+ const getError = (msg, file) => new Error(msg + ': "' + file + '"');
25327
+
25328
+ // separate directories from files
25329
+ const dirEntries = [];
25330
+ const fileEntries = [];
25331
+ _zip.entries.forEach((e) => {
25332
+ if (e.isDirectory) {
25333
+ dirEntries.push(e);
25334
+ } else {
25335
+ fileEntries.push(e);
25336
+ }
25337
+ });
25338
+
25339
+ // Create directory entries first synchronously
25340
+ // this prevents race condition and assures folders are there before writing files
25341
+ for (const entry of dirEntries) {
25342
+ const dirPath = getPath(entry);
25343
+ // The reverse operation for attr depend on method addFile()
25344
+ const dirAttr = keepOriginalPermission ? entry.header.fileAttr : undefined;
25345
+ try {
25346
+ filetools.makeDir(dirPath);
25347
+ if (dirAttr) filetools.fs.chmodSync(dirPath, dirAttr);
25348
+ // in unix timestamp will change if files are later added to folder, but still
25349
+ filetools.fs.utimesSync(dirPath, entry.header.time, entry.header.time);
25350
+ } catch (er) {
25351
+ callback(getError("Unable to create folder", dirPath));
25352
+ }
25353
+ }
25354
+
25355
+ fileEntries.reverse().reduce(function (next, entry) {
25356
+ return function (err) {
25357
+ if (err) {
25358
+ next(err);
25359
+ } else {
25360
+ const entryName = pth.normalize(canonical(entry.entryName));
25361
+ const filePath = sanitize(targetPath, entryName);
25362
+ entry.getDataAsync(function (content, err_1) {
25363
+ if (err_1) {
25364
+ next(err_1);
25365
+ } else if (!content) {
25366
+ next(Utils.Errors.CANT_EXTRACT_FILE());
25367
+ } else {
25368
+ // The reverse operation for attr depend on method addFile()
25369
+ const fileAttr = keepOriginalPermission ? entry.header.fileAttr : undefined;
25370
+ filetools.writeFileToAsync(filePath, content, overwrite, fileAttr, function (succ) {
25371
+ if (!succ) {
25372
+ next(getError("Unable to write file", filePath));
25373
+ }
25374
+ filetools.fs.utimes(filePath, entry.header.time, entry.header.time, function (err_2) {
25375
+ if (err_2) {
25376
+ next(getError("Unable to set times", filePath));
25377
+ } else {
25378
+ next();
25379
+ }
25380
+ });
25381
+ });
25382
+ }
25383
+ });
25384
+ }
25385
+ };
25386
+ }, callback)();
25387
+ },
25388
+
25389
+ /**
25390
+ * Writes the newly created zip file to disk at the specified location or if a zip was opened and no ``targetFileName`` is provided, it will overwrite the opened zip
25391
+ *
25392
+ * @param {string} targetFileName
25393
+ * @param {function} callback
25394
+ */
25395
+ writeZip: function (targetFileName, callback) {
25396
+ if (arguments.length === 1) {
25397
+ if (typeof targetFileName === "function") {
25398
+ callback = targetFileName;
25399
+ targetFileName = "";
25400
+ }
25401
+ }
25402
+
25403
+ if (!targetFileName && opts.filename) {
25404
+ targetFileName = opts.filename;
25405
+ }
25406
+ if (!targetFileName) return;
25407
+
25408
+ var zipData = _zip.compressToBuffer();
25409
+ if (zipData) {
25410
+ var ok = filetools.writeFileTo(targetFileName, zipData, true);
25411
+ if (typeof callback === "function") callback(!ok ? new Error("failed") : null, "");
25412
+ }
25413
+ },
25414
+
25415
+ /**
25416
+ *
25417
+ * @param {string} targetFileName
25418
+ * @param {object} [props]
25419
+ * @param {boolean} [props.overwrite=true] If the file already exists at the target path, the file will be overwriten if this is true.
25420
+ * @param {boolean} [props.perm] The file will be set as the permission from the entry if this is true.
25421
+
25422
+ * @returns {Promise<void>}
25423
+ */
25424
+ writeZipPromise: function (/**String*/ targetFileName, /* object */ props) {
25425
+ const { overwrite, perm } = Object.assign({ overwrite: true }, props);
25426
+
25427
+ return new Promise((resolve, reject) => {
25428
+ // find file name
25429
+ if (!targetFileName && opts.filename) targetFileName = opts.filename;
25430
+ if (!targetFileName) reject("ADM-ZIP: ZIP File Name Missing");
25431
+
25432
+ this.toBufferPromise().then((zipData) => {
25433
+ const ret = (done) => (done ? resolve(done) : reject("ADM-ZIP: Wasn't able to write zip file"));
25434
+ filetools.writeFileToAsync(targetFileName, zipData, overwrite, perm, ret);
25435
+ }, reject);
25436
+ });
25437
+ },
25438
+
25439
+ /**
25440
+ * @returns {Promise<Buffer>} A promise to the Buffer.
25441
+ */
25442
+ toBufferPromise: function () {
25443
+ return new Promise((resolve, reject) => {
25444
+ _zip.toAsyncBuffer(resolve, reject);
25445
+ });
25446
+ },
22394
25447
 
22395
- resolver.doResolve(
22396
- target,
22397
- obj,
22398
- "self reference",
22399
- resolveContext,
22400
- callback,
22401
- );
22402
- } else {
22403
- return callback();
22404
- }
22405
- });
22406
- }
25448
+ /**
25449
+ * Returns the content of the entire zip file as a Buffer object
25450
+ *
25451
+ * @prop {function} [onSuccess]
25452
+ * @prop {function} [onFail]
25453
+ * @prop {function} [onItemStart]
25454
+ * @prop {function} [onItemEnd]
25455
+ * @returns {Buffer}
25456
+ */
25457
+ toBuffer: function (onSuccess, onFail, onItemStart, onItemEnd) {
25458
+ if (typeof onSuccess === "function") {
25459
+ _zip.toAsyncBuffer(onSuccess, onFail, onItemStart, onItemEnd);
25460
+ return null;
25461
+ }
25462
+ return _zip.compressToBuffer();
25463
+ }
25464
+ };
22407
25465
  };
22408
25466
 
22409
25467
 
@@ -22769,7 +25827,7 @@ function exitWithError(...messages) {
22769
25827
  /***/ ((module) => {
22770
25828
 
22771
25829
  "use strict";
22772
- module.exports = /*#__PURE__*/JSON.parse('{"name":"@squidcloud/cli","version":"1.0.409","description":"The Squid CLI","main":"dist/index.js","scripts":{"start":"node dist/index.js","start-ts":"ts-node -r tsconfig-paths/register src/index.ts","prebuild":"rimraf dist","build":"webpack --mode=production","build:dev":"webpack --mode=development","lint":"eslint","link":"npm run build && chmod 755 dist/index.js && npm link","watch":"webpack --watch","deploy":"npm run build && npm pack --silent | xargs -I {} mv {} package.tgz && npm install -g package.tgz && rm -rf package.tgz","publish:public":"npm run build && npm publish --access public"},"files":["dist/**/*"],"bin":{"squid":"dist/index.js"},"keywords":[],"author":"","license":"ISC","engines":{"node":">=18.0.0"},"dependencies":{"@squidcloud/local-backend":"^1.0.409","copy-webpack-plugin":"^12.0.2","decompress":"^4.2.1","nodemon":"^3.1.3","terser-webpack-plugin":"^5.3.10","ts-loader":"^9.5.1","ts-node":"^10.9.2","tsconfig-paths":"^4.2.0","tsconfig-paths-webpack-plugin":"^4.1.0","webpack":"^5.101.3","zip-webpack-plugin":"^4.0.1"},"devDependencies":{"@types/decompress":"^4.2.7","@types/node":"^20.19.9","terminal-link":"^3.0.0"}}');
25830
+ module.exports = /*#__PURE__*/JSON.parse('{"name":"@squidcloud/cli","version":"1.0.411","description":"The Squid CLI","main":"dist/index.js","scripts":{"start":"node dist/index.js","start-ts":"ts-node -r tsconfig-paths/register src/index.ts","prebuild":"rimraf dist","build":"webpack --mode=production","build:dev":"webpack --mode=development","lint":"eslint","link":"npm run build && chmod 755 dist/index.js && npm link","watch":"webpack --watch","deploy":"npm run build && npm pack --silent | xargs -I {} mv {} package.tgz && npm install -g package.tgz && rm -rf package.tgz","publish:public":"npm run build && npm publish --access public"},"files":["dist/**/*"],"bin":{"squid":"dist/index.js"},"keywords":[],"author":"","license":"ISC","engines":{"node":">=18.0.0"},"dependencies":{"@squidcloud/local-backend":"^1.0.411","adm-zip":"^0.5.16","copy-webpack-plugin":"^12.0.2","decompress":"^4.2.1","nodemon":"^3.1.3","terser-webpack-plugin":"^5.3.10","ts-loader":"^9.5.1","ts-node":"^10.9.2","tsconfig-paths":"^4.2.0","tsconfig-paths-webpack-plugin":"^4.1.0","webpack":"^5.101.3","zip-webpack-plugin":"^4.0.1"},"devDependencies":{"@types/adm-zip":"^0.5.7","@types/decompress":"^4.2.7","@types/node":"^20.19.9","terminal-link":"^3.0.0"}}');
22773
25831
 
22774
25832
  /***/ }),
22775
25833
 
@@ -22783,6 +25841,143 @@ const ansiRegex = __webpack_require__(4764);
22783
25841
  module.exports = string => typeof string === 'string' ? string.replace(ansiRegex(), '') : string;
22784
25842
 
22785
25843
 
25844
+ /***/ }),
25845
+
25846
+ /***/ 8398:
25847
+ /***/ ((module, __unused_webpack_exports, __webpack_require__) => {
25848
+
25849
+ var Utils = __webpack_require__(5178),
25850
+ Constants = Utils.Constants;
25851
+
25852
+ /* The entries in the end of central directory */
25853
+ module.exports = function () {
25854
+ var _volumeEntries = 0,
25855
+ _totalEntries = 0,
25856
+ _size = 0,
25857
+ _offset = 0,
25858
+ _commentLength = 0;
25859
+
25860
+ return {
25861
+ get diskEntries() {
25862
+ return _volumeEntries;
25863
+ },
25864
+ set diskEntries(/*Number*/ val) {
25865
+ _volumeEntries = _totalEntries = val;
25866
+ },
25867
+
25868
+ get totalEntries() {
25869
+ return _totalEntries;
25870
+ },
25871
+ set totalEntries(/*Number*/ val) {
25872
+ _totalEntries = _volumeEntries = val;
25873
+ },
25874
+
25875
+ get size() {
25876
+ return _size;
25877
+ },
25878
+ set size(/*Number*/ val) {
25879
+ _size = val;
25880
+ },
25881
+
25882
+ get offset() {
25883
+ return _offset;
25884
+ },
25885
+ set offset(/*Number*/ val) {
25886
+ _offset = val;
25887
+ },
25888
+
25889
+ get commentLength() {
25890
+ return _commentLength;
25891
+ },
25892
+ set commentLength(/*Number*/ val) {
25893
+ _commentLength = val;
25894
+ },
25895
+
25896
+ get mainHeaderSize() {
25897
+ return Constants.ENDHDR + _commentLength;
25898
+ },
25899
+
25900
+ loadFromBinary: function (/*Buffer*/ data) {
25901
+ // data should be 22 bytes and start with "PK 05 06"
25902
+ // or be 56+ bytes and start with "PK 06 06" for Zip64
25903
+ if (
25904
+ (data.length !== Constants.ENDHDR || data.readUInt32LE(0) !== Constants.ENDSIG) &&
25905
+ (data.length < Constants.ZIP64HDR || data.readUInt32LE(0) !== Constants.ZIP64SIG)
25906
+ ) {
25907
+ throw Utils.Errors.INVALID_END();
25908
+ }
25909
+
25910
+ if (data.readUInt32LE(0) === Constants.ENDSIG) {
25911
+ // number of entries on this volume
25912
+ _volumeEntries = data.readUInt16LE(Constants.ENDSUB);
25913
+ // total number of entries
25914
+ _totalEntries = data.readUInt16LE(Constants.ENDTOT);
25915
+ // central directory size in bytes
25916
+ _size = data.readUInt32LE(Constants.ENDSIZ);
25917
+ // offset of first CEN header
25918
+ _offset = data.readUInt32LE(Constants.ENDOFF);
25919
+ // zip file comment length
25920
+ _commentLength = data.readUInt16LE(Constants.ENDCOM);
25921
+ } else {
25922
+ // number of entries on this volume
25923
+ _volumeEntries = Utils.readBigUInt64LE(data, Constants.ZIP64SUB);
25924
+ // total number of entries
25925
+ _totalEntries = Utils.readBigUInt64LE(data, Constants.ZIP64TOT);
25926
+ // central directory size in bytes
25927
+ _size = Utils.readBigUInt64LE(data, Constants.ZIP64SIZE);
25928
+ // offset of first CEN header
25929
+ _offset = Utils.readBigUInt64LE(data, Constants.ZIP64OFF);
25930
+
25931
+ _commentLength = 0;
25932
+ }
25933
+ },
25934
+
25935
+ toBinary: function () {
25936
+ var b = Buffer.alloc(Constants.ENDHDR + _commentLength);
25937
+ // "PK 05 06" signature
25938
+ b.writeUInt32LE(Constants.ENDSIG, 0);
25939
+ b.writeUInt32LE(0, 4);
25940
+ // number of entries on this volume
25941
+ b.writeUInt16LE(_volumeEntries, Constants.ENDSUB);
25942
+ // total number of entries
25943
+ b.writeUInt16LE(_totalEntries, Constants.ENDTOT);
25944
+ // central directory size in bytes
25945
+ b.writeUInt32LE(_size, Constants.ENDSIZ);
25946
+ // offset of first CEN header
25947
+ b.writeUInt32LE(_offset, Constants.ENDOFF);
25948
+ // zip file comment length
25949
+ b.writeUInt16LE(_commentLength, Constants.ENDCOM);
25950
+ // fill comment memory with spaces so no garbage is left there
25951
+ b.fill(" ", Constants.ENDHDR);
25952
+
25953
+ return b;
25954
+ },
25955
+
25956
+ toJSON: function () {
25957
+ // creates 0x0000 style output
25958
+ const offset = function (nr, len) {
25959
+ let offs = nr.toString(16).toUpperCase();
25960
+ while (offs.length < len) offs = "0" + offs;
25961
+ return "0x" + offs;
25962
+ };
25963
+
25964
+ return {
25965
+ diskEntries: _volumeEntries,
25966
+ totalEntries: _totalEntries,
25967
+ size: _size + " bytes",
25968
+ offset: offset(_offset, 4),
25969
+ commentLength: _commentLength
25970
+ };
25971
+ },
25972
+
25973
+ toString: function () {
25974
+ return JSON.stringify(this.toJSON(), null, "\t");
25975
+ }
25976
+ };
25977
+ };
25978
+ // Misspelled
25979
+
25980
+
22786
25981
  /***/ }),
22787
25982
 
22788
25983
  /***/ 8496:
@@ -22890,11 +26085,15 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
22890
26085
  };
22891
26086
  Object.defineProperty(exports, "__esModule", ({ value: true }));
22892
26087
  exports.build = build;
26088
+ const adm_zip_1 = __importDefault(__webpack_require__(8023));
22893
26089
  const assertic_1 = __webpack_require__(3205);
22894
26090
  const fsSync = __importStar(__webpack_require__(9896));
22895
26091
  const fs_1 = __webpack_require__(9896);
26092
+ const node_child_process_1 = __webpack_require__(1421);
22896
26093
  const path_1 = __importDefault(__webpack_require__(6928));
22897
26094
  const webpack_1 = __importDefault(__webpack_require__(6807));
26095
+ const connector_types_1 = __webpack_require__(3420);
26096
+ const assertion_utils_1 = __webpack_require__(6883);
22898
26097
  const process_env_utils_1 = __webpack_require__(9360);
22899
26098
  const resolve_1 = __webpack_require__(412);
22900
26099
  const tsoa_utils_1 = __webpack_require__(1431);
@@ -22972,6 +26171,19 @@ async function build({ verbose, dev, skipVersionCheck }) {
22972
26171
  resolve();
22973
26172
  });
22974
26173
  });
26174
+ // Generate connector metadata if this is a connector build.
26175
+ if (isSquidConnector) {
26176
+ if (verbose)
26177
+ console.log('Generating connector metadata...');
26178
+ await generateConnectorMetadata(distPath, verbose);
26179
+ // Add metadata to bundle.zip.
26180
+ const zipPath = path_1.default.join(distPath, 'bundle.zip');
26181
+ const zip = new adm_zip_1.default(zipPath);
26182
+ zip.addLocalFile(path_1.default.join(distPath, connector_types_1.CONNECTOR_METADATA_JSON_FILE));
26183
+ zip.writeZip(zipPath);
26184
+ if (verbose)
26185
+ console.log('Added connector metadata to bundle.zip');
26186
+ }
22975
26187
  // Show version check warning after successful build.
22976
26188
  await displayVersionWarning(versionCheckPromise);
22977
26189
  }
@@ -22984,6 +26196,139 @@ async function build({ verbose, dev, skipVersionCheck }) {
22984
26196
  (0, process_utils_1.exitWithError)(errorMessage);
22985
26197
  }
22986
26198
  }
26199
/**
 * These connectors do not have associated integration types.
 */
const INTEGRATIONLESS_CONNECTORS = ['cotomi'];
/**
 * Generates the connector metadata JSON file in the dist directory.
 *
 * Reads name/version from package.json, loads the freshly built
 * dist/index.js bundle, invokes its default service's `metadata()`
 * function, and writes the combined metadata next to the bundle as
 * CONNECTOR_METADATA_JSON_FILE.
 *
 * @param distPath Path of the build output directory.
 * @param verbose  When true, logs progress and the generated metadata.
 */
async function generateConnectorMetadata(distPath, verbose) {
    const packageJsonPath = path_1.default.resolve(process.cwd(), 'package.json');
    const packageJson = JSON.parse(await fs_1.promises.readFile(packageJsonPath, 'utf8'));
    (0, assertic_1.assertTruthy)(packageJson.version, 'Failed to read version from package.json');
    (0, assertic_1.assertTruthy)(packageJson.name, 'Failed to read name from package.json');
    // Extract connector ID from package name
    // (e.g., "@squidcloud/essentials" -> "essentials", "@squidcloud/google-calendar" -> "google_calendar").
    // BUG FIX: use a global regex — String.replace with a string pattern only
    // replaces the FIRST hyphen, which breaks multi-hyphen package names.
    const connectorId = (packageJson.name.split('/').pop() || packageJson.name).replace(/-/g, '_');
    (0, assertion_utils_1.assertConnectorId)(connectorId);
    // Load the built module to extract metadata.
    const indexJsPath = path_1.default.join(distPath, 'index.js');
    // Dynamically set NODE_PATH to include global npm modules. Required for connectors with 'puppeteer'.
    process.env.NODE_PATH = (0, node_child_process_1.execSync)('npm root -g').toString().trim();
    // eslint-disable-next-line @typescript-eslint/no-require-imports
    (__webpack_require__(3339).Module)._initPaths(); // Apply NODE_PATH changes.
    // eslint-disable-next-line @typescript-eslint/no-require-imports
    const tenantModule = __webpack_require__(6589)(indexJsPath);
    const defaultService = (0, assertic_1.truthy)(tenantModule['default'], '"default" service is not found');
    const metadataFn = (0, assertic_1.truthy)(defaultService['metadata'], '"metadata" function is not found');
    const bundleData = metadataFn();
    (0, assertic_1.assertTruthy)(typeof bundleData === 'object', `Unexpected "default:metadata" function response: ${typeof bundleData}`);
    // Integration-less connectors get an empty list; everything else must
    // declare at least one integration type via its AI function attributes.
    const integrationTypes = INTEGRATIONLESS_CONNECTORS.includes(connectorId)
        ? []
        : buildSupportedIntegrationTypesFromBundleData(bundleData);
    const packageMetadata = {
        id: connectorId,
        version: packageJson.version,
        bundleData,
        integrationTypes,
    };
    const metadataPath = path_1.default.join(distPath, connector_types_1.CONNECTOR_METADATA_JSON_FILE);
    await fs_1.promises.writeFile(metadataPath, JSON.stringify(packageMetadata, null, 2));
    if (verbose) {
        console.log('Connector metadata generated successfully:');
        console.dir(packageMetadata, { depth: 2, colors: true });
    }
}
26241
/** Returns a list of supported integration types. Checks AI function attributes for that. */
function buildSupportedIntegrationTypesFromBundleData(bundleData) {
    // Collect the integrationType attribute of every AI function into a
    // Set for de-duplication. A missing attribute contributes nothing;
    // a scalar value is treated as a one-element list (flatMap semantics).
    const collected = new Set();
    for (const { attributes } of Object.values(bundleData.aiFunctions || {})) {
        const declared = attributes?.integrationType ?? [];
        for (const type of Array.isArray(declared) ? declared : [declared]) {
            collected.add(type);
        }
    }
    (0, assertic_1.assertTruthy)(collected.size > 0, 'No supported integrations found!');
    const integrationTypes = [...collected];
    console.log('List of supported integrations: ', integrationTypes);
    return integrationTypes;
}
26249
+
26250
+
26251
+ /***/ }),
26252
+
26253
+ /***/ 8591:
26254
+ /***/ ((module, __unused_webpack_exports, __webpack_require__) => {
26255
+
26256
+ const pth = __webpack_require__(6928);
26257
+
26258
+ module.exports = function (/*String*/ path, /*Utils object*/ { fs }) {
26259
+ var _path = path || "",
26260
+ _obj = newAttr(),
26261
+ _stat = null;
26262
+
26263
+ function newAttr() {
26264
+ return {
26265
+ directory: false,
26266
+ readonly: false,
26267
+ hidden: false,
26268
+ executable: false,
26269
+ mtime: 0,
26270
+ atime: 0
26271
+ };
26272
+ }
26273
+
26274
+ if (_path && fs.existsSync(_path)) {
26275
+ _stat = fs.statSync(_path);
26276
+ _obj.directory = _stat.isDirectory();
26277
+ _obj.mtime = _stat.mtime;
26278
+ _obj.atime = _stat.atime;
26279
+ _obj.executable = (0o111 & _stat.mode) !== 0; // file is executable who ever har right not just owner
26280
+ _obj.readonly = (0o200 & _stat.mode) === 0; // readonly if owner has no write right
26281
+ _obj.hidden = pth.basename(_path)[0] === ".";
26282
+ } else {
26283
+ console.warn("Invalid path: " + _path);
26284
+ }
26285
+
26286
+ return {
26287
+ get directory() {
26288
+ return _obj.directory;
26289
+ },
26290
+
26291
+ get readOnly() {
26292
+ return _obj.readonly;
26293
+ },
26294
+
26295
+ get hidden() {
26296
+ return _obj.hidden;
26297
+ },
26298
+
26299
+ get mtime() {
26300
+ return _obj.mtime;
26301
+ },
26302
+
26303
+ get atime() {
26304
+ return _obj.atime;
26305
+ },
26306
+
26307
+ get executable() {
26308
+ return _obj.executable;
26309
+ },
26310
+
26311
+ decodeAttributes: function () {},
26312
+
26313
+ encodeAttributes: function () {},
26314
+
26315
+ toJSON: function () {
26316
+ return {
26317
+ path: _path,
26318
+ isDirectory: _obj.directory,
26319
+ isReadOnly: _obj.readonly,
26320
+ isHidden: _obj.hidden,
26321
+ isExecutable: _obj.executable,
26322
+ mTime: _obj.mtime,
26323
+ aTime: _obj.atime
26324
+ };
26325
+ },
26326
+
26327
+ toString: function () {
26328
+ return JSON.stringify(this.toJSON(), null, "\t");
26329
+ }
26330
+ };
26331
+ };
22987
26332
 
22988
26333
 
22989
26334
  /***/ }),
@@ -23421,6 +26766,418 @@ models.forEach(fromModel => {
23421
26766
  module.exports = convert;
23422
26767
 
23423
26768
 
26769
+ /***/ }),
26770
+
26771
+ /***/ 8692:
26772
+ /***/ ((module, __unused_webpack_exports, __webpack_require__) => {
26773
+
26774
+ var Utils = __webpack_require__(5178),
26775
+ Headers = __webpack_require__(4072),
26776
+ Constants = Utils.Constants,
26777
+ Methods = __webpack_require__(7964);
26778
+
26779
+ module.exports = function (/** object */ options, /*Buffer*/ input) {
26780
+ var _centralHeader = new Headers.EntryHeader(),
26781
+ _entryName = Buffer.alloc(0),
26782
+ _comment = Buffer.alloc(0),
26783
+ _isDirectory = false,
26784
+ uncompressedData = null,
26785
+ _extra = Buffer.alloc(0),
26786
+ _extralocal = Buffer.alloc(0),
26787
+ _efs = true;
26788
+
26789
+ // assign options
26790
+ const opts = options;
26791
+
26792
+ const decoder = typeof opts.decoder === "object" ? opts.decoder : Utils.decoder;
26793
+ _efs = decoder.hasOwnProperty("efs") ? decoder.efs : false;
26794
+
26795
// Extracts this entry's raw (still compressed) byte range from the zip
// buffer. Side effect: parsing the local header also caches its extra
// field into `_extralocal`.
function getCompressedDataFromZip() {
    //if (!input || !Buffer.isBuffer(input)) {
    // Accept any Uint8Array view (Buffer is a Uint8Array subclass).
    if (!input || !(input instanceof Uint8Array)) {
        return Buffer.alloc(0);
    }
    _extralocal = _centralHeader.loadLocalHeaderFromBinary(input);
    return input.slice(_centralHeader.realDataOffset, _centralHeader.realDataOffset + _centralHeader.compressedSize);
}
26803
+
26804
// Verifies the CRC-32 of decompressed `data` against the value recorded
// in the zip. Returns false on a plain CRC mismatch; throws when the
// trailing data descriptor is missing, unrecognized, or inconsistent
// with the central header.
function crc32OK(data) {
    // if bit 3 (0x08) of the general-purpose flags field is set, then the CRC-32 and file sizes are not known when the local header is written
    if (!_centralHeader.flags_desc) {
        // No data descriptor: the local header carries the CRC directly.
        if (Utils.crc32(data) !== _centralHeader.localHeader.crc) {
            return false;
        }
    } else {
        const descriptor = {};
        const dataEndOffset = _centralHeader.realDataOffset + _centralHeader.compressedSize;
        // no descriptor after compressed data, instead new local header
        if (input.readUInt32LE(dataEndOffset) == Constants.LOCSIG || input.readUInt32LE(dataEndOffset) == Constants.CENSIG) {
            throw Utils.Errors.DESCRIPTOR_NOT_EXIST();
        }

        // get descriptor data
        if (input.readUInt32LE(dataEndOffset) == Constants.EXTSIG) {
            // descriptor with signature
            descriptor.crc = input.readUInt32LE(dataEndOffset + Constants.EXTCRC);
            descriptor.compressedSize = input.readUInt32LE(dataEndOffset + Constants.EXTSIZ);
            descriptor.size = input.readUInt32LE(dataEndOffset + Constants.EXTLEN);
        } else if (input.readUInt16LE(dataEndOffset + 12) === 0x4b50) {
            // descriptor without signature (we check is new header starting where we expect)
            // Fields sit 4 bytes earlier because there is no leading signature.
            descriptor.crc = input.readUInt32LE(dataEndOffset + Constants.EXTCRC - 4);
            descriptor.compressedSize = input.readUInt32LE(dataEndOffset + Constants.EXTSIZ - 4);
            descriptor.size = input.readUInt32LE(dataEndOffset + Constants.EXTLEN - 4);
        } else {
            throw Utils.Errors.DESCRIPTOR_UNKNOWN();
        }

        // check data integrity
        if (descriptor.compressedSize !== _centralHeader.compressedSize || descriptor.size !== _centralHeader.size || descriptor.crc !== _centralHeader.crc) {
            throw Utils.Errors.DESCRIPTOR_FAULTY();
        }
        if (Utils.crc32(data) !== descriptor.crc) {
            return false;
        }

        // @TODO: zip64 bit descriptor fields
        // if bit 3 is set and any value in local header "zip64 Extended information" extra field are set 0 (place holder)
        // then 64-bit descriptor format is used instead of 32-bit
        // central header - "zip64 Extended information" extra field should store real values and not place holders
    }
    return true;
}
26848
+
26849
// Decompresses this entry's data (STORED or DEFLATED only).
// Call shapes: decompress(async, callback, pass) or decompress(pass) —
// a lone string argument is treated as the password.
// Sync mode returns a Buffer; async mode delivers (buffer[, error]) to
// the callback. Throws BAD_CRC / INVALID_PASS_PARAM / UNKNOWN_METHOD.
function decompress(/*Boolean*/ async, /*Function*/ callback, /*String, Buffer*/ pass) {
    // decompress(pass): shift the lone string argument into `pass`.
    if (typeof callback === "undefined" && typeof async === "string") {
        pass = async;
        async = void 0;
    }
    if (_isDirectory) {
        if (async && callback) {
            callback(Buffer.alloc(0), Utils.Errors.DIRECTORY_CONTENT_ERROR()); //si added error.
        }
        return Buffer.alloc(0);
    }

    var compressedData = getCompressedDataFromZip();

    if (compressedData.length === 0) {
        // File is empty, nothing to decompress.
        if (async && callback) callback(compressedData);
        return compressedData;
    }

    if (_centralHeader.encrypted) {
        // ZipCrypto-encrypted entry: a password (string or Buffer) is mandatory.
        if ("string" !== typeof pass && !Buffer.isBuffer(pass)) {
            throw Utils.Errors.INVALID_PASS_PARAM();
        }
        compressedData = Methods.ZipCrypto.decrypt(compressedData, _centralHeader, pass);
    }

    // Output buffer sized from the (uncompressed) size in the header.
    var data = Buffer.alloc(_centralHeader.size);

    switch (_centralHeader.method) {
        case Utils.Constants.STORED:
            compressedData.copy(data);
            if (!crc32OK(data)) {
                if (async && callback) callback(data, Utils.Errors.BAD_CRC()); //si added error
                throw Utils.Errors.BAD_CRC();
            } else {
                //si added otherwise did not seem to return data.
                if (async && callback) callback(data);
                return data;
            }
        case Utils.Constants.DEFLATED:
            var inflater = new Methods.Inflater(compressedData, _centralHeader.size);
            if (!async) {
                const result = inflater.inflate(data);
                result.copy(data, 0);
                if (!crc32OK(data)) {
                    throw Utils.Errors.BAD_CRC(`"${decoder.decode(_entryName)}"`);
                }
                return data;
            } else {
                inflater.inflateAsync(function (result) {
                    // NOTE(review): self-copy is a no-op — presumably the
                    // intent was `result.copy(data, 0)` to mirror the sync
                    // branch; confirm against upstream adm-zip before changing.
                    result.copy(result, 0);
                    if (callback) {
                        if (!crc32OK(result)) {
                            callback(result, Utils.Errors.BAD_CRC()); //si added error
                        } else {
                            callback(result);
                        }
                    }
                });
            }
            break;
        default:
            if (async && callback) callback(Buffer.alloc(0), Utils.Errors.UNKNOWN_METHOD());
            throw Utils.Errors.UNKNOWN_METHOD();
    }
}
26916
+
26917
// Produces this entry's compressed byte stream. When no new data was set
// and the original zip buffer is available, the already-compressed bytes
// are returned unchanged; otherwise data is stored or deflated per the
// central header's method. `async` toggles callback-style delivery.
// Side effect: updates `_centralHeader.compressedSize`.
function compress(/*Boolean*/ async, /*Function*/ callback) {
    if ((!uncompressedData || !uncompressedData.length) && Buffer.isBuffer(input)) {
        // no data set or the data wasn't changed to require recompression
        if (async && callback) callback(getCompressedDataFromZip());
        return getCompressedDataFromZip();
    }

    if (uncompressedData.length && !_isDirectory) {
        var compressedData;
        // Local file header
        switch (_centralHeader.method) {
            case Utils.Constants.STORED:
                // STORED: no compression, so compressed size == size.
                _centralHeader.compressedSize = _centralHeader.size;

                compressedData = Buffer.alloc(uncompressedData.length);
                uncompressedData.copy(compressedData);

                if (async && callback) callback(compressedData);
                return compressedData;
            default:
            // Any unrecognized method falls through to DEFLATED.
            case Utils.Constants.DEFLATED:
                var deflater = new Methods.Deflater(uncompressedData);
                if (!async) {
                    var deflated = deflater.deflate();
                    _centralHeader.compressedSize = deflated.length;
                    return deflated;
                } else {
                    deflater.deflateAsync(function (data) {
                        compressedData = Buffer.alloc(data.length);
                        _centralHeader.compressedSize = data.length;
                        data.copy(compressedData);
                        callback && callback(compressedData);
                    });
                }
                deflater = null;
                break;
        }
    } else if (async && callback) {
        // Directory or empty entry: nothing to compress.
        callback(Buffer.alloc(0));
    } else {
        return Buffer.alloc(0);
    }
}
26960
+
26961
// Reads a little-endian unsigned 64-bit integer as a JS number.
// BUG FIX: the original used `(hi << 4) + lo`; JS bitwise shifts operate
// on 32-bit integers, so `<< 4` scaled the high word by 16 instead of
// 2^32, corrupting every ZIP64 size/offset. The high word must be scaled
// by multiplication. Values above Number.MAX_SAFE_INTEGER (2^53 - 1)
// still lose precision — inherent to returning a plain number.
function readUInt64LE(buffer, offset) {
    return buffer.readUInt32LE(offset + 4) * 0x100000000 + buffer.readUInt32LE(offset);
}
26964
+
26965
// Walks the extra-field buffer as a sequence of [id:2][size:2][payload]
// records and hands ZIP64 payloads to parseZip64ExtendedInformation.
// Any read error is rethrown as EXTRA_FIELD_PARSE_ERROR.
function parseExtra(data) {
    try {
        let cursor = 0;
        while (cursor + 4 < data.length) {
            const signature = data.readUInt16LE(cursor);
            const size = data.readUInt16LE(cursor + 2);
            const payload = data.slice(cursor + 4, cursor + 4 + size);
            cursor += 4 + size;
            if (signature === Constants.ID_ZIP64) {
                parseZip64ExtendedInformation(payload);
            }
        }
    } catch (error) {
        throw Utils.Errors.EXTRA_FIELD_PARSE_ERROR();
    }
}
26984
+
26985
//Override header field values with values from the ZIP64 extra field.
// Each value only replaces its header field when the header holds the
// ZIP64 placeholder (EF_ZIP64_OR_32 / EF_ZIP64_OR_16 — presumably
// 0xFFFFFFFF / 0xFFFF; confirm in the Constants module). Length checks
// allow truncated extra fields that carry only a prefix of the values.
function parseZip64ExtendedInformation(data) {
    var size, compressedSize, offset, diskNumStart;

    if (data.length >= Constants.EF_ZIP64_SCOMP) {
        size = readUInt64LE(data, Constants.EF_ZIP64_SUNCOMP);
        if (_centralHeader.size === Constants.EF_ZIP64_OR_32) {
            _centralHeader.size = size;
        }
    }
    if (data.length >= Constants.EF_ZIP64_RHO) {
        compressedSize = readUInt64LE(data, Constants.EF_ZIP64_SCOMP);
        if (_centralHeader.compressedSize === Constants.EF_ZIP64_OR_32) {
            _centralHeader.compressedSize = compressedSize;
        }
    }
    if (data.length >= Constants.EF_ZIP64_DSN) {
        offset = readUInt64LE(data, Constants.EF_ZIP64_RHO);
        if (_centralHeader.offset === Constants.EF_ZIP64_OR_32) {
            _centralHeader.offset = offset;
        }
    }
    if (data.length >= Constants.EF_ZIP64_DSN + 4) {
        // Disk number is only 32 bits wide in the ZIP64 record.
        diskNumStart = data.readUInt32LE(Constants.EF_ZIP64_DSN);
        if (_centralHeader.diskNumStart === Constants.EF_ZIP64_OR_16) {
            _centralHeader.diskNumStart = diskNumStart;
        }
    }
}
27014
+
27015
+ return {
27016
+ get entryName() {
27017
+ return decoder.decode(_entryName);
27018
+ },
27019
+ get rawEntryName() {
27020
+ return _entryName;
27021
+ },
27022
+ set entryName(val) {
27023
+ _entryName = Utils.toBuffer(val, decoder.encode);
27024
+ var lastChar = _entryName[_entryName.length - 1];
27025
+ _isDirectory = lastChar === 47 || lastChar === 92;
27026
+ _centralHeader.fileNameLength = _entryName.length;
27027
+ },
27028
+
27029
+ get efs() {
27030
+ if (typeof _efs === "function") {
27031
+ return _efs(this.entryName);
27032
+ } else {
27033
+ return _efs;
27034
+ }
27035
+ },
27036
+
27037
+ get extra() {
27038
+ return _extra;
27039
+ },
27040
+ set extra(val) {
27041
+ _extra = val;
27042
+ _centralHeader.extraLength = val.length;
27043
+ parseExtra(val);
27044
+ },
27045
+
27046
+ get comment() {
27047
+ return decoder.decode(_comment);
27048
+ },
27049
+ set comment(val) {
27050
+ _comment = Utils.toBuffer(val, decoder.encode);
27051
+ _centralHeader.commentLength = _comment.length;
27052
+ if (_comment.length > 0xffff) throw Utils.Errors.COMMENT_TOO_LONG();
27053
+ },
27054
+
27055
+ get name() {
27056
+ var n = decoder.decode(_entryName);
27057
+ return _isDirectory
27058
+ ? n
27059
+ .substr(n.length - 1)
27060
+ .split("/")
27061
+ .pop()
27062
+ : n.split("/").pop();
27063
+ },
27064
+ get isDirectory() {
27065
+ return _isDirectory;
27066
+ },
27067
+
27068
+ getCompressedData: function () {
27069
+ return compress(false, null);
27070
+ },
27071
+
27072
+ getCompressedDataAsync: function (/*Function*/ callback) {
27073
+ compress(true, callback);
27074
+ },
27075
+
27076
+ setData: function (value) {
27077
+ uncompressedData = Utils.toBuffer(value, Utils.decoder.encode);
27078
+ if (!_isDirectory && uncompressedData.length) {
27079
+ _centralHeader.size = uncompressedData.length;
27080
+ _centralHeader.method = Utils.Constants.DEFLATED;
27081
+ _centralHeader.crc = Utils.crc32(value);
27082
+ _centralHeader.changed = true;
27083
+ } else {
27084
+ // folders and blank files should be stored
27085
+ _centralHeader.method = Utils.Constants.STORED;
27086
+ }
27087
+ },
27088
+
27089
+ getData: function (pass) {
27090
+ if (_centralHeader.changed) {
27091
+ return uncompressedData;
27092
+ } else {
27093
+ return decompress(false, null, pass);
27094
+ }
27095
+ },
27096
+
27097
+ getDataAsync: function (/*Function*/ callback, pass) {
27098
+ if (_centralHeader.changed) {
27099
+ callback(uncompressedData);
27100
+ } else {
27101
+ decompress(true, callback, pass);
27102
+ }
27103
+ },
27104
+
27105
+ set attr(attr) {
27106
+ _centralHeader.attr = attr;
27107
+ },
27108
+ get attr() {
27109
+ return _centralHeader.attr;
27110
+ },
27111
+
27112
+ set header(/*Buffer*/ data) {
27113
+ _centralHeader.loadFromBinary(data);
27114
+ },
27115
+
27116
+ get header() {
27117
+ return _centralHeader;
27118
+ },
27119
+
27120
+ packCentralHeader: function () {
27121
+ _centralHeader.flags_efs = this.efs;
27122
+ _centralHeader.extraLength = _extra.length;
27123
+ // 1. create header (buffer)
27124
+ var header = _centralHeader.centralHeaderToBinary();
27125
+ var addpos = Utils.Constants.CENHDR;
27126
+ // 2. add file name
27127
+ _entryName.copy(header, addpos);
27128
+ addpos += _entryName.length;
27129
+ // 3. add extra data
27130
+ _extra.copy(header, addpos);
27131
+ addpos += _centralHeader.extraLength;
27132
+ // 4. add file comment
27133
+ _comment.copy(header, addpos);
27134
+ return header;
27135
+ },
27136
+
27137
+ packLocalHeader: function () {
27138
+ let addpos = 0;
27139
+ _centralHeader.flags_efs = this.efs;
27140
+ _centralHeader.extraLocalLength = _extralocal.length;
27141
+ // 1. construct local header Buffer
27142
+ const localHeaderBuf = _centralHeader.localHeaderToBinary();
27143
+ // 2. localHeader - crate header buffer
27144
+ const localHeader = Buffer.alloc(localHeaderBuf.length + _entryName.length + _centralHeader.extraLocalLength);
27145
+ // 2.1 add localheader
27146
+ localHeaderBuf.copy(localHeader, addpos);
27147
+ addpos += localHeaderBuf.length;
27148
+ // 2.2 add file name
27149
+ _entryName.copy(localHeader, addpos);
27150
+ addpos += _entryName.length;
27151
+ // 2.3 add extra field
27152
+ _extralocal.copy(localHeader, addpos);
27153
+ addpos += _extralocal.length;
27154
+
27155
+ return localHeader;
27156
+ },
27157
+
27158
+ toJSON: function () {
27159
+ const bytes = function (nr) {
27160
+ return "<" + ((nr && nr.length + " bytes buffer") || "null") + ">";
27161
+ };
27162
+
27163
+ return {
27164
+ entryName: this.entryName,
27165
+ name: this.name,
27166
+ comment: this.comment,
27167
+ isDirectory: this.isDirectory,
27168
+ header: _centralHeader.toJSON(),
27169
+ compressedData: bytes(input),
27170
+ data: bytes(uncompressedData)
27171
+ };
27172
+ },
27173
+
27174
+ toString: function () {
27175
+ return JSON.stringify(this.toJSON(), null, "\t");
27176
+ }
27177
+ };
27178
+ };
27179
+
27180
+
23424
27181
  /***/ }),
23425
27182
 
23426
27183
  /***/ 8699:
@@ -26828,6 +30585,18 @@ function onceStrict (fn) {
26828
30585
  }
26829
30586
 
26830
30587
 
30588
+ /***/ }),
30589
+
30590
+ /***/ 9886:
30591
+ /***/ ((module) => {
30592
+
30593
+ module.exports = {
30594
+ efs: true,
30595
+ encode: (data) => Buffer.from(data, "utf8"),
30596
+ decode: (data) => data.toString("utf8")
30597
+ };
30598
+
30599
+
26831
30600
  /***/ }),
26832
30601
 
26833
30602
  /***/ 9896: