@loaders.gl/zip 4.0.0-alpha.22 → 4.0.0-alpha.24
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/dist.min.js +156 -181
- package/dist/es5/filesystems/zip-filesystem.js +4 -6
- package/dist/es5/filesystems/zip-filesystem.js.map +1 -1
- package/dist/es5/hash-file-utility.js.map +1 -1
- package/dist/es5/index.js +2 -17
- package/dist/es5/index.js.map +1 -1
- package/dist/es5/parse-zip/cd-file-header.js.map +1 -1
- package/dist/es5/parse-zip/end-of-central-directory.js.map +1 -1
- package/dist/es5/parse-zip/local-file-header.js.map +1 -1
- package/dist/es5/parse-zip/search-from-the-end.js.map +1 -1
- package/dist/es5/tar-builder.js +8 -8
- package/dist/es5/tar-builder.js.map +1 -1
- package/dist/es5/zip-loader.js +2 -4
- package/dist/es5/zip-loader.js.map +1 -1
- package/dist/es5/zip-writer.js +19 -10
- package/dist/es5/zip-writer.js.map +1 -1
- package/dist/esm/filesystems/zip-filesystem.js +3 -3
- package/dist/esm/filesystems/zip-filesystem.js.map +1 -1
- package/dist/esm/hash-file-utility.js.map +1 -1
- package/dist/esm/index.js +1 -3
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/parse-zip/cd-file-header.js.map +1 -1
- package/dist/esm/parse-zip/end-of-central-directory.js.map +1 -1
- package/dist/esm/parse-zip/local-file-header.js.map +1 -1
- package/dist/esm/parse-zip/search-from-the-end.js.map +1 -1
- package/dist/esm/tar-builder.js +2 -2
- package/dist/esm/tar-builder.js.map +1 -1
- package/dist/esm/zip-loader.js +1 -2
- package/dist/esm/zip-loader.js.map +1 -1
- package/dist/esm/zip-writer.js +8 -5
- package/dist/esm/zip-writer.js.map +1 -1
- package/dist/filesystems/zip-filesystem.d.ts +2 -2
- package/dist/filesystems/zip-filesystem.d.ts.map +1 -1
- package/dist/hash-file-utility.d.ts +1 -1
- package/dist/hash-file-utility.d.ts.map +1 -1
- package/dist/index.d.ts +1 -4
- package/dist/index.d.ts.map +1 -1
- package/dist/parse-zip/cd-file-header.d.ts +1 -1
- package/dist/parse-zip/cd-file-header.d.ts.map +1 -1
- package/dist/parse-zip/end-of-central-directory.d.ts +1 -1
- package/dist/parse-zip/end-of-central-directory.d.ts.map +1 -1
- package/dist/parse-zip/local-file-header.d.ts +1 -1
- package/dist/parse-zip/local-file-header.d.ts.map +1 -1
- package/dist/parse-zip/search-from-the-end.d.ts +1 -1
- package/dist/parse-zip/search-from-the-end.d.ts.map +1 -1
- package/dist/tar-builder.d.ts +2 -2
- package/dist/tar-builder.d.ts.map +1 -1
- package/dist/zip-loader.d.ts +0 -1
- package/dist/zip-loader.d.ts.map +1 -1
- package/dist/zip-writer.d.ts +12 -2
- package/dist/zip-writer.d.ts.map +1 -1
- package/package.json +4 -9
- package/src/filesystems/zip-filesystem.ts +3 -3
- package/src/hash-file-utility.ts +1 -1
- package/src/index.ts +3 -5
- package/src/parse-zip/cd-file-header.ts +1 -1
- package/src/parse-zip/end-of-central-directory.ts +1 -1
- package/src/parse-zip/local-file-header.ts +1 -1
- package/src/parse-zip/search-from-the-end.ts +1 -1
- package/src/tar-builder.ts +2 -2
- package/src/zip-loader.ts +2 -3
- package/src/zip-writer.ts +23 -10
- package/dist/bundle.js +0 -5
- package/dist/es5/file-provider/data-view-file.js +0 -146
- package/dist/es5/file-provider/data-view-file.js.map +0 -1
- package/dist/es5/file-provider/file-handle-file.js +0 -234
- package/dist/es5/file-provider/file-handle-file.js.map +0 -1
- package/dist/es5/file-provider/file-handle.js +0 -101
- package/dist/es5/file-provider/file-handle.js.map +0 -1
- package/dist/es5/file-provider/file-provider.js +0 -11
- package/dist/es5/file-provider/file-provider.js.map +0 -1
- package/dist/esm/file-provider/data-view-file.js +0 -33
- package/dist/esm/file-provider/data-view-file.js.map +0 -1
- package/dist/esm/file-provider/file-handle-file.js +0 -57
- package/dist/esm/file-provider/file-handle-file.js.map +0 -1
- package/dist/esm/file-provider/file-handle.js +0 -37
- package/dist/esm/file-provider/file-handle.js.map +0 -1
- package/dist/esm/file-provider/file-provider.js +0 -4
- package/dist/esm/file-provider/file-provider.js.map +0 -1
- package/dist/file-provider/data-view-file.d.ts +0 -37
- package/dist/file-provider/data-view-file.d.ts.map +0 -1
- package/dist/file-provider/data-view-file.js +0 -63
- package/dist/file-provider/file-handle-file.d.ts +0 -53
- package/dist/file-provider/file-handle-file.d.ts.map +0 -1
- package/dist/file-provider/file-handle-file.js +0 -90
- package/dist/file-provider/file-handle.d.ts +0 -40
- package/dist/file-provider/file-handle.d.ts.map +0 -1
- package/dist/file-provider/file-handle.js +0 -57
- package/dist/file-provider/file-provider.d.ts +0 -45
- package/dist/file-provider/file-provider.d.ts.map +0 -1
- package/dist/file-provider/file-provider.js +0 -13
- package/dist/filesystems/zip-filesystem.js +0 -128
- package/dist/hash-file-utility.js +0 -88
- package/dist/index.js +0 -34
- package/dist/lib/tar/header.js +0 -99
- package/dist/lib/tar/tar.js +0 -131
- package/dist/lib/tar/types.js +0 -2
- package/dist/lib/tar/utils.js +0 -54
- package/dist/parse-zip/cd-file-header.js +0 -68
- package/dist/parse-zip/end-of-central-directory.js +0 -40
- package/dist/parse-zip/local-file-header.js +0 -55
- package/dist/parse-zip/search-from-the-end.js +0 -31
- package/dist/tar-builder.js +0 -38
- package/dist/zip-loader.js +0 -61
- package/dist/zip-writer.js +0 -37
- package/src/file-provider/data-view-file.ts +0 -72
- package/src/file-provider/file-handle-file.ts +0 -114
- package/src/file-provider/file-handle.ts +0 -73
- package/src/file-provider/file-provider.ts +0 -56
package/dist/hash-file-utility.js
DELETED

@@ -1,88 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.generateHashInfo = exports.findBin = exports.parseHashFile = exports.compareHashes = void 0;
-const md5_1 = __importDefault(require("md5"));
-const cd_file_header_1 = require("./parse-zip/cd-file-header");
-/**
- * Comparing md5 hashes according to https://github.com/Esri/i3s-spec/blob/master/docs/2.0/slpk_hashtable.pcsl.md step 5
- * @param hash1 hash to compare
- * @param hash2 hash to compare
- * @returns -1 if hash1 < hash2, 0 of hash1 === hash2, 1 if hash1 > hash2
- */
-const compareHashes = (hash1, hash2) => {
-    const h1 = new BigUint64Array(hash1.buffer, hash1.byteOffset, 2);
-    const h2 = new BigUint64Array(hash2.buffer, hash2.byteOffset, 2);
-    const diff = h1[0] === h2[0] ? h1[1] - h2[1] : h1[0] - h2[0];
-    if (diff < 0n) {
-        return -1;
-    }
-    else if (diff === 0n) {
-        return 0;
-    }
-    return 1;
-};
-exports.compareHashes = compareHashes;
-/**
- * Reads hash file from buffer and returns it in ready-to-use form
- * @param hashFile - bufer containing hash file
- * @returns Array containing file info
- */
-const parseHashFile = (hashFile) => {
-    const hashFileBuffer = Buffer.from(hashFile);
-    const hashArray = [];
-    for (let i = 0; i < hashFileBuffer.buffer.byteLength; i = i + 24) {
-        const offsetBuffer = new DataView(hashFileBuffer.buffer.slice(hashFileBuffer.byteOffset + i + 16, hashFileBuffer.byteOffset + i + 24));
-        const offset = offsetBuffer.getBigUint64(offsetBuffer.byteOffset, true);
-        hashArray.push({
-            hash: Buffer.from(hashFileBuffer.subarray(hashFileBuffer.byteOffset + i, hashFileBuffer.byteOffset + i + 16)),
-            offset
-        });
-    }
-    return hashArray;
-};
-exports.parseHashFile = parseHashFile;
-/**
- * Binary search in the hash info
- * @param hashToSearch hash that we need to find
- * @returns required hash element or undefined if not found
- */
-const findBin = (hashToSearch, hashArray) => {
-    let lowerBorder = 0;
-    let upperBorder = hashArray.length;
-    while (upperBorder - lowerBorder > 1) {
-        const middle = lowerBorder + Math.floor((upperBorder - lowerBorder) / 2);
-        const value = (0, exports.compareHashes)(hashArray[middle].hash, hashToSearch);
-        if (value === 0) {
-            return hashArray[middle];
-        }
-        else if (value < 0) {
-            lowerBorder = middle;
-        }
-        else {
-            upperBorder = middle;
-        }
-    }
-    return undefined;
-};
-exports.findBin = findBin;
-/**
- * generates hash info from central directory
- * @param fileProvider - provider of the archive
- * @returns ready to use hash info
- */
-const generateHashInfo = async (fileProvider) => {
-    const zipCDIterator = (0, cd_file_header_1.zipCDFileHeaderGenerator)(fileProvider);
-    const hashInfo = [];
-    for await (const cdHeader of zipCDIterator) {
-        hashInfo.push({
-            hash: Buffer.from((0, md5_1.default)(cdHeader.fileName.split('\\').join('/').toLocaleLowerCase()), 'hex'),
-            offset: cdHeader.localHeaderOffset
-        });
-    }
-    hashInfo.sort((a, b) => (0, exports.compareHashes)(a.hash, b.hash));
-    return hashInfo;
-};
-exports.generateHashInfo = generateHashInfo;
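These helpers remain in the package's es5 and esm builds (only this duplicate top-level dist build is removed), and the deleted code documents the workflow plainly: generateHashInfo walks the ZIP central directory into an array of {hash, offset} records sorted with compareHashes, and findBin then binary-searches that array. A minimal usage sketch, assuming a FileProvider instance is already available (the file-provider classes are dropped from this package's dist in alpha.24, so how you obtain one is left open) and using the md5 package as the deleted code does:

  import md5 from 'md5';
  import {generateHashInfo, findBin} from '@loaders.gl/zip';

  async function lookupEntry(fileProvider: any, path: string) {
    // Sorted array of {hash, offset} records built from the central directory
    const hashInfo = await generateHashInfo(fileProvider);
    // Same normalization as generateHashInfo: backslashes to slashes, lower case
    const key = Buffer.from(md5(path.split('\\').join('/').toLocaleLowerCase()), 'hex');
    // Binary search; returns undefined if the entry is not in the archive
    return findBin(key, hashInfo);
  }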
package/dist/index.js
DELETED
@@ -1,34 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.ZipFileSystem = exports.generateHashInfo = exports.findBin = exports.parseHashFile = exports.compareHashes = exports.searchFromTheEnd = exports.parseEoCDRecord = exports.localHeaderSignature = exports.parseZipLocalFileHeader = exports.cdSignature = exports.zipCDFileHeaderGenerator = exports.parseZipCDFileHeader = exports.DataViewFile = exports.FileHandleFile = exports.TarBuilder = exports.ZipWriter = exports.ZipLoader = void 0;
-var zip_loader_1 = require("./zip-loader");
-Object.defineProperty(exports, "ZipLoader", { enumerable: true, get: function () { return zip_loader_1.ZipLoader; } });
-var zip_writer_1 = require("./zip-writer");
-Object.defineProperty(exports, "ZipWriter", { enumerable: true, get: function () { return zip_writer_1.ZipWriter; } });
-var tar_builder_1 = require("./tar-builder");
-Object.defineProperty(exports, "TarBuilder", { enumerable: true, get: function () { return __importDefault(tar_builder_1).default; } });
-var file_handle_file_1 = require("./file-provider/file-handle-file");
-Object.defineProperty(exports, "FileHandleFile", { enumerable: true, get: function () { return file_handle_file_1.FileHandleFile; } });
-var data_view_file_1 = require("./file-provider/data-view-file");
-Object.defineProperty(exports, "DataViewFile", { enumerable: true, get: function () { return data_view_file_1.DataViewFile; } });
-var cd_file_header_1 = require("./parse-zip/cd-file-header");
-Object.defineProperty(exports, "parseZipCDFileHeader", { enumerable: true, get: function () { return cd_file_header_1.parseZipCDFileHeader; } });
-Object.defineProperty(exports, "zipCDFileHeaderGenerator", { enumerable: true, get: function () { return cd_file_header_1.zipCDFileHeaderGenerator; } });
-Object.defineProperty(exports, "cdSignature", { enumerable: true, get: function () { return cd_file_header_1.signature; } });
-var local_file_header_1 = require("./parse-zip/local-file-header");
-Object.defineProperty(exports, "parseZipLocalFileHeader", { enumerable: true, get: function () { return local_file_header_1.parseZipLocalFileHeader; } });
-Object.defineProperty(exports, "localHeaderSignature", { enumerable: true, get: function () { return local_file_header_1.signature; } });
-var end_of_central_directory_1 = require("./parse-zip/end-of-central-directory");
-Object.defineProperty(exports, "parseEoCDRecord", { enumerable: true, get: function () { return end_of_central_directory_1.parseEoCDRecord; } });
-var search_from_the_end_1 = require("./parse-zip/search-from-the-end");
-Object.defineProperty(exports, "searchFromTheEnd", { enumerable: true, get: function () { return search_from_the_end_1.searchFromTheEnd; } });
-var hash_file_utility_1 = require("./hash-file-utility");
-Object.defineProperty(exports, "compareHashes", { enumerable: true, get: function () { return hash_file_utility_1.compareHashes; } });
-Object.defineProperty(exports, "parseHashFile", { enumerable: true, get: function () { return hash_file_utility_1.parseHashFile; } });
-Object.defineProperty(exports, "findBin", { enumerable: true, get: function () { return hash_file_utility_1.findBin; } });
-Object.defineProperty(exports, "generateHashInfo", { enumerable: true, get: function () { return hash_file_utility_1.generateHashInfo; } });
-var zip_filesystem_1 = require("./filesystems/zip-filesystem");
-Object.defineProperty(exports, "ZipFileSystem", { enumerable: true, get: function () { return zip_filesystem_1.ZipFileSystem; } });
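This deleted dist/index.js mirrors the package's public entry point, so it doubles as a list of what @loaders.gl/zip exported at alpha.22. The entry point itself changes in this release (src/index.ts +3 -5), most likely around the file-provider re-exports whose implementation files are removed above; the core classes, however, appear to stay importable along these lines (a hedged sketch, not a confirmed alpha.24 export list):

  import {ZipLoader, ZipWriter, TarBuilder, ZipFileSystem} from '@loaders.gl/zip';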
package/dist/lib/tar/header.js
DELETED
@@ -1,99 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.format = void 0;
-// This file is derived from the tar-js code base under MIT license
-// See https://github.com/beatgammit/tar-js/blob/master/LICENSE
-/*
- * tar-js
- * MIT (c) 2011 T. Jameson Little
- */
-/* eslint-disable */
-const utils = __importStar(require("./utils"));
-/*
-struct posix_header {           // byte offset
-    char name[100];             //   0
-    char mode[8];               // 100
-    char uid[8];                // 108
-    char gid[8];                // 116
-    char size[12];              // 124
-    char mtime[12];             // 136
-    char chksum[8];             // 148
-    char typeflag;              // 156
-    char linkname[100];         // 157
-    char magic[6];              // 257
-    char version[2];            // 263
-    char uname[32];             // 265
-    char gname[32];             // 297
-    char devmajor[8];           // 329
-    char devminor[8];           // 337
-    char prefix[155];           // 345
-                                // 500
-};
-*/
-const structure = {
-    fileName: 100,
-    fileMode: 8,
-    uid: 8,
-    gid: 8,
-    fileSize: 12,
-    mtime: 12,
-    checksum: 8,
-    type: 1,
-    linkName: 100,
-    ustar: 8,
-    owner: 32,
-    group: 32,
-    majorNumber: 8,
-    minorNumber: 8,
-    filenamePrefix: 155,
-    padding: 12
-};
-/**
- * Getting the header
- * @param data
- * @param [cb]
- * @returns {Uint8Array} | Array
- */
-function format(data, cb) {
-    const buffer = utils.clean(512);
-    let offset = 0;
-    Object.entries(structure).forEach(([field, length]) => {
-        const str = data[field] || '';
-        let i;
-        let fieldLength;
-        for (i = 0, fieldLength = str.length; i < fieldLength; i += 1) {
-            buffer[offset] = str.charCodeAt(i);
-            offset += 1;
-        }
-        // space it out with nulls
-        offset += length - i;
-    });
-    if (typeof cb === 'function') {
-        return cb(buffer, offset);
-    }
-    return buffer;
-}
-exports.format = format;
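The structure table is just the byte width of each POSIX header field; format walks it, copies each value's characters and then skips ahead by the remaining width, so the field offsets are running sums of the widths and one header is exactly 512 bytes, matching the byte offsets in the struct comment. A quick check of that arithmetic:

  // Field widths from the deleted header.js structure table
  const widths = [100, 8, 8, 8, 12, 12, 8, 1, 100, 8, 32, 32, 8, 8, 155, 12];
  // name starts at 0, mode at 100, uid at 108, ..., prefix at 345, padding at 500
  const total = widths.reduce((sum, width) => sum + width, 0);
  console.log(total); // 512 — one full tar record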
package/dist/lib/tar/tar.js
DELETED
@@ -1,131 +0,0 @@
-"use strict";
-// This file is derived from the tar-js code base under MIT license
-// See https://github.com/beatgammit/tar-js/blob/master/LICENSE
-/*
- * tar-js
- * MIT (c) 2011 T. Jameson Little
- */
-Object.defineProperty(exports, "__esModule", { value: true });
-const utils_1 = require("./utils");
-const header_1 = require("./header");
-let blockSize;
-let headerLength;
-let inputLength;
-const recordSize = 512;
-class Tar {
-    /**
-     * @param [recordsPerBlock]
-     */
-    constructor(recordsPerBlock) {
-        this.blocks = [];
-        this.written = 0;
-        blockSize = (recordsPerBlock || 20) * recordSize;
-        this.out = (0, utils_1.clean)(blockSize);
-        this.blocks = [];
-        this.length = 0;
-        this.save = this.save.bind(this);
-        this.clear = this.clear.bind(this);
-        this.append = this.append.bind(this);
-    }
-    /**
-     * Append a file to the tar archive
-     * @param filepath
-     * @param input
-     * @param [opts]
-     */
-    // eslint-disable-next-line complexity
-    append(filepath, input, opts) {
-        let checksum;
-        if (typeof input === 'string') {
-            input = (0, utils_1.stringToUint8)(input);
-        }
-        else if (input.constructor && input.constructor !== Uint8Array.prototype.constructor) {
-            // @ts-ignore
-            const errorInputMatch = /function\s*([$A-Za-z_][0-9A-Za-z_]*)\s*\(/.exec(input.constructor.toString());
-            const errorInput = errorInputMatch && errorInputMatch[1];
-            const errorMessage = `Invalid input type. You gave me: ${errorInput}`;
-            throw errorMessage;
-        }
-        opts = opts || {};
-        const mode = opts.mode || parseInt('777', 8) & 0xfff;
-        const mtime = opts.mtime || Math.floor(Number(new Date()) / 1000);
-        const uid = opts.uid || 0;
-        const gid = opts.gid || 0;
-        const data = {
-            fileName: filepath,
-            fileMode: (0, utils_1.pad)(mode, 7),
-            uid: (0, utils_1.pad)(uid, 7),
-            gid: (0, utils_1.pad)(gid, 7),
-            fileSize: (0, utils_1.pad)(input.length, 11),
-            mtime: (0, utils_1.pad)(mtime, 11),
-            checksum: ' ',
-            // 0 = just a file
-            type: '0',
-            ustar: 'ustar ',
-            owner: opts.owner || '',
-            group: opts.group || ''
-        };
-        // calculate the checksum
-        checksum = 0;
-        Object.keys(data).forEach((key) => {
-            let i;
-            const value = data[key];
-            let length;
-            for (i = 0, length = value.length; i < length; i += 1) {
-                checksum += value.charCodeAt(i);
-            }
-        });
-        data.checksum = `${(0, utils_1.pad)(checksum, 6)}\u0000 `;
-        const headerArr = (0, header_1.format)(data);
-        headerLength = Math.ceil(headerArr.length / recordSize) * recordSize;
-        inputLength = Math.ceil(input.length / recordSize) * recordSize;
-        this.blocks.push({
-            header: headerArr,
-            input,
-            headerLength,
-            inputLength
-        });
-    }
-    /**
-     * Compiling data to a Blob object
-     * @returns {Blob}
-     */
-    save() {
-        const buffers = [];
-        const chunks = new Array();
-        let length = 0;
-        const max = Math.pow(2, 20);
-        let chunk = new Array();
-        this.blocks.forEach((b = []) => {
-            if (length + b.headerLength + b.inputLength > max) {
-                chunks.push({ blocks: chunk, length });
-                chunk = [];
-                length = 0;
-            }
-            chunk.push(b);
-            length += b.headerLength + b.inputLength;
-        });
-        chunks.push({ blocks: chunk, length });
-        chunks.forEach((c = []) => {
-            const buffer = new Uint8Array(c.length);
-            let written = 0;
-            c.blocks.forEach((b = []) => {
-                buffer.set(b.header, written);
-                written += b.headerLength;
-                buffer.set(b.input, written);
-                written += b.inputLength;
-            });
-            buffers.push(buffer);
-        });
-        buffers.push(new Uint8Array(2 * recordSize));
-        return new Blob(buffers, { type: 'octet/stream' });
-    }
-    /**
-     * Clear the data by its blocksize
-     */
-    clear() {
-        this.written = 0;
-        this.out = (0, utils_1.clean)(blockSize);
-    }
-}
-exports.default = Tar;
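Taken together, append queues a formatted 512-byte header plus the payload (both rounded up to whole 512-byte records) for each file, and save concatenates every queued block plus two empty records into a Blob. A minimal sketch of driving this class directly, assuming the internal module path that this release removes from dist (TarBuilder further below is the wrapper the package actually exposes):

  import Tar from './lib/tar/tar'; // internal module, deleted from dist in this release

  const tape = new Tar(20);                            // 20 records (512 bytes each) per block
  tape.append('hello.txt', 'hello world\n');           // strings go through stringToUint8
  tape.append('data.bin', new Uint8Array([1, 2, 3]));  // Uint8Array is used as-is
  const blob = tape.save();                            // Blob ending with two empty records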
package/dist/lib/tar/types.js
DELETED
package/dist/lib/tar/utils.js
DELETED
@@ -1,54 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.stringToUint8 = exports.pad = exports.clean = void 0;
-// This file is derived from the tar-js code base under MIT license
-// See https://github.com/beatgammit/tar-js/blob/master/LICENSE
-/*
- * tar-js
- * MIT (c) 2011 T. Jameson Little
- */
-/**
- * Returns the memory area specified by length
- * @param length
- * @returns {Uint8Array}
- */
-function clean(length) {
-    let i;
-    const buffer = new Uint8Array(length);
-    for (i = 0; i < length; i += 1) {
-        buffer[i] = 0;
-    }
-    return buffer;
-}
-exports.clean = clean;
-/**
- * Converting data to a string
- * @param num
- * @param bytes
- * @param base
- * @returns string
- */
-function pad(num, bytes, base) {
-    const numStr = num.toString(base || 8);
-    return '000000000000'.substr(numStr.length + 12 - bytes) + numStr;
-}
-exports.pad = pad;
-/**
- * Converting input to binary data
- * @param input
- * @param out
- * @param offset
- * @returns {Uint8Array}
- */
-function stringToUint8(input, out, offset) {
-    let i;
-    let length;
-    out = out || clean(input.length);
-    offset = offset || 0;
-    for (i = 0, length = input.length; i < length; i += 1) {
-        out[offset] = input.charCodeAt(i);
-        offset += 1;
-    }
-    return out;
-}
-exports.stringToUint8 = stringToUint8;
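pad is the usual fixed-width tar formatter: it renders the number in the given base (octal by default) and left-pads it with zeros to the requested width, which is how the mode, uid/gid, size and mtime fields in tar.js above are produced. For example:

  import {pad} from './lib/tar/utils'; // internal module, deleted from dist in this release

  pad(0o644, 7);  // '0000644' — file mode for the 8-byte mode field
  pad(1024, 11);  // '00000002000' — file size in octal for the 12-byte size field
  pad(0, 7);      // '0000000'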
package/dist/parse-zip/cd-file-header.js
DELETED

@@ -1,68 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.zipCDFileHeaderGenerator = exports.parseZipCDFileHeader = exports.signature = void 0;
-const end_of_central_directory_1 = require("./end-of-central-directory");
-// offsets accroding to https://en.wikipedia.org/wiki/ZIP_(file_format)
-const CD_COMPRESSED_SIZE_OFFSET = 20n;
-const CD_UNCOMPRESSED_SIZE_OFFSET = 24n;
-const CD_FILE_NAME_LENGTH_OFFSET = 28n;
-const CD_EXTRA_FIELD_LENGTH_OFFSET = 30n;
-const CD_LOCAL_HEADER_OFFSET_OFFSET = 42n;
-const CD_FILE_NAME_OFFSET = 46n;
-exports.signature = [0x50, 0x4b, 0x01, 0x02];
-/**
- * Parses central directory file header of zip file
- * @param headerOffset - offset in the archive where header starts
- * @param buffer - buffer containing whole array
- * @returns Info from the header
- */
-const parseZipCDFileHeader = async (headerOffset, buffer) => {
-    if (Buffer.from(await buffer.slice(headerOffset, headerOffset + 4n)).compare(Buffer.from(exports.signature)) !== 0) {
-        return null;
-    }
-    let compressedSize = BigInt(await buffer.getUint32(headerOffset + CD_COMPRESSED_SIZE_OFFSET));
-    let uncompressedSize = BigInt(await buffer.getUint32(headerOffset + CD_UNCOMPRESSED_SIZE_OFFSET));
-    const extraFieldLength = await buffer.getUint16(headerOffset + CD_EXTRA_FIELD_LENGTH_OFFSET);
-    const fileNameLength = await buffer.getUint16(headerOffset + CD_FILE_NAME_LENGTH_OFFSET);
-    const fileName = new TextDecoder().decode(await buffer.slice(headerOffset + CD_FILE_NAME_OFFSET, headerOffset + CD_FILE_NAME_OFFSET + BigInt(fileNameLength)));
-    const extraOffset = headerOffset + CD_FILE_NAME_OFFSET + BigInt(fileNameLength);
-    const oldFormatOffset = await buffer.getUint32(headerOffset + CD_LOCAL_HEADER_OFFSET_OFFSET);
-    let fileDataOffset = BigInt(oldFormatOffset);
-    let offsetInZip64Data = 4n;
-    // looking for info that might be also be in zip64 extra field
-    if (uncompressedSize === BigInt(0xffffffff)) {
-        uncompressedSize = await buffer.getBigUint64(extraOffset + offsetInZip64Data);
-        offsetInZip64Data += 8n;
-    }
-    if (compressedSize === BigInt(0xffffffff)) {
-        compressedSize = await buffer.getBigUint64(extraOffset + offsetInZip64Data);
-        offsetInZip64Data += 8n;
-    }
-    if (fileDataOffset === BigInt(0xffffffff)) {
-        fileDataOffset = await buffer.getBigUint64(extraOffset + offsetInZip64Data); // setting it to the one from zip64
-    }
-    const localHeaderOffset = fileDataOffset;
-    return {
-        compressedSize,
-        uncompressedSize,
-        extraFieldLength,
-        fileNameLength,
-        fileName,
-        extraOffset,
-        localHeaderOffset
-    };
-};
-exports.parseZipCDFileHeader = parseZipCDFileHeader;
-/**
- * Create iterator over files of zip archive
- * @param fileProvider - file provider that provider random access to the file
- */
-async function* zipCDFileHeaderGenerator(fileProvider) {
-    const { cdStartOffset } = await (0, end_of_central_directory_1.parseEoCDRecord)(fileProvider);
-    let cdHeader = await (0, exports.parseZipCDFileHeader)(cdStartOffset, fileProvider);
-    while (cdHeader) {
-        yield cdHeader;
-        cdHeader = await (0, exports.parseZipCDFileHeader)(cdHeader.extraOffset + BigInt(cdHeader.extraFieldLength), fileProvider);
-    }
-}
-exports.zipCDFileHeaderGenerator = zipCDFileHeaderGenerator;
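zipCDFileHeaderGenerator is the building block the hash utility earlier in this diff relies on: it resolves the central directory start via parseEoCDRecord and then yields one parsed, zip64-aware header per entry until the signature check fails. A minimal sketch, again assuming a FileProvider instance:

  import {zipCDFileHeaderGenerator} from '@loaders.gl/zip';

  async function listEntries(fileProvider: any) {
    for await (const cdHeader of zipCDFileHeaderGenerator(fileProvider)) {
      // fileName plus zip64-aware sizes and the entry's localHeaderOffset
      console.log(cdHeader.fileName, cdHeader.uncompressedSize, cdHeader.localHeaderOffset);
    }
  }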
package/dist/parse-zip/end-of-central-directory.js
DELETED

@@ -1,40 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.parseEoCDRecord = void 0;
-const search_from_the_end_1 = require("./search-from-the-end");
-const eoCDSignature = [0x50, 0x4b, 0x05, 0x06];
-const zip64EoCDLocatorSignature = Buffer.from([0x50, 0x4b, 0x06, 0x07]);
-const zip64EoCDSignature = Buffer.from([0x50, 0x4b, 0x06, 0x06]);
-// offsets accroding to https://en.wikipedia.org/wiki/ZIP_(file_format)
-const CD_RECORDS_NUMBER_OFFSET = 8n;
-const CD_START_OFFSET_OFFSET = 16n;
-const ZIP64_EOCD_START_OFFSET_OFFSET = 8n;
-const ZIP64_CD_RECORDS_NUMBER_OFFSET = 24n;
-const ZIP64_CD_START_OFFSET_OFFSET = 48n;
-/**
- * Parses end of central directory record of zip file
- * @param fileProvider - FileProvider instance
- * @returns Info from the header
- */
-const parseEoCDRecord = async (fileProvider) => {
-    const zipEoCDOffset = await (0, search_from_the_end_1.searchFromTheEnd)(fileProvider, eoCDSignature);
-    let cdRecordsNumber = BigInt(await fileProvider.getUint16(zipEoCDOffset + CD_RECORDS_NUMBER_OFFSET));
-    let cdStartOffset = BigInt(await fileProvider.getUint32(zipEoCDOffset + CD_START_OFFSET_OFFSET));
-    if (cdStartOffset === BigInt(0xffffffff) || cdRecordsNumber === BigInt(0xffffffff)) {
-        const zip64EoCDLocatorOffset = zipEoCDOffset - 20n;
-        if (Buffer.from(await fileProvider.slice(zip64EoCDLocatorOffset, zip64EoCDLocatorOffset + 4n)).compare(zip64EoCDLocatorSignature) !== 0) {
-            throw new Error('zip64 EoCD locator not found');
-        }
-        const zip64EoCDOffset = await fileProvider.getBigUint64(zip64EoCDLocatorOffset + ZIP64_EOCD_START_OFFSET_OFFSET);
-        if (Buffer.from(await fileProvider.slice(zip64EoCDOffset, zip64EoCDOffset + 4n)).compare(zip64EoCDSignature) !== 0) {
-            throw new Error('zip64 EoCD not found');
-        }
-        cdRecordsNumber = await fileProvider.getBigUint64(zip64EoCDOffset + ZIP64_CD_RECORDS_NUMBER_OFFSET);
-        cdStartOffset = await fileProvider.getBigUint64(zip64EoCDOffset + ZIP64_CD_START_OFFSET_OFFSET);
-    }
-    return {
-        cdRecordsNumber,
-        cdStartOffset
-    };
-};
-exports.parseEoCDRecord = parseEoCDRecord;
package/dist/parse-zip/local-file-header.js
DELETED

@@ -1,55 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.parseZipLocalFileHeader = exports.signature = void 0;
-// offsets accroding to https://en.wikipedia.org/wiki/ZIP_(file_format)
-const COMPRESSION_METHOD_OFFSET = 8n;
-const COMPRESSED_SIZE_OFFSET = 18n;
-const UNCOMPRESSED_SIZE_OFFSET = 22n;
-const FILE_NAME_LENGTH_OFFSET = 26n;
-const EXTRA_FIELD_LENGTH_OFFSET = 28n;
-const FILE_NAME_OFFSET = 30n;
-exports.signature = Buffer.from([0x50, 0x4b, 0x03, 0x04]);
-/**
- * Parses local file header of zip file
- * @param headerOffset - offset in the archive where header starts
- * @param buffer - buffer containing whole array
- * @returns Info from the header
- */
-const parseZipLocalFileHeader = async (headerOffset, buffer) => {
-    if (Buffer.from(await buffer.slice(headerOffset, headerOffset + 4n)).compare(exports.signature) !== 0) {
-        return null;
-    }
-    const fileNameLength = await buffer.getUint16(headerOffset + FILE_NAME_LENGTH_OFFSET);
-    const fileName = new TextDecoder()
-        .decode(await buffer.slice(headerOffset + FILE_NAME_OFFSET, headerOffset + FILE_NAME_OFFSET + BigInt(fileNameLength)))
-        .split('\\')
-        .join('/');
-    const extraFieldLength = await buffer.getUint16(headerOffset + EXTRA_FIELD_LENGTH_OFFSET);
-    let fileDataOffset = headerOffset + FILE_NAME_OFFSET + BigInt(fileNameLength + extraFieldLength);
-    const compressionMethod = await buffer.getUint16(headerOffset + COMPRESSION_METHOD_OFFSET);
-    let compressedSize = BigInt(await buffer.getUint32(headerOffset + COMPRESSED_SIZE_OFFSET)); // add zip 64 logic
-    let uncompressedSize = BigInt(await buffer.getUint32(headerOffset + UNCOMPRESSED_SIZE_OFFSET)); // add zip 64 logic
-    const extraOffset = headerOffset + FILE_NAME_OFFSET + BigInt(fileNameLength);
-    let offsetInZip64Data = 4n;
-    // looking for info that might be also be in zip64 extra field
-    if (uncompressedSize === BigInt(0xffffffff)) {
-        uncompressedSize = await buffer.getBigUint64(extraOffset + offsetInZip64Data);
-        offsetInZip64Data += 8n;
-    }
-    if (compressedSize === BigInt(0xffffffff)) {
-        compressedSize = await buffer.getBigUint64(extraOffset + offsetInZip64Data);
-        offsetInZip64Data += 8n;
-    }
-    if (fileDataOffset === BigInt(0xffffffff)) {
-        fileDataOffset = await buffer.getBigUint64(extraOffset + offsetInZip64Data); // setting it to the one from zip64
-    }
-    return {
-        fileNameLength,
-        fileName,
-        extraFieldLength,
-        fileDataOffset,
-        compressedSize,
-        compressionMethod
-    };
-};
-exports.parseZipLocalFileHeader = parseZipLocalFileHeader;
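parseZipLocalFileHeader is the counterpart used when actually reading an entry: given the localHeaderOffset taken from a central-directory record, it reports where the compressed payload starts and how long it is. A sketch of slicing out the still-compressed payload, assuming the same FileProvider:

  import {parseZipLocalFileHeader} from '@loaders.gl/zip';

  async function readCompressedData(fileProvider: any, localHeaderOffset: bigint) {
    const header = await parseZipLocalFileHeader(localHeaderOffset, fileProvider);
    if (!header) {
      throw new Error('not a local file header');
    }
    // Raw payload; inflating it (compressionMethod 8 = deflate) is a separate step
    return fileProvider.slice(header.fileDataOffset, header.fileDataOffset + header.compressedSize);
  }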
package/dist/parse-zip/search-from-the-end.js
DELETED

@@ -1,31 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.searchFromTheEnd = void 0;
-/**
- * looking for the last occurrence of the provided
- * @param file
- * @param target
- * @returns
- */
-const searchFromTheEnd = async (file, target) => {
-    const searchWindow = [
-        await file.getUint8(file.length - 1n),
-        await file.getUint8(file.length - 2n),
-        await file.getUint8(file.length - 3n),
-        undefined
-    ];
-    let targetOffset = 0n;
-    // looking for the last record in the central directory
-    for (let i = file.length - 4n; i > -1; i--) {
-        searchWindow[3] = searchWindow[2];
-        searchWindow[2] = searchWindow[1];
-        searchWindow[1] = searchWindow[0];
-        searchWindow[0] = await file.getUint8(i);
-        if (searchWindow.every((val, index) => val === target[index])) {
-            targetOffset = i;
-            break;
-        }
-    }
-    return targetOffset;
-};
-exports.searchFromTheEnd = searchFromTheEnd;
package/dist/tar-builder.js
DELETED
@@ -1,38 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const tar_1 = __importDefault(require("./lib/tar/tar"));
-const TAR_BUILDER_OPTIONS = {
-    recordsPerBlock: 20
-};
-/**
- * Build a tar file by adding files
- */
-class TARBuilder {
-    static get properties() {
-        return {
-            id: 'tar',
-            name: 'TAR',
-            extensions: ['tar'],
-            mimeTypes: ['application/x-tar'],
-            builder: TARBuilder,
-            options: TAR_BUILDER_OPTIONS
-        };
-    }
-    constructor(options) {
-        this.count = 0;
-        this.options = { ...TAR_BUILDER_OPTIONS, ...options };
-        this.tape = new tar_1.default(this.options.recordsPerBlock);
-    }
-    /** Adds a file to the archive. */
-    addFile(filename, buffer) {
-        this.tape.append(filename, new Uint8Array(buffer));
-        this.count++;
-    }
-    async build() {
-        return new Response(this.tape.save()).arrayBuffer();
-    }
-}
-exports.default = TARBuilder;